context
stringlengths
2.52k
185k
gt
stringclasses
1 value
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */
namespace NPOI.SS.Util
{
    using System;
    using System.Drawing;
    using System.IO;
    using NPOI.HSSF.UserModel;
    using NPOI.SS.UserModel;
    using NPOI.Util;

    /**
     * Helpers for measuring images and sizing picture anchors on a sheet.
     *
     * @author Yegor Kozlov
     */
    public class ImageUtils
    {
        private static POILogger logger = POILogFactory.GetLogger(typeof(ImageUtils));

        // Assumed screen resolution used when an image reports no DPI of its own.
        public static int PIXEL_DPI = 96;

        /**
         * Measure an image and return its size in pixels, normalized to 96 DPI.
         *
         * The stream is decoded with System.Drawing; the reported horizontal and
         * vertical resolutions are used to rescale the raw pixel dimensions so
         * that e.g. a 300-DPI image maps to its on-screen (96 DPI) size.
         *
         * @param is1 stream containing the image data; read but not closed here
         * @return the image dimension scaled to 96 DPI
         */
        public static Size GetImageDimension(Stream is1)
        {
            using (Image img = Image.FromStream(is1))
            {
                int[] dpi = GetResolution(img);

                // If DPI is zero then assume the standard 96 DPI,
                // since we cannot divide by zero below.
                if (dpi[0] == 0) dpi[0] = PIXEL_DPI;
                if (dpi[1] == 0) dpi[1] = PIXEL_DPI;

                Size size = new Size();
                // Integer arithmetic: width * 96 / dpi truncates toward zero.
                size.Width = img.Width * PIXEL_DPI / dpi[0];
                size.Height = img.Height * PIXEL_DPI / dpi[1];
                return size;
            }
        }

        /**
         * Return the dimension of this image, in pixels at 96 DPI.
         *
         * Only raster formats that System.Drawing can decode are measured;
         * for any other picture type an empty (0,0) size is returned and a
         * warning is logged.
         *
         * @param is1 the stream containing the image data
         * @param type type of the picture: {@link NPOI.SS.UserModel.Workbook#PICTURE_TYPE_JPEG},
         *   {@link NPOI.SS.UserModel.Workbook#PICTURE_TYPE_PNG} or {@link NPOI.SS.UserModel.Workbook#PICTURE_TYPE_DIB}
         *
         * @return image dimension in pixels, or (0,0) for unsupported types
         */
        public static Size GetImageDimension(Stream is1, PictureType type)
        {
            Size size = new Size();
            switch (type)
            {
                case PictureType.JPEG:
                case PictureType.PNG:
                case PictureType.DIB:
                    // We can calculate the preferred size only for JPEG, PNG and BMP;
                    // other formats like WMF, EMF and PICT are not supported here.
                    using (Image img = Image.FromStream(is1))
                    {
                        int[] dpi = GetResolution(img);
                        // If DPI is zero then assume standard 96 DPI,
                        // since we cannot divide by zero.
                        if (dpi[0] == 0) dpi[0] = PIXEL_DPI;
                        if (dpi[1] == 0) dpi[1] = PIXEL_DPI;
                        size.Width = img.Width * PIXEL_DPI / dpi[0];
                        size.Height = img.Height * PIXEL_DPI / dpi[1];
                        return size;
                    }
                default:
                    logger.Log(POILogger.WARN, "Only JPEG, PNG and DIB pictures can be automatically sized");
                    break;
            }
            // Unsupported type: falls through with the default (0,0) size.
            return size;
        }

        /**
         * The metadata of PNG and JPEG can contain the width of a pixel in millimeters.
         * Return the the "effective" dpi calculated as <code>25.4/HorizontalPixelSize</code>
         * and <code>25.4/VerticalPixelSize</code>.  Where 25.4 is the number of mm in inch.
         *
         * Here the values come straight from System.Drawing's resolution
         * properties, truncated to integers.
         *
         * @return array of two elements: <code>{horisontalPdi, verticalDpi}</code>.
         *   {96, 96} is the default.
         */
        public static int[] GetResolution(Image r)
        {
            return new int[] { (int)r.HorizontalResolution, (int)r.VerticalResolution };
        }

        /**
         * Calculate and Set the preferred size (anchor) for this picture.
         *
         * Walks columns from Col1 and rows from Row1, accumulating cell widths
         * and heights (in pixels) until the scaled image size is covered, then
         * writes Col2/Dx2 and Row2/Dy2 back into the picture's client anchor.
         * HSSF anchors express cell offsets in 1/1024 of a column width and
         * 1/256 of a row height; other (XSSF-style) anchors use EMUs.
         *
         * @param picture the picture whose client anchor is updated in place
         * @param scaleX the amount by which image width is multiplied relative to the original width;
         *   Double.MaxValue means "use the image's own pixel width"
         * @param scaleY the amount by which image height is multiplied relative to the original height;
         *   Double.MaxValue means "use the image's own pixel height"
         * @return the new Dimensions of the scaled picture in EMUs
         */
        public static Size SetPreferredSize(IPicture picture, double scaleX, double scaleY)
        {
            IClientAnchor anchor = picture.ClientAnchor;
            bool isHSSF = (anchor is HSSFClientAnchor);
            IPictureData data = picture.PictureData;
            ISheet sheet = picture.Sheet;

            // in pixel
            Size imgSize = GetImageDimension(new MemoryStream(data.Data), data.PictureType);
            // in emus
            Size anchorSize = ImageUtils.GetDimensionFromAnchor(picture);
            // NOTE(review): anchorSize.Width / Units.EMU_PER_PIXEL is evaluated
            // before the multiply; if EMU_PER_PIXEL is an integer constant this
            // is integer division (truncating) — confirm this matches intent.
            double scaledWidth = (scaleX == Double.MaxValue) ?
                imgSize.Width : anchorSize.Width / Units.EMU_PER_PIXEL * scaleX;
            double scaledHeight = (scaleY == Double.MaxValue) ?
                imgSize.Height : anchorSize.Height / Units.EMU_PER_PIXEL * scaleY;

            double w = 0;
            int col2 = anchor.Col1;
            int dx2 = 0;

            // Space remaining in the leftmost cell after the Dx1 offset.
            w = sheet.GetColumnWidthInPixels(col2++);
            if (isHSSF)
            {
                // HSSF: Dx1 is a fraction of the column width in 1/1024 units.
                w *= 1d - anchor.Dx1 / 1024d;
            }
            else
            {
                // XSSF-style: Dx1 is an absolute offset in EMUs.
                w -= anchor.Dx1 / (double)Units.EMU_PER_PIXEL;
            }

            // Consume whole columns until the scaled width is covered.
            while (w < scaledWidth)
            {
                w += sheet.GetColumnWidthInPixels(col2++);
            }

            if (w > scaledWidth)
            {
                // Calculate dx2, the offset in the rightmost cell.
                double cw = sheet.GetColumnWidthInPixels(--col2);
                double delta = w - scaledWidth;
                if (isHSSF)
                {
                    dx2 = (int)((cw - delta) / cw * 1024);
                }
                else
                {
                    dx2 = (int)((cw - delta) * Units.EMU_PER_PIXEL);
                }
                if (dx2 < 0) dx2 = 0;
            }
            anchor.Col2 = (/*setter*/col2);
            anchor.Dx2 = (/*setter*/dx2);

            double h = 0;
            int row2 = anchor.Row1;
            int dy2 = 0;

            // Space remaining in the topmost row after the Dy1 offset.
            h = GetRowHeightInPixels(sheet, row2++);
            if (isHSSF)
            {
                // HSSF: Dy1 is a fraction of the row height in 1/256 units.
                h *= 1 - anchor.Dy1 / 256d;
            }
            else
            {
                h -= anchor.Dy1 / (double)Units.EMU_PER_PIXEL;
            }

            // Consume whole rows until the scaled height is covered.
            while (h < scaledHeight)
            {
                h += GetRowHeightInPixels(sheet, row2++);
            }

            if (h > scaledHeight)
            {
                // Calculate dy2, the offset in the bottom row.
                double ch = GetRowHeightInPixels(sheet, --row2);
                double delta = h - scaledHeight;
                if (isHSSF)
                {
                    dy2 = (int)((ch - delta) / ch * 256);
                }
                else
                {
                    dy2 = (int)((ch - delta) * Units.EMU_PER_PIXEL);
                }
                if (dy2 < 0) dy2 = 0;
            }
            anchor.Row2 = (/*setter*/row2);
            anchor.Dy2 = (/*setter*/dy2);

            Size dim = new Size(
                (int)Math.Round(scaledWidth * Units.EMU_PER_PIXEL),
                (int)Math.Round(scaledHeight * Units.EMU_PER_PIXEL)
            );

            return dim;
        }

        /**
         * Calculates the dimensions in EMUs for the anchor of the given picture.
         *
         * Sums the pixel widths of columns Col1..Col2 and heights of rows
         * Row1..Row2, adjusting the first and last cell by the anchor's
         * Dx1/Dy1 and Dx2/Dy2 offsets (1/1024 and 1/256 fractions for HSSF,
         * EMUs otherwise), then converts the totals to EMUs.
         *
         * @param picture the picture Containing the anchor
         * @return the dimensions in EMUs
         */
        public static Size GetDimensionFromAnchor(IPicture picture)
        {
            IClientAnchor anchor = picture.ClientAnchor;
            bool isHSSF = (anchor is HSSFClientAnchor);
            ISheet sheet = picture.Sheet;

            double w = 0;
            int col2 = anchor.Col1;

            // Space in the leftmost cell, minus the Dx1 offset.
            w = sheet.GetColumnWidthInPixels(col2++);
            if (isHSSF)
            {
                w *= 1 - anchor.Dx1 / 1024d;
            }
            else
            {
                w -= anchor.Dx1 / (double)Units.EMU_PER_PIXEL;
            }

            // Full columns strictly between Col1 and Col2.
            while (col2 < anchor.Col2)
            {
                w += sheet.GetColumnWidthInPixels(col2++);
            }

            // Partial width inside the rightmost cell.
            if (isHSSF)
            {
                w += sheet.GetColumnWidthInPixels(col2) * anchor.Dx2 / 1024d;
            }
            else
            {
                w += anchor.Dx2 / (double)Units.EMU_PER_PIXEL;
            }

            double h = 0;
            int row2 = anchor.Row1;

            // Space in the topmost row, minus the Dy1 offset.
            h = GetRowHeightInPixels(sheet, row2++);
            if (isHSSF)
            {
                h *= 1 - anchor.Dy1 / 256d;
            }
            else
            {
                h -= anchor.Dy1 / (double)Units.EMU_PER_PIXEL;
            }

            // Full rows strictly between Row1 and Row2.
            while (row2 < anchor.Row2)
            {
                h += GetRowHeightInPixels(sheet, row2++);
            }

            // Partial height inside the bottom row.
            if (isHSSF)
            {
                h += GetRowHeightInPixels(sheet, row2) * anchor.Dy2 / 256;
            }
            else
            {
                h += anchor.Dy2 / (double)Units.EMU_PER_PIXEL;
            }

            // Convert the pixel totals to EMUs.
            w *= Units.EMU_PER_PIXEL;
            h *= Units.EMU_PER_PIXEL;

            return new Size((int)Math.Round(w), (int)Math.Round(h));
        }

        // Height of the given row in pixels, falling back to the sheet's
        // default row height when the row does not exist.
        // NOTE(review): if Units.ToEMU returns an integral type, the division
        // by EMU_PER_PIXEL truncates before widening to double — confirm.
        private static double GetRowHeightInPixels(ISheet sheet, int rowNum)
        {
            IRow r = sheet.GetRow(rowNum);
            double points = (r == null) ? sheet.DefaultRowHeightInPoints : r.HeightInPoints;
            return Units.ToEMU(points) / Units.EMU_PER_PIXEL;
        }
    }
}
/*-
 * See the file LICENSE for redistribution information.
 *
 * Copyright (c) 2009, 2013 Oracle and/or its affiliates.  All rights reserved.
 *
 */
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text;
using BerkeleyDB.Internal;

namespace BerkeleyDB {
    /// <summary>
    /// A class representing a HashDatabase. The Hash format is an extensible,
    /// dynamic hashing scheme.
    /// </summary>
    public class HashDatabase : Database {
        // User-supplied callbacks (kept alive so the GC cannot collect them
        // while the native library still holds a pointer to the thunks below).
        private HashFunctionDelegate hashHandler;
        private EntryComparisonDelegate compareHandler;
        private EntryComparisonDelegate dupCompareHandler;
        private BDB_CompareDelegate doCompareRef;
        private BDB_HashDelegate doHashRef;
        private BDB_CompareDelegate doDupCompareRef;

        #region Constructors
        private HashDatabase(DatabaseEnvironment env, uint flags)
            : base(env, flags) { }

        internal HashDatabase(BaseDatabase clone) : base(clone) { }

        /// <summary>
        /// Apply Hash-specific configuration to the underlying DB handle.
        /// Must be called before the database is opened.
        /// </summary>
        private void Config(HashDatabaseConfig cfg) {
            base.Config(cfg);
            /*
             * Database.Config calls set_flags, but that doesn't get the Hash
             * specific flags. No harm in calling it again.
             */
            db.set_flags(cfg.flags);

            if (cfg.HashFunction != null)
                HashFunction = cfg.HashFunction;
            // The duplicate comparison function cannot change after open.
            if (cfg.DuplicateCompare != null)
                DupCompare = cfg.DuplicateCompare;
            if (cfg.fillFactorIsSet)
                db.set_h_ffactor(cfg.FillFactor);
            if (cfg.nelemIsSet)
                db.set_h_nelem(cfg.TableSize);
            if (cfg.HashComparison != null)
                Compare = cfg.HashComparison;
        }

        /// <summary>
        /// Instantiate a new HashDatabase object and open the database
        /// represented by <paramref name="Filename"/>.
        /// </summary>
        /// <remarks>
        /// If <paramref name="Filename"/> is null, the database is strictly
        /// temporary and can only be accessed through the single database
        /// object that created it. If <see cref="DatabaseConfig.AutoCommit"/>
        /// is set, the open is implicitly transaction protected.
        /// </remarks>
        /// <param name="Filename">
        /// The name of the file backing the database, or null for an
        /// in-memory database.
        /// </param>
        /// <param name="cfg">The database's configuration</param>
        /// <returns>A new, open database object</returns>
        public static HashDatabase Open(
            string Filename, HashDatabaseConfig cfg) {
            return Open(Filename, null, cfg, null);
        }

        /// <summary>
        /// Instantiate a new HashDatabase object and open the database
        /// represented by <paramref name="Filename"/> and
        /// <paramref name="DatabaseName"/>.
        /// </summary>
        /// <remarks>
        /// If both <paramref name="Filename"/> and
        /// <paramref name="DatabaseName"/> are null, the database is strictly
        /// temporary and can only be accessed through the single database
        /// object that created it. If <paramref name="Filename"/> is null and
        /// <paramref name="DatabaseName"/> is non-null, the database can be
        /// opened by other threads of control and may be replicated. If
        /// <see cref="DatabaseConfig.AutoCommit"/> is set, the open is
        /// implicitly transaction protected.
        /// </remarks>
        /// <param name="Filename">
        /// The name of the file backing the database, or null for an
        /// in-memory database.
        /// </param>
        /// <param name="DatabaseName">
        /// Allows multiple databases in a single file. It is an error to open
        /// a second database in a file that was not initially created using a
        /// database name.
        /// </param>
        /// <param name="cfg">The database's configuration</param>
        /// <returns>A new, open database object</returns>
        public static HashDatabase Open(
            string Filename, string DatabaseName, HashDatabaseConfig cfg) {
            return Open(Filename, DatabaseName, cfg, null);
        }

        /// <summary>
        /// Instantiate a new HashDatabase object and open the database
        /// represented by <paramref name="Filename"/>.
        /// </summary>
        /// <remarks>
        /// If <paramref name="Filename"/> is null, the database is strictly
        /// temporary and can only be accessed through the single database
        /// object that created it. If <paramref name="txn"/> is null but
        /// <see cref="DatabaseConfig.AutoCommit"/> is set, the open is
        /// implicitly transaction protected; the transaction must be
        /// committed before the object is closed.
        /// </remarks>
        /// <param name="Filename">
        /// The name of the file backing the database, or null for an
        /// in-memory database.
        /// </param>
        /// <param name="cfg">The database's configuration</param>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <returns>A new, open database object</returns>
        public static HashDatabase Open(
            string Filename, HashDatabaseConfig cfg, Transaction txn) {
            return Open(Filename, null, cfg, txn);
        }

        /// <summary>
        /// Instantiate a new HashDatabase object and open the database
        /// represented by <paramref name="Filename"/> and
        /// <paramref name="DatabaseName"/>.
        /// </summary>
        /// <remarks>
        /// If both <paramref name="Filename"/> and
        /// <paramref name="DatabaseName"/> are null, the database is strictly
        /// temporary and can only be accessed through the single database
        /// object that created it. If <paramref name="Filename"/> is null and
        /// <paramref name="DatabaseName"/> is non-null, the database can be
        /// opened by other threads of control and may be replicated. If
        /// <paramref name="txn"/> is null but
        /// <see cref="DatabaseConfig.AutoCommit"/> is set, the open is
        /// implicitly transaction protected; the transaction must be
        /// committed before the object is closed.
        /// </remarks>
        /// <param name="Filename">
        /// The name of the file backing the database, or null for an
        /// in-memory database.
        /// </param>
        /// <param name="DatabaseName">
        /// Allows multiple databases in a single file. It is an error to open
        /// a second database in a file that was not initially created using a
        /// database name.
        /// </param>
        /// <param name="cfg">The database's configuration</param>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <returns>A new, open database object</returns>
        public static HashDatabase Open(string Filename,
            string DatabaseName, HashDatabaseConfig cfg, Transaction txn) {
            HashDatabase ret = new HashDatabase(cfg.Env, 0);
            ret.Config(cfg);
            ret.db.open(Transaction.getDB_TXN(txn),
                Filename, DatabaseName, DBTYPE.DB_HASH, cfg.openFlags, 0);
            ret.isOpen = true;
            return ret;
        }
        #endregion Constructors

        #region Callbacks
        // Native thunk: unwraps the DBTs and dispatches to the managed
        // duplicate comparison delegate stored on the database handle.
        private static int doDupCompare(
            IntPtr dbp, IntPtr dbt1p, IntPtr dbt2p) {
            DB db = new DB(dbp, false);
            DatabaseEntry entry1 = DatabaseEntry.fromDBT(new DBT(dbt1p, false));
            DatabaseEntry entry2 = DatabaseEntry.fromDBT(new DBT(dbt2p, false));
            return ((HashDatabase)(db.api_internal)).DupCompare(entry1, entry2);
        }

        // Native thunk: copies the key bytes out of unmanaged memory and
        // dispatches to the managed hash delegate.
        private static uint doHash(IntPtr dbp, IntPtr datap, uint len) {
            DB db = new DB(dbp, false);
            byte[] keyBytes = new byte[len];
            Marshal.Copy(datap, keyBytes, 0, (int)len);
            return ((HashDatabase)(db.api_internal)).hashHandler(keyBytes);
        }

        // Native thunk: unwraps the DBTs and dispatches to the managed key
        // comparison delegate.
        private static int doCompare(IntPtr dbp, IntPtr dbtp1, IntPtr dbtp2) {
            DB db = new DB(dbp, false);
            DatabaseEntry entry1 = DatabaseEntry.fromDBT(new DBT(dbtp1, false));
            DatabaseEntry entry2 = DatabaseEntry.fromDBT(new DBT(dbtp2, false));
            return ((HashDatabase)(db.api_internal)).compareHandler(entry1, entry2);
        }
        #endregion Callbacks

        #region Properties
        /// <summary>
        /// The Hash key comparison function, called whenever a key supplied by
        /// the application must be compared with a key stored in the database.
        /// </summary>
        public EntryComparisonDelegate Compare {
            get { return compareHandler; }
            private set {
                if (value == null) {
                    db.set_h_compare(null);
                } else if (compareHandler == null) {
                    // First registration: create the native thunk once and
                    // hand it to the library.
                    if (doCompareRef == null)
                        doCompareRef = new BDB_CompareDelegate(doCompare);
                    db.set_h_compare(doCompareRef);
                }
                compareHandler = value;
            }
        }

        /// <summary>
        /// The duplicate data item comparison function.
        /// </summary>
        public EntryComparisonDelegate DupCompare {
            get { return dupCompareHandler; }
            private set {
                /* Cannot be called after open. */
                if (value == null) {
                    db.set_dup_compare(null);
                } else if (dupCompareHandler == null) {
                    if (doDupCompareRef == null)
                        doDupCompareRef = new BDB_CompareDelegate(doDupCompare);
                    db.set_dup_compare(doDupCompareRef);
                }
                dupCompareHandler = value;
            }
        }

        /// <summary>
        /// Whether the insertion of duplicate data items in the database is
        /// permitted, and whether duplicates items are sorted.
        /// </summary>
        public DuplicatesPolicy Duplicates {
            get {
                uint dbFlags = 0;
                db.get_flags(ref dbFlags);
                if ((dbFlags & DbConstants.DB_DUPSORT) != 0)
                    return DuplicatesPolicy.SORTED;
                if ((dbFlags & DbConstants.DB_DUP) != 0)
                    return DuplicatesPolicy.UNSORTED;
                return DuplicatesPolicy.NONE;
            }
        }

        /// <summary>
        /// The desired density within the hash table.
        /// </summary>
        public uint FillFactor {
            get {
                uint factor = 0;
                db.get_h_ffactor(ref factor);
                return factor;
            }
        }

        /// <summary>
        /// A user-defined hash function; if no hash function is specified, a
        /// default hash function is used.
        /// </summary>
        public HashFunctionDelegate HashFunction {
            get { return hashHandler; }
            private set {
                if (value == null) {
                    db.set_h_hash(null);
                } else if (hashHandler == null) {
                    if (doHashRef == null)
                        doHashRef = new BDB_HashDelegate(doHash);
                    db.set_h_hash(doHashRef);
                }
                hashHandler = value;
            }
        }

        /// <summary>
        /// An estimate of the final size of the hash table.
        /// </summary>
        public uint TableSize {
            get {
                uint nelem = 0;
                db.get_h_nelem(ref nelem);
                return nelem;
            }
        }
        #endregion Properties

        #region Methods
        /// <summary>
        /// Compact the database, and optionally return unused database pages
        /// to the underlying filesystem.
        /// </summary>
        /// <remarks>
        /// In a transactional database the operation is implicitly protected
        /// using multiple transactions, committed periodically to avoid
        /// locking large sections of the tree; deadlocks cause the compaction
        /// to be retried from the last commit.
        /// </remarks>
        /// <param name="cdata">Compact configuration parameters</param>
        /// <returns>
        /// Compact operation statistics, where <see cref="CompactData.End"/>
        /// holds the bucket in which the compaction stopped.
        /// </returns>
        public CompactData Compact(CompactConfig cdata) {
            return Compact(cdata, null);
        }

        /// <summary>
        /// Compact the database, and optionally return unused database pages
        /// to the underlying filesystem.
        /// </summary>
        /// <remarks>
        /// If <paramref name="txn"/> is non-null the operation runs inside
        /// that transaction and large sections of the tree may be locked for
        /// its duration. If it is null but the database is transactional, the
        /// operation is implicitly protected using multiple transactions,
        /// committed periodically; deadlocks cause the compaction to be
        /// retried from the last commit.
        /// </remarks>
        /// <param name="cdata">Compact configuration parameters</param>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <returns>
        /// Compact operation statistics, where <see cref="CompactData.End"/>
        /// holds the bucket in which the compaction stopped.
        /// </returns>
        public CompactData Compact(CompactConfig cdata, Transaction txn) {
            DatabaseEntry stopPoint = null;
            if (cdata.returnEnd)
                stopPoint = new DatabaseEntry();

            db.compact(Transaction.getDB_TXN(txn), cdata.start, cdata.stop,
                CompactConfig.getDB_COMPACT(cdata), cdata.flags, stopPoint);
            return new CompactData(CompactConfig.getDB_COMPACT(cdata), stopPoint);
        }

        /// <summary>
        /// Create a database cursor.
        /// </summary>
        /// <returns>A newly created cursor</returns>
        public new HashCursor Cursor() {
            return Cursor(new CursorConfig(), null);
        }

        /// <summary>
        /// Create a database cursor with the given configuration.
        /// </summary>
        /// <param name="cfg">
        /// The configuration properties for the cursor.
        /// </param>
        /// <returns>A newly created cursor</returns>
        public new HashCursor Cursor(CursorConfig cfg) {
            return Cursor(cfg, null);
        }

        /// <summary>
        /// Create a transactionally protected database cursor.
        /// </summary>
        /// <param name="txn">
        /// The transaction context in which the cursor may be used.
        /// </param>
        /// <returns>A newly created cursor</returns>
        public new HashCursor Cursor(Transaction txn) {
            return Cursor(new CursorConfig(), txn);
        }

        /// <summary>
        /// Create a transactionally protected database cursor with the given
        /// configuration.
        /// </summary>
        /// <param name="cfg">
        /// The configuration properties for the cursor.
        /// </param>
        /// <param name="txn">
        /// The transaction context in which the cursor may be used.
        /// </param>
        /// <returns>A newly created cursor</returns>
        public new HashCursor Cursor(CursorConfig cfg, Transaction txn) {
            var dbc = db.cursor(Transaction.getDB_TXN(txn), cfg.flags);
            if (cfg.Priority == CachePriority.DEFAULT)
                return new HashCursor(dbc, Pagesize);
            return new HashCursor(dbc, Pagesize, cfg.Priority);
        }

        /// <summary>
        /// Return the database statistical information which does not require
        /// traversal of the database.
        /// </summary>
        /// <returns>
        /// The database statistical information which does not require
        /// traversal of the database.
        /// </returns>
        public HashStats FastStats() {
            return Stats(null, true, Isolation.DEGREE_THREE);
        }

        /// <summary>
        /// Return the database statistical information which does not require
        /// traversal of the database.
        /// </summary>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <returns>
        /// The database statistical information which does not require
        /// traversal of the database.
        /// </returns>
        public HashStats FastStats(Transaction txn) {
            return Stats(txn, true, Isolation.DEGREE_THREE);
        }

        /// <summary>
        /// Return the database statistical information which does not require
        /// traversal of the database.
        /// </summary>
        /// <overloads>
        /// <para>
        /// Among other things, this method makes it possible for applications
        /// to request key and record counts without incurring the performance
        /// penalty of traversing the entire database.
        /// </para>
        /// <para>
        /// The statistical information is described by the
        /// <see cref="BTreeStats"/>, <see cref="HashStats"/>,
        /// <see cref="QueueStats"/>, and <see cref="RecnoStats"/> classes.
        /// </para>
        /// </overloads>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <param name="isoDegree">
        /// The level of isolation for database reads.
        /// <see cref="Isolation.DEGREE_ONE"/> is silently ignored for
        /// databases which did not specify
        /// <see cref="DatabaseConfig.ReadUncommitted"/>.
        /// </param>
        /// <returns>
        /// The database statistical information which does not require
        /// traversal of the database.
        /// </returns>
        public HashStats FastStats(Transaction txn, Isolation isoDegree) {
            return Stats(txn, true, isoDegree);
        }

        /// <summary>
        /// Return pages to the filesystem that are already free and at the
        /// end of the file.
        /// </summary>
        /// <returns>
        /// The number of database pages returned to the filesystem
        /// </returns>
        public uint TruncateUnusedPages() {
            return TruncateUnusedPages(null);
        }

        /// <summary>
        /// Return pages to the filesystem that are already free and at the
        /// end of the file.
        /// </summary>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <returns>
        /// The number of database pages returned to the filesystem
        /// </returns>
        public uint TruncateUnusedPages(Transaction txn) {
            DB_COMPACT cdata = new DB_COMPACT();
            db.compact(Transaction.getDB_TXN(txn),
                null, null, cdata, DbConstants.DB_FREELIST_ONLY, null);
            return cdata.compact_pages_truncated;
        }

        /// <summary>
        /// Store the key/data pair in the database only if it does not
        /// already appear in the database.
        /// </summary>
        /// <param name="key">The key to store in the database</param>
        /// <param name="data">The data item to store in the database</param>
        public void PutNoDuplicate(DatabaseEntry key, DatabaseEntry data) {
            PutNoDuplicate(key, data, null);
        }

        /// <summary>
        /// Store the key/data pair in the database only if it does not
        /// already appear in the database.
        /// </summary>
        /// <param name="key">The key to store in the database</param>
        /// <param name="data">The data item to store in the database</param>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        public void PutNoDuplicate(
            DatabaseEntry key, DatabaseEntry data, Transaction txn) {
            Put(key, data, txn, DbConstants.DB_NODUPDATA);
        }

        /// <summary>
        /// Return the database statistical information for this database.
        /// </summary>
        /// <returns>Database statistical information.</returns>
        public HashStats Stats() {
            return Stats(null, false, Isolation.DEGREE_THREE);
        }

        /// <summary>
        /// Return the database statistical information for this database.
        /// </summary>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <returns>Database statistical information.</returns>
        public HashStats Stats(Transaction txn) {
            return Stats(txn, false, Isolation.DEGREE_THREE);
        }

        /// <summary>
        /// Return the database statistical information for this database.
        /// </summary>
        /// <overloads>
        /// The statistical information is described by
        /// <see cref="BTreeStats"/>.
        /// </overloads>
        /// <param name="txn">
        /// A Transaction from
        /// <see cref="DatabaseEnvironment.BeginTransaction"/>, a handle from
        /// <see cref="DatabaseEnvironment.BeginCDSGroup"/>, or null.
        /// </param>
        /// <param name="isoDegree">
        /// The level of isolation for database reads.
        /// <see cref="Isolation.DEGREE_ONE"/> is silently ignored for
        /// databases which did not specify
        /// <see cref="DatabaseConfig.ReadUncommitted"/>.
        /// </param>
        /// <returns>Database statistical information.</returns>
        public HashStats Stats(Transaction txn, Isolation isoDegree) {
            return Stats(txn, false, isoDegree);
        }

        // Shared implementation behind Stats/FastStats: translates the
        // fast-stat flag and isolation degree into native flag bits.
        private HashStats Stats(
            Transaction txn, bool fast, Isolation isoDegree) {
            uint flags = 0;
            flags |= fast ? DbConstants.DB_FAST_STAT : 0;
            if (isoDegree == Isolation.DEGREE_ONE)
                flags |= DbConstants.DB_READ_UNCOMMITTED;
            else if (isoDegree == Isolation.DEGREE_TWO)
                flags |= DbConstants.DB_READ_COMMITTED;

            HashStatStruct st =
                db.stat_hash(Transaction.getDB_TXN(txn), flags);
            return new HashStats(st);
        }
        #endregion Methods
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using CppSharp.AST.Extensions;

namespace CppSharp.AST
{
    // Extension helpers for querying Class declarations in the AST:
    // overload sets, base-class lookups, and template specializations.
    public static class ClassExtensions
    {
        // Returns the overload set of a function within this class: all
        // operators of the same kind for operators, otherwise all methods
        // sharing the function's name.
        public static IEnumerable<Function> GetFunctionOverloads(this Class @class,
            Function function)
        {
            if (function.IsOperator)
                return @class.FindOperator(function.OperatorKind);
            return @class.Methods.Where(method => method.Name == function.Name);
        }

        // Lazily yields func's results for this class, then recursively for
        // every base class (depth-first over the Bases list).
        public static IEnumerable<T> FindHierarchy<T>(this Class @class,
            Func<Class, IEnumerable<T>> func)
            where T : Declaration
        {
            foreach (var elem in func(@class))
                yield return elem;

            foreach (var @base in @class.Bases)
            {
                if (!@base.IsClass) continue;
                foreach (var elem in @base.Class.FindHierarchy(func))
                    yield return elem;
            }
        }

        // Walks up the BaseClass chain and returns the topmost class that
        // still has a non-ignored base (or the starting class itself).
        public static Class GetNonIgnoredRootBase(this Class @class)
        {
            while (true)
            {
                if (!@class.HasNonIgnoredBase || @class.BaseClass == null)
                    return @class;

                @class = @class.BaseClass;
            }
        }

        // Finds the method in the nearest (deepest-first, via recursion) base
        // class that @override can override; null if there is no base class
        // or the base is an interface.
        public static Method GetBaseMethod(this Class @class, Method @override)
        {
            if (@class.BaseClass == null || @class.BaseClass.IsInterface)
                return null;

            // Prefer the original (pre-transformation) class when present.
            var baseClass = @class.BaseClass.OriginalClass ?? @class.BaseClass;
            Method baseMethod = baseClass.GetBaseMethod(@override);
            if (baseMethod != null)
                return baseMethod;

            var methods = baseClass.Methods.Concat(baseClass.Declarations.OfType<Method>());
            return methods.FirstOrDefault(@override.CanOverride);
        }

        // Finds the property that @override overrides, matching by original
        // name and parameter types. getTopmost controls whether the search
        // recurses before (false: deepest match wins) or after (true: the
        // closest base's own match wins) checking each base's own properties.
        public static Property GetBaseProperty(this Class @class, Property @override,
            bool onlyFirstBase = false, bool getTopmost = false)
        {
            foreach (var @base in @class.Bases)
            {
                // Skip non-class bases, self-referential original classes and,
                // when restricted to the primary base, interface bases.
                if (!@base.IsClass || @base.Class.OriginalClass == @class ||
                    (onlyFirstBase && @base.Class.IsInterface))
                    continue;

                Property baseProperty;
                if (!getTopmost)
                {
                    baseProperty = @base.Class.GetBaseProperty(@override, onlyFirstBase);
                    if (baseProperty != null)
                        return baseProperty;
                }

                var properties = @base.Class.Properties.Concat(@base.Class.Declarations.OfType<Property>());
                baseProperty = (from property in properties
                    where property.OriginalName == @override.OriginalName &&
                        property.Parameters.SequenceEqual(@override.Parameters,
                            ParameterTypeComparer.Instance)
                    select property).FirstOrDefault();
                if (baseProperty != null)
                    return baseProperty;

                if (getTopmost)
                {
                    baseProperty = @base.Class.GetBaseProperty(@override, onlyFirstBase, true);
                    if (baseProperty != null)
                        return baseProperty;
                }
            }
            return null;
        }

        // True when the primary base chain declares a non-pure, non-interface
        // property matching the given one.
        public static bool HasNonAbstractBasePropertyInPrimaryBase(this Class @class,
            Property property)
        {
            var baseProperty = @class.GetBaseProperty(property, true, true);
            return baseProperty != null && !baseProperty.IsPure &&
                !((Class) baseProperty.OriginalNamespace).IsInterface;
        }

        // Looks up a property by name in this class, then recursively in all
        // declared class bases; null when not found.
        public static Property GetPropertyByName(this Class @class, string propertyName)
        {
            Property property = @class.Properties.FirstOrDefault(m => m.Name == propertyName);
            if (property != null)
                return property;

            foreach (var baseClassSpecifier in @class.Bases.Where(
                b => b.Type.IsClass() && b.Class.IsDeclared))
            {
                property = baseClassSpecifier.Class.GetPropertyByName(propertyName);
                if (property != null)
                    return property;
            }
            return null;
        }

        // Finds the property whose getter or setter is the given method,
        // searching this class first, then class bases recursively.
        public static Property GetPropertyByConstituentMethod(this Class @class,
            Method method)
        {
            var property = @class.Properties.FirstOrDefault(p => p.GetMethod == method);
            if (property != null)
                return property;
            property = @class.Properties.FirstOrDefault(p => p.SetMethod == method);
            if (property != null)
                return property;

            foreach (BaseClassSpecifier @base in @class.Bases.Where(b => b.Type.IsClass()))
            {
                property = @base.Class.GetPropertyByConstituentMethod(method);
                if (property != null)
                    return property;
            }
            return null;
        }

        // True when the first base is a generated ref-type class.
        public static bool HasRefBase(this Class @class)
        {
            Class @base = null;
            if (@class.HasBaseClass)
                @base = @class.Bases[0].Class;

            return @base?.IsRefType == true && @base.IsGenerated;
        }

        // Filters translation units down to those that are generated, valid,
        // and either contain declarations or are system headers.
        public static IEnumerable<TranslationUnit> GetGenerated(this IEnumerable<TranslationUnit> units)
        {
            return units.Where(u => u.IsGenerated && (u.HasDeclarations || u.IsSystemHeader) && u.IsValid);
        }

        // Selects which specializations of a dependent class to generate:
        // all of them when there are none or the layout has a dependent value
        // field; otherwise a single one (the first generated specialization,
        // or just the first).
        // NOTE(review): specializedClasses (an IEnumerable) is enumerated up
        // to three times here (Any/FirstOrDefault/First) — if
        // GetSpecializedClassesOf ever returns a lazy sequence for the
        // IsTemplate branch, consider materializing it once.
        public static IEnumerable<Class> GetSpecializedClassesToGenerate(
            this Class dependentClass)
        {
            IEnumerable<Class> specializedClasses = GetSpecializedClassesOf(dependentClass);
            if (!specializedClasses.Any() || dependentClass.HasDependentValueFieldInLayout())
                return specializedClasses;

            var specializations = new List<Class>();
            var specialization = specializedClasses.FirstOrDefault(s => s.IsGenerated);
            if (specialization == null)
                specializations.Add(specializedClasses.First());
            else
                specializations.Add(specialization);
            return specializations;
        }

        // Returns the specializations of a class: its own when it is a
        // template, otherwise the same-named nested classes inside each
        // specialization of its enclosing template class.
        private static IEnumerable<Class> GetSpecializedClassesOf(this Class dependentClass)
        {
            if (dependentClass.IsTemplate)
                return dependentClass.Specializations;

            Class template = dependentClass.Namespace as Class;
            if (template == null || !template.IsTemplate)
                // just one level of nesting supported for the time being
                return Enumerable.Empty<Class>();

            return template.Specializations.SelectMany(s => s.Classes.Where(
                c => c.Name == dependentClass.Name)).ToList();
        }

        // NOTE(review): definition continues beyond the visible portion of
        // this file; the remainder of GetInterface is not shown here.
        public static Class GetInterface(this Class @class)
        {
            var specialization = @class as ClassTemplateSpecialization;
            Class @interface = null;
            if (specialization
== null) { @interface = @class.Namespace.Classes.Find( c => c.OriginalClass == @class && c.IsInterface); } else { Class template = specialization.TemplatedDecl.TemplatedClass; Class templatedInterface = @class.Namespace.Classes.Find( c => c.OriginalClass == template && c.IsInterface); if (templatedInterface != null) @interface = templatedInterface.Specializations.FirstOrDefault( s => s.OriginalClass == specialization && s.IsInterface); } return @interface; } public static bool HasDependentValueFieldInLayout(this Class @class) { if (@class.Fields.Any(f => IsValueDependent(f.Type))) return true; return @class.Bases.Where(b => b.IsClass).Select( b => b.Class).Any(HasDependentValueFieldInLayout); } private static bool IsValueDependent(Type type) { var desugared = type.Desugar(); if (desugared is TemplateParameterType) return true; var tagType = desugared as TagType; if (tagType?.IsDependent == true) return true; var templateType = desugared as TemplateSpecializationType; if (templateType?.Arguments.Any( a => a.Type.Type?.Desugar().IsDependent == true) == true) return true; var arrayType = desugared as ArrayType; return arrayType != null && IsValueDependent(arrayType.Type); } } }
/*
 Copyright (c) 2005-2006 Tomas Matousek and Martin Maly.

 The use and distribution terms for this software are contained in the file named License.txt,
 which can be found in the root of the Phalanger distribution. By using this software
 in any fashion, you are agreeing to be bound by the terms of this license.

 You must not remove this notice from this software.
*/
using System;
using System.Text;
using System.Data;
using System.Collections;
using System.Collections.Generic;
using PHP.Core;
using System.Diagnostics;

namespace PHP.Library.Data
{
    /// <summary>
    /// Abstract class implementing common functionality of PHP connection resources.
    /// </summary>
    public abstract class PhpDbConnection : PhpResource
    {
        #region Fields & Properties

        /// <summary>
        /// Connection string.
        /// </summary>
        public string/*!*/ ConnectionString { get { return connectionString; } }
        private string/*!*/ connectionString;

        /// <summary>
        /// Underlying database connection.
        /// </summary>
        public IDbConnection/*!*/ Connection { get { return this.connection; } }
        protected IDbConnection/*!*/ connection;

        /// <summary>
        /// A result associated with this connection that possibly has not been closed yet.
        /// </summary>
        protected IDataReader pendingReader;

        /// <summary>
        /// Last result resource.
        /// </summary>
        public PhpDbResult LastResult { get { return lastResult; } }
        private PhpDbResult lastResult;

        /// <summary>
        /// Gets an exception thrown by last performed operation or a <B>null</B> reference
        /// if that operation succeeded.
        /// </summary>
        public Exception LastException { get { return lastException; } }
        private Exception lastException;

        /// <summary>
        /// Gets the number of rows affected by the last query executed on this connection,
        /// or -1 if no result is available.
        /// </summary>
        public int LastAffectedRows
        {
            get
            {
                if (lastResult == null) return -1;

                // SELECT gives -1, UPDATE/INSERT gives the number:
                return (lastResult.RecordsAffected >= 0)
                    ? lastResult.RecordsAffected
                    : lastResult.RowCount;
            }
        }

        #endregion

        /// <summary>
        /// Creates a new instance of <see cref="PhpDbConnection"/> with a specified connection.
        /// </summary>
        /// <param name="connectionString">Connection string.</param>
        /// <param name="connection">Database connection.</param>
        /// <param name="name">Connection resource name.</param>
        /// <exception cref="ArgumentNullException"><paramref name="connection"/> is a <B>null</B> reference.</exception>
        protected PhpDbConnection(string/*!*/ connectionString, IDbConnection/*!*/ connection, string/*!*/ name)
            : base(name)
        {
            if (connection == null)
                throw new ArgumentNullException("connection");
            if (connectionString == null)
                throw new ArgumentNullException("connectionString");

            this.connection = connection;
            this.connectionString = connectionString;
        }

        /// <summary>
        /// Gets a query result resource.
        /// </summary>
        /// <param name="connection">Database connection.</param>
        /// <param name="reader">Data reader to be used for result resource population.</param>
        /// <param name="convertTypes">Whether to convert data types to PHP ones.</param>
        /// <returns>Result resource holding all resulting data of the query.</returns>
        protected abstract PhpDbResult GetResult(PhpDbConnection/*!*/ connection, IDataReader/*!*/ reader, bool convertTypes);

        /// <summary>
        /// Creates a command instance.
        /// </summary>
        /// <returns>Instance of command specific for the database provider.</returns>
        protected abstract IDbCommand/*!*/ CreateCommand();

        /// <summary>
        /// Builds a connection string from its individual parts.
        /// </summary>
        /// <param name="server">Server host name.</param>
        /// <param name="user">User id.</param>
        /// <param name="password">Password.</param>
        /// <param name="additionalSettings">Extra "key=value" pairs appended verbatim; may be null or empty.</param>
        /// <returns>The composed connection string.</returns>
        public static string/*!*/ BuildConnectionString(string server, string user, string password, string additionalSettings)
        {
            StringBuilder result = new StringBuilder(8);
            result.Append("server=");
            result.Append(server);
            // result.Append(";database=");
            // result.Append(database);
            result.Append(";user id=");
            result.Append(user);
            result.Append(";password=");
            result.Append(password);

            if (!String.IsNullOrEmpty(additionalSettings))
            {
                result.Append(';');
                // BUGFIX: was AppendFormat(additionalSettings), which treats caller-supplied
                // settings as a composite format string — a literal '{' or '}' in the value
                // (e.g. inside a password fragment) would throw FormatException. Append the
                // text verbatim instead.
                result.Append(additionalSettings);
            }

            return result.ToString();
        }

        /// <summary>
        /// Opens a database connection if it has not been opened yet.
        /// </summary>
        /// <returns><B>true</B> if successful.</returns>
        /// <exception cref="PhpException">Attempt to connect the database failed (Warning).</exception>
        /// <remarks>
        /// Sets <see cref="LastException"/> to <B>null</B> (on success) or to the exception object (on failure).
        /// </remarks>
        public bool Connect()
        {
            Debug.Assert(connection != null);

            if (connection.State == ConnectionState.Open) return true;

            connection.ConnectionString = this.ConnectionString;
            try
            {
                connection.Open();
                lastException = null;
            }
            catch (Exception e)
            {
                lastException = e;
                PhpException.Throw(PhpError.Warning,
                    LibResources.GetString("cannot_open_connection", GetExceptionMessage(e)));
                return false;
            }
            return true;
        }

        /// <summary>
        /// Closes connection and releases the resource.
        /// </summary>
        protected override void FreeManaged()
        {
            base.FreeManaged();

            ClosePendingReader();

            try
            {
                if (connection != null)
                {
                    connection.Close();
                }
                lastException = null;
            }
            catch (Exception e)
            {
                lastException = e;
                PhpException.Throw(PhpError.Warning,
                    LibResources.GetString("error_closing_connection", GetExceptionMessage(e)));
            }
            connection = null;
        }

        /// <summary>
        /// Closes pending reader, if any.
        /// </summary>
        public void ClosePendingReader()
        {
            if (pendingReader != null)
            {
                if (!pendingReader.IsClosed)
                    pendingReader.Close();
                pendingReader = null;
            }
        }

        /// <summary>
        /// Executes a query on the connection.
        /// </summary>
        /// <param name="query">The query.</param>
        /// <param name="convertTypes">Whether to convert data types to PHP ones.</param>
        /// <returns>PhpDbResult class representing the data read from database.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="query"/> is a <B>null</B> reference.</exception>
        /// <exception cref="PhpException">Query execution failed (Warning).</exception>
        public PhpDbResult ExecuteQuery(string/*!*/ query, bool convertTypes)
        {
            if (query == null)
                throw new ArgumentNullException("query");

            return ExecuteCommand(query, CommandType.Text, convertTypes, null, false);
        }

        /// <summary>
        /// Executes a stored procedure on the connection.
        /// </summary>
        /// <param name="procedureName">Procedure name.</param>
        /// <param name="parameters">Parameters.</param>
        /// <param name="skipResults">Whether to load results.</param>
        /// <returns>PhpDbResult class representing the data read from database.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="procedureName"/> is a <B>null</B> reference.</exception>
        /// <exception cref="PhpException">Procedure execution failed (Warning).</exception>
        public PhpDbResult ExecuteProcedure(string/*!*/ procedureName, IEnumerable<IDataParameter> parameters, bool skipResults)
        {
            if (procedureName == null)
                throw new ArgumentNullException("procedureName");

            return ExecuteCommand(procedureName, CommandType.StoredProcedure, true, parameters, skipResults);
        }

        /// <summary>
        /// Executes a command on the connection.
        /// </summary>
        /// <param name="commandText">Command text.</param>
        /// <param name="convertTypes">Whether to convert data types to PHP ones.</param>
        /// <param name="commandType">Command type.</param>
        /// <param name="parameters">Parameters.</param>
        /// <param name="skipResults">Whether to load results.</param>
        /// <returns>PhpDbResult class representing the data read from database, or null when the connection cannot be opened.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="commandText"/> is a <B>null</B> reference.</exception>
        /// <exception cref="PhpException">Command execution failed (Warning).</exception>
        public PhpDbResult ExecuteCommand(string/*!*/ commandText, CommandType commandType, bool convertTypes,
            IEnumerable<IDataParameter> parameters, bool skipResults)
        {
            if (commandText == null)
                throw new ArgumentNullException("commandText");

            return (Connect())
                ? ExecuteCommandInternal(commandText, commandType, convertTypes, parameters, skipResults)
                : null;
        }

        /// <summary>
        /// Core command execution: builds the provider command, executes the reader and
        /// either drains all result sets (<paramref name="skipResults"/>) or materializes
        /// them into a <see cref="PhpDbResult"/>. Failures are reported as PHP warnings
        /// and recorded in <see cref="LastException"/>.
        /// </summary>
        protected virtual PhpDbResult ExecuteCommandInternal(string/*!*/ commandText, CommandType commandType,
            bool convertTypes, IEnumerable<IDataParameter> parameters, bool skipResults)
        {
            ClosePendingReader();

            // IDbCommand
            IDbCommand command = CreateCommand();

            command.Connection = connection;
            command.CommandText = commandText;
            command.CommandType = commandType;

            if (parameters != null)
            {
                command.Parameters.Clear();
                foreach (IDataParameter parameter in parameters)
                    command.Parameters.Add(parameter);
            }

            // ExecuteReader
            PhpDbResult result = null;
            try
            {
                var/*!*/ reader = this.pendingReader = command.ExecuteReader();

                if (skipResults)
                {
                    // reads all data:
                    do { while (reader.Read()); } while (reader.NextResult());
                }
                else
                {
                    lastResult = null;

                    // read all data into PhpDbResult:
                    result = GetResult(this, reader, convertTypes);
                    result.command = command;

                    lastResult = result;
                }

                lastException = null;
            }
            catch (Exception e)
            {
                lastException = e;
                PhpException.Throw(PhpError.Warning,
                    LibResources.GetString("command_execution_failed", GetExceptionMessage(e)));
            }

            //
            return result;
        }

        /// <summary>
        /// Reexecutes a command associated with a specified result resource to get schema of the command result.
        /// </summary>
        /// <param name="result">The result resource.</param>
        internal void ReexecuteSchemaQuery(PhpDbResult/*!*/ result)
        {
            if (!Connect() || result.Command == null) return;

            ClosePendingReader();

            try
            {
                result.Reader = pendingReader = result.Command.ExecuteReader(
                    CommandBehavior.KeyInfo | CommandBehavior.SchemaOnly);
            }
            catch (Exception e)
            {
                lastException = e;
                PhpException.Throw(PhpError.Warning,
                    LibResources.GetString("command_execution_failed", GetExceptionMessage(e)));
            }
        }

        /// <summary>
        /// Changes the active database on opened connection.
        /// </summary>
        /// <param name="databaseName">Database to switch to.</param>
        /// <returns>true if database was changed; otherwise returns false</returns>
        public bool SelectDb(string databaseName)
        {
            ClosePendingReader();

            try
            {
                if (this.connection.State == ConnectionState.Open)
                {
                    connection.ChangeDatabase(databaseName);
                    lastException = null;
                    return true;
                }
            }
            catch (Exception e)
            {
                lastException = e;
                PhpException.Throw(PhpError.Warning,
                    LibResources.GetString("database_selection_failed", GetExceptionMessage(e)));
            }

            return false;
        }

        /// <summary>
        /// Gets a message from an exception raised by the connector.
        /// Removes the ending dot.
        /// </summary>
        /// <param name="e">Exception.</param>
        /// <returns>The message.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="e"/> is a <B>null</B> reference.</exception>
        public virtual string GetExceptionMessage(Exception/*!*/ e)
        {
            if (e == null) throw new ArgumentNullException("e");
            return PhpException.ToErrorMessage(e.Message);
        }

        /// <summary>
        /// Gets the last error message.
        /// </summary>
        /// <returns>The message or an empty string if no error occured.</returns>
        public virtual string GetLastErrorMessage()
        {
            return (LastException != null) ? LastException.Message : String.Empty;
        }

        /// <summary>
        /// Gets the last error number.
        /// </summary>
        /// <returns>-1 on error, zero otherwise.</returns>
        /// <remarks>Should be implemented by the subclass if the respective provider supports error numbers.</remarks>
        public virtual int GetLastErrorNumber()
        {
            return (LastException != null) ? -1 : 0;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using pnp.api.contosoorders.Areas.HelpPage.ModelDescriptions;
using pnp.api.contosoorders.Areas.HelpPage.Models;

namespace pnp.api.contosoorders.Areas.HelpPage
{
    // Extension methods over HttpConfiguration used by the auto-generated Web API help
    // page: registering documentation providers, seeding sample requests/responses, and
    // building/caching per-API help models in config.Properties.
    public static class HelpPageConfigurationExtensions
    {
        // Key prefix under which per-API help models are cached in config.Properties.
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" is the wildcard parameter list: applies regardless of action parameters.
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator. Created lazily on first access and cached
        /// in <see cref="HttpConfiguration.Properties"/>.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator. Initialized lazily (scanning all API
        /// descriptions) on first access and cached in <see cref="HttpConfiguration.Properties"/>.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                // NOTE: returns null (cast of a null object) when no API matches the id.
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }

            return (HelpPageApiModel)model;
        }

        // Builds the complete help model for one API description: URI parameters,
        // request/resource model descriptions and request/response samples.
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };

            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);

            return apiModel;
        }

        // Populates apiModel.UriParameters from the action's FromUri parameters:
        // complex types without a string TypeConverter are flattened to their properties;
        // everything else is listed as a single parameter (undeclared route parameters
        // are listed as plain strings).
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }

                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null
                        && !IsBindableWithTypeConverter(parameterType))
                    {
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter =
                            AddParameterDescription(apiModel, apiParameter, typeDescription);

                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }

                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);

                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }

        // True when the type has a TypeConverter that can convert from string
        // (i.e. it is URI-bindable as a single value).
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }

            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }

        // Creates a ParameterDescription from an ApiParameterDescription, appends it to
        // apiModel.UriParameters and returns it so callers can add annotations.
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
            ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };

            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }

        // Sets apiModel.RequestModelDescription from the FromBody parameter, or — when the
        // action takes a raw HttpRequestMessage — from the actual type registered with the
        // sample generator.
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);

                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }

        // Sets apiModel.ResourceDescription from the action's response type (falling back
        // to the declared return type), skipping void.
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }

        // Fills apiModel.SampleRequests/SampleResponses; sample-generation failures are
        // recorded in apiModel.ErrorMessages rather than thrown (see SuppressMessage).
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
                    "An exception has occurred while generating the sample. Exception message: {0}",
                    HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }

        // Finds the API's body parameter (FromBody, or an HttpRequestMessage resolved via
        // the sample generator) and its resource type. Returns false when the API has none.
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }

            resourceType = parameterDescription.ParameterDescriptor.ParameterType;

            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }

            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }

            return true;
        }

        // Pre-seeds the model description generator with the resource types of all APIs so
        // that model descriptions exist before individual help pages are requested.
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }

        // If the sample is an InvalidSample, surface its error message on the model.
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
/////////////////////////////////////////////////////////////////////////////// //File: SelectableWrappedList.cs // //Description: SelectableWrappedList, a MetaView list with selectable items // //References required: // System.Drawing // Wrapper.cs (MetaViewWrapper interface definitions) // //This file is Copyright (c) 2009 VirindiPlugins // //Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // //The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // //THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
///////////////////////////////////////////////////////////////////////////////

using System;
using System.Collections.Generic;
using System.Text;
using System.Drawing;

#if METAVIEW_PUBLIC_NS
using MetaViewWrappers;
#else
using MyClasses.MetaViewWrappers;
#endif

namespace MyClasses
{
    /// <summary>
    /// Wraps a MetaView <see cref="IList"/> and adds single-row selection:
    /// clicking one of the configured columns toggles selection of that row,
    /// and the selected row is highlighted by recoloring those columns.
    /// </summary>
    class SelectableWrappedList : IList
    {
        // The wrapped list that actually stores and renders the rows.
        MyClasses.MetaViewWrappers.IList Underlying;
        // Columns whose click toggles selection and whose color shows it.
        int[] iSelectChangeColumns;

        /// <summary>
        /// Wraps <paramref name="mylist"/>; clicks on any column listed in
        /// <paramref name="SelectChangeColumns"/> toggle row selection.
        /// </summary>
        public SelectableWrappedList(IList mylist, int[] SelectChangeColumns)
        {
            Underlying = mylist;
            Underlying.Click += new dClickedList(Underlying_Click);
            iSelectChangeColumns = SelectChangeColumns;
        }

        // Index of the currently selected row, or -1 when nothing is selected.
        int iselectedrow = -1;

        public delegate void delint(int row);
        /// <summary>Raised with the new row index (-1 for deselection) whenever the selection changes.</summary>
        public event delint SelectionChanged;

        Color iSelectedColor = Color.BlueViolet;

        /// <summary>Highlight color; re-applied immediately if a row is currently selected.</summary>
        public Color SelectedColor
        {
            get { return iSelectedColor; }
            set
            {
                iSelectedColor = value;
                if (SelectedRow != -1)
                    isetrowcolors(SelectedRow, value);
            }
        }

        /// <summary>
        /// Gets or sets the selected row (-1 = none). Out-of-range values are silently ignored.
        /// </summary>
        public int SelectedRow
        {
            get { return iselectedrow; }
            set
            {
                if (value >= Underlying.RowCount) return;
                if (value < -1) return;
                iselectrow(value);
            }
        }

        // Paints the selection columns of row r with color c.
        void isetrowcolors(int r, Color c)
        {
            foreach (int col in iSelectChangeColumns)
            {
                Underlying[r][col].Color = c;
            }
        }

        // Restores the default color of the selection columns of row r.
        void iresetrowcolors(int r)
        {
            foreach (int c in iSelectChangeColumns)
            {
                Underlying[r][c].ResetColor();
            }
        }

        // Core selection transition: recolors the old and new rows, updates
        // the index, then raises SelectionChanged. r == -1 deselects.
        void iselectrow(int r)
        {
            if (SelectedRow == r) return;

            // deselect old row
            if (SelectedRow != -1)
            {
                iresetrowcolors(SelectedRow);
            }

            // select new row
            if (r != -1)
            {
                isetrowcolors(r, SelectedColor);
            }

            iselectedrow = r;
            if (SelectionChanged != null)
                SelectionChanged(r);
        }

        // Click handler on the wrapped list: toggles selection when a
        // selection column was hit, then forwards the click to subscribers.
        void Underlying_Click(object sender, int row, int col)
        {
            if (Array.Exists<int>(iSelectChangeColumns, delegate(int obj) { return obj == col; }))
            {
                if (SelectedRow == row)
                    iselectrow(-1);
                else
                    iselectrow(row);
            }

            if (Click != null)
                Click(this, row, col);
        }

        #region IList Members

        public event dClickedList Click;

        /// <summary>
        /// Removes all rows, clearing any selection first (raising
        /// SelectionChanged) so the selection index cannot go stale.
        /// </summary>
        public virtual void Clear()
        {
            // BUGFIX: previously the selection index survived Clear(), leaving
            // SelectedRow pointing at a row that no longer existed. Deselect
            // while the rows still exist so their colors can be reset safely.
            iselectrow(-1);
            Underlying.Clear();
        }

        public IListRow this[int row]
        {
            get { return Underlying[row]; }
        }

        public virtual IListRow AddRow()
        {
            return Underlying.AddRow();
        }

        /// <summary>Inserts a row, shifting the selection index down if it sits at or below the insertion point.</summary>
        public virtual IListRow InsertRow(int pos)
        {
            if (SelectedRow >= pos)
                iselectedrow++;
            return Underlying.InsertRow(pos);
        }

        public int RowCount
        {
            get { return Underlying.RowCount; }
        }

        /// <summary>Removes a row; deselects it if it was selected, otherwise shifts the selection index.</summary>
        public virtual void RemoveRow(int index)
        {
            if (SelectedRow == index)
                iselectrow(-1);
            else if (SelectedRow > index)
                iselectedrow--;
            Underlying.RemoveRow(index);
        }

        public int ColCount
        {
            get { return Underlying.ColCount; }
        }

        public int ScrollPosition
        {
            get { return Underlying.ScrollPosition; }
            set { Underlying.ScrollPosition = value; }
        }

        #endregion

        #region IControl Members

        public string Name
        {
            get { return Underlying.Name; }
        }

        public bool Visible
        {
            get { return Underlying.Visible; }
        }

        #endregion

        #region IDisposable Members

        public void Dispose()
        {
            // Unhook the handler we attached in the constructor so the wrapped
            // list cannot keep this instance alive or call into it after disposal.
            Underlying.Click -= new dClickedList(Underlying_Click);
            Underlying.Dispose();
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Runtime.CompilerServices;
using Xunit;

namespace System.Data.SqlClient.ManualTesting.Tests
{
    /// <summary>
    /// Manual tests verifying that MARS (Multiple Active Result Sets) sessions are
    /// returned to / reused from the session pool correctly for the various command
    /// execution flavors and reader-cleanup strategies. Requires a live TCP SQL Server
    /// (connection string supplied by DataTestUtility.TcpConnStr).
    /// </summary>
    [Trait("connection", "tcp")]
    public static class MARSSessionPoolingTest
    {
        // Queries the server DMVs for the current spid: result set 1 = number of MARS
        // session-transport connections, result set 2 = number of running/suspended requests.
        private const string COMMAND_STATUS = "select count(*) as ConnectionCount from sys.dm_exec_connections where session_id=@@spid and net_transport='Session'; select count(*) as ActiveRequestCount from sys.dm_exec_requests where session_id=@@spid and status='running' or session_id=@@spid and status='suspended'";
        // NOTE(review): COMMAND_SPID is not referenced anywhere in this class — confirm it can be removed.
        private const string COMMAND_SPID = "select @@spid";
        // Number of commands prepared/executed per test case.
        private const int CONCURRENT_COMMANDS = 5;

        // Stored-procedure flavor of the workload.
        private const string _COMMAND_RPC = "sp_who";
        // Text flavor: 21 result sets plus a final print, large enough (with PacketSize=512)
        // that the reader stays "in flight" if it is not drained or closed.
        private const string _COMMAND_SQL =
            "select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; " +
            "select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; " +
            "select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; " +
            "select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; select * from sys.databases; " +
            "select * from sys.databases; print 'THIS IS THE END!'";

        // Small packet size + MaxPoolSize=1 + MARS on: forces all commands through one
        // physical connection so the DMV counts below are deterministic.
        private static readonly string _testConnString =
            (new SqlConnectionStringBuilder(DataTestUtility.TcpConnStr)
            {
                PacketSize = 512,
                MaxPoolSize = 1,
                MultipleActiveResultSets = true
            }).ConnectionString;

        [CheckConnStrSetupFact]
        public static void MarsExecuteScalar_AllFlavors()
        {
            TestMARSSessionPooling("Case: Text, ExecuteScalar", _testConnString, CommandType.Text, ExecuteType.ExecuteScalar, ReaderTestType.ReaderClose, GCType.Wait);
            TestMARSSessionPooling("Case: RPC, ExecuteScalar", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteScalar, ReaderTestType.ReaderClose, GCType.Wait);
        }

        [CheckConnStrSetupFact]
        public static void MarsExecuteNonQuery_AllFlavors()
        {
            TestMARSSessionPooling("Case: Text, ExecuteNonQuery", _testConnString, CommandType.Text, ExecuteType.ExecuteNonQuery, ReaderTestType.ReaderClose, GCType.Wait);
            TestMARSSessionPooling("Case: RPC, ExecuteNonQuery", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteNonQuery, ReaderTestType.ReaderClose, GCType.Wait);
        }

        [CheckConnStrSetupFact]
        public static void MarsExecuteReader_Text_NoGC()
        {
            TestMARSSessionPooling("Case: Text, ExecuteReader, ReaderClose", _testConnString, CommandType.Text, ExecuteType.ExecuteReader, ReaderTestType.ReaderClose, GCType.Wait);
            TestMARSSessionPooling("Case: Text, ExecuteReader, ReaderDispose", _testConnString, CommandType.Text, ExecuteType.ExecuteReader, ReaderTestType.ReaderDispose, GCType.Wait);
            TestMARSSessionPooling("Case: Text, ExecuteReader, ConnectionClose", _testConnString, CommandType.Text, ExecuteType.ExecuteReader, ReaderTestType.ConnectionClose, GCType.Wait);
        }

        [CheckConnStrSetupFact]
        public static void MarsExecuteReader_RPC_NoGC()
        {
            TestMARSSessionPooling("Case: RPC, ExecuteReader, ReaderClose", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteReader, ReaderTestType.ReaderClose, GCType.Wait);
            TestMARSSessionPooling("Case: RPC, ExecuteReader, ReaderDispose", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteReader, ReaderTestType.ReaderDispose, GCType.Wait);
            TestMARSSessionPooling("Case: RPC, ExecuteReader, ConnectionClose", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteReader, ReaderTestType.ConnectionClose, GCType.Wait);
        }

        [CheckConnStrSetupFact]
        public static void MarsExecuteReader_Text_WithGC()
        {
            TestMARSSessionPooling("Case: Text, ExecuteReader, GC-Wait", _testConnString, CommandType.Text, ExecuteType.ExecuteReader, ReaderTestType.ReaderGC, GCType.Wait);
            TestMARSSessionPooling("Case: Text, ExecuteReader, GC-NoWait", _testConnString, CommandType.Text, ExecuteType.ExecuteReader, ReaderTestType.ReaderGC, GCType.NoWait);
        }

        [CheckConnStrSetupFact]
        public static void MarsExecuteReader_StoredProcedure_WithGC()
        {
            TestMARSSessionPooling("Case: RPC, ExecuteReader, GC-Wait", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteReader, ReaderTestType.ReaderGC, GCType.Wait);
            TestMARSSessionPooling("Case: RPC, ExecuteReader, GC-NoWait", _testConnString, CommandType.StoredProcedure, ExecuteType.ExecuteReader, ReaderTestType.ReaderGC, GCType.NoWait);

            // NoCloses cases use a distinct pool: the trailing space makes the
            // connection string key differ from _testConnString.
            TestMARSSessionPooling("Case: Text, ExecuteReader, NoCloses", _testConnString + " ", CommandType.Text, ExecuteType.ExecuteReader, ReaderTestType.NoCloses, GCType.Wait);
            TestMARSSessionPooling("Case: RPC, ExecuteReader, NoCloses", _testConnString + " ", CommandType.StoredProcedure, ExecuteType.ExecuteReader, ReaderTestType.NoCloses, GCType.Wait);
        }

        // How each prepared command is executed.
        private enum ExecuteType
        {
            ExecuteScalar,
            ExecuteNonQuery,
            ExecuteReader,
        }

        // How (or whether) each SqlDataReader is cleaned up after execution.
        private enum ReaderTestType
        {
            ReaderClose,
            ReaderDispose,
            ReaderGC,
            ConnectionClose,
            NoCloses,
        }

        // Whether the GC pass waits for finalizers before asserting.
        private enum GCType
        {
            Wait,
            NoWait,
        }

        /// <summary>
        /// Core scenario driver: prepares CONCURRENT_COMMANDS commands on one pooled
        /// MARS connection, executes them one at a time with the requested cleanup
        /// strategy, and after each execution asserts (via server DMVs) the expected
        /// number of session connections and active requests for the spid.
        /// NOTE(review): caseName is currently unused inside the method — kept for
        /// call-site readability.
        /// </summary>
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void TestMARSSessionPooling(string caseName, string connectionString, CommandType commandType,
                                                   ExecuteType executeType, ReaderTestType readerTestType, GCType gcType)
        {
            SqlCommand[] cmd = new SqlCommand[CONCURRENT_COMMANDS];
            SqlDataReader[] gch = new SqlDataReader[CONCURRENT_COMMANDS];

            using (SqlConnection con = new SqlConnection(connectionString))
            {
                con.Open();

                for (int i = 0; i < CONCURRENT_COMMANDS; i++)
                {
                    // Prepare all commands
                    cmd[i] = con.CreateCommand();

                    switch (commandType)
                    {
                        case CommandType.Text:
                            cmd[i].CommandText = _COMMAND_SQL;
                            cmd[i].CommandTimeout = 120;
                            break;
                        case CommandType.StoredProcedure:
                            cmd[i].CommandText = _COMMAND_RPC;
                            cmd[i].CommandTimeout = 120;
                            cmd[i].CommandType = CommandType.StoredProcedure;
                            break;
                    }
                }

                for (int i = 0; i < CONCURRENT_COMMANDS; i++)
                {
                    switch (executeType)
                    {
                        case ExecuteType.ExecuteScalar:
                            cmd[i].ExecuteScalar();
                            break;
                        case ExecuteType.ExecuteNonQuery:
                            cmd[i].ExecuteNonQuery();
                            break;
                        case ExecuteType.ExecuteReader:
                            // ReaderGC opens its reader inside OpenReaderThenNullify so the
                            // only reference is the WeakReference; all other flavors hold it.
                            if (readerTestType != ReaderTestType.ReaderGC)
                                gch[i] = cmd[i].ExecuteReader();

                            switch (readerTestType)
                            {
                                case ReaderTestType.ReaderClose:
                                    {
                                        gch[i].Dispose();
                                        break;
                                    }
                                case ReaderTestType.ReaderDispose:
                                    gch[i].Dispose();
                                    break;
                                case ReaderTestType.ReaderGC:
                                    gch[i] = null;
                                    WeakReference weak = OpenReaderThenNullify(cmd[i]);
                                    GC.Collect();

                                    if (gcType == GCType.Wait)
                                    {
                                        GC.WaitForPendingFinalizers();
                                        Assert.False(weak.IsAlive, "Error - target still alive!");
                                    }
                                    break;
                                case ReaderTestType.ConnectionClose:
                                    // Leave the reader open (no finalizer) and recycle the connection.
                                    GC.SuppressFinalize(gch[i]);
                                    con.Close();
                                    con.Open();
                                    break;
                                case ReaderTestType.NoCloses:
                                    // Leak the open reader deliberately; counts below grow with i.
                                    GC.SuppressFinalize(gch[i]);
                                    break;
                            }
                            break;
                    }

                    if (readerTestType != ReaderTestType.NoCloses)
                    {
                        con.Close();
                        con.Open(); // Close and open, to re-assure collection!
                    }

                    // Verify session/request counts on the server for this spid.
                    SqlCommand verificationCmd = con.CreateCommand();
                    verificationCmd.CommandText = COMMAND_STATUS;

                    using (SqlDataReader rdr = verificationCmd.ExecuteReader())
                    {
                        rdr.Read();
                        int connections = (int)rdr.GetValue(0);
                        rdr.NextResult();
                        rdr.Read();
                        int requests = (int)rdr.GetValue(0);

                        switch (executeType)
                        {
                            case ExecuteType.ExecuteScalar:
                            case ExecuteType.ExecuteNonQuery:
                                // 1 for connection, 1 for command
                                Assert.True(connections == 2, "Failure - incorrect number of connections for ExecuteScalar! #connections: " + connections);
                                // only 1 executing
                                Assert.True(requests == 1, "Failure - incorrect number of requests for ExecuteScalar! #requests: " + requests);
                                break;
                            case ExecuteType.ExecuteReader:
                                switch (readerTestType)
                                {
                                    case ReaderTestType.ReaderClose:
                                    case ReaderTestType.ReaderDispose:
                                    case ReaderTestType.ConnectionClose:
                                        // 1 for connection, 1 for command
                                        Assert.True(connections == 2, "Failure - Incorrect number of connections for ReaderClose / ReaderDispose / ConnectionClose! #connections: " + connections);
                                        // only 1 executing
                                        Assert.True(requests == 1, "Failure - incorrect number of requests for ReaderClose/ReaderDispose/ConnectionClose! #requests: " + requests);
                                        break;
                                    case ReaderTestType.ReaderGC:
                                        switch (gcType)
                                        {
                                            case GCType.Wait:
                                                // 1 for connection, 1 for open reader
                                                Assert.True(connections == 2, "Failure - incorrect number of connections for ReaderGCWait! #connections: " + connections);
                                                // only 1 executing
                                                Assert.True(requests == 1, "Failure - incorrect number of requests for ReaderGCWait! #requests: " + requests);
                                                break;
                                            case GCType.NoWait:
                                                // 1 for connection, 1 for open reader
                                                Assert.True(connections == 2, "Failure - incorrect number of connections for ReaderGCNoWait! #connections: " + connections);
                                                // only 1 executing
                                                Assert.True(requests == 1, "Failure - incorrect number of requests for ReaderGCNoWait! #requests: " + requests);
                                                break;
                                        }
                                        break;
                                    case ReaderTestType.NoCloses:
                                        // 1 for connection, 1 for current command, 1 for 0 based array offset, plus i for open readers
                                        Assert.True(connections == (3 + i), "Failure - incorrect number of connections for NoCloses: " + connections);
                                        // 1 for current command, 1 for 0 based array offset, plus i open readers
                                        Assert.True(requests == (2 + i), "Failure - incorrect number of requests for NoCloses: " + requests);
                                        break;
                                }
                                break;
                        }
                    }
                }
            }
        }

        // Opens a reader, wraps it in a WeakReference and drops the strong
        // reference, so the caller can observe whether GC reclaims it.
        private static WeakReference OpenReaderThenNullify(SqlCommand command)
        {
            SqlDataReader reader = command.ExecuteReader();
            WeakReference weak = new WeakReference(reader);
            reader = null;
            return weak;
        }
    }
}
//---------------------------------------------------------------------
// <copyright file="ErrorLog.cs" company="Microsoft">
//      Copyright (c) Microsoft Corporation.  All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------

namespace System.Data.Mapping.ViewGeneration.Structures
{
    using System.Collections.Generic;
    using System.Data.Common.Utils;
    using System.Data.Entity;
    using System.Data.Mapping.ViewGeneration.Utils;
    using System.Data.Metadata.Edm;
    using System.Diagnostics;
    using System.Globalization;
    using System.Linq;
    using System.Text;

    // Accumulates view-generation errors/warnings as Record entries, each of
    // which wraps an EdmSchemaError plus (optionally) the mapping cells the
    // problem originated from.
    internal class ErrorLog : InternalBase
    {
        #region Constructors
        internal ErrorLog()
        {
            m_log = new List<Record>();
        }
        #endregion

        #region Fields
        // Entries in insertion order.
        private List<Record> m_log;
        #endregion

        #region Properties
        // Number of entries recorded so far.
        internal int Count
        {
            get { return m_log.Count; }
        }

        // Lazily yields the EdmSchemaError of every entry, in insertion order.
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] // referenced (indirectly) by System.Data.Entity.Design.dll
        internal IEnumerable<EdmSchemaError> Errors
        {
            get
            {
                foreach (Record record in m_log)
                {
                    yield return record.Error;
                }
            }
        }
        #endregion

        #region Methods
        // Appends a single entry; throws (via CheckArgumentNull) if record is null.
        internal void AddEntry(Record record)
        {
            EntityUtil.CheckArgumentNull(record, "record");
            m_log.Add(record);
        }

        // Appends every entry of another log to this one; the other log is not modified.
        internal void Merge(ErrorLog log)
        {
            foreach (Record record in log.m_log)
            {
                m_log.Add(record);
            }
        }

        // Writes the compact (debug) rendering of all entries to the trace output.
        internal void PrintTrace()
        {
            StringBuilder builder = new StringBuilder();
            ToCompactString(builder);
            Helpers.StringTraceLine(builder.ToString());
        }

        internal override void ToCompactString(StringBuilder builder)
        {
            foreach (Record record in m_log)
            {
                record.ToCompactString(builder);
            }
        }

        // User-facing rendering: one line per entry, using each record's EdmSchemaError text.
        internal string ToUserString()
        {
            StringBuilder builder = new StringBuilder();
            foreach (Record record in m_log)
            {
                string recordString = record.ToUserString();
                builder.AppendLine(recordString);
            }
            return builder.ToString();
        }
        #endregion

        #region Nested classes/struct
        // One log entry: an EdmSchemaError plus (when built from cells) the
        // source mapping cells and a debug rendering of the problem.
        internal class Record : InternalBase
        {
            #region Constructor
            // effects: Creates an error record for wrappers, a debug message
            // and an error message given by "message". Note: wrappers cannot
            // be null
            internal Record(bool isError, ViewGenErrorCode errorCode, string message, IEnumerable<LeftCellWrapper> wrappers, string debugMessage)
            {
                Debug.Assert(wrappers != null);
                IEnumerable<Cell> cells = LeftCellWrapper.GetInputCellsForWrappers(wrappers);
                Init(isError, errorCode, message, cells, debugMessage);
            }

            // Single-cell convenience overload.
            internal Record(bool isError, ViewGenErrorCode errorCode, string message, Cell sourceCell, string debugMessage)
            {
                Init(isError, errorCode, message, new Cell[] { sourceCell }, debugMessage);
            }

            internal Record(bool isError, ViewGenErrorCode errorCode, string message, IEnumerable<Cell> sourceCells, string debugMessage)
            {
                Init(isError, errorCode, message, sourceCells, debugMessage);
            }

            //There are cases when we want to create a ViewGen error that is not specific to any mapping fragment
            //In this case, it is better to just create the EdmSchemaError directly and hold on to it.
            // NOTE: this path leaves m_sourceCells null.
            internal Record(EdmSchemaError error)
            {
                m_debugMessage = error.ToString();
                m_mappingError = error;
            }

            // Shared constructor body: builds the user and debug messages and
            // creates the EdmSchemaError using the first cell's source location.
            // NOTE(review): severity is always EdmSchemaErrorSeverity.Error even
            // when isError is false — confirm this is intended.
            private void Init(bool isError, ViewGenErrorCode errorCode, string message, IEnumerable<Cell> sourceCells, string debugMessage)
            {
                m_sourceCells = new List<Cell>(sourceCells);

                Debug.Assert(m_sourceCells.Count > 0, "Error record must have at least one cell");

                // For certain foreign key messages, we may need the SSDL line numbers and file names
                CellLabel label = m_sourceCells[0].CellLabel;
                string sourceLocation = label.SourceLocation;
                int lineNumber = label.StartLineNumber;
                int columnNumber = label.StartLinePosition;

                string userMessage = InternalToString(message, debugMessage, m_sourceCells, sourceLocation, errorCode, isError, false);
                m_debugMessage = InternalToString(message, debugMessage, m_sourceCells, sourceLocation, errorCode, isError, true);

                m_mappingError = new EdmSchemaError(userMessage, (int)errorCode, EdmSchemaErrorSeverity.Error, sourceLocation, lineNumber, columnNumber);
            }
            #endregion

            #region Fields
            // The schema error surfaced to callers (always non-null after construction).
            private EdmSchemaError m_mappingError;
            // Mapping cells the problem came from; null for the EdmSchemaError-only constructor.
            private List<Cell> m_sourceCells;
            // Test/debug rendering of the problem.
            private string m_debugMessage;
            #endregion

            #region Properties
            [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] // referenced (indirectly) by System.Data.Entity.Design.dll
            internal EdmSchemaError Error
            {
                get { return m_mappingError; }
            }
            #endregion

            #region Methods
            internal override void ToCompactString(StringBuilder builder)
            {
                builder.Append(m_debugMessage);
            }

            // effects: adds a comma-separated list of line numbers to the string builder
            private static void GetUserLinesFromCells(IEnumerable<Cell> sourceCells, StringBuilder lineBuilder, bool isInvariant)
            {
                var orderedCells = sourceCells.OrderBy<Cell, int>(cell => cell.CellLabel.StartLineNumber, Comparer<int>.Default);

                bool isFirst = true;
                // Get the line numbers
                foreach (Cell cell in orderedCells)
                {
                    if (isFirst == false)
                    {
                        // Separator: localized resource string in invariant/test mode,
                        // plain ", " otherwise.
                        lineBuilder.Append(isInvariant ? EntityRes.GetString(EntityRes.ViewGen_CommaBlank) : ", ");
                    }
                    isFirst = false;
                    lineBuilder.AppendFormat(CultureInfo.InvariantCulture, "{0}", cell.CellLabel.StartLineNumber);
                }
                Debug.Assert(isFirst == false, "No cells");
            }

            // effects: Converts the message/debugMessage to a user-readable
            // message using resources (if isInvariant is false) or a test
            // message (if isInvariant is true)
            // NOTE(review): the isError parameter is not used in this method's
            // visible body — confirm whether it can be dropped from the signature.
            static private string InternalToString(string message, string debugMessage, List<Cell> sourceCells,
                                                   string sourceLocation, ViewGenErrorCode errorCode, bool isError, bool isInvariant)
            {
                StringBuilder builder = new StringBuilder();

                if (isInvariant)
                {
                    builder.AppendLine(debugMessage);

                    // NOTE(review): this ternary is guarded by "if (isInvariant)",
                    // so the ViewGen_Error branch is unreachable here.
                    builder.Append(isInvariant ? "ERROR" : System.Data.Entity.Strings.ViewGen_Error);
                    StringUtil.FormatStringBuilder(builder, " ({0}): ", (int)errorCode);
                }

                StringBuilder lineBuilder = new StringBuilder();
                GetUserLinesFromCells(sourceCells, lineBuilder, isInvariant);

                if (isInvariant)
                {
                    if (sourceCells.Count > 1)
                    {
                        StringUtil.FormatStringBuilder(builder, "Problem in Mapping Fragments starting at lines {0}: ", lineBuilder.ToString());
                    }
                    else
                    {
                        StringUtil.FormatStringBuilder(builder, "Problem in Mapping Fragment starting at line {0}: ", lineBuilder.ToString());
                    }
                }
                else
                {
                    if (sourceCells.Count > 1)
                    {
                        builder.Append(Strings.ViewGen_ErrorLog2(lineBuilder.ToString()));
                    }
                    else
                    {
                        builder.Append(Strings.ViewGen_ErrorLog(lineBuilder.ToString()));
                    }
                }

                builder.AppendLine(message);
                return builder.ToString();
            }

            internal string ToUserString()
            {
                return m_mappingError.ToString();
            }
            #endregion
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Diagnostics; using System.Reflection; using System.Xml.Schema; namespace System.Xml.Xsl.Qil { /// <summary> /// Additional factory methods for constructing common QIL patterns. /// </summary> /// <remarks> /// Some of the methods here are exactly like the ones in QilFactory except /// that they perform constant-folding and other normalization. Others are /// "macro patterns" that simplify the task of constructing otherwise complex patterns. /// </remarks> internal class QilPatternFactory { private bool _debug; private QilFactory _f; public QilPatternFactory(QilFactory f, bool debug) { Debug.Assert(f != null); _f = f; _debug = debug; } public QilFactory BaseFactory { get { return _f; } } public bool IsDebug { get { return _debug; } } #region Convenience methods public QilLiteral String(string val) { return _f.LiteralString(val); } public QilLiteral Int32(int val) { return _f.LiteralInt32(val); } public QilLiteral Double(double val) { return _f.LiteralDouble(val); } public QilName QName(string local, string uri, string prefix) { return _f.LiteralQName(local, uri, prefix); } public QilName QName(string local, string uri) { return _f.LiteralQName(local, uri, System.String.Empty); } public QilName QName(string local) { return _f.LiteralQName(local, System.String.Empty, System.String.Empty); } public QilNode Unknown(XmlQueryType t) { return _f.Unknown(t); } #endregion #region meta //----------------------------------------------- // meta //----------------------------------------------- public QilExpression QilExpression(QilNode root, QilFactory factory) { return _f.QilExpression(root, factory); } public QilList FunctionList() { return _f.FunctionList(); } public QilList GlobalVariableList() { return _f.GlobalVariableList(); 
} public QilList GlobalParameterList() { return _f.GlobalParameterList(); } public QilList ActualParameterList() { return _f.ActualParameterList(); } public QilList ActualParameterList(QilNode arg1, QilNode arg2) { QilList result = _f.ActualParameterList(); result.Add(arg1); result.Add(arg2); return result; } public QilList ActualParameterList(params QilNode[] args) { return _f.ActualParameterList(args); } public QilList FormalParameterList() { return _f.FormalParameterList(); } public QilList FormalParameterList(QilNode arg1, QilNode arg2) { QilList result = _f.FormalParameterList(); result.Add(arg1); result.Add(arg2); return result; } public QilList FormalParameterList(params QilNode[] args) { return _f.FormalParameterList(args); } public QilList BranchList(params QilNode[] args) { return _f.BranchList(args); } public QilNode OptimizeBarrier(QilNode child) { return _f.OptimizeBarrier(child); } #endregion // meta #region specials //----------------------------------------------- // specials //----------------------------------------------- public QilNode DataSource(QilNode name, QilNode baseUri) { return _f.DataSource(name, baseUri); } public QilNode Nop(QilNode child) { return _f.Nop(child); } public QilNode Error(QilNode text) { return _f.Error(text); } public QilNode Warning(QilNode text) { return _f.Warning(text); } #endregion // specials #region variables //----------------------------------------------- // variables //----------------------------------------------- public QilIterator For(QilNode binding) { return _f.For(binding); } public QilIterator Let(QilNode binding) { return _f.Let(binding); } public QilParameter Parameter(XmlQueryType t) { return _f.Parameter(t); } public QilParameter Parameter(QilNode defaultValue, QilName name, XmlQueryType t) { return _f.Parameter(defaultValue, name, t); } public QilNode PositionOf(QilIterator expr) { return _f.PositionOf(expr); } #endregion // variables #region literals 
//----------------------------------------------- // literals //----------------------------------------------- public QilNode True() { return _f.True(); } public QilNode False() { return _f.False(); } public QilNode Boolean(bool b) { return b ? this.True() : this.False(); } #endregion // literals #region boolean operators //----------------------------------------------- // boolean operators //----------------------------------------------- private static void CheckLogicArg(QilNode arg) { Debug.Assert(arg != null, "Argument shouldn't be null"); Debug.Assert(arg.XmlType.TypeCode == XmlTypeCode.Boolean && arg.XmlType.IsSingleton, "The operand must be boolean-typed" ); } public QilNode And(QilNode left, QilNode right) { CheckLogicArg(left); CheckLogicArg(right); if (!_debug) { // True, True => True (right) other, True => other (left) // True, False => False (right) other, False => False (right) // True, other => other (right) other, other => And if (left.NodeType == QilNodeType.True || right.NodeType == QilNodeType.False) { return right; } if (left.NodeType == QilNodeType.False || right.NodeType == QilNodeType.True) { return left; } } return _f.And(left, right); } public QilNode Or(QilNode left, QilNode right) { CheckLogicArg(left); CheckLogicArg(right); if (!_debug) { // True, True => True (left) other, True => True (right) // True, False => True (left) other, False => other (left) // True, other => True (left) other, other => Or if (left.NodeType == QilNodeType.True || right.NodeType == QilNodeType.False) { return left; } if (left.NodeType == QilNodeType.False || right.NodeType == QilNodeType.True) { return right; } } return _f.Or(left, right); } public QilNode Not(QilNode child) { if (!_debug) { switch (child.NodeType) { case QilNodeType.True: return _f.False(); case QilNodeType.False: return _f.True(); case QilNodeType.Not: return ((QilUnary)child).Child; } } return _f.Not(child); } #endregion // boolean operators #region choice 
// NOTE(review): this span is the interior of a Qil "pattern factory" class whose header is
// above this excerpt. Every member delegates to the underlying node factory (_f); when the
// _debug flag is false, several members fold trivial patterns away instead of building a node.

//-----------------------------------------------
// choice
//-----------------------------------------------

// Builds a Conditional node. Outside debug mode, constant conditions select a branch
// directly and a negated condition swaps the branches.
public QilNode Conditional(QilNode condition, QilNode trueBranch, QilNode falseBranch)
{
    if (!_debug)
    {
        switch (condition.NodeType)
        {
            case QilNodeType.True:
                return trueBranch;
            case QilNodeType.False:
                return falseBranch;
            case QilNodeType.Not:
                // (If (Not $c) $t $f) => (If $c $f $t)
                return this.Conditional(((QilUnary)condition).Child, falseBranch, trueBranch);
        }
    }
    return _f.Conditional(condition, trueBranch, falseBranch);
}

// Builds a Choice node; 1- and 2-branch choices are lowered to simpler forms outside debug mode.
public QilNode Choice(QilNode expr, QilList branches)
{
    if (!_debug)
    {
        switch (branches.Count)
        {
            case 1:
                // If expr has no side effects, it will be eliminated by optimizer
                return _f.Loop(_f.Let(expr), branches[0]);
            case 2:
                return _f.Conditional(_f.Eq(expr, _f.LiteralInt32(0)), branches[0], branches[1]);
        }
    }
    return _f.Choice(expr, branches);
}

#endregion // choice

#region collection operators
//-----------------------------------------------
// collection operators
//-----------------------------------------------

public QilNode Length(QilNode child) { return _f.Length(child); }

public QilNode Sequence() { return _f.Sequence(); }

// A one-element sequence collapses to the element itself outside debug mode.
public QilNode Sequence(QilNode child)
{
    if (!_debug)
    {
        return child;
    }
    QilList res = _f.Sequence();
    res.Add(child);
    return res;
}

public QilNode Sequence(QilNode child1, QilNode child2)
{
    QilList res = _f.Sequence();
    res.Add(child1);
    res.Add(child2);
    return res;
}

// Empty and singleton argument lists are folded away outside debug mode.
public QilNode Sequence(params QilNode[] args)
{
    if (!_debug)
    {
        switch (args.Length)
        {
            case 0:
                return _f.Sequence();
            case 1:
                return args[0];
        }
    }
    QilList res = _f.Sequence();
    foreach (QilNode n in args)
        res.Add(n);
    return res;
}

public QilNode Union(QilNode left, QilNode right) { return _f.Union(left, right); }

public QilNode Sum(QilNode collection) { return _f.Sum(collection); }

#endregion // collection operators

#region arithmetic operators
//-----------------------------------------------
// arithmetic operators
//-----------------------------------------------

public QilNode Negate(QilNode child) { return _f.Negate(child); }
public QilNode Add(QilNode left, QilNode right) { return _f.Add(left, right); }
public QilNode Subtract(QilNode left, QilNode right) { return _f.Subtract(left, right); }
public QilNode Multiply(QilNode left, QilNode right) { return _f.Multiply(left, right); }
public QilNode Divide(QilNode left, QilNode right) { return _f.Divide(left, right); }
public QilNode Modulo(QilNode left, QilNode right) { return _f.Modulo(left, right); }

#endregion // arithmetic operators

#region string operators
//-----------------------------------------------
// string operators
//-----------------------------------------------

public QilNode StrLength(QilNode str) { return _f.StrLength(str); }

// Concatenation of a singleton value is the identity outside debug mode.
public QilNode StrConcat(QilNode values)
{
    if (!_debug)
    {
        if (values.XmlType.IsSingleton)
            return values;
    }
    return _f.StrConcat(values);
}

public QilNode StrConcat(params QilNode[] args)
{
    return StrConcat((IList<QilNode>)args);
}

// Zero args => empty string literal; one arg => delegate to the single-node overload.
public QilNode StrConcat(IList<QilNode> args)
{
    if (!_debug)
    {
        switch (args.Count)
        {
            case 0:
                return _f.LiteralString(string.Empty);
            case 1:
                return StrConcat(args[0]);
        }
    }
    return StrConcat(_f.Sequence(args));
}

public QilNode StrParseQName(QilNode str, QilNode ns) { return _f.StrParseQName(str, ns); }

#endregion // string operators

#region value comparison operators
//-----------------------------------------------
// value comparison operators
//-----------------------------------------------

public QilNode Ne(QilNode left, QilNode right) { return _f.Ne(left, right); }
public QilNode Eq(QilNode left, QilNode right) { return _f.Eq(left, right); }
public QilNode Gt(QilNode left, QilNode right) { return _f.Gt(left, right); }
public QilNode Ge(QilNode left, QilNode right) { return _f.Ge(left, right); }
public QilNode Lt(QilNode left, QilNode right) { return _f.Lt(left, right); }
public QilNode Le(QilNode left, QilNode right) { return _f.Le(left, right); }

#endregion // value comparison operators

#region node comparison operators
//-----------------------------------------------
// node comparison operators
//-----------------------------------------------

public QilNode Is(QilNode left, QilNode right) { return _f.Is(left, right); }
public QilNode Before(QilNode left, QilNode right) { return _f.Before(left, right); }

#endregion // node comparison operators

#region loops
//-----------------------------------------------
// loops
//-----------------------------------------------

public QilNode Loop(QilIterator variable, QilNode body)
{
    if (!_debug)
    {
        //((Loop (For $Binding) ($Binding) ) => ($binding))
        if (body == variable.Binding)
        {
            return body;
        }
    }
    return _f.Loop(variable, body);
}

public QilNode Filter(QilIterator variable, QilNode expr)
{
    if (!_debug)
    {
        //((Filter (For $Binding) (True ) ) => ($binding))
        if (expr.NodeType == QilNodeType.True)
        {
            return variable.Binding;
        }
        // The following optimization is not safe if the iterator has side effects
        //((Filter (For $Binding) (False) ) => (Sequence))
    }
    return _f.Filter(variable, expr);
}

#endregion // loops

#region sorting
//-----------------------------------------------
// sorting
//-----------------------------------------------

public QilNode Sort(QilIterator iter, QilNode keys) { return _f.Sort(iter, keys); }

public QilSortKey SortKey(QilNode key, QilNode collation) { return _f.SortKey(key, collation); }

// Applying DocOrderDistinct to an already-distinct node is redundant; reuse it.
public QilNode DocOrderDistinct(QilNode collection)
{
    if (collection.NodeType == QilNodeType.DocOrderDistinct)
    {
        return collection;
    }
    return _f.DocOrderDistinct(collection);
}

#endregion // sorting

#region function definition and invocation
//-----------------------------------------------
// function definition and invocation
//-----------------------------------------------

public QilFunction Function(QilList args, QilNode sideEffects, XmlQueryType resultType)
{
    Debug.Assert(args.NodeType == QilNodeType.FormalParameterList);
    return _f.Function(args, sideEffects, resultType);
}

// Result type is inferred from the definition's XmlType.
public QilFunction Function(QilList args, QilNode defn, QilNode sideEffects)
{
    Debug.Assert(args.NodeType == QilNodeType.FormalParameterList);
    return _f.Function(args, defn, sideEffects, defn.XmlType);
}

public QilNode Invoke(QilFunction func, QilList args)
{
    Debug.Assert(args.NodeType == QilNodeType.ActualParameterList);
    Debug.Assert(func.Arguments.Count == args.Count);
    return _f.Invoke(func, args);
}

#endregion // function definition and invocation

#region XML navigation
//-----------------------------------------------
// XML navigation
//-----------------------------------------------

public QilNode Content(QilNode context) { return _f.Content(context); }
public QilNode Parent(QilNode context) { return _f.Parent(context); }
public QilNode Root(QilNode context) { return _f.Root(context); }
public QilNode XmlContext() { return _f.XmlContext(); }
public QilNode Descendant(QilNode expr) { return _f.Descendant(expr); }
public QilNode DescendantOrSelf(QilNode context) { return _f.DescendantOrSelf(context); }
public QilNode Ancestor(QilNode expr) { return _f.Ancestor(expr); }
public QilNode AncestorOrSelf(QilNode expr) { return _f.AncestorOrSelf(expr); }
public QilNode Preceding(QilNode expr) { return _f.Preceding(expr); }
public QilNode FollowingSibling(QilNode expr) { return _f.FollowingSibling(expr); }
public QilNode PrecedingSibling(QilNode expr) { return _f.PrecedingSibling(expr); }
public QilNode NodeRange(QilNode left, QilNode right) { return _f.NodeRange(left, right); }
public QilBinary Deref(QilNode context, QilNode id) { return _f.Deref(context, id); }

#endregion // XML navigation

#region XML construction
//-----------------------------------------------
// XML construction
//-----------------------------------------------

public QilNode ElementCtor(QilNode name, QilNode content) { return _f.ElementCtor(name, content); }
public QilNode AttributeCtor(QilNode name, QilNode val) { return _f.AttributeCtor(name, val); }
public QilNode CommentCtor(QilNode content) { return _f.CommentCtor(content); }
public QilNode PICtor(QilNode name, QilNode content) { return _f.PICtor(name, content); }
public QilNode TextCtor(QilNode content) { return _f.TextCtor(content); }
public QilNode RawTextCtor(QilNode content) { return _f.RawTextCtor(content); }
public QilNode DocumentCtor(QilNode child) { return _f.DocumentCtor(child); }
public QilNode NamespaceDecl(QilNode prefix, QilNode uri) { return _f.NamespaceDecl(prefix, uri); }
public QilNode RtfCtor(QilNode content, QilNode baseUri) { return _f.RtfCtor(content, baseUri); }

#endregion // XML construction

#region Node properties
//-----------------------------------------------
// Node properties
//-----------------------------------------------

public QilNode NameOf(QilNode expr) { return _f.NameOf(expr); }
public QilNode LocalNameOf(QilNode expr) { return _f.LocalNameOf(expr); }
public QilNode NamespaceUriOf(QilNode expr) { return _f.NamespaceUriOf(expr); }
public QilNode PrefixOf(QilNode expr) { return _f.PrefixOf(expr); }

#endregion // Node properties

#region Type operators
//-----------------------------------------------
// Type operators
//-----------------------------------------------

public QilNode TypeAssert(QilNode expr, XmlQueryType t) { return _f.TypeAssert(expr, t); }

public QilNode IsType(QilNode expr, XmlQueryType t)
{
    Debug.Assert(t != null, "Type can't be null");
    return _f.IsType(expr, t);
}

public QilNode IsEmpty(QilNode set) { return _f.IsEmpty(set); }

#endregion // Type operators

#region XPath operators
//-----------------------------------------------
// XPath operators
//-----------------------------------------------

public QilNode XPathNodeValue(QilNode expr) { return _f.XPathNodeValue(expr); }
public QilNode XPathFollowing(QilNode expr) { return _f.XPathFollowing(expr); }
public QilNode XPathNamespace(QilNode expr) { return _f.XPathNamespace(expr); }
public QilNode XPathPreceding(QilNode expr) { return _f.XPathPreceding(expr); }

#endregion // XPath operators

#region XSLT
//-----------------------------------------------
// XSLT
//-----------------------------------------------

public QilNode XsltGenerateId(QilNode expr) { return _f.XsltGenerateId(expr); }

// Wraps the CLR method handle in a literal and packs args into an ActualParameterList.
public QilNode XsltInvokeEarlyBound(QilNode name, MethodInfo d, XmlQueryType t, IList<QilNode> args)
{
    QilList list = _f.ActualParameterList();
    list.Add(args);
    return _f.XsltInvokeEarlyBound(name, _f.LiteralObject(d), list, t);
}

public QilNode XsltInvokeLateBound(QilNode name, IList<QilNode> args)
{
    QilList list = _f.ActualParameterList();
    list.Add(args);
    return _f.XsltInvokeLateBound(name, list);
}

public QilNode XsltCopy(QilNode expr, QilNode content) { return _f.XsltCopy(expr, content); }
public QilNode XsltCopyOf(QilNode expr) { return _f.XsltCopyOf(expr); }
public QilNode XsltConvert(QilNode expr, XmlQueryType t) { return _f.XsltConvert(expr, t); }

#endregion // XSLT
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.IO;
using System.Runtime;
using System.ServiceModel;
using System.ServiceModel.Channels;
using System.ServiceModel.Description;
using System.ServiceModel.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;

namespace System.ServiceModel.Dispatcher
{
    // Serializes and deserializes a single Stream-typed message part as base64-encoded
    // body content. Created only when the message description has exactly one Stream part
    // (see ValidateAndGetStreamPart).
    internal class StreamFormatter
    {
        private string _wrapperName;
        private string _wrapperNS;
        private string _partName;
        private string _partNS;
        private int _streamIndex;      // index of the Stream parameter, or returnValueIndex
        private bool _isRequest;
        private string _operationName;
        private const int returnValueIndex = -1;   // sentinel: the stream is the return value

        // Returns null when the message has no (valid) stream part; throws for invalid
        // stream usage (see ValidateAndGetStreamPart).
        internal static StreamFormatter Create(MessageDescription messageDescription, string operationName, bool isRequest)
        {
            MessagePartDescription streamPart = ValidateAndGetStreamPart(messageDescription, isRequest, operationName);
            if (streamPart == null)
                return null;
            return new StreamFormatter(messageDescription, streamPart, operationName, isRequest);
        }

        private StreamFormatter(MessageDescription messageDescription, MessagePartDescription streamPart, string operationName, bool isRequest)
        {
            // Reference equality with Body.ReturnValue identifies the return-value part.
            if ((object)streamPart == (object)messageDescription.Body.ReturnValue)
                _streamIndex = returnValueIndex;
            else
                _streamIndex = streamPart.Index;
            _wrapperName = messageDescription.Body.WrapperName;
            _wrapperNS = messageDescription.Body.WrapperNamespace;
            _partName = streamPart.Name;
            _partNS = streamPart.Namespace;
            _isRequest = isRequest;
            _operationName = operationName;
        }

        // Writes the wrapper/part start elements, the stream content as base64, then the
        // matching end elements.
        internal void Serialize(XmlDictionaryWriter writer, object[] parameters, object returnValue)
        {
            Stream streamValue = GetStreamAndWriteStartWrapperIfNecessary(writer, parameters, returnValue);
            var streamProvider = new OperationStreamProvider(streamValue);
            StreamFormatterHelper.WriteValue(writer, streamProvider);
            WriteEndWrapperIfNecessary(writer);
        }

        // Async counterpart of Serialize.
        internal async Task SerializeAsync(XmlDictionaryWriter writer, object[] parameters, object returnValue)
        {
            Stream streamValue = await GetStreamAndWriteStartWrapperIfNecessaryAsync(writer, parameters, returnValue);
            var streamProvider = new OperationStreamProvider(streamValue);
            await StreamFormatterHelper.WriteValueAsync(writer, streamProvider);
            await WriteEndWrapperIfNecessaryAsync(writer);
        }

        private Stream GetStreamAndWriteStartWrapperIfNecessary(XmlDictionaryWriter writer, object[] parameters, object returnValue)
        {
            Stream streamValue = GetStreamValue(parameters, returnValue);
            if (streamValue == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(_partName);
            if (WrapperName != null)
                writer.WriteStartElement(WrapperName, WrapperNamespace);
            writer.WriteStartElement(PartName, PartNamespace);
            return streamValue;
        }

        private async Task<Stream> GetStreamAndWriteStartWrapperIfNecessaryAsync(XmlDictionaryWriter writer, object[] parameters, object returnValue)
        {
            Stream streamValue = GetStreamValue(parameters, returnValue);
            if (streamValue == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(_partName);
            if (WrapperName != null)
                await writer.WriteStartElementAsync(null, WrapperName, WrapperNamespace);
            await writer.WriteStartElementAsync(null, PartName, PartNamespace);
            return streamValue;
        }

        private void WriteEndWrapperIfNecessary(XmlDictionaryWriter writer)
        {
            writer.WriteEndElement();       // closes the part element
            if (_wrapperName != null)
                writer.WriteEndElement();   // closes the wrapper element
        }

        // Synchronous end-element writes wrapped in a completed task (no true async work needed).
        private Task WriteEndWrapperIfNecessaryAsync(XmlDictionaryWriter writer)
        {
            writer.WriteEndElement();
            if (_wrapperName != null)
                writer.WriteEndElement();
            return Task.CompletedTask;
        }

        internal IAsyncResult BeginSerialize(XmlDictionaryWriter writer, object[] parameters, object returnValue, AsyncCallback callback, object state)
        {
            return new SerializeAsyncResult(this, writer, parameters, returnValue, callback, state);
        }

        public void EndSerialize(IAsyncResult result)
        {
            SerializeAsyncResult.End(result);
        }

        // Legacy APM support shell; intentionally non-functional (see constructor).
        internal class SerializeAsyncResult : AsyncResult
        {
            private static AsyncCompletion s_handleEndSerialize = new AsyncCompletion(HandleEndSerialize);
            private StreamFormatter _streamFormatter;
            private XmlDictionaryWriter _writer;

            internal SerializeAsyncResult(StreamFormatter streamFormatter, XmlDictionaryWriter writer, object[] parameters, object returnValue, AsyncCallback callback, object state)
                : base(callback, state)
            {
                _streamFormatter = streamFormatter;
                _writer = writer;
                // As we use the Task-returning method for async operation,
                // we shouldn't get to this point. Throw exception just in case.
                throw ExceptionHelper.AsError(NotImplemented.ByDesign);
            }

            private static bool HandleEndSerialize(IAsyncResult result)
            {
                SerializeAsyncResult thisPtr = (SerializeAsyncResult)result.AsyncState;
                thisPtr._streamFormatter.WriteEndWrapperIfNecessary(thisPtr._writer);
                return true;
            }

            public static void End(IAsyncResult result)
            {
                AsyncResult.End<SerializeAsyncResult>(result);
            }
        }

        // Installs a lazily-read MessageBodyStream in the stream slot (parameter or return value).
        internal void Deserialize(object[] parameters, ref object retVal, Message message)
        {
            SetStreamValue(parameters, ref retVal, new MessageBodyStream(message, WrapperName, WrapperNamespace, PartName, PartNamespace, _isRequest));
        }

        internal string WrapperName
        {
            get { return _wrapperName; }
            set { _wrapperName = value; }
        }

        internal string WrapperNamespace
        {
            get { return _wrapperNS; }
            set { _wrapperNS = value; }
        }

        internal string PartName
        {
            get { return _partName; }
        }

        internal string PartNamespace
        {
            get { return _partNS; }
        }

        private Stream GetStreamValue(object[] parameters, object returnValue)
        {
            if (_streamIndex == returnValueIndex)
                return (Stream)returnValue;
            return (Stream)parameters[_streamIndex];
        }

        private void SetStreamValue(object[] parameters, ref object returnValue, Stream streamValue)
        {
            if (_streamIndex == returnValueIndex)
                returnValue = streamValue;
            else
                parameters[_streamIndex] = streamValue;
        }

        // Returns the single valid stream part, or null when the message has no streams;
        // throws when streams are present but in an unsupported position/shape.
        private static MessagePartDescription ValidateAndGetStreamPart(MessageDescription messageDescription, bool isRequest, string operationName)
        {
            MessagePartDescription part = GetStreamPart(messageDescription);
            if (part != null)
                return part;
            if (HasStream(messageDescription))
            {
                if (messageDescription.IsTypedMessage)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.SFxInvalidStreamInTypedMessage, messageDescription.MessageName)));
                else if (isRequest)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.SFxInvalidStreamInRequest, operationName)));
                else
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.SFxInvalidStreamInResponse, operationName)));
            }
            return null;
        }

        private static bool HasStream(MessageDescription messageDescription)
        {
            if (messageDescription.Body.ReturnValue != null && messageDescription.Body.ReturnValue.Type == typeof(Stream))
                return true;
            foreach (MessagePartDescription part in messageDescription.Body.Parts)
            {
                if (part.Type == typeof(Stream))
                    return true;
            }
            return false;
        }

        // A stream part is only valid when it is the SOLE body part (either the lone
        // return value with no parts, or the single part with no return value).
        private static MessagePartDescription GetStreamPart(MessageDescription messageDescription)
        {
            if (OperationFormatter.IsValidReturnValue(messageDescription.Body.ReturnValue))
            {
                if (messageDescription.Body.Parts.Count == 0)
                    if (messageDescription.Body.ReturnValue.Type == typeof(Stream))
                        return messageDescription.Body.ReturnValue;
            }
            else
            {
                if (messageDescription.Body.Parts.Count == 1)
                    if (messageDescription.Body.Parts[0].Type == typeof(Stream))
                        return messageDescription.Body.Parts[0];
            }
            return null;
        }

        internal static bool IsStream(MessageDescription messageDescription)
        {
            return GetStreamPart(messageDescription) != null;
        }

        // Read-only Stream facade over a message body: decodes base64 element content
        // on demand via an XmlDictionaryReader created on first Read/ReadAsync.
        internal class MessageBodyStream : Stream
        {
            private Message _message;
            private XmlDictionaryReader _reader;   // created lazily on first read
            private ReadAheadWrappingStream _readAheadStream;
            private long _position;
            private string _wrapperName, _wrapperNs;
            private string _elementName, _elementNs;
            private bool _isRequest;

            internal MessageBodyStream(Message message, string wrapperName, string wrapperNs, string elementName, string elementNs, bool isRequest)
            {
                _message = message;
                _position = 0;
                _wrapperName = wrapperName;
                _wrapperNs = wrapperNs;
                _elementName = elementName;
                _elementNs = elementNs;
                _isRequest = isRequest;
            }

            public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
            {
                EnsureStreamIsOpen();
                if (buffer == null)
                    throw TraceUtility.ThrowHelperError(new ArgumentNullException("buffer"), _message);
                if (offset < 0)
                    throw TraceUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", offset, SR.Format(SR.ValueMustBeNonNegative)), _message);
                if (count < 0)
                    throw TraceUtility.ThrowHelperError(new ArgumentOutOfRangeException("count", count, SR.Format(SR.ValueMustBeNonNegative)), _message);
                if (buffer.Length - offset < count)
                    throw TraceUtility.ThrowHelperError(new ArgumentException(SR.Format(SR.SFxInvalidStreamOffsetLength, offset + count)), _message);
                try
                {
                    if (_reader == null)
                    {
                        // First read: position a reader at the body contents, skipping the
                        // wrapper element (if any) down to the part element.
                        _readAheadStream = _message.Properties[ReadAheadWrappingStream.ReadAheadWrappingStreamPropertyName] as ReadAheadWrappingStream;
                        // Fill buffer so reading to the body contents shouldn't cause a read through to transport stream
                        if (_readAheadStream != null)
                        {
                            await _readAheadStream.EnsureBufferedAsync(cancellationToken);
                        }
                        _reader = _message.GetReaderAtBodyContents();
                        if (_wrapperName != null)
                        {
                            _reader.MoveToContent();
                            _reader.ReadStartElement(_wrapperName, _wrapperNs);
                        }
                        _reader.MoveToContent();
                        if (_reader.NodeType == XmlNodeType.EndElement)
                        {
                            // Empty element: no stream content.
                            return 0;
                        }
                        _reader.ReadStartElement(_elementName, _elementNs);
                    }
                    if (_reader.MoveToContent() != XmlNodeType.Text)
                    {
                        // Content exhausted: drain the reader so the message is fully consumed.
                        await ExhaustAsync(_reader, _readAheadStream, cancellationToken);
                        return 0;
                    }
                    if (_readAheadStream != null)
                    {
                        // Calculate number of UTF8 bytes needed to represent the requested bytes in base64.
                        // The +3 is to adjust for integer division truncating instead of rounding and the final
                        // == padding that can occur at the end of a base64 encoded string.
                        int base64EncodedBytes = (int)((8L * count) / 6L) + 3;
                        await _readAheadStream.EnsureBufferedAsync(base64EncodedBytes, cancellationToken);
                    }
                    int bytesRead = _reader.ReadContentAsBase64(buffer, offset, count);
                    _position += bytesRead;
                    if (bytesRead == 0)
                    {
                        await ExhaustAsync(_reader, _readAheadStream, cancellationToken);
                    }
                    return bytesRead;
                }
                catch (Exception ex)
                {
                    if (Fx.IsFatal(ex))
                        throw;
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new IOException(SR.Format(SR.SFxStreamIOException), ex));
                }
            }

            // Synchronous counterpart of ReadAsync. Note: does not consult the read-ahead
            // stream, so it may read through to the transport stream.
            public override int Read(byte[] buffer, int offset, int count)
            {
                EnsureStreamIsOpen();
                if (buffer == null)
                    throw TraceUtility.ThrowHelperError(new ArgumentNullException("buffer"), _message);
                if (offset < 0)
                    throw TraceUtility.ThrowHelperError(new ArgumentOutOfRangeException("offset", offset, SR.Format(SR.ValueMustBeNonNegative)), _message);
                if (count < 0)
                    throw TraceUtility.ThrowHelperError(new ArgumentOutOfRangeException("count", count, SR.Format(SR.ValueMustBeNonNegative)), _message);
                if (buffer.Length - offset < count)
                    throw TraceUtility.ThrowHelperError(new ArgumentException(SR.Format(SR.SFxInvalidStreamOffsetLength, offset + count)), _message);
                try
                {
                    if (_reader == null)
                    {
                        _reader = _message.GetReaderAtBodyContents();
                        if (_wrapperName != null)
                        {
                            _reader.MoveToContent();
                            _reader.ReadStartElement(_wrapperName, _wrapperNs);
                        }
                        _reader.MoveToContent();
                        if (_reader.NodeType == XmlNodeType.EndElement)
                        {
                            return 0;
                        }
                        _reader.ReadStartElement(_elementName, _elementNs);
                    }
                    if (_reader.MoveToContent() != XmlNodeType.Text)
                    {
                        Exhaust(_reader);
                        return 0;
                    }
                    int bytesRead = _reader.ReadContentAsBase64(buffer, offset, count);
                    _position += bytesRead;
                    if (bytesRead == 0)
                    {
                        Exhaust(_reader);
                    }
                    return bytesRead;
                }
                catch (Exception ex)
                {
                    if (Fx.IsFatal(ex))
                        throw;
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new IOException(SR.Format(SR.SFxStreamIOException), ex));
                }
            }

            private void EnsureStreamIsOpen()
            {
                if (_message.State == MessageState.Closed)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ObjectDisposedException(SR.Format(
                        _isRequest ? SR.SFxStreamRequestMessageClosed : SR.SFxStreamResponseMessageClosed)));
            }

            // Reads the remainder of the document so the underlying message is fully consumed.
            private static void Exhaust(XmlDictionaryReader reader)
            {
                if (reader != null)
                {
                    while (reader.Read())
                    {
                        // drain
                    }
                }
            }

            // Async drain; keeps the read-ahead buffer filled before each Read so draining
            // doesn't block on the transport.
            private static async Task ExhaustAsync(XmlDictionaryReader reader, ReadAheadWrappingStream readAheadStream, CancellationToken cancellationToken)
            {
                if (reader != null)
                {
                    do
                    {
                        if (readAheadStream != null)
                        {
                            await readAheadStream.EnsureBufferedAsync(cancellationToken);
                        }
                    } while (reader.Read());
                }
            }

            public override long Position
            {
                get
                {
                    EnsureStreamIsOpen();
                    return _position;
                }
                set { throw TraceUtility.ThrowHelperError(new NotSupportedException(), _message); }
            }

            protected override void Dispose(bool isDisposing)
            {
                // Closing the message invalidates further reads (see CanRead/EnsureStreamIsOpen).
                _message.Close();
                if (_reader != null)
                {
                    _reader.Dispose();
                    _reader = null;
                }
                base.Dispose(isDisposing);
            }

            public override bool CanRead { get { return _message.State != MessageState.Closed; } }
            public override bool CanSeek { get { return false; } }
            public override bool CanWrite { get { return false; } }

            public override long Length
            {
                get { throw TraceUtility.ThrowHelperError(new NotSupportedException(), _message); }
            }

            public override void Flush() { throw TraceUtility.ThrowHelperError(new NotSupportedException(), _message); }
            public override long Seek(long offset, SeekOrigin origin) { throw TraceUtility.ThrowHelperError(new NotSupportedException(), _message); }
            public override void SetLength(long value) { throw TraceUtility.ThrowHelperError(new NotSupportedException(), _message); }
            public override void Write(byte[] buffer, int offset, int count) { throw TraceUtility.ThrowHelperError(new NotSupportedException(), _message); }
        }

        // Minimal stream-provider shim handed to StreamFormatterHelper; ReleaseStream is a
        // no-op because the caller owns the stream's lifetime.
        internal class OperationStreamProvider
        {
            private Stream _stream;

            internal OperationStreamProvider(Stream stream)
            {
                _stream = stream;
            }

            public Stream GetStream()
            {
                return _stream;
            }

            public void ReleaseStream(Stream stream)
            {
                //Noop
            }
        }

        internal class StreamFormatterHelper
        {
            // The method was duplicated from the desktop implementation of
            // System.Xml.XmlDictionaryWriter.WriteValue(IStreamProvider)
            public static void WriteValue(XmlDictionaryWriter writer, OperationStreamProvider value)
            {
                if (value == null)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("value"));
                Stream stream = value.GetStream();
                if (stream == null)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new XmlException(SR.Format(SR.XmlInvalidStream)));
                }
                // Copy in blocks, growing the block 16x (up to 64KB) whenever a full block
                // was read, to amortize per-call overhead on large streams.
                int blockSize = 256;
                int bytesRead = 0;
                byte[] block = new byte[blockSize];
                while (true)
                {
                    bytesRead = stream.Read(block, 0, blockSize);
                    if (bytesRead > 0)
                    {
                        writer.WriteBase64(block, 0, bytesRead);
                    }
                    else
                    {
                        break;
                    }
                    if (blockSize < 65536 && bytesRead == blockSize)
                    {
                        blockSize = blockSize * 16;
                        block = new byte[blockSize];
                    }
                }
                value.ReleaseStream(stream);
            }

            // Async counterpart: reads asynchronously but writes synchronously (see comment below).
            public static async Task WriteValueAsync(XmlDictionaryWriter writer, OperationStreamProvider value)
            {
                if (value == null)
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("value"));
                Stream stream = value.GetStream();
                if (stream == null)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new XmlException(SR.Format(SR.XmlInvalidStream)));
                }
                int blockSize = 256;
                int bytesRead = 0;
                byte[] block = new byte[blockSize];
                while (true)
                {
                    bytesRead = await stream.ReadAsync(block, 0, blockSize);
                    if (bytesRead > 0)
                    {
                        // XmlDictionaryWriter has not implemented WriteBase64Async() yet.
                        writer.WriteBase64(block, 0, bytesRead);
                    }
                    else
                    {
                        break;
                    }
                    if (blockSize < 65536 && bytesRead == blockSize)
                    {
                        blockSize = blockSize * 16;
                        block = new byte[blockSize];
                    }
                }
                value.ReleaseStream(stream);
            }
        }
    }
}
using System;
using System.Data;
using System.Text;
using bv.common;
using bv.common.Core;
using bv.common.db;
using bv.common.db.Core;
using EIDSS;
using eidss.model.Avr;
using eidss.model.AVR.Db;
using eidss.model.Avr.Tree;
using eidss.model.Core;
using eidss.model.Enums;
using eidss.model.WindowsService.Serialization;
using Ionic.Zlib;

namespace eidss.avr.db.DBService
{
    // Base DB service for AVR (analysis/visualization/reports) objects: no-op detail
    // load/post plus publish/unpublish of layouts, folders and queries via stored procedures.
    public class BaseAvrDbService : BaseDbService
    {
        public BaseAvrDbService()
        {
            UseDatasetCopyInPost = false;
        }

        // Returns an empty DataSet; only records the requested id in m_ID.
        public override DataSet GetDetail(object id)
        {
            var ds = new DataSet();
            m_ID = id;
            return (ds);
        }

        // Posting is a no-op in this base class; derived services persist their own data.
        public override bool PostDetail(DataSet dataSet, int postType, IDbTransaction transaction = null)
        {
            return true;
        }

        public override void AcceptChanges(DataSet ds)
        {
            // This method should duplicate the base method but WITHOUT the line
            // "m_IsNewObject = false".
            foreach (DataTable table in ds.Tables)
            {
                if (!SkipAcceptChanges(table))
                {
                    table.AcceptChanges();
                }
            }
            RaiseAcceptChangesEvent(ds);
        }

        #region publish & unpublish

        // Publishes (id is the local id) or unpublishes (id is resolved to the global id
        // first) a tree element inside a single transaction.
        public void PublishUnpublish(long id, AvrTreeElementType type, bool isPublish)
        {
            long globalId = -1;
            if (!isPublish)
            {
                // Unpublish stored procedures take the GLOBAL id; resolve it before opening
                // the transaction.
                globalId = AvrQueryLayoutTreeDbHelper.GetGlobalId(id, type);
            }
            // todo: refactor to use new framework here
            lock (Connection)
            {
                try
                {
                    if (Connection.State != ConnectionState.Open)
                    {
                        Connection.Open();
                    }
                    using (IDbTransaction transaction = Connection.BeginTransaction())
                    {
                        try
                        {
                            if (isPublish)
                            {
                                Publish(id, transaction, type);
                            }
                            else
                            {
                                Unpublish(globalId, transaction, type);
                            }
                            transaction.Commit();
                        }
                        catch (Exception)
                        {
                            transaction.Rollback();
                            throw;
                        }
                    }
                }
                finally
                {
                    // NOTE(review): this finally block RE-OPENS the connection when it is not
                    // open and never closes it. That looks inverted (a cleanup block would
                    // normally Close an open connection) — confirm whether the shared
                    // Connection is intentionally kept open for reuse or whether the
                    // condition/method call is a bug.
                    if (Connection.State != ConnectionState.Open)
                    {
                        Connection.Open();
                    }
                }
            }
        }

        // Runs the type-specific publish stored procedure; the output parameter returns the
        // newly assigned global id (non-null => log an audit event).
        private void Publish(long id, IDbTransaction transaction, AvrTreeElementType type)
        {
            string spName;
            string inputParamName;
            string outputParamName;
            EventType eventType;
            switch (type)
            {
                case AvrTreeElementType.Layout:
                    spName = "spAsLayoutPublish";
                    inputParamName = "@idflLayout";
                    outputParamName = "@idfsLayout";
                    eventType = EventType.AVRLayoutPublishedLocal;
                    break;
                case AvrTreeElementType.Folder:
                    spName = "spAsFolderPublish";
                    inputParamName = "@idflLayoutFolder";
                    outputParamName = "@idfsLayoutFolder";
                    eventType = EventType.AVRLayoutFolderPublishedLocal;
                    break;
                case AvrTreeElementType.Query:
                    spName = "spAsQueryPublish";
                    inputParamName = "@idflQuery";
                    outputParamName = "@idfsQuery";
                    eventType = EventType.AVRQueryPublishedLocal;
                    break;
                default:
                    throw new AvrException("Unsupported AvrTreeElementType " + type);
            }
            object publishedId;
            using (IDbCommand cmd = CreateSPCommand(spName, transaction))
            {
                AddAndCheckParam(cmd, inputParamName, id);
                AddTypedParam(cmd, outputParamName, SqlDbType.BigInt, ParameterDirection.Output);
                cmd.ExecuteNonQuery();
                publishedId = GetParamValue(cmd, outputParamName);
            }
            // Invalidate cached lookups that mirror the published trees.
            LookupCache.NotifyChange("Layout", transaction);
            LookupCache.NotifyChange("LayoutFolder", transaction);
            LookupCache.NotifyChange("Query", transaction);
            if (publishedId != null)
            {
                EidssEventLog.Instance.CreateProcessedEvent(eventType, id, 0, 0, null, EidssUserContext.User.ID, transaction);
            }
        }

        // Runs the type-specific unpublish stored procedure; the output parameter returns
        // the LOCAL id of the element. ("publisedId" typo kept — renaming would be a code change.)
        private void Unpublish(long publisedId, IDbTransaction transaction, AvrTreeElementType type)
        {
            string spName;
            string inputParamName;
            string outputParamName;
            EventType eventType;
            switch (type)
            {
                case AvrTreeElementType.Layout:
                    spName = "spAsLayoutUnpublish";
                    inputParamName = "@idfsLayout";
                    outputParamName = "@idflLayout";
                    eventType = EventType.AVRLayoutUnpublishedLocal;
                    break;
                case AvrTreeElementType.Folder:
                    spName = "spAsFolderUnpublish";
                    inputParamName = "@idfsLayoutFolder";
                    outputParamName = "@idflLayoutFolder";
                    eventType = EventType.AVRLayoutFolderUnpublishedLocal;
                    break;
                case AvrTreeElementType.Query:
                    spName = "spAsQueryUnpublish";
                    inputParamName = "@idfsQuery";
                    outputParamName = "@idflQuery";
                    eventType = EventType.AVRQueryUnpublishedLocal;
                    break;
                default:
                    throw new AvrException("Unsupported AvrTreeElementType " + type);
            }
            object id = null;
            using (IDbCommand cmd = CreateSPCommand(spName, transaction))
            {
                AddAndCheckParam(cmd, inputParamName, publisedId);
                AddTypedParam(cmd, outputParamName, SqlDbType.BigInt, ParameterDirection.Output);
                cmd.ExecuteNonQuery();
                id = GetParamValue(cmd, outputParamName);
            }
            LookupCache.NotifyChange("Layout", transaction);
            LookupCache.NotifyChange("LayoutFolder", transaction);
            LookupCache.NotifyChange("Query", transaction);
            //Create event for local element, because local element shall be opened when we click to event
            EidssEventLog.Instance.CreateProcessedEvent(eventType, id, 0, 0, null, EidssUserContext.User.ID, transaction);
        }

        #endregion

        #region Helper Methods

        // For a brand-new object, adopt newId outright; otherwise keep m_ID unless it is
        // empty/invalid (see CorrectId).
        internal void ChangeIdForNewObject(long newId)
        {
            m_ID = m_IsNewObject ? newId : CorrectId(m_ID, newId);
        }

        // Returns id as a long, substituting defaultId when id is empty, not a long, or <= 0.
        internal static long CorrectId(object id, long defaultId)
        {
            if (Utils.IsEmpty(id) || (!(id is long)) || ((long)id <= 0))
            {
                id = defaultId;
            }
            return (long)id;
        }

        // Adds a parameter named "@<ColumnName>" for the given column.
        public void AddAndCheckParam(IDbCommand cmd, DataColumn paramColumn, object paramValue)
        {
            Utils.CheckNotNullOrEmpty(paramColumn.ColumnName, "paramColumn.ColumnName");
            AddAndCheckParam(cmd, string.Format("@{0}", paramColumn.ColumnName), paramValue);
        }

        public void AddAndCheckParam(IDbCommand cmd, string paramName, object paramValue)
        {
            AddAndCheckParam(cmd, paramName, paramValue, ParameterDirection.Input);
        }

        // Validates arguments, adds the parameter, and converts a recorded m_Error into an
        // AvrDbException.
        public void AddAndCheckParam(IDbCommand cmd, string paramName, object paramValue, ParameterDirection direction)
        {
            Utils.CheckNotNull(cmd, "cmd");
            Utils.CheckNotNullOrEmpty(paramName, "paramName");
            Utils.CheckNotNull(paramValue, "paramValue");
            AddParam(cmd, paramName, paramValue, ref m_Error, direction);
            if (m_Error != null)
            {
                throw new AvrDbException(m_Error.Text);
            }
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using Xunit; namespace System.Security.Cryptography.EcDsa.Tests { public partial class ECDsaTests : ECDsaTestsBase { #if netcoreapp [Fact] public void KeySizeProp() { using (ECDsa e = ECDsaFactory.Create()) { e.KeySize = 384; Assert.Equal(384, e.KeySize); ECParameters p384 = e.ExportParameters(false); Assert.True(p384.Curve.IsNamed); p384.Validate(); e.KeySize = 521; Assert.Equal(521, e.KeySize); ECParameters p521 = e.ExportParameters(false); Assert.True(p521.Curve.IsNamed); p521.Validate(); // Ensure the key was regenerated Assert.NotEqual(p384.Curve.Oid.FriendlyName, p521.Curve.Oid.FriendlyName); } } [Theory, MemberData(nameof(TestNewCurves))] public void TestRegenKeyExplicit(CurveDef curveDef) { ECParameters param, param2; ECDsa ec, newEc; using (ec = ECDsaFactory.Create(curveDef.Curve)) { param = ec.ExportExplicitParameters(true); Assert.NotEqual(null, param.D); using (newEc = ECDsaFactory.Create()) { newEc.ImportParameters(param); // The curve name is not flowed on explicit export\import (by design) so this excercises logic // that regenerates based on current curve values newEc.GenerateKey(param.Curve); param2 = newEc.ExportExplicitParameters(true); // Only curve should match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); CompareCurve(param.Curve, param2.Curve); // Specify same curve name newEc.GenerateKey(curveDef.Curve); Assert.Equal(curveDef.KeySize, newEc.KeySize); param2 = newEc.ExportExplicitParameters(true); // Only curve should match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); CompareCurve(param.Curve, param2.Curve); // Specify different curve than current if (param.Curve.IsPrime) { if 
(curveDef.Curve.Oid.FriendlyName != ECCurve.NamedCurves.nistP256.Oid.FriendlyName) { // Specify different curve (nistP256) by explicit value newEc.GenerateKey(ECCurve.NamedCurves.nistP256); Assert.Equal(256, newEc.KeySize); param2 = newEc.ExportExplicitParameters(true); // Keys should should not match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); // P,X,Y (and others) should not match Assert.True(param2.Curve.IsPrime); Assert.NotEqual(param.Curve.Prime, param2.Curve.Prime); Assert.NotEqual(param.Curve.G.X, param2.Curve.G.X); Assert.NotEqual(param.Curve.G.Y, param2.Curve.G.Y); // Reset back to original newEc.GenerateKey(param.Curve); Assert.Equal(curveDef.KeySize, newEc.KeySize); ECParameters copyOfParam1 = newEc.ExportExplicitParameters(true); // Only curve should match ComparePrivateKey(param, copyOfParam1, false); ComparePublicKey(param.Q, copyOfParam1.Q, false); CompareCurve(param.Curve, copyOfParam1.Curve); // Set back to nistP256 newEc.GenerateKey(param2.Curve); Assert.Equal(256, newEc.KeySize); param2 = newEc.ExportExplicitParameters(true); // Keys should should not match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); // P,X,Y (and others) should not match Assert.True(param2.Curve.IsPrime); Assert.NotEqual(param.Curve.Prime, param2.Curve.Prime); Assert.NotEqual(param.Curve.G.X, param2.Curve.G.X); Assert.NotEqual(param.Curve.G.Y, param2.Curve.G.Y); } } else if (param.Curve.IsCharacteristic2) { if (curveDef.Curve.Oid.Value != ECDSA_Sect193r1_OID_VALUE) { if (ECDsaFactory.IsCurveValid(new Oid(ECDSA_Sect193r1_OID_VALUE))) { // Specify different curve by name newEc.GenerateKey(ECCurve.CreateFromValue(ECDSA_Sect193r1_OID_VALUE)); Assert.Equal(193, newEc.KeySize); param2 = newEc.ExportExplicitParameters(true); // Keys should should not match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); // Polynomial,X,Y (and others) should not match 
Assert.True(param2.Curve.IsCharacteristic2); Assert.NotEqual(param.Curve.Polynomial, param2.Curve.Polynomial); Assert.NotEqual(param.Curve.G.X, param2.Curve.G.X); Assert.NotEqual(param.Curve.G.Y, param2.Curve.G.Y); } } } } } } [Theory] [MemberData(nameof(TestCurves))] public void TestRegenKeyNamed(CurveDef curveDef) { ECParameters param, param2; ECDsa ec; using (ec = ECDsaFactory.Create(curveDef.Curve)) { param = ec.ExportParameters(true); Assert.NotEqual(param.D, null); param.Validate(); ec.GenerateKey(param.Curve); param2 = ec.ExportParameters(true); param2.Validate(); // Only curve should match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); CompareCurve(param.Curve, param2.Curve); } } [ConditionalFact(nameof(ECExplicitCurvesSupported))] public void TestRegenKeyNistP256() { ECParameters param, param2; ECDsa ec; using (ec = ECDsaFactory.Create(256)) { param = ec.ExportExplicitParameters(true); Assert.NotEqual(param.D, null); ec.GenerateKey(param.Curve); param2 = ec.ExportExplicitParameters(true); // Only curve should match ComparePrivateKey(param, param2, false); ComparePublicKey(param.Q, param2.Q, false); CompareCurve(param.Curve, param2.Curve); } } [Theory] [MemberData(nameof(TestCurves))] public void TestChangeFromNamedCurveToKeySize(CurveDef curveDef) { using (ECDsa ec = ECDsaFactory.Create(curveDef.Curve)) { ECParameters param = ec.ExportParameters(false); // Avoid comparing against same key as in curveDef if (ec.KeySize != 384 && ec.KeySize != 521) { ec.KeySize = 384; ECParameters param384 = ec.ExportParameters(false); Assert.NotEqual(param.Curve.Oid.FriendlyName, param384.Curve.Oid.FriendlyName); Assert.Equal(384, ec.KeySize); ec.KeySize = 521; ECParameters param521 = ec.ExportParameters(false); Assert.NotEqual(param384.Curve.Oid.FriendlyName, param521.Curve.Oid.FriendlyName); Assert.Equal(521, ec.KeySize); } } } [ConditionalFact(nameof(ECExplicitCurvesSupported))] public void TestPositive256WithExplicitParameters() { 
using (ECDsa ecdsa = ECDsaFactory.Create()) { ecdsa.ImportParameters(ECDsaTestData.GetNistP256ExplicitTestData()); Verify256(ecdsa, true); } } [Fact] public void TestNegative256WithRandomKey() { using (ECDsa ecdsa = ECDsaFactory.Create(ECCurve.NamedCurves.nistP256)) { Verify256(ecdsa, false); // will not match because of randomness } } #endif // netcoreapp [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArray_NullData_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("data", () => ecdsa.SignData((byte[])null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArray_DefaultHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new byte[0], default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArraySpan_NullData_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("data", () => ecdsa.SignData(null, -1, -1, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArraySpan_NegativeOffset_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.SignData(new byte[0], -1, -1, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArraySpan_OffsetGreaterThanCount_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.SignData(new byte[0], 2, 1, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArraySpan_NegativeCount_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.SignData(new byte[0], 0, -1, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void 
SignDataByteArraySpan_CountGreaterThanLengthMinusOffset_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.SignData(new byte[0], 0, 1, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArraySpan_DefaultHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new byte[0], 0, 0, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataByteArraySpan_EmptyHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new byte[10], 0, 10, new HashAlgorithmName(""))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataStream_NullData_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("data", () => ecdsa.SignData((Stream)null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void SignDataStream_DefaultHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.SignData(new MemoryStream(), default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArray_NullData_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("data", () => ecdsa.VerifyData((byte[])null, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArray_NullSignature_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyData(new byte[0], null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArray_DefaultHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new byte[0], 
new byte[0], default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_NullData_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("data", () => ecdsa.VerifyData((byte[])null, -1, -1, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_NegativeOffset_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.VerifyData(new byte[0], -1, -1, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_OffsetGreaterThanCount_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("offset", () => ecdsa.VerifyData(new byte[0], 2, 1, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_NegativeCount_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.VerifyData(new byte[0], 0, -1, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_CountGreaterThanLengthMinusOffset_ThrowsArgumentOutOfRangeException(ECDsa ecdsa) { Assert.Throws<ArgumentOutOfRangeException>("count", () => ecdsa.VerifyData(new byte[0], 0, 1, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_NullSignature_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyData(new byte[0], 0, 0, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataByteArraySpan_EmptyHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new byte[10], 0, 10, new byte[0], new 
HashAlgorithmName(""))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataStream_NullData_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("data", () => ecdsa.VerifyData((Stream)null, null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataStream_NullSignature_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>("signature", () => ecdsa.VerifyData(new MemoryStream(), null, default(HashAlgorithmName))); } [Theory, MemberData(nameof(AllImplementations))] public void VerifyDataStream_DefaultHashAlgorithm_ThrowsArgumentException(ECDsa ecdsa) { Assert.Throws<ArgumentException>("hashAlgorithm", () => ecdsa.VerifyData(new MemoryStream(), new byte[0], default(HashAlgorithmName))); } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// public static IEnumerable<object[]> AllImplementations() { return new[] { new ECDsa[] { ECDsaFactory.Create() }, new ECDsa[] { new ECDsaStub() }, }; } public static IEnumerable<object[]> RealImplementations() { return new[] { new ECDsa[] { ECDsaFactory.Create() }, }; } [Theory, MemberData(nameof(RealImplementations))] public void SignHash_NullHash_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>( "hash", () => ecdsa.SignHash(null)); } [Theory, MemberData(nameof(RealImplementations))] public void VerifyHash_NullHash_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>( "hash", () => ecdsa.VerifyHash(null, null)); } [Theory, MemberData(nameof(RealImplementations))] public void VerifyHash_NullSignature_ThrowsArgumentNullException(ECDsa ecdsa) { Assert.Throws<ArgumentNullException>( "signature", () => ecdsa.VerifyHash(new byte[0], null)); } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// [Theory, 
MemberData(nameof(RealImplementations))] public void SignDataByteArray_UnsupportedHashAlgorithm_ThrowsCryptographicException(ECDsa ecdsa) { Assert.Throws<CryptographicException>( () => ecdsa.SignData(new byte[0], new HashAlgorithmName("NOT_A_REAL_HASH_ALGORITHM"))); } [Theory, MemberData(nameof(RealImplementations))] public void SignDataByteArraySpan_UnsupportedHashAlgorithm_ThrowsCryptographicException(ECDsa ecdsa) { Assert.Throws<CryptographicException>( () => ecdsa.SignData(new byte[0], 0, 0, new HashAlgorithmName("NOT_A_REAL_HASH_ALGORITHM"))); } [Theory, MemberData(nameof(RealImplementations))] public void SignDataStream_UnsupportedHashAlgorithm_ThrowsCryptographicException(ECDsa ecdsa) { Assert.Throws<CryptographicException>( () => ecdsa.SignData(new MemoryStream(), new HashAlgorithmName("NOT_A_REAL_HASH_ALGORITHM"))); } [Theory, MemberData(nameof(RealImplementations))] public void VerifyDataByteArray_UnsupportedHashAlgorithm_ThrowsCryptographicException(ECDsa ecdsa) { Assert.Throws<CryptographicException>( () => ecdsa.VerifyData(new byte[0], new byte[0], new HashAlgorithmName("NOT_A_REAL_HASH_ALGORITHM"))); } [Theory, MemberData(nameof(RealImplementations))] public void VerifyDataByteArraySpan_UnsupportedHashAlgorithm_ThrowsCryptographicException(ECDsa ecdsa) { Assert.Throws<CryptographicException>( () => ecdsa.VerifyData(new byte[0], 0, 0, new byte[0], new HashAlgorithmName("NOT_A_REAL_HASH_ALGORITHM"))); } [Theory, MemberData(nameof(RealImplementations))] public void VerifyDataStream_UnsupportedHashAlgorithm_ThrowsCryptographicException(ECDsa ecdsa) { Assert.Throws<CryptographicException>( () => ecdsa.VerifyData(new MemoryStream(), new byte[0], new HashAlgorithmName("NOT_A_REAL_HASH_ALGORITHM"))); } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// [Theory] [MemberData(nameof(RealImplementations))] public void SignData_MaxOffset_ZeroLength_NoThrow(ECDsa ecdsa) { // Explicitly larger 
than Array.Empty byte[] data = new byte[10]; byte[] signature = ecdsa.SignData(data, data.Length, 0, HashAlgorithmName.SHA256); Assert.True(ecdsa.VerifyData(Array.Empty<byte>(), signature, HashAlgorithmName.SHA256)); } [Theory] [MemberData(nameof(RealImplementations))] public void VerifyData_MaxOffset_ZeroLength_NoThrow(ECDsa ecdsa) { // Explicitly larger than Array.Empty byte[] data = new byte[10]; byte[] signature = ecdsa.SignData(Array.Empty<byte>(), HashAlgorithmName.SHA256); Assert.True(ecdsa.VerifyData(data, data.Length, 0, signature, HashAlgorithmName.SHA256)); } [Theory] [MemberData(nameof(RealImplementations))] public void Roundtrip_WithOffset(ECDsa ecdsa) { byte[] data = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; byte[] halfData = { 5, 6, 7, 8, 9 }; byte[] dataSignature = ecdsa.SignData(data, 5, data.Length - 5, HashAlgorithmName.SHA256); byte[] halfDataSignature = ecdsa.SignData(halfData, HashAlgorithmName.SHA256); // Cross-feed the VerifyData calls to prove that both offsets work Assert.True(ecdsa.VerifyData(data, 5, data.Length - 5, halfDataSignature, HashAlgorithmName.SHA256)); Assert.True(ecdsa.VerifyData(halfData, dataSignature, HashAlgorithmName.SHA256)); } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// [Theory] [InlineData(256)] [InlineData(384)] [InlineData(521)] public void CreateKey(int keySize) { using (ECDsa ecdsa = ECDsaFactory.Create()) { // Step 1, don't throw here. ecdsa.KeySize = keySize; // Step 2, ensure the key was generated without throwing. 
ecdsa.SignData(Array.Empty<byte>(), HashAlgorithmName.SHA256); } } ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// public static IEnumerable<object[]> InteroperableSignatureConfigurations() { foreach (HashAlgorithmName hashAlgorithm in new[] { HashAlgorithmName.MD5, HashAlgorithmName.SHA1, HashAlgorithmName.SHA256, HashAlgorithmName.SHA384, HashAlgorithmName.SHA512 }) { yield return new object[] { ECDsaFactory.Create(), hashAlgorithm }; } } [Theory] [MemberData(nameof(InteroperableSignatureConfigurations))] public void SignVerify_InteroperableSameKeys_RoundTripsUnlessTampered(ECDsa ecdsa, HashAlgorithmName hashAlgorithm) { byte[] data = Encoding.UTF8.GetBytes("something to repeat and sign"); // large enough to make hashing work though multiple iterations and not a multiple of 4KB it uses. byte[] dataArray = new byte[33333]; MemoryStream dataStream = new MemoryStream(dataArray, true); while (dataStream.Position < dataArray.Length - data.Length) { dataStream.Write(data, 0, data.Length); } dataStream.Position = 0; byte[] dataArray2 = new byte[dataArray.Length + 2]; dataArray.CopyTo(dataArray2, 1); ArraySegment<byte> dataSpan = new ArraySegment<byte>(dataArray2, 1, dataArray.Length); HashAlgorithm halg; if (hashAlgorithm == HashAlgorithmName.MD5) halg = MD5.Create(); else if (hashAlgorithm == HashAlgorithmName.SHA1) halg = SHA1.Create(); else if (hashAlgorithm == HashAlgorithmName.SHA256) halg = SHA256.Create(); else if (hashAlgorithm == HashAlgorithmName.SHA384) halg = SHA384.Create(); else if (hashAlgorithm == HashAlgorithmName.SHA512) halg = SHA512.Create(); else throw new Exception("Hash algorithm not supported."); List<byte[]> signatures = new List<byte[]>(6); // Compute a signature using each of the SignData overloads. Then, verify it using each // of the VerifyData overloads, and VerifyHash overloads. // // Then, verify that VerifyHash fails if the data is tampered with. 
signatures.Add(ecdsa.SignData(dataArray, hashAlgorithm)); signatures.Add(ecdsa.SignData(dataSpan.Array, dataSpan.Offset, dataSpan.Count, hashAlgorithm)); signatures.Add(ecdsa.SignData(dataStream, hashAlgorithm)); dataStream.Position = 0; signatures.Add(ecdsa.SignHash(halg.ComputeHash(dataArray))); signatures.Add(ecdsa.SignHash(halg.ComputeHash(dataSpan.Array, dataSpan.Offset, dataSpan.Count))); signatures.Add(ecdsa.SignHash(halg.ComputeHash(dataStream))); dataStream.Position = 0; foreach (byte[] signature in signatures) { Assert.True(ecdsa.VerifyData(dataArray, signature, hashAlgorithm), "Verify 1"); Assert.True(ecdsa.VerifyData(dataSpan.Array, dataSpan.Offset, dataSpan.Count, signature, hashAlgorithm), "Verify 2"); Assert.True(ecdsa.VerifyData(dataStream, signature, hashAlgorithm), "Verify 3"); Assert.True(dataStream.Position == dataArray.Length, "Check stream read 3A"); dataStream.Position = 0; Assert.True(ecdsa.VerifyHash(halg.ComputeHash(dataArray), signature), "Verify 4"); Assert.True(ecdsa.VerifyHash(halg.ComputeHash(dataSpan.Array, dataSpan.Offset, dataSpan.Count), signature), "Verify 5"); Assert.True(ecdsa.VerifyHash(halg.ComputeHash(dataStream), signature), "Verify 6"); Assert.True(dataStream.Position == dataArray.Length, "Check stream read 6A"); dataStream.Position = 0; } int distinctSignatures = signatures.Distinct(new ByteArrayComparer()).Count(); Assert.True(distinctSignatures == signatures.Count, "Signing should be randomized"); foreach (byte[] signature in signatures) { signature[signature.Length - 1] ^= 0xFF; // flip some bits Assert.False(ecdsa.VerifyData(dataArray, signature, hashAlgorithm), "Verify Tampered 1"); Assert.False(ecdsa.VerifyData(dataSpan.Array, dataSpan.Offset, dataSpan.Count, signature, hashAlgorithm), "Verify Tampered 2"); Assert.False(ecdsa.VerifyData(dataStream, signature, hashAlgorithm), "Verify Tampered 3"); Assert.True(dataStream.Position == dataArray.Length, "Check stream read 3B"); dataStream.Position = 0; 
Assert.False(ecdsa.VerifyHash(halg.ComputeHash(dataArray), signature), "Verify Tampered 4"); Assert.False(ecdsa.VerifyHash(halg.ComputeHash(dataSpan.Array, dataSpan.Offset, dataSpan.Count), signature), "Verify Tampered 5"); Assert.False(ecdsa.VerifyHash(halg.ComputeHash(dataStream), signature), "Verify Tampered 6"); Assert.True(dataStream.Position == dataArray.Length, "Check stream read 6B"); dataStream.Position = 0; } } private class ByteArrayComparer : IEqualityComparer<byte[]> { public bool Equals(byte[] x, byte[] y) { return x.SequenceEqual(y); } public int GetHashCode(byte[] obj) { int h = 5381; foreach (byte b in obj) { h = unchecked((h << 5) + h) ^ b.GetHashCode(); } return h; } } } }
#region File Description
//-----------------------------------------------------------------------------
// GameplayScreen.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion

#region Using Statements
using System;
using System.Collections.Generic;
using System.Text;
using GameStateManagement;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input.Touch;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Phone.Shell;
using YachtServices;
using System.Threading;
#endregion

namespace Yacht
{
    /// <summary>
    /// The main gameplay screen.
    /// </summary>
    class GameplayScreen : GameScreen
    {
        #region Fields
        Texture2D background;

        GameStateHandler gameStateHandler;
        DiceHandler diceHandler;
        string name;

        // One-shot timer used to delay AI moves so they appear "human"; null when no AI move is pending.
        Timer timer = null;
        Random random = new Random();

        GameTypes gameType;
        #endregion

        #region Initializations
        /// <summary>
        /// Initialize a new game screen.
        /// </summary>
        /// <param name="gameType">The type of game for which this screen is created.</param>
        public GameplayScreen(GameTypes gameType)
        {
            this.gameType = gameType;

            EnabledGestures = GestureType.Tap | GestureType.VerticalDrag | GestureType.DragComplete;
        }

        /// <summary>
        /// Initialize a new game screen.
        /// </summary>
        /// <param name="name">The name of the human player participating in the game.</param>
        /// <param name="gameType">The type of game for which this screen is created.</param>
        public GameplayScreen(string name, GameTypes gameType)
            : this(gameType)
        {
            this.name = name;
        }
        #endregion

        #region Loading
        /// <summary>
        /// Load all the game content. For offline games the state handler is built from the saved
        /// application state; for online games it is built lazily when the server sends a game state.
        /// </summary>
        public override void LoadContent()
        {
            base.LoadContent();

            Dice.LoadAssets(ScreenManager.Game.Content);

            ContentManager content = ScreenManager.Game.Content;

            background = content.Load<Texture2D>(@"Images\bg");

            // When reaching a gameplay screen, we know that there is a yacht state in the current state object
            YachtState yachtState = (YachtState)PhoneApplicationService.Current.State[Constants.YachtStateKey];

            InitializeDiceHandler(yachtState.PlayerDiceState, gameType);
            yachtState.PlayerDiceState = diceHandler.DiceState;
            diceHandler.PositionDice();

            if (gameType == GameTypes.Offline)
            {
                InitializeGameStateHandler(yachtState.YachGameState);
                yachtState.YachGameState = gameStateHandler.State;
            }
            else
            {
                // Register for network notifications
                NetworkManager.Instance.GameStateArrived += ServerGameStateArrived;
                NetworkManager.Instance.GameOver += GameOverEnded;
                NetworkManager.Instance.Banned += Banned;
                NetworkManager.Instance.GameUnavailable += GameUnavailable;
                NetworkManager.Instance.ServiceError += ServerErrorOccurred;

                // Get the updated game state instead of initializing from the state object
                NetworkManager.Instance.GetGameState();
            }
        }

        /// <summary>
        /// Handler called when the server declares that the game is over.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void GameOverEnded(object sender, YachtGameOverEventArgs e)
        {
            // Guard against the game-over notification racing ahead of the first game-state message.
            if (gameStateHandler == null)
            {
                return;
            }

            gameStateHandler.ShowGameOver(e.EndGameState);

            if (gameStateHandler.WinnerPlayer is HumanPlayer)
            {
                AudioManager.PlaySound("Winner");
            }
            else
            {
                AudioManager.PlaySound("Loss");
            }
        }

        /// <summary>
        /// Initializes the dice handler used to handle the player's dice, and loads resources used for displaying
        /// it.
        /// </summary>
        /// <param name="diceState">State to initialize the dice handler according to, or null to use the default
        /// initial state.</param>
        /// <param name="gameType">The game type for which the dice are initialized. Currently unused, but kept
        /// for signature compatibility with existing callers.</param>
        private void InitializeDiceHandler(DiceState diceState, GameTypes gameType)
        {
            diceHandler = new DiceHandler(ScreenManager.Game.GraphicsDevice, diceState);
            diceHandler.LoadAssets(ScreenManager.Game.Content);
        }

        /// <summary>
        /// Initializes the game state handler used to manage the game state, and loads resources used for displaying
        /// it.
        /// </summary>
        /// <param name="state">State to initialize the game handler according to, or null to use the default
        /// initial state.</param>
        private void InitializeGameStateHandler(GameState state)
        {
            gameStateHandler = new GameStateHandler(diceHandler, ScreenManager.input, name, state,
                ScreenManager.Game.GraphicsDevice.Viewport.Bounds, ScreenManager.Game.Content);
        }
        #endregion

        #region Update and Render
        /// <summary>
        /// Handle user input.
        /// </summary>
        /// <param name="input">User input information.</param>
        public override void HandleInput(InputState input)
        {
            if (!Guide.IsVisible)
            {
                if (input.IsPauseGame(null))
                {
                    QuitGame();
                }

                if (gameStateHandler != null && gameStateHandler.IsGameOver && input.Gestures.Count > 0 &&
                    input.Gestures[0].GestureType == GestureType.Tap)
                {
                    // We exit an online game after it has finished, so remove the state data.
                    PhoneApplicationService.Current.State.Remove(Constants.YachtStateKey);

                    ExitScreen();
                    Dispose();

                    if (gameStateHandler.State.GameType == GameTypes.Offline)
                    {
                        ScreenManager.AddScreen(new MainMenuScreen(), null);
                    }
                    else
                    {
                        ScreenManager.AddScreen(new SelectOnlineGameScreen(NetworkManager.Instance.name), null);
                    }
                }

                for (int i = 0; i < input.Gestures.Count; i++)
                {
                    if (gameStateHandler != null && gameStateHandler.IsInitialized)
                    {
                        gameStateHandler.HandleInput(input.Gestures[i]);
                    }
                }
            }

            base.HandleInput(input);
        }

        /// <summary>
        /// Pause the game and give the user a chance to save (offline) or confirm leaving (online).
        /// </summary>
        private void QuitGame()
        {
            // Renamed from the misspelled "QuiteGame"; private, so no external callers are affected.
            if (gameStateHandler.State.GameType == GameTypes.Offline)
            {
                // Give the user a chance to save his current progress
                Guide.BeginShowMessageBox("Save Game", "Do you want to save your progress?",
                    new String[] { "Yes", "No" }, 0, MessageBoxIcon.Warning, ShowSaveDialogEnded, null);
            }
            else
            {
                if (gameStateHandler.IsGameOver)
                {
                    HandleExitScreen();
                }
                else
                {
                    // Give the user a chance to abort exiting the game
                    Guide.BeginShowMessageBox("Are you sure you want to leave the game?", " ",
                        new String[] { "Yes", "No" }, 0, MessageBoxIcon.Warning, AbortExitDialogEnded, null);
                }
            }
        }

        /// <summary>
        /// Handler for the warning box displayed when the user tries to exit the game.
        /// </summary>
        /// <param name="result">The popup messagebox result.</param>
        private void AbortExitDialogEnded(IAsyncResult result)
        {
            int? res = Guide.EndShowMessageBox(result);

            if (res == 0)
            {
                HandleExitScreen();
            }
        }

        /// <summary>
        /// Exits the gameplay screen.
        /// </summary>
        private void HandleExitScreen()
        {
            ExitScreen();
            Dispose();

            // We voluntarily exit an online game, so remove the state data.
            PhoneApplicationService.Current.State.Remove(Constants.YachtStateKey);

            NetworkManager.Instance.Unregister();
            ScreenManager.AddScreen(new MainMenuScreen(), null);
        }

        /// <summary>
        /// Handler for the dialog box which offers to save the game state.
        /// </summary>
        /// <param name="result">The popup messagebox result.</param>
        private void ShowSaveDialogEnded(IAsyncResult result)
        {
            int? res = Guide.EndShowMessageBox(result);

            if (res.HasValue)
            {
                // Store the user's progress
                if (res.Value == 0)
                {
                    YachtGame.SaveGameState();
                }

                // Remove state information before exiting the game
                PhoneApplicationService.Current.State.Remove(Constants.YachtStateKey);

                ExitScreen();
                Dispose();

                ScreenManager.AddScreen(new MainMenuScreen(), null);
            }
        }

        /// <summary>
        /// Perform the game's update logic: update dice, run the current player's turn, and
        /// schedule a delayed AI move when the current player is AI-controlled.
        /// </summary>
        /// <param name="gameTime">Game time information.</param>
        /// <param name="otherScreenHasFocus">Whether another screen has the focus currently.</param>
        /// <param name="coveredByOtherScreen">Whether this screen is covered by another screen.</param>
        public override void Update(GameTime gameTime, bool otherScreenHasFocus, bool coveredByOtherScreen)
        {
            if (!Guide.IsVisible && gameStateHandler != null && !gameStateHandler.IsGameOver)
            {
                gameStateHandler.SetScoreDice(diceHandler.GetHoldingDice());
                diceHandler.Update();

                if (gameStateHandler.IsInitialized && gameStateHandler.CurrentPlayer != null &&
                    !gameStateHandler.IsWaitingForPlayer)
                {
                    if (!(gameStateHandler.CurrentPlayer is AIPlayer))
                    {
                        gameStateHandler.CurrentPlayer.PerformPlayerLogic();
                    }
                    else
                    {
                        if (timer == null)
                        {
                            // One-shot timer (period of -1): fires once after a random 300-600ms delay.
                            timer = new Timer(MakeAIPlay, gameStateHandler.CurrentPlayer, random.Next(300, 600), -1);
                        }
                    }
                }
            }

            base.Update(gameTime, otherScreenHasFocus, coveredByOtherScreen);
        }

        /// <summary>
        /// Draws the game.
        /// </summary>
        /// <param name="gameTime">Game time information.</param>
        public override void Draw(GameTime gameTime)
        {
            ScreenManager.GraphicsDevice.Clear(Color.CornflowerBlue);

            ScreenManager.SpriteBatch.Begin();

            // Draw all game component
            ScreenManager.SpriteBatch.Draw(background, Vector2.Zero, Color.White);

            if (!Guide.IsVisible)
            {
                if (gameStateHandler != null && gameStateHandler.IsInitialized)
                {
                    if (diceHandler != null && !gameStateHandler.IsGameOver)
                    {
                        diceHandler.Draw(ScreenManager.SpriteBatch);

                        if (!gameStateHandler.IsWaitingForPlayer)
                        {
                            gameStateHandler.CurrentPlayer.Draw(ScreenManager.SpriteBatch);
                        }
                    }

                    gameStateHandler.Draw(ScreenManager.SpriteBatch);
                }

                DrawGameOver();
            }

            ScreenManager.SpriteBatch.End();

            base.Draw(gameTime);
        }

        /// <summary>
        /// Draws information about the winning player once the game is over.
        /// </summary>
        private void DrawGameOver()
        {
            if (gameStateHandler != null && gameStateHandler.IsGameOver)
            {
                Rectangle screenBounds = ScreenManager.Game.GraphicsDevice.Viewport.Bounds;

                string winnerText = string.Format("{0} is the winner!", gameStateHandler.WinnerPlayer.Name);
                Vector2 measure = YachtGame.Font.MeasureString(winnerText);

                Vector2 position = new Vector2(screenBounds.Center.X - measure.X / 2, screenBounds.Bottom - 100);

                ScreenManager.SpriteBatch.DrawString(YachtGame.Font, winnerText, position, Color.White);
            }
        }
        #endregion

        /// <summary>
        /// Handler called once the server reports a specific game is unavailable.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void GameUnavailable(object sender, EventArgs e)
        {
            // Wait for any open dialog to close before presenting ours. The original busy-wait
            // spun at 100% CPU; sleeping between polls keeps the same behavior without the spin.
            while (Guide.IsVisible)
            {
                Thread.Sleep(10);
            }

            Guide.BeginShowMessageBox("The game " + NetworkManager.Instance.gameName + " is unavailable",
                "Do you want to create a new game with same name, or join another game?",
                new[] { "Create", "Join" }, 0, MessageBoxIcon.None, UnavailableGameDialogEnded, null);
        }

        /// <summary>
        /// Called after the player dismisses the dialog stating a game is unavailable.
        /// </summary>
        /// <param name="result">The messagebox selection result.</param>
        private void UnavailableGameDialogEnded(IAsyncResult result)
        {
            int? res = Guide.EndShowMessageBox(result);

            if (res == 0)
            {
                // Create a new game with the same name.
                NetworkManager.Instance.NewGameCreated += Instance_NewGameCreated;
                NetworkManager.Instance.NewGame(NetworkManager.Instance.gameName);
            }
            else
            {
                // Return to the game selection screen.
                PhoneApplicationService.Current.State.Remove(Constants.YachtStateKey);

                ExitScreen();
                Dispose();

                if (res == 1)
                {
                    ScreenManager.AddScreen(new SelectOnlineGameScreen(NetworkManager.Instance.name), null);
                }
                else
                {
                    ScreenManager.AddScreen(new MainMenuScreen(), null);
                }
            }
        }

        /// <summary>
        /// Handler called once a new game instance has been created.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void Instance_NewGameCreated(object sender, BooleanEventArgs e)
        {
            // One-shot subscription: detach immediately so repeated creations don't stack handlers.
            NetworkManager.Instance.NewGameCreated -= Instance_NewGameCreated;

            if (e.Answer)
            {
                NetworkManager.Instance.GetGameState();
            }
            else
            {
                Guide.BeginShowMessageBox("Cannot create the game with same name the name is in use", "",
                    new String[] { "OK" }, 0, MessageBoxIcon.Alert, null, null);

                ExitScreen();
                Dispose();

                ScreenManager.AddScreen(new SelectOnlineGameScreen(NetworkManager.Instance.name), null);
            }
        }

        /// <summary>
        /// Handler called when an error occurs communicating with the server.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void ServerErrorOccurred(object sender, ExceptionEventArgs e)
        {
            Guide.BeginShowMessageBox("There was a server error. Please try to connect again.", " ",
                new[] { "OK" }, 0, MessageBoxIcon.Error, ErrorDialogEnded, null);
        }

        /// <summary>
        /// Called once the server error dialog is dismissed.
        /// </summary>
        /// <param name="result">The messagebox selection result.</param>
        void ErrorDialogEnded(IAsyncResult result)
        {
            ExitScreen();
            Dispose();

            ScreenManager.AddScreen(new SelectOnlineGameScreen(NetworkManager.Instance.name), null);
        }

        /// <summary>
        /// Called when the server kicks the player from the game.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void Banned(object sender, EventArgs e)
        {
            ExitScreen();
            Dispose();

            ScreenManager.AddScreen(new MainMenuScreen(), null);
        }

        /// <summary>
        /// Handler called when a game state is received from the server. Builds the game state
        /// handler on first arrival, then keeps dice/score state in sync on later arrivals.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void ServerGameStateArrived(object sender, YachtGameStateEventArgs e)
        {
            if (gameStateHandler == null)
            {
                InitializeGameStateHandler(e.GameState);

                YachtState yachtState = (YachtState)PhoneApplicationService.Current.State[Constants.YachtStateKey];
                yachtState.NetworkManagerState = NetworkManager.Instance;

                // Update state object with the new game state
                yachtState.YachGameState = gameStateHandler.State;

                // Reset the dice state if it is not valid for this turn
                if (diceHandler.DiceState.ValidForTurn != e.GameState.StepsMade)
                {
                    diceHandler.Reset(false);
                }

                NetworkManager.Instance.ScoreCardArrived += ServerScoreCardArrived;
            }
            else
            {
                if (e.GameState.StepsMade != gameStateHandler.State.StepsMade)
                {
                    diceHandler.Reset(false);
                }

                gameStateHandler.SetState(e.GameState);
            }

            diceHandler.DiceState.ValidForTurn = gameStateHandler.State.StepsMade;

            AudioManager.PlaySoundRandom("TurnChange", 2);

            if (e.GameState.Players[e.GameState.CurrentPlayer].PlayerID == NetworkManager.Instance.playerID)
            {
                NetworkManager.Instance.GetScoreCard();
            }
        }

        /// <summary>
        /// Handler called when score card information arrives from the server.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        void ServerScoreCardArrived(object sender, YachtScoreCardEventArgs e)
        {
            gameStateHandler.UpdateScoreCard(e.ScoreCard);
        }

        /// <summary>
        /// Causes local AI players to play.
        /// </summary>
        /// <param name="obj">The <see cref="AIPlayer"/> whose turn should be performed.</param>
        private void MakeAIPlay(object obj)
        {
            // Dispose the one-shot timer instead of just dropping the reference (previous leak).
            Timer firedTimer = timer;
            timer = null;

            if (firedTimer != null)
            {
                firedTimer.Dispose();
            }

            (obj as AIPlayer).PerformPlayerLogic();
        }

        /// <summary>
        /// Performs necessary cleanup before disposing of the screen: detaches all network event
        /// handlers and releases any pending AI-move timer.
        /// </summary>
        public void Dispose()
        {
            if (NetworkManager.Instance != null)
            {
                NetworkManager.Instance.GameStateArrived -= ServerGameStateArrived;
                NetworkManager.Instance.ServiceError -= ServerErrorOccurred;
                NetworkManager.Instance.Banned -= Banned;
                NetworkManager.Instance.GameUnavailable -= GameUnavailable;
                NetworkManager.Instance.ScoreCardArrived -= ServerScoreCardArrived;
                NetworkManager.Instance.GameOver -= GameOverEnded;
            }

            // Release any pending AI-move timer (previously leaked).
            Timer pendingTimer = timer;
            timer = null;

            if (pendingTimer != null)
            {
                pendingTimer.Dispose();
            }
        }
    }
}
#define thread //comment out this line if you would like to disable multi-threaded search
using UnityEngine;
using System.Collections;
using System.IO;
using System.Linq;
#if thread
using System.Threading;
#endif

/// <summary>
/// Immediate-mode (OnGUI) file browser with two layouts (desktop / mobile),
/// optional search (threaded when the "thread" symbol is defined) and an
/// optional extension filter.
/// </summary>
public class FileBrowser{
	//public
	//Optional Parameters
	public string name = "File Browser"; //Just a name to identify the file browser with
	//GUI Options
	public GUISkin guiSkin; //The GUISkin to use
	public int layoutType{ get{ return layout; } } //returns the current Layout type
	public Texture2D fileTexture,directoryTexture,backTexture,driveTexture; //textures used to represent file types
	public GUIStyle backStyle,cancelStyle,selectStyle; //styles used for specific buttons
	public Color selectedColor = new Color(0.5f,0.5f,0.9f); //the color of the selected file
	public bool isVisible{ get{ return visible; } } //check if the file browser is currently visible
	//File Options
	public string searchPattern = "*"; //search pattern used to find files
	public string[] extensionsAllowed; //when set, only files whose (lower-case, dotted) extension is in this list are shown
	//Output
	public FileInfo outputFile; //the selected output file
	//Search
	public bool showSearch = false; //show the search bar
	public bool searchRecursively = false; //search current folder and sub folders

	//Protected
	//GUI
	protected Vector2 fileScroll=Vector2.zero,folderScroll=Vector2.zero,driveScroll=Vector2.zero;
	protected Color defaultColor;
	protected int layout;
	protected Rect guiSize;
	protected GUISkin oldSkin;
	protected bool visible = false;
	//Search
	protected string searchBarString = ""; //string used in search bar
	protected bool isSearching = false; //do not show the search bar if searching
	//File Information
	protected DirectoryInfo currentDirectory;
	protected FileInformation[] files;
	protected DirectoryInformation[] directories,drives;
	protected DirectoryInformation parentDir;
	protected bool getFiles = true,showDrives=false;
	protected int selectedFile = -1;
	//Threading
	protected float startSearchTime = 0f;
#if thread
	protected Thread t;
#endif

	//Constructors
	/// <summary>Full constructor: starting directory, layout style (0 = desktop, 1 = mobile) and GUI rect.</summary>
	public FileBrowser(string directory,int layoutStyle,Rect guiRect){
		currentDirectory = new DirectoryInfo(directory);
		layout = layoutStyle;
		guiSize = guiRect;
	}
#if (UNITY_IPHONE || UNITY_ANDROID || UNITY_BLACKBERRY || UNITY_WP8)
	public FileBrowser(string directory,int layoutStyle):this(directory,layoutStyle,new Rect(0,0,Screen.width,Screen.height)){}
	public FileBrowser(string directory):this(directory,1){}
#else
	public FileBrowser(string directory,int layoutStyle):this(directory,layoutStyle,new Rect(Screen.width*0.125f,Screen.height*0.125f,Screen.width*0.75f,Screen.height*0.75f)){}
	public FileBrowser(string directory):this(directory,0){}
#endif
	public FileBrowser(Rect guiRect):this(){ guiSize = guiRect; }
	public FileBrowser(int layoutStyle):this(Directory.GetCurrentDirectory(),layoutStyle){}
	public FileBrowser():this(Directory.GetCurrentDirectory()){}

	//set variables
	public void setDirectory(string dir){ currentDirectory=new DirectoryInfo(dir); }
	public void setLayout(int l){ layout=l; }
	public void setGUIRect(Rect r){ guiSize=r; }

	/// <summary>
	/// GUI function to be called during OnGUI.
	/// </summary>
	/// <returns>true when the user selected a file (see <see cref="outputFile"/>) or cancelled
	/// (outputFile is null); false while the browser should keep being drawn.</returns>
	public bool draw(){
		// Lazily (re)load the directory listing.
		if(getFiles){
			getFileList(currentDirectory);
			getFiles=false;
		}
		if(guiSkin){
			oldSkin = GUI.skin;
			GUI.skin = guiSkin;
		}
		GUILayout.BeginArea(guiSize);
		GUILayout.BeginVertical("box");
		switch(layout){
		case 0:
			// Desktop layout: folder tree on the left, file list + buttons on the right.
			GUILayout.BeginHorizontal("box");
			GUILayout.FlexibleSpace();
			GUILayout.Label(currentDirectory.FullName);
			GUILayout.FlexibleSpace();
			if(showSearch){
				drawSearchField();
				GUILayout.Space(10);
			}
			GUILayout.EndHorizontal();

			GUILayout.BeginHorizontal("box");
			GUILayout.BeginVertical(GUILayout.MaxWidth(300));
			folderScroll = GUILayout.BeginScrollView(folderScroll);
			if(showDrives){
				foreach(DirectoryInformation di in drives){
					if(di.button()){ getFileList(di.di); }
				}
			}else{
				parentDir.gc.text = "..";
				if((backStyle != null)?parentDir.button(backStyle):parentDir.button())
					getFileList(parentDir.di);
			}
			foreach(DirectoryInformation di in directories){
				if(di.button()){ getFileList(di.di); }
			}
			GUILayout.EndScrollView();
			GUILayout.EndVertical();

			GUILayout.BeginVertical("box");
			if(isSearching){
				drawSearchMessage();
			}else{
				fileScroll = GUILayout.BeginScrollView(fileScroll);
				for(int fi=0;fi<files.Length;fi++){
					// Highlight the currently selected file.
					if(selectedFile==fi){
						defaultColor = GUI.color;
						GUI.color = selectedColor;
					}
					if(files[fi].button()){
						outputFile = files[fi].fi;
						selectedFile = fi;
					}
					if(selectedFile==fi)
						GUI.color = defaultColor;
				}
				GUILayout.EndScrollView();
			}
			GUILayout.BeginHorizontal("box");
			GUILayout.FlexibleSpace();
			if((cancelStyle == null)?GUILayout.Button("Exit"):GUILayout.Button("Cancel",cancelStyle)){
				outputFile = null;
				return true;
			}
			GUILayout.FlexibleSpace();
			if((selectStyle == null)?GUILayout.Button("Open"):GUILayout.Button("Select",selectStyle)){
				return true;
			}
			GUILayout.FlexibleSpace();
			GUILayout.EndHorizontal();
			GUILayout.EndVertical();
			GUILayout.EndHorizontal();
			break;

		case 1: //mobile preferred layout
		default:
			if(showSearch){
				GUILayout.BeginHorizontal("box");
				GUILayout.FlexibleSpace();
				drawSearchField();
				GUILayout.FlexibleSpace();
				GUILayout.EndHorizontal();
			}
			fileScroll = GUILayout.BeginScrollView(fileScroll);
			if(isSearching){
				drawSearchMessage();
			}else{
				if(showDrives){
					GUILayout.BeginHorizontal();
					foreach(DirectoryInformation di in drives){
						if(di.button()){ getFileList(di.di); }
					}
					GUILayout.EndHorizontal();
				}else{
					if((backStyle != null)?parentDir.button(backStyle):parentDir.button())
						getFileList(parentDir.di);
				}
				foreach(DirectoryInformation di in directories){
					if(di.button()){ getFileList(di.di); }
				}
				for(int fi=0;fi<files.Length;fi++){
					if(selectedFile==fi){
						defaultColor = GUI.color;
						GUI.color = selectedColor;
					}
					if(files[fi].button()){
						outputFile = files[fi].fi;
						selectedFile = fi;
					}
					if(selectedFile==fi)
						GUI.color = defaultColor;
				}
			}
			GUILayout.EndScrollView();
			if((selectStyle == null)?GUILayout.Button("Select"):GUILayout.Button("Select",selectStyle)){
				return true;
			}
			if((cancelStyle == null)?GUILayout.Button("Cancel"):GUILayout.Button("Cancel",cancelStyle)){
				outputFile = null;
				return true;
			}
			break;
		}
		GUILayout.EndVertical();
		GUILayout.EndArea();
		if(guiSkin){GUI.skin = oldSkin;}
		return false;
	}

	/// <summary>Draws the search text field + button, or a "searching" label while a search runs.</summary>
	protected void drawSearchField(){
		if(isSearching){
			GUILayout.Label("Searching For: \""+searchBarString+"\"");
		}else{
			searchBarString = GUILayout.TextField(searchBarString,GUILayout.MinWidth(150));
			if(GUILayout.Button("search")){
				if(searchBarString.Length > 0){
					isSearching = true;
#if thread
					startSearchTime = Time.time;
					t = new Thread(threadSearchFileList);
					// BUGFIX: pass whether a file texture is actually available instead of
					// hard-coding "true" — mirrors the non-threaded searchFileList(di) path.
					t.Start(fileTexture != null);
#else
					searchFileList(currentDirectory);
#endif
				}else{
					// Empty search string: just refresh the normal listing.
					getFileList(currentDirectory);
				}
			}
		}
	}

	/// <summary>Progressively reveals a playful "searching..." message while the search thread runs.</summary>
	protected void drawSearchMessage(){
		float tt = Time.time-startSearchTime;
		if(tt>1) GUILayout.Button("Searching");
		if(tt>2) GUILayout.Button("For");
		if(tt>3) GUILayout.Button("\""+searchBarString+"\"");
		if(tt>4) GUILayout.Button(".....");
		if(tt>5) GUILayout.Button("It's");
		if(tt>6) GUILayout.Button("Taking");
		if(tt>7) GUILayout.Button("A");
		if(tt>8) GUILayout.Button("While");
		if(tt>9) GUILayout.Button(".....");
	}

	/// <summary>
	/// Loads drives, sub-directories and files for the given directory into the
	/// browser's cached arrays and makes it the current directory.
	/// </summary>
	public void getFileList(DirectoryInfo di){
		//set current directory
		currentDirectory = di;
		//get parent
		if(backTexture)
			parentDir = (di.Parent==null)?new DirectoryInformation(di,backTexture):new DirectoryInformation(di.Parent,backTexture);
		else
			parentDir = (di.Parent==null)?new DirectoryInformation(di):new DirectoryInformation(di.Parent);
		showDrives = di.Parent==null; // at a drive root, offer the drive list instead of ".."
		//get drives
		string[] drvs = System.IO.Directory.GetLogicalDrives();
		drives = new DirectoryInformation[drvs.Length];
		for(int v=0;v<drvs.Length;v++){
			drives[v]= (driveTexture==null)?new DirectoryInformation(new DirectoryInfo(drvs[v])):new DirectoryInformation(new DirectoryInfo(drvs[v]),driveTexture);
		}
		//get directories
		DirectoryInfo[] dia = di.GetDirectories();
		directories = new DirectoryInformation[dia.Length];
		for(int d=0;d<dia.Length;d++){
			if(directoryTexture) directories[d] = new DirectoryInformation(dia[d],directoryTexture);
			else directories[d] = new DirectoryInformation(dia[d]);
		}
		//get files
		FileInfo[] fia = di.GetFiles(searchPattern);
		if (extensionsAllowed != null)
		{
			// GetFiles' pattern can only express a single extension, so filter manually
			// (http://stackoverflow.com/a/3527295).
			// BUGFIX: filter the pattern-matched result instead of re-listing every file
			// with di.GetFiles(), which silently discarded searchPattern.
			fia = fia.Where(f => extensionsAllowed.Contains(f.Extension.ToLower())).ToArray();
		}
		files = new FileInformation[fia.Length];
		for(int f=0;f<fia.Length;f++){
			if(fileTexture) files[f] = new FileInformation(fia[f],fileTexture);
			else files[f] = new FileInformation(fia[f]);
		}
	}

	public void searchFileList(DirectoryInfo di){
		searchFileList(di,fileTexture!=null);
	}

	/// <summary>Fills <see cref="files"/> with everything matching the search bar string.</summary>
	protected void searchFileList(DirectoryInfo di,bool hasTexture){
		// Wrapping the term in '*' allows intuitive substring searches unless the user
		// already supplied a wildcard.
		FileInfo[] fia = di.GetFiles((searchBarString.IndexOf("*") >= 0)?searchBarString:"*"+searchBarString+"*",(searchRecursively)?SearchOption.AllDirectories:SearchOption.TopDirectoryOnly);
		files = new FileInformation[fia.Length];
		for(int f=0;f<fia.Length;f++){
			if(hasTexture) files[f] = new FileInformation(fia[f],fileTexture);
			else files[f] = new FileInformation(fia[f]);
		}
#if thread
		// In threaded mode the caller (threadSearchFileList) clears isSearching.
#else
		isSearching = false;
#endif
	}

	/// <summary>Thread entry point for the search; <paramref name="hasTexture"/> is a boxed bool.</summary>
	protected void threadSearchFileList(object hasTexture){
		searchFileList(currentDirectory,(bool)hasTexture);
		isSearching = false;
	}

	//search a directory by a search pattern, this is optionally recursive
	public static FileInfo[] searchDirectory(DirectoryInfo di,string sp,bool recursive){
		return di.GetFiles(sp,(recursive)?SearchOption.AllDirectories:SearchOption.TopDirectoryOnly);
	}
	public static FileInfo[] searchDirectory(DirectoryInfo di,string sp){
		return searchDirectory(di,sp,false);
	}

	/// <summary>Perceived luminance of a color (Rec. 601 weights).</summary>
	public float brightness(Color c){ return c.r*.3f+c.g*.59f+c.b*.11f; }

	//to string
	public override string ToString(){
		return "Name: "+name+"\nVisible: "+isVisible.ToString()+"\nDirectory: "+currentDirectory+"\nLayout: "+layout.ToString()+"\nGUI Size: "+guiSize.ToString()+"\nDirectories: "+directories.Length.ToString()+"\nFiles: "+files.Length.ToString();
	}
}

/// <summary>Pairs a FileInfo with the GUIContent used to draw it.</summary>
public class FileInformation{
	public FileInfo fi;
	public GUIContent gc;

	public FileInformation(FileInfo f){
		fi=f;
		gc = new GUIContent(fi.Name);
	}
	public FileInformation(FileInfo f,Texture2D img){
		fi = f;
		gc = new GUIContent(fi.Name,img);
	}
	public bool button(){return GUILayout.Button(gc);}
	public void label(){ GUILayout.Label(gc); }
	public bool button(GUIStyle gs){return GUILayout.Button(gc,gs);}
	public void label(GUIStyle gs){ GUILayout.Label(gc,gs); }
}

/// <summary>Pairs a DirectoryInfo with the GUIContent used to draw it.</summary>
public class DirectoryInformation{
	public DirectoryInfo di;
	public GUIContent gc;

	public DirectoryInformation(DirectoryInfo d){
		di=d;
		gc = new GUIContent(d.Name);
	}
	public DirectoryInformation(DirectoryInfo d,Texture2D img){
		di=d;
		gc = new GUIContent(d.Name,img);
	}
	public bool button(){return GUILayout.Button(gc);}
	public void label(){ GUILayout.Label(gc); }
	public bool button(GUIStyle gs){return GUILayout.Button(gc,gs);}
	public void label(GUIStyle gs){ GUILayout.Label(gc,gs); }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Threading.Tasks;
using Akka.Actor;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.ResponseCompression;
using Microsoft.AspNetCore.Server.Kestrel.Https;
using Microsoft.Extensions.DependencyInjection;
using Neo.IO;
using Neo.IO.Json;
using Neo.Ledger;
using Neo.Network.P2P;
using Neo.Network.P2P.Payloads;
using Neo.Persistence;
using Neo.Plugins;
using Neo.SmartContract;
using Neo.SmartContract.Native;
using Neo.VM;
using Neo.Wallets;

namespace Neo.Network.RPC
{
    /// <summary>
    /// JSON-RPC 2.0 server for a Neo node, hosted on Kestrel. Dispatches requests
    /// (GET with base64 "params", or POST body; single or batch) to the blockchain,
    /// local node and plugin handlers.
    /// </summary>
    public sealed class RpcServer : IDisposable
    {
        /// <summary>
        /// Minimal IVerifiable whose only purpose is to supply witness script hashes
        /// to ApplicationEngine.Run for "invokescript" calls; all (de)serialization
        /// members deliberately throw because it is never sent over the wire.
        /// </summary>
        private class CheckWitnessHashes : IVerifiable
        {
            private readonly UInt160[] _scriptHashesForVerifying;
            public Witness[] Witnesses { get; set; }
            public int Size { get; }

            public CheckWitnessHashes(UInt160[] scriptHashesForVerifying)
            {
                _scriptHashesForVerifying = scriptHashesForVerifying;
            }

            public void Serialize(BinaryWriter writer)
            {
                throw new NotImplementedException();
            }

            public void Deserialize(BinaryReader reader)
            {
                throw new NotImplementedException();
            }

            public void DeserializeUnsigned(BinaryReader reader)
            {
                throw new NotImplementedException();
            }

            public UInt160[] GetScriptHashesForVerifying(StoreView snapshot)
            {
                return _scriptHashesForVerifying;
            }

            public void SerializeUnsigned(BinaryWriter writer)
            {
                throw new NotImplementedException();
            }
        }

        // Optional wallet; settable so plugins can attach one after construction.
        public Wallet Wallet { get; set; }
        // Upper GAS bound for invoke* test executions (0/default = engine default).
        public long MaxGasInvoke { get; }
        private IWebHost host;
        private readonly NeoSystem system;

        public RpcServer(NeoSystem system, Wallet wallet = null, long maxGasInvoke = default)
        {
            this.system = system;
            this.Wallet = wallet;
            this.MaxGasInvoke = maxGasInvoke;
        }

        /// <summary>Builds a JSON-RPC error envelope for the given request id.</summary>
        private static JObject CreateErrorResponse(JObject id, int code, string message, JObject data = null)
        {
            JObject response = CreateResponse(id);
            response["error"] = new JObject();
            response["error"]["code"] = code;
            response["error"]["message"] = message;
            if (data != null)
                response["error"]["data"] = data;
            return response;
        }

        /// <summary>Builds the common JSON-RPC 2.0 response skeleton.</summary>
        private static JObject CreateResponse(JObject id)
        {
            JObject response = new JObject();
            response["jsonrpc"] = "2.0";
            response["id"] = id;
            return response;
        }

        public void Dispose()
        {
            if (host != null)
            {
                host.Dispose();
                host = null;
            }
        }

        /// <summary>
        /// Test-executes a script in the VM (no state persisted) and reports
        /// state/gas/result stack; never relays anything.
        /// </summary>
        private JObject GetInvokeResult(byte[] script, IVerifiable checkWitnessHashes = null)
        {
            ApplicationEngine engine = ApplicationEngine.Run(script, checkWitnessHashes, extraGAS: MaxGasInvoke);
            JObject json = new JObject();
            json["script"] = script.ToHexString();
            json["state"] = engine.State;
            json["gas_consumed"] = engine.GasConsumed.ToString();
            try
            {
                json["stack"] = new JArray(engine.ResultStack.Select(p => p.ToParameter().ToJson()));
            }
            catch (InvalidOperationException)
            {
                // Self-referencing stack items cannot be serialized to JSON.
                json["stack"] = "error: recursive reference";
            }
            return json;
        }

        /// <summary>Maps a relay outcome to either a hash result or a typed RpcException.</summary>
        private static JObject GetRelayResult(RelayResultReason reason, UInt256 hash)
        {
            switch (reason)
            {
                case RelayResultReason.Succeed:
                    {
                        var ret = new JObject();
                        ret["hash"] = hash.ToString();
                        return ret;
                    }
                case RelayResultReason.AlreadyExists:
                    throw new RpcException(-501, "Block or transaction already exists and cannot be sent repeatedly.");
                case RelayResultReason.OutOfMemory:
                    throw new RpcException(-502, "The memory pool is full and no more transactions can be sent.");
                case RelayResultReason.UnableToVerify:
                    throw new RpcException(-503, "The block cannot be validated.");
                case RelayResultReason.Invalid:
                    throw new RpcException(-504, "Block or transaction validation failed.");
                case RelayResultReason.PolicyFail:
                    throw new RpcException(-505, "One of the Policy filters failed.");
                default:
                    throw new RpcException(-500, "Unknown error.");
            }
        }

        /// <summary>Dispatches a single RPC method name to its handler.</summary>
        private JObject Process(string method, JArray _params)
        {
            switch (method)
            {
                case "getbestblockhash":
                    {
                        return GetBestBlockHash();
                    }
                case "getblock":
                    {
                        JObject key = _params[0];
                        bool verbose = _params.Count >= 2 && _params[1].AsBoolean();
                        return GetBlock(key, verbose);
                    }
                case "getblockcount":
                    {
                        return GetBlockCount();
                    }
                case "getblockhash":
                    {
                        uint height = uint.Parse(_params[0].AsString());
                        return GetBlockHash(height);
                    }
                case "getblockheader":
                    {
                        JObject key = _params[0];
                        bool verbose = _params.Count >= 2 && _params[1].AsBoolean();
                        return GetBlockHeader(key, verbose);
                    }
                case "getblocksysfee":
                    {
                        uint height = uint.Parse(_params[0].AsString());
                        return GetBlockSysFee(height);
                    }
                case "getconnectioncount":
                    {
                        return GetConnectionCount();
                    }
                case "getcontractstate":
                    {
                        UInt160 script_hash = UInt160.Parse(_params[0].AsString());
                        return GetContractState(script_hash);
                    }
                case "getpeers":
                    {
                        return GetPeers();
                    }
                case "getrawmempool":
                    {
                        bool shouldGetUnverified = _params.Count >= 1 && _params[0].AsBoolean();
                        return GetRawMemPool(shouldGetUnverified);
                    }
                case "getrawtransaction":
                    {
                        UInt256 hash = UInt256.Parse(_params[0].AsString());
                        bool verbose = _params.Count >= 2 && _params[1].AsBoolean();
                        return GetRawTransaction(hash, verbose);
                    }
                case "getstorage":
                    {
                        UInt160 script_hash = UInt160.Parse(_params[0].AsString());
                        byte[] key = _params[1].AsString().HexToBytes();
                        return GetStorage(script_hash, key);
                    }
                case "gettransactionheight":
                    {
                        UInt256 hash = UInt256.Parse(_params[0].AsString());
                        return GetTransactionHeight(hash);
                    }
                case "getvalidators":
                    {
                        return GetValidators();
                    }
                case "getversion":
                    {
                        return GetVersion();
                    }
                case "invokefunction":
                    {
                        UInt160 script_hash = UInt160.Parse(_params[0].AsString());
                        string operation = _params[1].AsString();
                        ContractParameter[] args = _params.Count >= 3 ? ((JArray)_params[2]).Select(p => ContractParameter.FromJson(p)).ToArray() : new ContractParameter[0];
                        return InvokeFunction(script_hash, operation, args);
                    }
                case "invokescript":
                    {
                        byte[] script = _params[0].AsString().HexToBytes();
                        CheckWitnessHashes checkWitnessHashes = null;
                        if (_params.Count > 1)
                        {
                            // Remaining params are script hashes to satisfy CheckWitness.
                            UInt160[] scriptHashesForVerifying = _params.Skip(1).Select(u => UInt160.Parse(u.AsString())).ToArray();
                            checkWitnessHashes = new CheckWitnessHashes(scriptHashesForVerifying);
                        }
                        return GetInvokeResult(script, checkWitnessHashes);
                    }
                case "listplugins":
                    {
                        return ListPlugins();
                    }
                case "sendrawtransaction":
                    {
                        Transaction tx = _params[0].AsString().HexToBytes().AsSerializable<Transaction>();
                        return SendRawTransaction(tx);
                    }
                case "submitblock":
                    {
                        Block block = _params[0].AsString().HexToBytes().AsSerializable<Block>();
                        return SubmitBlock(block);
                    }
                case "validateaddress":
                    {
                        string address = _params[0].AsString();
                        return ValidateAddress(address);
                    }
                default:
                    throw new RpcException(-32601, "Method not found");
            }
        }

        /// <summary>
        /// HTTP entry point: parses GET query / POST body into a JSON-RPC request
        /// (single or batch), processes it and writes the JSON response.
        /// </summary>
        private async Task ProcessAsync(HttpContext context)
        {
            context.Response.Headers["Access-Control-Allow-Origin"] = "*";
            context.Response.Headers["Access-Control-Allow-Methods"] = "GET, POST";
            context.Response.Headers["Access-Control-Allow-Headers"] = "Content-Type";
            context.Response.Headers["Access-Control-Max-Age"] = "31536000";
            if (context.Request.Method != "GET" && context.Request.Method != "POST") return;
            JObject request = null;
            if (context.Request.Method == "GET")
            {
                string jsonrpc = context.Request.Query["jsonrpc"];
                string id = context.Request.Query["id"];
                string method = context.Request.Query["method"];
                string _params = context.Request.Query["params"];
                if (!string.IsNullOrEmpty(id) && !string.IsNullOrEmpty(method) && !string.IsNullOrEmpty(_params))
                {
                    // "params" may be base64-encoded JSON; fall back to raw text if not.
                    try
                    {
                        _params = Encoding.UTF8.GetString(Convert.FromBase64String(_params));
                    }
                    catch (FormatException) { }
                    request = new JObject();
                    if (!string.IsNullOrEmpty(jsonrpc))
                        request["jsonrpc"] = jsonrpc;
                    request["id"] = id;
                    request["method"] = method;
                    request["params"] = JObject.Parse(_params);
                }
            }
            else if (context.Request.Method == "POST")
            {
                using (StreamReader reader = new StreamReader(context.Request.Body))
                {
                    try
                    {
                        request = JObject.Parse(reader);
                    }
                    catch (FormatException) { }
                }
            }
            JObject response;
            if (request == null)
            {
                response = CreateErrorResponse(null, -32700, "Parse error");
            }
            else if (request is JArray array)
            {
                // Batch request: process each element; notifications yield null and are dropped.
                if (array.Count == 0)
                {
                    response = CreateErrorResponse(request["id"], -32600, "Invalid Request");
                }
                else
                {
                    response = array.Select(p => ProcessRequest(context, p)).Where(p => p != null).ToArray();
                }
            }
            else
            {
                response = ProcessRequest(context, request);
            }
            if (response == null || (response as JArray)?.Count == 0) return;
            context.Response.ContentType = "application/json-rpc";
            await context.Response.WriteAsync(response.ToString(), Encoding.UTF8);
        }

        /// <summary>
        /// Validates one request object, lets RPC plugins pre/own/post-process it,
        /// then falls back to the built-in Process dispatch. Returns null for
        /// notifications (no "id"), per JSON-RPC 2.0.
        /// </summary>
        private JObject ProcessRequest(HttpContext context, JObject request)
        {
            if (!request.ContainsProperty("id")) return null;
            if (!request.ContainsProperty("method") || !request.ContainsProperty("params") || !(request["params"] is JArray))
            {
                return CreateErrorResponse(request["id"], -32600, "Invalid Request");
            }
            JObject result = null;
            try
            {
                string method = request["method"].AsString();
                JArray _params = (JArray)request["params"];
                foreach (IRpcPlugin plugin in Plugin.RpcPlugins)
                    plugin.PreProcess(context, method, _params);
                foreach (IRpcPlugin plugin in Plugin.RpcPlugins)
                {
                    // First plugin that returns non-null handles the method.
                    result = plugin.OnProcess(context, method, _params);
                    if (result != null) break;
                }
                if (result == null)
                    result = Process(method, _params);
                foreach (IRpcPlugin plugin in Plugin.RpcPlugins)
                    plugin.PostProcess(context, method, _params, result);
            }
            catch (FormatException)
            {
                return CreateErrorResponse(request["id"], -32602, "Invalid params");
            }
            catch (IndexOutOfRangeException)
            {
                return CreateErrorResponse(request["id"], -32602, "Invalid params");
            }
            catch (Exception ex)
            {
#if DEBUG
                return CreateErrorResponse(request["id"], ex.HResult, ex.Message, ex.StackTrace);
#else
                return CreateErrorResponse(request["id"], ex.HResult, ex.Message);
#endif
            }
            JObject response = CreateResponse(request["id"]);
            response["result"] = result;
            return response;
        }

        /// <summary>
        /// Starts the Kestrel host; optional HTTPS with client-certificate pinning
        /// against the given trusted authority thumbprints.
        /// </summary>
        public void Start(IPAddress bindAddress, int port, string sslCert = null, string password = null, string[] trustedAuthorities = null)
        {
            host = new WebHostBuilder().UseKestrel(options => options.Listen(bindAddress, port, listenOptions =>
            {
                if (string.IsNullOrEmpty(sslCert)) return;
                listenOptions.UseHttps(sslCert, password, httpsConnectionAdapterOptions =>
                {
                    if (trustedAuthorities is null || trustedAuthorities.Length == 0)
                        return;
                    httpsConnectionAdapterOptions.ClientCertificateMode = ClientCertificateMode.RequireCertificate;
                    httpsConnectionAdapterOptions.ClientCertificateValidation = (cert, chain, err) =>
                    {
                        if (err != SslPolicyErrors.None)
                            return false;
                        // Accept only chains rooted in one of the configured authorities.
                        X509Certificate2 authority = chain.ChainElements[chain.ChainElements.Count - 1].Certificate;
                        return trustedAuthorities.Contains(authority.Thumbprint);
                    };
                });
            }))
            .Configure(app =>
            {
                app.UseResponseCompression();
                app.Run(ProcessAsync);
            })
            .ConfigureServices(services =>
            {
                services.AddResponseCompression(options =>
                {
                    // options.EnableForHttps = false;
                    options.Providers.Add<GzipCompressionProvider>();
                    options.MimeTypes = ResponseCompressionDefaults.MimeTypes.Concat(new[] { "application/json-rpc" });
                });
                services.Configure<GzipCompressionProviderOptions>(options =>
                {
                    options.Level = CompressionLevel.Fastest;
                });
            })
            .Build();
            host.Start();
        }

        private JObject GetBestBlockHash()
        {
            return Blockchain.Singleton.CurrentBlockHash.ToString();
        }

        /// <summary>Block by height (JSON number) or hash; verbose JSON or hex-encoded bytes.</summary>
        private JObject GetBlock(JObject key, bool verbose)
        {
            Block block;
            if (key is JNumber)
            {
                uint index = uint.Parse(key.AsString());
                block = Blockchain.Singleton.GetBlock(index);
            }
            else
            {
                UInt256 hash = UInt256.Parse(key.AsString());
                block = Blockchain.Singleton.View.GetBlock(hash);
            }
            if (block == null)
                throw new RpcException(-100, "Unknown block");
            if (verbose)
            {
                JObject json = block.ToJson();
                json["confirmations"] = Blockchain.Singleton.Height - block.Index + 1;
                UInt256 hash = Blockchain.Singleton.GetNextBlockHash(block.Hash);
                if (hash != null)
                    json["nextblockhash"] = hash.ToString();
                return json;
            }
            return block.ToArray().ToHexString();
        }

        private JObject GetBlockCount()
        {
            return Blockchain.Singleton.Height + 1;
        }

        private JObject GetBlockHash(uint height)
        {
            if (height <= Blockchain.Singleton.Height)
            {
                return Blockchain.Singleton.GetBlockHash(height).ToString();
            }
            throw new RpcException(-100, "Invalid Height");
        }

        /// <summary>Header by height or hash; verbose JSON or hex-encoded bytes.</summary>
        private JObject GetBlockHeader(JObject key, bool verbose)
        {
            Header header;
            if (key is JNumber)
            {
                uint height = uint.Parse(key.AsString());
                header = Blockchain.Singleton.GetHeader(height);
            }
            else
            {
                UInt256 hash = UInt256.Parse(key.AsString());
                header = Blockchain.Singleton.View.GetHeader(hash);
            }
            if (header == null)
                throw new RpcException(-100, "Unknown block");
            if (verbose)
            {
                JObject json = header.ToJson();
                json["confirmations"] = Blockchain.Singleton.Height - header.Index + 1;
                UInt256 hash = Blockchain.Singleton.GetNextBlockHash(header.Hash);
                if (hash != null)
                    json["nextblockhash"] = hash.ToString();
                return json;
            }
            return header.ToArray().ToHexString();
        }

        /// <summary>System fee up to the given height, via the native GAS contract.</summary>
        private JObject GetBlockSysFee(uint height)
        {
            if (height <= Blockchain.Singleton.Height)
                using (ApplicationEngine engine = NativeContract.GAS.TestCall("getSysFeeAmount", height))
                {
                    return engine.ResultStack.Peek().GetBigInteger().ToString();
                }
            throw new RpcException(-100, "Invalid Height");
        }

        private JObject GetConnectionCount()
        {
            return LocalNode.Singleton.ConnectedCount;
        }

        private JObject GetContractState(UInt160 script_hash)
        {
            ContractState contract = Blockchain.Singleton.View.Contracts.TryGet(script_hash);
            return contract?.ToJson() ?? throw new RpcException(-100, "Unknown contract");
        }

        private JObject GetPeers()
        {
            JObject json = new JObject();
            json["unconnected"] = new JArray(LocalNode.Singleton.GetUnconnectedPeers().Select(p =>
            {
                JObject peerJson = new JObject();
                peerJson["address"] = p.Address.ToString();
                peerJson["port"] = p.Port;
                return peerJson;
            }));
            json["bad"] = new JArray(); //badpeers has been removed
            json["connected"] = new JArray(LocalNode.Singleton.GetRemoteNodes().Select(p =>
            {
                JObject peerJson = new JObject();
                peerJson["address"] = p.Remote.Address.ToString();
                peerJson["port"] = p.ListenerTcpPort;
                return peerJson;
            }));
            return json;
        }

        /// <summary>Mempool hashes; optionally split into verified/unverified sets.</summary>
        private JObject GetRawMemPool(bool shouldGetUnverified)
        {
            if (!shouldGetUnverified)
                return new JArray(Blockchain.Singleton.MemPool.GetVerifiedTransactions().Select(p => (JObject)p.Hash.ToString()));

            JObject json = new JObject();
            json["height"] = Blockchain.Singleton.Height;
            Blockchain.Singleton.MemPool.GetVerifiedAndUnverifiedTransactions(
                out IEnumerable<Transaction> verifiedTransactions,
                out IEnumerable<Transaction> unverifiedTransactions);
            json["verified"] = new JArray(verifiedTransactions.Select(p => (JObject)p.Hash.ToString()));
            json["unverified"] = new JArray(unverifiedTransactions.Select(p => (JObject)p.Hash.ToString()));
            return json;
        }

        /// <summary>Transaction by hash; verbose JSON (with block info when persisted) or hex bytes.</summary>
        private JObject GetRawTransaction(UInt256 hash, bool verbose)
        {
            Transaction tx = Blockchain.Singleton.GetTransaction(hash);
            if (tx == null)
                throw new RpcException(-100, "Unknown transaction");
            if (verbose)
            {
                JObject json = tx.ToJson();
                TransactionState txState = Blockchain.Singleton.View.Transactions.TryGet(hash);
                if (txState != null)
                {
                    Header header = Blockchain.Singleton.GetHeader(txState.BlockIndex);
                    json["blockhash"] = header.Hash.ToString();
                    json["confirmations"] = Blockchain.Singleton.Height - header.Index + 1;
                    json["blocktime"] = header.Timestamp;
                    json["vmState"] = txState.VMState;
                }
                return json;
            }
            return tx.ToArray().ToHexString();
        }

        private JObject GetStorage(UInt160 script_hash, byte[] key)
        {
            StorageItem item = Blockchain.Singleton.View.Storages.TryGet(new StorageKey
            {
                ScriptHash = script_hash,
                Key = key
            }) ?? new StorageItem();
            return item.Value?.ToHexString();
        }

        private JObject GetTransactionHeight(UInt256 hash)
        {
            uint? height = Blockchain.Singleton.View.Transactions.TryGet(hash)?.BlockIndex;
            if (height.HasValue) return height.Value;
            throw new RpcException(-100, "Unknown transaction");
        }

        /// <summary>Registered validators from the native NEO contract, flagged active/inactive.</summary>
        private JObject GetValidators()
        {
            using (SnapshotView snapshot = Blockchain.Singleton.GetSnapshot())
            {
                var validators = NativeContract.NEO.GetValidators(snapshot);
                return NativeContract.NEO.GetRegisteredValidators(snapshot).Select(p =>
                {
                    JObject validator = new JObject();
                    validator["publickey"] = p.PublicKey.ToString();
                    validator["votes"] = p.Votes.ToString();
                    validator["active"] = validators.Contains(p.PublicKey);
                    return validator;
                }).ToArray();
            }
        }

        private JObject GetVersion()
        {
            JObject json = new JObject();
            json["tcpPort"] = LocalNode.Singleton.ListenerTcpPort;
            json["wsPort"] = LocalNode.Singleton.ListenerWsPort;
            json["nonce"] = LocalNode.Nonce;
            json["useragent"] = LocalNode.UserAgent;
            return json;
        }

        private JObject InvokeFunction(UInt160 script_hash, string operation, ContractParameter[] args)
        {
            byte[] script;
            using (ScriptBuilder sb = new ScriptBuilder())
            {
                script = sb.EmitAppCall(script_hash, operation, args).ToArray();
            }
            return GetInvokeResult(script);
        }

        // NOTE(review): not reached via Process — "invokescript" calls GetInvokeResult
        // directly; kept as-is.
        private JObject InvokeScript(byte[] script)
        {
            return GetInvokeResult(script);
        }

        private JObject ListPlugins()
        {
            return new JArray(Plugin.Plugins
                .OrderBy(u => u.Name)
                .Select(u => new JObject
                {
                    ["name"] = u.Name,
                    ["version"] = u.Version.ToString(),
                    ["interfaces"] = new JArray(u.GetType().GetInterfaces()
                        .Select(p => p.Name)
                        .Where(p => p.EndsWith("Plugin"))
                        .Select(p => (JObject)p))
                }));
        }

        /// <summary>Relays a transaction via the blockchain actor and reports the outcome.</summary>
        private JObject SendRawTransaction(Transaction tx)
        {
            RelayResultReason reason = system.Blockchain.Ask<RelayResultReason>(tx).Result;
            return GetRelayResult(reason, tx.Hash);
        }

        /// <summary>Relays a block via the blockchain actor and reports the outcome.</summary>
        private JObject SubmitBlock(Block block)
        {
            RelayResultReason reason = system.Blockchain.Ask<RelayResultReason>(block).Result;
            return GetRelayResult(reason, block.Hash);
        }

        /// <summary>Checks whether a string parses as a valid address (script hash).</summary>
        private JObject ValidateAddress(string address)
        {
            JObject json = new JObject();
            UInt160 scriptHash;
            try
            {
                scriptHash = address.ToScriptHash();
            }
            catch
            {
                scriptHash = null;
            }
            json["address"] = address;
            json["isvalid"] = scriptHash != null;
            return json;
        }
    }
}
#region License
/*
The MIT License

Copyright (c) 2008 Sky Morey

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#endregion
using System.Collections.Generic;
using System.Collections;
namespace System.Threading
{
    /// <summary>
    /// FrugalThreadPool: a small, fixed-size pool of dedicated worker threads that
    /// drain a shared queue of <see cref="IEnumerable"/> work batches. Each batch is
    /// taken by one worker, which invokes the executor delegate once per item,
    /// passing that worker's private context object alongside the item.
    /// </summary>
    /// http://www.albahari.com/threading/part4.aspx
    public class FrugalThreadPool : Patterns.Disposeable
    {
        private Thread[] _threadPool;
        private object[] _threadContext;
        private Queue<IEnumerable> _workQueue = new Queue<IEnumerable>();
        private ThreadStatus _threadStatus = ThreadStatus.Idle;
        private int _joiningThreadPoolCount;
        private object _joiningObject = new object();
        private Func<object> _threadContextBuilder;
        private Action<object, object> _executor;
        // Private gate for all queue/state synchronization. The original locked
        // `this`, which is an anti-pattern: external code that locked the pool
        // instance could stall every worker. All Wait/Pulse calls go through this.
        private readonly object _gate = new object();

        /// <summary>
        /// Lifecycle state shared by all workers; guarded by <see cref="_gate"/>.
        /// </summary>
        private enum ThreadStatus
        {
            /// <summary>Accepting and processing work.</summary>
            Idle,
            /// <summary>Draining the queue so <see cref="Join"/> can return.</summary>
            Join,
            /// <summary>Workers should exit as soon as the queue is empty.</summary>
            Stop,
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="FrugalThreadPool"/> class
        /// with 4 worker threads and no per-thread context.
        /// </summary>
        /// <param name="executor">Delegate invoked as (item, threadContext) for every work item.</param>
        public FrugalThreadPool(Action<object, object> executor)
            : this(4, executor, null) { }

        /// <summary>
        /// Initializes a new instance of the <see cref="FrugalThreadPool"/> class
        /// with no per-thread context.
        /// </summary>
        /// <param name="threadCount">The number of worker threads to start.</param>
        /// <param name="executor">Delegate invoked as (item, threadContext) for every work item.</param>
        public FrugalThreadPool(int threadCount, Action<object, object> executor)
            : this(threadCount, executor, null) { }

        /// <summary>
        /// Initializes a new instance of the <see cref="FrugalThreadPool"/> class.
        /// Starts <paramref name="threadCount"/> worker threads immediately.
        /// </summary>
        /// <param name="threadCount">The number of worker threads to start.</param>
        /// <param name="executor">Delegate invoked as (item, threadContext) for every work item.</param>
        /// <param name="threadContextBuilder">Optional factory producing one private context object per worker thread; may be null.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="executor"/> is null.</exception>
        public FrugalThreadPool(int threadCount, Action<object, object> executor, Func<object> threadContextBuilder)
        {
            // BUG FIX: the original tested the still-null FIELD `_executor` rather than
            // the `executor` parameter, so every construction threw ArgumentNullException.
            if (executor == null)
                throw new ArgumentNullException("executor");
            _executor = executor;
            _threadPool = new Thread[threadCount];
            _threadContext = new object[threadCount];
            _threadContextBuilder = threadContextBuilder;
            for (int threadIndex = 0; threadIndex < _threadPool.Length; threadIndex++)
            {
                object threadContext;
                _threadPool[threadIndex] = CreateAndStartThread("FrugalPool: " + threadIndex.ToString(), out threadContext);
                _threadContext[threadIndex] = threadContext;
            }
        }

        /// <summary>
        /// Releases unmanaged and - optionally - managed resources. Signals every
        /// worker to stop, then blocks until they have all exited.
        /// </summary>
        /// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                lock (_gate)
                {
                    _threadStatus = ThreadStatus.Stop;
                    Monitor.PulseAll(_gate);
                }
                foreach (Thread thread in _threadPool)
                    thread.Join();
            }
        }

        /// <summary>
        /// Creates one worker thread, builds its private context (if a builder was
        /// supplied), and starts it.
        /// </summary>
        /// <param name="name">The thread name (visible in debuggers).</param>
        /// <param name="threadContext">Receives the context object handed to the worker.</param>
        /// <returns>The started thread.</returns>
        private Thread CreateAndStartThread(string name, out object threadContext)
        {
            var thread = new Thread(ThreadWorker) { Name = name };
            threadContext = (_threadContextBuilder == null ? null : _threadContextBuilder());
            thread.Start(threadContext);
            return thread;
        }

        /// <summary>
        /// Gets the per-worker context objects, index-aligned with the worker threads.
        /// </summary>
        /// <value>The thread contexts.</value>
        public object[] ThreadContexts
        {
            get { return _threadContext; }
        }

        /// <summary>
        /// Worker loop: repeatedly dequeue one batch and run the executor over its
        /// items. While the queue is empty, honors Stop (exit) and Join (report this
        /// worker as drained, then keep waiting for more work).
        /// </summary>
        private void ThreadWorker(object threadContext)
        {
            IEnumerable list;
            while (true)
            {
                lock (_gate)
                {
                    while (_workQueue.Count == 0)
                    {
                        switch (_threadStatus)
                        {
                            case ThreadStatus.Stop:
                                return;
                            case ThreadStatus.Join:
                                // Tell Join() this worker has nothing left to do.
                                lock (_joiningObject)
                                {
                                    _joiningThreadPoolCount--;
                                    Monitor.Pulse(_joiningObject);
                                }
                                break;
                        }
                        Monitor.Wait(_gate);
                    }
                    list = _workQueue.Dequeue();
                }
                // Execute outside the lock so other workers can dequeue concurrently.
                if (list != null)
                    foreach (object obj in list)
                        _executor(obj, threadContext);
            }
        }

        /// <summary>
        /// Queues one batch of work items and wakes a worker.
        /// </summary>
        /// <param name="list">The batch; each element is passed to the executor.</param>
        /// <exception cref="InvalidOperationException">Thrown if the pool is joining or stopping.</exception>
        public void Add(IEnumerable list)
        {
            if (_threadStatus != ThreadStatus.Idle)
                throw new InvalidOperationException();
            lock (_gate)
            {
                _workQueue.Enqueue(list);
                Monitor.Pulse(_gate);
            }
        }

        /// <summary>
        /// Blocks until every queued batch has been processed and all workers are
        /// idle, then returns the pool to the Idle state.
        /// </summary>
        public void Join()
        {
            lock (_gate)
            {
                _threadStatus = ThreadStatus.Join;
                _joiningThreadPoolCount = _threadPool.Length;
                Monitor.PulseAll(_gate);
            }
            lock (_joiningObject)
            {
                while (_joiningThreadPoolCount > 0)
                    Monitor.Wait(_joiningObject);
                _threadStatus = ThreadStatus.Idle;
            }
        }

        /// <summary>
        /// Drains all pending work, then swaps in a new executor delegate for
        /// subsequent batches.
        /// </summary>
        /// <param name="executor">The new executor.</param>
        public void JoinAndChange(Action<object, object> executor)
        {
            Join();
            _executor = executor;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using Lucene.Net.Spatial.Prefix;
using Lucene.Net.Spatial.Prefix.Tree;
using Lucene.Net.Spatial.Queries;
using NUnit.Framework;
using Spatial4n.Core.Context;
using Spatial4n.Core.Distance;
using Spatial4n.Core.Shapes;
using Spatial4n.Core.Util;

namespace Lucene.Net.Contrib.Spatial.Test.Prefix
{
    /// <summary>
    /// Exercises <see cref="RecursivePrefixTreeStrategy"/> over a
    /// <see cref="GeohashPrefixTree"/>: scan-level variation, precision at
    /// one-meter resolution, distance-error percentages, and randomized
    /// cluster queries at several world locations.
    /// </summary>
    public class TestRecursivePrefixTreeStrategy : StrategyTestCase
    {
        // Geohash precision (number of levels) used by the current test run.
        private int maxLength;

        // Tests should call this first: configures a geographic context and a
        // recursive prefix-tree strategy with the given geohash precision.
        private void init(int maxLength)
        {
            this.maxLength = maxLength;
            this.ctx = SpatialContext.GEO;
            var grid = new GeohashPrefixTree(ctx, maxLength);
            this.strategy = new RecursivePrefixTreeStrategy(grid, GetType().Name);
        }

        [Test]
        public void testFilterWithVariableScanLevel()
        {
            init(GeohashPrefixTree.GetMaxLevelsPossible());
            getAddAndVerifyIndexedDocuments(DATA_WORLD_CITIES_POINTS);

            //execute queries for each prefix grid scan level
            for (int i = 0; i <= maxLength; i++)
            {
                ((RecursivePrefixTreeStrategy) strategy).SetPrefixGridScanLevel(i);
                executeQueries(SpatialMatchConcern.FILTER, QTEST_Cities_Intersects_BBox);
            }
        }

        [Test]
        public void testOneMeterPrecision()
        {
            init(GeohashPrefixTree.GetMaxLevelsPossible());
            GeohashPrefixTree grid = (GeohashPrefixTree) ((RecursivePrefixTreeStrategy) strategy).GetGrid();
            //DWS: I know this to be true.  11 is needed for one meter
            double degrees = DistanceUtils.Dist2Degrees(0.001, DistanceUtils.EARTH_MEAN_RADIUS_KM);
            assertEquals(11, grid.GetLevelForDistance(degrees));
        }

        [Test]
        public void testPrecision()
        {
            init(GeohashPrefixTree.GetMaxLevelsPossible());

            Point iPt = ctx.MakePoint(2.8028712999999925, 48.3708044); //lon, lat
            addDocument(newDoc("iPt", iPt));
            commit();

            Point qPt = ctx.MakePoint(2.4632387000000335, 48.6003516);

            double KM2DEG = DistanceUtils.Dist2Degrees(1, DistanceUtils.EARTH_MEAN_RADIUS_KM);
            double DEG2KM = 1/KM2DEG;

            const double DIST = 35.75; //35.7499...
            assertEquals(DIST, ctx.GetDistCalc().Distance(iPt, qPt)*DEG2KM, 0.001);

            //distErrPct will affect the query shape precision. The indexed precision
            // was set to nearly zilch via init(GeohashPrefixTree.getMaxLevelsPossible());
            const double distErrPct = 0.025; //the suggested default, by the way
            const double distMult = 1 + distErrPct;

            assertTrue(35.74*distMult >= DIST);
            checkHits(q(qPt, 35.74*KM2DEG, distErrPct), 1, null);

            assertTrue(30*distMult < DIST);
            checkHits(q(qPt, 30*KM2DEG, distErrPct), 0, null);

            assertTrue(33*distMult < DIST);
            checkHits(q(qPt, 33*KM2DEG, distErrPct), 0, null);

            assertTrue(34*distMult < DIST);
            checkHits(q(qPt, 34*KM2DEG, distErrPct), 0, null);
        }

        [Test]
        public void geohashRecursiveRandom()
        {
            init(12);

            var random = NewRandom();

            //1. Iterate test with the cluster at some worldly point of interest
            var clusterCenters = new Point[] {ctx.MakePoint(-180, 0), ctx.MakePoint(0, 90), ctx.MakePoint(0, -90)};
            foreach (var clusterCenter in clusterCenters)
            {
                //2. Iterate on size of cluster (a really small one and a large one)
                String hashCenter = GeohashUtils.EncodeLatLon(clusterCenter.GetY(), clusterCenter.GetX(), maxLength);
                //calculate the number of degrees in the smallest grid box size (use for both lat & lon)
                String smallBox = hashCenter.Substring(0, hashCenter.Length - 1); //chop off leaf precision
                Rectangle clusterDims = GeohashUtils.DecodeBoundary(smallBox, ctx);
                double smallRadius = Math.Max(clusterDims.GetMaxX() - clusterDims.GetMinX(),
                                              clusterDims.GetMaxY() - clusterDims.GetMinY());
                Assert.IsTrue(smallRadius < 1);
                const double largeRadius = 20d; //good large size; don't use >=45 for this test code to work
                double[] radiusDegs = {largeRadius, smallRadius};
                foreach (double radiusDeg in radiusDegs)
                {
                    //3. Index random points in this cluster box
                    deleteAll();
                    var points = new List<Point>();
                    for (int i = 0; i < 20; i++)
                    {
                        //Note that this will not result in randomly distributed points in the
                        // circle, they will be concentrated towards the center a little. But
                        // it's good enough.
                        // BUG FIX: the bearing was `random.Next()*360` — an arbitrarily huge
                        // int. The parallel call below uses random.Next(360), and the upstream
                        // Java test uses random.nextInt(360); use a 0..359 bearing here too.
                        Point pt = ctx.GetDistCalc().PointOnBearing(clusterCenter,
                                                                    random.NextDouble()*radiusDeg,
                                                                    random.Next(360), ctx, null);
                        pt = alignGeohash(pt);
                        points.Add(pt);
                        addDocument(newDoc("" + i, pt));
                    }
                    commit();

                    //3. Use some query centers. Each is twice the cluster's radius away.
                    for (int ri = 0; ri < 4; ri++)
                    {
                        Point queryCenter = ctx.GetDistCalc().PointOnBearing(clusterCenter,
                                                                             radiusDeg*2, random.Next(360), ctx, null);
                        queryCenter = alignGeohash(queryCenter);
                        //4.1 Query a small box getting nothing
                        checkHits(q(queryCenter, radiusDeg - smallRadius/2), 0, null);
                        //4.2 Query a large box enclosing the cluster, getting everything
                        checkHits(q(queryCenter, radiusDeg*3 + smallRadius/2), points.Count, null);
                        //4.3 Query a medium box getting some (calculate the correct solution and verify)
                        double queryDist = radiusDeg*2;

                        //Find matching points.  Put into int[] of doc ids which is the same thing as the index into points list.
                        int[] ids = new int[points.Count];
                        int ids_sz = 0;
                        for (int i = 0; i < points.Count; i++)
                        {
                            Point point = points[i];
                            if (ctx.GetDistCalc().Distance(queryCenter, point) <= queryDist)
                                ids[ids_sz++] = i;
                        }

                        var ids_new = new int[ids_sz]; // will pad with 0's if larger
                        Array.Copy(ids, ids_new, ids_sz);
                        ids = ids_new;
                        //assert ids_sz > 0 (can't because randomness keeps us from being able to)

                        checkHits(q(queryCenter, queryDist), ids.Length, ids);
                    }
                } //for radiusDeg
            } //for clusterCenter
        }

        // Builds an Intersects query for a circle of the given radius (degrees)
        // around pt, with the given distance-error percentage.
        private SpatialArgs q(Point pt, double dist, double distErrPct = 0.0)
        {
            Shape shape = ctx.MakeCircle(pt, dist);
            var args = new SpatialArgs(SpatialOperation.Intersects, shape);
            args.DistErrPct = distErrPct;
            return args;
        }

        // Runs the query and asserts the hit count; when assertIds is non-null,
        // also asserts that each of those document ids is present in the results.
        private void checkHits(SpatialArgs args, int assertNumFound, int[] assertIds)
        {
            SearchResults got = executeQuery(strategy.MakeQuery(args), 100);
            assertEquals("" + args, assertNumFound, got.numFound);
            if (assertIds != null)
            {
                var gotIds = new HashSet<int>();
                foreach (SearchResult result in got.results)
                {
                    gotIds.Add(int.Parse(result.document.Get("id")));
                }
                foreach (int assertId in assertIds)
                {
                    Assert.True(gotIds.Contains(assertId), "has " + assertId);
                }
            }
        }

        /* NGeohash round-trip for given precision. */
        private Point alignGeohash(Point p)
        {
            return GeohashUtils.Decode(GeohashUtils.EncodeLatLon(p.GetY(), p.GetX(), maxLength), ctx);
        }
    }
}
/* Copyright 2019 Esri
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Copyright 2010 ESRI
//
// All rights reserved under the copyright laws of the United States
// and applicable international laws, treaties, and conventions.
//
// You may freely redistribute and use this sample code, with or
// without modification, provided you include the original copyright
// notice and use restrictions.
//
// See the use restrictions at &lt;your ArcGIS install location&gt;/DeveloperKit10.0/userestrictions.txt.
//
// NOTE(fix): a stray bodiless `namespace SchematicCreateBasicSettingsAddIn`
// declaration preceded the using directives here; it was invalid C# and has
// been removed — the real namespace declaration appears below.
using System;
using System.Collections.Generic;
using System.Collections;
using System.Collections.Specialized;
using System.Text;
using System.IO;
using ESRI.ArcGIS.Schematic;
using ESRI.ArcGIS.Geodatabase;
using ESRI.ArcGIS.DataSourcesGDB;
using ESRI.ArcGIS.ArcCatalogUI;
using ESRI.ArcGIS.ArcCatalog;
using ESRI.ArcGIS.Catalog;
using ESRI.ArcGIS.CatalogUI;
using ESRI.ArcGIS.ArcMap;
using ESRI.ArcGIS.ArcMapUI;
using ESRI.ArcGIS.Carto;
using ESRI.ArcGIS.NetworkAnalysis;
using System.Windows.Forms;
using ESRI.ArcGIS.esriSystem;

namespace SchematicCreateBasicSettingsAddIn
{
    /// <summary>
    /// ArcCatalog add-in button that walks the user through a small wizard
    /// (names form, reduction-rules form, advanced form) to create a schematic
    /// dataset and/or diagram template from the geometric-network layers of a
    /// chosen map document.
    /// </summary>
    public class GenerateSchematicTemplate : ESRI.ArcGIS.Desktop.AddIns.Button
    {
        #region Variables
        public frmDatasetTemplateName formNames;
        ESRI.ArcGIS.Geodatabase.IWorkspace m_pWS;
        String m_sfn;
        ESRI.ArcGIS.Schematic.ISchematicBuilder m_pB;
        ESRI.ArcGIS.Schematic.ISchematicDataset m_pSDS;
        ESRI.ArcGIS.Schematic.ISchematicStandardBuilder m_pSB;
        ESRI.ArcGIS.Schematic.ISchematicDiagramClass m_pSDT;
        ESRI.ArcGIS.Schematic.ISchematicDatasetImport m_pSDI;
        NameEvents templateInfo;
        public frmSelectItemsToReduce formReduce;
        private bool blnCancel;
        public frmAdvanced formAdvanced;
        private string strLayers;        // semicolon-delimited names of all imported classes
        private string strNodeLayers;    // semicolon-delimited names of junction classes only
        NameValueCollection m_myCol = new NameValueCollection();   // dataset name -> editable field names
        private IGxObject m_SelectedObject = null;
        #endregion   // was mislabeled "#endregion Attributes" for "#region Variables"

        public GenerateSchematicTemplate()
        {
        }

        /// <summary>
        /// Prompts the user for a map document and returns its feature layers,
        /// or null if the user cancels or the document cannot be read.
        /// </summary>
        private IEnumLayer GetLayers()
        {
            //now get the map document to parse out the feature classes
            GxDialog pGxDialog = new GxDialogClass();
            IEnumGxObject pEnumGxObject;
            bool pResult;
            pGxDialog.ObjectFilter = new GxFilterMapsClass();
            pGxDialog.Title = "Select a map document";
            try
            {
                pResult = pGxDialog.DoModalOpen(0, out pEnumGxObject);
                //check to see if the user canceled the dialog
                if (pResult == false)
                    return null;
                IGxObject pGxObject = pEnumGxObject.Next();
                IMapReader pMapReader = new MapReaderClass();
                pMapReader.Open(pGxObject.FullName.ToString());
                IMap pMap = pMapReader.get_Map(0);
                ESRI.ArcGIS.esriSystem.UID pUID = new ESRI.ArcGIS.esriSystem.UIDClass();
                pUID.Value = "{40A9E885-5533-11D0-98BE-00805F7CED21}"; //feature layer
                IEnumLayer pLayers = pMap.get_Layers(pUID, true);
                return pLayers;
            }
            catch
            {
                //error getting layers
                return null;
            }
        }

        /// <summary>
        /// Flattens the four network class enumerations into one dictionary keyed
        /// by alias name. Duplicate aliases keep the first class encountered
        /// (the original swallowed the duplicate-key exception; preserved here).
        /// </summary>
        private Dictionary<string, IFeatureClass> ProcessFCs(IEnumFeatureClass fcComplexEdge, IEnumFeatureClass fcComplexNode,
                                                             IEnumFeatureClass fcSimpleEdge, IEnumFeatureClass fcSimpleNode)
        {
            Dictionary<string, IFeatureClass> pDictionary = new Dictionary<string, IFeatureClass>();
            // The original repeated the same enumeration loop four times; collapsed
            // into one loop over the four enumerators — same order, same behavior.
            foreach (IEnumFeatureClass fcEnum in new IEnumFeatureClass[] { fcComplexEdge, fcComplexNode, fcSimpleEdge, fcSimpleNode })
            {
                IFeatureClass fc = fcEnum.Next();
                while (fc != null)
                {
                    try
                    {
                        pDictionary.Add(fc.AliasName, fc);
                    }
                    catch
                    {
                        //do nothing — duplicate alias, keep the first one
                    }
                    fc = fcEnum.Next();
                }
            }
            return pDictionary;
        }

        /// <summary>
        /// Records every user-editable field of <paramref name="fc"/> (skipping
        /// OBJECTID/SHAPE/GlobalID and the class's OID/shape fields) under
        /// <paramref name="datasetName"/> in m_myCol for the frmAdvanced form.
        /// </summary>
        private void CollectFieldNames(string datasetName, IFeatureClass fc)
        {
            IFields pFields = fc.Fields;
            for (int i = 0; i < pFields.FieldCount; i++)
            {
                string fieldName = pFields.get_Field(i).Name.ToString();
                if ((fieldName != "OBJECTID") &&
                    (fieldName != "SHAPE") &&
                    (fieldName != "GlobalID") &&
                    (fieldName != fc.OIDFieldName.ToString()) &&
                    (fieldName != fc.ShapeFieldName.ToString()))
                {
                    m_myCol.Add(datasetName, fieldName);
                }
            }
        }

        /// <summary>
        /// Imports every geometric-network layer into the schematic template, then
        /// imports any network classes that were NOT in the map as plain object
        /// classes. Returns the semicolon-delimited list of imported class names
        /// (empty when no network layers were found). Side effects: populates
        /// strNodeLayers and m_myCol, saves the dataset.
        /// </summary>
        private string CreateSchLayers(IEnumLayer pLayers)
        {
            if (pLayers == null)
                return "";
            ILayer pLayer = pLayers.Next();
            IFeatureLayer featureLayer;
            IFeatureClass featureClass;
            string pStrLayerNames = "";
            IDataset pDataset;
            System.Windows.Forms.Cursor.Current = Cursors.WaitCursor;
            System.Windows.Forms.Cursor.Show();
            m_pSDS.DesignMode = true;
            m_pSDI = (ESRI.ArcGIS.Schematic.ISchematicDatasetImport)m_pSDS;
            Dictionary<string, IFeatureClass> myDictionary = new Dictionary<string, IFeatureClass>();
            IGeometricNetwork gn = null;
            do
            {
                featureLayer = (IFeatureLayer)pLayer;
                featureClass = featureLayer.FeatureClass;
                pDataset = (IDataset)featureClass;
                if (featureClass.FeatureType == esriFeatureType.esriFTSimpleJunction ||
                    featureClass.FeatureType == esriFeatureType.esriFTSimpleEdge ||
                    featureClass.FeatureType == esriFeatureType.esriFTComplexEdge ||
                    featureClass.FeatureType == esriFeatureType.esriFTComplexJunction)
                {
                    //Feature classes with these FeatureTypes participate in a geometric network.
                    INetworkClass networkClass = (INetworkClass)featureLayer.FeatureClass;
                    if (networkClass.GeometricNetwork != null)
                    {
                        //we have a network class
                        if ((gn == null) || (gn != networkClass.GeometricNetwork))
                        {
                            //first time we see this network: gather ALL of its classes so
                            //we can later import the ones that were not in the map
                            gn = networkClass.GeometricNetwork;
                            IEnumFeatureClass fcComplexEdge = networkClass.GeometricNetwork.get_ClassesByType(esriFeatureType.esriFTComplexEdge);
                            IEnumFeatureClass fcComplexNode = networkClass.GeometricNetwork.get_ClassesByType(esriFeatureType.esriFTComplexJunction);
                            IEnumFeatureClass fcSimpleEdge = networkClass.GeometricNetwork.get_ClassesByType(esriFeatureType.esriFTSimpleEdge);
                            IEnumFeatureClass fcSimpleNode = networkClass.GeometricNetwork.get_ClassesByType(esriFeatureType.esriFTSimpleJunction);
                            Dictionary<string, IFeatureClass> localDictionary =
                                ProcessFCs(fcComplexEdge, fcComplexNode, fcSimpleEdge, fcSimpleNode);
                            if (myDictionary.Count == 0) //just copy it
                            {
                                myDictionary = localDictionary;
                            }
                            else //merge
                            {
                                Dictionary<string, IFeatureClass>.KeyCollection keyColl = localDictionary.Keys;
                                foreach (string s in keyColl)
                                {
                                    IFeatureClass fcLocal;
                                    bool bln = localDictionary.TryGetValue(s, out fcLocal);
                                    myDictionary.Add(s, fcLocal);
                                }
                            }
                        }
                        //Build up the string that will go to the select items to reduce form
                        pStrLayerNames += pDataset.Name.ToString();
                        pStrLayerNames += ";";
                        //Build up the string for just the node feature classes
                        if (featureClass.FeatureType == esriFeatureType.esriFTSimpleJunction ||
                            featureClass.FeatureType == esriFeatureType.esriFTComplexJunction)
                        {
                            strNodeLayers += pDataset.Name.ToString();
                            strNodeLayers += ";";
                        }
                        //create the fields collections to be used by the frmAdvanced form
                        CollectFieldNames(pDataset.Name.ToString(), featureClass);
                        //remove the layer from the list of dictionary classes
                        if (myDictionary.ContainsKey(featureClass.AliasName))
                        {
                            myDictionary.Remove(featureClass.AliasName);
                        }
                        m_pSDI.ImportFeatureLayer(featureLayer, m_pSDT, true, true, true);
                    }
                }
                pLayer = pLayers.Next();
            } while (pLayer != null);

            //handle any feature classes that were not in the map
            if (myDictionary.Count > 0)
            {
                Dictionary<string, IFeatureClass>.KeyCollection keyColl = myDictionary.Keys;
                foreach (string s in keyColl)
                {
                    IFeatureClass fc;
                    bool bln = myDictionary.TryGetValue(s, out fc);
                    IObjectClass o = (IObjectClass)fc;
                    pDataset = (IDataset)fc;
                    pStrLayerNames += pDataset.Name.ToString();
                    pStrLayerNames += ";";
                    //Build up the string for just the node feature classes
                    // BUG FIX: the second operand tested `featureClass` — the stale
                    // variable left over from the layer loop above — instead of `fc`.
                    if (fc.FeatureType == esriFeatureType.esriFTSimpleJunction ||
                        fc.FeatureType == esriFeatureType.esriFTComplexJunction)
                    {
                        strNodeLayers += pDataset.Name.ToString();
                        strNodeLayers += ";";
                    }
                    //create the fields collections to be used by the frmAdvanced form
                    CollectFieldNames(pDataset.Name.ToString(), fc);
                    if ((fc.FeatureType == esriFeatureType.esriFTComplexJunction) ||
                        (fc.FeatureType == esriFeatureType.esriFTSimpleJunction))
                    {
                        //node
                        m_pSDI.ImportObjectClass(o, m_pSDT, true, esriSchematicElementType.esriSchematicNodeType);
                    }
                    else
                    {
                        //link
                        m_pSDI.ImportObjectClass(o, m_pSDT, true, esriSchematicElementType.esriSchematicLinkType);
                    }
                }
            }
            m_pSDS.Save(ESRI.ArcGIS.esriSystem.esriArcGISVersion.esriArcGISVersionCurrent, true);
            m_pSDS.DesignMode = false;
            return pStrLayerNames;
        }

        /// <summary>
        /// Button entry point: runs the three-form wizard. Each form's "done"
        /// event handler performs the real work; blnCancel short-circuits the
        /// remaining steps once any form is cancelled or a name collides.
        /// </summary>
        protected override void OnClick()
        {
            blnCancel = false;
            formNames = new frmDatasetTemplateName();
            formNames.cancelFormEvent += new EventHandler(formNames_cancelFormEvent);
            formNames.nextFormEvent += new EventHandler<NameEvents>(formNames_nextFormEvent);
            m_SelectedObject = ArcCatalog.ThisApplication.SelectedObject;
            if ((m_SelectedObject.Category == "Schematic Dataset") || (m_SelectedObject.Category.ToLower().Contains("database")))
            {
                if (m_SelectedObject.Category.ToLower().Contains("database"))
                {
                    //get dataset and template names, then create the objects
                    formNames.blnNewDataset = true;
                }
                else
                {
                    //dataset, just get template names, then create objects
                    formNames.blnNewDataset = false;
                }
                //show the first form of the wizard
                if (formNames.ShowDialog() == DialogResult.Cancel)
                {
                    formNames = null;
                    return;
                }
            }
            else
            {
                //we are not on a database or a schematic dataset
                blnCancel = true;
            }
            if (blnCancel == true)
            {
                // NOTE(review): this message only fires for the "not on a database or
                // schematic dataset" branch above, so its wording looks wrong for that
                // case — kept as-is to preserve existing UI behavior.
                System.Windows.Forms.MessageBox.Show("The name of the dataset or template already exists. Please try again with valid names.");
            }
            if (blnCancel != true) //only true if the user cancels the first form formNames_cancelFormEvent
            {
                System.Windows.Forms.Cursor.Current = Cursors.WaitCursor;
                IEnumLayer pEnumLayer = GetLayers();
                if (pEnumLayer == null)
                {
                    //should only happen if the user clicks cancel on the gxdialog
                    blnCancel = true;
                }
                else
                {
                    strLayers = CreateSchLayers(pEnumLayer);
                    if (strLayers.Length > 0) //make sure we get something back
                    {
                        //find out if we need to create node reduction rules
                        formReduce = new frmSelectItemsToReduce();
                        formReduce.doneFormEvent += new EventHandler<ReduceEvents>(formReduce_doneFormEvent);
                        formReduce.cancelFormEvent += new EventHandler(formReduce_cancelFormEvent);
                        formReduce.itemList = strNodeLayers;
                        System.Windows.Forms.Cursor.Current = Cursors.Default;
                        formReduce.ShowDialog();
                    }
                    else
                    {
                        //this can happen if the map document didn't have any
                        //layers corresponding to a geometric network
                        blnCancel = true;
                    }
                }
                System.Windows.Forms.Cursor.Current = Cursors.Default;
            }
            if (blnCancel != true) //could have cancelled on either frmDatasetTemplateName or frmSelectItemsToReduce
            {
                //Advanced Form
                formAdvanced = new frmAdvanced();
                formAdvanced.doneFormEvent += new EventHandler<AdvancedEvents>(formAdvanced_doneFormEvent);
                formAdvanced.strLayers = this.strLayers;
                formAdvanced.strNodeLayers = this.strNodeLayers;
                formAdvanced.m_myCol = this.m_myCol;
                formAdvanced.ShowDialog();
            }
            ArcCatalog.ThisApplication.Refresh(m_sfn);
            try
            {
                ArcCatalog.ThisApplication.Location = m_SelectedObject.FullName.ToString();
            }
            catch
            {
            }
            cleanUp();
        }

        // User cancelled the reduction-rules form: abort the remaining wizard steps.
        void formReduce_cancelFormEvent(object sender, EventArgs e)
        {
            blnCancel = true;
            formReduce.Close();
        }

        /// <summary>
        /// "Done" on the advanced form: optionally attaches a SmartTree layout
        /// algorithm (with direction and a RootFlag attribute on the chosen root
        /// class), and creates associated-field attributes requested by the user.
        /// </summary>
        void formAdvanced_doneFormEvent(object sender, AdvancedEvents e)
        {
            m_pSDS.DesignMode = true;
            formAdvanced.Cursor = System.Windows.Forms.Cursors.WaitCursor;
            //process the algorithm if there is one
            if (e.AlgorithmName != "")
            {
                ISchematicAlgoSmartTree a = new SchematicAlgoSmartTreeClass();
                if (e.AlgorithmParams.Count > 0)
                {
                    Dictionary<string, string>.KeyCollection keys = e.AlgorithmParams.Keys;
                    string strValue = "";
                    foreach (string s in keys)
                    {
                        if (s == "Direction")
                        {
                            e.AlgorithmParams.TryGetValue(s, out strValue);
                            if (strValue == "Top to Bottom")
                            {
                                a.Direction = esriSchematicAlgoDirection.esriSchematicAlgoTopDown;
                            }
                            else if (strValue == "Bottom to Top")
                            {
                                a.Direction = esriSchematicAlgoDirection.esriSchematicAlgoBottomUp;
                            }
                            else if (strValue == "Left to Right")
                            {
                                a.Direction = esriSchematicAlgoDirection.esriSchematicAlgoLeftRight;
                            }
                            else
                            {
                                a.Direction = esriSchematicAlgoDirection.esriSchematicAlgoRightLeft;
                            }
                        }
                    }
                    // NOTE(review): the RootClass handling is nested inside the
                    // AlgorithmParams.Count > 0 check, so a root flag is only created
                    // when other parameters exist — preserved as-is; confirm intended.
                    if (e.RootClass != "")
                    {
                        ISchematicElementClassContainer pECC = (ISchematicElementClassContainer)m_pSDS;
                        ISchematicElementClass pEC = pECC.GetSchematicElementClass(e.RootClass);
                        ESRI.ArcGIS.esriSystem.UID u = new ESRI.ArcGIS.esriSystem.UID();
                        u.Value = "{3AD9D8B8-0A1D-4F32-ABB5-54B848A46F85}";
                        ISchematicAttributeConstant pAttrConst = (ISchematicAttributeConstant)pEC.CreateSchematicAttribute("RootFlag", u);
                        ISchematicAttributeManagement pAttrMgmt = (ISchematicAttributeManagement)pAttrConst;
                        pAttrMgmt.StorageMode = esriSchematicAttributeStorageMode.esriSchematicAttributeFieldStorage;
                        pAttrConst.ConstantValue = "-1";
                    }
                }
                m_pSDT.SchematicAlgorithm = (ISchematicAlgorithm)a;
            }
            //check to see if we need to add associated fields
            if (e.FieldsToCreate != null)
            {
                if (e.FieldsToCreate.Count > 0)
                {
                    ISchematicElementClassContainer pECC = (ISchematicElementClassContainer)m_pSDS;
                    //create the associated field attributes
                    string[] keys = e.FieldsToCreate.AllKeys;
                    foreach (string s in keys)
                    {
                        //get the feature class
                        ISchematicElementClass pEC = pECC.GetSchematicElementClass(s);
                        if (pEC != null)
                        {
                            string strName = "";
                            string[] values = e.FieldsToCreate.GetValues(s);
                            foreach (string v in values)
                            {
                                //create the field
                                ESRI.ArcGIS.esriSystem.UID u = new ESRI.ArcGIS.esriSystem.UID();
                                u.Value = "{7DE3A19D-32D0-41CD-B896-37CA3AFBD88A}";
                                IClass pClass = (IClass)pEC;
                                //only handle names that don't already exist in the schematic tables
                                if (pClass.FindField(v) == -1)
                                {
                                    strName = v.ToString();
                                    ISchematicAttributeAssociatedField pFieldAttr = (ISchematicAttributeAssociatedField)pEC.CreateSchematicAttribute(strName, u);
                                    pFieldAttr.AssociatedFieldName = v;
                                    ISchematicAttributeManagement pAttrMgmt = (ISchematicAttributeManagement)pFieldAttr;
                                    pAttrMgmt.StorageMode = esriSchematicAttributeStorageMode.esriSchematicAttributeFieldStorage;
                                }
                            }
                        }
                    }
                }
            }
            m_pSDS.Save(ESRI.ArcGIS.esriSystem.esriArcGISVersion.esriArcGISVersionCurrent, true);
            m_pSDS.DesignMode = false;
            formAdvanced.Cursor = System.Windows.Forms.Cursors.Default;
            formAdvanced.Close();
        }

        /// <summary>
        /// Creates the schematic diagram template (and its builder settings) in
        /// the current dataset. Returns false when the template name already exists.
        /// </summary>
        private Boolean CreateTemplate(NameEvents templateInfo)
        {
            //need to get everything first
            IGxDatabase pDatabase = null;
            ISchematicDiagramClassContainer pDiagramClassContainer = null;
            if (m_SelectedObject.Category == "Schematic Dataset")
            {
                pDatabase = (IGxDatabase)m_SelectedObject.Parent;
            }
            else //on the database already
            {
                pDatabase = (IGxDatabase)m_SelectedObject;
            }
            m_pWS = pDatabase.Workspace;
            ESRI.ArcGIS.Schematic.ISchematicWorkspaceFactory pSWF = new SchematicWorkspaceFactory();
            ESRI.ArcGIS.Schematic.ISchematicWorkspace pSW = pSWF.Open(m_pWS);
            m_pSDS = pSW.get_SchematicDatasetByName(templateInfo.DatasetName);
            //check to see if the template name already exists
            pDiagramClassContainer = (ISchematicDiagramClassContainer)m_pSDS;
            m_pSDT = pDiagramClassContainer.GetSchematicDiagramClass(templateInfo.TemplateName.ToString());
            if (m_pSDT != null)
                return false;
            //create the schematic template
            m_pSDT = m_pSDS.CreateSchematicDiagramClass(templateInfo.TemplateName);
            if ((templateInfo.AutoCreate == true) || (templateInfo.UseVertices == true))
            {
                m_pB = (ESRI.ArcGIS.Schematic.ISchematicBuilder)m_pSDT;
                m_pSB = (ESRI.ArcGIS.Schematic.ISchematicStandardBuilder)m_pSDT.SchematicBuilder;
                m_pSB.InitializeLinksVertices = templateInfo.UseVertices;
                m_pSB.AutoCreateElementClasses = templateInfo.AutoCreate;
            }
            m_pSDS.Save(ESRI.ArcGIS.esriSystem.esriArcGISVersion.esriArcGISVersion10, false);
            return true;
        }

        /// <summary>
        /// Creates a new schematic dataset in the selected geodatabase. Returns
        /// false when the dataset name is taken or creation fails.
        /// </summary>
        private Boolean CreateDataset(NameEvents templateInfo)
        {
            try
            {
                IGxDatabase pDatabase = (IGxDatabase)m_SelectedObject;
                m_pWS = pDatabase.Workspace;
                ESRI.ArcGIS.Schematic.ISchematicWorkspaceFactory pSWF = new SchematicWorkspaceFactory();
                ESRI.ArcGIS.Schematic.ISchematicWorkspace pSW = pSWF.Open(m_pWS);
                //check to see if this dataset name is already used
                m_pSDS = pSW.get_SchematicDatasetByName(templateInfo.DatasetName.ToString());
                if (m_pSDS != null)
                    return false;
                m_pSDS = pSW.CreateSchematicDataset(templateInfo.DatasetName, "");
                return true;
            }
            catch
            {
                //nothing
                return false;
            }
        }

        // User cancelled the names form: abort the wizard.
        void formNames_cancelFormEvent(object sender, EventArgs e)
        {
            //user is canceling the wizard
            formNames.Close();
            formNames = null;
            blnCancel = true;
        }

        /// <summary>
        /// "Done" on the reduction form: adds one by-priority node-reduction rule
        /// (degree 0 and 2) to the template for each selected node class.
        /// </summary>
        void formReduce_doneFormEvent(object sender, ReduceEvents e)
        {
            //user click the done button on the reduce form
            ISchematicBuilderRule pIsbr;
            ISchematicBuilderRuleContainer pIsbrc = (ISchematicBuilderRuleContainer)m_pSDT;
            ISchematicBuilderRuleContainerEdit pIsbrce = (ISchematicBuilderRuleContainerEdit)pIsbrc;
            formReduce.Cursor = System.Windows.Forms.Cursors.WaitCursor;
            string[] selectedItems = e.SelectedObjects;
            m_pSDS.DesignMode = true;
            foreach (string s in selectedItems)
            {
                //setup rule properties
                ISchematicNodeReductionRuleByPriority pRule = new SchematicNodeReductionRuleByPriorityClass();
                pRule.NodeDegreeConstraint = true;
                pRule.ReduceNodeDegree0 = true;
                pRule.ReduceNodeDegree2 = true;
                pRule.ReduceNodeDegree1 = false;
                pRule.ReduceNodeDegreeSup3 = false;
                //set the name and class to reduce
                ISchematicNodeReductionRule pNR = (ISchematicNodeReductionRule)pRule;
                pNR.Description = "Remove " + s.ToString();
                pNR.NodeClassName = s.ToString();
                //add it to the template
                pIsbr = pIsbrce.AddSchematicBuilderRule();
                pIsbr.SchematicRule = (ISchematicRule)pRule;
            }
            //save and close
            m_pSDS.Save(ESRI.ArcGIS.esriSystem.esriArcGISVersion.esriArcGISVersion10, false);
            m_pSDS.DesignMode = false;
            formReduce.Cursor = System.Windows.Forms.Cursors.Default;
            formReduce.Close();
        }

        /// <summary>
        /// "Next" on the names form: creates the dataset (if requested) and the
        /// template; sets blnCancel when either name already exists.
        /// </summary>
        void formNames_nextFormEvent(object sender, NameEvents e)
        {
            Boolean blnCheck = false;
            //check if we need to create a new dataset
            // NOTE(review): e.AutoCreate is not forwarded here although
            // CreateTemplate reads templateInfo.AutoCreate — confirm the NameEvents
            // constructor defaults it as intended.
            templateInfo = new NameEvents(e.NewDataset, e.DatasetName, e.TemplateName, e.UseVertices);
            formNames.Cursor = System.Windows.Forms.Cursors.WaitCursor;
            if (templateInfo.NewDataset == true)
            {
                blnCheck = CreateDataset(templateInfo);
                if (blnCheck == false)
                {
                    //name already exists
                    blnCancel = true;
                }
                else
                {
                    blnCheck = CreateTemplate(templateInfo);
                    if (blnCheck == false)
                    {
                        //name already exists
                        blnCancel = true;
                    }
                }
            }
            else //just create a new template
            {
                blnCheck = CreateTemplate(templateInfo);
                if (blnCheck == false)
                {
                    //name already exists
                    blnCancel = true;
                }
            }
            formNames.Cursor = System.Windows.Forms.Cursors.Default;
            formNames.Close();
        }

        /// <summary>
        /// Enables the button only when the catalog selection is a geodatabase,
        /// database connection, or schematic dataset.
        /// </summary>
        protected override void OnUpdate()
        {
            // ROBUSTNESS FIX: the original dereferenced SelectedObject without a
            // null check, throwing when nothing is selected in the catalog tree.
            IGxObject selected = (ArcCatalog.ThisApplication != null) ? ArcCatalog.ThisApplication.SelectedObject : null;
            if (selected == null)
            {
                Enabled = false;
                return;
            }
            string category = selected.Category;
            Enabled = (category == "File Geodatabase") ||
                      (category == "Personal Geodatabase") ||
                      (category == "Schematic Dataset") ||
                      (category == "Spatial Database Connection");
        }

        // Releases all COM/form references and resets wizard state between clicks.
        void cleanUp()
        {
            //m_pWSF = null;
            m_pWS = null;
            m_pSDT = null;
            m_pSDS = null;
            m_pSB = null;
            m_pB = null;
            m_SelectedObject = null;
            templateInfo = null;
            m_pSDI = null;
            formNames = null;
            formReduce = null;
            m_sfn = "";
            blnCancel = false;
            strLayers = "";
            strNodeLayers = "";
        }
    }
}
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Grpc.Core.Internal;
using Grpc.Core.Utils;
using NUnit.Framework;

namespace Grpc.Core.Internal.Tests
{
    /// <summary>
    /// Uses fake native call to test interaction of <c>AsyncCall</c> wrapping code with C core in different situations.
    /// Tests drive the call by firing fake completion callbacks (send completion, unary response,
    /// received message, received status) in specific orders and asserting the observable outcome.
    /// </summary>
    public class AsyncCallTest
    {
        Channel channel;
        FakeNativeCall fakeCall;
        AsyncCall<string, string> asyncCall;
        FakeBufferReaderManager fakeBufferReaderManager;

        // Fresh channel, fake native call and AsyncCall wrapper for every test.
        [SetUp]
        public void Init()
        {
            channel = new Channel("localhost", ChannelCredentials.Insecure);
            fakeCall = new FakeNativeCall();
            var callDetails = new CallInvocationDetails<string, string>(channel, "someMethod", null,
                Marshallers.StringMarshaller, Marshallers.StringMarshaller, new CallOptions());
            asyncCall = new AsyncCall<string, string>(callDetails, fakeCall);
            fakeBufferReaderManager = new FakeBufferReaderManager();
        }

        [TearDown]
        public void Cleanup()
        {
            channel.ShutdownAsync().Wait();
            fakeBufferReaderManager.Dispose();
        }

        [Test]
        public void AsyncUnary_CanBeStartedOnlyOnce()
        {
            asyncCall.UnaryCallAsync("request1");
            Assert.Throws(typeof(InvalidOperationException),
                () => asyncCall.UnaryCallAsync("abc"));
        }

        [Test]
        public void AsyncUnary_StreamingOperationsNotAllowed()
        {
            asyncCall.UnaryCallAsync("request1");
            Assert.ThrowsAsync(typeof(InvalidOperationException),
                async () => await asyncCall.ReadMessageAsync());
            Assert.Throws(typeof(InvalidOperationException),
                () => asyncCall.SendMessageAsync("abc", new WriteFlags()));
        }

        [Test]
        public void AsyncUnary_Success()
        {
            var resultTask = asyncCall.UnaryCallAsync("request1");
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseSuccess(asyncCall, fakeCall, resultTask);
        }

        [Test]
        public void AsyncUnary_NonSuccessStatusCode()
        {
            var resultTask = asyncCall.UnaryCallAsync("request1");
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                CreateClientSideStatus(StatusCode.InvalidArgument),
                CreateNullResponse(),
                new Metadata());

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.InvalidArgument);
        }

        [Test]
        public void AsyncUnary_NullResponsePayload()
        {
            var resultTask = asyncCall.UnaryCallAsync("request1");
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                null,
                new Metadata());

            // failure to deserialize the (null) payload surfaces as Internal status.
            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.Internal);
        }

        [Test]
        public void AsyncUnary_RequestSerializationExceptionDoesntLeakResources()
        {
            string nullRequest = null;  // will throw when serializing
            Assert.Throws(typeof(ArgumentNullException), () => asyncCall.UnaryCallAsync(nullRequest));
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        [Test]
        public void AsyncUnary_StartCallFailureDoesntLeakResources()
        {
            fakeCall.MakeStartCallFail();
            Assert.Throws(typeof(InvalidOperationException), () => asyncCall.UnaryCallAsync("request1"));
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        [Test]
        public void SyncUnary_RequestSerializationExceptionDoesntLeakResources()
        {
            string nullRequest = null;  // will throw when serializing
            Assert.Throws(typeof(ArgumentNullException), () => asyncCall.UnaryCall(nullRequest));
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        [Test]
        public void SyncUnary_StartCallFailureDoesntLeakResources()
        {
            fakeCall.MakeStartCallFail();
            Assert.Throws(typeof(InvalidOperationException), () => asyncCall.UnaryCall("request1"));
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        [Test]
        public void ClientStreaming_StreamingReadNotAllowed()
        {
            asyncCall.ClientStreamingCallAsync();
            Assert.ThrowsAsync(typeof(InvalidOperationException),
                async () => await asyncCall.ReadMessageAsync());
        }

        [Test]
        public void ClientStreaming_NoRequest_Success()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseSuccess(asyncCall, fakeCall, resultTask);
        }

        [Test]
        public void ClientStreaming_NoRequest_NonSuccessStatusCode()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                CreateClientSideStatus(StatusCode.InvalidArgument),
                CreateNullResponse(),
                new Metadata());

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.InvalidArgument);
        }

        [Test]
        public void ClientStreaming_MoreRequests_Success()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);

            var writeTask = requestStream.WriteAsync("request1");
            fakeCall.SendCompletionCallback.OnSendCompletion(true);
            writeTask.Wait();

            var writeTask2 = requestStream.WriteAsync("request2");
            fakeCall.SendCompletionCallback.OnSendCompletion(true);
            writeTask2.Wait();

            var completeTask = requestStream.CompleteAsync();
            fakeCall.SendCompletionCallback.OnSendCompletion(true);
            completeTask.Wait();

            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseSuccess(asyncCall, fakeCall, resultTask);
        }

        [Test]
        public void ClientStreaming_WriteFailureThrowsRpcException()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var writeTask = requestStream.WriteAsync("request1");
            fakeCall.SendCompletionCallback.OnSendCompletion(false);

            // The write will wait for call to finish to receive the status code.
            Assert.IsFalse(writeTask.IsCompleted);

            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                CreateClientSideStatus(StatusCode.Internal),
                CreateNullResponse(),
                new Metadata());

            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(StatusCode.Internal, ex.Status.StatusCode);

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.Internal);
        }

        [Test]
        public void ClientStreaming_WriteFailureThrowsRpcException2()
        {
            // same as above, but the final status arrives BEFORE the failed send completion
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var writeTask = requestStream.WriteAsync("request1");

            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                CreateClientSideStatus(StatusCode.Internal),
                CreateNullResponse(),
                new Metadata());

            fakeCall.SendCompletionCallback.OnSendCompletion(false);

            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(StatusCode.Internal, ex.Status.StatusCode);

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.Internal);
        }

        [Test]
        public void ClientStreaming_WriteFailureThrowsRpcException3()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var writeTask = requestStream.WriteAsync("request1");
            fakeCall.SendCompletionCallback.OnSendCompletion(false);

            // Until the delayed write completion has been triggered,
            // we still act as if there was an active write.
            Assert.Throws(typeof(InvalidOperationException), () => requestStream.WriteAsync("request2"));

            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                CreateClientSideStatus(StatusCode.Internal),
                CreateNullResponse(),
                new Metadata());

            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(StatusCode.Internal, ex.Status.StatusCode);

            // Following attempts to write keep delivering the same status
            var ex2 = Assert.ThrowsAsync<RpcException>(async () => await requestStream.WriteAsync("after call has finished"));
            Assert.AreEqual(StatusCode.Internal, ex2.Status.StatusCode);

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.Internal);
        }

        [Test]
        public void ClientStreaming_WriteAfterReceivingStatusThrowsRpcException()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseSuccess(asyncCall, fakeCall, resultTask);

            var writeTask = requestStream.WriteAsync("request1");
            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(Status.DefaultSuccess, ex.Status);
        }

        [Test]
        public void ClientStreaming_WriteAfterReceivingStatusThrowsRpcException2()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(new Status(StatusCode.OutOfRange, ""), new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.OutOfRange);

            var writeTask = requestStream.WriteAsync("request1");
            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(StatusCode.OutOfRange, ex.Status.StatusCode);
        }

        [Test]
        public void ClientStreaming_WriteAfterCompleteThrowsInvalidOperationException()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            requestStream.CompleteAsync();

            Assert.Throws(typeof(InvalidOperationException), () => requestStream.WriteAsync("request1"));

            fakeCall.SendCompletionCallback.OnSendCompletion(true);
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseSuccess(asyncCall, fakeCall, resultTask);
        }

        [Test]
        public void ClientStreaming_CompleteAfterReceivingStatusSucceeds()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()),
                CreateResponsePayload(),
                new Metadata());

            AssertUnaryResponseSuccess(asyncCall, fakeCall, resultTask);

            Assert.DoesNotThrowAsync(async () => await requestStream.CompleteAsync());
        }

        [Test]
        public void ClientStreaming_WriteAfterCancellationRequestThrowsTaskCanceledException()
        {
            var resultTask = asyncCall.ClientStreamingCallAsync();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            asyncCall.Cancel();
            Assert.IsTrue(fakeCall.IsCancelled);

            var writeTask = requestStream.WriteAsync("request1");
            Assert.ThrowsAsync(typeof(TaskCanceledException), async () => await writeTask);

            fakeCall.UnaryResponseClientCallback.OnUnaryResponseClient(true,
                CreateClientSideStatus(StatusCode.Cancelled),
                CreateNullResponse(),
                new Metadata());

            AssertUnaryResponseError(asyncCall, fakeCall, resultTask, StatusCode.Cancelled);
        }

        [Test]
        public void ClientStreaming_StartCallFailureDoesntLeakResources()
        {
            fakeCall.MakeStartCallFail();
            Assert.Throws(typeof(InvalidOperationException), () => asyncCall.ClientStreamingCallAsync());
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        [Test]
        public void ServerStreaming_StreamingSendNotAllowed()
        {
            asyncCall.StartServerStreamingCall("request1");
            Assert.Throws(typeof(InvalidOperationException),
                () => asyncCall.SendMessageAsync("abc", new WriteFlags()));
        }

        [Test]
        public void ServerStreaming_NoResponse_Success1()
        {
            asyncCall.StartServerStreamingCall("request1");
            var responseStream = new ClientResponseStream<string, string>(asyncCall);
            var readTask = responseStream.MoveNext();

            fakeCall.ReceivedResponseHeadersCallback.OnReceivedResponseHeaders(true, new Metadata());
            Assert.AreEqual(0, asyncCall.ResponseHeadersAsync.Result.Count);

            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()));

            AssertStreamingResponseSuccess(asyncCall, fakeCall, readTask);
        }

        [Test]
        public void ServerStreaming_NoResponse_Success2()
        {
            asyncCall.StartServerStreamingCall("request1");
            var responseStream = new ClientResponseStream<string, string>(asyncCall);
            var readTask = responseStream.MoveNext();

            // try alternative order of completions
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()));
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());

            AssertStreamingResponseSuccess(asyncCall, fakeCall, readTask);
        }

        [Test]
        public void ServerStreaming_NoResponse_ReadFailure()
        {
            asyncCall.StartServerStreamingCall("request1");
            var responseStream = new ClientResponseStream<string, string>(asyncCall);
            var readTask = responseStream.MoveNext();

            fakeCall.ReceivedMessageCallback.OnReceivedMessage(false, CreateNullResponse());
            // after a failed read, we rely on C core to deliver appropriate status code.
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                CreateClientSideStatus(StatusCode.Internal));

            AssertStreamingResponseError(asyncCall, fakeCall, readTask, StatusCode.Internal);
        }

        [Test]
        public void ServerStreaming_MoreResponses_Success()
        {
            asyncCall.StartServerStreamingCall("request1");
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var readTask1 = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateResponsePayload());
            Assert.IsTrue(readTask1.Result);
            Assert.AreEqual("response1", responseStream.Current);

            var readTask2 = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateResponsePayload());
            Assert.IsTrue(readTask2.Result);
            Assert.AreEqual("response1", responseStream.Current);

            var readTask3 = responseStream.MoveNext();
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()));
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());

            AssertStreamingResponseSuccess(asyncCall, fakeCall, readTask3);
        }

        [Test]
        public void ServerStreaming_RequestSerializationExceptionDoesntLeakResources()
        {
            string nullRequest = null;  // will throw when serializing
            Assert.Throws(typeof(ArgumentNullException), () => asyncCall.StartServerStreamingCall(nullRequest));
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);

            var responseStream = new ClientResponseStream<string, string>(asyncCall);
            var readTask = responseStream.MoveNext();
        }

        [Test]
        public void ServerStreaming_StartCallFailureDoesntLeakResources()
        {
            fakeCall.MakeStartCallFail();
            Assert.Throws(typeof(InvalidOperationException), () => asyncCall.StartServerStreamingCall("request1"));
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        [Test]
        public void DuplexStreaming_NoRequestNoResponse_Success()
        {
            asyncCall.StartDuplexStreamingCall();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var writeTask1 = requestStream.CompleteAsync();
            fakeCall.SendCompletionCallback.OnSendCompletion(true);
            Assert.DoesNotThrowAsync(async () => await writeTask1);

            var readTask = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()));

            AssertStreamingResponseSuccess(asyncCall, fakeCall, readTask);
        }

        [Test]
        public void DuplexStreaming_WriteAfterReceivingStatusThrowsRpcException()
        {
            asyncCall.StartDuplexStreamingCall();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var readTask = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()));
            AssertStreamingResponseSuccess(asyncCall, fakeCall, readTask);

            var writeTask = requestStream.WriteAsync("request1");
            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(Status.DefaultSuccess, ex.Status);
        }

        [Test]
        public void DuplexStreaming_CompleteAfterReceivingStatusSuceeds()
        {
            asyncCall.StartDuplexStreamingCall();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var readTask = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                new ClientSideStatus(Status.DefaultSuccess, new Metadata()));
            AssertStreamingResponseSuccess(asyncCall, fakeCall, readTask);

            Assert.DoesNotThrowAsync(async () => await requestStream.CompleteAsync());
        }

        [Test]
        public void DuplexStreaming_WriteFailureThrowsRpcException()
        {
            asyncCall.StartDuplexStreamingCall();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var writeTask = requestStream.WriteAsync("request1");
            fakeCall.SendCompletionCallback.OnSendCompletion(false);

            // The write will wait for call to finish to receive the status code.
            Assert.IsFalse(writeTask.IsCompleted);

            var readTask = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                CreateClientSideStatus(StatusCode.PermissionDenied));

            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(StatusCode.PermissionDenied, ex.Status.StatusCode);

            AssertStreamingResponseError(asyncCall, fakeCall, readTask, StatusCode.PermissionDenied);
        }

        [Test]
        public void DuplexStreaming_WriteFailureThrowsRpcException2()
        {
            // same as above, but the status arrives BEFORE the failed send completion
            asyncCall.StartDuplexStreamingCall();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var writeTask = requestStream.WriteAsync("request1");

            var readTask = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                CreateClientSideStatus(StatusCode.PermissionDenied));

            fakeCall.SendCompletionCallback.OnSendCompletion(false);

            var ex = Assert.ThrowsAsync<RpcException>(async () => await writeTask);
            Assert.AreEqual(StatusCode.PermissionDenied, ex.Status.StatusCode);

            AssertStreamingResponseError(asyncCall, fakeCall, readTask, StatusCode.PermissionDenied);
        }

        [Test]
        public void DuplexStreaming_WriteAfterCancellationRequestThrowsTaskCanceledException()
        {
            asyncCall.StartDuplexStreamingCall();
            var requestStream = new ClientRequestStream<string, string>(asyncCall);
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            asyncCall.Cancel();
            Assert.IsTrue(fakeCall.IsCancelled);

            var writeTask = requestStream.WriteAsync("request1");
            Assert.ThrowsAsync(typeof(TaskCanceledException), async () => await writeTask);

            var readTask = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                CreateClientSideStatus(StatusCode.Cancelled));

            AssertStreamingResponseError(asyncCall, fakeCall, readTask, StatusCode.Cancelled);
        }

        [Test]
        public void DuplexStreaming_ReadAfterCancellationRequestCanSucceed()
        {
            asyncCall.StartDuplexStreamingCall();
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            asyncCall.Cancel();
            Assert.IsTrue(fakeCall.IsCancelled);

            var readTask1 = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateResponsePayload());
            Assert.IsTrue(readTask1.Result);
            Assert.AreEqual("response1", responseStream.Current);

            var readTask2 = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                CreateClientSideStatus(StatusCode.Cancelled));

            AssertStreamingResponseError(asyncCall, fakeCall, readTask2, StatusCode.Cancelled);
        }

        [Test]
        public void DuplexStreaming_ReadStartedBeforeCancellationRequestCanSucceed()
        {
            asyncCall.StartDuplexStreamingCall();
            var responseStream = new ClientResponseStream<string, string>(asyncCall);

            var readTask1 = responseStream.MoveNext();  // initiate the read before cancel request
            asyncCall.Cancel();
            Assert.IsTrue(fakeCall.IsCancelled);

            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateResponsePayload());
            Assert.IsTrue(readTask1.Result);
            Assert.AreEqual("response1", responseStream.Current);

            var readTask2 = responseStream.MoveNext();
            fakeCall.ReceivedMessageCallback.OnReceivedMessage(true, CreateNullResponse());
            fakeCall.ReceivedStatusOnClientCallback.OnReceivedStatusOnClient(true,
                CreateClientSideStatus(StatusCode.Cancelled));

            AssertStreamingResponseError(asyncCall, fakeCall, readTask2, StatusCode.Cancelled);
        }

        [Test]
        public void DuplexStreaming_StartCallFailureDoesntLeakResources()
        {
            fakeCall.MakeStartCallFail();
            Assert.Throws(typeof(InvalidOperationException), () => asyncCall.StartDuplexStreamingCall());
            Assert.AreEqual(0, channel.GetCallReferenceCount());
            Assert.IsTrue(fakeCall.IsDisposed);
        }

        // Builds a ClientSideStatus with the given code, empty detail and empty trailers.
        ClientSideStatus CreateClientSideStatus(StatusCode statusCode)
        {
            return new ClientSideStatus(new Status(statusCode, ""), new Metadata());
        }

        // Serialized "response1" payload wrapped in a fake buffer reader.
        IBufferReader CreateResponsePayload()
        {
            return fakeBufferReaderManager.CreateSingleSegmentBufferReader(Marshallers.StringMarshaller.Serializer("response1"));
        }

        // Fake buffer reader representing a null (absent) payload.
        IBufferReader CreateNullResponse()
        {
            return fakeBufferReaderManager.CreateNullPayloadBufferReader();
        }

        // Asserts the unary result task completed with "response1", OK status and empty metadata,
        // and that the native call was disposed.
        static void AssertUnaryResponseSuccess(AsyncCall<string, string> asyncCall, FakeNativeCall fakeCall, Task<string> resultTask)
        {
            Assert.IsTrue(resultTask.IsCompleted);
            Assert.IsTrue(fakeCall.IsDisposed);

            Assert.AreEqual(Status.DefaultSuccess, asyncCall.GetStatus());
            Assert.AreEqual(0, asyncCall.ResponseHeadersAsync.Result.Count);
            Assert.AreEqual(0, asyncCall.GetTrailers().Count);
            Assert.AreEqual("response1", resultTask.Result);
        }

        // Asserts the response stream finished cleanly (MoveNext returned false) with OK status.
        static void AssertStreamingResponseSuccess(AsyncCall<string, string> asyncCall, FakeNativeCall fakeCall, Task<bool> moveNextTask)
        {
            Assert.IsTrue(moveNextTask.IsCompleted);
            Assert.IsTrue(fakeCall.IsDisposed);

            Assert.IsFalse(moveNextTask.Result);
            Assert.AreEqual(Status.DefaultSuccess, asyncCall.GetStatus());
            Assert.AreEqual(0, asyncCall.GetTrailers().Count);
        }

        // Asserts the unary result task faulted with an RpcException carrying the expected code.
        static void AssertUnaryResponseError(AsyncCall<string, string> asyncCall, FakeNativeCall fakeCall, Task<string> resultTask, StatusCode expectedStatusCode)
        {
            Assert.IsTrue(resultTask.IsCompleted);
            Assert.IsTrue(fakeCall.IsDisposed);

            Assert.AreEqual(expectedStatusCode, asyncCall.GetStatus().StatusCode);
            var ex = Assert.ThrowsAsync<RpcException>(async () => await resultTask);
            Assert.AreEqual(expectedStatusCode, ex.Status.StatusCode);
            Assert.AreEqual(0, asyncCall.ResponseHeadersAsync.Result.Count);
            Assert.AreEqual(0, asyncCall.GetTrailers().Count);
        }

        // Asserts the response stream faulted with an RpcException carrying the expected code.
        static void AssertStreamingResponseError(AsyncCall<string, string> asyncCall, FakeNativeCall fakeCall, Task<bool> moveNextTask, StatusCode expectedStatusCode)
        {
            Assert.IsTrue(moveNextTask.IsCompleted);
            Assert.IsTrue(fakeCall.IsDisposed);

            var ex = Assert.ThrowsAsync<RpcException>(async () => await moveNextTask);
            Assert.AreEqual(expectedStatusCode, ex.Status.StatusCode);
            Assert.AreEqual(expectedStatusCode, asyncCall.GetStatus().StatusCode);
            Assert.AreEqual(0, asyncCall.GetTrailers().Count);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Apache.Ignite.Core.Tests.Cache { using System; using System.Collections.Generic; using System.Linq; using System.Threading; using System.Threading.Tasks; using System.Transactions; using Apache.Ignite.Core.Cache; using Apache.Ignite.Core.Cache.Configuration; using Apache.Ignite.Core.Impl.Common; using Apache.Ignite.Core.Transactions; using NUnit.Framework; /// <summary> /// Transactional cache tests. /// </summary> public abstract class CacheAbstractTransactionalTest : CacheAbstractTest { /// <summary> /// Simple cache lock test (while <see cref="TestLock"/> is ignored). 
/// </summary> [Test] public void TestLockSimple() { var cache = Cache(); const int key = 7; Action<ICacheLock> checkLock = lck => { using (lck) { Assert.Throws<InvalidOperationException>(lck.Exit); // can't exit if not entered lck.Enter(); Assert.IsTrue(cache.IsLocalLocked(key, true)); Assert.IsTrue(cache.IsLocalLocked(key, false)); lck.Exit(); Assert.IsFalse(cache.IsLocalLocked(key, true)); Assert.IsFalse(cache.IsLocalLocked(key, false)); Assert.IsTrue(lck.TryEnter()); Assert.IsTrue(cache.IsLocalLocked(key, true)); Assert.IsTrue(cache.IsLocalLocked(key, false)); lck.Exit(); } Assert.Throws<ObjectDisposedException>(lck.Enter); // Can't enter disposed lock }; checkLock(cache.Lock(key)); checkLock(cache.LockAll(new[] { key, 1, 2, 3 })); } /// <summary> /// Tests cache locks. /// </summary> [Test] [Ignore("IGNITE-835")] public void TestLock() { var cache = Cache(); const int key = 7; // Lock CheckLock(cache, key, () => cache.Lock(key)); // LockAll CheckLock(cache, key, () => cache.LockAll(new[] { key, 2, 3, 4, 5 })); } /// <summary> /// Internal lock test routine. 
/// </summary> /// <param name="cache">Cache.</param> /// <param name="key">Key.</param> /// <param name="getLock">Function to get the lock.</param> private static void CheckLock(ICache<int, int> cache, int key, Func<ICacheLock> getLock) { var sharedLock = getLock(); using (sharedLock) { Assert.Throws<InvalidOperationException>(() => sharedLock.Exit()); // can't exit if not entered sharedLock.Enter(); try { Assert.IsTrue(cache.IsLocalLocked(key, true)); Assert.IsTrue(cache.IsLocalLocked(key, false)); EnsureCannotLock(getLock, sharedLock); sharedLock.Enter(); try { Assert.IsTrue(cache.IsLocalLocked(key, true)); Assert.IsTrue(cache.IsLocalLocked(key, false)); EnsureCannotLock(getLock, sharedLock); } finally { sharedLock.Exit(); } Assert.IsTrue(cache.IsLocalLocked(key, true)); Assert.IsTrue(cache.IsLocalLocked(key, false)); EnsureCannotLock(getLock, sharedLock); Assert.Throws<SynchronizationLockException>(() => sharedLock.Dispose()); // can't dispose while locked } finally { sharedLock.Exit(); } Assert.IsFalse(cache.IsLocalLocked(key, true)); Assert.IsFalse(cache.IsLocalLocked(key, false)); var innerTask = new Task(() => { Assert.IsTrue(sharedLock.TryEnter()); sharedLock.Exit(); using (var otherLock = getLock()) { Assert.IsTrue(otherLock.TryEnter()); otherLock.Exit(); } }); innerTask.Start(); innerTask.Wait(); } Assert.IsFalse(cache.IsLocalLocked(key, true)); Assert.IsFalse(cache.IsLocalLocked(key, false)); var outerTask = new Task(() => { using (var otherLock = getLock()) { Assert.IsTrue(otherLock.TryEnter()); otherLock.Exit(); } }); outerTask.Start(); outerTask.Wait(); Assert.Throws<ObjectDisposedException>(() => sharedLock.Enter()); // Can't enter disposed lock } /// <summary> /// Ensure that lock cannot be obtained by other threads. 
/// </summary> /// <param name="getLock">Get lock function.</param> /// <param name="sharedLock">Shared lock.</param> private static void EnsureCannotLock(Func<ICacheLock> getLock, ICacheLock sharedLock) { var task = new Task(() => { Assert.IsFalse(sharedLock.TryEnter()); Assert.IsFalse(sharedLock.TryEnter(TimeSpan.FromMilliseconds(100))); using (var otherLock = getLock()) { Assert.IsFalse(otherLock.TryEnter()); Assert.IsFalse(otherLock.TryEnter(TimeSpan.FromMilliseconds(100))); } }); task.Start(); task.Wait(); } /// <summary> /// Tests that commit applies cache changes. /// </summary> [Test] public void TestTxCommit([Values(true, false)] bool async) { var cache = Cache(); Assert.IsNull(Transactions.Tx); using (var tx = Transactions.TxStart()) { cache.Put(1, 1); cache.Put(2, 2); if (async) { var task = tx.CommitAsync(); task.Wait(); Assert.IsTrue(task.IsCompleted); } else tx.Commit(); } Assert.AreEqual(1, cache.Get(1)); Assert.AreEqual(2, cache.Get(2)); Assert.IsNull(Transactions.Tx); } /// <summary> /// Tests that rollback reverts cache changes. /// </summary> [Test] public void TestTxRollback() { var cache = Cache(); cache.Put(1, 1); cache.Put(2, 2); Assert.IsNull(Transactions.Tx); using (var tx = Transactions.TxStart()) { cache.Put(1, 10); cache.Put(2, 20); tx.Rollback(); } Assert.AreEqual(1, cache.Get(1)); Assert.AreEqual(2, cache.Get(2)); Assert.IsNull(Transactions.Tx); } /// <summary> /// Tests that Dispose without Commit reverts changes. /// </summary> [Test] public void TestTxClose() { var cache = Cache(); cache.Put(1, 1); cache.Put(2, 2); Assert.IsNull(Transactions.Tx); using (Transactions.TxStart()) { cache.Put(1, 10); cache.Put(2, 20); } Assert.AreEqual(1, cache.Get(1)); Assert.AreEqual(2, cache.Get(2)); Assert.IsNull(Transactions.Tx); } /// <summary> /// Tests all concurrency and isolation modes with and without timeout. 
        /// </summary>
        [Test]
        public void TestTxAllModes([Values(true, false)] bool withTimeout)
        {
            var cache = Cache();

            // Distinct value per (concurrency, isolation) pair so a stale commit is detectable.
            int cntr = 0;

            foreach (TransactionConcurrency concurrency in Enum.GetValues(typeof(TransactionConcurrency)))
            {
                foreach (TransactionIsolation isolation in Enum.GetValues(typeof(TransactionIsolation)))
                {
                    Console.WriteLine("Test tx [concurrency=" + concurrency + ", isolation=" + isolation + "]");

                    Assert.IsNull(Transactions.Tx);

                    using (var tx = withTimeout
                        ? Transactions.TxStart(concurrency, isolation, TimeSpan.FromMilliseconds(1100), 10)
                        : Transactions.TxStart(concurrency, isolation))
                    {
                        // Requested modes must be reflected on the started transaction.
                        Assert.AreEqual(concurrency, tx.Concurrency);
                        Assert.AreEqual(isolation, tx.Isolation);

                        if (withTimeout)
                            Assert.AreEqual(1100, tx.Timeout.TotalMilliseconds);

                        cache.Put(1, cntr);

                        tx.Commit();
                    }

                    Assert.IsNull(Transactions.Tx);

                    Assert.AreEqual(cntr, cache.Get(1));

                    cntr++;
                }
            }
        }

        /// <summary>
        /// Tests that transaction properties are applied and propagated properly.
        /// </summary>
        [Test]
        public void TestTxAttributes()
        {
            // 1) Optimistic / RepeatableRead, committed.
            ITransaction tx = Transactions.TxStart(TransactionConcurrency.Optimistic,
                TransactionIsolation.RepeatableRead, TimeSpan.FromMilliseconds(2500), 100);

            Assert.IsFalse(tx.IsRollbackOnly);
            Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
            Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
            Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
            Assert.AreEqual(TransactionState.Active, tx.State);
            Assert.IsTrue(tx.StartTime.Ticks > 0);
            Assert.AreEqual(tx.NodeId, GetIgnite(0).GetCluster().GetLocalNode().Id);

            DateTime startTime1 = tx.StartTime;

            tx.Commit();

            // Attributes survive commit unchanged.
            Assert.IsFalse(tx.IsRollbackOnly);
            Assert.AreEqual(TransactionState.Committed, tx.State);
            Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
            Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
            Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
            Assert.AreEqual(startTime1, tx.StartTime);

            // Sleep so the next transaction gets a strictly later start time.
            Thread.Sleep(100);

            // 2) Pessimistic / ReadCommitted, rolled back.
            tx = Transactions.TxStart(TransactionConcurrency.Pessimistic, TransactionIsolation.ReadCommitted,
                TimeSpan.FromMilliseconds(3500), 200);

            Assert.IsFalse(tx.IsRollbackOnly);
            Assert.AreEqual(TransactionConcurrency.Pessimistic, tx.Concurrency);
            Assert.AreEqual(TransactionIsolation.ReadCommitted, tx.Isolation);
            Assert.AreEqual(3500, tx.Timeout.TotalMilliseconds);
            Assert.AreEqual(TransactionState.Active, tx.State);
            Assert.IsTrue(tx.StartTime.Ticks > 0);
            Assert.IsTrue(tx.StartTime > startTime1);

            DateTime startTime2 = tx.StartTime;

            tx.Rollback();

            // Attributes survive rollback unchanged.
            Assert.AreEqual(TransactionState.RolledBack, tx.State);
            Assert.AreEqual(TransactionConcurrency.Pessimistic, tx.Concurrency);
            Assert.AreEqual(TransactionIsolation.ReadCommitted, tx.Isolation);
            Assert.AreEqual(3500, tx.Timeout.TotalMilliseconds);
            Assert.AreEqual(startTime2, tx.StartTime);

            Thread.Sleep(100);

            // 3) Optimistic / RepeatableRead again, committed.
            tx = Transactions.TxStart(TransactionConcurrency.Optimistic,
                TransactionIsolation.RepeatableRead, TimeSpan.FromMilliseconds(2500), 100);

            Assert.IsFalse(tx.IsRollbackOnly);
            Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
            Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
            Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
            Assert.AreEqual(TransactionState.Active, tx.State);
            Assert.IsTrue(tx.StartTime > startTime2);

            DateTime startTime3 = tx.StartTime;

            tx.Commit();

            Assert.IsFalse(tx.IsRollbackOnly);
            Assert.AreEqual(TransactionState.Committed, tx.State);
            Assert.AreEqual(TransactionConcurrency.Optimistic, tx.Concurrency);
            Assert.AreEqual(TransactionIsolation.RepeatableRead, tx.Isolation);
            Assert.AreEqual(2500, tx.Timeout.TotalMilliseconds);
            Assert.AreEqual(startTime3, tx.StartTime);

            // Check defaults.
            tx = Transactions.TxStart();

            Assert.AreEqual(Transactions.DefaultTransactionConcurrency, tx.Concurrency);
            Assert.AreEqual(Transactions.DefaultTransactionIsolation, tx.Isolation);
            Assert.AreEqual(Transactions.DefaultTimeout, tx.Timeout);

            tx.Commit();
        }

        /// <summary>
        /// Tests <see cref="ITransaction.IsRollbackOnly"/> flag.
        /// </summary>
        [Test]
        public void TestTxRollbackOnly()
        {
            var cache = Cache();

            cache.Put(1, 1);
            cache.Put(2, 2);

            var tx = Transactions.TxStart();

            cache.Put(1, 10);
            cache.Put(2, 20);

            Assert.IsFalse(tx.IsRollbackOnly);

            tx.SetRollbackonly();

            Assert.IsTrue(tx.IsRollbackOnly);
            Assert.AreEqual(TransactionState.MarkedRollback, tx.State);

            // A rollback-only transaction refuses to commit.
            // NOTE(review): 'ex' is unused — consider asserting on its message.
            var ex = Assert.Throws<TransactionRollbackException>(() => tx.Commit());

            tx.Dispose();

            Assert.AreEqual(TransactionState.RolledBack, tx.State);
            Assert.IsTrue(tx.IsRollbackOnly);

            // The in-tx writes were discarded.
            Assert.AreEqual(1, cache.Get(1));
            Assert.AreEqual(2, cache.Get(2));

            Assert.IsNull(Transactions.Tx);
        }

        /// <summary>
        /// Tests transaction metrics.
        /// </summary>
        [Test]
        public void TestTxMetrics()
        {
            var cache = Cache();

            // One-second slack against clock granularity between client and metrics.
            var startTime = DateTime.UtcNow.AddSeconds(-1);

            Transactions.ResetMetrics();

            var metrics = Transactions.GetMetrics();

            Assert.AreEqual(0, metrics.TxCommits);
            Assert.AreEqual(0, metrics.TxRollbacks);

            // Dispose without Commit counts as a rollback.
            using (Transactions.TxStart())
            {
                cache.Put(1, 1);
            }

            using (var tx = Transactions.TxStart())
            {
                cache.Put(1, 1);
                tx.Commit();
            }

            metrics = Transactions.GetMetrics();

            Assert.AreEqual(1, metrics.TxCommits);
            Assert.AreEqual(1, metrics.TxRollbacks);

            Assert.LessOrEqual(startTime, metrics.CommitTime);
            Assert.LessOrEqual(startTime, metrics.RollbackTime);

            Assert.GreaterOrEqual(DateTime.UtcNow, metrics.CommitTime);
            Assert.GreaterOrEqual(DateTime.UtcNow, metrics.RollbackTime);
        }

        /// <summary>
        /// Tests transaction state transitions.
        /// </summary>
        [Test]
        public void TestTxStateAndExceptions()
        {
            var tx = Transactions.TxStart();

            Assert.AreEqual(TransactionState.Active, tx.State);
            Assert.AreEqual(Thread.CurrentThread.ManagedThreadId, tx.ThreadId);

            // Metadata round-trip: add, read, remove.
            tx.AddMeta("myMeta", 42);
            Assert.AreEqual(42, tx.Meta<int>("myMeta"));
            Assert.AreEqual(42, tx.RemoveMeta<int>("myMeta"));

            tx.RollbackAsync().Wait();

            Assert.AreEqual(TransactionState.RolledBack, tx.State);

            // Commit after rollback is an illegal transition.
            Assert.Throws<InvalidOperationException>(() => tx.Commit());

            tx = Transactions.TxStart();

            Assert.AreEqual(TransactionState.Active, tx.State);

            tx.CommitAsync().Wait();

            Assert.AreEqual(TransactionState.Committed, tx.State);

            var task = tx.RollbackAsync(); // Illegal, but should not fail here; will fail in task

            Assert.Throws<AggregateException>(() => task.Wait());
        }

        /// <summary>
        /// Tests the transaction deadlock detection.
        /// </summary>
        [Test]
        public void TestTxDeadlockDetection()
        {
            // Deadlock requires contention across partitions; skip for local cache.
            if (LocalCache())
            {
                return;
            }

            var cache = Cache();

            var keys0 = Enumerable.Range(1, 100).ToArray();

            cache.PutAll(keys0.ToDictionary(x => x, x => x));

            // Both workers must be inside their transactions before either commits.
            var barrier = new Barrier(2);

            Action<int[]> increment = keys =>
            {
                using (var tx = Transactions.TxStart(TransactionConcurrency.Pessimistic,
                    TransactionIsolation.RepeatableRead, TimeSpan.FromSeconds(0.5), 0))
                {
                    foreach (var key in keys)
                        cache[key]++;

                    barrier.SignalAndWait(500);

                    tx.Commit();
                }
            };

            // Increment keys within tx in different order to cause a deadlock.
            var aex = Assert.Throws<AggregateException>(() =>
                Task.WaitAll(new[]
                    {
                        TaskRunner.Run(() => increment(keys0)),
                        TaskRunner.Run(() => increment(keys0.Reverse().ToArray()))
                    },
                    TimeSpan.FromSeconds(40)));

            Assert.AreEqual(2, aex.InnerExceptions.Count);

            // At least one of the two failures must be the detected deadlock.
            var deadlockEx = aex.InnerExceptions.OfType<TransactionDeadlockException>().First();
            Assert.IsTrue(deadlockEx.Message.Trim().StartsWith("Deadlock detected:"), deadlockEx.Message);
        }

        /// <summary>
        /// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>.
        /// </summary>
        [Test]
        public void TestTransactionScopeSingleCache()
        {
            var cache = Cache();

            cache[1] = 1;
            cache[2] = 2;

            // Commit.
            using (var ts = new TransactionScope())
            {
                cache[1] = 10;
                cache[2] = 20;

                // Cache ops inside the scope enlist an Ignite transaction automatically.
                Assert.IsNotNull(cache.Ignite.GetTransactions().Tx);

                ts.Complete();
            }

            Assert.AreEqual(10, cache[1]);
            Assert.AreEqual(20, cache[2]);

            // Rollback.
            using (new TransactionScope())
            {
                cache[1] = 100;
                cache[2] = 200;
            }

            // Scope disposed without Complete(): writes reverted.
            Assert.AreEqual(10, cache[1]);
            Assert.AreEqual(20, cache[2]);
        }

        /// <summary>
        /// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>
        /// with multiple participating caches.
        /// </summary>
        [Test]
        public void TestTransactionScopeMultiCache([Values(true, false)] bool async)
        {
            var cache1 = Cache();

            var cache2 = GetIgnite(0).GetOrCreateCache<int, int>(new CacheConfiguration(cache1.Name + "_")
            {
                AtomicityMode = CacheAtomicityMode.Transactional
            });

            cache1[1] = 1;
            cache2[1] = 2;

            // Commit.
            using (var ts = new TransactionScope())
            {
                if (async)
                {
                    // NOTE(review): PutAsync results are not awaited before Complete() —
                    // presumably the enlisted tx completion observes them; confirm this is intended.
                    cache1.PutAsync(1, 10);
                    cache2.PutAsync(1, 20);
                }
                else
                {
                    cache1.Put(1, 10);
                    cache2.Put(1, 20);
                }

                ts.Complete();
            }

            Assert.AreEqual(10, cache1[1]);
            Assert.AreEqual(20, cache2[1]);

            // Rollback.
            using (new TransactionScope())
            {
                if (async)
                {
                    cache1.PutAsync(1, 100);
                    cache2.PutAsync(1, 200);
                }
                else
                {
                    cache1.Put(1, 100);
                    cache2.Put(1, 200);
                }
            }

            // Both caches participated in the same rolled-back transaction.
            Assert.AreEqual(10, cache1[1]);
            Assert.AreEqual(20, cache2[1]);
        }

        /// <summary>
        /// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/>
        /// when Ignite tx is started manually.
        /// </summary>
        [Test]
        public void TestTransactionScopeWithManualIgniteTx()
        {
            var cache = Cache();
            var transactions = cache.Ignite.GetTransactions();

            cache[1] = 1;

            // When Ignite tx is started manually, it won't be enlisted in TransactionScope.
            using (var tx = transactions.TxStart())
            {
                using (new TransactionScope())
                {
                    cache[1] = 2;
                } // Revert transaction scope.

                tx.Commit(); // Commit manual tx.
            }

            // The manual tx commit wins: the write survives the discarded scope.
            Assert.AreEqual(2, cache[1]);
        }

        /// <summary>
        /// Test Ignite transaction with <see cref="TransactionScopeOption.Suppress"/> option.
        /// </summary>
        [Test]
        public void TestSuppressedTransactionScope()
        {
            var cache = Cache();

            cache[1] = 1;

            using (new TransactionScope(TransactionScopeOption.Suppress))
            {
                cache[1] = 2;
            }

            // Even though transaction is not completed, the value is updated, because tx is suppressed.
            Assert.AreEqual(2, cache[1]);
        }

        /// <summary>
        /// Test Ignite transaction enlistment in ambient <see cref="TransactionScope"/> with nested scopes.
        /// </summary>
        [Test]
        public void TestNestedTransactionScope()
        {
            var cache = Cache();

            cache[1] = 1;

            foreach (var option in new[] {TransactionScopeOption.Required, TransactionScopeOption.RequiresNew})
            {
                // Commit.
                using (var ts1 = new TransactionScope())
                {
                    using (var ts2 = new TransactionScope(option))
                    {
                        cache[1] = 2;
                        ts2.Complete();
                    }

                    cache[1] = 3;
                    ts1.Complete();
                }

                Assert.AreEqual(3, cache[1]);

                // Rollback.
                using (new TransactionScope())
                {
                    using (new TransactionScope(option))
                        cache[1] = 4;

                    cache[1] = 5;
                }

                // In case with Required option there is a single tx
                // that gets aborted, second put executes outside the tx.
                Assert.AreEqual(option == TransactionScopeOption.Required ? 5 : 3, cache[1], option.ToString());
            }
        }

        /// <summary>
        /// Test that ambient <see cref="TransactionScope"/> options propagate to Ignite transaction.
/// </summary> [Test] public void TestTransactionScopeOptions() { var cache = Cache(); var transactions = cache.Ignite.GetTransactions(); var modes = new[] { Tuple.Create(IsolationLevel.Serializable, TransactionIsolation.Serializable), Tuple.Create(IsolationLevel.RepeatableRead, TransactionIsolation.RepeatableRead), Tuple.Create(IsolationLevel.ReadCommitted, TransactionIsolation.ReadCommitted), Tuple.Create(IsolationLevel.ReadUncommitted, TransactionIsolation.ReadCommitted), Tuple.Create(IsolationLevel.Snapshot, TransactionIsolation.ReadCommitted), Tuple.Create(IsolationLevel.Chaos, TransactionIsolation.ReadCommitted), }; foreach (var mode in modes) { using (new TransactionScope(TransactionScopeOption.Required, new TransactionOptions { IsolationLevel = mode.Item1 })) { cache[1] = 1; var tx = transactions.Tx; Assert.AreEqual(mode.Item2, tx.Isolation); Assert.AreEqual(transactions.DefaultTransactionConcurrency, tx.Concurrency); } } } /// <summary> /// Tests all transactional operations with <see cref="TransactionScope"/>. 
        /// </summary>
        [Test]
        public void TestTransactionScopeAllOperations()
        {
            // Repeat to shake out ordering/races between scope completion and cache ops.
            for (var i = 0; i < 10; i++)
            {
                // NOTE(review): the *Async variants below are fire-and-forget inside CheckTxOp's
                // ambient scope — presumably completion is observed via the enlisted tx; confirm.
                CheckTxOp((cache, key) => cache.Put(key, -5));
                CheckTxOp((cache, key) => cache.PutAsync(key, -5));

                CheckTxOp((cache, key) => cache.PutAll(new Dictionary<int, int> {{key, -7}}));
                CheckTxOp((cache, key) => cache.PutAllAsync(new Dictionary<int, int> {{key, -7}}));

                CheckTxOp((cache, key) =>
                {
                    cache.Remove(key);
                    cache.PutIfAbsent(key, -10);
                });
                CheckTxOp((cache, key) =>
                {
                    cache.Remove(key);
                    cache.PutIfAbsentAsync(key, -10);
                });

                CheckTxOp((cache, key) => cache.GetAndPut(key, -9));
                CheckTxOp((cache, key) => cache.GetAndPutAsync(key, -9));

                CheckTxOp((cache, key) =>
                {
                    cache.Remove(key);
                    cache.GetAndPutIfAbsent(key, -10);
                });
                CheckTxOp((cache, key) =>
                {
                    cache.Remove(key);
                    cache.GetAndPutIfAbsentAsync(key, -10);
                });

                CheckTxOp((cache, key) => cache.GetAndRemove(key));
                CheckTxOp((cache, key) => cache.GetAndRemoveAsync(key));

                CheckTxOp((cache, key) => cache.GetAndReplace(key, -11));
                CheckTxOp((cache, key) => cache.GetAndReplaceAsync(key, -11));

                CheckTxOp((cache, key) => cache.Invoke(key, new AddProcessor(), 1));
                CheckTxOp((cache, key) => cache.InvokeAsync(key, new AddProcessor(), 1));

                CheckTxOp((cache, key) => cache.InvokeAll(new[] {key}, new AddProcessor(), 1));
                CheckTxOp((cache, key) => cache.InvokeAllAsync(new[] {key}, new AddProcessor(), 1));

                CheckTxOp((cache, key) => cache.Remove(key));
                CheckTxOp((cache, key) => cache.RemoveAsync(key));

                CheckTxOp((cache, key) => cache.RemoveAll(new[] {key}));
                CheckTxOp((cache, key) => cache.RemoveAllAsync(new[] {key}));

                CheckTxOp((cache, key) => cache.Replace(key, 100));
                CheckTxOp((cache, key) => cache.ReplaceAsync(key, 100));

                CheckTxOp((cache, key) => cache.Replace(key, cache[key], 100));
                CheckTxOp((cache, key) => cache.ReplaceAsync(key, cache[key], 100));
            }
        }

        /// <summary>
        /// Checks that cache operation behaves transactionally.
        /// </summary>
        /// <param name="act">Cache mutation to verify under every ambient isolation level.</param>
        private void CheckTxOp(Action<ICache<int, int>, int> act)
        {
            var isolationLevels = new[]
            {
                IsolationLevel.Serializable, IsolationLevel.RepeatableRead, IsolationLevel.ReadCommitted,
                IsolationLevel.ReadUncommitted, IsolationLevel.Snapshot, IsolationLevel.Chaos
            };

            foreach (var isolationLevel in isolationLevels)
            {
                var txOpts = new TransactionOptions {IsolationLevel = isolationLevel};
                const TransactionScopeOption scope = TransactionScopeOption.Required;

                var cache = Cache();

                cache[1] = 1;
                cache[2] = 2;

                // Rollback.
                using (new TransactionScope(scope, txOpts))
                {
                    act(cache, 1);

                    Assert.IsNotNull(cache.Ignite.GetTransactions().Tx, "Transaction has not started.");
                }

                // Scope not completed: original values intact.
                Assert.AreEqual(1, cache[1]);
                Assert.AreEqual(2, cache[2]);

                using (new TransactionScope(scope, txOpts))
                {
                    act(cache, 1);
                    act(cache, 2);
                }

                Assert.AreEqual(1, cache[1]);
                Assert.AreEqual(2, cache[2]);

                // Commit.
                using (var ts = new TransactionScope(scope, txOpts))
                {
                    act(cache, 1);
                    ts.Complete();
                }

                // Committed op must have changed or removed the entry.
                Assert.IsTrue(!cache.ContainsKey(1) || cache[1] != 1);
                Assert.AreEqual(2, cache[2]);

                using (var ts = new TransactionScope(scope, txOpts))
                {
                    act(cache, 1);
                    act(cache, 2);
                    ts.Complete();
                }

                Assert.IsTrue(!cache.ContainsKey(1) || cache[1] != 1);
                Assert.IsTrue(!cache.ContainsKey(2) || cache[2] != 2);
            }
        }

        // Entry processor that adds 'arg' to the entry value and returns 'arg'.
        [Serializable]
        private class AddProcessor : ICacheEntryProcessor<int, int, int, int>
        {
            public int Process(IMutableCacheEntry<int, int> entry, int arg)
            {
                entry.Value += arg;
                return arg;
            }
        }
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*********************************************************************/

using System;
using System.IO;
using System.Diagnostics;
using System.Globalization;

namespace Axiom.SceneManagers.Multiverse
{
    /// <summary>
    /// Describes an L3DT terrain mosaic: overall dimensions, tiling layout,
    /// height range and per-tile availability. Can be created from scratch,
    /// copied from another description, or parsed from a .mmf master file.
    /// </summary>
    public class MosaicDescription
    {
        public string MosaicName { get; private set; }
        public string MosaicType { get; private set; }
        public string FileExt { get; private set; }
        public int SizeXPixels { get; private set; }
        public int SizeZPixels { get; private set; }
        public int SizeXTiles { get; private set; }
        public int SizeZTiles { get; private set; }
        public int TileSizeSamples { get; private set; }
        public int MetersPerSample { get; private set; }
        // Shift/mask derived from MetersPerSample (which must be a power of 2).
        public int MPSShift { get; private set; }
        public int MPSMask { get; private set; }
        public bool WrapFlag { get; private set; }
        public bool UnifiedScale { get; private set; }
        public float GlobalMaxHeightMeters { get; private set; }
        public float GlobalMinHeightMeters { get; private set; }
        // True when in-memory state differs from what was last loaded/saved.
        // NOTE(review): Save() does not reset this flag — confirm whether it should.
        public bool Modified { get; private set; }
        public string DefaultTerrainSaveDirectory { get; set; }

        // availableTiles[x, y]: whether the tile at that grid position is usable.
        private bool[,] availableTiles;

        /// <summary>
        /// Creates a copy of <paramref name="src"/> under a new name.
        /// </summary>
        public MosaicDescription(string newName, MosaicDescription src)
            : this(newName, src.MosaicType, src.DefaultTerrainSaveDirectory, src.FileExt,
                src.SizeXPixels, src.SizeZPixels, src.SizeXTiles, src.SizeZTiles,
                src.TileSizeSamples, src.MetersPerSample, src.WrapFlag, src.UnifiedScale,
                src.GlobalMinHeightMeters, src.GlobalMaxHeightMeters, 0, false)
        {
        }

        /// <summary>
        /// Creates a brand-new mosaic description; all tiles are marked available.
        /// </summary>
        public MosaicDescription(string mosaicName, string mosaicType, string defaultTerrainSaveDirectory,
            string fileExt, int sizeXPixels, int sizeZPixels, int sizeXTiles, int sizeZTiles,
            int tileSizeSamples, int metersPerSample, bool wrapFlag, bool unifiedScale,
            float globalMinHeightMeters, float globalMaxHeightMeters, int tileNum, bool tileOK)
        {
            Modified = true; // We're creating a brand new mosaic
            MosaicName = mosaicName;
            MosaicType = mosaicType;
            DefaultTerrainSaveDirectory = defaultTerrainSaveDirectory;
            FileExt = fileExt;
            SizeXPixels = sizeXPixels;
            SizeZPixels = sizeZPixels;
            SizeXTiles = sizeXTiles;
            SizeZTiles = sizeZTiles;
            TileSizeSamples = tileSizeSamples;
            MetersPerSample = metersPerSample;
            WrapFlag = wrapFlag;
            UnifiedScale = unifiedScale;
            GlobalMinHeightMeters = globalMinHeightMeters;
            GlobalMaxHeightMeters = globalMaxHeightMeters;

            InitMpsStuff();
            InitTilesState(tileNum, tileOK);

            // Make sure all tiles are available
            for (int z = 0; z < sizeZTiles; z++)
            {
                for (int x = 0; x < sizeXTiles; x++)
                {
                    availableTiles[x, z] = true;
                }
            }
        }

        /// <summary>
        /// Parses an existing mosaic description from a .mmf master-file stream.
        /// Lines are "#Label:\tvalue[\tvalue2]"; parsing stops at "#EOF".
        /// </summary>
        /// <param name="s">Stream positioned at the start of the .mmf content.</param>
        public MosaicDescription(Stream s)
        {
            Modified = false; // We're recreating an existing mosaic

            StreamReader r = new StreamReader(s);
            string line;

            //todo: get file location of the mosaic! update DefaultTerrainSaveDirectory with it
            // FIX: also stop at end-of-stream (ReadLine() == null) so a truncated file
            // without a "#EOF" trailer no longer causes a NullReferenceException.
            while ((line = r.ReadLine()) != null && line != "#EOF")
            {
                // FIX: guard against empty lines before indexing line[0].
                if (line.Length > 0 && line[0] == '#')
                {
                    string label;
                    string val1;
                    string val2;

                    ParseLine(line, out label, out val1, out val2);

                    // Numeric fields use InvariantCulture: the .mmf format always
                    // writes '.' as the decimal separator regardless of OS locale.
                    switch (label)
                    {
                        case "MosaicName":
                            MosaicName = val1;
                            break;
                        case "MosaicType":
                            MosaicType = val1;
                            break;
                        case "FileExt":
                            FileExt = val1;
                            break;
                        case "nPxlsX":
                            SizeXPixels = int.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                        case "nPxlsY":
                            SizeZPixels = int.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                        case "nMapsX":
                            SizeXTiles = int.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                        case "nMapsY":
                            SizeZTiles = int.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                        case "SubMapSize":
                            TileSizeSamples = int.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                        case "HorizScale":
                            float horizScale = float.Parse(val1, CultureInfo.InvariantCulture);
                            // meters per sample should be in int
                            MetersPerSample = (int)Math.Round(horizScale);
                            InitMpsStuff();
                            break;
                        case "WrapFlag":
                            WrapFlag = (val1 == "TRUE");
                            break;
                        case "TileState":
                            int tileNum = int.Parse(val1, CultureInfo.InvariantCulture);
                            bool tileOK = (val2 == "OK");
                            InitTilesState(tileNum, tileOK);
                            break;
                        case "UnifiedScale":
                            UnifiedScale = (val1 == "TRUE");
                            break;
                        case "GlobalMinAlt":
                            GlobalMinHeightMeters = float.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                        case "GlobalMaxAlt":
                            GlobalMaxHeightMeters = float.Parse(val1, CultureInfo.InvariantCulture);
                            break;
                    }
                }
            }

            r.Close();
        }

        /// <summary>
        /// Writes the description to "{DefaultTerrainSaveDirectory}/{MosaicName}.mmf",
        /// replacing any existing file. No-op when unmodified unless <paramref name="force"/> is set.
        /// </summary>
        public void Save(bool force)
        {
            if (!force && !Modified)
            {
                return;
            }

            string mmfFile = Path.Combine(DefaultTerrainSaveDirectory, MosaicName + ".mmf");

            if (File.Exists(mmfFile))
            {
                File.Delete(mmfFile);
            }

            Save(new StreamWriter(mmfFile));
        }

        // Serializes all properties in .mmf format and closes the writer.
        private void Save(TextWriter writer)
        {
            // Header
            writer.WriteLine("L3DT Mosaic master file");

            // Properties
            WriteLine(writer, "MosaicName", MosaicName);
            WriteLine(writer, "MosaicType", MosaicType);
            WriteLine(writer, "FileExt", FileExt);
            WriteLine(writer, "nPxlsX", SizeXPixels);
            WriteLine(writer, "nPxlsY", SizeZPixels);
            WriteLine(writer, "nMapsX", SizeXTiles);
            WriteLine(writer, "nMapsY", SizeZTiles);
            WriteLine(writer, "SubMapSize", TileSizeSamples);
            WriteLine(writer, "HorizScale", MetersPerSample);
            WriteLine(writer, "WrapFlag", WrapFlag ? "TRUE" : "FALSE");
            WriteLine(writer, "UnifiedScale", UnifiedScale ? "TRUE" : "FALSE");
            // FIX: format floats with InvariantCulture so the file round-trips
            // on systems whose locale uses ',' as the decimal separator.
            WriteLine(writer, "GlobalMinAlt", GlobalMinHeightMeters.ToString(CultureInfo.InvariantCulture));
            WriteLine(writer, "GlobalMaxAlt", GlobalMaxHeightMeters.ToString(CultureInfo.InvariantCulture));

            // Tile states
            for (int i = 0; i < SizeXTiles * SizeZTiles; i++)
            {
                WriteLine(writer, "TileState", "" + i, "OK");
            }

            // Trailer
            writer.WriteLine("#EOF");
            writer.Close();
        }

        // Splits "#Label:\tval1[\tval2]" into its parts; val2 is null when absent.
        private static void ParseLine(string line, out string label, out string val1, out string val2)
        {
            int labelend = line.IndexOf(':');
            label = line.Substring(1, labelend - 1);

            int firstTab = line.IndexOf('\t');
            int lastTab = line.LastIndexOf('\t');

            if (firstTab == lastTab)
            {
                val1 = line.Substring(firstTab + 1);
                val2 = null;
            }
            else
            {
                val1 = line.Substring(firstTab + 1, lastTab - firstTab - 1);
                val2 = line.Substring(lastTab + 1);
            }
        }

        // Writes "#label:\tval1".
        private static void WriteLine(TextWriter writer, string label, object val1)
        {
            writer.WriteLine("#" + label + ":\t" + val1);
        }

        // Writes "#label:\tval1\tval2".
        private static void WriteLine(TextWriter writer, string label, object val1, object val2)
        {
            writer.WriteLine("#" + label + ":\t" + val1 + "\t" + val2);
        }

        // Derives MPSMask/MPSShift from MetersPerSample; throws if it is not a power of 2.
        private void InitMpsStuff()
        {
            // compute mask
            MPSMask = MetersPerSample - 1;

            // compute shift
            MPSShift = 0;
            int tmp = MPSMask;
            while (tmp > 0)
            {
                tmp = tmp >> 1;
                MPSShift++;
            }

            if ((1 << MPSShift) != MetersPerSample)
            {
                throw new Core.AxiomException("The HorizScale parameter in the mosaic is not a power of 2.");
            }

            Debug.Assert((1 << MPSShift) == MetersPerSample);
        }

        // Records one tile's availability; tile numbering runs row-major with the
        // Z axis flipped relative to array storage (hence the SizeZTiles - y - 1).
        private void InitTilesState(int tileNum, bool tileOK)
        {
            if (availableTiles == null)
            {
                availableTiles = new bool[SizeXTiles, SizeZTiles];
            }

            int y = tileNum / SizeXTiles;
            int x = tileNum - (y * SizeXTiles);
            y = SizeZTiles - y - 1;

            availableTiles[x, y] = tileOK;
        }

        /// <summary>
        /// Returns whether the tile at grid position (x, y) is available.
        /// </summary>
        public bool TileAvailable(int x, int y)
        {
            return availableTiles[x, y];
        }
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// (Hand-written customizations belong in the partial methods / partial classes, not here.)

using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;

namespace Google.Cloud.ErrorReporting.V1Beta1
{
    /// <summary>Settings for <see cref="ErrorGroupServiceClient"/> instances.</summary>
    public sealed partial class ErrorGroupServiceSettings : gaxgrpc::ServiceSettingsBase
    {
        /// <summary>Get a new instance of the default <see cref="ErrorGroupServiceSettings"/>.</summary>
        /// <returns>A new instance of the default <see cref="ErrorGroupServiceSettings"/>.</returns>
        public static ErrorGroupServiceSettings GetDefault() => new ErrorGroupServiceSettings();

        /// <summary>Constructs a new <see cref="ErrorGroupServiceSettings"/> object with default settings.</summary>
        public ErrorGroupServiceSettings()
        {
        }

        // Copy constructor backing Clone(); OnCopy lets hand-written partial code
        // copy any additional settings it declares.
        private ErrorGroupServiceSettings(ErrorGroupServiceSettings existing) : base(existing)
        {
            gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
            GetGroupSettings = existing.GetGroupSettings;
            UpdateGroupSettings = existing.UpdateGroupSettings;
            OnCopy(existing);
        }

        // Extension point for hand-written partial classes.
        partial void OnCopy(ErrorGroupServiceSettings existing);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>ErrorGroupServiceClient.GetGroup</c> and <c>ErrorGroupServiceClient.GetGroupAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>Initial retry delay: 100 milliseconds.</description></item>
        /// <item><description>Retry delay multiplier: 1.3</description></item>
        /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
        /// <item><description>Maximum attempts: Unlimited</description></item>
        /// <item>
        /// <description>
        /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
        /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
        /// </description>
        /// </item>
        /// <item><description>Timeout: 600 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings GetGroupSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>ErrorGroupServiceClient.UpdateGroup</c> and <c>ErrorGroupServiceClient.UpdateGroupAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>Initial retry delay: 100 milliseconds.</description></item>
        /// <item><description>Retry delay multiplier: 1.3</description></item>
        /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
        /// <item><description>Maximum attempts: Unlimited</description></item>
        /// <item>
        /// <description>
        /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
        /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
        /// </description>
        /// </item>
        /// <item><description>Timeout: 600 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings UpdateGroupSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(100), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));

        /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
        /// <returns>A deep clone of this <see cref="ErrorGroupServiceSettings"/> object.</returns>
        public ErrorGroupServiceSettings Clone() => new ErrorGroupServiceSettings(this);
    }

    /// <summary>
    /// Builder class for <see cref="ErrorGroupServiceClient"/> to provide simple configuration of credentials, endpoint
    /// etc.
/// </summary> public sealed partial class ErrorGroupServiceClientBuilder : gaxgrpc::ClientBuilderBase<ErrorGroupServiceClient> { /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary> public ErrorGroupServiceSettings Settings { get; set; } /// <summary>Creates a new builder with default settings.</summary> public ErrorGroupServiceClientBuilder() { UseJwtAccessWithScopes = ErrorGroupServiceClient.UseJwtAccessWithScopes; } partial void InterceptBuild(ref ErrorGroupServiceClient client); partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<ErrorGroupServiceClient> task); /// <summary>Builds the resulting client.</summary> public override ErrorGroupServiceClient Build() { ErrorGroupServiceClient client = null; InterceptBuild(ref client); return client ?? BuildImpl(); } /// <summary>Builds the resulting client asynchronously.</summary> public override stt::Task<ErrorGroupServiceClient> BuildAsync(st::CancellationToken cancellationToken = default) { stt::Task<ErrorGroupServiceClient> task = null; InterceptBuildAsync(cancellationToken, ref task); return task ?? BuildAsyncImpl(cancellationToken); } private ErrorGroupServiceClient BuildImpl() { Validate(); grpccore::CallInvoker callInvoker = CreateCallInvoker(); return ErrorGroupServiceClient.Create(callInvoker, Settings); } private async stt::Task<ErrorGroupServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken) { Validate(); grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false); return ErrorGroupServiceClient.Create(callInvoker, Settings); } /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary> protected override string GetDefaultEndpoint() => ErrorGroupServiceClient.DefaultEndpoint; /// <summary> /// Returns the default scopes for this builder type, used if no scopes are otherwise specified. 
/// </summary>
// NOTE(review): this excerpt starts inside ErrorGroupServiceClientBuilder (its class
// declaration is above this chunk). The three overrides below wire the builder to the
// client type's static defaults; the closing brace ends the builder class.
protected override scg::IReadOnlyList<string> GetDefaultScopes() => ErrorGroupServiceClient.DefaultScopes;

/// <summary>Returns the channel pool to use when no other options are specified.</summary>
protected override gaxgrpc::ChannelPool GetChannelPool() => ErrorGroupServiceClient.ChannelPool;

/// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/> to use if not otherwise specified.</summary>
protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}

/// <summary>ErrorGroupService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service for retrieving and updating individual error groups.
/// </remarks>
// NOTE(review): generated-style GAPIC surface — presumably produced by the Google API code
// generator; edit the generator config rather than hand-editing (TODO confirm provenance).
public abstract partial class ErrorGroupServiceClient
{
    /// <summary>
    /// The default endpoint for the ErrorGroupService service, which is a host of
    /// "clouderrorreporting.googleapis.com" and a port of 443.
    /// </summary>
    public static string DefaultEndpoint { get; } = "clouderrorreporting.googleapis.com:443";

    /// <summary>The default ErrorGroupService scopes.</summary>
    /// <remarks>
    /// The default ErrorGroupService scopes are:
    /// <list type="bullet">
    /// <item><description>https://www.googleapis.com/auth/cloud-platform</description></item>
    /// </list>
    /// </remarks>
    public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
    {
        "https://www.googleapis.com/auth/cloud-platform",
    });

    // Shared channel pool for clients created without explicit channel settings.
    internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);

    // Defaults to true; the MaybeUseJwtAccessWithScopes partial hook lets another part of
    // this partial class override the decision.
    internal static bool UseJwtAccessWithScopes
    {
        get
        {
            bool useJwtAccessWithScopes = true;
            MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
            return useJwtAccessWithScopes;
        }
    }

    static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);

    /// <summary>
    /// Asynchronously creates a <see cref="ErrorGroupServiceClient"/> using the default credentials, endpoint and
    /// settings. To specify custom credentials or other settings, use <see cref="ErrorGroupServiceClientBuilder"/>.
    /// </summary>
    /// <param name="cancellationToken">
    /// The <see cref="st::CancellationToken"/> to use while creating the client.
    /// </param>
    /// <returns>The task representing the created <see cref="ErrorGroupServiceClient"/>.</returns>
    public static stt::Task<ErrorGroupServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
        new ErrorGroupServiceClientBuilder().BuildAsync(cancellationToken);

    /// <summary>
    /// Synchronously creates a <see cref="ErrorGroupServiceClient"/> using the default credentials, endpoint and
    /// settings. To specify custom credentials or other settings, use <see cref="ErrorGroupServiceClientBuilder"/>.
    /// </summary>
    /// <returns>The created <see cref="ErrorGroupServiceClient"/>.</returns>
    public static ErrorGroupServiceClient Create() => new ErrorGroupServiceClientBuilder().Build();

    /// <summary>
    /// Creates a <see cref="ErrorGroupServiceClient"/> which uses the specified call invoker for remote operations.
    /// </summary>
    /// <param name="callInvoker">
    /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
    /// </param>
    /// <param name="settings">Optional <see cref="ErrorGroupServiceSettings"/>.</param>
    /// <returns>The created <see cref="ErrorGroupServiceClient"/>.</returns>
    internal static ErrorGroupServiceClient Create(grpccore::CallInvoker callInvoker, ErrorGroupServiceSettings settings = null)
    {
        gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
        // Honor an interceptor configured via settings by wrapping the call invoker.
        grpcinter::Interceptor interceptor = settings?.Interceptor;
        if (interceptor != null)
        {
            callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
        }
        ErrorGroupService.ErrorGroupServiceClient grpcClient = new ErrorGroupService.ErrorGroupServiceClient(callInvoker);
        return new ErrorGroupServiceClientImpl(grpcClient, settings);
    }

    /// <summary>
    /// Shuts down any channels automatically created by <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
    /// affected.
    /// </summary>
    /// <remarks>
    /// After calling this method, further calls to <see cref="Create()"/> and
    /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
    /// by another call to this method.
    /// </remarks>
    /// <returns>A task representing the asynchronous shutdown operation.</returns>
    public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();

    /// <summary>The underlying gRPC ErrorGroupService client</summary>
    public virtual ErrorGroupService.ErrorGroupServiceClient GrpcClient => throw new sys::NotImplementedException();

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual ErrorGroup GetGroup(GetGroupRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(GetGroupRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(GetGroupRequest request, st::CancellationToken cancellationToken) =>
        GetGroupAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// Required. The group resource name. Written as
    /// `projects/{projectID}/groups/{group_name}`. Call
    /// [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list)
    /// to return a list of groups belonging to this project.
    ///
    /// Example: `projects/my-project-123/groups/my-group`
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual ErrorGroup GetGroup(string groupName, gaxgrpc::CallSettings callSettings = null) =>
        GetGroup(new GetGroupRequest
        {
            GroupName = gax::GaxPreconditions.CheckNotNullOrEmpty(groupName, nameof(groupName)),
        }, callSettings);

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// Required. The group resource name. Written as
    /// `projects/{projectID}/groups/{group_name}`. Call
    /// [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list)
    /// to return a list of groups belonging to this project.
    ///
    /// Example: `projects/my-project-123/groups/my-group`
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(string groupName, gaxgrpc::CallSettings callSettings = null) =>
        GetGroupAsync(new GetGroupRequest
        {
            GroupName = gax::GaxPreconditions.CheckNotNullOrEmpty(groupName, nameof(groupName)),
        }, callSettings);

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// Required. The group resource name. Written as
    /// `projects/{projectID}/groups/{group_name}`. Call
    /// [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list)
    /// to return a list of groups belonging to this project.
    ///
    /// Example: `projects/my-project-123/groups/my-group`
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(string groupName, st::CancellationToken cancellationToken) =>
        GetGroupAsync(groupName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// Required. The group resource name. Written as
    /// `projects/{projectID}/groups/{group_name}`. Call
    /// [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list)
    /// to return a list of groups belonging to this project.
    ///
    /// Example: `projects/my-project-123/groups/my-group`
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual ErrorGroup GetGroup(ErrorGroupName groupName, gaxgrpc::CallSettings callSettings = null) =>
        GetGroup(new GetGroupRequest
        {
            GroupNameAsErrorGroupName = gax::GaxPreconditions.CheckNotNull(groupName, nameof(groupName)),
        }, callSettings);

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// Required. The group resource name. Written as
    /// `projects/{projectID}/groups/{group_name}`. Call
    /// [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list)
    /// to return a list of groups belonging to this project.
    ///
    /// Example: `projects/my-project-123/groups/my-group`
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(ErrorGroupName groupName, gaxgrpc::CallSettings callSettings = null) =>
        GetGroupAsync(new GetGroupRequest
        {
            GroupNameAsErrorGroupName = gax::GaxPreconditions.CheckNotNull(groupName, nameof(groupName)),
        }, callSettings);

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="groupName">
    /// Required. The group resource name. Written as
    /// `projects/{projectID}/groups/{group_name}`. Call
    /// [`groupStats.list`](https://cloud.google.com/error-reporting/reference/rest/v1beta1/projects.groupStats/list)
    /// to return a list of groups belonging to this project.
    ///
    /// Example: `projects/my-project-123/groups/my-group`
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> GetGroupAsync(ErrorGroupName groupName, st::CancellationToken cancellationToken) =>
        GetGroupAsync(groupName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual ErrorGroup UpdateGroup(UpdateGroupRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(UpdateGroupRequest request, gaxgrpc::CallSettings callSettings = null) =>
        throw new sys::NotImplementedException();

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(UpdateGroupRequest request, st::CancellationToken cancellationToken) =>
        UpdateGroupAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="group">
    /// Required. The group which replaces the resource on the server.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public virtual ErrorGroup UpdateGroup(ErrorGroup group, gaxgrpc::CallSettings callSettings = null) =>
        UpdateGroup(new UpdateGroupRequest
        {
            Group = gax::GaxPreconditions.CheckNotNull(group, nameof(group)),
        }, callSettings);

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="group">
    /// Required. The group which replaces the resource on the server.
    /// </param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(ErrorGroup group, gaxgrpc::CallSettings callSettings = null) =>
        UpdateGroupAsync(new UpdateGroupRequest
        {
            Group = gax::GaxPreconditions.CheckNotNull(group, nameof(group)),
        }, callSettings);

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="group">
    /// Required. The group which replaces the resource on the server.
    /// </param>
    /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public virtual stt::Task<ErrorGroup> UpdateGroupAsync(ErrorGroup group, st::CancellationToken cancellationToken) =>
        UpdateGroupAsync(group, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}

/// <summary>ErrorGroupService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service for retrieving and updating individual error groups.
/// </remarks>
public sealed partial class ErrorGroupServiceClientImpl : ErrorGroupServiceClient
{
    // Prepared ApiCall wrappers for the two RPCs; built once in the constructor.
    private readonly gaxgrpc::ApiCall<GetGroupRequest, ErrorGroup> _callGetGroup;

    private readonly gaxgrpc::ApiCall<UpdateGroupRequest, ErrorGroup> _callUpdateGroup;

    /// <summary>
    /// Constructs a client wrapper for the ErrorGroupService service, with the specified gRPC client and settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">The base <see cref="ErrorGroupServiceSettings"/> used within this client.</param>
    public ErrorGroupServiceClientImpl(ErrorGroupService.ErrorGroupServiceClient grpcClient, ErrorGroupServiceSettings settings)
    {
        GrpcClient = grpcClient;
        // A null settings argument falls back to the service defaults.
        ErrorGroupServiceSettings effectiveSettings = settings ?? ErrorGroupServiceSettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        // Each WithGoogleRequestParam call attaches the routing header expected by the backend.
        _callGetGroup = clientHelper.BuildApiCall<GetGroupRequest, ErrorGroup>(grpcClient.GetGroupAsync, grpcClient.GetGroup, effectiveSettings.GetGroupSettings).WithGoogleRequestParam("group_name", request => request.GroupName);
        Modify_ApiCall(ref _callGetGroup);
        Modify_GetGroupApiCall(ref _callGetGroup);
        _callUpdateGroup = clientHelper.BuildApiCall<UpdateGroupRequest, ErrorGroup>(grpcClient.UpdateGroupAsync, grpcClient.UpdateGroup, effectiveSettings.UpdateGroupSettings).WithGoogleRequestParam("group.name", request => request.Group?.Name);
        Modify_ApiCall(ref _callUpdateGroup);
        Modify_UpdateGroupApiCall(ref _callUpdateGroup);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }

    // Partial-method hooks so hand-written code can customize calls/requests without
    // touching this generated-style file.
    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    partial void Modify_GetGroupApiCall(ref gaxgrpc::ApiCall<GetGroupRequest, ErrorGroup> call);

    partial void Modify_UpdateGroupApiCall(ref gaxgrpc::ApiCall<UpdateGroupRequest, ErrorGroup> call);

    partial void OnConstruction(ErrorGroupService.ErrorGroupServiceClient grpcClient, ErrorGroupServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC ErrorGroupService client</summary>
    public override ErrorGroupService.ErrorGroupServiceClient GrpcClient { get; }

    partial void Modify_GetGroupRequest(ref GetGroupRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_UpdateGroupRequest(ref UpdateGroupRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override ErrorGroup GetGroup(GetGroupRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetGroupRequest(ref request, ref callSettings);
        return _callGetGroup.Sync(request, callSettings);
    }

    /// <summary>
    /// Get the specified group.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<ErrorGroup> GetGroupAsync(GetGroupRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_GetGroupRequest(ref request, ref callSettings);
        return _callGetGroup.Async(request, callSettings);
    }

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override ErrorGroup UpdateGroup(UpdateGroupRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_UpdateGroupRequest(ref request, ref callSettings);
        return _callUpdateGroup.Sync(request, callSettings);
    }

    /// <summary>
    /// Replace the data for the specified group.
    /// Fails if the group does not exist.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>A Task containing the RPC response.</returns>
    public override stt::Task<ErrorGroup> UpdateGroupAsync(UpdateGroupRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_UpdateGroupRequest(ref request, ref callSettings);
        return _callUpdateGroup.Async(request, callSettings);
    }
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Reflection;
using System.Collections.Generic;

using Debug = System.Diagnostics.Debug;

namespace Internal.Reflection.Execution
{
    /////////////////////////////////////////////////////////////////////////////////////////////////////
    //
    // **** WARNING ****
    //
    // A large portion of the logic present in this file is duplicated in ndp\rh\src\rtm\system\runtime\typecast.cs
    //
    // **** WARNING ****
    //
    /////////////////////////////////////////////////////////////////////////////////////////////////////

    // This is not a general purpose type comparison facility. It is limited to what constraint validation needs.
    internal static partial class ConstraintValidator
    {
        // Returns true if pObjType implements the (non-array) interface pTargetType,
        // either directly or via generic variance / array covariance.
        private static bool ImplementsInterface(Type pObjType, Type pTargetType)
        {
            Debug.Assert(!pTargetType.IsArray, "did not expect array type");
            Debug.Assert(pTargetType.IsInterface, "IsInstanceOfInterface called with non-interface EEType");

            // Fast path: an exact (equivalent) interface match.
            foreach (var pInterfaceType in pObjType.GetInterfaces())
            {
                if (AreTypesEquivalentInternal(pInterfaceType, pTargetType))
                {
                    return true;
                }
            }

            // We did not find the interface type in the list of supported interfaces. There's still one
            // chance left: if the target interface is generic and one or more of its type parameters is co or
            // contra variant then the object can still match if it implements a different instantiation of
            // the interface with type compatible generic arguments.
            //
            // An additional edge case occurs because of array covariance. This forces us to treat any generic
            // interfaces implemented by arrays as covariant over their one type parameter.
            // if (pTargetType.HasGenericVariance || (fArrayCovariance && pTargetType.IsGenericType))
            if (pTargetType.IsGenericType)
            {
                bool fArrayCovariance = pObjType.IsArray;

                Type pTargetGenericType = pTargetType.GetGenericTypeDefinition();

                // Fetch the instantiations lazily only once we get a potential match
                Type[] pTargetInstantiation = null;
                Type[] pTargetGenericInstantiation = null;

                foreach (var pInterfaceType in pObjType.GetInterfaces())
                {
                    // We can ignore interfaces which are not also marked as having generic variance
                    // unless we're dealing with array covariance.
                    // if (pInterfaceType.HasGenericVariance || (fArrayCovariance && pInterfaceType.IsGenericType))
                    if (!pInterfaceType.IsGenericType)
                        continue;

                    // If the generic types aren't the same then the types aren't compatible.
                    if (!pInterfaceType.GetGenericTypeDefinition().Equals(pTargetGenericType))
                        continue;

                    Type[] pInterfaceInstantiation = pInterfaceType.GetGenericArguments();

                    if (pTargetInstantiation == null)
                    {
                        pTargetInstantiation = pTargetType.GetGenericArguments();
                        if (!fArrayCovariance)
                            pTargetGenericInstantiation = pTargetGenericType.GetGenericArguments();
                    }

                    // Compare the instantiations to see if they're compatible taking variance into account.
                    if (TypeParametersAreCompatible(pInterfaceInstantiation, pTargetInstantiation, pTargetGenericInstantiation, fArrayCovariance))
                        return true;

                    if (fArrayCovariance)
                    {
                        Debug.Assert(pInterfaceInstantiation.Length == 1, "arity mismatch for array generic interface");
                        Debug.Assert(pTargetInstantiation.Length == 1, "arity mismatch for array generic interface");

                        // Special case for generic interfaces on arrays. Arrays of integral types (including enums)
                        // can be cast to generic interfaces over the integral types of the same size. For example
                        // int[] -> IList<uint>.
                        if (ArePrimitveTypesEquivalentSize(pInterfaceInstantiation[0], pTargetInstantiation[0]))
                        {
                            // We have checked that the interface type definition matches above. The checks are
                            // ordered differently here compared with the rtm\system\runtime\typecast.cs version
                            // because TypeInfo does not let us do the HasGenericVariance optimization.
                            return true;
                        }
                    }
                }
            }

            return false;
        }

        // Compare two types to see if they are compatible via generic variance.
        private static bool TypesAreCompatibleViaGenericVariance(Type pSourceType, Type pTargetType)
        {
            Type pTargetGenericType = pTargetType.GetGenericTypeDefinition();
            Type pSourceGenericType = pSourceType.GetGenericTypeDefinition();

            // If the generic types aren't the same then the types aren't compatible.
            if (pTargetGenericType.Equals(pSourceGenericType))
            {
                // Compare the instantiations to see if they're compatible taking variance into account.
                if (TypeParametersAreCompatible(pSourceType.GetGenericArguments(),
                                                pTargetType.GetGenericArguments(),
                                                pTargetGenericType.GetGenericArguments(),
                                                false))
                {
                    return true;
                }
            }

            return false;
        }

        // Compare two sets of generic type parameters to see if they're assignment compatible taking generic
        // variance into account. It's assumed they've already had their type definition matched (which
        // implies their arities are the same as well). The fForceCovariance argument tells the method to
        // override the defined variance of each parameter and instead assume it is covariant. This is used to
        // implement covariant array interfaces.
        private static bool TypeParametersAreCompatible(Type[] pSourceInstantiation, Type[] pTargetInstantiation, Type[] pVarianceInfo, bool fForceCovariance)
        {
            // The types represent different instantiations of the same generic type. The
            // arity of both had better be the same.
            Debug.Assert(pSourceInstantiation.Length == pTargetInstantiation.Length, "arity mismatch betweeen generic instantiations");
            Debug.Assert(fForceCovariance || pTargetInstantiation.Length == pVarianceInfo.Length, "arity mismatch betweeen generic instantiations");

            // Walk through the instantiations comparing the cast compatibility of each pair
            // of type args.
            for (int i = 0; i < pTargetInstantiation.Length; i++)
            {
                Type pTargetArgType = pTargetInstantiation[i];
                Type pSourceArgType = pSourceInstantiation[i];

                GenericParameterAttributes varType;
                if (fForceCovariance)
                    varType = GenericParameterAttributes.Covariant;
                else
                    varType = pVarianceInfo[i].GenericParameterAttributes & GenericParameterAttributes.VarianceMask;

                switch (varType)
                {
                    case GenericParameterAttributes.None:
                        // Non-variant type params need to be identical.
                        if (!AreTypesEquivalentInternal(pSourceArgType, pTargetArgType))
                            return false;
                        break;

                    case GenericParameterAttributes.Covariant:
                        // For covariance (or out type params in C#) the object must implement an
                        // interface with a more derived type arg than the target interface. Or
                        // the object interface can have a type arg that is an interface
                        // implemented by the target type arg.
                        // For instance:
                        //   class Foo : ICovariant<String> is ICovariant<Object>
                        //   class Foo : ICovariant<Bar> is ICovariant<IBar>
                        //   class Foo : ICovariant<IBar> is ICovariant<Object>
                        if (!AreTypesAssignableInternal(pSourceArgType, pTargetArgType, false, false))
                            return false;
                        break;

                    case GenericParameterAttributes.Contravariant:
                        // For contravariance (or in type params in C#) the object must implement
                        // an interface with a less derived type arg than the target interface. Or
                        // the object interface can have a type arg that is a class implementing
                        // the interface that is the target type arg.
                        // For instance:
                        //   class Foo : IContravariant<Object> is IContravariant<String>
                        //   class Foo : IContravariant<IBar> is IContravariant<Bar>
                        //   class Foo : IContravariant<Object> is IContravariant<IBar>
                        if (!AreTypesAssignableInternal(pTargetArgType, pSourceArgType, false, false))
                            return false;
                        break;

                    default:
                        Debug.Fail("unknown generic variance type");
                        return false;
                }
            }

            return true;
        }

        //
        // Determines if a value of the source type can be assigned to a location of the target type.
        // It does not handle ICastable, and cannot since we do not have an actual object instance here.
        // This routine assumes that the source type is boxed, i.e. a value type source is presumed to be
        // compatible with Object and ValueType and an enum source is additionally compatible with Enum.
        //
        private static bool AreTypesAssignable(Type pSourceType, Type pTargetType)
        {
            // Special case: T can be cast to Nullable<T> (where T is a value type). Call this case out here
            // since this is only applicable if T is boxed, which is not true for any other callers of
            // AreTypesAssignableInternal, so no sense making all the other paths pay the cost of the check.
            if (pTargetType.IsNullable() && pSourceType.IsValueType && !pSourceType.IsNullable())
            {
                Type pNullableType = pTargetType.GetNullableType();

                return AreTypesEquivalentInternal(pSourceType, pNullableType);
            }

            return AreTypesAssignableInternal(pSourceType, pTargetType, true, false);
        }

        // Internally callable version of the export method above. Has two additional parameters:
        //  fBoxedSource            : assume the source type is boxed so that value types and enums are
        //                            compatible with Object, ValueType and Enum (if applicable)
        //  fAllowSizeEquivalence   : allow identically sized integral types and enums to be considered
        //                            equivalent (currently used only for array element types)
        private static bool AreTypesAssignableInternal(Type pSourceType, Type pTargetType, bool fBoxedSource, bool fAllowSizeEquivalence)
        {
            //
            // Are the types identical?
            //
            if (AreTypesEquivalentInternal(pSourceType, pTargetType))
                return true;

            //
            // Handle cast to interface cases.
            //
            if (pTargetType.IsInterface)
            {
                // Value types can only be cast to interfaces if they're boxed.
                if (!fBoxedSource && pSourceType.IsValueType)
                    return false;

                if (ImplementsInterface(pSourceType, pTargetType))
                    return true;

                // Are the types compatible due to generic variance?
                // if (pTargetType.HasGenericVariance && pSourceType.HasGenericVariance)
                if (pTargetType.IsGenericType && pSourceType.IsGenericType)
                    return TypesAreCompatibleViaGenericVariance(pSourceType, pTargetType);

                return false;
            }
            if (pSourceType.IsInterface)
            {
                // The only non-interface type an interface can be cast to is Object.
                return pTargetType.IsSystemObject();
            }

            //
            // Handle cast to array cases.
            //
            if (pTargetType.IsArray)
            {
                if (pSourceType.IsArray)
                {
                    if (pSourceType.GetElementType().IsPointer)
                    {
                        // If the element types are pointers, then only exact matches are correct.
                        // As we've already called AreTypesEquivalent at the start of this function,
                        // return false as the exact match case has already been handled.
                        // int** is not compatible with uint**, nor is int*[] compatible with uint*[].
                        return false;
                    }
                    else
                    {
                        // Source type is also an array. Are the element types compatible? Note that using
                        // AreTypesAssignableInternal here handles array covariance as well as IFoo[] -> Foo[]
                        // etc. Pass false for fBoxedSource since int[] is not assignable to object[].
                        return AreTypesAssignableInternal(pSourceType.GetElementType(), pTargetType.GetElementType(), false, true);
                    }
                }

                // Can't cast a non-array type to an array.
                return false;
            }
            if (pSourceType.IsArray)
            {
                // Target type is not an array. But we can still cast arrays to Object or System.Array.
                return pTargetType.IsSystemObject() || pTargetType.IsSystemArray();
            }

            //
            // Handle pointer cases
            //
            if (pTargetType.IsPointer)
            {
                if (pSourceType.IsPointer)
                {
                    if (pSourceType.GetElementType().IsPointer)
                    {
                        // If the element types are pointers, then only exact matches are correct.
                        // As we've already called AreTypesEquivalent at the start of this function,
                        // return false as the exact match case has already been handled.
                        // int** is not compatible with uint**, nor is int*[] compatible with uint*[].
                        return false;
                    }
                    else
                    {
                        // Source type is also a pointer. Are the element types compatible? Note that using
                        // AreTypesAssignableInternal here handles array covariance as well as IFoo[] -> Foo[]
                        // etc. Pass false for fBoxedSource since int[] is not assignable to object[].
                        return AreTypesAssignableInternal(pSourceType.GetElementType(), pTargetType.GetElementType(), false, true);
                    }
                }

                return false;
            }
            else if (pSourceType.IsPointer)
            {
                return false;
            }

            //
            // Handle cast to other (non-interface, non-array) cases.
            //
            if (pSourceType.IsValueType)
            {
                // Certain value types of the same size are treated as equivalent when the comparison is
                // between array element types (indicated by fAllowSizeEquivalence). These are integer types
                // of the same size (e.g. int and uint) and the base type of enums vs all integer types of the
                // same size.
                if (fAllowSizeEquivalence && pTargetType.IsValueType)
                {
                    if (ArePrimitveTypesEquivalentSize(pSourceType, pTargetType))
                        return true;

                    // Non-identical value types aren't equivalent in any other case (since value types are
                    // sealed).
                    return false;
                }

                // If the source type is a value type but it's not boxed then we've run out of options: the types
                // are not identical, the target type isn't an interface and we're not allowed to check whether
                // the target type is a parent of this one since value types are sealed and thus the only matches
                // would be against Object, ValueType or Enum, all of which are reference types and not compatible
                // with non-boxed value types.
                if (!fBoxedSource)
                    return false;
            }

            //
            // Are the types compatible via generic variance?
            //
            // if (pTargetType.HasGenericVariance && pSourceType.HasGenericVariance)
            if (pTargetType.IsGenericType && pSourceType.IsGenericType)
            {
                if (TypesAreCompatibleViaGenericVariance(pSourceType, pTargetType))
                    return true;
            }

            // Is the source type derived from the target type?
            if (IsDerived(pSourceType, pTargetType))
                return true;

            return false;
        }

        // Walks the base-type chain of pDerivedType looking for pBaseType (non-interface only).
        private static bool IsDerived(Type pDerivedType, Type pBaseType)
        {
            Debug.Assert(!pBaseType.IsInterface, "did not expect interface type");

            for (;;)
            {
                if (AreTypesEquivalentInternal(pDerivedType, pBaseType))
                    return true;

                Type baseType = pDerivedType.BaseType;
                if (baseType == null)
                    return false;

                pDerivedType = baseType;
            }
        }

        // Method to compare two type pointers for type equality
        // We cannot just compare the pointers as there can be duplicate type instances
        // for cloned and constructed types, so generic/array/pointer types are compared
        // structurally (definition plus arguments / element type).
        private static bool AreTypesEquivalentInternal(Type pType1, Type pType2)
        {
            // Non-instantiated TypeInfos compare by identity via Equals.
            if (!pType1.IsInstantiatedTypeInfo() && !pType2.IsInstantiatedTypeInfo())
                return pType1.Equals(pType2);

            if (pType1.IsGenericType && pType2.IsGenericType)
            {
                if (!pType1.GetGenericTypeDefinition().Equals(pType2.GetGenericTypeDefinition()))
                    return false;

                Type[] args1 = pType1.GetGenericArguments();
                Type[] args2 = pType2.GetGenericArguments();
                Debug.Assert(args1.Length == args2.Length);

                for (int i = 0; i < args1.Length; i++)
                {
                    if (!AreTypesEquivalentInternal(args1[i], args2[i]))
                        return false;
                }

                return true;
            }

            if (pType1.IsArray && pType2.IsArray)
            {
                if (pType1.GetArrayRank() != pType2.GetArrayRank())
                    return false;
                return AreTypesEquivalentInternal(pType1.GetElementType(), pType2.GetElementType());
            }

            if (pType1.IsPointer && pType2.IsPointer)
            {
                return AreTypesEquivalentInternal(pType1.GetElementType(), pType2.GetElementType());
            }

            return false;
        }

        // True when both types normalize to the same non-zero integral size class.
        // NOTE(review): "Primitve" is a long-standing misspelling; renaming it would require
        // touching the other half of this partial class and all callers, so it is kept as-is.
        // NormalizedPrimitiveTypeSizeForIntegerTypes is defined in another part of this partial class.
        private static bool ArePrimitveTypesEquivalentSize(Type pType1, Type pType2)
        {
            int normalizedType1 = NormalizedPrimitiveTypeSizeForIntegerTypes(pType1);
            if (normalizedType1 == 0)
                return false;

            int normalizedType2 = NormalizedPrimitiveTypeSizeForIntegerTypes(pType2);

            return normalizedType1 == normalizedType2;
        }
    }
}
using MatterHackers.Agg.Image;
using MatterHackers.Agg.RasterizerScanline;
using MatterHackers.Agg.UI;
using MatterHackers.Agg.VertexSource;
using System;
using System.Collections.Generic;
using System.Diagnostics;

namespace MatterHackers.Agg
{
    // AGG demo widget: renders an animated triangle mesh with one Gouraud
    // shader per triangle through the compound rasterizer.
    public class gouraud_mesh_application : GuiWidget
    {
        // Times the compound render pass for the on-screen stats line.
        private Stopwatch stopwatch = new Stopwatch();

        // One grid vertex: position (x, y), per-frame velocity (dx, dy),
        // current color and a per-channel color direction flag (dc).
        public struct mesh_point
        {
            public double x, y;
            public double dx, dy;
            public RGBA_Bytes color;
            public RGBA_Bytes dc;

            public mesh_point(double x_, double y_, double dx_, double dy_, RGBA_Bytes c, RGBA_Bytes dc_)
            {
                x = (x_);
                y = (y_);
                dx = (dx_);
                dy = (dy_);
                color = (c);
                dc = (dc_);
            }
        };

        // Triangle as three indices into the vertex array.
        public struct mesh_triangle
        {
            public int p1, p2, p3;

            public mesh_triangle(int i, int j, int k)
            {
                p1 = (i);
                p2 = (j);
                p3 = (k);
            }
        };

        // Shared edge between two triangles: endpoint indices (p1, p2) and the
        // triangle ids to the left (tl) and right (tr); -1 means "no triangle".
        public struct mesh_edge
        {
            public int p1, p2;
            public int tl, tr;

            public mesh_edge(int p1_, int p2_, int tl_, int tr_)
            {
                p1 = (p1_);
                p2 = (p2_);
                tl = (tl_);
                tr = (tr_);
            }
        };

        private static System.Random rand = new Random();

        // Uniform-ish random double in [v1, v2] quantized to 1000 steps.
        private static double random(double v1, double v2)
        {
            return (v2 - v1) * (rand.Next() % 1000) / 999.0 + v1;
        }

        // Owns the grid of vertices, the triangle list, the shared-edge list
        // and the mouse-drag interaction state.
        public class mesh_ctrl
        {
            private int m_cols;
            private int m_rows;
            private int m_drag_idx;   // index of the vertex being dragged, -1 when none
            private double m_drag_dx; // grab offset so the vertex doesn't jump to the cursor
            private double m_drag_dy;
            private double m_cell_w;
            private double m_cell_h;
            private double m_start_x; // top-left corner of the grid
            private double m_start_y;
            private VectorPOD<mesh_point> m_vertices = new VectorPOD<mesh_point>();
            private VectorPOD<mesh_triangle> m_triangles = new VectorPOD<mesh_triangle>();
            private VectorPOD<mesh_edge> m_edges = new VectorPOD<mesh_edge>();

            public mesh_ctrl()
            {
                m_cols = (0);
                m_rows = (0);
                m_drag_idx = (-1);
                m_drag_dx = (0);
                m_drag_dy = (0);
            }

            // Builds a rows x cols grid of vertices (row-major, m_cols per row)
            // with random velocities and colors, then derives triangles/edges.
            public void generate(int cols, int rows, double cell_w, double cell_h, double start_x, double start_y)
            {
                m_cols = cols;
                m_rows = rows;
                m_cell_w = cell_w;
                m_cell_h = cell_h;
                m_start_x = start_x;
                m_start_y = start_y;
                m_vertices.remove_all();
                for (int i = 0; i < m_rows; i++)
                {
                    double x = start_x;
                    for (int j = 0; j < m_cols; j++)
                    {
                        double dx =
                        random(-0.5, 0.5);
                        double dy = random(-0.5, 0.5);
                        // Random start color; dc holds a 0/1 direction flag per channel.
                        RGBA_Bytes c = new RGBA_Bytes(rand.Next() & 0xFF, rand.Next() & 0xFF, rand.Next() & 0xFF);
                        RGBA_Bytes dc = new RGBA_Bytes(rand.Next() & 1, rand.Next() & 1, rand.Next() & 1);
                        m_vertices.add(new mesh_point(x, start_y, dx, dy, c, dc));
                        x += cell_w;
                    }
                    start_y += cell_h;
                }

                // Each grid cell is split into two triangles along the 1-3 diagonal:
                // 4---3
                // |t2/|
                // | / |
                // |/t1|
                // 1---2
                m_triangles.remove_all();
                m_edges.remove_all();
                for (int i = 0; i < m_rows - 1; i++)
                {
                    for (int j = 0; j < m_cols - 1; j++)
                    {
                        // Corner vertex indices of cell (i, j), row-major layout.
                        int p1 = i * m_cols + j;
                        int p2 = p1 + 1;
                        int p3 = p2 + m_cols;
                        int p4 = p1 + m_cols;
                        m_triangles.add(new mesh_triangle((int)p1, (int)p2, (int)p3));
                        m_triangles.add(new mesh_triangle((int)p3, (int)p4, (int)p1));

                        // Triangle ids of this cell and of the neighbors to the
                        // left and below, used to mark edge ownership (-1 = none).
                        int curr_cell = i * (m_cols - 1) + j;
                        int left_cell = j != 0 ? (int)(curr_cell - 1) : -1;
                        int bott_cell = i != 0 ? (int)(curr_cell - (m_cols - 1)) : -1;

                        int curr_t1 = curr_cell * 2;
                        int curr_t2 = curr_t1 + 1;

                        // NOTE(review): left_t2 and bott_t1 are computed but never
                        // used by the edge construction below — TODO confirm dead.
                        int left_t1 = (left_cell >= 0) ? left_cell * 2 : -1;
                        int left_t2 = (left_cell >= 0) ? left_t1 + 1 : -1;

                        int bott_t1 = (bott_cell >= 0) ? bott_cell * 2 : -1;
                        int bott_t2 = (bott_cell >= 0) ?
                        bott_t1 + 1 : -1;

                        m_edges.add(new mesh_edge((int)p1, (int)p2, curr_t1, bott_t2));
                        m_edges.add(new mesh_edge((int)p1, (int)p3, curr_t2, curr_t1));
                        m_edges.add(new mesh_edge((int)p1, (int)p4, left_t1, curr_t2));

                        if (j == m_cols - 2) // Last column
                        {
                            m_edges.add(new mesh_edge((int)p2, (int)p3, curr_t1, -1));
                        }

                        if (i == m_rows - 2) // Last row
                        {
                            m_edges.add(new mesh_edge((int)p3, (int)p4, curr_t2, -1));
                        }
                    }
                }
            }

            // Advances every vertex by its velocity and bounces it inside a box
            // of +/- cell/4 around its home grid position.
            // NOTE(review): mesh_point is a struct, so p below is a copy —
            // the updates never appear to reach m_vertices (contrast
            // OnMouseMove, which writes through m_vertices.Array). The `delta`
            // parameter is also unused (p.dx/p.dy are used instead).
            // TODO confirm against VectorPOD indexer semantics.
            public void randomize_points(double delta)
            {
                int i, j;
                for (i = 0; i < m_rows; i++)
                {
                    for (j = 0; j < m_cols; j++)
                    {
                        // Home position of vertex (j, i) and its allowed bounce box.
                        double xc = j * m_cell_w + m_start_x;
                        double yc = i * m_cell_h + m_start_y;
                        double x1 = xc - m_cell_w / 4;
                        double y1 = yc - m_cell_h / 4;
                        double x2 = xc + m_cell_w / 4;
                        double y2 = yc + m_cell_h / 4;
                        mesh_point p = vertex(j, i);
                        p.x += p.dx;
                        p.y += p.dy;
                        // Reflect the velocity when the vertex hits a wall.
                        if (p.x < x1) { p.x = x1; p.dx = -p.dx; }
                        if (p.y < y1) { p.y = y1; p.dy = -p.dy; }
                        if (p.x > x2) { p.x = x2; p.dx = -p.dx; }
                        if (p.y > y2) { p.y = y2; p.dy = -p.dy; }
                    }
                }
            }

            // Steps each vertex color by +/-5 per channel, flipping the per-channel
            // direction flag in dc when a channel saturates at 0 or 255.
            // NOTE(review): starts at i = 1, so vertex 0 never changes color —
            // TODO confirm intentional.
            public void rotate_colors()
            {
                int i;
                for (i = 1; i < m_vertices.size(); i++)
                {
                    // NOTE(review): c and dc are struct copies read from
                    // m_vertices[i]; the mutated values are never stored back,
                    // so this looks like a no-op — verify against VectorPOD.
                    RGBA_Bytes c = m_vertices[i].color;
                    RGBA_Bytes dc = m_vertices[i].dc;
                    int r = (int)c.Red0To255 + (dc.Red0To255 != 0 ? 5 : -5);
                    int g = (int)c.Green0To255 + (dc.Green0To255 != 0 ? 5 : -5);
                    int b = (int)c.Blue0To255 + (dc.Blue0To255 != 0 ?
                        5 : -5);
                    // Clamp each channel and flip its direction bit on saturation.
                    if (r < 0) { r = 0; dc.Red0To255 ^= 1; }
                    if (r > 255) { r = 255; dc.Red0To255 ^= 1; }
                    if (g < 0) { g = 0; dc.Green0To255 ^= 1; }
                    if (g > 255) { g = 255; dc.Green0To255 ^= 1; }
                    if (b < 0) { b = 0; dc.Blue0To255 ^= 1; }
                    if (b > 255) { b = 255; dc.Blue0To255 ^= 1; }
                    c.Red0To255 = (int)r;
                    c.Green0To255 = (int)g;
                    c.Blue0To255 = (int)b;
                }
            }

            // Starts a drag when the cursor is within 5px of a vertex; records
            // the grab offset so the vertex doesn't snap to the cursor.
            public bool OnMouseDown(MouseEventArgs mouseEvent)
            {
                double x = mouseEvent.X;
                double y = mouseEvent.Y;
                if (mouseEvent.Button == MouseButtons.Left)
                {
                    int i;
                    for (i = 0; i < m_vertices.size(); i++)
                    {
                        if (agg_math.calc_distance(x, y, m_vertices[i].x, m_vertices[i].y) < 5)
                        {
                            m_drag_idx = i;
                            m_drag_dx = x - m_vertices[i].x;
                            m_drag_dy = y - m_vertices[i].y;
                            return true;
                        }
                    }
                }
                return false;
            }

            // Moves the dragged vertex; writes through Array so the struct in
            // the vector is actually updated. Returns true when a redraw is needed.
            public bool OnMouseMove(MouseEventArgs mouseEvent)
            {
                double x = mouseEvent.X;
                double y = mouseEvent.Y;
                if (mouseEvent.Button == MouseButtons.Left)
                {
                    if (m_drag_idx >= 0)
                    {
                        m_vertices.Array[m_drag_idx].x = x - m_drag_dx;
                        m_vertices.Array[m_drag_idx].y = y - m_drag_dy;
                        return true;
                    }
                }
                return false;
            }

            // Ends a drag; returns true if one was in progress.
            public bool OnMouseUp(MouseEventArgs mouseEvent)
            {
                bool ret = m_drag_idx >= 0;
                m_drag_idx = -1;
                return ret;
            }

            public int num_vertices() { return m_vertices.size(); }

            public mesh_point vertex(int i) { return m_vertices[i]; }

            // NOTE(review): uses m_rows as the row stride, but generate() lays
            // vertices out with m_cols per row (p1 = i * m_cols + j). Correct
            // only while the grid is square (it is: generate(20, 20, ...)) —
            // TODO confirm and align with m_cols.
            public mesh_point vertex(int x, int y) { return m_vertices[(int)y * m_rows + (int)x]; }

            public int num_triangles() { return m_triangles.size(); }

            public mesh_triangle triangle(int i) { return m_triangles[i]; }

            public int num_edges() { return m_edges.size(); }

            public mesh_edge edge(int i) { return m_edges[i]; }
        }

        // Style handler for the compound rasterizer: style id == triangle id,
        // each backed by its own prepared Gouraud span generator.
        public class styles_gouraud : IStyleHandler
        {
            private List<span_gouraud_rgba> m_triangles = new List<span_gouraud_rgba>();

            // Snapshots the mesh: builds one gamma-corrected Gouraud shader per
            // triangle, in triangle-id order.
            public styles_gouraud(mesh_ctrl mesh, GammaLookUpTable gamma)
            {
                int i;
                for (i = 0; i < mesh.num_triangles(); i++)
                {
                    mesh_triangle t = mesh.triangle(i);
                    mesh_point p1 = mesh.vertex(t.p1);
                    mesh_point p2 = mesh.vertex(t.p2);
                    mesh_point p3 = mesh.vertex(t.p3);
                    RGBA_Bytes c1 = p1.color;
                    RGBA_Bytes c2 =
                    p2.color;
                    RGBA_Bytes c3 = p3.color;
                    c1.apply_gamma_dir(gamma);
                    c2.apply_gamma_dir(gamma);
                    c3.apply_gamma_dir(gamma);
                    span_gouraud_rgba gouraud = new span_gouraud_rgba(c1, c2, c3, p1.x, p1.y, p2.x, p2.y, p3.x, p3.y);
                    gouraud.prepare();
                    m_triangles.Add(gouraud);
                }
            }

            // Every style is a gradient, never a flat fill.
            public bool is_solid(int style) { return false; }

            // Unused for non-solid styles; returns transparent black.
            public RGBA_Bytes color(int style) { return new RGBA_Bytes(0, 0, 0, 0); }

            // Fills one horizontal span from the shader of triangle `style`.
            public void generate_span(RGBA_Bytes[] span, int spanIndex, int x, int y, int len, int style)
            {
                m_triangles[style].generate(span, spanIndex, x, y, len);
            }
        };

        private mesh_ctrl m_mesh = new mesh_ctrl();
        private GammaLookUpTable m_gamma = new GammaLookUpTable();

        public gouraud_mesh_application()
        {
            AnchorAll();
            // m_gamma.gamma(2.0);
            // 20x20 grid of 17px cells starting at (40, 40).
            m_mesh.generate(20, 20, 17, 17, 40, 40);
            UiThread.RunOnIdle(OnIdle);
        }

        // Renders all mesh edges in a single compound pass, with the per-triangle
        // Gouraud shaders resolved through the styles handler, then draws the
        // timing/statistics text on top.
        public override void OnDraw(Graphics2D graphics2D)
        {
            ImageBuffer widgetsSubImage = ImageBuffer.NewSubImageReference(graphics2D.DestImage, graphics2D.GetClippingRect());
            IImageByte backBuffer = widgetsSubImage;
            IImageByte destImage = backBuffer;
            ImageClippingProxy clippingProxy = new ImageClippingProxy(destImage);
            clippingProxy.clear(new RGBA_Floats(0, 0, 0));
            ScanlineRasterizer ras = new ScanlineRasterizer();
            scanline_unpacked_8 sl = new scanline_unpacked_8();
            scanline_bin sl_bin = new scanline_bin();
            rasterizer_compound_aa rasc = new rasterizer_compound_aa();
            span_allocator alloc = new span_allocator();
            int i;
            // Snapshot the current mesh colors/positions into per-triangle shaders.
            styles_gouraud styles = new styles_gouraud(m_mesh, m_gamma);
            stopwatch.Restart();
            rasc.reset();
            //rasc.clip_box(40, 40, width() - 40, height() - 40);
            // Feed every shared edge once; left/right style ids let the compound
            // rasterizer stitch adjacent triangles seamlessly.
            for (i = 0; i < m_mesh.num_edges(); i++)
            {
                mesh_edge e = m_mesh.edge(i);
                mesh_point p1 = m_mesh.vertex(e.p1);
                mesh_point p2 = m_mesh.vertex(e.p2);
                rasc.styles(e.tl, e.tr);
                rasc.move_to_d(p1.x, p1.y);
                rasc.line_to_d(p2.x, p2.y);
            }
            ScanlineRenderer scanlineRenderer = new ScanlineRenderer();
            scanlineRenderer.RenderCompound(rasc, sl, sl_bin, clippingProxy, alloc, styles);
            double tm = stopwatch.ElapsedMilliseconds;
            gsv_text t = new
            gsv_text();
            t.SetFontSize(10.0);
            Stroke pt = new Stroke(t);
            pt.width(1.5);
            pt.line_cap(LineCap.Round);
            pt.line_join(LineJoin.Round);
            // NOTE(review): tm can be 0 on a fast frame, making tri/sec Infinity
            // in the formatted string — TODO confirm acceptable for a demo.
            string buf = string.Format("{0:F2} ms, {1} triangles, {2:F0} tri/sec", tm, m_mesh.num_triangles(), m_mesh.num_triangles() / tm * 1000.0);
            t.start_point(10.0, 10.0);
            t.text(buf);
            ras.add_path(pt);
            scanlineRenderer.RenderSolid(clippingProxy, ras, sl, new RGBA_Bytes(255, 255, 255));

            // Undo the gamma applied by the shaders when a non-identity gamma is set.
            if (m_gamma.GetGamma() != 1.0)
            {
                ((ImageBuffer)destImage).apply_gamma_inv(m_gamma);
            }

            base.OnDraw(graphics2D);
        }

        // Mouse events are forwarded to the mesh controller; a true return
        // means the mesh changed and the widget must repaint.
        public override void OnMouseMove(MouseEventArgs mouseEvent)
        {
            if (m_mesh.OnMouseMove(mouseEvent))
            {
                Invalidate();
            }
            base.OnMouseMove(mouseEvent);
        }

        public override void OnMouseDown(MouseEventArgs mouseEvent)
        {
            if (m_mesh.OnMouseDown(mouseEvent))
            {
                Invalidate();
            }
            base.OnMouseDown(mouseEvent);
        }

        public override void OnMouseUp(MouseEventArgs mouseEvent)
        {
            if (m_mesh.OnMouseUp(mouseEvent))
            {
                Invalidate();
            }
            base.OnMouseUp(mouseEvent);
        }

        // Animation tick: jitter the vertices, cycle the colors, request a
        // repaint and re-queue itself, driving a continuous animation loop.
        public void OnIdle()
        {
            m_mesh.randomize_points(1.0);
            m_mesh.rotate_colors();
            Invalidate();
            UiThread.RunOnIdle(OnIdle);
        }

        [STAThread]
        public static void Main(string[] args)
        {
            AppWidgetFactory appWidget = new GouraudMeshShadingFactory();
            appWidget.CreateWidgetAndRunInWindow();
        }
    }

    // Demo-browser registration: supplies the widget plus its description text.
    public class GouraudMeshShadingFactory : AppWidgetFactory
    {
        public override GuiWidget NewWidget()
        {
            return new gouraud_mesh_application();
        }

        public override AppWidgetInfo GetAppParameters()
        {
            AppWidgetInfo appWidgetInfo = new AppWidgetInfo(
                "Vector",
                "Gouraud Mesh Shading",
                "Yet another example that demonstrates the power of compound shape rasterization. Here we create a " +
                "mesh of triangles and render them in one pass with multiple Gouraud shaders (span_gouraud_rgba). " +
                "The example demonstrates perfect Anti-Aliasing and perfect triangle stitching (seamless edges) at the same time.",
                400,
                400);
            return appWidgetInfo;
        }
    }
}
using System;
using Csla;
using SelfLoadSoftDelete.DataAccess;
using SelfLoadSoftDelete.DataAccess.ERLevel;

namespace SelfLoadSoftDelete.Business.ERLevel
{
    /// <summary>
    /// G05_SubContinent_Child (editable child object).<br/>
    /// This is a generated base class of <see cref="G05_SubContinent_Child"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="G04_SubContinent"/> collection.
    /// NOTE(review): generated code — put customizations in the partial-method
    /// hooks (OnCreate/OnFetch*/OnInsert*/OnUpdate*/OnDelete*) in the partial
    /// class file, not here.
    /// </remarks>
    [Serializable]
    public partial class G05_SubContinent_Child : BusinessBase<G05_SubContinent_Child>
    {
        #region State Fields

        // Optimistic-concurrency token round-tripped with the DAL; excluded
        // from n-level undo.
        [NotUndoable]
        private byte[] _rowVersion = new byte[] {};

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="SubContinent_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> SubContinent_Child_NameProperty = RegisterProperty<string>(p => p.SubContinent_Child_Name, "Sub Continent Child Name");
        /// <summary>
        /// Gets or sets the Sub Continent Child Name.
        /// </summary>
        /// <value>The Sub Continent Child Name.</value>
        public string SubContinent_Child_Name
        {
            get { return GetProperty(SubContinent_Child_NameProperty); }
            set { SetProperty(SubContinent_Child_NameProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="G05_SubContinent_Child"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="G05_SubContinent_Child"/> object.</returns>
        internal static G05_SubContinent_Child NewG05_SubContinent_Child()
        {
            return DataPortal.CreateChild<G05_SubContinent_Child>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="G05_SubContinent_Child"/> object, based on given parameters.
        /// </summary>
        /// <param name="parentSubContinent_ID1">The ParentSubContinent_ID1 parameter of the G05_SubContinent_Child to fetch.</param>
        /// <returns>A reference to the fetched <see cref="G05_SubContinent_Child"/> object.</returns>
        internal static G05_SubContinent_Child GetG05_SubContinent_Child(int parentSubContinent_ID1)
        {
            return DataPortal.FetchChild<G05_SubContinent_Child>(parentSubContinent_ID1);
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="G05_SubContinent_Child"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public G05_SubContinent_Child()
        {
            // Use factory methods and do not use direct creation.

            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="G05_SubContinent_Child"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // OnCreate fires before the base create so customizations can
            // pre-seed values.
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="G05_SubContinent_Child"/> object from the database, based on given criteria.
        /// </summary>
        /// <param name="parentSubContinent_ID1">The Parent Sub Continent ID1.</param>
        protected void Child_Fetch(int parentSubContinent_ID1)
        {
            var args = new DataPortalHookArgs(parentSubContinent_ID1);
            OnFetchPre(args);
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var dal = dalManager.GetProvider<IG05_SubContinent_ChildDal>();
                var data = dal.Fetch(parentSubContinent_ID1);
                Fetch(data);
            }
            OnFetchPost(args);
        }

        /// <summary>
        /// Loads a <see cref="G05_SubContinent_Child"/> object from the given <see cref="G05_SubContinent_ChildDto"/>.
        /// </summary>
        /// <param name="data">The G05_SubContinent_ChildDto to use.</param>
        private void Fetch(G05_SubContinent_ChildDto data)
        {
            // Value properties
            LoadProperty(SubContinent_Child_NameProperty, data.SubContinent_Child_Name);
            _rowVersion = data.RowVersion;
            var args = new DataPortalHookArgs(data);
            OnFetchRead(args);
        }

        /// <summary>
        /// Inserts a new <see cref="G05_SubContinent_Child"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(G04_SubContinent parent)
        {
            var dto = new G05_SubContinent_ChildDto();
            dto.Parent_SubContinent_ID = parent.SubContinent_ID;
            dto.SubContinent_Child_Name = SubContinent_Child_Name;
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IG05_SubContinent_ChildDal>();
                using (BypassPropertyChecks)
                {
                    // Capture the server-assigned RowVersion for later updates.
                    var resultDto = dal.Insert(dto);
                    _rowVersion = resultDto.RowVersion;
                    args = new DataPortalHookArgs(resultDto);
                }
                OnInsertPost(args);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="G05_SubContinent_Child"/> object.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(G04_SubContinent parent)
        {
            // Nothing changed — skip the round trip entirely.
            if (!IsDirty)
                return;

            var dto = new G05_SubContinent_ChildDto();
            dto.Parent_SubContinent_ID = parent.SubContinent_ID;
            dto.SubContinent_Child_Name = SubContinent_Child_Name;
            dto.RowVersion = _rowVersion;
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IG05_SubContinent_ChildDal>();
                using (BypassPropertyChecks)
                {
                    var resultDto = dal.Update(dto);
                    _rowVersion = resultDto.RowVersion;
                    args = new DataPortalHookArgs(resultDto);
                }
                OnUpdatePost(args);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="G05_SubContinent_Child"/> object from database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(G04_SubContinent parent)
        {
            using (var dalManager = DalFactorySelfLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IG05_SubContinent_ChildDal>();
                using (BypassPropertyChecks)
                {
                    // NOTE(review): the delete key is the parent's id — this
                    // matches the Fetch key (parentSubContinent_ID1), implying a
                    // one-child-per-parent relation. TODO confirm.
                    dal.Delete(parent.SubContinent_ID);
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Prism.Properties;

namespace Prism.Modularity
{
    /// <summary>
    /// Represents a group of <see cref="ModuleInfo"/> instances that are usually deployed together. <see cref="ModuleInfoGroup"/>s
    /// are also used by the <see cref="ModuleCatalog"/> to prevent common deployment problems such as having a module that's required
    /// at startup that depends on modules that will only be downloaded on demand.
    ///
    /// The group also forwards <see cref="Ref"/> and <see cref="InitializationMode"/> values to the <see cref="ModuleInfo"/>s that it
    /// contains.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
    public class ModuleInfoGroup : IModuleCatalogItem, IList<ModuleInfo>, IList // IList must be supported in Silverlight 2 to be able to add items from XAML
    {
        // Backing store; all IList/IList<T>/ICollection members delegate here.
        private readonly Collection<ModuleInfo> modules = new Collection<ModuleInfo>();

        /// <summary>
        /// Gets or sets the <see cref="ModuleInfo.InitializationMode"/> for the whole group. Any <see cref="ModuleInfo"/> classes that are
        /// added after setting this value will also get this <see cref="InitializationMode"/>.
        /// </summary>
        /// <see cref="ModuleInfo.InitializationMode"/>
        /// <value>The initialization mode.</value>
        public InitializationMode InitializationMode { get; set; }

        /// <summary>
        /// Gets or sets the <see cref="ModuleInfo.Ref"/> value for the whole group. Any <see cref="ModuleInfo"/> classes that are
        /// added after setting this value will also get this <see cref="Ref"/>.
        ///
        /// The ref value will also be used by the <see cref="IModuleManager"/> to determine which <see cref="IModuleTypeLoader"/> to use.
        /// For example, using an "file://" prefix with a valid URL will cause the FileModuleTypeLoader to be used
        /// (Only available in the desktop version of CAL).
        /// </summary>
        /// <see cref="ModuleInfo.Ref"/>
        /// <value>The ref value that will be used.</value>
        public string Ref { get; set; }

        /// <summary>
        /// Adds an <see cref="ModuleInfo"/> moduleInfo to the <see cref="ModuleInfoGroup"/>.
        /// </summary>
        /// <param name="item">The <see cref="ModuleInfo"/> to the <see cref="ModuleInfoGroup"/>.</param>
        public void Add(ModuleInfo item)
        {
            // Group-level Ref/InitializationMode are pushed down before storing.
            this.ForwardValues(item);
            this.modules.Add(item);
        }

        /// <summary>
        /// Forwards <see cref="InitializationMode"/> and <see cref="Ref"/> properties from this <see cref="ModuleInfoGroup"/>
        /// to <paramref name="moduleInfo"/>.
        /// </summary>
        /// <param name="moduleInfo">The module info to forward values to.</param>
        /// <exception cref="ArgumentNullException">An <see cref="ArgumentNullException"/> is thrown if <paramref name="moduleInfo"/> is <see langword="null"/>.</exception>
        protected void ForwardValues(ModuleInfo moduleInfo)
        {
            if (moduleInfo == null) throw new System.ArgumentNullException("moduleInfo");

            // Only fill in a Ref the module did not set itself.
            if (moduleInfo.Ref == null)
            {
                moduleInfo.Ref = this.Ref;
            }

            // WhenAvailable is the default, so only override a module that is
            // still at the default and only with a non-default group value.
            if (moduleInfo.InitializationMode == InitializationMode.WhenAvailable && this.InitializationMode != InitializationMode.WhenAvailable)
            {
                moduleInfo.InitializationMode = this.InitializationMode;
            }
        }

        /// <summary>
        /// Removes all <see cref="ModuleInfo"/>s from the <see cref="ModuleInfoGroup"/>.
        /// </summary>
        public void Clear()
        {
            this.modules.Clear();
        }

        /// <summary>
        /// Determines whether the <see cref="ModuleInfoGroup"/> contains a specific value.
        /// </summary>
        /// <param name="item">The object to locate in the <see cref="ModuleInfoGroup"/>.</param>
        /// <returns>
        /// true if <paramref name="item"/> is found in the <see cref="ModuleInfoGroup"/>; otherwise, false.
        /// </returns>
        public bool Contains(ModuleInfo item)
        {
            return this.modules.Contains(item);
        }

        /// <summary>
        /// Copies the elements of the <see cref="ModuleInfoGroup"/> to an <see cref="T:System.Array"/>, starting at a particular <see cref="T:System.Array"/> index.
        /// </summary>
        /// <param name="array">The one-dimensional <see cref="T:System.Array"/> that is the destination of the elements copied from <see cref="ModuleInfoGroup"/>. The <see cref="T:System.Array"/> must have zero-based indexing.</param>
        /// <param name="arrayIndex">The zero-based index in <paramref name="array"/> at which copying begins.</param>
        /// <exception cref="T:System.ArgumentNullException">
        /// <paramref name="array"/> is null.
        /// </exception>
        /// <exception cref="T:System.ArgumentOutOfRangeException">
        /// <paramref name="arrayIndex"/> is less than 0.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// <paramref name="array"/> is multidimensional.
        /// -or-
        /// <paramref name="arrayIndex"/> is equal to or greater than the length of <paramref name="array"/>.
        /// -or-
        /// The number of elements in the source <see cref="ModuleInfoGroup"/> is greater than the available space from <paramref name="arrayIndex"/> to the end of the destination <paramref name="array"/>.
        /// </exception>
        public void CopyTo(ModuleInfo[] array, int arrayIndex)
        {
            this.modules.CopyTo(array, arrayIndex);
        }

        /// <summary>
        /// Gets the number of elements contained in the <see cref="ModuleInfoGroup"/>.
        /// </summary>
        /// <value></value>
        /// <returns>
        /// The number of elements contained in the <see cref="ModuleInfoGroup"/>.
        /// </returns>
        public int Count
        {
            get { return this.modules.Count; }
        }

        /// <summary>
        /// Gets a value indicating whether the <see cref="ModuleInfoGroup"/> is read-only.
        /// </summary>
        /// <value></value>
        /// <returns>false, because the <see cref="ModuleInfoGroup"/> is not Read-Only.
        /// </returns>
        public bool IsReadOnly
        {
            get { return false; }
        }

        /// <summary>
        /// Removes the first occurrence of a specific object from the <see cref="ModuleInfoGroup"/>.
/// </summary> /// <param name="item">The object to remove from the <see cref="ModuleInfoGroup"/>.</param> /// <returns> /// true if <paramref name="item"/> was successfully removed from the <see cref="ModuleInfoGroup"/>; otherwise, false. This method also returns false if <paramref name="item"/> is not found in the original <see cref="ModuleInfoGroup"/>. /// </returns> public bool Remove(ModuleInfo item) { return this.modules.Remove(item); } /// <summary> /// Returns an enumerator that iterates through the collection. /// </summary> /// <returns> /// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection. /// </returns> public IEnumerator<ModuleInfo> GetEnumerator() { return this.modules.GetEnumerator(); } /// <summary> /// Returns an enumerator that iterates through a collection. /// </summary> /// <returns> /// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection. /// </returns> IEnumerator IEnumerable.GetEnumerator() { return this.GetEnumerator(); } /// <summary> /// Adds an item to the <see cref="ModuleInfoGroup"/>. /// </summary> /// <param name="value"> /// The <see cref="T:System.Object"/> to add to the <see cref="ModuleInfoGroup"/>. /// Must be of type <see cref="ModuleInfo"/> /// </param> /// <returns> /// The position into which the new element was inserted. /// </returns> int IList.Add(object value) { this.Add((ModuleInfo)value); return 1; } /// <summary> /// Determines whether the <see cref="ModuleInfoGroup"/> contains a specific value. /// </summary> /// <param name="value"> /// The <see cref="T:System.Object"/> to locate in the <see cref="ModuleInfoGroup"/>. /// Must be of type <see cref="ModuleInfo"/> /// </param> /// <returns> /// true if the <see cref="T:System.Object"/> is found in the <see cref="ModuleInfoGroup"/>; otherwise, false. 
/// </returns> bool IList.Contains(object value) { if (value == null) throw new ArgumentNullException("value"); ModuleInfo moduleInfo = value as ModuleInfo; if (moduleInfo == null) throw new ArgumentException(Resources.ValueMustBeOfTypeModuleInfo, "value"); return this.Contains(moduleInfo); } /// <summary> /// Determines the index of a specific item in the <see cref="ModuleInfoGroup"/>. /// </summary> /// <param name="value"> /// The <see cref="T:System.Object"/> to locate in the <see cref="ModuleInfoGroup"/>. /// Must be of type <see cref="ModuleInfo"/> /// </param> /// <returns> /// The index of <paramref name="value"/> if found in the list; otherwise, -1. /// </returns> public int IndexOf(object value) { return this.modules.IndexOf((ModuleInfo)value); } /// <summary> /// Inserts an item to the <see cref="ModuleInfoGroup"/> at the specified index. /// </summary> /// <param name="index">The zero-based index at which <paramref name="value"/> should be inserted.</param> /// <param name="value"> /// The <see cref="T:System.Object"/> to insert into the <see cref="ModuleInfoGroup"/>. /// Must be of type <see cref="ModuleInfo"/> /// </param> /// <exception cref="T:System.ArgumentOutOfRangeException"> /// <paramref name="index"/> is not a valid index in the <see cref="ModuleInfoGroup"/>. /// </exception> /// <exception cref="ArgumentNullException"> /// If <paramref name="value"/> is null. /// </exception> /// <exception cref="ArgumentException"> /// If <paramref name="value"/> is not of type <see cref="ModuleInfo"/> /// </exception> public void Insert(int index, object value) { if (value == null) throw new ArgumentNullException("value"); ModuleInfo moduleInfo = value as ModuleInfo; if (moduleInfo == null) throw new ArgumentException(Resources.ValueMustBeOfTypeModuleInfo, "value"); this.modules.Insert(index, moduleInfo); } /// <summary> /// Gets a value indicating whether the <see cref="ModuleInfoGroup"/> has a fixed size. 
/// </summary> /// <returns>false, because the <see cref="ModuleInfoGroup"/> does not have a fixed length. /// </returns> public bool IsFixedSize { get { return false; } } /// <summary> /// Removes the first occurrence of a specific object from the <see cref="ModuleInfoGroup"/>. /// </summary> /// <param name="value"> /// The <see cref="T:System.Object"/> to remove from the <see cref="ModuleInfoGroup"/>. /// Must be of type <see cref="ModuleInfo"/> /// </param> void IList.Remove(object value) { this.Remove((ModuleInfo)value); } /// <summary> /// Removes the <see cref="T:System.Collections.Generic.IList`1"/> item at the specified index. /// </summary> /// <param name="index">The zero-based index of the item to remove.</param> /// <exception cref="T:System.ArgumentOutOfRangeException"> /// <paramref name="index"/> is not a valid index in the <see cref="T:System.Collections.Generic.IList`1"/>. /// </exception> /// <exception cref="T:System.NotSupportedException"> /// The <see cref="T:System.Collections.Generic.IList`1"/> is read-only. /// </exception> public void RemoveAt(int index) { this.modules.RemoveAt(index); } /// <summary> /// Gets or sets the <see cref="System.Object"/> at the specified index. /// </summary> /// <value></value> object IList.this[int index] { get { return this[index]; } set { this[index] = (ModuleInfo)value; } } /// <summary> /// Copies the elements of the <see cref="T:System.Collections.ICollection"/> to an <see cref="T:System.Array"/>, starting at a particular <see cref="T:System.Array"/> index. /// </summary> /// <param name="array">The one-dimensional <see cref="T:System.Array"/> that is the destination of the elements copied from <see cref="T:System.Collections.ICollection"/>. 
        /// The <see cref="T:System.Array"/> must have zero-based indexing.</param>
        /// <param name="index">The zero-based index in <paramref name="array"/> at which copying begins.</param>
        /// <exception cref="T:System.ArgumentNullException">
        /// <paramref name="array"/> is null.
        /// </exception>
        /// <exception cref="T:System.ArgumentOutOfRangeException">
        /// <paramref name="index"/> is less than zero.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// <paramref name="array"/> is multidimensional.
        /// -or-
        /// <paramref name="index"/> is equal to or greater than the length of <paramref name="array"/>.
        /// -or-
        /// The number of elements in the source <see cref="T:System.Collections.ICollection"/> is greater than the available space from <paramref name="index"/> to the end of the destination <paramref name="array"/>.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// The type of the source <see cref="T:System.Collections.ICollection"/> cannot be cast automatically to the type of the destination <paramref name="array"/>.
        /// </exception>
        void ICollection.CopyTo(Array array, int index)
        {
            ((ICollection)this.modules).CopyTo(array, index);
        }

        /// <summary>
        /// Gets a value indicating whether access to the <see cref="T:System.Collections.ICollection"/> is synchronized (thread safe).
        /// </summary>
        /// <value></value>
        /// <returns>true if access to the <see cref="T:System.Collections.ICollection"/> is synchronized (thread safe); otherwise, false.
        /// </returns>
        public bool IsSynchronized
        {
            get { return ((ICollection)this.modules).IsSynchronized; }
        }

        /// <summary>
        /// Gets an object that can be used to synchronize access to the <see cref="T:System.Collections.ICollection"/>.
        /// </summary>
        /// <value></value>
        /// <returns>
        /// An object that can be used to synchronize access to the <see cref="T:System.Collections.ICollection"/>.
        /// </returns>
        public object SyncRoot
        {
            get { return ((ICollection)this.modules).SyncRoot; }
        }

        /// <summary>
        /// Determines the index of a specific item in the <see cref="T:System.Collections.Generic.IList`1"/>.
        /// </summary>
        /// <param name="item">The object to locate in the <see cref="T:System.Collections.Generic.IList`1"/>.</param>
        /// <returns>
        /// The index of <paramref name="item"/> if found in the list; otherwise, -1.
        /// </returns>
        public int IndexOf(ModuleInfo item)
        {
            return this.modules.IndexOf(item);
        }

        /// <summary>
        /// Inserts an item to the <see cref="T:System.Collections.Generic.IList`1"/> at the specified index.
        /// </summary>
        /// <param name="index">The zero-based index at which <paramref name="item"/> should be inserted.</param>
        /// <param name="item">The object to insert into the <see cref="T:System.Collections.Generic.IList`1"/>.</param>
        /// <exception cref="T:System.ArgumentOutOfRangeException">
        /// <paramref name="index"/> is not a valid index in the <see cref="T:System.Collections.Generic.IList`1"/>.
        /// </exception>
        public void Insert(int index, ModuleInfo item)
        {
            // NOTE(review): unlike Add, Insert does not call ForwardValues, so
            // an inserted module does not inherit the group's
            // Ref/InitializationMode — confirm whether this asymmetry is
            // intentional.
            this.modules.Insert(index, item);
        }

        /// <summary>
        /// Gets or sets the <see cref="ModuleInfo"/> at the specified index.
        /// </summary>
        /// <value>The <see cref="ModuleInfo"/> at the specified index </value>
        public ModuleInfo this[int index]
        {
            get { return this.modules[index]; }
            set { this.modules[index] = value; }
        }
    }
}
// // AssemblyParser.cs // // Author: // Aaron Bockover <abock@rd.io> // Stephane Delcroix <stephane@delcroix.org> // // Copyright 2012 Rdio, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.Collections;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using System.Resources;
using System.Reflection;

using Mono.Cecil;
using Mono.Cecil.Cil;

using Vernacular.Analyzers;
using Vernacular.Tool;

namespace Vernacular.Parsers
{
    // Extracts localizable strings from compiled assemblies by scanning IL for
    // calls to the Vernacular Catalog localization methods.  Embedded resources
    // found inside the assembly are forwarded to an optional secondary parser.
    public sealed class AssemblyParser : Parser
    {
        // Fully-qualified names of the types whose static methods count as
        // localization entry points.
        private static readonly List<string> LOCALIZATION_TYPES = new List<string> {
            "Vernacular.Catalog",
            "Vernacular.Xaml.Catalog"
        };

        // Method names (on the types above) that are treated as localization calls.
        private static readonly List<string> LOCALIZATION_METHODS = new List<string> {
            "GetString",
            "GetPluralString",
            "GetGenderString",
            "GetPluralGenderString"
        };

        // Optional parser that handles embedded resources (e.g. XAML) found in
        // the assemblies; may be null, in which case resources are ignored.
        private readonly Parser embedded_resource_parser;
        private readonly List<ModuleDefinition> modules = new List<ModuleDefinition> ();
        // Resolved Catalog.* method definitions collected from all added modules.
        private readonly List<MethodDefinition> localization_methods = new List<MethodDefinition> ();

        public AssemblyParser (Parser embeddedResourceParser = null)
        {
            embedded_resource_parser = embeddedResourceParser;
        }

        public override IEnumerable<string> SupportedFileExtensions {
            get {
                yield return ".exe";
                yield return ".dll";
            }
        }

        // Registers a module: loads debug symbols when available (needed for
        // sequence points / source references), forwards embedded resources,
        // and records any localization methods the module defines.
        private void Add (ModuleDefinition module)
        {
            try {
                module.ReadSymbols();
            } catch (FileNotFoundException) {
                // no .pdb/.mdb next to the assembly — proceed without symbols
            } catch (InvalidOperationException) {
                // symbols unreadable for this module — proceed without symbols
            }

            foreach (var resource in from res in module.Resources
                where res.ResourceType == ResourceType.Embedded
                select res as EmbeddedResource) {
                AddResource (resource);
            }

            modules.Add (module);
            LocateLocalizationMethods (module);
        }

        // Routes an embedded resource to the secondary parser: .resources
        // containers are unpacked entry-by-entry, other resources are handed
        // over whole when their extension is supported.
        private void AddResource (EmbeddedResource resource)
        {
            if (embedded_resource_parser == null) {
                return;
            }

            if (Path.GetExtension (resource.Name) == ".resources") {
                using (var reader = new ResourceReader (resource.GetResourceStream ())) {
                    foreach (DictionaryEntry re in reader) {
                        if (embedded_resource_parser.SupportedFileExtensions.Contains (Path.GetExtension (re.Key as string))) {
                            embedded_resource_parser.Add (re.Value as Stream, re.Key as string);
                        }
                    }
                }
            } else if (embedded_resource_parser.SupportedFileExtensions.Contains (Path.GetExtension(resource.Name))) {
                embedded_resource_parser.Add (resource.GetResourceStream (), resource.Name);
            }
        }

        public override void Add (string path)
        {
            var module = ModuleDefinition.ReadModule (path);
            Add (module);
        }

        public override void Add (Stream stream, string path)
        {
            var module = ModuleDefinition.ReadModule (stream);
            Add (module);
        }

        public override IEnumerable<ILocalizationUnit> Parse ()
        {
            return from module in modules
                from localization_unit in Parse (module)
                select localization_unit;
        }

        // Walks every method body in the module (skipping the Catalog types
        // themselves) and yields the localized strings found in each.
        private IEnumerable<LocalizedString> Parse (ModuleDefinition module)
        {
            return from type in module.GetTypes()
                where !LOCALIZATION_TYPES.Contains (type.FullName)
                from method in type.Methods
                where method.HasBody && method.Body.Instructions.Count > 0
                from invocation in ParseLocalizedStringInvocations (method.Body.Instructions [0])
                from localized_string in ParseLocalizedStringInvocation (invocation)
                select localized_string;
        }

        // Collects the public static Catalog methods defined by this module so
        // call sites can later be matched against them.
        // NOTE(review): this uses module.Types (top-level types only) while
        // Parse uses module.GetTypes() (includes nested types) — confirm the
        // Catalog types are never nested.
        private void LocateLocalizationMethods (ModuleDefinition module)
        {
            localization_methods.AddRange (
                from type in module.Types
                where LOCALIZATION_TYPES.Contains (type.FullName)
                from method in type.Methods
                where method.IsStatic && method.IsPublic && method.HasBody && LOCALIZATION_METHODS.Contains (method.Name)
                select method
            );
        }

        // Matches a call-site MethodReference against the known localization
        // method definitions by full name, return type, and parameter types;
        // returns null when the call is not a localization call.
        private MethodDefinition ResolveLocalizationMethodReference (MethodReference methodReference)
        {
            if (methodReference == null) {
                return null;
            }

            return localization_methods.Find (method_definition => {
                if (method_definition.FullName != methodReference.FullName ||
                    method_definition.ReturnType.FullName != methodReference.ReturnType.FullName ||
                    !method_definition.HasParameters || !methodReference.HasParameters ||
                    method_definition.Parameters.Count != methodReference.Parameters.Count) {
                    return false;
                }

                for (int i = 0; i < method_definition.Parameters.Count; i++) {
                    if (method_definition.Parameters [i].ParameterType.FullName != methodReference.Parameters [i].ParameterType.FullName) {
                        return false;
                    }
                }

                return true;
            });
        }

        // Only string loads (ldstr) and null loads (ldnull) are interesting as
        // potential localization-method arguments.  The memory parameter is
        // currently unused.
        private bool RetainInstruction (Instruction instruction, Dictionary<string, Instruction> memory)
        {
            return instruction.OpCode == OpCodes.Ldstr || instruction.OpCode == OpCodes.Ldnull;
        }

        // A single recognized call to a localization method, together with the
        // retained argument-load instructions, the full instruction window, the
        // nearest preceding sequence point, and the resolved target method.
        private class LocalizedStringInvocation
        {
            public List<Instruction> Instructions;
            public List<Instruction> AllInstructions;
            public SequencePoint SequencePoint;
            public MethodDefinition Method;
        }

        // Linear scan over an instruction stream that tracks string/null loads
        // (including ones that round-trip through locals via store/load pairs)
        // and yields an invocation record each time a call to a localization
        // method is encountered.  State is reset after each yielded invocation.
        private IEnumerable<LocalizedStringInvocation> ParseLocalizedStringInvocations (Instruction startInstruction)
        {
            SequencePoint last_sequence_point = null;
            // "memory" maps a store location name to the instruction whose
            // result was stored there, emulating locals at scan time.
            var memory = new Dictionary<string, Instruction> ();
            var retained_instructions = new List<Instruction> ();
            var all_instructions = new List<Instruction> ();

            var instruction = startInstruction;
            while (instruction != null) {
                all_instructions.Add (instruction);

                if (instruction.SequencePoint != null) {
                    last_sequence_point = instruction.SequencePoint;
                }

                string location;
                if (instruction.OpCode != OpCodes.Call && instruction.Next != null &&
                    instruction.Next.IsStoreInstruction (out location)) {
                    // value produced here is stored into a local: remember the
                    // producing instruction and skip past the store
                    memory [location] = instruction;
                    instruction = instruction.Next.Next;
                    continue;
                } else if (instruction.IsLoadInstruction (out location)) {
                    // a previously stored value is loaded back: retain its
                    // original producing instruction if it was a string/null load
                    Instruction stored_instruction;
                    if (memory.TryGetValue (location, out stored_instruction)) {
                        memory.Remove (location);
                        if (RetainInstruction (stored_instruction, memory)) {
                            retained_instructions.Add (stored_instruction);
                        }
                    }
                } else if (RetainInstruction (instruction, memory)) {
                    retained_instructions.Add (instruction);
                } else if (instruction.OpCode == OpCodes.Call) {
                    var localization_call = ResolveLocalizationMethodReference (instruction.Operand as MethodReference);
                    if (localization_call != null) {
                        yield return new LocalizedStringInvocation {
                            Instructions = new List<Instruction> (retained_instructions),
                            AllInstructions = new List<Instruction> (all_instructions),
                            SequencePoint = last_sequence_point,
                            Method = localization_call
                        };

                        // reset scan state for the next invocation
                        memory.Clear ();
                        retained_instructions.Clear ();
                        all_instructions.Clear ();
                        last_sequence_point = null;
                    }
                }

                instruction = instruction.Next;
            }
        }

        // Pairs the retained string loads with the localization method's string
        // parameters (walking parameters in reverse, stack order), then yields
        // the LocalizedStrings generated from those pairs.
        private IEnumerable<LocalizedString> ParseLocalizedStringInvocation (LocalizedStringInvocation invocation)
        {
            var strings = new Stack<KeyValuePair<string, string>> ();

            Log ("+ {0}", invocation.Method);
            if (invocation.SequencePoint != null) {
                Log (" @ {0}:{1}", RelativeDocumentUrl (invocation.SequencePoint.Document.Url), invocation.SequencePoint.StartLine);
            }

            foreach (var instruction in invocation.AllInstructions) {
                Log (true, " | {0}", instruction);
            }

            Log (true, " |".PadRight (70, '-'));

            int i = invocation.Instructions.Count;
            foreach (var param in invocation.Method.Parameters.Reverse ()) {
                if (param.ParameterType.FullName == "System.String" && i > 0) {
                    var instruction = invocation.Instructions [--i];
                    if (instruction.OpCode == OpCodes.Ldnull || instruction.OpCode == OpCodes.Ldstr) {
                        strings.Push (new KeyValuePair<string, string> (
                            param.Name, instruction.Operand as string
                        ));
                    }
                    Log (" | [{0}]: {1}", param.Name, instruction);
                }
            }

            // Detect if the GetString call is nested (an argument to) in a
            // String.Format call, which is a warning (Catalog.Format should be
            // used since it will never throw an exception).
            var call_instruction = invocation.AllInstructions [invocation.AllInstructions.Count - 1];
            var is_string_format = false;

            if (call_instruction.Next != null && call_instruction.Next.Next != null &&
                call_instruction.Next.OpCode == OpCodes.Ldarg_0 &&
                call_instruction.Next.Next.OpCode == OpCodes.Call) {
                var string_format_call = call_instruction.Next.Next.Operand as MethodReference;
                if (string_format_call != null &&
                    string_format_call.DeclaringType.FullName == "System.String" &&
                    string_format_call.Name == "Format") {
                    is_string_format = true;
                }
            }

            Log (true, " |".PadRight (70, '-'));

            foreach (var @string in GenerateLocalizedStrings (invocation.SequencePoint, strings, invocation.Method.Name.Contains("Gender"))) {
                Log (" | {0}", @string);
                if (is_string_format) {
                    @string.Warnings.Add ("String.Format is unsafe - use Catalog.Format instead");
                }
                yield return @string;
            }

            Log ();
        }

        // Builds up to three LocalizedStrings (neutral/masculine/feminine) from
        // the named string parameters of a localization call; only variants
        // that ended up with at least one value defined are yielded.
        private IEnumerable<LocalizedString> GenerateLocalizedStrings (SequencePoint sequencePoint, Stack<KeyValuePair<string, string>> parameters, bool gendered)
        {
            var neutral = CreateLocalizedString (LanguageGender.Neutral, sequencePoint);
            var masculine = CreateLocalizedString (LanguageGender.Masculine, sequencePoint);
            var feminine = CreateLocalizedString (LanguageGender.Feminine, sequencePoint);

            while (parameters.Count > 0) {
                var param = parameters.Pop ();
                switch (param.Key) {
                    case "comment":
                        neutral.DeveloperComments = param.Value;
                        masculine.DeveloperComments = param.Value;
                        feminine.DeveloperComments = param.Value;
                        break;
                    case "message":
                    case "singularMessage":
                        // for gendered calls the generic message applies to
                        // both gender variants as well as the neutral one
                        if (gendered) {
                            masculine.UntranslatedSingularValue = param.Value;
                            feminine.UntranslatedSingularValue = param.Value;
                        }
                        neutral.UntranslatedSingularValue = param.Value;
                        break;
                    case "pluralMessage":
                        if (gendered) {
                            masculine.UntranslatedPluralValue = param.Value;
                            feminine.UntranslatedPluralValue = param.Value;
                        }
                        neutral.UntranslatedPluralValue = param.Value;
                        break;
                    case "masculineMessage":
                    case "singularMasculineMessage":
                        masculine.UntranslatedSingularValue = param.Value;
                        break;
                    case "pluralMasculineMessage":
                        masculine.UntranslatedPluralValue = param.Value;
                        break;
                    case "feminineMessage":
                    case "singularFeminineMessage":
                        feminine.UntranslatedSingularValue = param.Value;
                        break;
                    case "pluralFeminineMessage":
                        feminine.UntranslatedPluralValue = param.Value;
                        break;
                }
            }

            if (neutral.IsDefined) {
                yield return neutral;
            }

            if (masculine.IsDefined) {
                yield return masculine;
            }

            if (feminine.IsDefined) {
                yield return feminine;
            }
        }

        // Creates an empty LocalizedString for the given gender, attaching a
        // source reference when a sequence point is available.
        private LocalizedString CreateLocalizedString (LanguageGender gender, SequencePoint sequencePoint)
        {
            var localized_string = new LocalizedString {
                Gender = gender
            };

            if (sequencePoint != null) {
                localized_string.AddReference (RelativeDocumentUrl (sequencePoint.Document.Url), sequencePoint.StartLine);
            }

            // NOTE(review): both Untranslated*Value properties are still null
            // at this point, so this check can never set StringFormatHint here —
            // confirm whether it was intended to run after the values are
            // assigned in GenerateLocalizedStrings.
            if (StringAnalyzer.CheckFormatArguments (localized_string.UntranslatedSingularValue) ||
                StringAnalyzer.CheckFormatArguments (localized_string.UntranslatedPluralValue)) {
                localized_string.StringFormatHint = "csharp-format";
            }

            return localized_string;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;

namespace GameProject
{
    /// <remarks>
    /// A number tile
    /// </remarks>
    class NumberTile
    {
        #region Fields

        // original length of each side of the tile
        int originalSideLength;

        // whether or not this tile is the correct number
        bool isCorrectNumber;

        // drawing support
        Texture2D texture;
        Rectangle drawRectangle;
        Rectangle sourceRectangle;
        Texture2D blinkingTexture;
        Texture2D currentTexture;

        // blinking support
        const int TOTAL_BLINK_MILLISECONDS = 2000;
        int elapsedBlinkMilliseconds = 0;
        const int FRAME_BLINK_MILLISECONDS = 1000;
        int elapsedFrameMilliseconds = 0;

        // shrinking support
        const int TOTAL_SHRINK_MILLISECONDS = 4000;
        int elapsedShrinkMilliseconds = 0;

        // fields if the tile is visible, if the tile is blinking and if the tile is shrinking
        bool isVisible = true;
        bool isBlinking = false;
        bool isShrinking = false;

        // clickStarted and buttonReleased fields
        bool buttonReleased = false;
        bool clickStarted = false;

        // audio support
        SoundBank soundBank;

        #endregion

        #region Constructors

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="contentManager">the content manager</param>
        /// <param name="center">the center of the tile</param>
        /// <param name="sideLength">the side length for the tile</param>
        /// <param name="number">the number for the tile</param>
        /// <param name="correctNumber">the correct number</param>
        /// <param name="soundBank">the sound bank for playing cues</param>
        public NumberTile(ContentManager contentManager, Vector2 center, int sideLength,
            int number, int correctNumber, SoundBank soundBank)
        {
            // set original side length field
            this.originalSideLength = sideLength;

            // set sound bank field
            this.soundBank = soundBank;

            // load content for the tile and create draw rectangle
            LoadContent(contentManager, number);
            drawRectangle = new Rectangle((int)center.X - sideLength / 2,
                (int)center.Y - sideLength / 2, sideLength, sideLength);

            // set isCorrectNumber flag
            isCorrectNumber = number == correctNumber;
        }

        #endregion

        #region Public methods

        /// <summary>
        /// Updates the tile based on game time and mouse state
        /// </summary>
        /// <param name="gameTime">the current GameTime</param>
        /// <param name="mouse">the current mouse state</param>
        /// <returns>true when the blink animation for the correct number has finished, false otherwise</returns>
        public bool Update(GameTime gameTime, MouseState mouse)
        {
            // handle blinking
            if (isBlinking)
            {
                elapsedBlinkMilliseconds += gameTime.ElapsedGameTime.Milliseconds;
                if (elapsedBlinkMilliseconds < TOTAL_BLINK_MILLISECONDS)
                {
                    elapsedFrameMilliseconds += gameTime.ElapsedGameTime.Milliseconds;

                    // BUG FIX: the original toggled the frame only while the
                    // frame timer was still BELOW the threshold and then reset
                    // the timer to zero on every update, so the blink animation
                    // never alternated frames. Toggle once the frame duration
                    // has elapsed and carry the remainder over to the next frame.
                    if (elapsedFrameMilliseconds >= FRAME_BLINK_MILLISECONDS)
                    {
                        elapsedFrameMilliseconds -= FRAME_BLINK_MILLISECONDS;

                        // flip between the two frames in the blinking texture
                        if (sourceRectangle.X == 0)
                        {
                            sourceRectangle.X = currentTexture.Width / 2;
                        }
                        else
                        {
                            sourceRectangle.X = 0;
                        }
                    }
                }
                else
                {
                    // blinking finished: hide the tile and report the correct guess
                    isVisible = false;
                    return true;
                }
            }
            // handle shrinking
            else if (isShrinking)
            {
                elapsedShrinkMilliseconds += gameTime.ElapsedGameTime.Milliseconds;

                // shrink linearly from the original size down to zero
                int tileSideLength = (int)(originalSideLength *
                    ((float)(TOTAL_SHRINK_MILLISECONDS - elapsedShrinkMilliseconds) / TOTAL_SHRINK_MILLISECONDS));
                if (tileSideLength > 0)
                {
                    drawRectangle.Width = tileSideLength;
                    drawRectangle.Height = tileSideLength;
                }
                else
                {
                    // fully shrunk: stop drawing the tile
                    isVisible = false;
                }
            }
            else
            {
                // idle: highlight on hover and handle the press/release click cycle
                if (drawRectangle.Contains(mouse.X, mouse.Y))
                {
                    // show the highlighted (right-hand) frame
                    sourceRectangle.X = texture.Width / 2;

                    if (mouse.LeftButton == ButtonState.Pressed && buttonReleased)
                    {
                        clickStarted = true;
                        buttonReleased = false;
                    }
                    else if (mouse.LeftButton == ButtonState.Released)
                    {
                        buttonReleased = true;

                        // if click finished on button, change game state
                        if (clickStarted)
                        {
                            // check for blinking or shrinking
                            if (isCorrectNumber)
                            {
                                isBlinking = true;

                                // changing the current texture
                                currentTexture = blinkingTexture;
                                sourceRectangle.X = 0;

                                // play correct tile sound
                                soundBank.PlayCue("correctGuess");
                            }
                            else
                            {
                                isShrinking = true;

                                // play incorrect tile sound
                                soundBank.PlayCue("incorrectGuess");
                            }
                        }
                    }
                }
                else
                {
                    // mouse left the tile: revert to the normal frame and
                    // cancel any in-progress click
                    sourceRectangle.X = 0;
                    clickStarted = false;
                    buttonReleased = false;
                }
            }

            // if we get here, return false
            return false;
        }

        /// <summary>
        /// Draws the number tile
        /// </summary>
        /// <param name="spriteBatch">the SpriteBatch to use for the drawing</param>
        public void Draw(SpriteBatch spriteBatch)
        {
            // draw the tile
            if (isVisible)
            {
                spriteBatch.Draw(currentTexture, drawRectangle, sourceRectangle, Color.White);
            }
        }

        #endregion

        #region Private methods

        /// <summary>
        /// Loads the content for the tile
        /// </summary>
        /// <param name="contentManager">the content manager</param>
        /// <param name="number">the tile number</param>
        private void LoadContent(ContentManager contentManager, int number)
        {
            // convert the number to a string
            string numberString = ConvertIntToString(number);

            // load content for the tile and set source rectangle.
            // ROBUSTNESS FIX: the original used drawRectangle.X/Y as the source
            // origin; that only worked because LoadContent runs before the
            // constructor assigns drawRectangle (so both were 0). Use an
            // explicit (0, 0) origin so the code no longer depends on call order.
            texture = contentManager.Load<Texture2D>(numberString);
            sourceRectangle = new Rectangle(0, 0, texture.Width / 2, texture.Height);

            // load content for the blinking texture
            blinkingTexture = contentManager.Load<Texture2D>("blinking" + numberString);

            // setting the current texture to non-blinking texture
            currentTexture = texture;
        }

        /// <summary>
        /// Converts an integer to a string for the corresponding number
        /// </summary>
        /// <param name="number">the integer to convert</param>
        /// <returns>the string for the corresponding number</returns>
        private String ConvertIntToString(int number)
        {
            switch (number)
            {
                case 1: return "one";
                case 2: return "two";
                case 3: return "three";
                case 4: return "four";
                case 5: return "five";
                case 6: return "six";
                case 7: return "seven";
                case 8: return "eight";
                case 9: return "nine";
                default: throw new Exception("Unsupported number for number tile");
            }
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;

/// <summary>
/// Int64.System.IConvertible.ToSByte(IFormatProvider)
/// </summary>
public class Int64IConvertibleToSByte
{
    public static int Main()
    {
        Int64IConvertibleToSByte ui64IContSByte = new Int64IConvertibleToSByte();
        TestLibrary.TestFramework.BeginTestCase("Int64IConvertibleToSByte");

        if (ui64IContSByte.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[PosTest]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        retVal = PosTest4() && retVal;
        TestLibrary.TestFramework.LogInformation("[NegTest]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;
        return retVal;
    }

    #region PositiveTest
    // SByte.MaxValue (127) converts without loss.
    public bool PosTest1()
    {
        bool retVal = true;
        CultureInfo myculture = new CultureInfo("en-us");
        IFormatProvider provider = myculture.NumberFormat;
        TestLibrary.TestFramework.BeginScenario("PosTest1:The Int64 value which is in the range of SByte IConvertible To SByte 1");
        try
        {
            long int64A = (long)SByte.MaxValue;
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(provider);
            if (sbyteA != 127)
            {
                TestLibrary.TestFramework.LogError("001", "the ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // SByte.MinValue (-128) converts without loss; null provider is accepted.
    public bool PosTest2()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest2:The Int64 value which is in the range of SByte IConvertible To SByte 2");
        try
        {
            long int64A = (long)SByte.MinValue;
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(null);
            if (sbyteA != (SByte)int64A)
            {
                TestLibrary.TestFramework.LogError("003", "the ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Random non-negative in-range value with a non-default culture provider.
    public bool PosTest3()
    {
        bool retVal = true;
        CultureInfo myculture = new CultureInfo("el-GR");
        IFormatProvider provider = myculture.NumberFormat;
        TestLibrary.TestFramework.BeginScenario("PosTest3:The Int64 value which is in the range of SByte IConvertible To SByte 3");
        try
        {
            long int64A = this.GetInt32(0, 127);
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(provider);
            if (sbyteA != (SByte)int64A)
            {
                TestLibrary.TestFramework.LogError("005", "the ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Random negative in-range value ([-128, -1]).
    public bool PosTest4()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest4:The Int64 value which is in the range of SByte IConvertible To SByte 4");
        try
        {
            long int64A = this.GetInt32(1, 129) * (-1);
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(null);
            if (sbyteA != (SByte)int64A)
            {
                TestLibrary.TestFramework.LogError("007", "the ActualResult is not the ExpectResult");
                retVal = false;
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion

    #region NegativeTest
    // Int64.MaxValue must overflow.
    public bool NegTest1()
    {
        bool retVal = true;
        CultureInfo myculture = new CultureInfo("en-us");
        IFormatProvider provider = myculture.NumberFormat;
        TestLibrary.TestFramework.BeginScenario("NegTest1:The Int64 value which is out the range of SByte IConvertible To SByte 1");
        try
        {
            long int64A = Int64.MaxValue;
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(provider);
            retVal = false;
            TestLibrary.TestFramework.LogError("N001", "Int64 value out of the range of SByte but not throw exception");
        }
        catch (OverflowException) { }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("N002", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Random value forced above SByte.MaxValue must overflow.
    public bool NegTest2()
    {
        bool retVal = true;
        CultureInfo myculture = new CultureInfo("en-us");
        IFormatProvider provider = myculture.NumberFormat;
        TestLibrary.TestFramework.BeginScenario("NegTest2:The Int64 value which is out the range of SByte IConvertible To SByte 2");
        try
        {
            long int64A = TestLibrary.Generator.GetInt64(-55);
            // BUG FIX: the original adjustment (int64A + 127) could yield 127,
            // which is still within SByte range, so no OverflowException would
            // be thrown and this negative test would fail spuriously. Clamp to
            // a value that is guaranteed to be above SByte.MaxValue.
            if (int64A <= (long)SByte.MaxValue)
            {
                int64A = (long)SByte.MaxValue + 1;
            }
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(provider);
            retVal = false;
            TestLibrary.TestFramework.LogError("N004", "Int64 value out of the range of Byte but not throw exception");
        }
        catch (OverflowException) { }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("N005", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Random value forced below SByte.MinValue must overflow.
    public bool NegTest3()
    {
        bool retVal = true;
        CultureInfo myculture = new CultureInfo("en-us");
        IFormatProvider provider = myculture.NumberFormat;
        TestLibrary.TestFramework.BeginScenario("NegTest3:The Int64 value which is out the range of Byte IConvertible To Byte 3");
        try
        {
            long int64A = TestLibrary.Generator.GetInt64(-55);
            if (int64A <= 128)
            {
                int64A = (int64A + 129) * (-1);
            }
            else
            {
                int64A = int64A * (-1);
            }
            // BUG FIX: when the generator returns a negative value the
            // adjustment above can land back inside SByte range (e.g. -5
            // becomes -124), making this negative test fail spuriously.
            // Guarantee a value strictly below SByte.MinValue.
            if (int64A >= (long)SByte.MinValue)
            {
                int64A = (long)SByte.MinValue - 1;
            }
            IConvertible iConvert = (IConvertible)(int64A);
            sbyte sbyteA = iConvert.ToSByte(provider);
            retVal = false;
            TestLibrary.TestFramework.LogError("N003", "Int64 value out of the range of Byte but not throw exception");
        }
        catch (OverflowException) { }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("N004", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion

    #region HelpMethod
    // Returns a value in [minValue, maxValue); falls back to minValue when the
    // bounds are equal or inverted.
    private Int32 GetInt32(Int32 minValue, Int32 maxValue)
    {
        try
        {
            if (minValue == maxValue)
            {
                return minValue;
            }
            if (minValue < maxValue)
            {
                return minValue + TestLibrary.Generator.GetInt32(-55) % (maxValue - minValue);
            }
        }
        catch
        {
            throw;
        }
        return minValue;
    }
    #endregion
}
using System;
using System.Data;
using System.Data.OleDb;
using System.Collections;
using System.Configuration;
using PCSComUtils.DataAccess;
using PCSComUtils.PCSExc;
using PCSComUtils.Common;

namespace PCSComProduct.Items.DS
{
    /// <summary>
    /// Data-access component for the ITM_RoutingStatus table (CRUD + list).
    /// All database errors are translated into PCSDBException.
    /// </summary>
    public class ITM_RoutingStatusDS
    {
        public ITM_RoutingStatusDS()
        {
        }

        private const string THIS = "PCSComProduct.Items.DS.DS.ITM_RoutingStatusDS";

        /// <summary>
        /// Inserts a new ITM_RoutingStatus row from the supplied value object.
        /// (Author: HungLa, 19-Jan-2005)
        /// </summary>
        /// <param name="pobjObjectVO">An ITM_RoutingStatusVO carrying Code and Description.</param>
        public void Add(object pobjObjectVO)
        {
            const string METHOD_NAME = THIS + ".Add()";
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                ITM_RoutingStatusVO objObject = (ITM_RoutingStatusVO)pobjObjectVO;
                string strSql = String.Empty;
                oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
                ocmdPCS = new OleDbCommand("", oconPCS);

                // positional OLE DB parameters; order must match the column list
                strSql = "INSERT INTO ITM_RoutingStatus("
                    + ITM_RoutingStatusTable.CODE_FLD + ","
                    + ITM_RoutingStatusTable.DESCRIPTION_FLD + ")"
                    + "VALUES(?,?)";
                ocmdPCS.Parameters.Add(new OleDbParameter(ITM_RoutingStatusTable.CODE_FLD, OleDbType.VarWChar));
                ocmdPCS.Parameters[ITM_RoutingStatusTable.CODE_FLD].Value = objObject.Code;
                ocmdPCS.Parameters.Add(new OleDbParameter(ITM_RoutingStatusTable.DESCRIPTION_FLD, OleDbType.VarWChar));
                ocmdPCS.Parameters[ITM_RoutingStatusTable.DESCRIPTION_FLD].Value = objObject.Description;

                ocmdPCS.CommandText = strSql;
                ocmdPCS.Connection.Open();
                ocmdPCS.ExecuteNonQuery();
            }
            catch (OleDbException ex)
            {
                // NOTE(review): indexes Errors[1]; confirm the provider always
                // reports at least two errors here (Errors[0] is the usual first entry).
                if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE)
                {
                    throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex);
                }
                else
                {
                    throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
                }
            }
            catch (InvalidOperationException ex)
            {
                throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
            }
            catch (Exception ex)
            {
                throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Deletes the ITM_RoutingStatus row with the given ID.
        /// (Author: HungLa, 09-Dec-2004)
        /// </summary>
        /// <param name="pintID">Primary key of the row to delete.</param>
        public void Delete(int pintID)
        {
            const string METHOD_NAME = THIS + ".Delete()";
            string strSql = String.Empty;
            // pintID is an int, so this concatenation cannot inject SQL
            strSql = "DELETE " + ITM_RoutingStatusTable.TABLE_NAME
                + " WHERE " + "RoutingStatusID" + "=" + pintID.ToString();
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                ocmdPCS.ExecuteNonQuery();
            }
            catch (OleDbException ex)
            {
                // NOTE(review): indexes Errors[1]; confirm the provider always
                // reports at least two errors here.
                if (ex.Errors[1].NativeError == ErrorCode.SQLCASCADE_PREVENT_KEYCODE)
                {
                    throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex);
                }
                else
                {
                    throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
                }
            }
            catch (Exception ex)
            {
                throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Loads the ITM_RoutingStatus row with the given ID into a value object.
        /// Returns an empty VO when no row matches. (Author: HungLa, 19-Jan-2005)
        /// </summary>
        /// <param name="pintID">Primary key of the row to fetch.</param>
        /// <returns>ITM_RoutingStatusVO (boxed as object).</returns>
        public object GetObjectVO(int pintID)
        {
            const string METHOD_NAME = THIS + ".GetObjectVO()";
            OleDbDataReader odrPCS = null;
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                string strSql = String.Empty;
                strSql = "SELECT "
                    + ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD + ","
                    + ITM_RoutingStatusTable.CODE_FLD + ","
                    + ITM_RoutingStatusTable.DESCRIPTION_FLD
                    + " FROM " + ITM_RoutingStatusTable.TABLE_NAME
                    + " WHERE " + ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD + "=" + pintID;
                oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                odrPCS = ocmdPCS.ExecuteReader();
                ITM_RoutingStatusVO objObject = new ITM_RoutingStatusVO();
                while (odrPCS.Read())
                {
                    objObject.RoutingStatusID = int.Parse(odrPCS[ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD].ToString().Trim());
                    objObject.Code = odrPCS[ITM_RoutingStatusTable.CODE_FLD].ToString().Trim();
                    objObject.Description = odrPCS[ITM_RoutingStatusTable.DESCRIPTION_FLD].ToString().Trim();
                }
                return objObject;
            }
            catch (OleDbException ex)
            {
                throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
            }
            catch (Exception ex)
            {
                throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
            }
            finally
            {
                // BUG FIX: the original never closed the data reader, leaking
                // it (and keeping the connection busy) on every call.
                if (odrPCS != null && !odrPCS.IsClosed)
                {
                    odrPCS.Close();
                }
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Updates Code and Description of the ITM_RoutingStatus row identified
        /// by the VO's RoutingStatusID. (Author: HungLa, 09-Dec-2004)
        /// </summary>
        /// <param name="pobjObjecVO">An ITM_RoutingStatusVO with the new values.</param>
        public void Update(object pobjObjecVO)
        {
            const string METHOD_NAME = THIS + ".Update()";
            ITM_RoutingStatusVO objObject = (ITM_RoutingStatusVO)pobjObjecVO;
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                string strSql = String.Empty;
                oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);

                // positional OLE DB parameters; order must match the SET/WHERE clauses
                strSql = "UPDATE ITM_RoutingStatus SET "
                    + ITM_RoutingStatusTable.CODE_FLD + "= ?" + ","
                    + ITM_RoutingStatusTable.DESCRIPTION_FLD + "= ?"
                    + " WHERE " + ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD + "= ?";
                ocmdPCS.Parameters.Add(new OleDbParameter(ITM_RoutingStatusTable.CODE_FLD, OleDbType.VarWChar));
                ocmdPCS.Parameters[ITM_RoutingStatusTable.CODE_FLD].Value = objObject.Code;
                ocmdPCS.Parameters.Add(new OleDbParameter(ITM_RoutingStatusTable.DESCRIPTION_FLD, OleDbType.VarWChar));
                ocmdPCS.Parameters[ITM_RoutingStatusTable.DESCRIPTION_FLD].Value = objObject.Description;
                ocmdPCS.Parameters.Add(new OleDbParameter(ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD, OleDbType.Integer));
                ocmdPCS.Parameters[ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD].Value = objObject.RoutingStatusID;

                ocmdPCS.CommandText = strSql;
                ocmdPCS.Connection.Open();
                ocmdPCS.ExecuteNonQuery();
            }
            catch (OleDbException ex)
            {
                // NOTE(review): indexes Errors[1]; confirm the provider always
                // reports at least two errors here.
                if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE)
                {
                    throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex);
                }
                else
                {
                    throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
                }
            }
            catch (InvalidOperationException ex)
            {
                throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
            }
            catch (Exception ex)
            {
                throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Returns every ITM_RoutingStatus row as a DataSet keyed by table name.
        /// (Author: HungLa, 19-Jan-2005)
        /// </summary>
        /// <returns>DataSet containing the ITM_RoutingStatus table.</returns>
        public DataSet List()
        {
            const string METHOD_NAME = THIS + ".List()";
            DataSet dstPCS = new DataSet();
            OleDbConnection oconPCS = null;
            OleDbCommand ocmdPCS = null;
            try
            {
                string strSql = String.Empty;
                strSql = "SELECT "
                    + ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD + ","
                    + ITM_RoutingStatusTable.CODE_FLD + ","
                    + ITM_RoutingStatusTable.DESCRIPTION_FLD
                    + " FROM " + ITM_RoutingStatusTable.TABLE_NAME;
                oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
                ocmdPCS = new OleDbCommand(strSql, oconPCS);
                ocmdPCS.Connection.Open();
                OleDbDataAdapter odadPCS = new OleDbDataAdapter(ocmdPCS);
                odadPCS.Fill(dstPCS, ITM_RoutingStatusTable.TABLE_NAME);
                return dstPCS;
            }
            catch (OleDbException ex)
            {
                throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
            }
            catch (Exception ex)
            {
                throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }

        /// <summary>
        /// Writes the changes in the supplied DataSet back to ITM_RoutingStatus
        /// using an auto-generated command set. (Author: HungLa, 19-Jan-2005)
        /// </summary>
        /// <param name="pData">DataSet previously filled from ITM_RoutingStatus.</param>
        public void UpdateDataSet(DataSet pData)
        {
            const string METHOD_NAME = THIS + ".UpdateDataSet()";
            string strSql;
            OleDbConnection oconPCS = null;
            OleDbCommandBuilder odcbPCS;
            OleDbDataAdapter odadPCS = new OleDbDataAdapter();
            try
            {
                strSql = "SELECT "
                    + ITM_RoutingStatusTable.ROUTINGSTATUSID_FLD + ","
                    + ITM_RoutingStatusTable.CODE_FLD + ","
                    + ITM_RoutingStatusTable.DESCRIPTION_FLD
                    + " FROM " + ITM_RoutingStatusTable.TABLE_NAME;
                oconPCS = new OleDbConnection(Utils.Instance.OleDbConnectionString);
                odadPCS.SelectCommand = new OleDbCommand(strSql, oconPCS);
                // the command builder derives INSERT/UPDATE/DELETE from the SELECT
                odcbPCS = new OleDbCommandBuilder(odadPCS);
                pData.EnforceConstraints = false;
                odadPCS.Update(pData, ITM_RoutingStatusTable.TABLE_NAME);
            }
            catch (OleDbException ex)
            {
                // NOTE(review): indexes Errors[1]; confirm the provider always
                // reports at least two errors here.
                if (ex.Errors[1].NativeError == ErrorCode.SQLDUPLICATE_KEYCODE)
                {
                    throw new PCSDBException(ErrorCode.DUPLICATE_KEY, METHOD_NAME, ex);
                }
                else if (ex.Errors[1].NativeError == ErrorCode.SQLCASCADE_PREVENT_KEYCODE)
                {
                    throw new PCSDBException(ErrorCode.CASCADE_DELETE_PREVENT, METHOD_NAME, ex);
                }
                else
                {
                    throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
                }
            }
            catch (InvalidOperationException ex)
            {
                throw new PCSDBException(ErrorCode.ERROR_DB, METHOD_NAME, ex);
            }
            catch (Exception ex)
            {
                throw new PCSDBException(ErrorCode.OTHER_ERROR, METHOD_NAME, ex);
            }
            finally
            {
                if (oconPCS != null)
                {
                    if (oconPCS.State != ConnectionState.Closed)
                    {
                        oconPCS.Close();
                    }
                }
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.WebSites { using System; using System.Linq; using System.Collections.Generic; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Text.RegularExpressions; using System.Threading; using System.Threading.Tasks; using Microsoft.Rest; using Microsoft.Rest.Serialization; using Newtonsoft.Json; using Microsoft.Rest.Azure; using Models; /// <summary> /// GlobalDomainRegistrationOperations operations. /// </summary> internal partial class GlobalDomainRegistrationOperations : IServiceOperations<WebSiteManagementClient>, IGlobalDomainRegistrationOperations { /// <summary> /// Initializes a new instance of the GlobalDomainRegistrationOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal GlobalDomainRegistrationOperations(WebSiteManagementClient client) { if (client == null) { throw new ArgumentNullException("client"); } this.Client = client; } /// <summary> /// Gets a reference to the WebSiteManagementClient /// </summary> public WebSiteManagementClient Client { get; private set; } /// <summary> /// Lists all domains in a subscription /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public async Task<AzureOperationResponse<DomainCollection>> GetAllDomainsWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetAllDomains", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/domains").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = 
SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<DomainCollection>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<DomainCollection>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Generates a single sign on request for domain management portal /// </summary> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public async Task<AzureOperationResponse<DomainControlCenterSsoRequest>> GetDomainControlCenterSsoRequestWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "GetDomainControlCenterSsoRequest", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/generateSsoRequest").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = 
SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<DomainControlCenterSsoRequest>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<DomainControlCenterSsoRequest>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Validates domain registration information /// </summary> /// <param name='domainRegistrationInput'> /// Domain registration information /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> public async Task<AzureOperationResponse<object>> ValidateDomainPurchaseInformationWithHttpMessagesAsync(DomainRegistrationInput domainRegistrationInput, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (domainRegistrationInput == null) { throw new ValidationException(ValidationRules.CannotBeNull, "domainRegistrationInput"); } if (domainRegistrationInput != null) { domainRegistrationInput.Validate(); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("domainRegistrationInput", domainRegistrationInput); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ValidateDomainPurchaseInformation", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/validateDomainRegistrationInformation").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; _requestContent = SafeJsonConvert.SerializeObject(domainRegistrationInput, this.Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string 
_responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<object>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<object>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Checks if a domain is available for registration /// </summary> /// <param name='identifier'> /// Name of the domain /// </param> /// <param 
name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse<DomainAvailablilityCheckResult>> CheckDomainAvailabilityWithHttpMessagesAsync(NameIdentifier identifier, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (identifier == null) { throw new ValidationException(ValidationRules.CannotBeNull, "identifier"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("identifier", identifier); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "CheckDomainAvailability", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/checkDomainAvailability").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; _requestContent = SafeJsonConvert.SerializeObject(identifier, this.Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if 
((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<DomainAvailablilityCheckResult>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<DomainAvailablilityCheckResult>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Lists domain recommendations based on keywords /// </summary> /// <param name='parameters'> /// Domain recommendation search 
parameters /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public async Task<AzureOperationResponse<NameIdentifierCollection>> ListDomainRecommendationsWithHttpMessagesAsync(DomainRecommendationSearchParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (parameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "parameters"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } if (this.Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("parameters", parameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListDomainRecommendations", tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/listDomainRecommendations").ToString(); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (this.Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(this.Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += "?" 
+ string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("POST"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; _requestContent = SafeJsonConvert.SerializeObject(parameters, this.Client.SerializationSettings); _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8); _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if 
((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<NameIdentifierCollection>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<NameIdentifierCollection>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
using System;
using System.Collections;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Web;
using System.Web.SessionState;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.HtmlControls;
using System.Data.SqlClient;

namespace WebApplication2
{
	/// <summary>
	/// Page that lists the resources available to the organization held in session
	/// state, optionally restricted to a single resource type, and lets the user
	/// attach checked resources to the current task or start a procurement request.
	/// </summary>
	public partial class frmOwnResourcesAll: System.Web.UI.Page
	{
		// Redirect base URL and connection string come from web.config
		// ("local_url" / "local_db"); no credentials belong in source.
		private static string strURL = System.Configuration.ConfigurationSettings.AppSettings["local_url"];
		private static string strDB = System.Configuration.ConfigurationSettings.AppSettings["local_db"];
		public SqlConnection epsDbConn=new SqlConnection(strDB);

		// Filter flag passed to the stored procedure:
		// "Yes" = only the type in Session["ResTypeId"], "No" = all types.
		// NOTE(review): this is per-request state; it resets to null on postback
		// unless Load_Resources/btnResAll_Click assigns it first — confirm intended.
		private string S;

		/// <summary>Entry point on every request; delegates to <see cref="Load_Resources"/>.</summary>
		protected void Page_Load(object sender, System.EventArgs e)
		{
			// Put user code to initialize the page here
			Load_Resources();
		}

		#region Web Form Designer generated code
		override protected void OnInit(EventArgs e)
		{
			//
			// CODEGEN: This call is required by the ASP.NET Web Form Designer.
			//
			InitializeComponent();
			base.OnInit(e);
		}

		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
		}
		#endregion

		/// <summary>
		/// On the first (non-postback) request, seeds the type filter to "Yes",
		/// shows the organization / resource-type captions from session state,
		/// and loads the grid.
		/// </summary>
		private void Load_Resources()
		{
			if (!IsPostBack)
			{
				S="Yes";
				lblOrg.Text=Session["OrgName"].ToString();
				lblContent.Text="Type of Resource: " + Session["ResTypeName"].ToString();
				loadData();
			}
		}

		/// <summary>
		/// Calls the eps_RetrieveResAll stored procedure with the identifiers kept
		/// in session state, binds the result to DataGrid1, and — when no rows come
		/// back — switches the page into its "nothing found" mode (Exit button,
		/// "Display All Available Resources" button).
		/// </summary>
		private void loadData()
		{
			SqlCommand cmd=new SqlCommand();
			cmd.CommandType=CommandType.StoredProcedure;
			cmd.CommandText="eps_RetrieveResAll";
			cmd.Connection=this.epsDbConn;
			// Parameters are declared as Int but assigned string values from
			// session state; ADO.NET converts them when the command executes.
			cmd.Parameters.Add ("@DomainId",SqlDbType.Int);
			cmd.Parameters["@DomainId"].Value=Session["DomainId"].ToString();
			cmd.Parameters.Add ("@LicenseId",SqlDbType.Int);
			cmd.Parameters["@LicenseId"].Value=Session["LicenseId"].ToString();
			cmd.Parameters.Add ("@OrgIdP",SqlDbType.Int);
			cmd.Parameters["@OrgIdP"].Value=Session["OrgIdP"].ToString();
			cmd.Parameters.Add ("@OrgId",SqlDbType.Int);
			cmd.Parameters["@OrgId"].Value=Session["OrgId"].ToString();
			cmd.Parameters.Add ("@ResTypeId",SqlDbType.Int);
			cmd.Parameters["@ResTypeId"].Value=Session["ResTypeId"].ToString();
			cmd.Parameters.Add ("@AllResTypesFlag",SqlDbType.NVarChar);
			cmd.Parameters["@AllResTypesFlag"].Value=S;

			// SqlDataAdapter.Fill opens and closes the connection itself.
			DataSet ds=new DataSet();
			SqlDataAdapter da=new SqlDataAdapter(cmd);
			da.Fill(ds,"ResourcesAll");
			// Cached so other pages/postbacks can reuse the result set.
			Session["ds"] = ds;
			if (ds.Tables["ResourcesAll"].Rows.Count ==0)
			{
				if (S=="Yes")
				{
					lblContent.Text = "There are no resources of type '" + Session["ResTypeName"].ToString() + "' identified for the above organization. " + " You may extend your search to available resources" + " of all types by clicking on the button titled" + " 'Display All Available Resources'. Or you may " + " initiate a procurement request by clicking on the button" + " titled 'Enter Procurement Request'";
				}
				else
				{
					// NOTE(review): this message reads "resources of type available" —
					// a type name (or the word "any") appears to be missing. Confirm
					// the intended wording before changing user-facing text.
					lblContent.Text = "There are no resources of type" + " available to this organization. Would you like to " + " initiate a procurement request now?";
				}
				btnOK.Text="Exit";
				btnCancel.Visible=false;
				btnResAll.Visible=true;
				DataGrid1.DataSource=ds;
				DataGrid1.DataBind();
				refreshGrid();
			}
			else
			{
				DataGrid1.DataSource=ds;
				DataGrid1.DataBind();
				refreshGrid();
			}
		}

		/// <summary>Saves the checked rows, then leaves the page.</summary>
		protected void btnOK_Click(object sender, System.EventArgs e)
		{
			updateGrid();
			Exit();
		}

		/// <summary>
		/// For every grid row whose "cbxSel" checkbox (cell 5) is checked, calls
		/// eps_UpdateTaskResource with the task-resource id from session state,
		/// the row's id (cell 0) and type (cell 4). Opens/closes the shared
		/// connection once per checked row.
		/// </summary>
		private void updateGrid()
		{
			foreach (DataGridItem i in DataGrid1.Items)
			{
				CheckBox cb = (CheckBox)(i.Cells[5].FindControl("cbxSel"));
				if (cb.Checked)
				{
					SqlCommand cmd=new SqlCommand();
					cmd.CommandType=CommandType.StoredProcedure;
					cmd.CommandText="eps_UpdateTaskResource";
					cmd.Connection=this.epsDbConn;
					cmd.Parameters.Add("@TaskResId", SqlDbType.Int);
					cmd.Parameters ["@TaskResId"].Value=Session["TaskResId"].ToString();
					cmd.Parameters.Add("@Id", SqlDbType.Int);
					cmd.Parameters ["@Id"].Value=i.Cells[0].Text;
					cmd.Parameters.Add("@Type", SqlDbType.NVarChar);
					cmd.Parameters ["@Type"].Value=i.Cells[4].Text;
					cmd.Connection.Open();
					cmd.ExecuteNonQuery();
					cmd.Connection.Close();
				}
			}
		}

		/// <summary>
		/// Intentionally a no-op: the original implementation (which pre-checked
		/// rows already linked in ResourceOrg via string-built SQL) was disabled.
		/// Kept because loadData still calls it after every bind.
		/// </summary>
		private void refreshGrid()
		{
		}

		/// <summary>Redirects back to the caller page recorded in Session["CResAll"].</summary>
		private void Exit()
		{
			Response.Redirect (strURL + Session["CResAll"].ToString() + ".aspx?");
		}

		protected void btnCancel_Click(object sender, System.EventArgs e)
		{
			Exit();
		}

		// NOTE(review): not wired in this file; presumably attached in markup —
		// verify it is still referenced before removing.
		private void btnCancel1_Click(object sender, System.EventArgs e)
		{
			Exit();
		}

		/// <summary>
		/// Widens the search to all resource types (S = "No") and reloads the grid.
		/// </summary>
		protected void btnResAll_Click(object sender, System.EventArgs e)
		{
			S="No";
			lblContent.Text="Type of Resource: All Available Types";
			btnOK.Visible=true;
			btnCancel.Visible=true;
			loadData();
		}

		/// <summary>
		/// Seeds session state for the procurement workflow and redirects to the
		/// procurement entry page.
		/// </summary>
		protected void btnProcurementReq_Click(object sender, System.EventArgs e)
		{
			Session["CallerUpdProcure"]="frmTaskResources";
			Session["btnAction"]="Add";
			Session["Id"]="0";
			Response.Redirect (strURL + "frmUpdProcurement.aspx?");
		}
	}
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation.  All rights reserved.
//------------------------------------------------------------

namespace System.ServiceModel.Channels
{
    using System;
    using System.ComponentModel;
    using System.Net.Security;
    using System.Runtime.CompilerServices;
    using System.ServiceModel.Administration;
    using System.ServiceModel.Description;
    using System.ServiceModel.Security;

    /// <summary>
    /// Binding element that adds the WCF "context" protocol to a channel stack:
    /// it decides how context is exchanged (SOAP header or HTTP cookie), how the
    /// context header is protected, and whether context lifetime is managed by
    /// the channel layer.
    /// </summary>
    [TypeForwardedFrom("System.WorkflowServices, Version=3.5.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35")]
    public class ContextBindingElement : BindingElement, IPolicyExportExtension, IContextSessionProvider, IWmiInstanceProvider, IContextBindingElement
    {
        internal const ContextExchangeMechanism DefaultContextExchangeMechanism = ContextExchangeMechanism.ContextSoapHeader;
        internal const bool DefaultContextManagementEnabled = true;
        internal const ProtectionLevel DefaultProtectionLevel = ProtectionLevel.Sign;

        ContextExchangeMechanism contextExchangeMechanism;
        // Lazily built, cached combination of inner correlation data and the
        // context-exchange correlation description (see GetProperty<T>).
        ICorrelationDataSource instanceCorrelationData;
        bool contextManagementEnabled;
        ProtectionLevel protectionLevel;

        /// <summary>Creates an element with all defaults (Sign, SOAP header, managed).</summary>
        public ContextBindingElement()
            : this(DefaultProtectionLevel, DefaultContextExchangeMechanism, null, DefaultContextManagementEnabled)
        {
            // empty
        }

        public ContextBindingElement(ProtectionLevel protectionLevel)
            : this(protectionLevel, DefaultContextExchangeMechanism, null, DefaultContextManagementEnabled)
        {
            // empty
        }

        public ContextBindingElement(ProtectionLevel protectionLevel, ContextExchangeMechanism contextExchangeMechanism)
            : this(protectionLevel, contextExchangeMechanism, null, DefaultContextManagementEnabled)
        {
            // empty
        }

        public ContextBindingElement(ProtectionLevel protectionLevel, ContextExchangeMechanism contextExchangeMechanism, Uri clientCallbackAddress)
            : this(protectionLevel, contextExchangeMechanism, clientCallbackAddress, DefaultContextManagementEnabled)
        {
            // empty
        }

        /// <summary>
        /// Designated constructor; all other public constructors chain here.
        /// Property setters perform the enum-range validation.
        /// </summary>
        public ContextBindingElement(ProtectionLevel protectionLevel, ContextExchangeMechanism contextExchangeMechanism, Uri clientCallbackAddress, bool contextManagementEnabled)
        {
            this.ProtectionLevel = protectionLevel;
            this.ContextExchangeMechanism = contextExchangeMechanism;
            this.ClientCallbackAddress = clientCallbackAddress;
            this.ContextManagementEnabled = contextManagementEnabled;
        }

        // Copy constructor used by Clone(). Note: instanceCorrelationData is
        // deliberately not copied; the clone rebuilds it on first request.
        ContextBindingElement(ContextBindingElement other)
            : base(other)
        {
            this.ProtectionLevel = other.ProtectionLevel;
            this.ContextExchangeMechanism = other.ContextExchangeMechanism;
            this.ClientCallbackAddress = other.ClientCallbackAddress;
            this.ContextManagementEnabled = other.ContextManagementEnabled;
        }

        /// <summary>Optional address a service uses to call the client back.</summary>
        [DefaultValue(null)]
        public Uri ClientCallbackAddress
        {
            get;
            set;
        }

        /// <summary>How context is transmitted: SOAP header or HTTP cookie.</summary>
        [DefaultValue(DefaultContextExchangeMechanism)]
        public ContextExchangeMechanism ContextExchangeMechanism
        {
            get
            {
                return this.contextExchangeMechanism;
            }
            set
            {
                if (!ContextExchangeMechanismHelper.IsDefined(value))
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("value"));
                }
                this.contextExchangeMechanism = value;
            }
        }

        /// <summary>Whether the channel layer manages the context lifetime.</summary>
        [DefaultValue(DefaultContextManagementEnabled)]
        public bool ContextManagementEnabled
        {
            get
            {
                return this.contextManagementEnabled;
            }
            set
            {
                this.contextManagementEnabled = value;
            }
        }

        /// <summary>Protection applied to the context SOAP header (default Sign).</summary>
        [DefaultValue(DefaultProtectionLevel)]
        public ProtectionLevel ProtectionLevel
        {
            get
            {
                return this.protectionLevel;
            }
            set
            {
                if (!ProtectionLevelHelper.IsDefined(value))
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("value"));
                }
                this.protectionLevel = value;
            }
        }

        /// <summary>
        /// Builds a context-aware channel factory after verifying the channel shape
        /// is supported and the exchange mechanism is compatible with the transport.
        /// </summary>
        public override IChannelFactory<TChannel> BuildChannelFactory<TChannel>(BindingContext context)
        {
            if (context == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
            }
            if (!this.CanBuildChannelFactory<TChannel>(context))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                    new InvalidOperationException(SR.GetString(SR.ContextBindingElementCannotProvideChannelFactory, typeof(TChannel).ToString())));
            }
            this.EnsureContextExchangeMechanismCompatibleWithScheme(context);
            this.EnsureContextExchangeMechanismCompatibleWithTransportCookieSetting(context);

            return new ContextChannelFactory<TChannel>(context, this.ContextExchangeMechanism, this.ClientCallbackAddress, this.ContextManagementEnabled);
        }

        /// <summary>
        /// Builds a context-aware channel listener. Only the scheme check is needed
        /// here; the transport-cookie check applies to the client side.
        /// </summary>
        public override IChannelListener<TChannel> BuildChannelListener<TChannel>(BindingContext context)
        {
            if (context == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
            }
            if (!this.CanBuildChannelListener<TChannel>(context))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                    new InvalidOperationException(SR.GetString(SR.ContextBindingElementCannotProvideChannelListener, typeof(TChannel).ToString())));
            }
            this.EnsureContextExchangeMechanismCompatibleWithScheme(context);

            return new ContextChannelListener<TChannel>(context, this.ContextExchangeMechanism);
        }

        /// <summary>
        /// Supported client shapes: output / request (sessionful or not), plus
        /// duplex session — but duplex session is excluded when using HTTP cookies.
        /// </summary>
        public override bool CanBuildChannelFactory<TChannel>(BindingContext context)
        {
            if (context == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
            }
            return (typeof(TChannel) == typeof(IOutputChannel)
                || typeof(TChannel) == typeof(IOutputSessionChannel)
                || typeof(TChannel) == typeof(IRequestChannel)
                || typeof(TChannel) == typeof(IRequestSessionChannel)
                || (typeof(TChannel) == typeof(IDuplexSessionChannel) && this.ContextExchangeMechanism != ContextExchangeMechanism.HttpCookie))
                && context.CanBuildInnerChannelFactory<TChannel>();
        }

        /// <summary>Mirror of <see cref="CanBuildChannelFactory{TChannel}"/> for the service side.</summary>
        public override bool CanBuildChannelListener<TChannel>(BindingContext context)
        {
            if (context == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
            }
            return ((typeof(TChannel) == typeof(IInputChannel)
                || typeof(TChannel) == typeof(IInputSessionChannel)
                || typeof(TChannel) == typeof(IReplyChannel)
                || typeof(TChannel) == typeof(IReplySessionChannel)
                || (typeof(TChannel) == typeof(IDuplexSessionChannel) && this.ContextExchangeMechanism != ContextExchangeMechanism.HttpCookie))
                && context.CanBuildInnerChannelListener<TChannel>());
        }

        public override BindingElement Clone()
        {
            return new ContextBindingElement(this);
        }

        /// <summary>Exports the "requires context" WS-Policy assertion for metadata.</summary>
        public virtual void ExportPolicy(MetadataExporter exporter, PolicyConversionContext context)
        {
            if (context == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
            }
            ContextBindingElementPolicy.ExportRequireContextAssertion(this, context.GetBindingAssertions());
        }

        /// <summary>
        /// Answers stack-wide property queries. Merges the context header's
        /// protection requirements into the inner stack's requirements, exposes
        /// this element as session/context provider, and combines correlation
        /// data sources (cached per instance). Everything else is delegated to
        /// the inner context.
        /// </summary>
        public override T GetProperty<T>(BindingContext context)
        {
            if (context == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("context");
            }
            if (typeof(T) == typeof(ChannelProtectionRequirements) && this.ProtectionLevel != ProtectionLevel.None)
            {
                ChannelProtectionRequirements innerRequirements = context.GetInnerProperty<ChannelProtectionRequirements>();
                if (innerRequirements == null)
                {
                    return (T)(object)ContextMessageHeader.GetChannelProtectionRequirements(this.ProtectionLevel);
                }
                else
                {
                    ChannelProtectionRequirements requirements = new ChannelProtectionRequirements(innerRequirements);
                    requirements.Add(ContextMessageHeader.GetChannelProtectionRequirements(this.ProtectionLevel));
                    return (T)(object)requirements;
                }
            }
            else if (typeof(T) == typeof(IContextSessionProvider))
            {
                return (T)(object)this;
            }
            else if (typeof(T) == typeof(IContextBindingElement))
            {
                return (T)(object)this;
            }
            else if (typeof(T) == typeof(ICorrelationDataSource))
            {
                // Lazy, non-locking cache: a benign race may compute the combined
                // source twice, but the result is equivalent either way.
                ICorrelationDataSource correlationData = instanceCorrelationData;

                if (correlationData == null)
                {
                    ICorrelationDataSource innerCorrelationData = context.GetInnerProperty<ICorrelationDataSource>();
                    correlationData = CorrelationDataSourceHelper.Combine(innerCorrelationData, ContextExchangeCorrelationDataDescription.DataSource);
                    instanceCorrelationData = correlationData;
                }

                return (T)(object)correlationData;
            }
            return context.GetInnerProperty<T>();
        }

        /// <summary>
        /// Structural equality used by binding comparison: all four configurable
        /// settings must match.
        /// </summary>
        internal override bool IsMatch(BindingElement b)
        {
            if (b == null)
            {
                return false;
            }
            ContextBindingElement other = b as ContextBindingElement;
            if (other == null)
            {
                return false;
            }
            // Uri overloads ==, so this is value comparison, not reference identity.
            if (this.ClientCallbackAddress != other.ClientCallbackAddress)
            {
                return false;
            }
            if (this.ContextExchangeMechanism != other.ContextExchangeMechanism)
            {
                return false;
            }
            if (this.ContextManagementEnabled != other.ContextManagementEnabled)
            {
                return false;
            }
            if (this.ProtectionLevel != other.protectionLevel)
            {
                return false;
            }
            return true;
        }

        /// <summary>Publishes this element's settings to WMI administration tooling.</summary>
        void IWmiInstanceProvider.FillInstance(IWmiInstance wmiInstance)
        {
            wmiInstance.SetProperty("ProtectionLevel", this.protectionLevel.ToString());
            wmiInstance.SetProperty("ContextExchangeMechanism", this.contextExchangeMechanism.ToString());
            wmiInstance.SetProperty("ContextManagementEnabled", this.contextManagementEnabled);
        }

        string IWmiInstanceProvider.GetInstanceType()
        {
            return "ContextBindingElement";
        }

        /// <summary>
        /// Helper for behaviors that need context support: throws if any
        /// non-system, session-capable endpoint lacks an IContextBindingElement
        /// in its binding.
        /// </summary>
        internal static void ValidateContextBindingElementOnAllEndpointsWithSessionfulContract(ServiceDescription description, IServiceBehavior callingBehavior)
        {
            if (description == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("description");
            }
            if (callingBehavior == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("callingBehavior");
            }
            BindingParameterCollection bpc = new BindingParameterCollection();
            foreach (ServiceEndpoint endpoint in description.Endpoints)
            {
                if (endpoint.Binding != null
                    && endpoint.Contract != null
                    && !endpoint.InternalIsSystemEndpoint(description)
                    && endpoint.Contract.SessionMode != SessionMode.NotAllowed)
                {
                    if (endpoint.Binding.GetProperty<IContextBindingElement>(bpc) == null)
                    {
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                            new InvalidOperationException(SR.GetString(
                            SR.BehaviorRequiresContextProtocolSupportInBinding,
                            callingBehavior.GetType().Name, endpoint.Name, endpoint.ListenUri.ToString())));
                    }
                }
            }
        }

        // HTTP-cookie context exchange only makes sense over http/https transports.
        void EnsureContextExchangeMechanismCompatibleWithScheme(BindingContext context)
        {
            if (context.Binding != null
                && this.contextExchangeMechanism == ContextExchangeMechanism.HttpCookie
                && !"http".Equals(context.Binding.Scheme, StringComparison.OrdinalIgnoreCase)
                && !"https".Equals(context.Binding.Scheme, StringComparison.OrdinalIgnoreCase))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                    new InvalidOperationException(
                    SR.GetString(
                    SR.HttpCookieContextExchangeMechanismNotCompatibleWithTransportType,
                    context.Binding.Scheme, context.Binding.Namespace, context.Binding.Name)));
            }
        }

        // HTTP-cookie context exchange conflicts with the transport managing
        // cookies itself (HttpTransportBindingElement.AllowCookies).
        void EnsureContextExchangeMechanismCompatibleWithTransportCookieSetting(BindingContext context)
        {
            if (context.Binding != null
                && this.contextExchangeMechanism == ContextExchangeMechanism.HttpCookie)
            {
                foreach (BindingElement bindingElement in context.Binding.Elements)
                {
                    HttpTransportBindingElement http = bindingElement as HttpTransportBindingElement;
                    if (http != null && http.AllowCookies)
                    {
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                            new InvalidOperationException(
                            SR.GetString(
                            SR.HttpCookieContextExchangeMechanismNotCompatibleWithTransportCookieSetting,
                            context.Binding.Namespace, context.Binding.Name)));
                    }
                }
            }
        }

        /// <summary>
        /// Correlation-data description for the context-exchange protocol: an
        /// optional, default value known before send, readable and writable in
        /// both directions. A single instance is cached statically.
        /// </summary>
        class ContextExchangeCorrelationDataDescription : CorrelationDataDescription
        {
            static CorrelationDataSourceHelper cachedCorrelationDataSource;

            ContextExchangeCorrelationDataDescription()
            {
            }

            public static ICorrelationDataSource DataSource
            {
                get
                {
                    // Benign race: concurrent first calls may each build a helper,
                    // but the instances are interchangeable.
                    if (cachedCorrelationDataSource == null)
                    {
                        cachedCorrelationDataSource = new CorrelationDataSourceHelper(
                            new CorrelationDataDescription[] { new ContextExchangeCorrelationDataDescription() });
                    }
                    return cachedCorrelationDataSource;
                }
            }

            public override bool IsOptional
            {
                get { return true; }
            }

            public override bool IsDefault
            {
                get { return true; }
            }

            public override bool KnownBeforeSend
            {
                get { return true; }
            }

            public override string Name
            {
                get { return ContextExchangeCorrelationHelper.CorrelationName; }
            }

            public override bool ReceiveValue
            {
                get { return true; }
            }

            public override bool SendValue
            {
                get { return true; }
            }
        }
    }
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

using CsvHelper;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Logging;
using QuantConnect.Util;
using RestSharp;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using QuantConnect.Securities;

namespace QuantConnect.ToolBox.AlphaVantageDownloader
{
    /// <summary>
    /// Alpha Vantage data downloader: fetches daily or intraday trade bars from
    /// the Alpha Vantage CSV API, throttled to the free tier's request limit.
    /// </summary>
    public class AlphaVantageDataDownloader : IDataDownloader, IDisposable
    {
        private readonly MarketHoursDatabase _marketHoursDatabase;
        private readonly IRestClient _avClient;
        // Throttle shared by all requests made through this instance.
        private readonly RateGate _rateGate;
        private bool _disposed;

        /// <summary>
        /// Construct AlphaVantageDataDownloader with default RestClient
        /// </summary>
        /// <param name="apiKey">API key</param>
        public AlphaVantageDataDownloader(string apiKey) : this(new RestClient(), apiKey)
        {
        }

        /// <summary>
        /// Dependency injection constructor
        /// </summary>
        /// <param name="restClient">The <see cref="RestClient"/> to use</param>
        /// <param name="apiKey">API key</param>
        public AlphaVantageDataDownloader(IRestClient restClient, string apiKey)
        {
            _avClient = restClient;
            _marketHoursDatabase = MarketHoursDatabase.FromDataFolder();

            _avClient.BaseUrl = new Uri("https://www.alphavantage.co/");
            // Authenticator appends the API key at execution time, so the key
            // never appears in request.Parameters (see the trace in GetTimeSeries).
            _avClient.Authenticator = new AlphaVantageAuthenticator(apiKey);

            _rateGate = new RateGate(5, TimeSpan.FromMinutes(1)); // Free API is limited to 5 requests/minute
        }

        /// <summary>
        /// Get historical data enumerable for a single symbol, type and resolution given this start and end time (in UTC).
        /// Only <see cref="TickType.Trade"/> is supported; other tick types yield an
        /// empty sequence. Minute/Hour go through the intraday API, Daily through the
        /// daily API; any other resolution throws.
        /// </summary>
        /// <param name="dataDownloaderGetParameters">model class for passing in parameters for historical data</param>
        /// <returns>Enumerable of base data for this symbol</returns>
        /// <exception cref="ArgumentOutOfRangeException">Unsupported resolution.</exception>
        public IEnumerable<BaseData> Get(DataDownloaderGetParameters dataDownloaderGetParameters)
        {
            var symbol = dataDownloaderGetParameters.Symbol;
            var resolution = dataDownloaderGetParameters.Resolution;
            var startUtc = dataDownloaderGetParameters.StartUtc;
            var endUtc = dataDownloaderGetParameters.EndUtc;
            var tickType = dataDownloaderGetParameters.TickType;

            if (tickType != TickType.Trade)
            {
                return Enumerable.Empty<BaseData>();
            }

            // Base request; the function/interval/slice parameters are added by
            // the resolution-specific helpers below.
            var request = new RestRequest("query", DataFormat.Json);
            request.AddParameter("symbol", symbol.Value);
            request.AddParameter("datatype", "csv");

            IEnumerable<TimeSeries> data = null;
            switch (resolution)
            {
                case Resolution.Minute:
                case Resolution.Hour:
                    data = GetIntradayData(request, startUtc, endUtc, resolution);
                    break;
                case Resolution.Daily:
                    data = GetDailyData(request, startUtc, endUtc, symbol);
                    break;
                default:
                    throw new ArgumentOutOfRangeException(nameof(resolution), $"{resolution} resolution not supported by API.");
            }

            var period = resolution.ToTimeSpan();
            // Lazy projection: intraday slices are downloaded as this is enumerated.
            return data.Select(d => new TradeBar(d.Time, symbol, d.Open, d.High, d.Low, d.Close, d.Volume, period));
        }

        /// <summary>
        /// Get data from daily API
        /// </summary>
        /// <param name="request">Base request</param>
        /// <param name="startUtc">Start time</param>
        /// <param name="endUtc">End time</param>
        /// <param name="symbol">Symbol to download</param>
        /// <returns></returns>
        private IEnumerable<TimeSeries> GetDailyData(RestRequest request, DateTime startUtc, DateTime endUtc, Symbol symbol)
        {
            request.AddParameter("function", "TIME_SERIES_DAILY");

            // The default output only includes 100 trading days of data. If we need more, specify full output
            if (GetBusinessDays(startUtc, endUtc, symbol) > 100)
            {
                request.AddParameter("outputsize", "full");
            }

            return GetTimeSeries(request);
        }

        /// <summary>
        /// Get data from intraday API
        /// </summary>
        /// <param name="request">Base request</param>
        /// <param name="startUtc">Start time</param>
        /// <param name="endUtc">End time</param>
        /// <param name="resolution">Data resolution to request</param>
        /// <returns></returns>
        private IEnumerable<TimeSeries> GetIntradayData(RestRequest request, DateTime startUtc, DateTime endUtc, Resolution resolution)
        {
            request.AddParameter("function", "TIME_SERIES_INTRADAY_EXTENDED");
            request.AddParameter("adjusted", "false");
            switch (resolution)
            {
                case Resolution.Minute:
                    request.AddParameter("interval", "1min");
                    break;
                case Resolution.Hour:
                    request.AddParameter("interval", "60min");
                    break;
                default:
                    throw new ArgumentOutOfRangeException($"{resolution} resolution not supported by intraday API.");
            }

            // One request per 30-day "slice"; AddOrUpdateParameter replaces the
            // slice value on the shared request between iterations.
            var slices = GetSlices(startUtc, endUtc);
            foreach (var slice in slices)
            {
                request.AddOrUpdateParameter("slice", slice);
                var data = GetTimeSeries(request);
                foreach (var record in data)
                {
                    yield return record;
                }
            }
        }

        /// <summary>
        /// Execute request and parse response.
        /// </summary>
        /// <param name="request">The request</param>
        /// <returns><see cref="TimeSeries"/> data, sorted ascending by time</returns>
        /// <exception cref="FormatException">
        /// The API did not return a CSV download (e.g. a JSON error or rate-limit
        /// notice); the raw body is included in the message.
        /// </exception>
        private IEnumerable<TimeSeries> GetTimeSeries(RestRequest request)
        {
            if (_rateGate.IsRateLimited)
            {
                Log.Trace("Requests are limited to 5 per minute. Reduce the time between start and end times or simply wait, and this process will continue automatically.");
            }
            // Blocks until the rate gate allows another request.
            _rateGate.WaitToProceed();

            //var url = _avClient.BuildUri(request);
            Log.Trace("Downloading /{0}?{1}", request.Resource, string.Join("&", request.Parameters));
            var response = _avClient.Get(request);

            if (response.ContentType != "application/x-download")
            {
                throw new FormatException($"Unexpected content received from API.\n{response.Content}");
            }

            using (var reader = new StringReader(response.Content))
            {
                using (var csv = new CsvReader(reader, CultureInfo.InvariantCulture))
                {
                    return csv.GetRecords<TimeSeries>()
                              .OrderBy(t => t.Time)
                              .ToList(); // Execute query before readers are disposed.
                }
            }
        }

        /// <summary>
        /// Get slice names for date range.
        /// See https://www.alphavantage.co/documentation/#intraday-extended
        /// Slices are emitted oldest-first ("yearYmonthM", 30-day buckets) so the
        /// combined download is in ascending time order.
        /// </summary>
        /// <param name="startUtc">Start date</param>
        /// <param name="endUtc">End date</param>
        /// <returns>Slice names</returns>
        /// <exception cref="ArgumentOutOfRangeException">Start date older than the 2-year API window.</exception>
        private static IEnumerable<string> GetSlices(DateTime startUtc, DateTime endUtc)
        {
            if ((DateTime.UtcNow - startUtc).TotalDays > 365 * 2)
            {
                throw new ArgumentOutOfRangeException(nameof(startUtc), "Intraday data is only available for the last 2 years.");
            }

            var timeSpan = endUtc - startUtc;
            var months = (int)Math.Floor(timeSpan.TotalDays / 30);

            for (var i = months; i >= 0; i--)
            {
                var year = i / 12 + 1;
                var month = i % 12 + 1;
                yield return $"year{year}month{month}";
            }
        }

        /// <summary>
        /// From https://stackoverflow.com/questions/1617049/calculate-the-number-of-business-days-between-two-dates
        /// Counts days on which the symbol's exchange is open in [start.Date, end).
        /// </summary>
        private int GetBusinessDays(DateTime start, DateTime end, Symbol symbol)
        {
            var exchangeHours = _marketHoursDatabase.GetExchangeHours(symbol.ID.Market, symbol, symbol.SecurityType);

            var current = start.Date;
            var days = 0;
            while (current < end)
            {
                if (exchangeHours.IsDateOpen(current))
                {
                    days++;
                }
                current = current.AddDays(1);
            }

            return days;
        }

        /// <summary>
        /// Standard dispose pattern; releases the rate gate. Safe to call more than once.
        /// </summary>
        /// <param name="disposing">True when called from <see cref="Dispose()"/>, false from a finalizer.</param>
        protected virtual void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                if (disposing)
                {
                    // dispose managed state (managed objects)
                    _rateGate.Dispose();
                }

                // free unmanaged resources (unmanaged objects) and override finalizer
                _disposed = true;
            }
        }

        public void Dispose()
        {
            // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
            Dispose(disposing: true);
            GC.SuppressFinalize(this);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics; using System.IO.PortsTests; using System.Threading; using Legacy.Support; using Xunit; namespace System.IO.Ports.Tests { public class SerialStream_ReadTimeout_Property : PortsTest { // The default number of bytes to write with when testing timeout with Read(byte[], int, int) private const int DEFAULT_READ_BYTE_ARRAY_SIZE = 8; // The amount of time to wait when expecting an long timeout private const int DEFAULT_WAIT_LONG_TIMEOUT = 250; // The maximum acceptable time allowed when a read method should timeout immediately when it is called for the first time private const int MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT = 1000; // The maximum acceptable percentage difference allowed when a read method is called for the first time private const double MAX_ACCEPTABLE_WARMUP_PERCENTAGE_DIFFERENCE = .5; // The maximum acceptable percentage difference allowed private const double MAX_ACCEPTABLE_PERCENTAGE_DIFFERENCE = .15; private const int SUCCESSIVE_READTIMEOUT_SOMEDATA = 950; private const int NUM_TRYS = 5; private delegate void ReadMethodDelegate(Stream stream); #region Test Cases [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_DefaultValue() { using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { com.Open(); Stream stream = com.BaseStream; Debug.WriteLine("Verifying the default value of ReadTimeout"); Assert.Equal(-1, stream.ReadTimeout); } } [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_AfterClose() { Debug.WriteLine("Verifying setting ReadTimeout after the SerialPort was closed"); VerifyException(2048, null, typeof(ObjectDisposedException)); } [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_Int32MinValue() { Debug.WriteLine("Verifying 
Int32.MinValue ReadTimeout"); VerifyException(int.MinValue, typeof(ArgumentOutOfRangeException)); } [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_NEG2() { Debug.WriteLine("Verifying -2 ReadTimeout"); VerifyException(-2, typeof(ArgumentOutOfRangeException)); } [ConditionalFact(nameof(HasLoopbackOrNullModem))] public void ReadTimeout_Default_Read_byte_int_int() { Debug.WriteLine("Verifying default ReadTimeout with Read(byte[] buffer, int offset, int count)"); VerifyDefaultTimeout(Read_byte_int_int); } [ConditionalFact(nameof(HasLoopbackOrNullModem))] public void ReadTimeout_Default_ReadByte() { Debug.WriteLine("Verifying default ReadTimeout with ReadByte()"); VerifyDefaultTimeout(ReadByte); } [ConditionalFact(nameof(HasLoopbackOrNullModem))] public void ReadTimeout_Infinite_Read_byte_int_int() { Debug.WriteLine("Verifying infinite ReadTimeout with Read(byte[] buffer, int offset, int count)"); VerifyLongTimeout(Read_byte_int_int, -1); } [ConditionalFact(nameof(HasLoopbackOrNullModem))] public void ReadTimeout_Infinite_ReadByte() { Debug.WriteLine("Verifying infinite ReadTimeout with ReadByte()"); VerifyLongTimeout(ReadByte, -1); } [ConditionalFact(nameof(HasLoopbackOrNullModem))] public void ReadTimeout_Int32MaxValue_Read_byte_int_int() { Debug.WriteLine("Verifying Int32.MaxValue ReadTimeout with Read(byte[] buffer, int offset, int count)"); VerifyLongTimeout(Read_byte_int_int, int.MaxValue - 1); } [ConditionalFact(nameof(HasLoopbackOrNullModem))] public void ReadTimeout_Int32MaxValue_ReadByte() { Debug.WriteLine("Verifying Int32.MaxValue ReadTimeout with ReadByte()"); VerifyLongTimeout(ReadByte, int.MaxValue - 1); } [OuterLoop("Slow Test")] [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_750_Read_byte_int_int() { Debug.WriteLine("Verifying 750 ReadTimeout with Read(byte[] buffer, int offset, int count)"); VerifyTimeout(Read_byte_int_int, 750); } [OuterLoop("Slow Test")] [ConditionalFact(nameof(HasOneSerialPort))] public void 
ReadTimeout_750_ReadByte() { Debug.WriteLine("Verifying 750 ReadTimeout with ReadByte()"); VerifyTimeout(ReadByte, 750); } [OuterLoop("Slow test")] [ConditionalFact(nameof(HasOneSerialPort))] public void SuccessiveReadTimeoutNoData_Read_byte_int_int() { using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { com.Open(); Stream stream = com.BaseStream; stream.ReadTimeout = 850; Debug.WriteLine( "Verifying ReadTimeout={0} with successive call to Read(byte[], int, int) and no data", stream.ReadTimeout); try { stream.Read(new byte[DEFAULT_READ_BYTE_ARRAY_SIZE], 0, DEFAULT_READ_BYTE_ARRAY_SIZE); Assert.True(false, "Err_1707ahbap!!!: Read did not throw TimeouException when it timed out"); } catch (TimeoutException) { } VerifyTimeout(Read_byte_int_int, stream); } } [ConditionalFact(nameof(HasNullModem))] public void SuccessiveReadTimeoutSomeData_Read_byte_int_int() { using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { var t = new Thread(WriteToCom1); com1.Open(); Stream stream = com1.BaseStream; stream.ReadTimeout = SUCCESSIVE_READTIMEOUT_SOMEDATA; Debug.WriteLine( "Verifying ReadTimeout={0} with successive call to Read(byte[], int, int) and some data being received in the first call", stream.ReadTimeout); // Call WriteToCom1 asynchronously this will write to com1 some time before the following call // to a read method times out t.Start(); try { stream.Read(new byte[DEFAULT_READ_BYTE_ARRAY_SIZE], 0, DEFAULT_READ_BYTE_ARRAY_SIZE); } catch (TimeoutException) { } // Wait for the thread to finish while (t.IsAlive) Thread.Sleep(50); // Make sure there is no bytes in the buffer so the next call to read will timeout com1.DiscardInBuffer(); VerifyTimeout(Read_byte_int_int, stream); } } [OuterLoop("Slow Test")] [ConditionalFact(nameof(HasOneSerialPort))] public void SuccessiveReadTimeoutNoData_ReadByte() { using (SerialPort com = new 
SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { com.Open(); Stream stream = com.BaseStream; stream.ReadTimeout = 850; Debug.WriteLine("Verifying ReadTimeout={0} with successive call to ReadByte() and no data", stream.ReadTimeout); Assert.Throws<TimeoutException>(() => stream.ReadByte()); VerifyTimeout(ReadByte, stream); } } [ConditionalFact(nameof(HasNullModem))] public void SuccessiveReadTimeoutSomeData_ReadByte() { using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) { var t = new Thread(WriteToCom1); com1.Open(); Stream stream = com1.BaseStream; stream.ReadTimeout = SUCCESSIVE_READTIMEOUT_SOMEDATA; Debug.WriteLine( "Verifying ReadTimeout={0} with successive call to ReadByte() and some data being received in the first call", stream.ReadTimeout); // Call WriteToCom1 asynchronously this will write to com1 some time before the following call // to a read method times out t.Start(); try { stream.ReadByte(); } catch (TimeoutException) { } // Wait for the thread to finish while (t.IsAlive) Thread.Sleep(50); // Make sure there is no bytes in the buffer so the next call to read will timeout com1.DiscardInBuffer(); VerifyTimeout(ReadByte, stream); } } [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_0_Read_byte_int_int() { Debug.WriteLine("Verifying 0 ReadTimeout with Read(byte[] buffer, int offset, int count)"); Verify0Timeout(Read_byte_int_int); } [ConditionalFact(nameof(HasOneSerialPort))] public void ReadTimeout_0_ReadByte() { Debug.WriteLine("Verifying 0 ReadTimeout with ReadByte()"); Verify0Timeout(ReadByte); } [ConditionalFact(nameof(HasNullModem))] public void ReadTimeout_0_1ByteAvailable_Read_byte_int_int() { using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName)) { var rcvBytes = new byte[128]; int bytesRead; Debug.WriteLine( "Verifying 0 ReadTimeout with 
Read(byte[] buffer, int offset, int count) and one byte available"); com1.Open(); com2.Open(); Stream stream = com1.BaseStream; stream.ReadTimeout = 0; com2.Write(new byte[] { 50 }, 0, 1); TCSupport.WaitForReadBufferToLoad(com1, 1); Assert.True(1 == (bytesRead = com1.Read(rcvBytes, 0, rcvBytes.Length)), string.Format("Err_31597ahpba, Expected to Read to return 1 actual={0}", bytesRead)); Assert.True(50 == rcvBytes[0], string.Format("Err_778946ahba, Expected to read 50 actual={0}", rcvBytes[0])); } } [ConditionalFact(nameof(HasNullModem))] public void ReadTimeout_0_1ByteAvailable_ReadByte() { using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName)) using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName)) { int byteRead; Debug.WriteLine("Verifying 0 ReadTimeout with ReadByte() and one byte available"); com1.Open(); com2.Open(); Stream stream = com1.BaseStream; stream.ReadTimeout = 0; com2.Write(new byte[] { 50 }, 0, 1); TCSupport.WaitForReadBufferToLoad(com1, 1); Assert.True(50 == (byteRead = com1.ReadByte()), string.Format("Err_05949aypa, Expected to Read to return 50 actual={0}", byteRead)); } } private void WriteToCom1() { using (var com2 = new SerialPort(TCSupport.LocalMachineSerialInfo.SecondAvailablePortName)) { var xmitBuffer = new byte[1]; int sleepPeriod = SUCCESSIVE_READTIMEOUT_SOMEDATA / 2; // Sleep some random period with of a maximum duration of half the largest possible timeout value for a read method on COM1 Thread.Sleep(sleepPeriod); com2.Open(); com2.Write(xmitBuffer, 0, xmitBuffer.Length); } } #endregion #region Verification for Test Cases private void VerifyDefaultTimeout(ReadMethodDelegate readMethod) { using (var com1 = TCSupport.InitFirstSerialPort()) using (var com2 = TCSupport.InitSecondSerialPort(com1)) { com1.Open(); if (!com2.IsOpen) com2.Open(); com1.BaseStream.WriteTimeout = 1; Assert.Equal(-1, com1.BaseStream.ReadTimeout); VerifyLongTimeout(readMethod, com1, com2); } } 
// Sets ReadTimeout to the given (long) value, asserts it round-trips, and
// then verifies that the read method actually blocks instead of timing out.
private void VerifyLongTimeout(ReadMethodDelegate readMethod, int readTimeout)
{
    using (var com1 = TCSupport.InitFirstSerialPort())
    using (var com2 = TCSupport.InitSecondSerialPort(com1))
    {
        com1.Open();

        if (!com2.IsOpen)
            com2.Open();

        com1.BaseStream.WriteTimeout = 1;
        com1.BaseStream.ReadTimeout = 1;
        com1.BaseStream.ReadTimeout = readTimeout;

        Assert.True(readTimeout == com1.BaseStream.ReadTimeout, string.Format("Err_7071ahpsb!!! Expected ReadTimeout to be {0} actaul {1}", readTimeout, com1.BaseStream.ReadTimeout));

        VerifyLongTimeout(readMethod, com1, com2);
    }
}

// Starts the read method on a background thread, asserts it is still blocked
// after DEFAULT_WAIT_LONG_TIMEOUT, then writes data on the other port so the
// read can complete and the thread can exit.
private void VerifyLongTimeout(ReadMethodDelegate readMethod, SerialPort com1, SerialPort com2)
{
    var readThread = new ReadDelegateThread(com1.BaseStream, readMethod);
    var t = new Thread(readThread.CallRead);

    t.Start();
    Thread.Sleep(DEFAULT_WAIT_LONG_TIMEOUT);

    Assert.True(t.IsAlive, string.Format("Err_17071ahpa!!! {0} terminated with a long timeout of {1}ms", readMethod.Method.Name, com1.BaseStream.ReadTimeout));

    // Supply data so the blocked read completes and the thread can finish.
    com2.Write(new byte[8], 0, 8);

    while (t.IsAlive)
        Thread.Sleep(10);
}

// Sets ReadTimeout to the given value, asserts it round-trips, and verifies
// that the read method times out in approximately that many milliseconds.
private void VerifyTimeout(ReadMethodDelegate readMethod, int readTimeout)
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        com1.Open();
        com1.BaseStream.WriteTimeout = 1;
        com1.BaseStream.ReadTimeout = 1;
        com1.BaseStream.ReadTimeout = readTimeout;

        Assert.True(readTimeout == com1.BaseStream.ReadTimeout, string.Format("Err_236897ahpbm!!! Expected ReadTimeout to be {0} actaul {1}", readTimeout, com1.BaseStream.ReadTimeout));

        VerifyTimeout(readMethod, com1.BaseStream);
    }
}

// Measures how long the read method takes to throw TimeoutException (no data
// is ever supplied) and asserts the measured time stays within the acceptable
// percentage difference of the stream's configured ReadTimeout.
private void VerifyTimeout(ReadMethodDelegate readMethod, Stream stream)
{
    var timer = new Stopwatch();
    int expectedTime = stream.ReadTimeout;
    int actualTime;
    double percentageDifference;

    // Warmup the read method. When called for the first time the read method
    // seems to take much longer than subsequent calls.
    timer.Start();
    try
    {
        readMethod(stream);
    }
    catch (TimeoutException) { }
    timer.Stop();
    actualTime = (int)timer.ElapsedMilliseconds;
    percentageDifference = Math.Abs((expectedTime - actualTime) / (double)expectedTime);

    // Verify that the percentage difference between the expected and actual timeout is less than maxPercentageDifference
    Assert.True(percentageDifference <= MAX_ACCEPTABLE_WARMUP_PERCENTAGE_DIFFERENCE, string.Format("Err_88558amuph!!!: The read method timedout in {0} expected {1} percentage difference: {2} when called for the first time", actualTime, expectedTime, percentageDifference));

    actualTime = 0;
    timer.Reset();

    // Perform the actual test verifying that the read method times out in approximately ReadTime milliseconds.
    // Boost priority so scheduling jitter does not skew the timing average.
    Thread.CurrentThread.Priority = ThreadPriority.Highest;

    for (var i = 0; i < NUM_TRYS; i++)
    {
        timer.Start();
        try
        {
            readMethod(stream);
        }
        catch (TimeoutException) { }
        timer.Stop();

        actualTime += (int)timer.ElapsedMilliseconds;
        timer.Reset();
    }

    Thread.CurrentThread.Priority = ThreadPriority.Normal;
    actualTime /= NUM_TRYS;
    percentageDifference = Math.Abs((expectedTime - actualTime) / (double)expectedTime);

    // Verify that the percentage difference between the expected and actual timeout is less than maxPercentageDifference
    Assert.True(percentageDifference <= MAX_ACCEPTABLE_PERCENTAGE_DIFFERENCE, string.Format("Err_56485ahpbz!!!: The read method timedout in {0} expected {1} percentage difference: {2}", actualTime, expectedTime, percentageDifference));
}

// Sets ReadTimeout to 0, asserts it round-trips, and verifies that the read
// method returns (or throws) essentially immediately.
private void Verify0Timeout(ReadMethodDelegate readMethod)
{
    using (var com1 = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        com1.Open();
        com1.BaseStream.WriteTimeout = 1;
        com1.BaseStream.ReadTimeout = 1;
        com1.BaseStream.ReadTimeout = 0;

        Assert.True(0 == com1.BaseStream.ReadTimeout, string.Format("Err_72072ahps!!! Expected ReadTimeout to be {0} actaul {1}", 0, com1.BaseStream.ReadTimeout));

        Verify0Timeout(readMethod, com1.BaseStream);
    }
}

// Measures how long the read method takes to return with a 0 timeout and
// asserts it is below the maximum acceptable bound.
private void Verify0Timeout(ReadMethodDelegate readMethod, Stream stream)
{
    var timer = new Stopwatch();
    int actualTime;

    // Warmup the read method. When called for the first time the read method
    // seems to take much longer than subsequent calls.
    timer.Start();
    try
    {
        readMethod(stream);
    }
    catch (TimeoutException) { }
    timer.Stop();
    actualTime = (int)timer.ElapsedMilliseconds;

    // Verify that the time the method took to timeout is less than the maximum acceptable time
    Assert.True(actualTime <= MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT, string.Format("Err_277a0ahpsb!!!: With a timeout of 0 the read method timedout in {0} expected something less then {1} when called for the first time", actualTime, MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT));

    actualTime = 0;
    timer.Reset();

    // Perform the actual test verifying that the read method times out in approximately ReadTime milliseconds
    Thread.CurrentThread.Priority = ThreadPriority.Highest;

    for (var i = 0; i < NUM_TRYS; i++)
    {
        timer.Start();
        try
        {
            readMethod(stream);
        }
        catch (TimeoutException) { }
        timer.Stop();

        actualTime += (int)timer.ElapsedMilliseconds;
        timer.Reset();
    }

    Thread.CurrentThread.Priority = ThreadPriority.Normal;
    actualTime /= NUM_TRYS;

    // Verify that the time the method took to timeout is less than the maximum acceptable time
    Assert.True(actualTime <= MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT, string.Format("Err_112389ahbp!!!: With a timeout of 0 the read method timedout in {0} expected something less then {1}", actualTime, MAX_ACCEPTABLE_WARMUP_ZERO_TIMEOUT));
}

// Convenience overload: expects the same exception whether open or closed.
private void VerifyException(int readTimeout, Type expectedException)
{
    VerifyException(readTimeout, expectedException, expectedException);
}

// Verifies the exception thrown (or not) when setting ReadTimeout both while
// the port is open and after it has been closed.
private void VerifyException(int readTimeout, Type expectedExceptionAfterOpen, Type expectedExceptionAfterClose)
{
    using (SerialPort com = new SerialPort(TCSupport.LocalMachineSerialInfo.FirstAvailablePortName))
    {
        com.Open();
        Stream stream = com.BaseStream;

        VerifyException(stream, readTimeout, expectedExceptionAfterOpen);

        com.Close();

        VerifyException(stream, readTimeout, expectedExceptionAfterClose);
    }
}

// Asserts that assigning the given ReadTimeout throws the expected exception
// (leaving the previous value in place) or, when no exception is expected,
// succeeds and round-trips the new value.
private void VerifyException(Stream stream, int readTimeout, Type expectedException)
{
    int origReadTimeout = stream.ReadTimeout;

    if (expectedException != null)
    {
        Assert.Throws(expectedException, () => stream.ReadTimeout = readTimeout);
        Assert.Equal(origReadTimeout, stream.ReadTimeout);
    }
    else
    {
        stream.ReadTimeout = readTimeout;
        Assert.Equal(readTimeout, stream.ReadTimeout);
    }
}

// ReadMethodDelegate wrapper over Stream.Read(byte[], int, int).
private void Read_byte_int_int(Stream stream)
{
    stream.Read(new byte[DEFAULT_READ_BYTE_ARRAY_SIZE], 0, DEFAULT_READ_BYTE_ARRAY_SIZE);
}

// ReadMethodDelegate wrapper over Stream.ReadByte().
private void ReadByte(Stream stream)
{
    stream.ReadByte();
}

// Small helper that lets a read method be invoked on a dedicated thread so
// the test can observe whether the call is still blocked.
private class ReadDelegateThread
{
    public ReadDelegateThread(Stream stream, ReadMethodDelegate readMethod)
    {
        _stream = stream;
        _readMethod = readMethod;
    }

    public void CallRead()
    {
        _readMethod(_stream);
    }

    private readonly ReadMethodDelegate _readMethod;
    private readonly Stream _stream;
}

#endregion
}
}
using J2N.Runtime.CompilerServices; using YAF.Lucene.Net.Support; using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Reflection; using System.Runtime.CompilerServices; using JCG = J2N.Collections.Generic; namespace YAF.Lucene.Net.Util { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// Estimates the size (memory representation) of .NET objects. /// <para/> /// @lucene.internal /// </summary> /// <seealso cref="SizeOf(object)"/> /// <seealso cref="ShallowSizeOf(object)"/> /// <seealso cref="ShallowSizeOfInstance(Type)"/> public sealed class RamUsageEstimator { ///// <summary> ///// JVM info string for debugging and reports. </summary> //public static readonly string JVM_INFO_STRING; // LUCENENET specific - this is not being used /// <summary> /// One kilobyte bytes. </summary> public const long ONE_KB = 1024; /// <summary> /// One megabyte bytes. </summary> public const long ONE_MB = ONE_KB * ONE_KB; /// <summary> /// One gigabyte bytes. </summary> public const long ONE_GB = ONE_KB * ONE_MB; /// <summary> /// No instantiation. 
</summary> private RamUsageEstimator() { } public const int NUM_BYTES_BOOLEAN = 1; public const int NUM_BYTES_BYTE = 1; public const int NUM_BYTES_CHAR = 2; /// <summary> /// NOTE: This was NUM_BYTES_SHORT in Lucene /// </summary> public const int NUM_BYTES_INT16 = 2; /// <summary> /// NOTE: This was NUM_BYTES_INT in Lucene /// </summary> public const int NUM_BYTES_INT32 = 4; /// <summary> /// NOTE: This was NUM_BYTES_FLOAT in Lucene /// </summary> public const int NUM_BYTES_SINGLE = 4; /// <summary> /// NOTE: This was NUM_BYTES_LONG in Lucene /// </summary> public const int NUM_BYTES_INT64 = 8; public const int NUM_BYTES_DOUBLE = 8; /// <summary> /// Number of bytes this .NET runtime uses to represent an object reference. /// </summary> public static readonly int NUM_BYTES_OBJECT_REF = Constants.RUNTIME_IS_64BIT ? 8 : 4; // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) /// <summary> /// Number of bytes to represent an object header (no fields, no alignments). /// </summary> public static readonly int NUM_BYTES_OBJECT_HEADER = Constants.RUNTIME_IS_64BIT ? (8 + 8) : 8; // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) /// <summary> /// Number of bytes to represent an array header (no content, but with alignments). /// </summary> public static readonly int NUM_BYTES_ARRAY_HEADER = Constants.RUNTIME_IS_64BIT ? (8 + 2 * 8) : 12; // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) /// <summary> /// A constant specifying the object alignment boundary inside the .NET runtime. Objects will /// always take a full multiple of this constant, possibly wasting some space. /// </summary> public static readonly int NUM_BYTES_OBJECT_ALIGNMENT = Constants.RUNTIME_IS_64BIT ? 
8 : 4; // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) /// <summary> /// Sizes of primitive classes. /// </summary> private static readonly IDictionary<Type, int> primitiveSizes = new JCG.Dictionary<Type, int>(IdentityEqualityComparer<Type>.Default) // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006) { [typeof(bool)] = NUM_BYTES_BOOLEAN, [typeof(sbyte)] = NUM_BYTES_BYTE, [typeof(byte)] = NUM_BYTES_BYTE, [typeof(char)] = NUM_BYTES_CHAR, [typeof(short)] = NUM_BYTES_INT16, [typeof(ushort)] = NUM_BYTES_INT16, [typeof(int)] = NUM_BYTES_INT32, [typeof(uint)] = NUM_BYTES_INT32, [typeof(float)] = NUM_BYTES_SINGLE, [typeof(double)] = NUM_BYTES_DOUBLE, [typeof(long)] = NUM_BYTES_INT64, [typeof(ulong)] = NUM_BYTES_INT64 }; // LUCENENET specific: Moved all estimates to static initializers to avoid using a static constructor //static RamUsageEstimator() //{ // Initialize empirically measured defaults. We'll modify them to the current // JVM settings later on if possible. // int referenceSize = Constants.RUNTIME_IS_64BIT ? 8 : 4; // int objectHeader = Constants.RUNTIME_IS_64BIT ? 16 : 8; // The following is objectHeader + NUM_BYTES_INT32, but aligned(object alignment) // so on 64 bit JVMs it'll be align(16 + 4, @8) = 24. // int arrayHeader = Constants.RUNTIME_IS_64BIT ? 24 : 12; // int objectAlignment = Constants.RUNTIME_IS_64BIT ? 8 : 4; // Type unsafeClass = null; // object tempTheUnsafe = null; // try // { // unsafeClass = Type.GetType("sun.misc.Unsafe"); // FieldInfo unsafeField = unsafeClass.getDeclaredField("theUnsafe"); // unsafeField.Accessible = true; // tempTheUnsafe = unsafeField.get(null); // } // catch (Exception e) // { // // Ignore. 
// } // TheUnsafe = tempTheUnsafe; // // get object reference size by getting scale factor of Object[] arrays: // try // { // Method arrayIndexScaleM = unsafeClass.GetMethod("arrayIndexScale", typeof(Type)); // referenceSize = (int)((Number)arrayIndexScaleM.invoke(TheUnsafe, typeof(object[]))); // } // catch (Exception e) // { // // ignore. // } // // "best guess" based on reference size. We will attempt to modify // // these to exact values if there is supported infrastructure. // objectHeader = Constants.RUNTIME_IS_64BIT ? (8 + referenceSize) : 8; // arrayHeader = Constants.RUNTIME_IS_64BIT ? (8 + 2 * referenceSize) : 12; // // get the object header size: // // - first try out if the field offsets are not scaled (see warning in Unsafe docs) // // - get the object header size by getting the field offset of the first field of a dummy object // // If the scaling is byte-wise and unsafe is available, enable dynamic size measurement for // // estimateRamUsage(). // Method tempObjectFieldOffsetMethod = null; // try // { // Method objectFieldOffsetM = unsafeClass.GetMethod("objectFieldOffset", typeof(FieldInfo)); // FieldInfo dummy1Field = typeof(DummyTwoLongObject).getDeclaredField("dummy1"); // int ofs1 = (int)((Number)objectFieldOffsetM.invoke(TheUnsafe, dummy1Field)); // FieldInfo dummy2Field = typeof(DummyTwoLongObject).getDeclaredField("dummy2"); // int ofs2 = (int)((Number)objectFieldOffsetM.invoke(TheUnsafe, dummy2Field)); // if (Math.Abs(ofs2 - ofs1) == NUM_BYTES_LONG) // { // FieldInfo baseField = typeof(DummyOneFieldObject).getDeclaredField("base"); // objectHeader = (int)((Number)objectFieldOffsetM.invoke(TheUnsafe, baseField)); // tempObjectFieldOffsetMethod = objectFieldOffsetM; // } // } // catch (Exception e) // { // // Ignore. // } // ObjectFieldOffsetMethod = tempObjectFieldOffsetMethod; // // Get the array header size by retrieving the array base offset // // (offset of the first element of an array). 
// try // { // Method arrayBaseOffsetM = unsafeClass.GetMethod("arrayBaseOffset", typeof(Type)); // // we calculate that only for byte[] arrays, it's actually the same for all types: // arrayHeader = (int)((Number)arrayBaseOffsetM.invoke(TheUnsafe, typeof(sbyte[]))); // } // catch (Exception e) // { // // Ignore. // } // NUM_BYTES_OBJECT_REF = referenceSize; // NUM_BYTES_OBJECT_HEADER = objectHeader; // NUM_BYTES_ARRAY_HEADER = arrayHeader; // // Try to get the object alignment (the default seems to be 8 on Hotspot, // // regardless of the architecture). // int objectAlignment = 8; // try // { // Type beanClazz = Type.GetType("com.sun.management.HotSpotDiagnosticMXBean").asSubclass(typeof(PlatformManagedObject)); // object hotSpotBean = ManagementFactory.getPlatformMXBean(beanClazz); // if (hotSpotBean != null) // { // Method getVMOptionMethod = beanClazz.GetMethod("getVMOption", typeof(string)); // object vmOption = getVMOptionMethod.invoke(hotSpotBean, "ObjectAlignmentInBytes"); // objectAlignment = Convert.ToInt32(vmOption.GetType().GetMethod("getValue").invoke(vmOption).ToString(), CultureInfo.InvariantCulture); // } // } // catch (Exception e) // { // // Ignore. // } // NUM_BYTES_OBJECT_ALIGNMENT = objectAlignment; // // LUCENENET specific -this is not being used // JVM_INFO_STRING = "[JVM: " + Constants.JVM_NAME + ", " + Constants.JVM_VERSION + ", " + Constants.JVM_VENDOR + ", " + Constants.JAVA_VENDOR + ", " + Constants.JAVA_VERSION + "]"; //} ///// <summary> ///// A handle to <code>sun.misc.Unsafe</code>. ///// </summary> //private static readonly object TheUnsafe; ///// <summary> ///// A handle to <code>sun.misc.Unsafe#fieldOffset(Field)</code>. ///// </summary> //private static readonly Method ObjectFieldOffsetMethod; /// <summary> /// Cached information about a given class. 
/// </summary> private sealed class ClassCache { public long AlignedShallowInstanceSize { get; private set; } [WritableArray] [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")] public FieldInfo[] ReferenceFields { get; private set; } public ClassCache(long alignedShallowInstanceSize, FieldInfo[] referenceFields) { this.AlignedShallowInstanceSize = alignedShallowInstanceSize; this.ReferenceFields = referenceFields; } } //// Object with just one field to determine the object header size by getting the offset of the dummy field: //private sealed class DummyOneFieldObject //{ // public sbyte @base; //} //// Another test object for checking, if the difference in offsets of dummy1 and dummy2 is 8 bytes. //// Only then we can be sure that those are real, unscaled offsets: //private sealed class DummyTwoLongObject //{ // public long Dummy1, Dummy2; //} /// <summary> /// Aligns an object size to be the next multiple of <see cref="NUM_BYTES_OBJECT_ALIGNMENT"/>. /// </summary> public static long AlignObjectSize(long size) { size += (long)NUM_BYTES_OBJECT_ALIGNMENT - 1L; return size - (size % NUM_BYTES_OBJECT_ALIGNMENT); } /// <summary> /// Returns the size in bytes of the <see cref="T:byte[]"/> object. </summary> // LUCENENET specific overload for CLS compliance public static long SizeOf(byte[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:sbyte[]"/> object. </summary> [CLSCompliant(false)] public static long SizeOf(sbyte[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:bool[]"/> object. </summary> public static long SizeOf(bool[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:char[]"/> object. 
</summary> public static long SizeOf(char[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_CHAR * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:short[]"/> object. </summary> public static long SizeOf(short[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT16 * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:int[]"/> object. </summary> public static long SizeOf(int[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT32 * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:float[]"/> object. </summary> public static long SizeOf(float[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_SINGLE * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:long[]"/> object. </summary> public static long SizeOf(long[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT64 * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:double[]"/> object. </summary> public static long SizeOf(double[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_DOUBLE * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:ulong[]"/> object. </summary> [CLSCompliant(false)] public static long SizeOf(ulong[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT64 * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:uint[]"/> object. </summary> [CLSCompliant(false)] public static long SizeOf(uint[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT32 * arr.Length); } /// <summary> /// Returns the size in bytes of the <see cref="T:ushort[]"/> object. 
</summary> [CLSCompliant(false)] public static long SizeOf(ushort[] arr) { return AlignObjectSize((long)NUM_BYTES_ARRAY_HEADER + (long)NUM_BYTES_INT16 * arr.Length); } /// <summary> /// Estimates the RAM usage by the given object. It will /// walk the object tree and sum up all referenced objects. /// /// <para><b>Resource Usage:</b> this method internally uses a set of /// every object seen during traversals so it does allocate memory /// (it isn't side-effect free). After the method exits, this memory /// should be GCed.</para> /// </summary> public static long SizeOf(object obj) { return MeasureObjectSize(obj); } /// <summary> /// Estimates a "shallow" memory usage of the given object. For arrays, this will be the /// memory taken by array storage (no subreferences will be followed). For objects, this /// will be the memory taken by the fields. /// <para/> /// .NET object alignments are also applied. /// </summary> public static long ShallowSizeOf(object obj) { if (obj == null) { return 0; } Type clz = obj.GetType(); if (clz.IsArray) { return ShallowSizeOfArray((Array)obj); } else { return ShallowSizeOfInstance(clz); } } /// <summary> /// Returns the shallow instance size in bytes an instance of the given class would occupy. /// This works with all conventional classes and primitive types, but not with arrays /// (the size then depends on the number of elements and varies from object to object). /// </summary> /// <seealso cref="ShallowSizeOf(object)"/> /// <exception cref="ArgumentException"> if <paramref name="clazz"/> is an array class. 
</exception> public static long ShallowSizeOfInstance(Type clazz) { if (clazz.IsArray) { throw new System.ArgumentException("this method does not work with array classes."); } if (clazz.GetTypeInfo().IsPrimitive) { return primitiveSizes[clazz]; } long size = NUM_BYTES_OBJECT_HEADER; // Walk type hierarchy for (; clazz != null; clazz = clazz.GetTypeInfo().BaseType) { FieldInfo[] fields = clazz.GetFields( BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static); foreach (FieldInfo f in fields) { if (!f.IsStatic) { size = AdjustForField(size, f); } } } return AlignObjectSize(size); } /// <summary> /// Return shallow size of any <paramref name="array"/>. /// </summary> private static long ShallowSizeOfArray(Array array) { long size = NUM_BYTES_ARRAY_HEADER; int len = array.Length; if (len > 0) { Type arrayElementClazz = array.GetType().GetElementType(); if (arrayElementClazz.GetTypeInfo().IsPrimitive) { size += (long)len * primitiveSizes[arrayElementClazz]; } else { size += (long)NUM_BYTES_OBJECT_REF * len; } } return AlignObjectSize(size); } /* * Non-recursive version of object descend. this consumes more memory than recursive in-depth * traversal but prevents stack overflows on long chains of objects * or complex graphs (a max. recursion depth on my machine was ~5000 objects linked in a chain * so not too much). */ private static long MeasureObjectSize(object root) { // Objects seen so far. IdentityHashSet<object> seen = new IdentityHashSet<object>(); // Class cache with reference Field and precalculated shallow size. IDictionary<Type, ClassCache> classCache = new JCG.Dictionary<Type, ClassCache>(IdentityEqualityComparer<Type>.Default); // Stack of objects pending traversal. Recursion caused stack overflows. 
Stack<object> stack = new Stack<object>(); stack.Push(root); long totalSize = 0; while (stack.Count > 0) { object ob = stack.Pop(); if (ob == null || seen.Contains(ob)) { continue; } seen.Add(ob); Type obClazz = ob.GetType(); if (obClazz.Equals(typeof(string))) { // LUCENENET specific - we can get a closer estimate of a string // by using simple math. Reference: http://stackoverflow.com/a/8171099. // This fixes the TestSanity test. totalSize += (2 * (((string)ob).Length + 1)); } if (obClazz.IsArray) { /* * Consider an array, possibly of primitive types. Push any of its references to * the processing stack and accumulate this array's shallow size. */ long size = NUM_BYTES_ARRAY_HEADER; Array array = (Array)ob; int len = array.Length; if (len > 0) { Type componentClazz = obClazz.GetElementType(); if (componentClazz.GetTypeInfo().IsPrimitive) { size += (long)len * primitiveSizes[componentClazz]; } else { size += (long)NUM_BYTES_OBJECT_REF * len; // Push refs for traversal later. for (int i = len; --i >= 0; ) { object o = array.GetValue(i); if (o != null && !seen.Contains(o)) { stack.Push(o); } } } } totalSize += AlignObjectSize(size); } else { /* * Consider an object. Push any references it has to the processing stack * and accumulate this object's shallow size. */ try { if (!classCache.TryGetValue(obClazz, out ClassCache cachedInfo) || cachedInfo == null) { classCache[obClazz] = cachedInfo = CreateCacheEntry(obClazz); } foreach (FieldInfo f in cachedInfo.ReferenceFields) { // Fast path to eliminate redundancies. object o = f.GetValue(ob); if (o != null && !seen.Contains(o)) { stack.Push(o); } } totalSize += cachedInfo.AlignedShallowInstanceSize; } catch (Exception e) { // this should never happen as we enabled setAccessible(). throw new Exception("Reflective field access failed?", e); } } } // Help the GC (?). 
seen.Clear(); stack.Clear(); classCache.Clear(); return totalSize; } /// <summary> /// Create a cached information about shallow size and reference fields for /// a given class. /// </summary> private static ClassCache CreateCacheEntry(Type clazz) { ClassCache cachedInfo; long shallowInstanceSize = NUM_BYTES_OBJECT_HEADER; List<FieldInfo> referenceFields = new List<FieldInfo>(32); for (Type c = clazz; c != null; c = c.GetTypeInfo().BaseType) { FieldInfo[] fields = c.GetFields( BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static); foreach (FieldInfo f in fields) { if (!f.IsStatic) { shallowInstanceSize = AdjustForField(shallowInstanceSize, f); if (!f.FieldType.GetTypeInfo().IsPrimitive) { referenceFields.Add(f); } } } } cachedInfo = new ClassCache(AlignObjectSize(shallowInstanceSize), referenceFields.ToArray()); return cachedInfo; } /// <summary> /// This method returns the maximum representation size of an object. <paramref name="sizeSoFar"/> /// is the object's size measured so far. <paramref name="f"/> is the field being probed. /// /// <para/>The returned offset will be the maximum of whatever was measured so far and /// <paramref name="f"/> field's offset and representation size (unaligned). /// </summary> private static long AdjustForField(long sizeSoFar, FieldInfo f) { Type type = f.FieldType; int fsize = 0; if (!typeof(IntPtr).Equals(type) && !typeof(UIntPtr).Equals(type)) fsize = type.GetTypeInfo().IsPrimitive ? 
primitiveSizes[type] : NUM_BYTES_OBJECT_REF; // LUCENENET NOTE: I dont think this will ever not be null //if (ObjectFieldOffsetMethod != null) //{ // try // { // long offsetPlusSize = (long)((Number) ObjectFieldOffsetMethod.invoke(TheUnsafe, f)) + fsize; // return Math.Max(sizeSoFar, offsetPlusSize); // } // catch (Exception ex) // { // throw new Exception("Access problem with sun.misc.Unsafe", ex); // } //} //else //{ // // TODO: No alignments based on field type/ subclass fields alignments? // return sizeSoFar + fsize; //} return sizeSoFar + fsize; } /// <summary> /// Returns <c>size</c> in human-readable units (GB, MB, KB or bytes). /// </summary> public static string HumanReadableUnits(long bytes) { return HumanReadableUnits(bytes, new NumberFormatInfo() { NumberDecimalDigits = 1 }); } /// <summary> /// Returns <c>size</c> in human-readable units (GB, MB, KB or bytes). /// </summary> public static string HumanReadableUnits(long bytes, IFormatProvider df) { if (bytes / ONE_GB > 0) { return Convert.ToString(((float)bytes / ONE_GB), df) + " GB"; } else if (bytes / ONE_MB > 0) { return Convert.ToString(((float)bytes / ONE_MB), df) + " MB"; } else if (bytes / ONE_KB > 0) { return Convert.ToString(((float)bytes / ONE_KB), df) + " KB"; } else { return Convert.ToString(bytes) + " bytes"; } } /// <summary> /// Return a human-readable size of a given object. </summary> /// <seealso cref="SizeOf(object)"/> /// <seealso cref="HumanReadableUnits(long)"/> public static string HumanSizeOf(object @object) { return HumanReadableUnits(SizeOf(@object)); } /// <summary> /// Return a human-readable size of a given object. </summary> /// <seealso cref="SizeOf(object)"/> /// <seealso cref="HumanReadableUnits(long)"/> public static string HumanSizeOf(object @object, IFormatProvider df) { return HumanReadableUnits(SizeOf(@object), df); } /// <summary> /// An identity hash set implemented using open addressing. No null keys are allowed. 
/// <para/> /// TODO: If this is useful outside this class, make it public - needs some work /// </summary> internal sealed class IdentityHashSet<KType> : IEnumerable<KType> { /// <summary> /// Default load factor. /// </summary> public const float DEFAULT_LOAD_FACTOR = 0.75f; /// <summary> /// Minimum capacity for the set. /// </summary> public const int MIN_CAPACITY = 4; /// <summary> /// All of set entries. Always of power of two length. /// </summary> [WritableArray] [SuppressMessage("Microsoft.Performance", "CA1819", Justification = "Lucene's design requires some writable array properties")] public object[] Keys { get { return keys; } set { keys = value; } } private object[] keys; /// <summary> /// Cached number of assigned slots. /// </summary> public int Assigned { get; set; } /// <summary> /// The load factor for this set (fraction of allocated or deleted slots before /// the buffers must be rehashed or reallocated). /// </summary> public float LoadFactor { get; private set; } /// <summary> /// Cached capacity threshold at which we must resize the buffers. /// </summary> private int resizeThreshold; /// <summary> /// Creates a hash set with the default capacity of 16, /// load factor of <see cref="DEFAULT_LOAD_FACTOR"/>. /// </summary> public IdentityHashSet() : this(16, DEFAULT_LOAD_FACTOR) { } /// <summary> /// Creates a hash set with the given capacity, load factor of /// <see cref="DEFAULT_LOAD_FACTOR"/>. /// </summary> public IdentityHashSet(int initialCapacity) : this(initialCapacity, DEFAULT_LOAD_FACTOR) { } /// <summary> /// Creates a hash set with the given capacity and load factor. 
/// </summary> public IdentityHashSet(int initialCapacity, float loadFactor) { initialCapacity = Math.Max(MIN_CAPACITY, initialCapacity); Debug.Assert(initialCapacity > 0, "Initial capacity must be between (0, " + int.MaxValue + "]."); Debug.Assert(loadFactor > 0 && loadFactor < 1, "Load factor must be between (0, 1)."); this.LoadFactor = loadFactor; AllocateBuffers(RoundCapacity(initialCapacity)); } /// <summary> /// Adds a reference to the set. Null keys are not allowed. /// </summary> public bool Add(KType e) { Debug.Assert(e != null, "Null keys not allowed."); if (Assigned >= resizeThreshold) { ExpandAndRehash(); } int mask = keys.Length - 1; int slot = Rehash(e) & mask; object existing; while ((existing = keys[slot]) != null) { if (object.ReferenceEquals(e, existing)) { return false; // already found. } slot = (slot + 1) & mask; } Assigned++; keys[slot] = e; return true; } /// <summary> /// Checks if the set contains a given ref. /// </summary> public bool Contains(KType e) { int mask = keys.Length - 1; int slot = Rehash(e) & mask; object existing; while ((existing = keys[slot]) != null) { if (object.ReferenceEquals(e, existing)) { return true; } slot = (slot + 1) & mask; } return false; } /// <summary> /// Rehash via MurmurHash. /// /// <para/>The implementation is based on the /// finalization step from Austin Appleby's /// <c>MurmurHash3</c>. /// /// See <a target="_blank" href="http://sites.google.com/site/murmurhash/">http://sites.google.com/site/murmurhash/</a>. /// </summary> private static int Rehash(object o) { int k = RuntimeHelpers.GetHashCode(o); k ^= (int)((uint)k >> 16); k *= unchecked((int)0x85ebca6b); k ^= (int)((uint)k >> 13); k *= unchecked((int)0xc2b2ae35); k ^= (int)((uint)k >> 16); return k; } /// <summary> /// Expand the internal storage buffers (capacity) or rehash current keys and /// values if there are a lot of deleted slots. 
/// </summary> private void ExpandAndRehash() { object[] oldKeys = this.keys; Debug.Assert(Assigned >= resizeThreshold); AllocateBuffers(NextCapacity(keys.Length)); /* * Rehash all assigned slots from the old hash table. */ int mask = keys.Length - 1; for (int i = 0; i < oldKeys.Length; i++) { object key = oldKeys[i]; if (key != null) { int slot = Rehash(key) & mask; while (keys[slot] != null) { slot = (slot + 1) & mask; } keys[slot] = key; } } Array.Clear(oldKeys, 0, oldKeys.Length); } /// <summary> /// Allocate internal buffers for a given <paramref name="capacity"/>. /// </summary> /// <param name="capacity"> /// New capacity (must be a power of two). </param> private void AllocateBuffers(int capacity) { this.keys = new object[capacity]; this.resizeThreshold = (int)(capacity * DEFAULT_LOAD_FACTOR); } /// <summary> /// Return the next possible capacity, counting from the current buffers' size. /// </summary> private int NextCapacity(int current) // LUCENENET NOTE: made private, since protected is not valid in a sealed class { Debug.Assert(current > 0 && ((current & (current - 1)) == 0), "Capacity must be a power of two."); Debug.Assert((current << 1) > 0, "Maximum capacity exceeded (" + ((int)((uint)0x80000000 >> 1)) + ")."); if (current < MIN_CAPACITY / 2) { current = MIN_CAPACITY / 2; } return current << 1; } /// <summary> /// Round the capacity to the next allowed value. /// </summary> private int RoundCapacity(int requestedCapacity) // LUCENENET NOTE: made private, since protected is not valid in a sealed class { // Maximum positive integer that is a power of two. if (requestedCapacity > ((int)((uint)0x80000000 >> 1))) { return ((int)((uint)0x80000000 >> 1)); } int capacity = MIN_CAPACITY; while (capacity < requestedCapacity) { capacity <<= 1; } return capacity; } public void Clear() { Assigned = 0; Array.Clear(keys, 0, keys.Length); } public int Count // LUCENENET NOTE: This was size() in Lucene. 
    {
        get
        {
            return Assigned;
        }
    }

    //public bool IsEmpty // LUCENENET NOTE: in .NET we can just use !Any() on IEnumerable<T>
    //{
    //    get
    //    {
    //        return Count == 0;
    //    }
    //}

    /// <summary>
    /// Enumerates the set's elements in slot order (arbitrary with respect to insertion).
    /// </summary>
    public IEnumerator<KType> GetEnumerator()
    {
        return new IteratorAnonymousInnerClassHelper(this);
    }

    System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    // One-shot forward enumerator over the non-null slots of the outer set's
    // keys array. NOTE(review): it reads the live array directly, so mutating
    // the set while enumerating is not detected — TODO confirm callers never do this.
    private class IteratorAnonymousInnerClassHelper : IEnumerator<KType>
    {
        private readonly IdentityHashSet<KType> outerInstance;

        public IteratorAnonymousInnerClassHelper(IdentityHashSet<KType> outerInstance)
        {
            this.outerInstance = outerInstance;
            // Prime one element ahead so MoveNext can report end-of-sequence
            // simply by observing a null lookahead.
            pos = -1;
            nextElement = FetchNext();
        }

        internal int pos;      // index of the slot last returned by FetchNext
        internal object nextElement; // lookahead element; null == exhausted
        internal KType current;

        public bool MoveNext()
        {
            object r = nextElement;
            if (nextElement == null)
            {
                return false;
            }
            nextElement = FetchNext();
            current = (KType)r;
            return true;
        }

        public KType Current
        {
            get { return current; }
        }

        object System.Collections.IEnumerator.Current
        {
            get { return Current; }
        }

        // Advances pos past empty (null) slots; returns the next occupied slot's
        // value, or null when the array is exhausted.
        private object FetchNext()
        {
            pos++;
            while (pos < outerInstance.keys.Length && outerInstance.keys[pos] == null)
            {
                pos++;
            }

            return (pos >= outerInstance.keys.Length ? null : outerInstance.keys[pos]);
        }

        public void Reset()
        {
            throw new NotSupportedException();
        }

        public void Dispose()
        {
        }
    }
}
}
}
using UnityEngine; using System.Collections; using System.Collections.Generic; using System.IO; using System; using System.Text; public class MainTab : VHMain { #region Constants const string TipBoxStartKey = "vhToolkitShowTipsAtStart"; /// <summary> /// private stuff /// </summary> enum AcquireSpeechState { Disabled, Off, On, InUse, NUM_STATES } enum PerceptionApplicationState { Disabled, TrackHead, TrackGaze } enum GameMode { FreeLook, Character, } #endregion #region Variables public VHMsgManager vhmsg; public FreeMouseLook m_camera; public SmartbodyManager m_SBM; public SpeechBox m_SpeechBox; public SBCharacterController m_CharacterController; public bool m_displayVhmsgLog = false; public Texture[] m_MicrophoneImages = new Texture[(int)AcquireSpeechState.NUM_STATES]; public Texture2D m_whiteTexture; public Cutscene m_IntroCutscene; Color m_currentColor; public GameObject m_LoadingScreenWhiteBg; public float m_DelayTimeAfterCutsceneFinishes = 2; public UnitySmartbodyCharacter[] m_Characters; // cameras public GameObject normalCamera; public MaterialCustomizer[] m_MaterialCustomizers; private bool m_forceGazeOnSetCamera = false; GameObject[] allCameras; Vector3 m_StartingCameraPosition; Quaternion m_StartingCameraRotation; AcquireSpeechState m_AcquireSpeechState = AcquireSpeechState.Disabled; PerceptionApplicationState m_PerceptionApplicationState = PerceptionApplicationState.Disabled; GameMode m_GameMode = GameMode.FreeLook; string m_SeqFile = ""; string m_PyFile = "init-unity"; //string m_locoCharacterName = "brad"; // for acquire speech int m_BradTalkId = 128; // toggles bool m_bLocomotionEnabled = true; bool m_showCustomizerGUI = false; bool m_bFinishedPreviousUtterance = true; bool m_bStartInAcquireSpeechMode = true; bool m_bIntroSequencePlaying = false; private bool m_showController = false; private float m_timeSlider = 1.0f; bool m_walkToMode = false; Vector3 m_walkToPoint; bool m_disableGUI = false; Texture2D m_CachedBgTexture; Rect m_MicImagePos; private 
string [] testUtteranceButtonText = { "1", "2", "Tts", "Tts2", "V2a", "V2b" }; private string[] GameModeNames; private int testUtteranceSelected = 0; private string [] testUtteranceCharacter = { "Brad", "Rachel", "Brad", "Rachel", "*", "*" }; private string [] testUtteranceName = { "brad_byte", "rachel_usc", "speech_womanTTS", "speech_womanTTS", "z_viseme_test2", "z_viseme_test3" }; private string [] testUtteranceText = { "", "", "If the system cannot find my regular voice, it defaults back to the Windows standard voice. Depending on your version of Windows that can be a womans voice. Dont I sound delightful?", "If the system cannot find my regular voice, it defaults back to the Windows standard voice. Depending on your version of Windows that can be a womans voice. Dont I sound delightful?", "", "" }; // the TTS text private string [] testTtsVoices = { "Festival_voice_cmu_us_jmk_arctic_clunits", "Festival_voice_cmu_us_clb_arctic_clunits", "Festival_voice_rab_diphone", "Festival_voice_kal_diphone", "Festival_voice_ked_diphone", "Microsoft|Anna", "star", "katherine" }; private string[] perceptionButtonText = { @"PerceptionApp OFF", @"Track Head", @"Track Gaze" }; private string[] sceneNames = { "Campus", "House", "LineUp", "Customizer", "CampusEmpty", "OculusRiftTest", "CampusTacQ" }; private string[] characterNames; private int testTtsSelected = 0; private int m_SelectedCharacter; private int perceptionSelected = 0; Vector3 m_chrBradStartPos; Quaternion m_chrBradStartRot; Vector3 m_chrRachelStartPos; Quaternion m_chrRachelStartRot; int m_gazingMode = 1; // 0 - off, 1 - gaze camera, 2 - gaze mouse cursor bool m_idleMode = true; List<GameObject> m_BrownHeads = new List<GameObject>(); #endregion #region Properties bool IsSpeechTextBoxInFocus { get { return SpeechBox.SpeechTextFieldName == GUI.GetNameOfFocusedControl(); } } bool InAcquireSpeechMode { get { return m_AcquireSpeechState == AcquireSpeechState.On || m_AcquireSpeechState == AcquireSpeechState.InUse; } } 
UnitySmartbodyCharacter SelectedCharacter { get { return m_Characters[m_SelectedCharacter]; } } SBCharacterController SelectedCharacterController { get { return SelectedCharacter.GetComponent<SBCharacterController>(); } } #endregion public void TossNPCDomain() { if (VHUtils.SceneManagerActiveSceneName() == "House") { string tossmessage = ""; tossmessage = string.Format("vrSpeech start user0001 user"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpeech finished-speaking user0001"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpeech interp user0001 1 1.0 normal INTEROCITOR SEATEC ASTRONOMY TRANSMOGRIFY EXPELIARMUS"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpeech asr-complete user0001"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpoke"); vhmsg.SendVHMsg(tossmessage); } else { string tossmessage = ""; tossmessage = string.Format("vrSpeech start user0001 user"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpeech finished-speaking user0001"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpeech interp user0001 1 1.0 normal THEREMIN NOSFERATU THERMOCOUPLE PATRONUS"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpeech asr-complete user0001"); vhmsg.SendVHMsg(tossmessage); tossmessage = string.Format("vrSpoke"); vhmsg.SendVHMsg(tossmessage); } } public override void Start() { Application.targetFrameRate = 60; base.Start(); m_userDialogText = ""; m_subtitleText = ""; DisplaySubtitles = true; DisplayUserDialog = true; //m_currentColor = new Color(); //m_currentColor = GameObject.Find("Background").renderer.material.color; if (m_IntroCutscene != null) { m_IntroCutscene.AddOnFinishedCutsceneCallback(IntroCutsceneFinished); m_IntroCutscene.AddOnEventFiredCallback(IntroEventFired); } m_StartingCameraPosition = m_camera.transform.position; m_StartingCameraRotation = m_camera.transform.rotation; ProcessCommandLineAndConfigSettings(); GameModeNames = 
Enum.GetNames(typeof(GameMode)); if (VHUtils.SceneManagerActiveSceneName() == "Campus") { if (m_Characters.Length > 0) { characterNames = new string[m_Characters.Length]; for (int i = 0; i < m_Characters.Length; i++) { if (m_Characters[i].GetComponent<SBCharacterController>() != null) { m_Characters[i].GetComponent<SBCharacterController>().enabled = false; } characterNames[i] = m_Characters[i].name; } } SelectCharacter(0); } m_SBM = SmartbodyManager.Get(); { Debug.Log("Using Smartbody dll"); m_SBM.AddCustomCharCreateCB(new SmartbodyManager.OnCustomCharacterCallback(OnCharacterCreate)); m_SBM.AddCustomCharDeleteCB(new SmartbodyManager.OnCustomCharacterCallback(OnCharacterDelete)); } m_Console.AddCommandCallback("set_loco_char_name", new DebugConsole.ConsoleCallback(HandleConsoleMessage)); m_Console.AddCommandCallback("play_intro", new DebugConsole.ConsoleCallback(HandleConsoleMessage)); m_Console.AddCommandCallback("set_tips", new DebugConsole.ConsoleCallback(HandleConsoleMessage)); m_Console.AddCommandCallback("show_tips", new DebugConsole.ConsoleCallback(HandleConsoleMessage)); m_MicImagePos = new Rect(0.92f, 0.85f, 0.06f, 0.06f); SubscribeVHMsg(); StartCoroutine(ShowIntro(0)); { var brad = GameObject.Find("Brad"); if (brad) { m_chrBradStartPos = brad.transform.position; m_chrBradStartRot = brad.transform.rotation; } var rachel = GameObject.Find("Rachel"); if (rachel) { m_chrRachelStartPos = rachel.transform.position; m_chrRachelStartRot = rachel.transform.rotation; } } #if UNITY_IPHONE || UNITY_ANDROID if (!m_Console.DrawConsole) m_Console.ToggleConsole(); m_showController = true; #endif if (VHUtils.SceneManagerActiveSceneName() == "Customizer") { //m_disableGUI = true; var speechBox = GameObject.Find("SpeechBox"); if (speechBox) speechBox.GetComponent<SpeechBox>().Show = false; m_AcquireSpeechState = AcquireSpeechState.Disabled; PlayIdleFidgets(false); m_currentColor = Color.white; //GameObject.Find("screen").renderer.material = null; List<string> names = new 
List<string>(); cameraChoices = GameObject.FindObjectsOfType(typeof(Camera)) as Camera[]; Array.Sort(cameraChoices, (a, b) => a.gameObject.name.CompareTo(b.gameObject.name) ); foreach (Camera camera in cameraChoices) { if (!(camera.gameObject.name == "Camera01_mediumCt")) camera.GetComponent<SmartbodyPawn>().AddToSmartbody(); camera.gameObject.SetActive(false); names.Add(camera.gameObject.name); if (camera.gameObject.name == "Camera01_mediumCt") { m_cameraSelectCurrent = names.Count - 1; cameraSelGridInt = names.Count - 1; } } cameraChoices[m_cameraSelectCurrent].gameObject.SetActive(true); cameraChoicesStrings = names.ToArray(); cameraSelectionHeight = cameraChoicesStrings.Length * 29; } else if (VHUtils.SceneManagerActiveSceneName() == "Campus") { UnSelectCharacters(); } //Find all the brown heads for hooking up the skin shader string[] brownPrefabNames = { "ChrBrownRocPrefab" }; for (int i = 0; i < brownPrefabNames.Length; i++) { GameObject brownPrefab = GameObject.Find(brownPrefabNames[i]); if (brownPrefab == null) { Debug.LogError(brownPrefabNames[i] + " doesn't exist in the scene, so his head couldn't be found"); } else { UnitySmartbodyCharacter sbcomponent = brownPrefab.GetComponent<UnitySmartbodyCharacter>(); if (sbcomponent) { sbcomponent.SetChannelCallback(ChrBrownRocChannelCallback); } GameObject brownHead = VHUtils.FindChild(brownPrefab, "ChrBrownRoc/CharacterRoot/Mesh/SkinnedMesh/MshRef/Head"); if (brownHead == null) { Debug.LogError("Couldn't find " + brownPrefabNames[i] + "'s head"); } else { m_BrownHeads.Add(brownHead); } } } #if false // Transparent window example [DllImport("user32.dll")] static extern int GetForegroundWindow(); [DllImport("user32.dll", EntryPoint="MoveWindow")] static extern int MoveWindow (int hwnd, int x, int y,int nWidth,int nHeight,int bRepaint ); [DllImport("user32.dll", EntryPoint="SetWindowLongA")] static extern int SetWindowLong (int hwnd, int nIndex,int dwNewLong); [DllImport("user32.dll")] static extern bool 
ShowWindowAsync(int hWnd, int nCmdShow); [DllImport("user32.dll", EntryPoint="SetLayeredWindowAttributes")] static extern int SetLayeredWindowAttributes (int hwnd, int crKey,byte bAlpha, int dwFlags ); int handle = GetForegroundWindow(); SetWindowLong(handle, -20, 524288); // GWL_EXSTYLE=-20 , WS_EX_LAYERED=524288=&h80000 SetLayeredWindowAttributes(handle, 0, 128, 2); // handle,color key = 0 >> black, % of transparency, LWA_ALPHA=1 #endif } void IntroCutsceneFinished(Cutscene cutscene) { StartCoroutine(IntroCutsceneFinishedCoroutine(cutscene, m_DelayTimeAfterCutsceneFinishes)); } void IntroEventFired(Cutscene cutscene, CutsceneEvent ce) { if (ce.FunctionName == "Express") { m_subtitleText = ""; } } IEnumerator IntroCutsceneFinishedCoroutine(Cutscene cutscene, float delay) { yield return new WaitForSeconds(delay); CleanupIntroSequence(); } void SubscribeVHMsg() { VHMsgBase vhmsg = VHMsgBase.Get(); vhmsg.SubscribeMessage("vrAllCall"); vhmsg.SubscribeMessage("vrKillComponent"); vhmsg.SubscribeMessage("vrExpress"); vhmsg.SubscribeMessage("vrSpoke"); vhmsg.SubscribeMessage("CommAPI"); vhmsg.SubscribeMessage("acquireSpeech"); vhmsg.SubscribeMessage("PlaySound"); vhmsg.SubscribeMessage("StopSound"); vhmsg.SubscribeMessage("renderer"); vhmsg.SubscribeMessage("render_text_overlay"); vhmsg.SubscribeMessage("vht_get_characters"); vhmsg.SubscribeMessage("renderer_record"); vhmsg.SubscribeMessage("renderer_gui"); vhmsg.SubscribeMessage("sbm"); vhmsg.AddMessageEventHandler(new VHMsgBase.MessageEventHandler(VHMsg_MessageEvent)); vhmsg.SendVHMsg("vrComponent renderer"); if (m_AcquireSpeechState != AcquireSpeechState.Disabled) { vhmsg.SendVHMsg("acquireSpeech start"); } { if (!string.IsNullOrEmpty(m_SeqFile)) vhmsg.SendVHMsg("sbm seq " + m_SeqFile); if (!string.IsNullOrEmpty(m_PyFile)) vhmsg.SendVHMsg("sbm pythonscript " + m_PyFile); } } IEnumerator ShowIntro(float delay) { yield return new WaitForSeconds(delay); bool showIntro = !Application.isEditor && 
VHUtils.HasCommandLineArgument("intro"); if (showIntro && VHUtils.SceneManagerActiveSceneName() == "Campus") { m_AcquireSpeechState = AcquireSpeechState.Disabled; vhmsg.SendVHMsg("nvbg_set_option Brad saliency_idle_gaze false"); vhmsg.SendVHMsg("nvbg_set_option Rachel saliency_idle_gaze false"); m_IntroCutscene.Play(); m_bIntroSequencePlaying = true; StartCoroutine(WaitForCutsceneEnd(m_IntroCutscene.Length)); } else { if (VHUtils.SceneManagerActiveSceneName() == "Customizer") { m_AcquireSpeechState = AcquireSpeechState.Disabled; PlayIdleFidgets(false); } else { m_AcquireSpeechState = AcquireSpeechState.Off; for (int i = 0; i < m_SBM.GetSBMCharacterNames().Length; ++i) { PlayIdleFidgets(true); } } } TossNPCDomain(); yield break; } private void UpdatePerceptionAppState() { perceptionSelected++; perceptionSelected %= perceptionButtonText.Length; if (m_PerceptionApplicationState == PerceptionApplicationState.Disabled) { //vhmsg.SendVHMsg("vrPerceptionApplication", "TOGGLE"); vhmsg.SendVHMsg("vrPerceptionApplication", "trackHead"); m_PerceptionApplicationState = PerceptionApplicationState.TrackHead; } else if (m_PerceptionApplicationState == PerceptionApplicationState.TrackHead) { vhmsg.SendVHMsg("vrPerceptionApplication", "trackGaze"); //vhmsg.SendVHMsg("vrPerceptionApplication", "trackHead"); m_PerceptionApplicationState = PerceptionApplicationState.TrackGaze; } else if (m_PerceptionApplicationState == PerceptionApplicationState.TrackGaze) { vhmsg.SendVHMsg("vrPerceptionApplication", "TOGGLE"); m_PerceptionApplicationState = PerceptionApplicationState.Disabled; } } private void ToggleGazeMode() { m_gazingMode++; m_gazingMode = m_gazingMode % 3; // skipping mousepawn gaze for tab demo if (m_gazingMode == 0) { m_SBM.PythonCommand(string.Format(@"scene.command('char {0} gazefade out 1')", "*")); } if (m_gazingMode == 1) { { m_SBM.SBGaze("*", "Camera"); //m_SBM.SBGaze("*", "Camera", 400, 400, SmartbodyManager.GazeJointRange.EYES_CHEST); } } if (m_gazingMode == 2) { 
m_SBM.SBGaze("*", "MousePawn"); } } IEnumerator WaitForCutsceneEnd(float cutsceneLength) { yield return new WaitForSeconds(cutsceneLength); } public void PlayIdleFidgets(bool _value) { for (int i = 0; i < m_SBM.GetSBMCharacterNames().Length; ++i) { vhmsg.SendVHMsg("nvbg_set_option " + (m_SBM.GetSBMCharacterNames())[i] + " saliency_idle_gaze " + Convert.ToString(_value)); } if (VHUtils.SceneManagerActiveSceneName() == "Customizer") { vhmsg.SendVHMsg("nvbg_set_option Brad saliency_idle_gaze false"); vhmsg.SendVHMsg("nvbg_set_option Rachel saliency_idle_gaze false"); } } public void Update() { if (m_SBM) { m_SBM.m_camPos = m_camera.transform.position; m_SBM.m_camRot = m_camera.transform.rotation; m_SBM.m_camFovY = m_camera.GetComponent<Camera>().fieldOfView; m_SBM.m_camAspect = m_camera.GetComponent<Camera>().aspect; m_SBM.m_camZNear = m_camera.GetComponent<Camera>().nearClipPlane; m_SBM.m_camZFar = m_camera.GetComponent<Camera>().farClipPlane; } if (Input.GetKeyDown(KeyCode.Escape)) { Application.Quit(); } m_camera.enabled = !m_Console.DrawConsole; if (!m_Console.DrawConsole && !IsSpeechTextBoxInFocus) // they aren't typing in a box { if (Input.GetKeyDown(KeyCode.Slash)) { if (m_IntroCutscene) { PlayIdleFidgets(false); m_IntroCutscene.Play(); } StartCoroutine(WaitForCutsceneEnd(m_IntroCutscene.Length)); } // kill intro if (Input.GetKeyDown(KeyCode.Alpha1) && m_bIntroSequencePlaying) { StopAllCoroutines(); CleanupIntroSequence(); } //Go forward and backward through 'slides' if (Input.GetKeyDown(KeyCode.RightArrow) ||Input.GetKeyDown(KeyCode.Alpha3)) { m_currentSlide = (m_currentSlide + 1) % m_slides.Length; SlidesScreen.GetComponent<Renderer>().material.mainTexture = m_slides[m_currentSlide]; } if (Input.GetKeyDown(KeyCode.LeftArrow) || Input.GetKeyDown(KeyCode.Alpha2)) { m_currentSlide = m_currentSlide == 0 ? 
m_slides.Length - 1 : m_currentSlide - 1; SlidesScreen.GetComponent<Renderer>().material.mainTexture = m_slides[m_currentSlide]; } //Trigger TAB initial presentation cutscene if (Input.GetKeyDown(KeyCode.Alpha4)) { Cutscene tabIntroCutscene = GameObject.Find("tab01_ArnoIntro").GetComponent<Cutscene>(); Debug.Log("tab01_ArnoIntro"); tabIntroCutscene.Play(); } //Trigger Brad's theater cutscene if (Input.GetKeyDown(KeyCode.Alpha5)) { Cutscene tabBradTheaterCutscene = GameObject.Find("tab02_BradTheater").GetComponent<Cutscene>(); Debug.Log("tab02_BradTheater"); tabBradTheaterCutscene.Play(); } //Trigger camera move to front of theater if (Input.GetKeyDown(KeyCode.Alpha6)) { Cutscene tabTheaterTransitionCutscene = GameObject.Find("tab02b_TransitionToFront").GetComponent<Cutscene>(); Debug.Log("tab02b_TransitionToFront"); tabTheaterTransitionCutscene.Play(); } //Trigger the campus tour cutscene if (Input.GetKeyDown(KeyCode.Alpha7)) { Cutscene tabCampusTourCutscene = GameObject.Find("tabMasterTour").GetComponent<Cutscene>(); Debug.Log("tabMasterTour"); tabCampusTourCutscene.Play(); } //Trigger Rachel's interaction if (Input.GetKeyDown(KeyCode.Alpha8)) { Cutscene tabRachelCutscene = GameObject.Find("tab04_RachelInterview").GetComponent<Cutscene>(); Debug.Log("tab04_RachelInterview"); tabRachelCutscene.Play(); } if (Input.GetKeyDown(KeyCode.C)) { m_showController = !m_showController; } if (Input.GetKeyDown(KeyCode.K)) { m_showCustomizerGUI = !m_showCustomizerGUI; } if (Input.GetKeyDown(KeyCode.G)) { SmartbodyManager.Get().PythonCommand(string.Format(@"bml.execBML('{0}', '<blend name=""{1}"" y=""{2}""/>')", "Brad", "ChrMarineStep", -1)); } if (Input.GetKeyUp(KeyCode.G)) { SmartbodyManager.Get().PythonCommand(string.Format(@"bml.execBML('{0}', '<blend name=""{1}"" y=""{2}""/>')", "Brad", "ChrMarineStep", 0)); SmartbodyManager.Get().PythonCommand(string.Format(@"bml.execBML('{0}', '<blend name=""{1}"" y=""{2}""/>')", "Brad", "PseudoIdle", 0)); } // toggle subtitle text if 
(Input.GetKeyDown(KeyCode.I)) { DisplaySubtitles = !DisplaySubtitles; } // toggle mic input if (Input.GetKeyDown(KeyCode.M) && m_AcquireSpeechState != AcquireSpeechState.Disabled && m_AcquireSpeechState != AcquireSpeechState.InUse) { SetAcquireSpeechState(m_AcquireSpeechState == AcquireSpeechState.On ? AcquireSpeechState.Off : AcquireSpeechState.On); } // toggle user dialog text if (Input.GetKeyDown(KeyCode.O)) { DisplayUserDialog = !DisplayUserDialog; } // toggle entire GUI if (Input.GetKeyDown(KeyCode.P)) { m_disableGUI = !m_disableGUI; var speechBox = GameObject.Find("SpeechBox"); if (m_disableGUI) { if (speechBox) speechBox.GetComponent<SpeechBox>().Show = false; } else { if (speechBox) speechBox.GetComponent<SpeechBox>().Show = true; } } if (Input.GetKeyDown(KeyCode.U)) { ToggleGazeMode(); } if (Input.GetKeyDown(KeyCode.V)) { m_SBM.SBPlayAnim("Brad", "ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Rachel", "ChrRachel_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Alexis", "Alexis_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Carl", "carl_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Joan", "Joan_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Justin", "justin_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Mia", "Mia_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Monster", "monster_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Soldier", "soldier_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Swat", "soldier_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Vincent", "soldier_ChrBrad@Idle01_Contemplate01"); m_SBM.SBPlayAnim("Zombie", "zombie_hires_ChrBrad@Idle01_Contemplate01"); } // reset camera position if (Input.GetKeyDown(KeyCode.X)) { m_camera.transform.position = m_StartingCameraPosition; m_camera.transform.rotation = m_StartingCameraRotation; } if (Input.GetKeyDown(KeyCode.Y)) { UpdatePerceptionAppState(); } if (Input.GetKeyDown(KeyCode.B)) { vhmsg.SendVHMsg("vrKillComponent", "ssi_vhmsger"); vhmsg.SendVHMsg("vrKillComponent", 
"perception-test-application"); } if (Input.GetKeyDown(KeyCode.F1)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 2, CharacterDefines.FaceSide.both, 0.6f, 4); } if (Input.GetKeyDown(KeyCode.F2)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 45, CharacterDefines.FaceSide.both, 1, 2); } if (Input.GetKeyDown(KeyCode.F3)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 100, CharacterDefines.FaceSide.left, 1, 4); m_SBM.SBPlayFAC("ChrBrownRocPrefab", 100, CharacterDefines.FaceSide.right, 1, 4); } if (Input.GetKeyDown(KeyCode.F4)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 110, CharacterDefines.FaceSide.left, 1, 4); m_SBM.SBPlayFAC("ChrBrownRocPrefab", 110, CharacterDefines.FaceSide.right, 1, 4); } if (Input.GetKeyDown(KeyCode.F5)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 120, CharacterDefines.FaceSide.left, 1, 4); m_SBM.SBPlayFAC("ChrBrownRocPrefab", 120, CharacterDefines.FaceSide.right, 1, 4); } if (Input.GetKeyDown(KeyCode.F6)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 50, CharacterDefines.FaceSide.both, 0.6f, 4); } if (Input.GetKeyDown(KeyCode.F7)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 50, CharacterDefines.FaceSide.both, 0.6f, 4); m_SBM.SBPlayFAC("ChrBrownRocPrefab", 100, CharacterDefines.FaceSide.left, 1, 4); } if (Input.GetKeyDown(KeyCode.F8)) { m_SBM.SBPlayFAC("ChrBrownRocPrefab", 50, CharacterDefines.FaceSide.both, 0.6f, 4); m_SBM.SBPlayFAC("ChrBrownRocPrefab", 100, CharacterDefines.FaceSide.right, 1, 4); } // walk to mouse position if (m_walkToMode && Input.GetMouseButtonDown(0)) { Ray ray = m_camera.GetComponent<Camera>().ScreenPointToRay(Input.mousePosition); RaycastHit hit; if (Physics.Raycast(ray, out hit)) { Debug.Log("Walk to: " + -hit.point.x + " " + hit.point.z); //SmartbodyManager.Get().SBWalkTo("*", string.Format("{0} {1}", -hit.point.x, hit.point.z), false); //SmartbodyManager.Get().SBWalkTo("Brad", string.Format("{0} {1}", -hit.point.x - 1, hit.point.z), false); //SmartbodyManager.Get().SBWalkTo("Rachel", string.Format("{0} {1}", -hit.point.x + 1, hit.point.z), false); 
//SmartbodyManager.Get().SBWalkTo("Rachel", string.Format("{0} {1}", -hit.point.x + 1, hit.point.z), false); //SmartbodyManager.Get().SBWalkTo("ChrBackovicPrefab", string.Format("{0} {1}", -hit.point.x - 1, hit.point.z), false); //SmartbodyManager.Get().SBWalkTo("ChrBrownRocPrefab", string.Format("{0} {1}", -hit.point.x + 1, hit.point.z), false); //SmartbodyManager.Get().SBWalkTo("ChrBackovicPrefab", string.Format("{0} {1}", -hit.point.x - 1, hit.point.z - 1), true); //SmartbodyManager.Get().SBWalkTo("ChrCrowleyPrefab", string.Format("{0} {1}", -hit.point.x - 1, hit.point.z + 1), true); //SmartbodyManager.Get().SBWalkTo("ChrJohnsonPrefab", string.Format("{0} {1}", -hit.point.x + 1, hit.point.z - 1), true); //SmartbodyManager.Get().SBWalkTo("ChrMcHughPrefab", string.Format("{0} {1}", -hit.point.x + 1, hit.point.z + 1), true); SmartbodyManager.Get().SBWalkTo("Brad", string.Format("{0} {1}", -hit.point.x - 2, hit.point.z), false); SmartbodyManager.Get().SBWalkTo("Rachel", string.Format("{0} {1}", -hit.point.x + 2, hit.point.z), false); SmartbodyManager.Get().SBWalkTo("ChrCrowleyPrefab", string.Format("{0} {1}", -hit.point.x, hit.point.z - 2), false); SmartbodyManager.Get().SBWalkTo("Ellie", string.Format("{0} {1}", -hit.point.x, hit.point.z + 2), false); SmartbodyManager.Get().SBWalkTo("ChrBrownRocPrefab", string.Format("{0} {1}", -hit.point.x, hit.point.z), false); m_walkToPoint = hit.point; } } if (m_gazingMode == 2) // gaze mouse cursor { Ray ray = m_camera.GetComponent<Camera>().ScreenPointToRay(Input.mousePosition); RaycastHit hit; if (Physics.Raycast(ray, out hit)) { GameObject mousePawn = GameObject.Find("MousePawn"); mousePawn.transform.position = hit.point; } else { GameObject mousePawn = GameObject.Find("MousePawn"); mousePawn.transform.position = m_camera.GetComponent<Camera>().transform.position; } } if (m_CharacterController) m_CharacterController.enabled = m_bLocomotionEnabled; } if (m_AcquireSpeechState == AcquireSpeechState.On || m_AcquireSpeechState 
== AcquireSpeechState.InUse) { if (Input.GetMouseButtonDown(0)) // 0 == mouse left click { vhmsg.SendVHMsg("acquireSpeech startUtterance mic"); m_AcquireSpeechState = AcquireSpeechState.InUse; } else if (Input.GetMouseButtonUp(0)) // 0 == mouse left click { vhmsg.SendVHMsg("acquireSpeech stopUtterance mic"); m_AcquireSpeechState = AcquireSpeechState.On; } } // lock the screen cursor if they are looking around or using their mic FreeMouseLook mouseLook = Camera.main ? Camera.main.GetComponent<FreeMouseLook>() : null; bool cameraRotationOn = mouseLook ? mouseLook.CameraRotationOn : false; Cursor.lockState = (InAcquireSpeechMode || cameraRotationOn) ? CursorLockMode.Locked : CursorLockMode.None; if (VHUtils.SceneManagerActiveSceneName() == "Customizer") { m_currentColor = GameObject.Find("ColorPicker").GetComponent<ColorPicker>().setColor; //Debug.LogError(m_currentColor.r + " " + m_currentColor.g + " " + m_currentColor.b + " " + m_currentColor.a); GameObject.Find("screen").GetComponent<Renderer>().material.color = m_currentColor; } UpdateBrownFace(); } // background select Vector2 m_scrollPosition; int selGridInt = 0; int backgroundSelectionHeight = 0; string [] backgroundFiles = new string [] {}; // character select Vector2 m_characterScrollPosition; static string [] characterChoices = new string [] { "Brad", "Harmony", "JustinIct", "Pedro", "Rachel", "Rio", "Utah" }; int characterSelectionHeight = characterChoices.Length * 29; int characterSelGridInt = 0; string m_characterSelectCurrent = "Brad"; // camera select Vector2 m_cameraScrollPosition; Camera [] cameraChoices; string [] cameraChoicesStrings; int cameraSelectionHeight; int cameraSelGridInt = 0; int m_cameraSelectCurrent = 0; public GameObject SlidesScreen; public Texture2D [] m_slides; int m_currentSlide = 0; void SwapTexture(MonoBehaviour behaviour, WWW www) { behaviour.StartCoroutine(SwapTexture(www)); } IEnumerator SwapTexture(WWW www) { yield return www; if (m_CachedBgTexture != null) { 
Destroy(m_CachedBgTexture); m_CachedBgTexture = null; } m_CachedBgTexture = new Texture2D(4, 4); www.LoadImageIntoTexture(m_CachedBgTexture); GameObject.Find("screen").GetComponent<Renderer>().material.mainTexture = m_CachedBgTexture; } public override void OnGUI() { if (m_disableGUI) return; base.OnGUI(); if (m_AcquireSpeechState != AcquireSpeechState.Disabled) { if (m_MicrophoneImages[(int)m_AcquireSpeechState]) VHIMGUI.DrawTexture(m_MicImagePos, m_MicrophoneImages[(int)m_AcquireSpeechState]); } if (VHUtils.SceneManagerActiveSceneName() == "Customizer") { if (m_showCustomizerGUI) { GUILayout.BeginArea(new Rect(10, 10, 200, 600)); GUILayout.BeginVertical(); // background select m_scrollPosition = GUILayout.BeginScrollView(m_scrollPosition, false, true, GUILayout.Height(100), GUILayout.MaxHeight(Math.Max(100, backgroundSelectionHeight))); selGridInt = GUILayout.SelectionGrid(selGridInt, backgroundFiles, 1); GUILayout.EndScrollView(); GUILayout.BeginHorizontal(); if (GUILayout.Button("Set")) { string path = "Backgrounds/" + backgroundFiles[selGridInt]; VHFile.LoadStreamingAssetsAsync(path); WWW www = VHFile.LoadStreamingAssetsAsync(path); SwapTexture(this, www); } if (GUILayout.Button("Refresh")) { string path = "Backgrounds"; List<string> files = new List<string>(); files.AddRange(VHFile.GetStreamingAssetsFiles(path, ".png")); files.AddRange(VHFile.GetStreamingAssetsFiles(path, ".jpg")); files.AddRange(VHFile.GetStreamingAssetsFiles(path, ".bmp")); for (int i = 0; i < files.Count; i++) files[i] = Path.GetFileName(files[i]); backgroundFiles = files.ToArray(); backgroundSelectionHeight = backgroundFiles.Length * 29; selGridInt = 0; } GUILayout.EndHorizontal(); if (GUILayout.Button("ColorPicker")) { GameObject.Find("ColorPicker").GetComponent<ColorPicker>().showPicker = true; } GUILayout.Space(50); // character select m_characterScrollPosition = GUILayout.BeginScrollView(m_characterScrollPosition, false, true, GUILayout.Height(100), GUILayout.MaxHeight(Math.Max(100, 
characterSelectionHeight))); characterSelGridInt = GUILayout.SelectionGrid(characterSelGridInt, characterChoices, 1); GUILayout.EndScrollView(); if (GUILayout.Button("Set Character")) { VHMsgManager.Get().SendVHMsg(string.Format("renderer destroy {0}", m_characterSelectCurrent)); VHMsgManager.Get().SendVHMsg(string.Format("renderer create {0} {1}", characterChoices[characterSelGridInt], characterChoices[characterSelGridInt])); m_characterSelectCurrent = characterChoices[characterSelGridInt]; } GUILayout.Space(50); // camera select m_cameraScrollPosition = GUILayout.BeginScrollView(m_cameraScrollPosition, false, true, GUILayout.Height(100), GUILayout.MaxHeight(Math.Max(100, cameraSelectionHeight))); cameraSelGridInt = GUILayout.SelectionGrid(cameraSelGridInt, cameraChoicesStrings, 1); GUILayout.EndScrollView(); if (GUILayout.Button("Set Camera")) { cameraChoices[m_cameraSelectCurrent].gameObject.SetActive(false); m_cameraSelectCurrent = cameraSelGridInt; cameraChoices[m_cameraSelectCurrent].gameObject.SetActive(true); SmartbodyManager.Get().SBGaze("*", cameraChoicesStrings[m_cameraSelectCurrent]); } GUILayout.EndVertical(); GUILayout.EndArea(); } } if (m_showController) { float buttonX = 0; float buttonY = 0; #if UNITY_IPHONE || UNITY_ANDROID float buttonH = 70; #else float buttonH = 20; #endif float buttonW = 140; GUILayout.BeginArea(new Rect(buttonX, buttonY, buttonW, Screen.height)); GUILayout.BeginVertical(); for (int i = 0; i < sceneNames.Length; i++) { if (VHUtils.SceneManagerActiveSceneName() == sceneNames[i]) { continue; } if (GUILayout.Button("Load " + sceneNames[i])) { VHUtils.SceneManagerLoadScene(sceneNames[i]); } } GUILayout.BeginHorizontal(); if (GUILayout.Button(testUtteranceButtonText[testUtteranceSelected], GUILayout.Height(buttonH))) { testUtteranceSelected++; testUtteranceSelected = testUtteranceSelected % testUtteranceButtonText.Length; } if (GUILayout.Button("Test Utt", GUILayout.Height(buttonH))) { 
m_SBM.SBPlayAudio(testUtteranceCharacter[testUtteranceSelected], testUtteranceName[testUtteranceSelected], testUtteranceText[testUtteranceSelected]); MobilePlayAudio(testUtteranceName[testUtteranceSelected]); } GUILayout.EndHorizontal(); if (GUILayout.Button(testTtsVoices[testTtsSelected], GUILayout.Height(buttonH))) { testTtsSelected++; testTtsSelected = testTtsSelected % testTtsVoices.Length; string message = string.Format("sbm set character {0} voicebackup remote {1}", "Brad", testTtsVoices[testTtsSelected]); vhmsg.SendVHMsg(message); } m_walkToMode = GUILayout.Toggle(m_walkToMode, "WalkToMode"); m_SBM.m_displayLogMessages = GUILayout.Toggle(m_SBM.m_displayLogMessages, "SBMLog"); m_displayVhmsgLog = GUILayout.Toggle(m_displayVhmsgLog, "VHMsgLog"); m_timeSlider = GUILayout.HorizontalSlider(m_timeSlider, 0.01f, 3); GUILayout.Label(string.Format("Time: {0}", m_timeSlider)); #if !UNITY_WEBPLAYER if (GUILayout.Button("Launch Pocketsphinx")) { System.Diagnostics.ProcessStartInfo startInfo = new System.Diagnostics.ProcessStartInfo(); startInfo.FileName = Application.dataPath + "/../" + "../../bin/launch-scripts/run-toolkit-asr-server-TABGFY13.bat"; startInfo.Arguments = "Pocketphinx"; startInfo.WorkingDirectory = Application.dataPath + "/../" + "../../bin/launch-scripts"; System.Diagnostics.Process.Start(startInfo); } if (GUILayout.Button("Launch Acquirespeech")) { System.Diagnostics.ProcessStartInfo startInfo = new System.Diagnostics.ProcessStartInfo(); startInfo.FileName = Application.dataPath + "/../" + "../../bin/launch-scripts/run-toolkit-acquirespeech.bat"; startInfo.Arguments = "PocketSphinx"; startInfo.WorkingDirectory = Application.dataPath + "/../" + "../../bin/launch-scripts"; System.Diagnostics.Process.Start(startInfo); } if (GUILayout.Button("Launch NPCEditor")) { System.Diagnostics.ProcessStartInfo startInfo = new System.Diagnostics.ProcessStartInfo(); startInfo.FileName = Application.dataPath + "/../" + 
"../../bin/launch-scripts/run-toolkit-npceditor-vhbuilder.bat"; startInfo.Arguments = "../../data/classifier/racheltab.plist"; startInfo.WorkingDirectory = Application.dataPath + "/../" + "../../bin/launch-scripts"; System.Diagnostics.Process.Start(startInfo); } if (GUILayout.Button("Launch NVBG")) { System.Diagnostics.ProcessStartInfo startInfo = new System.Diagnostics.ProcessStartInfo(); startInfo.FileName = Application.dataPath + "/../" + "../../bin/launch-scripts/run-toolkit-NVBG-C#-all.bat"; startInfo.WorkingDirectory = Application.dataPath + "/../" + "../../bin/launch-scripts"; System.Diagnostics.Process.Start(startInfo); } #endif if (GUILayout.Button(perceptionButtonText[perceptionSelected])) { UpdatePerceptionAppState(); } if (GUILayout.Button("Stop Walking")) { string message = string.Format(@"bml.execBML('{0}', '<locomotion enable=""{1}"" />')", "*", "false"); SmartbodyManager.Get().PythonCommand(message); } string gazeMode = ""; if (m_gazingMode == 0) gazeMode = "(Off)"; else if (m_gazingMode == 1) gazeMode = "(Camera)"; else if (m_gazingMode == 2) gazeMode = "(Mouse)"; if (GUILayout.Button(string.Format("Toggle Gaze {0}", gazeMode))) { ToggleGazeMode(); } if (GUILayout.Button(string.Format("Turn Idles {0}", m_idleMode ? "Off" : "On"))) { m_idleMode = !m_idleMode; string onoff = m_idleMode ? 
"true" : "false"; VHMsgManager.Get().SendVHMsg(string.Format(@"nvbg_set_option {0} saliency_glance {1}", "Brad", onoff)); VHMsgManager.Get().SendVHMsg(string.Format(@"nvbg_set_option {0} saliency_glance {1}", "Rachel", onoff)); VHMsgManager.Get().SendVHMsg(string.Format(@"nvbg_set_option {0} saliency_idle_gaze {1}", "Brad", onoff)); VHMsgManager.Get().SendVHMsg(string.Format(@"nvbg_set_option {0} saliency_idle_gaze {1}", "Rachel", onoff)); } if (GUILayout.Button("Reset", GUILayout.Height(buttonH))) { m_SBM.SBTransform("Brad", m_chrBradStartPos, m_chrBradStartRot); m_SBM.SBTransform("Rachel", m_chrRachelStartPos, m_chrRachelStartRot); } if (VHUtils.SceneManagerActiveSceneName() == "Campus") { GUILayout.Label("Game Mode"); GameMode prevMode = m_GameMode; m_GameMode = (GameMode)GUILayout.Toolbar((int)m_GameMode, GameModeNames); if (m_GameMode != prevMode) { SwitchGameMode(m_GameMode, prevMode); } if (m_GameMode == GameMode.Character) { int prevChar = m_SelectedCharacter; m_SelectedCharacter = GUILayout.Toolbar(m_SelectedCharacter, characterNames); if (prevChar != m_SelectedCharacter) { UnSelectCharacter(prevChar); SelectCharacter(m_SelectedCharacter); } } } GUILayout.EndVertical(); GUILayout.EndArea(); Time.timeScale = m_timeSlider; } if (m_walkToMode) { Vector3 screenPoint = m_camera.gameObject.GetComponent<Camera>().WorldToScreenPoint(m_walkToPoint); GUI.color = new Color(1, 0, 0, 1); float boxH = 10; float boxW = 10; Rect r = new Rect(screenPoint.x - (boxW / 2), (m_camera.gameObject.GetComponent<Camera>().pixelHeight - screenPoint.y) - (boxH / 2), boxW, boxH); GUI.DrawTexture(r, m_whiteTexture); GUI.color = Color.white; } } void SwitchGameMode(GameMode newMode, GameMode oldMode) { m_GameMode = newMode; switch (oldMode) { case GameMode.FreeLook: m_camera.gameObject.SetActive(false); break; case GameMode.Character: UnSelectCharacters(); break; } switch (newMode) { case GameMode.FreeLook: m_camera.gameObject.SetActive(true); break; case GameMode.Character: 
SelectCharacter(m_SelectedCharacter); break; } } //void DisableCharacters() //{ // UnSelectCharacters(); // for (int i = 0; i < m_Characters.Length; i++) // { // m_Characters[i].GetComponentInChildren<SBCharacterController>().enabled = false; // } //} void UnSelectCharacters() { for (int i = 0; i < m_Characters.Length; i++) { //m_Characters[i].GetComponentInChildren<SBCharacterController>().enabled = false; VHUtils.FindChild(m_Characters[i].gameObject, "Camera").GetComponent<Camera>().enabled = false; //m_Characters[i].GetComponent<Camera>().enabled = false; } } void UnSelectCharacter(int selection) { m_Characters[selection].GetComponentInChildren<SBCharacterController>().enabled = false; VHUtils.FindChild(m_Characters[selection].gameObject, "Camera").GetComponent<Camera>().enabled = false; } void SelectCharacter(int selection) { UnSelectCharacters(); m_SelectedCharacter = selection; SelectedCharacterController.enabled = true; VHUtils.FindChild(SelectedCharacter.gameObject, "Camera").GetComponent<Camera>().enabled = true; //SelectedCharacter.GetComponentInChildren<Camera>().enabled = true; } public override void OnApplicationQuit() { base.OnApplicationQuit(); } void VHMsg_MessageEvent(object sender, VHMsgBase.Message message) { if (m_displayVhmsgLog) { Debug.Log("VHMsg recvd: " + message.s); } string [] splitargs = message.s.Split( " ".ToCharArray() ); if (splitargs.Length > 0) { if (splitargs[0] == "vrAllCall") { vhmsg.SendVHMsg("vrComponent renderer"); } else if (splitargs[0] == "vrKillComponent") { if (splitargs.Length > 1) { if (splitargs[1] == "renderer" || splitargs[1] == "all") { if (Application.isEditor) { #if UNITY_EDITOR UnityEditor.EditorApplication.ExecuteMenuItem( "Edit/Play" ); #endif } else { Application.Quit(); } } } } else if (splitargs[0] == "PlaySound") { string path = splitargs[1].Trim('"'); // PlaySound has double quotes around the sound file. remove them before continuing. 
path = Path.GetFullPath(path); path = path.Replace("\\", "/"); path = "file://" + path; WWW www = new WWW(path); VHUtils.PlayWWWSound(this, www, m_SBM.GetCharacterVoice(splitargs[2]), false); } else if (splitargs[0] == "StopSound") { //NOTE // currently stopping all characters on stopsound // needs to be changed to only affect the character in question string[] charNames = m_SBM.GetSBMCharacterNames(); for (int i = 0; i < charNames.Length; ++i) { m_SBM.GetCharacterVoice(charNames[i]).Stop(); } } else if (splitargs[0] == "vrExpress") { m_bFinishedPreviousUtterance = false; } else if (splitargs[0] == "vrSpoke") { m_bFinishedPreviousUtterance = true; HandlevrSpokeMessage(); } else if (splitargs[0] == "CommAPI") { // CommAPI setcameraposition <x> <y> <z> // CommAPI setcamerarotation <x> <y> <z> if (splitargs.Length >= 1) { if (splitargs[1] == "setcameraposition") { if (splitargs.Length >= 5) { Vector3 position = VHMath.ConvertStringsToVector(splitargs[2], splitargs[3], splitargs[4]); m_camera.transform.position = position; } } else if (splitargs[1] == "setcamerarotation") { if (splitargs.Length >= 5) { // x,y,z = Orientation in degrees. 
(default coord system would match x,y,z to r,h,p Vector3 rotation = VHMath.ConvertStringsToVector(splitargs[2], splitargs[3], splitargs[4]); m_camera.transform.localRotation = Quaternion.Euler(rotation); } } } } else if (splitargs[0] == "renderer") { if (splitargs.Length >= 1) { // "renderer log testing testing" // "renderer console show_tips 1" string function = splitargs[1].ToLower(); string[] rendererSplitArgs = new string[splitargs.Length - 2]; Array.Copy(splitargs, 2, rendererSplitArgs, 0, splitargs.Length - 2); gameObject.SendMessage(function, rendererSplitArgs); } } else if (splitargs[0] == "sbm") { ////HACK HACK HACK HACK TO BE REMOVED ////HACK HACK HACK HACK TO BE REMOVED ////HACK HACK HACK HACK TO BE REMOVED if (splitargs.Length > 1) { if (splitargs[1].Equals("vrSpoke")) { if (VHUtils.SceneManagerActiveSceneName() == "Campus") { if (!m_IntroCutscene.HasStartedPlaying) { m_subtitleText = ""; m_userDialogText = ""; } } else { m_subtitleText = ""; m_userDialogText = ""; } } } } else if (splitargs[0].Equals("renderer_record")) { } else if (splitargs[0].Equals("render_text_overlay")) { if (splitargs.Length >= 1) { if (splitargs[1].Equals("disable")) { DisplaySubtitles = false; DisplayUserDialog = false; } if (splitargs[1].Equals("enable")) { m_subtitleText = ""; m_userDialogText = ""; DisplaySubtitles = true; DisplayUserDialog = true; } } } else if (splitargs[0].Equals("renderer_gui")) { if (splitargs.Length >= 1) { var speechBox = GameObject.Find("SpeechBox"); if (splitargs[1].Equals("True")) { m_AcquireSpeechState = AcquireSpeechState.Off; if (speechBox) speechBox.GetComponent<SpeechBox>().Show = true; } else { if (speechBox) speechBox.GetComponent<SpeechBox>().Show = false; m_AcquireSpeechState = AcquireSpeechState.Disabled; } } } else if (splitargs[0].Equals("vht_get_characters")) { string[] retval = m_SBM.GetSBMCharacterNames(); string charNames = ""; for (int i = 0; i < retval.Length; ++i) { charNames += retval[i] + " "; } vhmsg.SendVHMsg("VHBuilder 
character_names " + charNames); } } } void HandlevrSpokeMessage() { m_userDialogText = ""; if (!m_bIntroSequencePlaying) { m_subtitleText = ""; } } void OnCharacterCreate(UnitySmartbodyCharacter character) { Debug.Log(string.Format("Character '{0}' created", character.SBMCharacterName)); } void OnCharacterDelete(UnitySmartbodyCharacter character) { Debug.Log(string.Format("Character '{0}' deleted", character.SBMCharacterName)); } void ProcessCommandLineAndConfigSettings() { m_SeqFile = m_ConfigFile.GetSetting("general", "DefaultSeqFile"); if (!string.IsNullOrEmpty(m_SeqFile)) Debug.Log("m_SeqFile: " + m_SeqFile); m_PyFile = m_ConfigFile.GetSetting("general", "DefaultPyFile"); if (!string.IsNullOrEmpty(m_PyFile)) Debug.Log("m_PyFile: " + m_PyFile); if (m_ConfigFile.SettingExists("general", "CameraMoveSpeed")) { m_camera.movementSpeed = float.Parse(m_ConfigFile.GetSetting("general", "CameraMoveSpeed")); } if (m_ConfigFile.SettingExists("general", "CameraRotateSpeed")) { m_camera.sensitivityX = m_camera.sensitivityY = float.Parse(m_ConfigFile.GetSetting("general", "CameraRotateSpeed")); } if (m_ConfigFile.SettingExists("general", "CameraSecondaryMoveSpeed")) { m_camera.secondaryMovementSpeed = float.Parse(m_ConfigFile.GetSetting("general", "CameraSecondaryMoveSpeed")); } if (m_ConfigFile.SettingExists("general", "CameraFrustumNear")) { m_camera.GetComponent<Camera>().nearClipPlane = float.Parse(m_ConfigFile.GetSetting("general", "CameraFrustumNear")); } if (m_ConfigFile.SettingExists("general", "CameraFrustumFar")) { m_camera.GetComponent<Camera>().farClipPlane = float.Parse(m_ConfigFile.GetSetting("general", "CameraFrustumFar")); } m_bStartInAcquireSpeechMode = bool.Parse(m_ConfigFile.GetSetting("general", "StartInAcquireSpeechMode")); m_AcquireSpeechState = m_bStartInAcquireSpeechMode ? 
AcquireSpeechState.On : AcquireSpeechState.Off; // setup resolution // resolution 640 x 480 string resolution = VHUtils.GetCommandLineArgumentValue("resolution"); string fullscreen = VHUtils.GetCommandLineArgumentValue("fullscreen"); bool full = false; bool.TryParse(fullscreen, out full); Screen.fullScreen = full; if (!string.IsNullOrEmpty(resolution)) { int screenWidth = Screen.width; int screenHeight = Screen.height; string[] widthHeightStrings = resolution.Split('x'); if (widthHeightStrings.Length == 2 && int.TryParse(widthHeightStrings[0], out screenWidth) && int.TryParse(widthHeightStrings[1], out screenHeight)) { SetResolution(screenWidth, screenHeight, Screen.fullScreen); } } } protected void log( string [] args ) { if (args.Length > 0) { string argsString = String.Join(" ", args); Debug.Log(argsString); } } protected void console( string [] args ) { if (args.Length > 0) { string argsString = String.Join(" ", args); HandleConsoleMessage(argsString, m_Console); } } protected void color(string[] args) { if (args.Length > 2) { //Debug.LogError(args[0] + args[1] + args[2]); int r = Convert.ToInt32(args[0]); int g = Convert.ToInt32(args[1]); int b = Convert.ToInt32(args[2]); m_currentColor = new Color(((float)r/255), ((float)g/255), ((float)b/255)); if (VHUtils.SceneManagerActiveSceneName() == "Customizer") { //m_currentColor = GameObject.Find("ColorPicker").GetComponent<ColorPicker>().setColor; //GameObject.Find("Background").renderer.material.color = m_currentColor; GameObject.Find("ColorPicker").GetComponent<ColorPicker>().setColor = m_currentColor; } //GameObject.Find("Background").renderer.material.color = m_currentColor; } } protected void customizer(string[] args) { if (args.Length > 4) { string characterName = args[0]; string displayName = args[1]; int r = Convert.ToInt32(args[2]); int g = Convert.ToInt32(args[3]); int b = Convert.ToInt32(args[4]); UnitySmartbodyCharacter sbChar = m_SBM.GetCharacterByName(characterName); if (sbChar != null) { 
MaterialCustomizer matCustomizer = sbChar.GetComponent<MaterialCustomizer>(); if (matCustomizer != null) { matCustomizer.SetColor(displayName, new Color(((float)r / 255), ((float)g / 255), ((float)b / 255))); } } m_currentColor = new Color(((float)r / 255), ((float)g / 255), ((float)b / 255)); } else if (args.Length > 1) { string characterName = args[0]; float value = float.Parse(args[1]); UnitySmartbodyCharacter sbChar = m_SBM.GetCharacterByName(characterName); if (sbChar != null) { MaterialCustomizer matCustomizer = sbChar.GetComponent<MaterialCustomizer>(); if (matCustomizer != null) { matCustomizer.SetFloat("Skin", 1 - value); } } } } protected void background( string [] args ) { if (args.Length > 0) { if (args[0] == "file") { // renderer background file background.png if (args.Length > 1) { string background = ""; int i = 0; for (i = 1; i < args.Length -1; ++i) { background += args[i] + " "; } background += args[i]; string path = "Backgrounds/" + background; VHFile.LoadStreamingAssetsAsync(path); WWW www = VHFile.LoadStreamingAssetsAsync(path); SwapTexture(this, www); } } } } protected void codec(string[] args) { if (args.Length > 0) { string codec = ""; int i = 0; for (i = 0; i < args.Length -1; ++i) { codec += args[i] + " "; } codec += args[i]; Debug.Log(codec); SetVideoCodec(codec); } } public void SetVideoCodec(string _codecName) { } IEnumerator GazeAtCamera() { yield return new WaitForSeconds(0.3f); //SmartbodyManager.Get().SBGaze("*", cameraChoicesStrings[m_cameraSelectCurrent], 500); string message = string.Format(@"sbm bml char * <gaze target=""{0}"" sbm:joint-range=""HEAD EYES NECK"" sbm:joint-speed=""{1}""/>", cameraChoicesStrings[m_cameraSelectCurrent], 500); vhmsg.SendVHMsg(message); } protected void setcamera( string [] args ) { if (args.Length > 0) { if (args[0] == "set") { // renderer setcamera set Camera2 if (args.Length > 1) { string camera = args[1]; cameraChoices[m_cameraSelectCurrent].gameObject.SetActive(false); for (int i = 0; i < 
cameraChoices.Length; i++) { if (cameraChoices[i].name == camera) { m_cameraSelectCurrent = i; break; } } cameraChoices[m_cameraSelectCurrent].gameObject.SetActive(true); if (m_forceGazeOnSetCamera) StartCoroutine(GazeAtCamera()); } } else if (args[0] == "force_gaze") { if (args.Length > 1) { if (Convert.ToBoolean(args[1])) { m_forceGazeOnSetCamera = true; } else { m_forceGazeOnSetCamera = false; } } } } } protected override void HandleConsoleMessage(string commandEntered, DebugConsole console) { base.HandleConsoleMessage(commandEntered, console); Vector2 vec2Data = Vector2.zero; if (commandEntered.IndexOf("vhmsg") != -1) { string opCode = string.Empty; string args = string.Empty; if (console.ParseVHMSG(commandEntered, ref opCode, ref args)) { vhmsg.SendVHMsg(opCode, args); } else { console.AddText(commandEntered + " requires an opcode string and can have an optional argument string"); } } else if (commandEntered.IndexOf("set_loco_char_name") != -1) { //m_locoCharacterName = commandEntered.Replace("set_loco_char_name", ""); } else if (commandEntered.IndexOf("set_resolution") != -1) { if (console.ParseVector2(commandEntered, ref vec2Data)) { SetResolution((int)vec2Data.x, (int)vec2Data.y, Screen.fullScreen); } } else if (commandEntered.IndexOf("play_intro") != -1) { StopAllCoroutines(); m_IntroCutscene.Play(); m_bIntroSequencePlaying = true; } } void SetResolution(int width, int height, bool fullScreen) { Screen.SetResolution(width, height, fullScreen); } string ParseSpeechText(string text) { int endOfSpeechIndex = text.IndexOf("</speech>"); if (endOfSpeechIndex == -1) { // there is no speech text return null; } int startOfSpeechIndex = text.LastIndexOf('>', endOfSpeechIndex); if (startOfSpeechIndex == -1) { // broken xml tags return null; } return text.Substring(startOfSpeechIndex + 1, endOfSpeechIndex - startOfSpeechIndex - 1); } void MoveCharacter(string character, string direction, float fSpeed, float fLrps, float fFadeOutTime) { string command = 
string.Format("sbm test loco char {0} {1} spd {2} rps {3} time {4}", character, direction, fSpeed, fLrps, fFadeOutTime); vhmsg.SendVHMsg(command); } void IntroSequenceSetup() { m_bIntroSequencePlaying = true; m_SpeechBox.enabled = false; m_bLocomotionEnabled = false; m_bFinishedPreviousUtterance = true; if (InAcquireSpeechMode) { vhmsg.SendVHMsg("acquireSpeech stopSession"); } else { // get acquire speech to the recorder tab with start, but then disable it because we don't want to interrupt the intro vhmsg.SendVHMsg("acquireSpeech startSession"); vhmsg.SendVHMsg("acquireSpeech stopSession"); } m_AcquireSpeechState = AcquireSpeechState.Disabled; } void CleanupIntroSequence() { m_subtitleText = ""; m_bIntroSequencePlaying = false; m_SpeechBox.enabled = true; m_bLocomotionEnabled = true; SetAcquireSpeechState(m_bStartInAcquireSpeechMode ? AcquireSpeechState.On : AcquireSpeechState.Off); } void SetAcquireSpeechState(AcquireSpeechState state) { m_AcquireSpeechState = state; vhmsg.SendVHMsg("acquireSpeech " + (m_AcquireSpeechState == AcquireSpeechState.On ? 
"startSession" : "stopSession")); //sbm.DisplayUserDialog = m_AcquireSpeechState == AcquireSpeechState.On; DisplayUserDialog = m_AcquireSpeechState == AcquireSpeechState.On; } public void ToggleAxisLines() { GameObject axisLines = GameObject.Find("AxisLines"); if (axisLines) { if (axisLines.transform.childCount > 0) { Transform[] allChildren = axisLines.GetComponentsInChildren<Transform>(true); if (axisLines.transform.GetChild(0).gameObject.activeSelf) { foreach (Transform t in allChildren) { if (t == axisLines.transform) continue; t.gameObject.SetActive(false); } } else { foreach (Transform t in allChildren) { if (t == axisLines.transform) continue; t.gameObject.SetActive(true); } } } } } IEnumerator WaitForPreviousUtteranceToFinish() { while (!m_bFinishedPreviousUtterance) { yield return new WaitForEndOfFrame(); } // reset the variable and add a bit of a delay so that he doesn't keep talking without pausing m_bFinishedPreviousUtterance = false; yield return new WaitForSeconds(1.0f); } void MakeBradTalk(string charName, string externalSoundId, string text) { vhmsg.SendVHMsg(String.Format("vrExpress {1} user 1303332588320-{0}-1 <?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\" ?>" + "<act><participant id=\"{1}\" role=\"actor\" /><fml><turn start=\"take\" end=\"give\" /><affect type=\"neutral\" " + "target=\"addressee\"></affect><culture type=\"neutral\"></culture><personality type=\"neutral\"></personality></fml>" + "<bml><speech id=\"sp1\" ref=\"{2}\" type=\"application/ssml+xml\">{3}</speech></bml></act>", m_BradTalkId, charName, externalSoundId, text)); m_BradTalkId += 3; } void MobilePlayAudio(string audioFile) { // Play the audio directly because VHMsg isn't enabled on mobile. 
So, we can't receive the PlaySound message if (Application.platform == RuntimePlatform.Android || Application.platform == RuntimePlatform.IPhonePlayer) { string s = "Sounds/" + audioFile + ".wav"; var www = VHFile.LoadStreamingAssetsAsync(s); VHUtils.PlayWWWSound(this, www, m_SBM.GetCharacterVoice("Brad"), false); } } void UpdateBrownFace() { for (int i = 0; i < m_BrownHeads.Count; i++) { //Debug.Log("UpdateBrownFace() - " + SmartbodyManager.Get().SBGetAuValue("ChrBrownRocPrefab", "au_45_left")); /* SetGLShaderParam(m_BrownHeads[i], 1, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_45_left")); SetGLShaderParam(m_BrownHeads[i], 2, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_45_right")); SetGLShaderParam(m_BrownHeads[i], 3, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_2_left")); SetGLShaderParam(m_BrownHeads[i], 4, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_2_right")); SetGLShaderParam(m_BrownHeads[i], 5, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_100_left")); SetGLShaderParam(m_BrownHeads[i], 6, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_100_right")); SetGLShaderParam(m_BrownHeads[i], 7, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_110_left")); SetGLShaderParam(m_BrownHeads[i], 8, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_110_right")); SetGLShaderParam(m_BrownHeads[i], 9, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_120_left")); SetGLShaderParam(m_BrownHeads[i], 10, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_120_right")); SetGLShaderParam(m_BrownHeads[i], 11, SBHelpers.SBGetAuValue("ChrBrownRocPrefab", "au_50")); */ } } private void SetGLShaderParam(GameObject obj, int shaderNum, float weight) { //Debug.Log("weight: " + weight); //GameObject roc = m_sbm.GetCharacterBySBMName("ChrBrownRoc").gameObject; //GameObject head = Utils.FindChild(roc, "CharacterRoot/Mesh/SkinnedMesh/MshRef/Head"); // set all to 0 /*head.renderer.material.SetFloat("_Weight1", 0); head.renderer.material.SetFloat("_Weight2", 0); 
head.renderer.material.SetFloat("_Weight3", 0); head.renderer.material.SetFloat("_Weight4", 0); head.renderer.material.SetFloat("_Weight5", 0); head.renderer.material.SetFloat("_Weight6", 0); head.renderer.material.SetFloat("_Weight7", 0); head.renderer.material.SetFloat("_Weight8", 0); head.renderer.material.SetFloat("_Weight9", 0); head.renderer.material.SetFloat("_Weight10", 0); head.renderer.material.SetFloat("_Weight11", 0); head.renderer.material.SetFloat("_Weight12", 0);*/ // set selected to value //if (shaderNum != 0) { string shaderName = string.Format("_Weight{0}", shaderNum); obj.GetComponent<Renderer>().material.SetFloat(shaderName, weight); } } private void ChrBrownRocChannelCallback(UnitySmartbodyCharacter character, string channelName, float value) { int shaderNum = -1; if (channelName == "au_45_left") shaderNum = 1; else if (channelName == "au_45_right") shaderNum = 2; else if (channelName == "au_2_left") shaderNum = 3; else if (channelName == "au_2_right") shaderNum = 4; else if (channelName == "au_100_left") shaderNum = 5; else if (channelName == "au_100_right") shaderNum = 6; else if (channelName == "au_110_left") shaderNum = 7; else if (channelName == "au_110_right") shaderNum = 8; else if (channelName == "au_120_left") shaderNum = 9; else if (channelName == "au_120_right") shaderNum = 10; else if (channelName == "au_50") shaderNum = 11; if (shaderNum != -1) { for (int i = 0; i < m_BrownHeads.Count; i++) { //Debug.Log("ChrBrownRocChannelCallback() - " + character.SBMCharacterName + " " + channelName + " " + value); SetGLShaderParam(m_BrownHeads[i], shaderNum, value); } } } public void ChangeSlide(string slideName) { Texture2D foundSlide = null; foreach (Texture2D slide in m_slides) { if (slide.name == slideName) foundSlide = slide; } if (foundSlide) SlidesScreen.GetComponent<Renderer>().material.mainTexture = foundSlide; } }
// ****************************************************************
// Copyright 2007, Charlie Poole
// This is free software licensed under the NUnit license. You may
// obtain a copy of the license at http://nunit.org
// ****************************************************************

// TODO: Figure out how to make test work in SILVERLIGHT, since they support SetUpFixture
#if !SILVERLIGHT && !PORTABLE
using System.Collections.Generic;
using NUnit.Framework.Api;
using NUnit.Framework.Interfaces;

namespace NUnit.Framework.Internal
{
    /// <summary>
    /// Self-tests for NUnit's SetUpFixture support. Each test either builds the test tree
    /// from a companion test-data assembly and inspects its structure, or actually runs a
    /// namespace's tests and verifies the order of SetUp/TearDown events recorded by
    /// TestUtilities.SimpleEventRecorder.
    /// </summary>
    [TestFixture]
    public class SetUpFixtureTests
    {
        // Path to the test-data assembly that contains the SetupFixture sample namespaces
        // (NUnit.TestData.SetupFixture.Namespace1 .. Namespace6) exercised below.
        private static readonly string testAssembly = AssemblyHelper.GetAssemblyPath(typeof(NUnit.TestData.SetupFixture.Namespace1.SomeFixture).Assembly);

        // Fresh builder/runner pair created per test by SetUp().
        ITestAssemblyBuilder builder;
        ITestAssemblyRunner runner;

        #region SetUp
        [SetUp]
        public void SetUp()
        {
            // Clear recorded events so each test verifies only its own event sequence.
            TestUtilities.SimpleEventRecorder.Clear();
            builder = new DefaultTestAssemblyBuilder();
            runner = new NUnitTestAssemblyRunner(builder);
        }
        #endregion SetUp

        // Convenience overload: run the given namespace with no filtering.
        private ITestResult runTests(string nameSpace)
        {
            return runTests(nameSpace, TestFilter.Empty);
        }

        // Loads the test-data assembly (restricted to nameSpace when non-null) and runs it
        // with the given filter. Returns null if the assembly fails to load.
        private ITestResult runTests(string nameSpace, TestFilter filter)
        {
            IDictionary<string, object> options = new Dictionary<string, object>();
            if (nameSpace != null)
                options["LOAD"] = new string[] { nameSpace };
            // No need for the overhead of parallel execution here
            options["NumberOfTestWorkers"] = 0;

            if (runner.Load(testAssembly, options) != null)
                return runner.Run(TestListener.NULL, filter);

            return null;
        }

        #region Builder Tests
        /// <summary>
        /// Tests that the TestSuiteBuilder correctly interprets a SetupFixture class as a 'virtual namespace' into which
        /// all its sibling classes are inserted.
        /// </summary>
        [NUnit.Framework.Test]
        public void NamespaceSetUpFixtureReplacesNamespaceNodeInTree()
        {
            string nameSpace = "NUnit.TestData.SetupFixture.Namespace1";
            IDictionary<string, object> options = new Dictionary<string, object>();
            options["LOAD"] = new string[] { nameSpace };
            ITest suite = builder.Build(testAssembly, options);
            Assert.IsNotNull(suite);

            Assert.AreEqual(testAssembly, suite.FullName);
            Assert.AreEqual(1, suite.Tests.Count, "Error in top level test count");

            // Walk one level of the tree per namespace segment; each level should be a
            // single runnable suite named after that segment.
            string[] nameSpaceBits = nameSpace.Split('.');
            for (int i = 0; i < nameSpaceBits.Length; i++)
            {
                suite = suite.Tests[0] as TestSuite;
                Assert.AreEqual(nameSpaceBits[i], suite.Name);
                Assert.AreEqual(1, suite.Tests.Count);
                Assert.That(suite.RunState, Is.EqualTo(RunState.Runnable));
            }

            // The innermost namespace node must have been replaced by the SetUpFixture.
            Assert.That(suite, Is.InstanceOf<SetUpFixture>());

            suite = suite.Tests[0] as TestSuite;
            Assert.AreEqual("SomeFixture", suite.Name);
            Assert.AreEqual(1, suite.Tests.Count);
            Assert.That(suite.RunState, Is.EqualTo(RunState.Runnable));
            Assert.That(suite.Tests[0].RunState, Is.EqualTo(RunState.Runnable));
        }

        /// <summary>
        /// Tests that the TestSuiteBuilder correctly interprets a SetupFixture class with no parent namespace
        /// as a 'virtual assembly' into which all its sibling fixtures are inserted.
        /// </summary>
        [NUnit.Framework.Test]
        public void AssemblySetUpFixtureReplacesAssemblyNodeInTree()
        {
            // No LOAD option: build the entire assembly.
            IDictionary<string, object> options = new Dictionary<string, object>();
            ITest suite = builder.Build(testAssembly, options);
            Assert.IsNotNull(suite);

            Assert.That(suite, Is.InstanceOf<SetUpFixture>());
            suite = suite.Tests[1] as TestSuite;
            Assert.AreEqual("SomeFixture", suite.Name);
            Assert.AreEqual(1, suite.Tests.Count);
        }

        // Namespace6 contains an invalid SetUpFixture; the tree should still build, with
        // only the innermost (SetUpFixture) node marked NotRunnable.
        [Test]
        public void InvalidAssemblySetUpFixtureIsLoadedCorrectly()
        {
            string nameSpace = "NUnit.TestData.SetupFixture.Namespace6";
            IDictionary<string, object> options = new Dictionary<string, object>();
            options["LOAD"] = new string[] { nameSpace };
            ITest suite = builder.Build(testAssembly, options);
            Assert.IsNotNull(suite);

            Assert.AreEqual(testAssembly, suite.FullName);
            Assert.AreEqual(1, suite.Tests.Count, "Error in top level test count");
            Assert.AreEqual(RunState.Runnable, suite.RunState);

            string[] nameSpaceBits = nameSpace.Split('.');
            for (int i = 0; i < nameSpaceBits.Length; i++)
            {
                suite = suite.Tests[0] as TestSuite;
                Assert.AreEqual(nameSpaceBits[i], suite.Name);
                Assert.AreEqual(1, suite.Tests.Count);
                // Only the last node (the invalid SetUpFixture itself) is NotRunnable.
                Assert.That(suite.RunState, Is.EqualTo(i < nameSpaceBits.Length - 1 ? RunState.Runnable : RunState.NotRunnable));
            }

            suite = suite.Tests[0] as TestSuite;
            Assert.AreEqual("SomeFixture", suite.Name);
            Assert.AreEqual(1, suite.Tests.Count);
            Assert.That(suite.RunState, Is.EqualTo(RunState.Runnable));
            Assert.That(suite.Tests[0].RunState, Is.EqualTo(RunState.Runnable));
        }
        #endregion

        #region Simple
        // A namespace SetUpFixture's OneTimeSetup/TearDown must bracket the single test.
        [NUnit.Framework.Test]
        public void NamespaceSetUpFixtureWrapsExecutionOfSingleTest()
        {
            Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace1").ResultState.Status, Is.EqualTo(TestStatus.Passed));
            TestUtilities.SimpleEventRecorder.Verify("NS1.OneTimeSetup", "NS1.Fixture.SetUp", "NS1.Test.SetUp", "NS1.Test", "NS1.Test.TearDown", "NS1.Fixture.TearDown", "NS1.OneTimeTearDown");
        }
        #endregion Simple

        #region Static
        // SetUpFixture setup/teardown methods are also allowed to be static.
        [Test]
        public void NamespaceSetUpMethodsMayBeStatic()
        {
            Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace5").ResultState.Status, Is.EqualTo(TestStatus.Passed));
            TestUtilities.SimpleEventRecorder.Verify("NS5.OneTimeSetUp", "NS5.Fixture.SetUp", "NS5.Test.SetUp", "NS5.Test", "NS5.Test.TearDown", "NS5.Fixture.TearDown", "NS5.OneTimeTearDown");
        }
        #endregion

        #region TwoTestFixtures
        // One OneTimeSetUp/TearDown pair must wrap both fixtures in the namespace.
        [NUnit.Framework.Test]
        public void NamespaceSetUpFixtureWrapsExecutionOfTwoTests()
        {
            Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace2").ResultState.Status, Is.EqualTo(TestStatus.Passed));
            // There are two fixtures but we can't be sure of the order of execution so they use the same events
            TestUtilities.SimpleEventRecorder.Verify("NS2.OneTimeSetUp", "NS2.Fixture.SetUp", "NS2.Test.SetUp", "NS2.Test", "NS2.Test.TearDown", "NS2.Fixture.TearDown", "NS2.Fixture.SetUp", "NS2.Test.SetUp", "NS2.Test", "NS2.Test.TearDown", "NS2.Fixture.TearDown", "NS2.OneTimeTearDown");
        }
        #endregion TwoTestFixtures

        #region SubNamespace
        // A SetUpFixture in a sub-namespace nests inside the parent namespace's fixture.
        [NUnit.Framework.Test]
        public void NamespaceSetUpFixtureWrapsNestedNamespaceSetUpFixture()
        {
            Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace3").ResultState.Status, Is.EqualTo(TestStatus.Passed));
            TestUtilities.SimpleEventRecorder.Verify("NS3.OneTimeSetUp", "NS3.Fixture.SetUp", "NS3.Test.SetUp", "NS3.Test", "NS3.Test.TearDown", "NS3.Fixture.TearDown", "NS3.SubNamespace.OneTimeSetUp", "NS3.SubNamespace.Fixture.SetUp", "NS3.SubNamespace.Test.SetUp", "NS3.SubNamespace.Test", "NS3.SubNamespace.Test.TearDown", "NS3.SubNamespace.Fixture.TearDown", "NS3.SubNamespace.OneTimeTearDown", "NS3.OneTimeTearDown");
        }
        #endregion SubNamespace

        #region TwoSetUpFixtures
        // Two SetUpFixtures in the same namespace: both run, in unspecified relative order
        // (ExpectEvents accepts either ordering within each group).
        [NUnit.Framework.Test]
        public void WithTwoSetUpFixturesBothAreUsed()
        {
            Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace4").ResultState.Status, Is.EqualTo(TestStatus.Passed));
            TestUtilities.SimpleEventRecorder.ExpectEvents("NS4.OneTimeSetUp1", "NS4.OneTimeSetUp2")
                .AndThen("NS4.Fixture.SetUp")
                .AndThen("NS4.Test.SetUp")
                .AndThen("NS4.Test")
                .AndThen("NS4.Test.TearDown")
                .AndThen("NS4.Fixture.TearDown")
                .AndThen("NS4.OneTimeTearDown1", "NS4.OneTimeTearDown2")
                .Verify();
        }
        #endregion TwoSetUpFixtures

        #region InvalidSetUpFixture
        // An invalid SetUpFixture causes the run to fail without executing any events.
        [Test]
        public void InvalidSetUpFixtureTest()
        {
            Assert.That(runTests("NUnit.TestData.SetupFixture.Namespace6").ResultState.Status, Is.EqualTo(TestStatus.Failed));
            TestUtilities.SimpleEventRecorder.Verify(new string[0]);
        }
        #endregion

        #region NoNamespaceSetupFixture
        // An assembly-level SetUpFixture (no namespace) wraps a fixture outside any namespace.
        [NUnit.Framework.Test]
        public void AssemblySetupFixtureWrapsExecutionOfTest()
        {
            ITestResult result = runTests(null, new Filters.FullNameFilter("SomeFixture"));
            Assert.AreEqual(1, result.PassCount);
            Assert.That(result.ResultState.Status, Is.EqualTo(TestStatus.Passed));
            TestUtilities.SimpleEventRecorder.Verify("Assembly.OneTimeSetUp", "NoNamespaceTest", "Assembly.OneTimeTearDown");
        }
        #endregion NoNamespaceSetupFixture
    }
}
#endif
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using BASeCamp.BASeBlock.Blocks;
using BASeCamp.BASeBlock.Events;
using BASeCamp.BASeBlock.GameStates;

namespace BASeCamp.BASeBlock.PaddleBehaviours
{
    /// <summary>
    /// the "BuilderShot" is a projectile that is fired and spawns a block of a specific type and size at the destination location.
    ///
    /// </summary>
    public class BuilderShotBehaviour : BasePaddleBehaviour
    {
        //the buildershot itself. Null when no shot is in flight (see CheckCurrentShot).
        private BuilderShot CurrentShot = null;
        // One buildable block type per "power level"; _PowerLevel indexes into this array.
        private Type[] _BlockBuildTypes = new Type[] {typeof (StrongBlock), typeof (InvincibleBlock)};
        private int _PowerLevel = 0;
        // Minimum delay between shots (1 second).
        private TimeSpan cooldown = new TimeSpan(0, 0, 0, 1);
        private BCBlockGameState gstate = null;
        private DateTime lastshot = DateTime.Now;
        private Paddle ownerpaddle = null;

        // Hooks the client's ButtonDown event so the behaviour can fire on button press.
        public BuilderShotBehaviour(BCBlockGameState stateobject)
        {
            gstate = stateobject;
            //stateobject.ClientObject.ButtonDown += new Func<ButtonConstants, bool>(ClientObject_ButtonDown);
            //stateobject.ClientObject.ButtonUp += new Func<ButtonConstants, bool>(ClientObject_ButtonUp);
            gstate.ClientObject.ButtonDown += ClientObject_ButtonDown;
        }

        // Current power level; clamped to a valid index into _BlockBuildTypes.
        public int PowerLevel
        {
            get { return _PowerLevel; }
            set
            {
                _PowerLevel = BCBlockGameState.ClampValue(value, 0, _BlockBuildTypes.Length - 1);
                RefreshAntecedent();
            }
        }

        public Type[] BlockBuildTypes
        {
            get { return _BlockBuildTypes; }
            set
            {
                _BlockBuildTypes = value;
                RefreshAntecedent();
            }
        }

        // The block type at the current power level (what the next shot will build).
        public Type BlockBuildType
        {
            get { return _BlockBuildTypes[_PowerLevel]; }
            set { _BlockBuildTypes[_PowerLevel] = value; }
        }

        // Human-readable description: the name of the block type that will be built.
        public String GetDescription()
        {
            return BlockBuildType.Name;
        }

        private void RefreshAntecedent()
        {
            //change state of CurrentShot to reflect changes in the "Behaviour" object. (us)
            if (CurrentShot != null)
            {
                CurrentShot.BuildBlock = BlockBuildType;
            }
        }

        /// <summary>
        /// acquires the icon for this behaviour.
        /// </summary>
        /// <returns>
        /// an image to represent the behaviour.
        /// </returns>
        public override Image GetIcon()
        {
            return BCBlockGameState.Imageman.getLoadedImage("INVINCIBLE");
        }

        public override string getName()
        {
            return "Builder";
        }

        // Draws the paddle overlay image centered on the paddle position.
        public override void Draw(Paddle onPaddle, Graphics g)
        {
            PointF Position = onPaddle.Position;
            SizeF PaddleSize = onPaddle.PaddleSize;
            RectangleF drawrect = new RectangleF(Position.X - PaddleSize.Width/2, Position.Y - PaddleSize.Height/2,
                                                 PaddleSize.Width, PaddleSize.Height);
            //g.FillRectangle(new SolidBrush(Color.FromArgb(100, Color.Green)), drawrect);
            //draw the "sticky" overlay...
            g.DrawImage(BCBlockGameState.Imageman.getLoadedImage("BUILDPADDLE"), drawrect);
        }

        // Ball impacts are not handled by this behaviour.
        public override bool Impact(Paddle onPaddle, cBall withBall)
        {
            return false;
        }

        public override void BehaviourAdded(Paddle toPaddle, BCBlockGameState gamestate)
        {
            ownerpaddle = toPaddle;
            //remove any existing terminator behaviour.
            foreach (var loopbeh in toPaddle.Behaviours)
            {
                if (loopbeh is TerminatorBehaviour)
                    gstate.GameObjects.AddLast(new BehaviourRemoverProxy(toPaddle, loopbeh));
            }
            base.BehaviourAdded(toPaddle, gamestate);
        }

        /// <summary>
        /// determines if there is a shot being... er... shot.
        /// </summary>
        /// <returns></returns>
        private bool CheckCurrentShot()
        {
            // Clears CurrentShot once it leaves the projectile phase (side effect).
            if (CurrentShot == null) return false;
            if (CurrentShot.ShotState != BuilderShot.BuilderShotState.BSS_Projectile) CurrentShot = null;
            return CurrentShot != null;
        }

        private void ClientObject_ButtonDown(Object sender, ButtonEventArgs<bool> e)
        {
            // Self-unhook if this behaviour is no longer attached to the player's paddle.
            if (gstate.PlayerPaddle != null && !gstate.PlayerPaddle.Behaviours.Contains(this))
            {
                //unhook
                gstate.ClientObject.ButtonDown -= ClientObject_ButtonDown;
                return;
            }
            //don't shoot if we are not in the "run" state.
            if (gstate.ClientObject.ActiveState is StateRunning)
            {
                if (e.Button == ButtonConstants.Button_B)
                {
                    ShootBuilder();
                }
            }
            e.Result = true;
        }

        // Fires a new BuilderShot, or, if one is already in flight, triggers its expand phase.
        private void ShootBuilder()
        {
            if (CheckCurrentShot())
            {
                //only allow for growification when it get's higher than 128.
                if (CurrentShot.Location.Y < gstate.GameArea.Height - 128)
                {
                    CurrentShot.ExpandPhase();
                    lastshot = DateTime.Now;
                }
            }
            //shoot a BuilderShot.
            if (DateTime.Now - lastshot > cooldown)
            {
                if (!CheckCurrentShot())
                {
                    BuilderShot shootthis = new BuilderShot(ownerpaddle.Getrect().CenterPoint(), new PointF(0, -2), BlockBuildType);
                    gstate.GameObjects.AddLast(shootthis);
                    CurrentShot = shootthis;
                }
            }
        }

        public override void BehaviourRemoved(Paddle fromPaddle, BCBlockGameState gamestate)
        {
            // NOTE(review): base.BehaviourRemoved is only called when gstate is non-null — confirm intended.
            if (gstate != null)
            {
                gstate.ClientObject.ButtonDown -= ClientObject_ButtonDown;
                base.BehaviourRemoved(fromPaddle, gamestate);
            }
        }

        // The projectile itself: flies up as a small block image, then "grows" in place
        // until it reaches BuildSize, at which point it becomes a real Block in the game.
        public class BuilderShot : Projectile, iSizedProjectile
        {
            public enum BuilderShotState
            {
                BSS_Projectile = 0,
                BSS_Expand = 1 //expanding phase, before it turns into a block.
            }

            // Number of growth steps used to interpolate from ShotSize to BuildSize.
            private const int increments = 32;
            private SizeF? Growincrement = null;
            private Type[] _BuildBlocks = new Type[] {typeof (StrongBlock), typeof (InvincibleBlock)};
            private SizeF _BuildSize = new SizeF(32, 16);
            private int _PowerLevel = 0;
            private SizeF _ShotSize = new SizeF(8, 8);
            private BuilderShotState _ShotState = BuilderShotState.BSS_Projectile;
            // Cached rendering of the block type at the current shot size (rebuilt by BuildBlock setter).
            private Image useDrawImage = null;

            public BuilderShot(PointF pLocation, PointF pVelocity, Type pBuildBlock)
                : base(pLocation, pVelocity)
            {
                BuildBlock = pBuildBlock;
            }

            public BuilderShotState ShotState
            {
                get { return _ShotState; }
                set { _ShotState = value; }
            }

            public SizeF ShotSize
            {
                get { return _ShotSize; }
                set
                {
                    _ShotSize = value;
                    // Self-assignment deliberately re-runs the BuildBlock setter to redraw the cached image.
                    BuildBlock = BuildBlock; //invoke routine...
                }
            }

            public SizeF BuildSize
            {
                get { return _BuildSize; }
                set { _BuildSize = value; }
            }

            public int PowerLevel
            {
                get { return _PowerLevel; }
                // NOTE(review): upper bound is _BuildBlocks.Length, not Length - 1 — a maximal
                // PowerLevel would index past the array in BuildBlock. Confirm intended.
                set { _PowerLevel = BCBlockGameState.ClampValue(value, 0, _BuildBlocks.Length); }
            }

            public Type BuildBlock
            {
                get { return _BuildBlocks[_PowerLevel]; }
                set
                {
                    _BuildBlocks[_PowerLevel] = value;
                    Bitmap buildbitmap = null;
                    Graphics buildcanvas = null;
                    //create and draw the block.
                    Block.DrawBlock(_BuildBlocks[_PowerLevel], out buildbitmap, out buildcanvas, _ShotSize);
                    useDrawImage = buildbitmap;
                }
            }

            // iSizedProjectile implementation; aliases ShotSize.
            public SizeF Size
            {
                get { return _ShotSize; }
                set { _ShotSize = value; }
            }

            public void ExpandPhase()
            {
                //enter expansion phase.
                _ShotState = BuilderShotState.BSS_Expand;
                _Velocity = new PointF(0, 0); //full stop, ensign.
            }

            // ProxyObject callback: forces a game refresh one frame later, then destroys the proxy.
            private bool forcerefresher(ProxyObject po, BCBlockGameState gs)
            {
                gs.Forcerefresh = true;
                return true; //destroy.
            }

            public override bool PerformFrame(BCBlockGameState gamestate)
            {
                bool returnvalue = base.PerformFrame(gamestate);
                switch (_ShotState)
                {
                    case BuilderShotState.BSS_Projectile:
                        // Enter the expand phase when the base projectile says to die OR we hit a block.
                        List<Block> resulthittest = BCBlockGameState.Block_HitTest(gamestate.Blocks, getfullsize(), false);
                        returnvalue = !returnvalue || resulthittest.Any();
                        if (returnvalue)
                        {
                            _ShotState = BuilderShotState.BSS_Expand;
                            Velocity = new PointF(0, 0.01f);
                            gamestate.Forcerefresh = true;
                        }
                        return false;
                    case BuilderShotState.BSS_Expand:
                        //if our size is the desired size of the block, create that block and return true.
                        //otherwise, change out size and location to emulate "growing".
                        if (this.ShotSize.Width >= BuildSize.Width && ShotSize.Height >= BuildSize.Height)
                        {
                            //grow phase completed.
                            //create the block in the desired location.
                            RectangleF desiredlocation = new RectangleF(Location.X, Location.Y, BuildSize.Width, BuildSize.Height);
                            Block builtblock = (Block) Activator.CreateInstance(this.BuildBlock, desiredlocation);
                            //add it to the game...
                            gamestate.Blocks.AddLast(builtblock);
                            //make sure to force a refresh, too.
                            gamestate.Forcerefresh = true;
                            gamestate.Defer(
                                () => gamestate.GameObjects.AddLast(new ProxyObject(forcerefresher, null)));
                            //return true to destroy outselves.
                            //todo: maybe add "effects" here, too?
                            return true;
                        }
                        else
                        {
                            //otherwise, we are in the growing phase.
                            //growincrement could be null, if so, initialize it...
                            if (Growincrement == null)
                            {
                                //initialize it to the difference between the final size and the current shot size, divided
                                //by increments.
                                Growincrement = new SizeF((BuildSize.Width - ShotSize.Width)/increments,
                                                          (BuildSize.Height - ShotSize.Height)/increments);
                            }
                            //change size by growincrement.
                            // Location moves by one increment while the size grows by two, keeping the shot centered.
                            Location = new PointF(Location.X - Growincrement.Value.Width,
                                                  Location.Y - Growincrement.Value.Height);
                            ShotSize = new SizeF(ShotSize.Width + Growincrement.Value.Width*2,
                                                 ShotSize.Height + Growincrement.Value.Height*2);
                        }
                        return false;
                }
                return false;
            }

            public override void Draw(Graphics g)
            {
                g.DrawImage(useDrawImage, _Location.X, _Location.Y, _ShotSize.Width, _ShotSize.Height);
            }

            /// <summary>
            /// returns the rectangle that would hold the full sized block on this position.
            /// </summary>
            /// <returns></returns>
            private RectangleF getfullsize()
            {
                PointF gotcenter = new PointF(_Location.X + _ShotSize.Width/2, _Location.Y + _ShotSize.Height/2);
                return new RectangleF(gotcenter.X - BuildSize.Width/2, gotcenter.Y - BuildSize.Height/2,
                                      BuildSize.Width, BuildSize.Height);
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using Vevo.Domain.Discounts;
using Vevo.Domain.Orders;
using Vevo.Domain.Products;
using Vevo.Shared.Utilities;
using Vevo.WebUI;

// User control that renders coupon details (amount, expiration, applicable items)
// and coupon validation error messages. "[$...]" tokens are localization keys
// resolved elsewhere — they are runtime strings and must not be altered.
public partial class Components_CouponMessageDisplay : Vevo.WebUI.International.BaseLanguageUserControl
{
    #region Private

    // Hides every display region; call before showing any subset.
    private void HideAllControls()
    {
        uxCouponAmountDiv.Visible = false;
        uxCouponExpireDateDiv.Visible = false;
        uxAvailableItemHeaderListDiv.Visible = false;
        uxAvailableItemListDiv.Visible = false;
        uxErrorMessageDiv.Visible = false;
        uxPromotionWarningDiv.Visible = false;
    }

    private void DisplayError( string errorMessage )
    {
        uxErrorMessage.DisplayErrorNoNewLine( errorMessage );
        uxErrorMessageDiv.Visible = true;
    }

    private string GetCustomerError()
    {
        return "[$ErrorCouponUserName]";
    }

    // Error text plus the list of products the coupon applies to.
    private string GetProductError( Coupon coupon )
    {
        string message = "<p>[$ErrorInvalidProduct]</p> <p>[$ProductListHeader]</p>";
        message += GetApplicableProductListText( coupon, "all_product" );
        return message;
    }

    private string GetOrderAmountError( Coupon coupon )
    {
        string message = String.Format( "<p>[$ErrorCouponMinimumOrder] {0}.</p>",
            StoreContext.Currency.FormatPrice( coupon.MinimumSubtotal ) );
        if (coupon.ProductCostType == Coupon.ProductCostTypeEnum.CouponEligibleProducts)
            message += "<p>[$ErrorCouponMinimumOrderEligible]</p>";
        return message;
    }

    // Error text plus the list of categories the coupon applies to.
    private string GetCategoryError( Coupon coupon )
    {
        string message = "<p>[$ErrorInvalidCategory]</p> <p>[$CategoryListHeader]</p>";
        message += GetApplicableCategoryListText( coupon );
        return message;
    }

    private string GetExpiredError( Coupon coupon )
    {
        switch (coupon.ExpirationStatus)
        {
            case Coupon.ExpirationStatusEnum.ExpiredByDate:
                return "[$InvalidExpired]";
            case Coupon.ExpirationStatusEnum.ExpiredByQuantity:
                return "[$InvalidOverLimit]";
            default:
                return "[$InvalidCoupon]";
        }
    }

    private string GetInvalidCodeError()
    {
        return "[$InvalidCoupon]";
    }

    private string GetBelowMinimumQuantityError( Coupon coupon )
    {
        string message =
            "<p>" + GetDiscountTypeBuyXGetYText( coupon, true ) + "</p>";
        message += "<p>[$ShoppingCartProductListHeader]</p>";
        message += "<p>" + GetApplicableProductListText( coupon, "not_match_product" ) + "</p>";
        return message;
    }

    // Maps a coupon's validation status to its formatted error message.
    // Returns String.Empty when the coupon is valid.
    private string GetFormattedError( Coupon coupon, CartItemGroup cartItemGroup )
    {
        switch (coupon.Validate( cartItemGroup, StoreContext.Customer ))
        {
            case Coupon.ValidationStatus.InvalidCustomer:
                return GetCustomerError();
            case Coupon.ValidationStatus.BelowMinimumOrderAmount:
                return GetOrderAmountError( coupon );
            case Coupon.ValidationStatus.InvalidProduct:
                if (coupon.ProductFilter == Coupon.ProductFilterEnum.ByProductIDs)
                    return GetProductError( coupon );
                else
                    return GetCategoryError( coupon );
            case Coupon.ValidationStatus.Expired:
                return GetExpiredError( coupon );
            case Coupon.ValidationStatus.InvalidCode:
                return GetInvalidCodeError();
            case Coupon.ValidationStatus.BelowMinimumQuantity:
                return GetBelowMinimumQuantityError( coupon );
            default:
                return String.Empty;
        }
    }

    private decimal GetCouponDiscount( Coupon coupon )
    {
        IList<decimal> discountLines;
        return coupon.GetDiscount( StoreContext.ShoppingCart.GetAllCartItems(),
            StoreContext.Customer, out discountLines );
    }

    // Builds an HTML <ul> of products depending on productListType:
    //   "all_product"       - every product the coupon can apply to.
    //   "match_product"     - cart items the coupon currently discounts (empty when all match).
    //   "not_match_product" - cart items listed when the coupon yields no discount.
    private string GetApplicableProductListText( Coupon coupon, string productListType )
    {
        string message = String.Empty;
        switch (productListType)
        {
            case "all_product":
                {
                    message = "<ul>";
                    foreach (Product product in coupon.GetApplicableProducts( StoreContext.Culture ))
                    {
                        if (!String.IsNullOrEmpty( product.Name ))
                            message += "<li>" + product.Name + "</li> ";
                    }
                    message += "</ul>";
                }
                break;
            case "match_product":
                {
                    if (GetCouponDiscount( coupon ) > 0)
                    {
                        ICartItem[] cart = StoreContext.ShoppingCart.GetCartItems();
                        int productDiscoutableItemCount = 0;
                        if (coupon.ProductFilter != Coupon.ProductFilterEnum.All)
                        {
                            message = "<ul>";
                            for (int i = 0; i < cart.Length; i++)
                            {
                                if (coupon.IsProductDiscountable( cart[i].Product ) &&
                                    cart[i].DiscountGroupID == "0" &&
                                    cart[i].Quantity > coupon.MinimumQuantity)
                                {
                                    message += "<li>" + cart[i].GetName( StoreContext.Culture, StoreContext.Currency ) + "</li>";
                                    productDiscoutableItemCount++;
                                }
                            }
                            message += "</ul>";
                            // When every cart item is discountable there is nothing worth listing.
                            if (productDiscoutableItemCount == cart.Length)
                            {
                                message = string.Empty;
                            }
                        }
                    }
                }
                break;
            case "not_match_product":
                {
                    if (GetCouponDiscount( coupon ) == 0)
                    {
                        ICartItem[] cart = StoreContext.ShoppingCart.GetCartItems();
                        message = "<ul>";
                        for (int i = 0; i < cart.Length; i++)
                        {
                            if (coupon.IsProductDiscountable( cart[i].Product ) && cart[i].DiscountGroupID == "0")
                            {
                                message += "<li>" + cart[i].GetName( StoreContext.Culture, StoreContext.Currency ) + "</li>";
                            }
                        }
                        message += "</ul>";
                    }
                }
                break;
        }
        return message;
    }

    // Builds an HTML <ul> of categories the coupon applies to.
    private string GetApplicableCategoryListText( Coupon coupon )
    {
        string message = "<ul>";
        foreach (Category category in coupon.GetApplicableCategories( StoreContext.Culture ))
        {
            if (!String.IsNullOrEmpty( category.Name ))
                message += "<li>" + category.Name + "</li> ";
        }
        message += "</ul>";
        return message;
    }

    // Describes a Buy-X-Get-Y coupon, either as a promotional blurb
    // (isErrorMessage == false) or as a minimum-quantity error message.
    private string GetDiscountTypeBuyXGetYText( Coupon coupon, bool isErrorMessage )
    {
        String message = "";
        if (!isErrorMessage)
        {
            message = "Buy " + coupon.MinimumQuantity.ToString() + " item(s) full price and get ";
            if (coupon.DiscountType == Coupon.DiscountTypeEnum.BuyXDiscountYPrice)
            {
                message += StoreContext.Currency.FormatPrice( coupon.DiscountAmount ) + " discount ";
                message += "for " + coupon.PromotionQuantity.ToString() + " item(s). ";
            }
            else //Coupon.DiscountTypeEnum.BuyXDiscountYPercentage
            {
                if (ConvertUtilities.ToInt32( coupon.Percentage ) == 100)
                {
                    message += coupon.PromotionQuantity + " item(s) free. ";
                }
                else // coupon.Percentage != 100
                {
                    message += coupon.Percentage.ToString( "0.00" ) + "% discount ";
                    message += "for " + coupon.PromotionQuantity.ToString() + " item(s). ";
                }
            }
        }
        else
        {
            message += "To use this coupon, you need to buy at least ";
            message += ConvertUtilities.ToString( coupon.MinimumQuantity + coupon.PromotionQuantity ) + " items per product.";
        }
        return message;
    }

    // Shows the coupon amount label, formatted per discount type.
    private void DisplayCouponAmount( Coupon coupon )
    {
        uxCouponAmountDiv.Visible = false;
        if (coupon.DiscountType == Coupon.DiscountTypeEnum.Price)
        {
            uxCouponAmountDiv.Visible = true;
            uxCouponAmountLabel.Text = StoreContext.Currency.FormatPrice( coupon.DiscountAmount );
        }
        if (coupon.DiscountType == Coupon.DiscountTypeEnum.Percentage)
        {
            uxCouponAmountDiv.Visible = true;
            uxCouponAmountLabel.Text = coupon.Percentage.ToString() + "%";
        }
        if (coupon.DiscountType == Coupon.DiscountTypeEnum.BuyXDiscountYPrice)
        {
            uxCouponAmountDiv.Visible = true;
            uxCouponAmountLabel.Text = GetDiscountTypeBuyXGetYText( coupon, false );
        }
        if (coupon.DiscountType == Coupon.DiscountTypeEnum.BuyXDiscountYPercentage)
        {
            uxCouponAmountDiv.Visible = true;
            uxCouponAmountLabel.Text = GetDiscountTypeBuyXGetYText( coupon, false );
        }
        if (coupon.DiscountType == Coupon.DiscountTypeEnum.FreeShipping)
        {
            uxCouponAmountDiv.Visible = true;
            uxCouponAmountLabel.Text = "Free Shipping Cost";
        }
    }

    // Shows the expiration date when the coupon expires by date (or both date and quantity).
    private void DisplayExpirationDetails( Coupon coupon )
    {
        uxCouponExpireDateDiv.Visible = false;
        if (coupon.ExpirationType == Coupon.ExpirationTypeEnum.Date ||
            coupon.ExpirationType == Coupon.ExpirationTypeEnum.Both)
        {
            uxCouponExpireDateDiv.Visible = true;
            uxCouponExpireDateLabel.Text = String.Format( "{0:dd} {0:MMM} {0:yyyy}", coupon.ExpirationDate );
        }
    }

    // Shows the applicable product or category list, depending on the coupon's product filter.
    private void DisplayCouponApplicableItems( Coupon coupon, string productListType )
    {
        string message = String.Empty;
        if (coupon.ProductFilter == Coupon.ProductFilterEnum.ByProductIDs)
        {
            message = GetApplicableProductListText( coupon, productListType );
            if (!String.IsNullOrEmpty( message ))
            {
                if (productListType == "all_product")
                {
                    uxAvailableItemHeaderLabel.Text = "[$ProductListHeader]";
                }
                else
                {
                    uxAvailableItemHeaderLabel.Text = "[$ShoppingCartProductListHeader]";
                }
                uxAvailableItemHeaderListDiv.Visible = true;
                uxAvailableItemLabel.Text = message;
                uxAvailableItemListDiv.Visible = true;
            }
        }
        else if (coupon.ProductFilter == Coupon.ProductFilterEnum.ByCategoryIDs)
        {
            uxAvailableItemHeaderLabel.Text = "[$CategoryListHeader]";
            uxAvailableItemLabel.Text = GetApplicableCategoryListText( coupon );
            uxAvailableItemListDiv.Visible = true;
            uxAvailableItemHeaderListDiv.Visible = true;
        }
    }

    // Warns when the cart already contains a promotion item and the coupon is not free-shipping.
    private void DisplayPromotionWarning( Coupon coupon )
    {
        IList<ICartItem> cartItems = StoreContext.ShoppingCart.GetCartItems();
        if (Coupon.DiscountTypeEnum.FreeShipping != coupon.DiscountType)
        {
            foreach (ICartItem cartItem in cartItems)
            {
                if (cartItem.IsPromotion)
                {
                    uxPromotionWarningDiv.Visible = true;
                    break;
                }
            }
        }
    }

    #endregion

    #region Protected

    protected void Page_Load( object sender, EventArgs e )
    {
    }

    #endregion

    #region Public Methods

    public void HideAll()
    {
        HideAllControls();
    }

    // Shows all coupon details (amount, expiration, applicable items, promotion warning).
    public void DisplayCouponDetails( Coupon coupon, string productListType )
    {
        HideAllControls();
        DisplayCouponAmount( coupon );
        DisplayExpirationDetails( coupon );
        DisplayCouponApplicableItems( coupon, productListType );
        DisplayPromotionWarning( coupon );
    }

    // Validates the coupon against the cart and displays the error, if any.
    public void DisplayCouponErrorMessage( Coupon coupon, CartItemGroup cartItemGroup )
    {
        HideAllControls();
        string errorMessage = GetFormattedError( coupon, cartItemGroup );
        if (!String.IsNullOrEmpty( errorMessage ))
            DisplayError( errorMessage );
    }

    // Validates the coupon and returns the error text (empty when valid) without displaying it.
    public string GetCouponErrorMessage( Coupon coupon, CartItemGroup cartItemGroup )
    {
        HideAllControls();
        return GetFormattedError( coupon, cartItemGroup );
    }

    #endregion
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using MongoDB.Bson.Serialization;
using MongoDB.Driver;

namespace NEStore.MongoDb
{
    // Event-store bucket backed by two MongoDB collections over the same documents:
    // Collection (full commits including events) and InfoCollection (commit metadata only).
    public class MongoDbBucket<T> : IBucket<T>
    {
        // volatile: may be checked/set from concurrent writers without a lock.
        private volatile bool _indexesEnsured;
        private readonly MongoDbEventStore<T> _eventStore;

        public IMongoCollection<CommitData<T>> Collection { get; }
        public IMongoCollection<CommitInfo> InfoCollection { get; }
        public string BucketName { get; }

        public MongoDbBucket(MongoDbEventStore<T> eventStore, string bucketName)
        {
            _eventStore = eventStore;
            Collection = eventStore.CollectionFromBucket<CommitData<T>>(bucketName);
            InfoCollection = eventStore.CollectionFromBucket<CommitInfo>(bucketName);
            BucketName = bucketName;
        }

        /// <summary>
        /// Persist a commit to Mongo
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="expectedStreamRevision">Expected revision of the provided stream</param>
        /// <param name="events">List of events to commit</param>
        /// <returns>WriteResult object containing the commit persisted and the DispatchTask of the events</returns>
        public async Task<WriteResult<T>> WriteAsync(Guid streamId, int expectedStreamRevision, IEnumerable<T> events)
        {
            if (expectedStreamRevision < 0)
                throw new ArgumentOutOfRangeException(nameof(expectedStreamRevision));

            await AutoEnsureIndexesAsync()
                .ConfigureAwait(false);

            var lastCommit = await GetLastCommitAsync()
                .ConfigureAwait(false);

            await CheckStreamConsistencyBeforeWriting(streamId, expectedStreamRevision, lastCommit)
                .ConfigureAwait(false);

            await _eventStore.UndispatchedStrategy
                .CheckUndispatchedAsync(this, streamId)
                .ConfigureAwait(false);

            var eventsArray = events.ToArray();

            var commit = await CreateCommitAsync(streamId, expectedStreamRevision, eventsArray, lastCommit).ConfigureAwait(false);

            try
            {
                await Collection.InsertOneAsync(commit)
                    .ConfigureAwait(false);
            }
            catch (MongoWriteException ex) when (ex.IsDuplicateKeyException())
            {
                // Duplicate key on the unique index means another writer got the same revision first.
                throw new ConcurrencyWriteException($"Someone else is working on the same bucket ({BucketName}) or stream ({commit.StreamId})", ex);
            }
            catch (MongoWriteException)
            {
                // NOTE(review): non-duplicate-key write errors are swallowed here, yet the commit
                // is still dispatched and returned below — confirm this is intended.
                //TODO: do we need to rethrow the exception?
            }

            // Dispatch runs in the background; the caller can await it via WriteResult.
            var dispatchTask = DispatchCommitAsync(commit);

            return new WriteResult<T>(commit, dispatchTask);
        }

        /// <summary>
        /// Dispatch all commits where dispatched attribute is set to false
        /// </summary>
        public async Task<CommitData<T>[]> DispatchUndispatchedAsync(Guid? streamId = null, long? toBucketRevision = null)
        {
            var filter = Builders<CommitData<T>>.Filter.Eq(p => p.Dispatched, false);
            if (streamId != null)
                filter = filter & Builders<CommitData<T>>.Filter.Eq(p => p.StreamId, streamId.Value);
            if (toBucketRevision != null)
                filter = filter & Builders<CommitData<T>>.Filter.Lte(p => p.BucketRevision, toBucketRevision.Value);

            var commits = await Collection
                .Find(filter)
                .Sort(Builders<CommitData<T>>.Sort.Ascending(p => p.BucketRevision))
                .ToListAsync()
                .ConfigureAwait(false);

            // Dispatch sequentially in bucket-revision order.
            foreach (var commit in commits)
                await DispatchCommitAsync(commit)
                    .ConfigureAwait(false);

            return commits.ToArray();
        }

        /// <summary>
        /// Set all undispatched events as dispatched, without dispatching them
        /// </summary>
        public async Task SetAllAsDispatched()
        {
            await Collection
                .UpdateManyAsync(Builders<CommitData<T>>.Filter.Eq(p => p.Dispatched, false),
                    Builders<CommitData<T>>.Update.Set(p => p.Dispatched, true))
                .ConfigureAwait(false);
        }

        /// <summary>
        /// Delete all commits succeeding the revision provided
        /// </summary>
        /// <param name="bucketRevision">Revision of last commit to keep</param>
        public async Task RollbackAsync(long bucketRevision)
        {
            await Collection
                .DeleteManyAsync(p => p.BucketRevision > bucketRevision).ConfigureAwait(false);

            // Keep the auto-increment counter in sync with the truncated bucket.
            await _eventStore.AutonIncrementStrategy.RollbackAsync(BucketName, bucketRevision).ConfigureAwait(false);
        }

        /// <summary>
        /// Retrieve all events from bucket filtered by params
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="fromBucketRevision">Start bucket revision</param>
        /// <param name="toBucketRevision">End bucket revision</param>
        /// <returns>Flattered list of events retrieved from commits</returns>
        public async Task<IEnumerable<T>> GetEventsAsync(Guid? streamId = null, long fromBucketRevision = 1, long? toBucketRevision = null)
        {
            var commits = await GetCommitsAsync(streamId, fromBucketRevision, toBucketRevision)
                .ConfigureAwait(false);

            return commits.SelectMany(c => c.Events);
        }

        /// <summary>
        /// Retrieve all events from bucket filtered by params
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="fromStreamRevision">
        /// Start stream revision. This point is included in the performed search.
        /// </param>
        /// <param name="toStreamRevision">
        /// End stream revision. This point is included in the performed search.
        /// </param>
        /// <returns>
        /// Flattered list of events retrieved from commits
        /// </returns>
        /// <remarks>
        /// This method is meant to return the event which transition the aggregate
        /// to revision fromStreamRevision, the event which transition the aggregate to
        /// revision fromStreamRevision + 1, the event which transition the aggregate to
        /// revision fromStreamRevision + 2, ..., the event which transition the aggregate to
        /// revision toStreamRevision. Both the ends (fromStreamRevision and toStreamRevision) are included.
        /// </remarks>
        public async Task<IEnumerable<T>> GetEventsForStreamAsync(
            Guid streamId,
            int fromStreamRevision = 1,
            int? toStreamRevision = null)
        {
            if (fromStreamRevision <= 0)
            {
                throw new ArgumentOutOfRangeException(
                    nameof(fromStreamRevision),
                    $"Parameter {nameof(fromStreamRevision)} must be greater than 0.");
            }

            if (toStreamRevision.HasValue && toStreamRevision.Value < fromStreamRevision)
            {
                throw new ArgumentOutOfRangeException(
                    nameof(toStreamRevision),
                    $"When parameter {nameof(toStreamRevision)} is not null, it must be greater than or equal to parameter ${nameof(fromStreamRevision)}.");
            }

            var filter = Builders<CommitData<T>>
                .Filter
                .Eq(p => p.StreamId, streamId);

            // A commit overlaps the requested range when it ends at/after the start...
            filter &= Builders<CommitData<T>>
                .Filter
                .Gte(c => c.StreamRevisionEnd, fromStreamRevision);

            var normalizedToStreamRevision = toStreamRevision ?? int.MaxValue;

            // ...and starts before the (normalized) end.
            filter &= Builders<CommitData<T>>
                .Filter
                .Lt(p => p.StreamRevisionStart, normalizedToStreamRevision);

            var commits = await Collection
                .Find(filter)
                .Sort(Builders<CommitData<T>>.Sort.Ascending(p => p.BucketRevision))
                .ToListAsync()
                .ConfigureAwait(false);

            // Flatten the commits into events, tagging each event with the stream
            // revision it transitions the aggregate to, then trim to the exact range.
            return commits.SelectMany(commit =>
                {
                    // NOTE(review): commitStartRevision is unused — candidate for removal.
                    var commitStartRevision = commit.StreamRevisionStart;
                    return commit
                        .Events
                        .Select((@event, index) =>
                        {
                            var eventStartRevision = commit.StreamRevisionStart + index;
                            var eventEndRevision = eventStartRevision + 1;
                            return (@event, eventEndRevision);
                        });
                })
                .Where(tuple =>
                {
                    var (@event, endRevision) = tuple;
                    return endRevision >= fromStreamRevision && endRevision <= normalizedToStreamRevision;
                })
                .Select(tuple => tuple.@event)
                .ToList();
        }

        /// <summary>
        /// Retrieve all commits from bucket filtered by params
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="fromBucketRevision">Start bucket revision</param>
        /// <param name="toBucketRevision">End bucket revision</param>
        /// <param name="dispatched">Include/exclude dispatched</param>
        /// <param name="limit">Limit</param>
        /// <returns>List of commits matching filters</returns>
        public async Task<IEnumerable<CommitData<T>>> GetCommitsAsync(
            Guid? streamId = null,
            long fromBucketRevision = 1,
            long? toBucketRevision = null,
            bool? dispatched = null,
            int? limit = null)
        {
            if (fromBucketRevision <= 0)
                throw new ArgumentOutOfRangeException(nameof(fromBucketRevision), "Parameter must be greater than 0.");
            if (toBucketRevision <= 0)
                throw new ArgumentOutOfRangeException(nameof(toBucketRevision), "Parameter must be greater than 0.");

            var filter = Builders<CommitData<T>>.Filter.Empty;
            if (streamId != null)
                filter = filter & Builders<CommitData<T>>.Filter.Eq(p => p.StreamId, streamId.Value);

            if (fromBucketRevision != 1)
                filter = filter & Builders<CommitData<T>>.Filter.Gte(p => p.BucketRevision, fromBucketRevision);
            if (toBucketRevision != null)
                filter = filter & Builders<CommitData<T>>.Filter.Lte(p => p.BucketRevision, toBucketRevision.Value);

            if (dispatched != null)
                filter = filter & Builders<CommitData<T>>.Filter.Eq(p => p.Dispatched, dispatched.Value);

            var commits = await Collection
                .Find(filter)
                .Sort(Builders<CommitData<T>>.Sort.Ascending(p => p.BucketRevision))
                .Limit(limit)
                .ToListAsync()
                .ConfigureAwait(false);

            return commits;
        }

        /// <summary>
        /// Retrieve the latest commit matching the specified criteria
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="atBucketRevision">Get the last commit less or equal the specified bucket revision</param>
        /// <returns>Last commit info</returns>
        public async Task<CommitInfo> GetLastCommitAsync(Guid? streamId = null, long? atBucketRevision = null)
        {
            if (atBucketRevision <= 0)
                throw new ArgumentOutOfRangeException(nameof(atBucketRevision), "Parameter must be greater than 0.");

            var filter = Builders<CommitInfo>.Filter.Empty;
            if (streamId != null)
                filter = filter & Builders<CommitInfo>.Filter.Eq(p => p.StreamId, streamId.Value);

            if (atBucketRevision != null)
                filter = filter & Builders<CommitInfo>.Filter.Lte(p => p.BucketRevision, atBucketRevision.Value);

            var result = await InfoCollection
                .Find(filter)
                .Sort(Builders<CommitInfo>.Sort.Descending(p => p.BucketRevision))
                .FirstOrDefaultAsync()
                .ConfigureAwait(false);

            return result;
        }

        /// <summary>
        /// Retrieve all streams inside the range provided
        /// </summary>
        /// <param name="fromBucketRevision">Min bucket revision</param>
        /// <param name="toBucketRevision">Max bucket revision</param>
        /// <returns>List of streams identifiers</returns>
        public async Task<IEnumerable<Guid>> GetStreamIdsAsync(long fromBucketRevision = 1, long? toBucketRevision = null)
        {
            if (fromBucketRevision <= 0)
                throw new ArgumentOutOfRangeException(nameof(fromBucketRevision), "Parameter must be greater than 0.");
            if (toBucketRevision <= 0)
                throw new ArgumentOutOfRangeException(nameof(toBucketRevision), "Parameter must be greater than 0.");

            var filter = Builders<CommitData<T>>.Filter.Empty;
            if (fromBucketRevision != 1)
                filter = filter & Builders<CommitData<T>>.Filter.Gte(p => p.BucketRevision, fromBucketRevision);
            if (toBucketRevision != null)
                filter = filter & Builders<CommitData<T>>.Filter.Lte(p => p.BucketRevision, toBucketRevision.Value);

            var cursor = await Collection
                .DistinctAsync(p => p.StreamId, filter)
                .ConfigureAwait(false);
            var result = await cursor.ToListAsync()
                .ConfigureAwait(false);

            return result;
        }

        /// <summary>
        /// Create commit object that will be persisted to Mongo
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="expectedStreamRevision">Expected revision of the provided stream</param>
        /// <param name="eventsArray">List of events to commit</param>
        /// <param name="lastCommit">Last commit written to current bucket</param>
        /// <returns>CommitData object</returns>
        private async Task<CommitData<T>> CreateCommitAsync(Guid streamId, int expectedStreamRevision, T[] eventsArray, CommitInfo lastCommit)
        {
            var bucketRevision = await _eventStore.AutonIncrementStrategy
                .IncrementAsync(BucketName, lastCommit)
                .ConfigureAwait(false);

            var commit = new CommitData<T>
            {
                BucketRevision = bucketRevision,
                Dispatched = false,
                Events = eventsArray,
                StreamId = streamId,
                StreamRevisionStart = expectedStreamRevision,
                StreamRevisionEnd = expectedStreamRevision + eventsArray.Length
            };
            return commit;
        }

        /// <summary>
        /// Setup bucket creating Indexes
        /// </summary>
        private async Task AutoEnsureIndexesAsync()
        {
            // Cheap fast-path; concurrent callers may both ensure indexes, which is harmless.
            if (_indexesEnsured || !_eventStore.AutoEnsureIndexes)
                return;

            await _eventStore.EnsureBucketAsync(BucketName)
                .ConfigureAwait(false);
            _indexesEnsured = true;
        }

        /// <summary>
        /// Dispatch events of commit
        /// </summary>
        /// <param name="commit">Commit to be dispatched</param>
        private async Task DispatchCommitAsync(CommitData<T> commit)
        {
            var dispatchers = _eventStore.GetDispatchers();

            // All dispatchers run in parallel; the commit is only marked dispatched after all succeed.
            await Task.WhenAll(dispatchers.Select(x => x.DispatchAsync(BucketName, commit)))
                .ConfigureAwait(false);

            var commitBucketRevision = commit.BucketRevision;
            await Collection.UpdateOneAsync(
                p => p.BucketRevision == commitBucketRevision,
                Builders<CommitData<T>>.Update.Set(p => p.Dispatched, true))
                .ConfigureAwait(false);
        }

        /// <summary>
        /// Checks if someone else is writing on the same bucket
        /// </summary>
        /// <param name="streamId">Unique stream identifier</param>
        /// <param name="expectedStreamRevision">Expected revision of the provided stream</param>
        /// <param name="lastCommit">Last commit of the bucket</param>
        private async Task CheckStreamConsistencyBeforeWriting(Guid streamId, int expectedStreamRevision, CommitInfo lastCommit)
        {
            if (!_eventStore.CheckStreamRevisionBeforeWriting)
                return;

            if (lastCommit == null)
                return;

            var lastStreamRevision = lastCommit.StreamId == streamId
                ? lastCommit.StreamRevisionEnd
                : await this.GetStreamRevisionAsync(streamId)
                    .ConfigureAwait(false);

            // Note: this check doesn't ensure that in case of real concurrency no one can insert the same commit
            // the real check is done via a mongo index "StreamRevision". This check basically just ensure to do not write
            // revision with holes
            if (lastStreamRevision > expectedStreamRevision)
                throw new ConcurrencyWriteException("Someone else is working on the same bucket or stream");
            if (lastStreamRevision < expectedStreamRevision) // Ensure to write commits sequentially
                throw new ArgumentOutOfRangeException(nameof(expectedStreamRevision));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Globalization;

namespace System.Collections.Immutable
{
    /// <content>
    /// Contains the inner <see cref="ImmutableDictionary{TKey, TValue}.HashBucket"/> struct.
    /// </content>
    public partial class ImmutableDictionary<TKey, TValue>
    {
        /// <summary>
        /// Contains all the key/values in the collection that hash to the same value.
        /// </summary>
        internal readonly struct HashBucket : IEnumerable<KeyValuePair<TKey, TValue>>
        {
            /// <summary>
            /// One of the values in this bucket.
            /// </summary>
            private readonly KeyValuePair<TKey, TValue> _firstValue;

            /// <summary>
            /// Any other elements that hash to the same value.
            /// </summary>
            /// <value>
            /// This is null if and only if the entire bucket is empty (including <see cref="_firstValue"/>).
            /// It's empty if <see cref="_firstValue"/> has an element but no additional elements.
            /// </value>
            private readonly ImmutableList<KeyValuePair<TKey, TValue>>.Node _additionalElements;

            /// <summary>
            /// Initializes a new instance of the <see cref="ImmutableDictionary{TKey, TValue}.HashBucket"/> struct.
            /// </summary>
            /// <param name="firstElement">The first element.</param>
            /// <param name="additionalElements">The additional elements.</param>
            private HashBucket(KeyValuePair<TKey, TValue> firstElement, ImmutableList<KeyValuePair<TKey, TValue>>.Node additionalElements = null)
            {
                _firstValue = firstElement;
                // Normalizing null to EmptyNode here is what makes the null-check in IsEmpty a
                // reliable "entire bucket is empty" sentinel: only default(HashBucket) has a null node.
                _additionalElements = additionalElements ?? ImmutableList<KeyValuePair<TKey, TValue>>.Node.EmptyNode;
            }

            /// <summary>
            /// Gets a value indicating whether this instance is empty.
            /// </summary>
            /// <value>
            /// <c>true</c> if this instance is empty; otherwise, <c>false</c>.
            /// </value>
            internal bool IsEmpty
            {
                get { return _additionalElements == null; }
            }

            /// <summary>
            /// Gets the first value in this bucket.
            /// </summary>
            /// <exception cref="InvalidOperationException">Thrown if the bucket is empty.</exception>
            internal KeyValuePair<TKey, TValue> FirstValue
            {
                get
                {
                    if (this.IsEmpty)
                    {
                        throw new InvalidOperationException();
                    }

                    return _firstValue;
                }
            }

            /// <summary>
            /// Gets the list of additional (hash collision) elements.
            /// </summary>
            internal ImmutableList<KeyValuePair<TKey, TValue>>.Node AdditionalElements
            {
                get { return _additionalElements; }
            }

            /// <summary>
            /// Returns an enumerator that iterates through the collection.
            /// </summary>
            public Enumerator GetEnumerator()
            {
                return new Enumerator(this);
            }

            /// <summary>
            /// Returns an enumerator that iterates through the collection.
            /// </summary>
            /// <returns>
            /// A <see cref="IEnumerator{T}"/> that can be used to iterate through the collection.
            /// </returns>
            IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
            {
                return this.GetEnumerator();
            }

            /// <summary>
            /// Returns an enumerator that iterates through a collection.
            /// </summary>
            /// <returns>
            /// An <see cref="IEnumerator"/> object that can be used to iterate through the collection.
            /// </returns>
            IEnumerator IEnumerable.GetEnumerator()
            {
                return this.GetEnumerator();
            }

            /// <summary>
            /// Throws an exception to catch any errors in comparing <see cref="HashBucket"/> instances.
            /// </summary>
            public override bool Equals(object obj)
            {
                // This should never be called, as hash buckets don't know how to equate themselves.
                throw new NotSupportedException();
            }

            /// <summary>
            /// Throws an exception to catch any errors in comparing <see cref="HashBucket"/> instances.
            /// </summary>
            public override int GetHashCode()
            {
                // This should never be called, as hash buckets don't know how to hash themselves.
                throw new NotSupportedException();
            }

            /// <summary>
            /// Adds the specified key.
            /// </summary>
            /// <param name="key">The key to add.</param>
            /// <param name="value">The value to add.</param>
            /// <param name="keyOnlyComparer">The key comparer.</param>
            /// <param name="valueComparer">The value comparer.</param>
            /// <param name="behavior">The intended behavior for certain cases that may come up during the operation.</param>
            /// <param name="result">A description of the effect the add had on this <see cref="HashBucket"/>.</param>
            /// <returns>A new <see cref="HashBucket"/> that contains the added value and any values already held by this <see cref="HashBucket"/>.</returns>
            internal HashBucket Add(TKey key, TValue value, IEqualityComparer<KeyValuePair<TKey, TValue>> keyOnlyComparer, IEqualityComparer<TValue> valueComparer, KeyCollisionBehavior behavior, out OperationResult result)
            {
                var kv = new KeyValuePair<TKey, TValue>(key, value);
                if (this.IsEmpty)
                {
                    result = OperationResult.SizeChanged;
                    return new HashBucket(kv);
                }

                // Key collides with _firstValue: resolve per the requested behavior.
                if (keyOnlyComparer.Equals(kv, _firstValue))
                {
                    switch (behavior)
                    {
                        case KeyCollisionBehavior.SetValue:
                            result = OperationResult.AppliedWithoutSizeChange;
                            return new HashBucket(kv, _additionalElements);
                        case KeyCollisionBehavior.Skip:
                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowIfValueDifferent:
                            if (!valueComparer.Equals(_firstValue.Value, value))
                            {
                                throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                            }

                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowAlways:
                            throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                        default:
                            throw new InvalidOperationException(); // unreachable
                    }
                }

                int keyCollisionIndex = _additionalElements.IndexOf(kv, keyOnlyComparer);
                if (keyCollisionIndex < 0)
                {
                    // No existing entry with this key: append to the collision list.
                    result = OperationResult.SizeChanged;
                    return new HashBucket(_firstValue, _additionalElements.Add(kv));
                }
                else
                {
                    // Key collides with an entry in the collision list: resolve per the requested behavior.
                    switch (behavior)
                    {
                        case KeyCollisionBehavior.SetValue:
                            result = OperationResult.AppliedWithoutSizeChange;
                            return new HashBucket(_firstValue, _additionalElements.ReplaceAt(keyCollisionIndex, kv));
                        case KeyCollisionBehavior.Skip:
                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowIfValueDifferent:
#if !NETSTANDARD1_0
                            ref readonly var existingEntry = ref _additionalElements.ItemRef(keyCollisionIndex);
#else
                            var existingEntry = _additionalElements[keyCollisionIndex];
#endif
                            if (!valueComparer.Equals(existingEntry.Value, value))
                            {
                                throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                            }

                            result = OperationResult.NoChangeRequired;
                            return this;
                        case KeyCollisionBehavior.ThrowAlways:
                            throw new ArgumentException(SR.Format(SR.DuplicateKey, key));
                        default:
                            throw new InvalidOperationException(); // unreachable
                    }
                }
            }

            /// <summary>
            /// Removes the specified value if it exists in the collection.
            /// </summary>
            /// <param name="key">The key to remove.</param>
            /// <param name="keyOnlyComparer">The equality comparer.</param>
            /// <param name="result">A description of the effect the removal had on this <see cref="HashBucket"/>.</param>
            /// <returns>A new <see cref="HashBucket"/> that does not contain the removed value but does contain any other values already held by this <see cref="HashBucket"/>.</returns>
            internal HashBucket Remove(TKey key, IEqualityComparer<KeyValuePair<TKey, TValue>> keyOnlyComparer, out OperationResult result)
            {
                if (this.IsEmpty)
                {
                    result = OperationResult.NoChangeRequired;
                    return this;
                }

                // The comparer only inspects the key, so the value placeholder is irrelevant.
                var kv = new KeyValuePair<TKey, TValue>(key, default(TValue));
                if (keyOnlyComparer.Equals(_firstValue, kv))
                {
                    if (_additionalElements.IsEmpty)
                    {
                        result = OperationResult.SizeChanged;
                        return new HashBucket();
                    }
                    else
                    {
                        // We can promote any element from the list into the first position, but it's most efficient
                        // to remove the root node in the binary tree that implements the list.
                        int indexOfRootNode = _additionalElements.Left.Count;
                        result = OperationResult.SizeChanged;
                        return new HashBucket(_additionalElements.Key, _additionalElements.RemoveAt(indexOfRootNode));
                    }
                }

                int index = _additionalElements.IndexOf(kv, keyOnlyComparer);
                if (index < 0)
                {
                    result = OperationResult.NoChangeRequired;
                    return this;
                }
                else
                {
                    result = OperationResult.SizeChanged;
                    return new HashBucket(_firstValue, _additionalElements.RemoveAt(index));
                }
            }

            /// <summary>
            /// Gets the value for the given key in the collection if one exists.
            /// </summary>
            /// <param name="key">The key to search for.</param>
            /// <param name="comparers">The comparers.</param>
            /// <param name="value">The value for the given key.</param>
            /// <returns>A value indicating whether the key was found.</returns>
            internal bool TryGetValue(TKey key, Comparers comparers, out TValue value)
            {
                if (this.IsEmpty)
                {
                    value = default(TValue);
                    return false;
                }

                if (comparers.KeyComparer.Equals(_firstValue.Key, key))
                {
                    value = _firstValue.Value;
                    return true;
                }

                var kv = new KeyValuePair<TKey, TValue>(key, default(TValue));
                var index = _additionalElements.IndexOf(kv, comparers.KeyOnlyComparer);
                if (index < 0)
                {
                    value = default(TValue);
                    return false;
                }

#if !NETSTANDARD1_0
                value = _additionalElements.ItemRef(index).Value;
#else
                value = _additionalElements[index].Value;
#endif
                return true;
            }

            /// <summary>
            /// Searches the dictionary for a given key and returns the equal key it finds, if any.
            /// </summary>
            /// <param name="equalKey">The key to search for.</param>
            /// <param name="comparers">The comparers.</param>
            /// <param name="actualKey">The key from the dictionary that the search found, or <paramref name="equalKey"/> if the search yielded no match.</param>
            /// <returns>A value indicating whether the search was successful.</returns>
            /// <remarks>
            /// This can be useful when you want to reuse a previously stored reference instead of
            /// a newly constructed one (so that more sharing of references can occur) or to look up
            /// the canonical value, or a value that has more complete data than the value you currently have,
            /// although their comparer functions indicate they are equal.
            /// </remarks>
            internal bool TryGetKey(TKey equalKey, Comparers comparers, out TKey actualKey)
            {
                if (this.IsEmpty)
                {
                    actualKey = equalKey;
                    return false;
                }

                if (comparers.KeyComparer.Equals(_firstValue.Key, equalKey))
                {
                    actualKey = _firstValue.Key;
                    return true;
                }

                var kv = new KeyValuePair<TKey, TValue>(equalKey, default(TValue));
                var index = _additionalElements.IndexOf(kv, comparers.KeyOnlyComparer);
                if (index < 0)
                {
                    actualKey = equalKey;
                    return false;
                }

#if !NETSTANDARD1_0
                actualKey = _additionalElements.ItemRef(index).Key;
#else
                actualKey = _additionalElements[index].Key;
#endif
                return true;
            }

            /// <summary>
            /// Freezes this instance so that any further mutations require new memory allocations.
            /// </summary>
            internal void Freeze()
            {
                // _additionalElements is null only for default(HashBucket) (an empty bucket), in which
                // case there is nothing to freeze.
                if (_additionalElements != null)
                {
                    _additionalElements.Freeze();
                }
            }

            /// <summary>
            /// Enumerates all the elements in this instance.
            /// </summary>
            internal struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>, IDisposable
            {
                /// <summary>
                /// The bucket being enumerated.
                /// </summary>
                private readonly HashBucket _bucket;

                /// <summary>
                /// The current position of this enumerator.
                /// </summary>
                private Position _currentPosition;

                /// <summary>
                /// The enumerator that represents the current position over the <see cref="_additionalElements"/> of the <see cref="HashBucket"/>.
                /// </summary>
                private ImmutableList<KeyValuePair<TKey, TValue>>.Enumerator _additionalEnumerator;

                /// <summary>
                /// Initializes a new instance of the <see cref="ImmutableDictionary{TKey, TValue}.HashBucket.Enumerator"/> struct.
                /// </summary>
                /// <param name="bucket">The bucket.</param>
                internal Enumerator(HashBucket bucket)
                {
                    _bucket = bucket;
                    _currentPosition = Position.BeforeFirst;
                    _additionalEnumerator = default(ImmutableList<KeyValuePair<TKey, TValue>>.Enumerator);
                }

                /// <summary>
                /// Describes the positions the enumerator state machine may be in.
                /// </summary>
                private enum Position
                {
                    /// <summary>
                    /// The first element has not yet been moved to.
                    /// </summary>
                    BeforeFirst,

                    /// <summary>
                    /// We're at the <see cref="_firstValue"/> of the containing bucket.
                    /// </summary>
                    First,

                    /// <summary>
                    /// We're enumerating the <see cref="_additionalElements"/> in the bucket.
                    /// </summary>
                    Additional,

                    /// <summary>
                    /// The end of enumeration has been reached.
                    /// </summary>
                    End,
                }

                /// <summary>
                /// Gets the current element.
                /// </summary>
                object IEnumerator.Current
                {
                    get { return this.Current; }
                }

                /// <summary>
                /// Gets the current element.
                /// </summary>
                public KeyValuePair<TKey, TValue> Current
                {
                    get
                    {
                        return _currentPosition switch
                        {
                            Position.First => _bucket._firstValue,
                            Position.Additional => _additionalEnumerator.Current,
                            _ => throw new InvalidOperationException(),
                        };
                    }
                }

                /// <summary>
                /// Advances the enumerator to the next element of the collection.
                /// </summary>
                /// <returns>
                /// true if the enumerator was successfully advanced to the next element; false if the enumerator has passed the end of the collection.
                /// </returns>
                /// <exception cref="InvalidOperationException">The collection was modified after the enumerator was created. </exception>
                public bool MoveNext()
                {
                    if (_bucket.IsEmpty)
                    {
                        _currentPosition = Position.End;
                        return false;
                    }

                    switch (_currentPosition)
                    {
                        case Position.BeforeFirst:
                            _currentPosition = Position.First;
                            return true;
                        case Position.First:
                            if (_bucket._additionalElements.IsEmpty)
                            {
                                _currentPosition = Position.End;
                                return false;
                            }

                            // Transition to walking the collision list; return its first element directly.
                            _currentPosition = Position.Additional;
                            _additionalEnumerator = new ImmutableList<KeyValuePair<TKey, TValue>>.Enumerator(_bucket._additionalElements);
                            return _additionalEnumerator.MoveNext();
                        case Position.Additional:
                            return _additionalEnumerator.MoveNext();
                        case Position.End:
                            return false;
                        default:
                            throw new InvalidOperationException();
                    }
                }

                /// <summary>
                /// Sets the enumerator to its initial position, which is before the first element in the collection.
                /// </summary>
                /// <exception cref="InvalidOperationException">The collection was modified after the enumerator was created. </exception>
                public void Reset()
                {
                    // We can safely dispose of the additional enumerator because if the client reuses this enumerator
                    // we'll acquire a new one anyway (and so for that matter we should be sure to dispose of this).
                    _additionalEnumerator.Dispose();
                    _currentPosition = Position.BeforeFirst;
                }

                /// <summary>
                /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
                /// </summary>
                public void Dispose()
                {
                    _additionalEnumerator.Dispose();
                }
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Fixtures.Azure.AcceptanceTestsLro
{
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Net;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// Long-running Operation for AutoRest
    /// </summary>
    public partial class AutoRestLongRunningOperationTestService : ServiceClient<AutoRestLongRunningOperationTestService>, IAutoRestLongRunningOperationTestService, IAzureClient
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Gets Azure subscription credentials.
        /// </summary>
        public ServiceClientCredentials Credentials { get; private set; }

        /// <summary>
        /// Gets or sets the preferred language for the response.
        /// </summary>
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// Gets or sets the retry timeout in seconds for Long Running Operations.
        /// Default value is 30.
        /// </summary>
        public int? LongRunningOperationRetryTimeout { get; set; }

        /// <summary>
        /// When set to true a unique x-ms-client-request-id value is generated and
        /// included in each request. Default is true.
        /// </summary>
        public bool? GenerateClientRequestId { get; set; }

        /// <summary>
        /// Gets the ILROsOperations.
        /// </summary>
        public virtual ILROsOperations LROs { get; private set; }

        /// <summary>
        /// Gets the ILRORetrysOperations.
        /// </summary>
        public virtual ILRORetrysOperations LRORetrys { get; private set; }

        /// <summary>
        /// Gets the ILROSADsOperations.
        /// </summary>
        public virtual ILROSADsOperations LROSADs { get; private set; }

        /// <summary>
        /// Gets the ILROsCustomHeaderOperations.
        /// </summary>
        public virtual ILROsCustomHeaderOperations LROsCustomHeader { get; private set; }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestLongRunningOperationTestService(params DelegatingHandler[] handlers) : base(handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected AutoRestLongRunningOperationTestService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            this.Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestLongRunningOperationTestService(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected AutoRestLongRunningOperationTestService(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            this.BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestLongRunningOperationTestService(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestLongRunningOperationTestService(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestLongRunningOperationTestService(Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the AutoRestLongRunningOperationTestService class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Gets Azure subscription credentials.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public AutoRestLongRunningOperationTestService(Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this.BaseUri = baseUri;
            this.Credentials = credentials;
            if (this.Credentials != null)
            {
                this.Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// An optional partial-method to perform custom initialization.
        /// </summary>
        partial void CustomInitialize();

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            this.LROs = new LROsOperations(this);
            this.LRORetrys = new LRORetrysOperations(this);
            this.LROSADs = new LROSADsOperations(this);
            this.LROsCustomHeader = new LROsCustomHeaderOperations(this);
            this.BaseUri = new Uri("http://localhost");
            this.AcceptLanguage = "en-US";
            this.LongRunningOperationRetryTimeout = 30;
            this.GenerateClientRequestId = true;
            SerializationSettings = new JsonSerializerSettings
            {
                Formatting = Formatting.Indented,
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                    {
                        new Iso8601TimeSpanConverter()
                    }
            };
            SerializationSettings.Converters.Add(new TransformationJsonConverter());
            DeserializationSettings = new JsonSerializerSettings
            {
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                    {
                        new Iso8601TimeSpanConverter()
                    }
            };
            // NOTE(review): CustomInitialize runs before the Transformation/CloudError converters are
            // appended to DeserializationSettings, so any converters a partial class adds there appear
            // earlier in the Converters list — presumably intentional (generator output); confirm if
            // converter precedence ever matters for a custom converter.
            CustomInitialize();
            DeserializationSettings.Converters.Add(new TransformationJsonConverter());
            DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
        }
    }
}
/********************************************************************++ Copyright (c) Microsoft Corporation. All rights reserved. --********************************************************************/ #if !SILVERLIGHT // ComObject #if !CLR2 using System.Linq.Expressions; #else using Microsoft.Scripting.Ast; #endif using System.Collections; using System.Collections.Generic; using System.Diagnostics; using System.Globalization; using System.Reflection; using System.Runtime.InteropServices; using ComTypes = System.Runtime.InteropServices.ComTypes; using System.Dynamic; namespace System.Management.Automation.ComInterop { /// <summary> /// An object that implements IDispatch /// /// This currently has the following issues: /// 1. If we prefer ComObjectWithTypeInfo over IDispatchComObject, then we will often not /// IDispatchComObject since implementations of IDispatch often rely on a registered type library. /// If we prefer IDispatchComObject over ComObjectWithTypeInfo, users get a non-ideal experience. /// 2. IDispatch cannot distinguish between properties and methods with 0 arguments (and non-0 /// default arguments?). So obj.foo() is ambiguous as it could mean invoking method foo, /// or it could mean invoking the function pointer returned by property foo. /// We are attempting to find whether we need to call a method or a property by examining /// the ITypeInfo associated with the IDispatch. ITypeInfo tell's use what parameters the method /// expects, is it a method or a property, what is the default property of the object, how to /// create an enumerator for collections etc. /// 3. IronPython processes the signature and converts ref arguments into return values. /// However, since the signature of a DispMethod is not available beforehand, this conversion /// is not possible. There could be other signature conversions that may be affected. How does /// VB6 deal with ref arguments and IDispatch? 
/// /// We also support events for IDispatch objects: /// Background: /// COM objects support events through a mechanism known as Connect Points. /// Connection Points are separate objects created off the actual COM /// object (this is to prevent circular references between event sink /// and event source). When clients want to sink events generated by /// COM object they would implement callback interfaces (aka source /// interfaces) and hand it over (advise) to the Connection Point. /// /// Implementation details: /// When IDispatchComObject.TryGetMember request is received we first check /// whether the requested member is a property or a method. If this check /// fails we will try to determine whether an event is requested. To do /// so we will do the following set of steps: /// 1. Verify the COM object implements IConnectionPointContainer /// 2. Attempt to find COM object's coclass's description /// a. Query the object for IProvideClassInfo interface. Go to 3, if found /// b. From object's IDispatch retrieve primary interface description /// c. Scan coclasses declared in object's type library. /// d. Find coclass implementing this particular primary interface /// 3. Scan coclass for all its source interfaces. /// 4. Check whether to any of the methods on the source interfaces matches /// the request name /// /// Once we determine that TryGetMember requests an event we will return /// an instance of BoundDispEvent class. This class has InPlaceAdd and /// InPlaceSubtract operators defined. Calling InPlaceAdd operator will: /// 1. An instance of ComEventSinksContainer class is created (unless /// RCW already had one). This instance is hanged off the RCW in attempt /// to bind the lifetime of event sinks to the lifetime of the RCW itself, /// meaning event sink will be collected once the RCW is collected (this /// is the same way event sinks lifetime is controlled by PIAs). 
/// Notice: ComEventSinksContainer contains a Finalizer which will go and /// unadvise all event sinks. /// Notice: ComEventSinksContainer is a list of ComEventSink objects. /// 2. Unless we have already created a ComEventSink for the required /// source interface, we will create and advise a new ComEventSink. Each /// ComEventSink implements a single source interface that COM object /// supports. /// 3. ComEventSink contains a map between method DISPIDs to the /// multicast delegate that will be invoked when the event is raised. /// 4. ComEventSink implements IReflect interface which is exposed as /// custom IDispatch to COM consumers. This allows us to intercept calls /// to IDispatch.Invoke and apply custom logic - in particular we will /// just find and invoke the multicast delegate corresponding to the invoked /// dispid. /// </summary> internal sealed class IDispatchComObject : ComObject, IDynamicMetaObjectProvider { private ComTypeDesc _comTypeDesc; private static readonly Dictionary<Guid, ComTypeDesc> s_cacheComTypeDesc = new Dictionary<Guid, ComTypeDesc>(); internal IDispatchComObject(IDispatch rcw) : base(rcw) { DispatchObject = rcw; } public override string ToString() { ComTypeDesc ctd = _comTypeDesc; string typeName = null; if (ctd != null) { typeName = ctd.TypeName; } if (String.IsNullOrEmpty(typeName)) { typeName = "IDispatch"; } return String.Format(CultureInfo.CurrentCulture, "{0} ({1})", RuntimeCallableWrapper.ToString(), typeName); } public ComTypeDesc ComTypeDesc { get { EnsureScanDefinedMethods(); return _comTypeDesc; } } public IDispatch DispatchObject { get; } private static int GetIDsOfNames(IDispatch dispatch, string name, out int dispId) { int[] dispIds = new int[1]; Guid emptyRiid = Guid.Empty; int hresult = dispatch.TryGetIDsOfNames( ref emptyRiid, new string[] { name }, 1, 0, dispIds); dispId = dispIds[0]; return hresult; } private static int Invoke(IDispatch dispatch, int memberDispId, out object result) { Guid emptyRiid = Guid.Empty; 
ComTypes.DISPPARAMS dispParams = new ComTypes.DISPPARAMS(); ComTypes.EXCEPINFO excepInfo = new ComTypes.EXCEPINFO(); uint argErr; int hresult = dispatch.TryInvoke( memberDispId, ref emptyRiid, 0, ComTypes.INVOKEKIND.INVOKE_PROPERTYGET, ref dispParams, out result, out excepInfo, out argErr); return hresult; } internal bool TryGetGetItem(out ComMethodDesc value) { ComMethodDesc methodDesc = _comTypeDesc.GetItem; if (methodDesc != null) { value = methodDesc; return true; } return SlowTryGetGetItem(out value); } private bool SlowTryGetGetItem(out ComMethodDesc value) { EnsureScanDefinedMethods(); ComMethodDesc methodDesc = _comTypeDesc.GetItem; // Without type information, we really don't know whether or not we have a property getter. if (methodDesc == null) { string name = "[PROPERTYGET, DISPID(0)]"; _comTypeDesc.EnsureGetItem(new ComMethodDesc(name, ComDispIds.DISPID_VALUE, ComTypes.INVOKEKIND.INVOKE_PROPERTYGET)); methodDesc = _comTypeDesc.GetItem; } value = methodDesc; return true; } internal bool TryGetSetItem(out ComMethodDesc value) { ComMethodDesc methodDesc = _comTypeDesc.SetItem; if (methodDesc != null) { value = methodDesc; return true; } return SlowTryGetSetItem(out value); } private bool SlowTryGetSetItem(out ComMethodDesc value) { EnsureScanDefinedMethods(); ComMethodDesc methodDesc = _comTypeDesc.SetItem; // Without type information, we really don't know whether or not we have a property setter. 
if (methodDesc == null) { string name = "[PROPERTYPUT, DISPID(0)]"; _comTypeDesc.EnsureSetItem(new ComMethodDesc(name, ComDispIds.DISPID_VALUE, ComTypes.INVOKEKIND.INVOKE_PROPERTYPUT)); methodDesc = _comTypeDesc.SetItem; } value = methodDesc; return true; } internal bool TryGetMemberMethod(string name, out ComMethodDesc method) { EnsureScanDefinedMethods(); return _comTypeDesc.TryGetFunc(name, out method); } internal bool TryGetMemberEvent(string name, out ComEventDesc @event) { EnsureScanDefinedEvents(); return _comTypeDesc.TryGetEvent(name, out @event); } internal bool TryGetMemberMethodExplicit(string name, out ComMethodDesc method) { EnsureScanDefinedMethods(); int dispId; int hresult = GetIDsOfNames(DispatchObject, name, out dispId); if (hresult == ComHresults.S_OK) { ComMethodDesc cmd = new ComMethodDesc(name, dispId, ComTypes.INVOKEKIND.INVOKE_FUNC); _comTypeDesc.AddFunc(name, cmd); method = cmd; return true; } else if (hresult == ComHresults.DISP_E_UNKNOWNNAME) { method = null; return false; } else { throw Error.CouldNotGetDispId(name, String.Format(CultureInfo.InvariantCulture, "0x{0:X})", hresult)); } } internal bool TryGetPropertySetterExplicit(string name, out ComMethodDesc method, Type limitType, bool holdsNull) { EnsureScanDefinedMethods(); int dispId; int hresult = GetIDsOfNames(DispatchObject, name, out dispId); if (hresult == ComHresults.S_OK) { // we do not know whether we have put or putref here // and we will not guess and pretend we found both. 
ComMethodDesc put = new ComMethodDesc(name, dispId, ComTypes.INVOKEKIND.INVOKE_PROPERTYPUT); _comTypeDesc.AddPut(name, put); ComMethodDesc putref = new ComMethodDesc(name, dispId, ComTypes.INVOKEKIND.INVOKE_PROPERTYPUTREF); _comTypeDesc.AddPutRef(name, putref); if (ComBinderHelpers.PreferPut(limitType, holdsNull)) { method = put; } else { method = putref; } return true; } else if (hresult == ComHresults.DISP_E_UNKNOWNNAME) { method = null; return false; } else { throw Error.CouldNotGetDispId(name, String.Format(CultureInfo.InvariantCulture, "0x{0:X})", hresult)); } } internal override IList<string> GetMemberNames(bool dataOnly) { EnsureScanDefinedMethods(); EnsureScanDefinedEvents(); return ComTypeDesc.GetMemberNames(dataOnly); } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")] internal override IList<KeyValuePair<string, object>> GetMembers(IEnumerable<string> names) { if (names == null) { names = GetMemberNames(true); } Type comType = RuntimeCallableWrapper.GetType(); var members = new List<KeyValuePair<string, object>>(); foreach (string name in names) { if (name == null) { continue; } ComMethodDesc method; if (ComTypeDesc.TryGetFunc(name, out method) && method.IsDataMember) { try { object value = comType.InvokeMember( method.Name, BindingFlags.GetProperty, null, RuntimeCallableWrapper, Utils.EmptyArray<object>(), CultureInfo.InvariantCulture ); members.Add(new KeyValuePair<string, object>(method.Name, value)); //evaluation failed for some reason. 
pass exception out } catch (Exception ex) { members.Add(new KeyValuePair<string, object>(method.Name, ex)); } } } return members.ToArray(); } DynamicMetaObject IDynamicMetaObjectProvider.GetMetaObject(Expression parameter) { EnsureScanDefinedMethods(); return new IDispatchMetaObject(parameter, this); } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2201:DoNotRaiseReservedExceptionTypes")] private static void GetFuncDescForDescIndex(ComTypes.ITypeInfo typeInfo, int funcIndex, out ComTypes.FUNCDESC funcDesc, out IntPtr funcDescHandle) { IntPtr pFuncDesc = IntPtr.Zero; typeInfo.GetFuncDesc(funcIndex, out pFuncDesc); // GetFuncDesc should never return null, this is just to be safe if (pFuncDesc == IntPtr.Zero) { throw Error.CannotRetrieveTypeInformation(); } funcDesc = (ComTypes.FUNCDESC)Marshal.PtrToStructure(pFuncDesc, typeof(ComTypes.FUNCDESC)); funcDescHandle = pFuncDesc; } private void EnsureScanDefinedEvents() { // _comTypeDesc.Events is null if we have not yet attempted // to scan the object for events. 
if (_comTypeDesc != null && _comTypeDesc.Events != null) { return; } // check type info in the type descriptions cache ComTypes.ITypeInfo typeInfo = ComRuntimeHelpers.GetITypeInfoFromIDispatch(DispatchObject, true); if (typeInfo == null) { _comTypeDesc = ComTypeDesc.CreateEmptyTypeDesc(); return; } ComTypes.TYPEATTR typeAttr = ComRuntimeHelpers.GetTypeAttrForTypeInfo(typeInfo); if (_comTypeDesc == null) { lock (s_cacheComTypeDesc) { if (s_cacheComTypeDesc.TryGetValue(typeAttr.guid, out _comTypeDesc) == true && _comTypeDesc.Events != null) { return; } } } ComTypeDesc typeDesc = ComTypeDesc.FromITypeInfo(typeInfo, typeAttr); ComTypes.ITypeInfo classTypeInfo = null; Dictionary<string, ComEventDesc> events = null; var cpc = RuntimeCallableWrapper as ComTypes.IConnectionPointContainer; if (cpc == null) { // No ICPC - this object does not support events events = ComTypeDesc.EmptyEvents; } else if ((classTypeInfo = GetCoClassTypeInfo(this.RuntimeCallableWrapper, typeInfo)) == null) { // no class info found - this object may support events // but we could not discover those events = ComTypeDesc.EmptyEvents; } else { events = new Dictionary<string, ComEventDesc>(); ComTypes.TYPEATTR classTypeAttr = ComRuntimeHelpers.GetTypeAttrForTypeInfo(classTypeInfo); for (int i = 0; i < classTypeAttr.cImplTypes; i++) { int hRefType; classTypeInfo.GetRefTypeOfImplType(i, out hRefType); ComTypes.ITypeInfo interfaceTypeInfo; classTypeInfo.GetRefTypeInfo(hRefType, out interfaceTypeInfo); ComTypes.IMPLTYPEFLAGS flags; classTypeInfo.GetImplTypeFlags(i, out flags); if ((flags & ComTypes.IMPLTYPEFLAGS.IMPLTYPEFLAG_FSOURCE) != 0) { ScanSourceInterface(interfaceTypeInfo, ref events); } } if (events.Count == 0) { events = ComTypeDesc.EmptyEvents; } } lock (s_cacheComTypeDesc) { ComTypeDesc cachedTypeDesc; if (s_cacheComTypeDesc.TryGetValue(typeAttr.guid, out cachedTypeDesc)) { _comTypeDesc = cachedTypeDesc; } else { _comTypeDesc = typeDesc; s_cacheComTypeDesc.Add(typeAttr.guid, _comTypeDesc); } 
_comTypeDesc.Events = events; } } private static void ScanSourceInterface(ComTypes.ITypeInfo sourceTypeInfo, ref Dictionary<string, ComEventDesc> events) { ComTypes.TYPEATTR sourceTypeAttribute = ComRuntimeHelpers.GetTypeAttrForTypeInfo(sourceTypeInfo); for (int index = 0; index < sourceTypeAttribute.cFuncs; index++) { IntPtr funcDescHandleToRelease = IntPtr.Zero; try { ComTypes.FUNCDESC funcDesc; GetFuncDescForDescIndex(sourceTypeInfo, index, out funcDesc, out funcDescHandleToRelease); // we are not interested in hidden or restricted functions for now. if ((funcDesc.wFuncFlags & (int)ComTypes.FUNCFLAGS.FUNCFLAG_FHIDDEN) != 0) { continue; } if ((funcDesc.wFuncFlags & (int)ComTypes.FUNCFLAGS.FUNCFLAG_FRESTRICTED) != 0) { continue; } string name = ComRuntimeHelpers.GetNameOfMethod(sourceTypeInfo, funcDesc.memid); name = name.ToUpper(System.Globalization.CultureInfo.InvariantCulture); // Sometimes coclass has multiple source interfaces. Usually this is caused by // adding new events and putting them on new interfaces while keeping the // old interfaces around. This may cause name collisions which we are // resolving by keeping only the first event with the same name. 
if (events.ContainsKey(name) == false) { ComEventDesc eventDesc = new ComEventDesc(); eventDesc.dispid = funcDesc.memid; eventDesc.sourceIID = sourceTypeAttribute.guid; events.Add(name, eventDesc); } } finally { if (funcDescHandleToRelease != IntPtr.Zero) { sourceTypeInfo.ReleaseFuncDesc(funcDescHandleToRelease); } } } } private static ComTypes.ITypeInfo GetCoClassTypeInfo(object rcw, ComTypes.ITypeInfo typeInfo) { Debug.Assert(typeInfo != null); IProvideClassInfo provideClassInfo = rcw as IProvideClassInfo; if (provideClassInfo != null) { IntPtr typeInfoPtr = IntPtr.Zero; try { provideClassInfo.GetClassInfo(out typeInfoPtr); if (typeInfoPtr != IntPtr.Zero) { return Marshal.GetObjectForIUnknown(typeInfoPtr) as ComTypes.ITypeInfo; } } finally { if (typeInfoPtr != IntPtr.Zero) { Marshal.Release(typeInfoPtr); } } } // retrieving class information through IPCI has failed - // we can try scanning the typelib to find the coclass ComTypes.ITypeLib typeLib; int typeInfoIndex; typeInfo.GetContainingTypeLib(out typeLib, out typeInfoIndex); string typeName = ComRuntimeHelpers.GetNameOfType(typeInfo); ComTypeLibDesc typeLibDesc = ComTypeLibDesc.GetFromTypeLib(typeLib); ComTypeClassDesc coclassDesc = typeLibDesc.GetCoClassForInterface(typeName); if (coclassDesc == null) { return null; } ComTypes.ITypeInfo typeInfoCoClass; Guid coclassGuid = coclassDesc.Guid; typeLib.GetTypeInfoOfGuid(ref coclassGuid, out typeInfoCoClass); return typeInfoCoClass; } private void EnsureScanDefinedMethods() { if (_comTypeDesc != null && _comTypeDesc.Funcs != null) { return; } ComTypes.ITypeInfo typeInfo = ComRuntimeHelpers.GetITypeInfoFromIDispatch(DispatchObject, true); if (typeInfo == null) { _comTypeDesc = ComTypeDesc.CreateEmptyTypeDesc(); return; } ComTypes.TYPEATTR typeAttr = ComRuntimeHelpers.GetTypeAttrForTypeInfo(typeInfo); if (_comTypeDesc == null) { lock (s_cacheComTypeDesc) { if (s_cacheComTypeDesc.TryGetValue(typeAttr.guid, out _comTypeDesc) == true && _comTypeDesc.Funcs != null) { 
return; } } } if (typeAttr.typekind == ComTypes.TYPEKIND.TKIND_INTERFACE) { //We have typeinfo for custom interface. Get typeinfo for Dispatch interface. typeInfo = ComTypeInfo.GetDispatchTypeInfoFromCustomInterfaceTypeInfo(typeInfo); typeAttr = ComRuntimeHelpers.GetTypeAttrForTypeInfo(typeInfo); } if (typeAttr.typekind == ComTypes.TYPEKIND.TKIND_COCLASS) { //We have typeinfo for the COClass. Find the default interface and get typeinfo for default interface. typeInfo = ComTypeInfo.GetDispatchTypeInfoFromCoClassTypeInfo(typeInfo); typeAttr = ComRuntimeHelpers.GetTypeAttrForTypeInfo(typeInfo); } ComTypeDesc typeDesc = ComTypeDesc.FromITypeInfo(typeInfo, typeAttr); ComMethodDesc getItem = null; ComMethodDesc setItem = null; Hashtable funcs = new Hashtable(typeAttr.cFuncs); Hashtable puts = new Hashtable(); Hashtable putrefs = new Hashtable(); for (int definedFuncIndex = 0; definedFuncIndex < typeAttr.cFuncs; definedFuncIndex++) { IntPtr funcDescHandleToRelease = IntPtr.Zero; try { ComTypes.FUNCDESC funcDesc; GetFuncDescForDescIndex(typeInfo, definedFuncIndex, out funcDesc, out funcDescHandleToRelease); if ((funcDesc.wFuncFlags & (int)ComTypes.FUNCFLAGS.FUNCFLAG_FRESTRICTED) != 0) { // This function is not meant for the script user to use. continue; } ComMethodDesc method = new ComMethodDesc(typeInfo, funcDesc); string name = method.Name.ToUpper(System.Globalization.CultureInfo.InvariantCulture); if ((funcDesc.invkind & ComTypes.INVOKEKIND.INVOKE_PROPERTYPUT) != 0) { // If there is a getter for this put, use that ReturnType as the // PropertyType. if (funcs.ContainsKey(name)) { method.InputType = ((ComMethodDesc)funcs[name]).ReturnType; } puts.Add(name, method); // for the special dispId == 0, we need to store // the method descriptor for the Do(SetItem) binder. 
if (method.DispId == ComDispIds.DISPID_VALUE && setItem == null) { setItem = method; } continue; } if ((funcDesc.invkind & ComTypes.INVOKEKIND.INVOKE_PROPERTYPUTREF) != 0) { // If there is a getter for this put, use that ReturnType as the // PropertyType. if (funcs.ContainsKey(name)) { method.InputType = ((ComMethodDesc)funcs[name]).ReturnType; } putrefs.Add(name, method); // for the special dispId == 0, we need to store // the method descriptor for the Do(SetItem) binder. if (method.DispId == ComDispIds.DISPID_VALUE && setItem == null) { setItem = method; } continue; } if (funcDesc.memid == ComDispIds.DISPID_NEWENUM) { funcs.Add("GETENUMERATOR", method); continue; } // If there is a setter for this put, update the InputType from our // ReturnType. if (puts.ContainsKey(name)) { ((ComMethodDesc)puts[name]).InputType = method.ReturnType; } if (putrefs.ContainsKey(name)) { ((ComMethodDesc)putrefs[name]).InputType = method.ReturnType; } funcs.Add(name, method); // for the special dispId == 0, we need to store the method descriptor // for the Do(GetItem) binder. 
if (funcDesc.memid == ComDispIds.DISPID_VALUE) { getItem = method; } } finally { if (funcDescHandleToRelease != IntPtr.Zero) { typeInfo.ReleaseFuncDesc(funcDescHandleToRelease); } } } lock (s_cacheComTypeDesc) { ComTypeDesc cachedTypeDesc; if (s_cacheComTypeDesc.TryGetValue(typeAttr.guid, out cachedTypeDesc)) { _comTypeDesc = cachedTypeDesc; } else { _comTypeDesc = typeDesc; s_cacheComTypeDesc.Add(typeAttr.guid, _comTypeDesc); } _comTypeDesc.Funcs = funcs; _comTypeDesc.Puts = puts; _comTypeDesc.PutRefs = putrefs; _comTypeDesc.EnsureGetItem(getItem); _comTypeDesc.EnsureSetItem(setItem); } } internal bool TryGetPropertySetter(string name, out ComMethodDesc method, Type limitType, bool holdsNull) { EnsureScanDefinedMethods(); if (ComBinderHelpers.PreferPut(limitType, holdsNull)) { return _comTypeDesc.TryGetPut(name, out method) || _comTypeDesc.TryGetPutRef(name, out method); } else { return _comTypeDesc.TryGetPutRef(name, out method) || _comTypeDesc.TryGetPut(name, out method); } } } } #endif
// 
// Copyright (c) Microsoft and contributors. All rights reserved.
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// 
// See the License for the specific language governing permissions and
// limitations under the License.
// 

// Warning: This code was generated by a tool.
// 
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.WindowsAzure.Management.Network.Models;

namespace Microsoft.WindowsAzure.Management.Network.Models
{
    /// <summary>
    /// The response structure for the Network Operations List operation.
    /// Enumerating the response enumerates its <see cref="VirtualNetworkSites"/>.
    /// </summary>
    public partial class NetworkListResponse : AzureOperationResponse, IEnumerable<NetworkListResponse.VirtualNetworkSite>
    {
        private IList<NetworkListResponse.VirtualNetworkSite> _virtualNetworkSites;
        
        /// <summary>
        /// Optional. The virtual network sites returned by the list operation.
        /// </summary>
        public IList<NetworkListResponse.VirtualNetworkSite> VirtualNetworkSites
        {
            get { return this._virtualNetworkSites; }
            set { this._virtualNetworkSites = value; }
        }
        
        /// <summary>
        /// Initializes a new instance of the NetworkListResponse class.
        /// </summary>
        public NetworkListResponse()
        {
            // LazyList defers allocation until the list is first used.
            this.VirtualNetworkSites = new LazyList<NetworkListResponse.VirtualNetworkSite>();
        }
        
        /// <summary>
        /// Gets the sequence of VirtualNetworkSites.
        /// </summary>
        public IEnumerator<NetworkListResponse.VirtualNetworkSite> GetEnumerator()
        {
            return this.VirtualNetworkSites.GetEnumerator();
        }
        
        /// <summary>
        /// Gets the sequence of VirtualNetworkSites.
        /// </summary>
        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            return this.GetEnumerator();
        }
        
        /// <summary>
        /// The set of network address spaces (CIDR prefixes) for a network.
        /// </summary>
        public partial class AddressSpace
        {
            private IList<string> _addressPrefixes;
            
            /// <summary>
            /// Optional. Address spaces, in CIDR format in the virtual network.
            /// </summary>
            public IList<string> AddressPrefixes
            {
                get { return this._addressPrefixes; }
                set { this._addressPrefixes = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the AddressSpace class.
            /// </summary>
            public AddressSpace()
            {
                this.AddressPrefixes = new LazyList<string>();
            }
        }
        
        /// <summary>
        /// Specifies the type of connection of the local network site. The
        /// value of this element can be either IPsec or Dedicated. The
        /// default value is IPsec.
        /// </summary>
        public partial class Connection
        {
            private string _type;
            
            /// <summary>
            /// Optional. The connection type (IPsec or Dedicated).
            /// </summary>
            public string Type
            {
                get { return this._type; }
                set { this._type = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the Connection class.
            /// </summary>
            public Connection()
            {
            }
        }
        
        /// <summary>
        /// A DNS server associated with a virtual network site.
        /// </summary>
        public partial class DnsServer
        {
            private string _address;
            
            /// <summary>
            /// Optional. The IPv4 address of the DNS server.
            /// </summary>
            public string Address
            {
                get { return this._address; }
                set { this._address = value; }
            }
            
            private string _name;
            
            /// <summary>
            /// Optional. The name of the DNS server.
            /// </summary>
            public string Name
            {
                get { return this._name; }
                set { this._name = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the DnsServer class.
            /// </summary>
            public DnsServer()
            {
            }
        }
        
        /// <summary>
        /// Contains gateway references to the local network sites that the
        /// virtual network can connect to.
        /// </summary>
        public partial class Gateway
        {
            private string _profile;
            
            /// <summary>
            /// Optional. The gateway connection size.
            /// </summary>
            public string Profile
            {
                get { return this._profile; }
                set { this._profile = value; }
            }
            
            private IList<NetworkListResponse.LocalNetworkSite> _sites;
            
            /// <summary>
            /// Optional. The list of local network sites that the virtual
            /// network can connect to.
            /// </summary>
            public IList<NetworkListResponse.LocalNetworkSite> Sites
            {
                get { return this._sites; }
                set { this._sites = value; }
            }
            
            private NetworkListResponse.VPNClientAddressPool _vPNClientAddressPool;
            
            /// <summary>
            /// Optional. The VPN Client Address Pool reserves a pool of IP
            /// addresses for VPN clients. This object is used for
            /// point-to-site connectivity.
            /// </summary>
            public NetworkListResponse.VPNClientAddressPool VPNClientAddressPool
            {
                get { return this._vPNClientAddressPool; }
                set { this._vPNClientAddressPool = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the Gateway class.
            /// </summary>
            public Gateway()
            {
                this.Sites = new LazyList<NetworkListResponse.LocalNetworkSite>();
            }
        }
        
        /// <summary>
        /// Contains the list of parameters defining the local network site.
        /// </summary>
        public partial class LocalNetworkSite
        {
            private NetworkListResponse.AddressSpace _addressSpace;
            
            /// <summary>
            /// Optional. The address space of the local network site.
            /// </summary>
            public NetworkListResponse.AddressSpace AddressSpace
            {
                get { return this._addressSpace; }
                set { this._addressSpace = value; }
            }
            
            private IList<NetworkListResponse.Connection> _connections;
            
            /// <summary>
            /// Optional. Specifies the types of connections to the local
            /// network site.
            /// </summary>
            public IList<NetworkListResponse.Connection> Connections
            {
                get { return this._connections; }
                set { this._connections = value; }
            }
            
            private string _name;
            
            /// <summary>
            /// Optional. The name of the local network site.
            /// </summary>
            public string Name
            {
                get { return this._name; }
                set { this._name = value; }
            }
            
            private string _vpnGatewayAddress;
            
            /// <summary>
            /// Optional. The IPv4 address of the local network site.
            /// </summary>
            public string VpnGatewayAddress
            {
                get { return this._vpnGatewayAddress; }
                set { this._vpnGatewayAddress = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the LocalNetworkSite class.
            /// </summary>
            public LocalNetworkSite()
            {
                this.Connections = new LazyList<NetworkListResponse.Connection>();
            }
        }
        
        /// <summary>
        /// A subnet of a virtual network site.
        /// </summary>
        public partial class Subnet
        {
            private string _addressPrefix;
            
            /// <summary>
            /// Optional. Represents an address space, in CIDR format that
            /// defines the subnet.
            /// </summary>
            public string AddressPrefix
            {
                get { return this._addressPrefix; }
                set { this._addressPrefix = value; }
            }
            
            private string _name;
            
            /// <summary>
            /// Optional. Name of the subnet.
            /// </summary>
            public string Name
            {
                get { return this._name; }
                set { this._name = value; }
            }
            
            private string _networkSecurityGroup;
            
            /// <summary>
            /// Optional. Name of Network Security Group associated with this
            /// subnet.
            /// </summary>
            public string NetworkSecurityGroup
            {
                get { return this._networkSecurityGroup; }
                set { this._networkSecurityGroup = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the Subnet class.
            /// </summary>
            public Subnet()
            {
            }
        }
        
        /// <summary>
        /// Contains the collections of parameters used to configure a virtual
        /// network space that is dedicated to your subscription without
        /// overlapping with other networks.
        /// </summary>
        public partial class VirtualNetworkSite
        {
            private NetworkListResponse.AddressSpace _addressSpace;
            
            /// <summary>
            /// Optional. The list of network address spaces for a virtual
            /// network site. This represents the overall network space
            /// contained within the virtual network site.
            /// </summary>
            public NetworkListResponse.AddressSpace AddressSpace
            {
                get { return this._addressSpace; }
                set { this._addressSpace = value; }
            }
            
            private string _affinityGroup;
            
            /// <summary>
            /// Optional. An affinity group, which indirectly refers to the
            /// location where the virtual network exists.
            /// </summary>
            public string AffinityGroup
            {
                get { return this._affinityGroup; }
                set { this._affinityGroup = value; }
            }
            
            private IList<NetworkListResponse.DnsServer> _dnsServers;
            
            /// <summary>
            /// Optional. The list of available DNS Servers associated with the
            /// virtual network site.
            /// </summary>
            public IList<NetworkListResponse.DnsServer> DnsServers
            {
                get { return this._dnsServers; }
                set { this._dnsServers = value; }
            }
            
            private NetworkListResponse.Gateway _gateway;
            
            /// <summary>
            /// Optional. The gateway that contains a list of Local Network
            /// Sites which enable the Virtual Network Site to communicate
            /// with a customer's on-premise networks.
            /// </summary>
            public NetworkListResponse.Gateway Gateway
            {
                get { return this._gateway; }
                set { this._gateway = value; }
            }
            
            private string _id;
            
            /// <summary>
            /// Optional. A unique string identifier that represents the
            /// virtual network site.
            /// </summary>
            public string Id
            {
                get { return this._id; }
                set { this._id = value; }
            }
            
            private bool _inUse;
            
            /// <summary>
            /// Optional. Shows whether the virtual network is in use.
            /// </summary>
            public bool InUse
            {
                get { return this._inUse; }
                set { this._inUse = value; }
            }
            
            private string _label;
            
            /// <summary>
            /// Optional. The friendly identifier for the site.
            /// </summary>
            public string Label
            {
                get { return this._label; }
                set { this._label = value; }
            }
            
            private string _location;
            
            /// <summary>
            /// Optional. Gets or sets the virtual network location.
            /// </summary>
            public string Location
            {
                get { return this._location; }
                set { this._location = value; }
            }
            
            private string _migrationState;
            
            /// <summary>
            /// Optional. Specifies the IaaS Classic to ARM migration state of
            /// the Virtual Network Site. Possible values are: None,
            /// Preparing, Prepared, PrepareFailed, Committing, Committed,
            /// CommitFailed, Aborting, AbortFailed. None is treated as null
            /// value and it is not be visible.
            /// </summary>
            public string MigrationState
            {
                get { return this._migrationState; }
                set { this._migrationState = value; }
            }
            
            private string _name;
            
            /// <summary>
            /// Optional. Name of the virtual network site.
            /// </summary>
            public string Name
            {
                get { return this._name; }
                set { this._name = value; }
            }
            
            private string _state;
            
            /// <summary>
            /// Optional. Current status of the virtual network. (Created,
            /// Creating, Updating, Deleting, or Unavailable.)
            /// </summary>
            public string State
            {
                get { return this._state; }
                set { this._state = value; }
            }
            
            private IList<NetworkListResponse.Subnet> _subnets;
            
            /// <summary>
            /// Optional. The list of network subnets for a virtual network
            /// site. All network subnets must be contained within the overall
            /// virtual network address spaces.
            /// </summary>
            public IList<NetworkListResponse.Subnet> Subnets
            {
                get { return this._subnets; }
                set { this._subnets = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the VirtualNetworkSite class.
            /// </summary>
            public VirtualNetworkSite()
            {
                this.DnsServers = new LazyList<NetworkListResponse.DnsServer>();
                this.Subnets = new LazyList<NetworkListResponse.Subnet>();
            }
        }
        
        /// <summary>
        /// The VPN Client Address Pool reserves a pool of IP addresses for VPN
        /// clients. This object is used for point-to-site connectivity.
        /// </summary>
        public partial class VPNClientAddressPool
        {
            private IList<string> _addressPrefixes;
            
            /// <summary>
            /// Optional. The CIDR identifiers that identify addresses in the
            /// pool.
            /// </summary>
            public IList<string> AddressPrefixes
            {
                get { return this._addressPrefixes; }
                set { this._addressPrefixes = value; }
            }
            
            /// <summary>
            /// Initializes a new instance of the VPNClientAddressPool class.
            /// </summary>
            public VPNClientAddressPool()
            {
                this.AddressPrefixes = new LazyList<string>();
            }
        }
    }
}
/* The MIT License (MIT)
 *
 * Copyright (c) 2014 HendryLeo
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using Microsoft.Dexterity.Bridge;
using Microsoft.Dexterity.Applications;
using Microsoft.Dexterity.Shell;
using Microsoft.Dexterity.Applications.DynamicsDictionary;

namespace EditPOPUserDefinedAfterPost
{
    /// <summary>
    /// Window that lets the user view, edit, save, or delete the POP
    /// user-defined field values attached to an already-posted purchase
    /// receipt or return.
    /// </summary>
    public partial class EditPOPUserDefined : DexUIForm
    {
        private Int16 _caller;

        /// <summary>
        /// Identifies which GP window opened this form:
        /// 1 = POP Inquiry Receivings Entry, 2 = POP Receivings Entry,
        /// 3 = POP Returns Entry, 4 = POP Inquiry Returns Entry.
        /// <see cref="PopulateForm"/> uses this to decide which window to
        /// read the receipt number from.
        /// </summary>
        public Int16 Caller
        {
            get { return _caller; }
            set { _caller = value; }
        }

        /// <summary>
        /// Builds the form, applies the site-specific user-defined field
        /// captions from POP_Setup, and loads the allowed values for the
        /// five list fields from POP_UserDefined_Setup.
        /// </summary>
        public EditPOPUserDefined()
        {
            InitializeComponent();

            TableError err;
            string[] UserDefLabels;
            string[] UserDefSetups;

            // Get the user-defined label captions from POP_Setup and apply
            // them to the window labels. On any table error the designer
            // defaults are left in place.
            err = DataAccessHelper.GetPOPSetup(out UserDefLabels);
            if (err == TableError.NoError)
            {
                lblUserDef1.Text = UserDefLabels[0];
                lblUserDef2.Text = UserDefLabels[1];
                lblUserDef3.Text = UserDefLabels[2];
                lblUserDef4.Text = UserDefLabels[3];
                lblUserDef5.Text = UserDefLabels[4];
                lblUserDef6.Text = UserDefLabels[5];
                lblUserDef7.Text = UserDefLabels[6];
                lblUserDef8.Text = UserDefLabels[7];
                lblUserDef9.Text = UserDefLabels[8];
                lblUserDef10.Text = UserDefLabels[9];
                lblUserDef11.Text = UserDefLabels[10];
                lblUserDef12.Text = UserDefLabels[11];
                lblUserDef13.Text = UserDefLabels[12];
                lblUserDef14.Text = UserDefLabels[13];
                lblUserDef15.Text = UserDefLabels[14];
                lblUserDef16.Text = UserDefLabels[15];
                lblUserDef17.Text = UserDefLabels[16];
                lblUserDef18.Text = UserDefLabels[17];
                lblUserDef19.Text = UserDefLabels[18];
                lblUserDef20.Text = UserDefLabels[19];
                lblUserDef21.Text = UserDefLabels[20];
                lblUserDef22.Text = UserDefLabels[21];
                lblUserDef23.Text = UserDefLabels[22];
                lblUserDef24.Text = UserDefLabels[23];
                lblUserDef25.Text = UserDefLabels[24];
                lblUserDef26.Text = UserDefLabels[25];
                lblUserDef27.Text = UserDefLabels[26];
                lblUserDef28.Text = UserDefLabels[27];
                lblUserDef29.Text = UserDefLabels[28];
                lblUserDef30.Text = UserDefLabels[29];
                lblUserDef31.Text = UserDefLabels[30];
                lblUserDef32.Text = UserDefLabels[31];
                lblUserDef33.Text = UserDefLabels[32];
                lblUserDef34.Text = UserDefLabels[33];
                lblUserDef35.Text = UserDefLabels[34];
            }

            // Load the allowed values for each of the five user-defined list
            // fields from POP_UserDefined_Setup. An empty item is always
            // added first so the user can clear a previous selection.
            err = DataAccessHelper.GetPOPUserDefinedSetup(1, out UserDefSetups);
            cboUserDefList1.Items.Add("");
            if (err == TableError.NoError)
            {
                foreach (string UserDefSetup in UserDefSetups)
                {
                    cboUserDefList1.Items.Add(UserDefSetup);
                }
            }

            err = DataAccessHelper.GetPOPUserDefinedSetup(2, out UserDefSetups);
            cboUserDefList2.Items.Add("");
            if (err == TableError.NoError)
            {
                foreach (string UserDefSetup in UserDefSetups)
                {
                    cboUserDefList2.Items.Add(UserDefSetup);
                }
            }

            err = DataAccessHelper.GetPOPUserDefinedSetup(3, out UserDefSetups);
            cboUserDefList3.Items.Add("");
            if (err == TableError.NoError)
            {
                foreach (string UserDefSetup in UserDefSetups)
                {
                    cboUserDefList3.Items.Add(UserDefSetup);
                }
            }

            err = DataAccessHelper.GetPOPUserDefinedSetup(4, out UserDefSetups);
            cboUserDefList4.Items.Add("");
            if (err == TableError.NoError)
            {
                foreach (string UserDefSetup in UserDefSetups)
                {
                    cboUserDefList4.Items.Add(UserDefSetup);
                }
            }

            err = DataAccessHelper.GetPOPUserDefinedSetup(5, out UserDefSetups);
            cboUserDefList5.Items.Add("");
            if (err == TableError.NoError)
            {
                foreach (string UserDefSetup in UserDefSetups)
                {
                    cboUserDefList5.Items.Add(UserDefSetup);
                }
            }
        }

        /// <summary>
        /// Keeps the form alive between uses: unless the add-in has flagged a
        /// real shutdown, closing only hides the window so it can be reshown
        /// quickly with its state intact.
        /// </summary>
        private void EditPOPUserDefined_FormClosing(object sender, FormClosingEventArgs e)
        {
            // Is the form being closed, or just hidden?
            if (GPAddIn.CloseEditPOPUserDefinedForm == false)
            {
                // Do not allow the form to completely close and be removed
                // from memory. Just hide the form and cancel the close
                // operation.
                this.Hide();
                e.Cancel = true;
            }
        }

        /// <summary>
        /// Reads the receipt number from whichever GP window opened this form
        /// (see <see cref="Caller"/>) and fills the controls with the stored
        /// user-defined values for that receipt. A NotFound result means the
        /// receipt has no saved values yet; the helper's defaults are shown.
        /// </summary>
        void PopulateForm()
        {
            TableError err;
            string[] UserDefinedStrings;
            DateTime[] UserDefinedDates;

            switch (_caller)
            {
                case 1:
                    {
                        txtPOPNumber.Text = GPAddIn.POPInquiryReceivingsEntryWindow.PopReceiptNumber.Value;
                        break;
                    }
                case 2:
                    {
                        txtPOPNumber.Text = GPAddIn.POPReceivingEntryWindow.PopReceiptNumber.Value;
                        break;
                    }
                case 3:
                    {
                        txtPOPNumber.Text = GPAddIn.PORReturnsEntryWindow.PopReceiptNumber.Value;
                        break;
                    }
                case 4:
                    {
                        txtPOPNumber.Text = GPAddIn.PORInquiryReturnsEntryWindow.PopReceiptNumber.Value;
                        break;
                    }
            }

            err = DataAccessHelper.GetPOPUserDefinedValues(txtPOPNumber.Text, out UserDefinedStrings, out UserDefinedDates);

            // Original code used the non-short-circuit bitwise '|' here;
            // '||' is the idiomatic (and short-circuiting) boolean operator.
            if (err == TableError.NoError || err == TableError.NotFound)
            {
                // If the stored value is not one of the configured list
                // entries, SelectedItem stays null (nothing is selected).
                cboUserDefList1.SelectedItem = UserDefinedStrings[0];
                cboUserDefList2.SelectedItem = UserDefinedStrings[1];
                cboUserDefList3.SelectedItem = UserDefinedStrings[2];
                cboUserDefList4.SelectedItem = UserDefinedStrings[3];
                cboUserDefList5.SelectedItem = UserDefinedStrings[4];

                txtUserDefText1.Text = UserDefinedStrings[5];
                txtUserDefText2.Text = UserDefinedStrings[6];
                txtUserDefText3.Text = UserDefinedStrings[7];
                txtUserDefText4.Text = UserDefinedStrings[8];
                txtUserDefText5.Text = UserDefinedStrings[9];
                txtUserDefText6.Text = UserDefinedStrings[10];
                txtUserDefText7.Text = UserDefinedStrings[11];
                txtUserDefText8.Text = UserDefinedStrings[12];
                txtUserDefText9.Text = UserDefinedStrings[13];
                txtUserDefText10.Text = UserDefinedStrings[14];

                dtUserDefDate1.Value = UserDefinedDates[0];
                dtUserDefDate2.Value = UserDefinedDates[1];
                dtUserDefDate3.Value = UserDefinedDates[2];
                dtUserDefDate4.Value = UserDefinedDates[3];
                dtUserDefDate5.Value = UserDefinedDates[4];
                dtUserDefDate6.Value = UserDefinedDates[5];
                dtUserDefDate7.Value = UserDefinedDates[6];
                dtUserDefDate8.Value = UserDefinedDates[7];
                dtUserDefDate9.Value = UserDefinedDates[8];
                dtUserDefDate10.Value = UserDefinedDates[9];
                dtUserDefDate11.Value = UserDefinedDates[10];
                dtUserDefDate12.Value = UserDefinedDates[11];
                dtUserDefDate13.Value = UserDefinedDates[12];
                dtUserDefDate14.Value = UserDefinedDates[13];
                dtUserDefDate15.Value = UserDefinedDates[14];
                dtUserDefDate16.Value = UserDefinedDates[15];
                dtUserDefDate17.Value = UserDefinedDates[16];
                dtUserDefDate18.Value = UserDefinedDates[17];
                dtUserDefDate19.Value = UserDefinedDates[18];
                dtUserDefDate20.Value = UserDefinedDates[19];
            }
        }

        private void EditPOPUserDefined_Load(object sender, EventArgs e)
        {
            PopulateForm();
        }

        // Also refresh on activation: since closing only hides the form, it
        // must reload whenever it is brought back to the foreground.
        private void EditPOPUserDefined_Activated(object sender, EventArgs e)
        {
            PopulateForm();
        }

        /// <summary>
        /// Collects the control values and writes them to the user-defined
        /// values table for the current receipt. Shows a Dexterity error
        /// dialog on failure, closes the form on success.
        /// </summary>
        private void cmdSave_Click(object sender, EventArgs e)
        {
            TableError err;
            string POPReceipt;
            string[] UserDefinedStrings = new string[15];
            DateTime[] UserDefinedDates = new DateTime[20];

            // 1900-01-01 is the sentinel "empty date" used by the table;
            // every slot is pre-filled with it instead of repeating the
            // literal twenty times.
            for (int i = 0; i < UserDefinedDates.Length; i++)
            {
                UserDefinedDates[i] = new DateTime(1900, 1, 1);
            }

            POPReceipt = txtPOPNumber.Text;

            // SelectedItem is null when nothing is selected (including when
            // PopulateForm could not match a stored value to a list entry).
            // The original code called .ToString() on it unconditionally and
            // crashed with a NullReferenceException; save an empty string in
            // that case instead.
            UserDefinedStrings[0] = cboUserDefList1.SelectedItem == null ? "" : cboUserDefList1.SelectedItem.ToString();
            UserDefinedStrings[1] = cboUserDefList2.SelectedItem == null ? "" : cboUserDefList2.SelectedItem.ToString();
            UserDefinedStrings[2] = cboUserDefList3.SelectedItem == null ? "" : cboUserDefList3.SelectedItem.ToString();
            UserDefinedStrings[3] = cboUserDefList4.SelectedItem == null ? "" : cboUserDefList4.SelectedItem.ToString();
            UserDefinedStrings[4] = cboUserDefList5.SelectedItem == null ? "" : cboUserDefList5.SelectedItem.ToString();

            UserDefinedStrings[5] = txtUserDefText1.Text;
            UserDefinedStrings[6] = txtUserDefText2.Text;
            UserDefinedStrings[7] = txtUserDefText3.Text;
            UserDefinedStrings[8] = txtUserDefText4.Text;
            UserDefinedStrings[9] = txtUserDefText5.Text;
            UserDefinedStrings[10] = txtUserDefText6.Text;
            UserDefinedStrings[11] = txtUserDefText7.Text;
            UserDefinedStrings[12] = txtUserDefText8.Text;
            UserDefinedStrings[13] = txtUserDefText9.Text;
            UserDefinedStrings[14] = txtUserDefText10.Text;

            UserDefinedDates[0] = dtUserDefDate1.Value;
            UserDefinedDates[1] = dtUserDefDate2.Value;
            UserDefinedDates[2] = dtUserDefDate3.Value;
            UserDefinedDates[3] = dtUserDefDate4.Value;
            UserDefinedDates[4] = dtUserDefDate5.Value;
            UserDefinedDates[5] = dtUserDefDate6.Value;
            UserDefinedDates[6] = dtUserDefDate7.Value;
            UserDefinedDates[7] = dtUserDefDate8.Value;
            UserDefinedDates[8] = dtUserDefDate9.Value;
            UserDefinedDates[9] = dtUserDefDate10.Value;
            UserDefinedDates[10] = dtUserDefDate11.Value;
            UserDefinedDates[11] = dtUserDefDate12.Value;
            UserDefinedDates[12] = dtUserDefDate13.Value;
            UserDefinedDates[13] = dtUserDefDate14.Value;
            UserDefinedDates[14] = dtUserDefDate15.Value;
            UserDefinedDates[15] = dtUserDefDate16.Value;
            UserDefinedDates[16] = dtUserDefDate17.Value;
            UserDefinedDates[17] = dtUserDefDate18.Value;
            UserDefinedDates[18] = dtUserDefDate19.Value;
            UserDefinedDates[19] = dtUserDefDate20.Value;

            err = DataAccessHelper.SetPOPUserDefinedValues(POPReceipt, UserDefinedStrings, UserDefinedDates);
            if (err != TableError.NoError)
            {
                // Not saved - surface the table error through Dexterity.
                Microsoft.Dexterity.Applications.Dynamics.Forms.SyVisualStudioHelper.Functions.DexError.Invoke(err.ToString());
            }
            else
            {
                // Saved successfully.
                this.Close();
            }
        }

        /// <summary>
        /// Deletes the user-defined values record for the current receipt.
        /// Shows a Dexterity error dialog on failure, closes the form on
        /// success.
        /// </summary>
        private void cmdDelete_Click(object sender, EventArgs e)
        {
            TableError err;

            err = DataAccessHelper.DeletePOPUsrDefinedValues(txtPOPNumber.Text);
            if (err != TableError.NoError)
            {
                // Some error - record was not deleted.
                Microsoft.Dexterity.Applications.Dynamics.Forms.SyVisualStudioHelper.Functions.DexError.Invoke(err.ToString());
            }
            else
            {
                // Deleted successfully.
                this.Close();
            }
        }

        private void cmdCancel_Click(object sender, EventArgs e)
        {
            // Close the form (FormClosing may turn this into a Hide).
            this.Close();
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Globalization;
using System.IdentityModel.Claims;
using System.IdentityModel.Policy;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.Net;
using System.Net.Security;
using System.Runtime;
using System.Security.Authentication;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.ServiceModel.Channels;
using System.ServiceModel.Diagnostics;
using System.ServiceModel.Security.Tokens;
using System.Text;
using System.Threading;

namespace System.ServiceModel.Security
{
    // Helpers for the ProtectionLevel enum; strength order is None < Sign < EncryptAndSign.
    public static class ProtectionLevelHelper
    {
        // True only for the three defined ProtectionLevel members.
        public static bool IsDefined(ProtectionLevel value)
        {
            return (value == ProtectionLevel.None || value == ProtectionLevel.Sign || value == ProtectionLevel.EncryptAndSign);
        }

        // Throws InvalidEnumArgumentException (via the project's throw helper) for undefined values.
        public static void Validate(ProtectionLevel value)
        {
            if (!IsDefined(value))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("value", (int)value, typeof(ProtectionLevel)));
            }
        }

        // Strictly stronger: EncryptAndSign > Sign > None.
        public static bool IsStronger(ProtectionLevel v1, ProtectionLevel v2)
        {
            return ((v1 == ProtectionLevel.EncryptAndSign && v2 != ProtectionLevel.EncryptAndSign) ||
                    (v1 == ProtectionLevel.Sign && v2 == ProtectionLevel.None));
        }

        // Stronger-or-equal under the same ordering.
        public static bool IsStrongerOrEqual(ProtectionLevel v1, ProtectionLevel v2)
        {
            return (v1 == ProtectionLevel.EncryptAndSign ||
                    (v1 == ProtectionLevel.Sign && v2 != ProtectionLevel.EncryptAndSign));
        }

        // Returns the stronger of the two levels.
        public static ProtectionLevel Max(ProtectionLevel v1, ProtectionLevel v2)
        {
            return IsStronger(v1, v2) ?
v1 : v2;
        }

        // Maps a nullable ProtectionLevel to a stable ordinal:
        // null -> 1, None -> 2, Sign -> 3, EncryptAndSign -> 4.
        public static int GetOrdinal(Nullable<ProtectionLevel> p)
        {
            if (p.HasValue)
            {
                switch ((ProtectionLevel)p)
                {
                    default:
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("p", (int)p, typeof(ProtectionLevel)));
                    case ProtectionLevel.None:
                        return 2;
                    case ProtectionLevel.Sign:
                        return 3;
                    case ProtectionLevel.EncryptAndSign:
                        return 4;
                }
            }
            return 1;
        }
    }

    internal static class SslProtocolsHelper
    {
        // Aggregate mask of every defined SslProtocols flag, computed once instead of
        // re-enumerating Enum.GetValues (which allocates and boxes) on every IsDefined call.
        private static readonly SslProtocols s_allProtocols = ComputeAllProtocols();

        // Builds the union of all defined SslProtocols values.
        private static SslProtocols ComputeAllProtocols()
        {
            SslProtocols allValues = SslProtocols.None;
            foreach (var protocol in Enum.GetValues(typeof(SslProtocols)))
            {
                allValues |= (SslProtocols)protocol;
            }
            return allValues;
        }

        // True when the value contains only defined SslProtocols flags (flags enum check).
        internal static bool IsDefined(SslProtocols value)
        {
            return (value & s_allProtocols) == value;
        }

        // Throws InvalidEnumArgumentException for values with undefined flag bits.
        internal static void Validate(SslProtocols value)
        {
            if (!IsDefined(value))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("value", (int)value, typeof(SslProtocols)));
            }
        }
    }

    // Helpers for TokenImpersonationLevel: validation, display names, and strength ordering.
    internal static class TokenImpersonationLevelHelper
    {
        internal static bool IsDefined(TokenImpersonationLevel value)
        {
            return (value == TokenImpersonationLevel.None ||
                value == TokenImpersonationLevel.Anonymous ||
                value == TokenImpersonationLevel.Identification ||
                value == TokenImpersonationLevel.Impersonation ||
                value == TokenImpersonationLevel.Delegation);
        }

        internal static void Validate(TokenImpersonationLevel value)
        {
            if (!IsDefined(value))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("value", (int)value, typeof(TokenImpersonationLevel)));
            }
        }

        // Impersonation levels ordered weakest-to-strongest; array index == relative strength.
        private static TokenImpersonationLevel[] s_TokenImpersonationLevelOrder = new TokenImpersonationLevel[]
            {
                TokenImpersonationLevel.None,
                TokenImpersonationLevel.Anonymous,
                TokenImpersonationLevel.Identification,
                TokenImpersonationLevel.Impersonation,
                TokenImpersonationLevel.Delegation
            };

        // Returns the lowercase name for the level; throws for undefined values.
        internal static string ToString(TokenImpersonationLevel impersonationLevel)
        {
            if (impersonationLevel == TokenImpersonationLevel.Identification)
            {
                return "identification";
            }
            if (impersonationLevel ==
TokenImpersonationLevel.None) { return "none"; } if (impersonationLevel == TokenImpersonationLevel.Anonymous) { return "anonymous"; } if (impersonationLevel == TokenImpersonationLevel.Impersonation) { return "impersonation"; } if (impersonationLevel == TokenImpersonationLevel.Delegation) { return "delegation"; } Fx.Assert("unknown token impersonation level"); throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("impersonationLevel", (int)impersonationLevel, typeof(TokenImpersonationLevel))); } internal static bool IsGreaterOrEqual(TokenImpersonationLevel x, TokenImpersonationLevel y) { Validate(x); Validate(y); if (x == y) return true; int px = 0; int py = 0; for (int i = 0; i < s_TokenImpersonationLevelOrder.Length; i++) { if (x == s_TokenImpersonationLevelOrder[i]) px = i; if (y == s_TokenImpersonationLevelOrder[i]) py = i; } return (px > py); } internal static int Compare(TokenImpersonationLevel x, TokenImpersonationLevel y) { int result = 0; if (x != y) { switch (x) { case TokenImpersonationLevel.Identification: result = -1; break; case TokenImpersonationLevel.Impersonation: switch (y) { case TokenImpersonationLevel.Identification: result = 1; break; case TokenImpersonationLevel.Delegation: result = -1; break; default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("y", (int)y, typeof(TokenImpersonationLevel))); } break; case TokenImpersonationLevel.Delegation: result = 1; break; default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidEnumArgumentException("x", (int)x, typeof(TokenImpersonationLevel))); } } return result; } } internal static class SecurityUtils { public const string Principal = "Principal"; public const string Identities = "Identities"; private static IIdentity s_anonymousIdentity; private static X509SecurityTokenAuthenticator s_nonValidatingX509Authenticator; internal static X509SecurityTokenAuthenticator NonValidatingX509Authenticator { get 
            {
                // Lazy init; X509CertificateValidator.None skips all cert validation.
                if (s_nonValidatingX509Authenticator == null)
                {
                    s_nonValidatingX509Authenticator = new X509SecurityTokenAuthenticator(X509CertificateValidator.None);
                }
                return s_nonValidatingX509Authenticator;
            }
        }

        // Lazily created shared identity with an empty name.
        internal static IIdentity AnonymousIdentity
        {
            get
            {
                if (s_anonymousIdentity == null)
                {
                    s_anonymousIdentity = CreateIdentity(string.Empty);
                }
                return s_anonymousIdentity;
            }
        }

        public static DateTime MaxUtcDateTime
        {
            get
            {
                // + and - TimeSpan.TicksPerDay is to compensate the DateTime.ParseExact (to localtime) overflow.
                return new DateTime(DateTime.MaxValue.Ticks - TimeSpan.TicksPerDay, DateTimeKind.Utc);
            }
        }

        public static DateTime MinUtcDateTime
        {
            get
            {
                // + and - TimeSpan.TicksPerDay is to compensate the DateTime.ParseExact (to localtime) overflow.
                return new DateTime(DateTime.MinValue.Ticks + TimeSpan.TicksPerDay, DateTimeKind.Utc);
            }
        }

        internal static IIdentity CreateIdentity(string name)
        {
            return new GenericIdentity(name);
        }

        internal static EndpointIdentity CreateWindowsIdentity()
        {
            return CreateWindowsIdentity(false);
        }

        // Builds a UPN identity ("user@domain", or just "user" when no domain) from explicit
        // credentials; null/default credentials fall back to the current Windows identity.
        internal static EndpointIdentity CreateWindowsIdentity(NetworkCredential serverCredential)
        {
            if (serverCredential != null && !NetworkCredentialHelper.IsDefault(serverCredential))
            {
                string upn;
                if (serverCredential.Domain != null && serverCredential.Domain.Length > 0)
                {
                    upn = serverCredential.UserName + "@" + serverCredential.Domain;
                }
                else
                {
                    upn = serverCredential.UserName;
                }
                return EndpointIdentity.CreateUpnIdentity(upn);
            }
            return CreateWindowsIdentity();
        }

#if FEATURE_NETNATIVE
        // UWP build: only SPN identities are supported (no current-user lookup available).
        internal static EndpointIdentity CreateWindowsIdentity(bool spnOnly)
        {
            EndpointIdentity identity = null;
            if (spnOnly)
            {
                identity = EndpointIdentity.CreateSpnIdentity(String.Format(CultureInfo.InvariantCulture, "host/{0}", DnsCache.MachineName));
            }
            else
            {
                throw ExceptionHelper.PlatformNotSupported();
            }
            return identity;
        }
#else
        // True for LocalSystem/NetworkService/LocalService and IIS apppool identities.
        private static bool IsSystemAccount(WindowsIdentity self)
        {
            SecurityIdentifier sid = self.User;
            if (sid == null)
            {
                return false;
            }
            // S-1-5-82 is the prefix for the sid that represents the identity that IIS 7.5 Apppool thread runs under.
            return (sid.IsWellKnown(WellKnownSidType.LocalSystemSid) ||
                sid.IsWellKnown(WellKnownSidType.NetworkServiceSid) ||
                sid.IsWellKnown(WellKnownSidType.LocalServiceSid) ||
                self.User.Value.StartsWith("S-1-5-82", StringComparison.OrdinalIgnoreCase));
        }

        // SPN identity for system/apppool accounts (or when forced), UPN identity otherwise.
        internal static EndpointIdentity CreateWindowsIdentity(bool spnOnly)
        {
            EndpointIdentity identity = null;
            using (WindowsIdentity self = WindowsIdentity.GetCurrent())
            {
                bool isSystemAccount = IsSystemAccount(self);
                if (spnOnly || isSystemAccount)
                {
                    identity = EndpointIdentity.CreateSpnIdentity(String.Format(CultureInfo.InvariantCulture, "host/{0}", DnsCache.MachineName));
                }
                else
                {
                    // Save windowsIdentity for delay lookup
                    identity = new UpnEndpointIdentity(CloneWindowsIdentityIfNecessary(self));
                }
            }
            return identity;
        }

        internal static WindowsIdentity CloneWindowsIdentityIfNecessary(WindowsIdentity wid)
        {
            return CloneWindowsIdentityIfNecessary(wid, null);
        }

        // Re-creates the identity from its token when one is available; otherwise returns the input.
        internal static WindowsIdentity CloneWindowsIdentityIfNecessary(WindowsIdentity wid, string authType)
        {
            if (wid != null)
            {
                IntPtr token = UnsafeGetWindowsIdentityToken(wid);
                if (token != IntPtr.Zero)
                {
                    return UnsafeCreateWindowsIdentityFromToken(token, authType);
                }
            }
            return wid;
        }

        // NOTE(review): always throws on this platform, so CloneWindowsIdentityIfNecessary
        // can never take the clone path here — confirm this is the intended stub.
        private static IntPtr UnsafeGetWindowsIdentityToken(WindowsIdentity wid)
        {
            throw ExceptionHelper.PlatformNotSupported("UnsafeGetWindowsIdentityToken is not supported");
        }

        private static WindowsIdentity UnsafeCreateWindowsIdentityFromToken(IntPtr token, string authType)
        {
            if (authType != null)
                return new WindowsIdentity(token, authType);
            return new WindowsIdentity(token);
        }
#endif // FEATURE_NETNATIVE

        // Derives the SPN for the endpoint: Spn/Upn claims verbatim, Dns claims as "host/<name>";
        // throws MessageSecurityException when no usable claim exists.
        internal static string GetSpnFromIdentity(EndpointIdentity identity, EndpointAddress target)
        {
            bool foundSpn = false;
            string spn = null;
            if (identity != null)
            {
                if (ClaimTypes.Spn.Equals(identity.IdentityClaim.ClaimType))
                {
                    spn = (string)identity.IdentityClaim.Resource;
                    foundSpn = true;
                }
                else if (ClaimTypes.Upn.Equals(identity.IdentityClaim.ClaimType))
                {
                    spn = (string)identity.IdentityClaim.Resource;
                    foundSpn = true;
                }
                else if (ClaimTypes.Dns.Equals(identity.IdentityClaim.ClaimType))
                {
                    spn = string.Format(CultureInfo.InvariantCulture, "host/{0}", (string)identity.IdentityClaim.Resource);
                    foundSpn = true;
                }
            }
            if (!foundSpn)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MessageSecurityException(SR.Format(SR.CannotDetermineSPNBasedOnAddress, target)));
            }
            return spn;
        }

        internal static string GetSpnFromTarget(EndpointAddress target)
        {
            if (target == null)
            {
                throw Fx.AssertAndThrow("target should not be null - expecting an EndpointAddress");
            }
            return string.Format(CultureInfo.InvariantCulture, "host/{0}", target.Uri.DnsSafeHost);
        }

        // True when any of the token's security keys supports the algorithm.
        internal static bool IsSupportedAlgorithm(string algorithm, SecurityToken token)
        {
            if (token.SecurityKeys == null)
            {
                return false;
            }
            for (int i = 0; i < token.SecurityKeys.Count; ++i)
            {
                if (token.SecurityKeys[i].IsSupportedAlgorithm(algorithm))
                {
                    return true;
                }
            }
            return false;
        }

        internal static Claim GetPrimaryIdentityClaim(ReadOnlyCollection<IAuthorizationPolicy> authorizationPolicies)
        {
            return GetPrimaryIdentityClaim(AuthorizationContext.CreateDefaultAuthorizationContext(authorizationPolicies));
        }

        // First Identity-right claim found in any claim set, or null.
        internal static Claim GetPrimaryIdentityClaim(AuthorizationContext authContext)
        {
            if (authContext != null)
            {
                for (int i = 0; i < authContext.ClaimSets.Count; ++i)
                {
                    ClaimSet claimSet = authContext.ClaimSets[i];
                    foreach (Claim claim in claimSet.FindClaims(null, Rights.Identity))
                    {
                        return claim;
                    }
                }
            }
            return null;
        }

        internal static string GenerateId()
        {
            return SecurityUniqueId.Create().Value;
        }

        // Names containing '@' or '\' are treated as UPNs, everything else as SPNs;
        // returns an UnconditionalPolicy carrying the identity + principal claims.
        internal static ReadOnlyCollection<IAuthorizationPolicy> CreatePrincipalNameAuthorizationPolicies(string principalName)
        {
            if (principalName == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("principalName");

            Claim identityClaim;
            Claim primaryPrincipal;
            if (principalName.Contains("@") || principalName.Contains(@"\"))
            {
                identityClaim = new Claim(ClaimTypes.Upn, principalName, Rights.Identity);
#if FEATURE_CORECLR
                primaryPrincipal = Claim.CreateUpnClaim(principalName);
#else
                throw ExceptionHelper.PlatformNotSupported("UPN claim not supported on UWP");
#endif // FEATURE_CORECLR
            }
            else
            {
                identityClaim = new Claim(ClaimTypes.Spn, principalName, Rights.Identity);
                primaryPrincipal = Claim.CreateSpnClaim(principalName);
            }
            List<Claim> claims = new List<Claim>(2);
            claims.Add(identityClaim);
            claims.Add(primaryPrincipal);

            List<IAuthorizationPolicy> policies = new List<IAuthorizationPolicy>(1);
            policies.Add(new UnconditionalPolicy(SecurityUtils.CreateIdentity(principalName), new DefaultClaimSet(ClaimSet.Anonymous, claims)));
            return policies.AsReadOnly();
        }

        // Builds a comma-separated display string of the identities in the context:
        // Windows identity names, X509 subject info, or IIdentity entries stashed in
        // authContext.Properties["Identities"] as a fallback.
        internal static string GetIdentityNamesFromContext(AuthorizationContext authContext)
        {
            if (authContext == null)
                return String.Empty;

            StringBuilder str = new StringBuilder(256);
            for (int i = 0; i < authContext.ClaimSets.Count; ++i)
            {
                ClaimSet claimSet = authContext.ClaimSets[i];

                // Windows
                WindowsClaimSet windows = claimSet as WindowsClaimSet;
                if (windows != null)
                {
#if FEATURE_NETNATIVE
                    throw ExceptionHelper.PlatformNotSupported("Windows Stream Security not yet supported on UWP");
#else
                    if (str.Length > 0)
                        str.Append(", ");
                    AppendIdentityName(str, windows.WindowsIdentity);
#endif // FEATURE_NETNATIVE
                }
                else
                {
                    // X509
                    X509CertificateClaimSet x509 = claimSet as X509CertificateClaimSet;
                    if (x509 != null)
                    {
                        if (str.Length > 0)
                            str.Append(", ");
                        AppendCertificateIdentityName(str, x509.X509Certificate);
                    }
                }
            }

            if (str.Length <= 0)
            {
                List<IIdentity> identities = null;
                object obj;
                if (authContext.Properties.TryGetValue(SecurityUtils.Identities, out obj))
                {
                    identities = obj as List<IIdentity>;
                }
                if (identities != null)
                {
                    for (int i = 0; i < identities.Count; ++i)
                    {
                        IIdentity identity = identities[i];
                        if (identity != null)
                        {
                            if (str.Length > 0)
                                str.Append(", ");
                            AppendIdentityName(str, identity);
                        }
                    }
                }
            }
            return
                str.Length <= 0 ? String.Empty : str.ToString();
        }

        // Appends a display name for the certificate: first non-empty of subject name,
        // DNS name, simple name, email, or UPN; then "; <thumbprint>".
        internal static void AppendCertificateIdentityName(StringBuilder str, X509Certificate2 certificate)
        {
            string value = certificate.SubjectName.Name;
            if (String.IsNullOrEmpty(value))
            {
                value = certificate.GetNameInfo(X509NameType.DnsName, false);
                if (String.IsNullOrEmpty(value))
                {
                    value = certificate.GetNameInfo(X509NameType.SimpleName, false);
                    if (String.IsNullOrEmpty(value))
                    {
                        value = certificate.GetNameInfo(X509NameType.EmailName, false);
                        if (String.IsNullOrEmpty(value))
                        {
                            value = certificate.GetNameInfo(X509NameType.UpnName, false);
                        }
                    }
                }
            }
            // Same format as X509Identity
            str.Append(String.IsNullOrEmpty(value) ? "<x509>" : value);
            str.Append("; ");
            str.Append(certificate.Thumbprint);
        }

        // Appends identity.Name (or "<null>") plus "; <sid>" for Windows identities.
        internal static void AppendIdentityName(StringBuilder str, IIdentity identity)
        {
            string name = null;
            try
            {
                name = identity.Name;
            }
#pragma warning suppress 56500
            catch (Exception e)
            {
                if (Fx.IsFatal(e))
                {
                    throw;
                }
                // suppress exception, this is just info.
            }
            str.Append(String.IsNullOrEmpty(name) ? "<null>" : name);
#if FEATURE_NETNATIVE
            // NegotiateStream
            throw ExceptionHelper.PlatformNotSupported("Windows Stream Security not yet supported on UWP");
#else
            WindowsIdentity windows = identity as WindowsIdentity;
            if (windows != null)
            {
                if (windows.User != null)
                {
                    str.Append("; ");
                    str.Append(windows.User.ToString());
                }
            }
            else
            {
                WindowsSidIdentity sid = identity as WindowsSidIdentity;
                if (sid != null)
                {
                    str.Append("; ");
                    str.Append(sid.SecurityIdentifier.ToString());
                }
            }
#endif
        }

        // The Open/Close/Abort*IfRequired helpers only act when the token object also
        // implements ICommunicationObject; plain objects are left alone (or disposed on close).
        internal static void OpenTokenProviderIfRequired(SecurityTokenProvider tokenProvider, TimeSpan timeout)
        {
            OpenCommunicationObject(tokenProvider as ICommunicationObject, timeout);
        }

        internal static void CloseTokenProviderIfRequired(SecurityTokenProvider tokenProvider, TimeSpan timeout)
        {
            CloseCommunicationObject(tokenProvider, false, timeout);
        }

        internal static void AbortTokenProviderIfRequired(SecurityTokenProvider tokenProvider)
        {
            CloseCommunicationObject(tokenProvider, true, TimeSpan.Zero);
        }

        internal static void OpenTokenAuthenticatorIfRequired(SecurityTokenAuthenticator tokenAuthenticator, TimeSpan timeout)
        {
            OpenCommunicationObject(tokenAuthenticator as ICommunicationObject, timeout);
        }

        internal static void CloseTokenAuthenticatorIfRequired(SecurityTokenAuthenticator tokenAuthenticator, TimeSpan timeout)
        {
            CloseTokenAuthenticatorIfRequired(tokenAuthenticator, false, timeout);
        }

        internal static void CloseTokenAuthenticatorIfRequired(SecurityTokenAuthenticator tokenAuthenticator, bool aborted, TimeSpan timeout)
        {
            CloseCommunicationObject(tokenAuthenticator, aborted, timeout);
        }

        internal static void AbortTokenAuthenticatorIfRequired(SecurityTokenAuthenticator tokenAuthenticator)
        {
            CloseCommunicationObject(tokenAuthenticator, true, TimeSpan.Zero);
        }

        private static void OpenCommunicationObject(ICommunicationObject obj, TimeSpan timeout)
        {
            if (obj != null)
                obj.Open(timeout);
        }

        // Abort (swallowing CommunicationException) or Close communication objects;
        // non-communication IDisposables are disposed instead.
        private static void CloseCommunicationObject(Object obj, bool aborted, TimeSpan timeout)
        {
            if (obj != null)
            {
                ICommunicationObject co = obj as ICommunicationObject;
                if (co != null)
                {
                    if (aborted)
                    {
                        try
                        {
                            co.Abort();
                        }
                        catch (CommunicationException)
                        {
                        }
                    }
                    else
                    {
                        co.Close(timeout);
                    }
                }
                else if (obj is IDisposable)
                {
                    ((IDisposable)obj).Dispose();
                }
            }
        }

        internal static SecurityStandardsManager CreateSecurityStandardsManager(MessageSecurityVersion securityVersion, SecurityTokenManager tokenManager)
        {
            SecurityTokenSerializer tokenSerializer = tokenManager.CreateSecurityTokenSerializer(securityVersion.SecurityTokenVersion);
            return new SecurityStandardsManager(securityVersion, tokenSerializer);
        }

        // Maps each supported MessageSecurityTokenVersion to its full MessageSecurityVersion;
        // anything else is NotSupported.
        internal static SecurityStandardsManager CreateSecurityStandardsManager(SecurityTokenRequirement requirement, SecurityTokenManager tokenManager)
        {
            MessageSecurityTokenVersion securityVersion = (MessageSecurityTokenVersion)requirement.GetProperty<MessageSecurityTokenVersion>(ServiceModelSecurityTokenRequirement.MessageSecurityVersionProperty);
            if (securityVersion == MessageSecurityTokenVersion.WSSecurity10WSTrustFebruary2005WSSecureConversationFebruary2005BasicSecurityProfile10)
                return CreateSecurityStandardsManager(MessageSecurityVersion.WSSecurity10WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11BasicSecurityProfile10, tokenManager);
            if (securityVersion == MessageSecurityTokenVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005)
                return CreateSecurityStandardsManager(MessageSecurityVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11, tokenManager);
            if (securityVersion == MessageSecurityTokenVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005BasicSecurityProfile10)
                return CreateSecurityStandardsManager(MessageSecurityVersion.WSSecurity11WSTrustFebruary2005WSSecureConversationFebruary2005WSSecurityPolicy11BasicSecurityProfile10, tokenManager);
            if (securityVersion == MessageSecurityTokenVersion.WSSecurity10WSTrust13WSSecureConversation13BasicSecurityProfile10)
                return CreateSecurityStandardsManager(MessageSecurityVersion.WSSecurity10WSTrust13WSSecureConversation13WSSecurityPolicy12BasicSecurityProfile10, tokenManager);
            if (securityVersion == MessageSecurityTokenVersion.WSSecurity11WSTrust13WSSecureConversation13)
                return CreateSecurityStandardsManager(MessageSecurityVersion.WSSecurity11WSTrust13WSSecureConversation13WSSecurityPolicy12, tokenManager);
            if (securityVersion == MessageSecurityTokenVersion.WSSecurity11WSTrust13WSSecureConversation13BasicSecurityProfile10)
                return CreateSecurityStandardsManager(MessageSecurityVersion.WSSecurity11WSTrust13WSSecureConversation13WSSecurityPolicy12BasicSecurityProfile10, tokenManager);
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException());
        }

        internal static SecurityStandardsManager CreateSecurityStandardsManager(MessageSecurityVersion securityVersion, SecurityTokenSerializer securityTokenSerializer)
        {
            if (securityVersion == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("securityVersion"));
            }
            if (securityTokenSerializer == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("securityTokenSerializer");
            }
            return new SecurityStandardsManager(securityVersion, securityTokenSerializer);
        }

        // Defensive copy of explicit credentials; null/default credentials returned as-is.
        internal static NetworkCredential GetNetworkCredentialsCopy(NetworkCredential networkCredential)
        {
            NetworkCredential result;
            if (networkCredential != null && !NetworkCredentialHelper.IsDefault(networkCredential))
            {
                result = new NetworkCredential(networkCredential.UserName, networkCredential.Password, networkCredential.Domain);
            }
            else
            {
                result = networkCredential;
            }
            return result;
        }

        internal static NetworkCredential GetNetworkCredentialOrDefault(NetworkCredential credential)
        {
            // Because CredentialCache.DefaultNetworkCredentials is not immutable, we dont use it in our OM. Instead we
            // use an empty NetworkCredential to denote the default credentials.
            if (NetworkCredentialHelper.IsNullOrEmpty(credential))
            {
                return CredentialCache.DefaultNetworkCredentials;
            }
            return credential;
        }

        // Appends "\0"-delimited auth info (sid or domain/user/password + levels) to inputString,
        // e.g. for building cache keys that distinguish credentials.
        internal static string AppendWindowsAuthenticationInfo(string inputString, NetworkCredential credential,
            AuthenticationLevel authenticationLevel, TokenImpersonationLevel impersonationLevel)
        {
            const string delimiter = "\0"; // nonprintable characters are invalid for SSPI Domain/UserName/Password
            if (NetworkCredentialHelper.IsDefault(credential))
            {
                string sid = NetworkCredentialHelper.GetCurrentUserIdAsString(credential);
                return string.Concat(inputString, delimiter,
                    sid, delimiter,
                    AuthenticationLevelHelper.ToString(authenticationLevel), delimiter,
                    TokenImpersonationLevelHelper.ToString(impersonationLevel));
            }
            return string.Concat(inputString, delimiter,
                credential.Domain, delimiter,
                credential.UserName, delimiter,
                credential.Password, delimiter,
                AuthenticationLevelHelper.ToString(authenticationLevel), delimiter,
                TokenImpersonationLevelHelper.ToString(impersonationLevel));
        }

        internal static class NetworkCredentialHelper
        {
            static internal bool IsNullOrEmpty(NetworkCredential credential)
            {
                return credential == null ||
                        (
                        string.IsNullOrEmpty(credential.UserName) &&
                        string.IsNullOrEmpty(credential.Domain) &&
                        string.IsNullOrEmpty(credential.Password)
                        );
            }

            static internal bool IsDefault(NetworkCredential credential)
            {
                return CredentialCache.DefaultNetworkCredentials.Equals(credential);
            }

            internal static string GetCurrentUserIdAsString(NetworkCredential credential)
            {
#if FEATURE_NETNATIVE
                // There's no way to retrieve the current logged in user Id in UWP apps
                // so returning a username which is very unlikely to be a real username;
                return "_______****currentUser****_______";
#else
                using (WindowsIdentity self = WindowsIdentity.GetCurrent())
                {
                    return self.User.Value;
                }
#endif
            }
        }

        internal static byte[] CloneBuffer(byte[] buffer)
        {
            byte[] copy = Fx.AllocateByteArray(buffer.Length);
            Buffer.BlockCopy(buffer, 0, copy, 0, buffer.Length);
            return copy;
        }
        // Selects the PSHA1 key-derivation algorithm URI for the WS-SecureConversation version.
        internal static string GetKeyDerivationAlgorithm(SecureConversationVersion version)
        {
            string derivationAlgorithm = null;
            if (version == SecureConversationVersion.WSSecureConversationFeb2005)
            {
                derivationAlgorithm = SecurityAlgorithms.Psha1KeyDerivation;
            }
            else if (version == SecureConversationVersion.WSSecureConversation13)
            {
                derivationAlgorithm = SecurityAlgorithms.Psha1KeyDerivationDec2005;
            }
            else
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException());
            }
            return derivationAlgorithm;
        }

        internal static X509Certificate2 GetCertificateFromStore(StoreName storeName, StoreLocation storeLocation,
            X509FindType findType, object findValue, EndpointAddress target)
        {
            X509Certificate2 certificate = GetCertificateFromStoreCore(storeName, storeLocation, findType, findValue, target, true);
            if (certificate == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidOperationException(SR.Format(SR.CannotFindCert, storeName, storeLocation, findType, findValue)));

            return certificate;
        }

        internal static bool TryGetCertificateFromStore(StoreName storeName, StoreLocation storeLocation,
            X509FindType findType, object findValue, EndpointAddress target, out X509Certificate2 certificate)
        {
            certificate = GetCertificateFromStoreCore(storeName, storeLocation, findType, findValue, target, false);
            return (certificate != null);
        }

        // Opens the store read-only and returns a match only when exactly one cert is found;
        // otherwise throws or returns null per throwIfMultipleOrNoMatch. Every enumerated
        // cert is reset afterwards to release native handles.
        private static X509Certificate2 GetCertificateFromStoreCore(StoreName storeName, StoreLocation storeLocation,
            X509FindType findType, object findValue, EndpointAddress target, bool throwIfMultipleOrNoMatch)
        {
            if (findValue == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("findValue");
            }
            X509Store store = new X509Store(storeName, storeLocation);
            X509Certificate2Collection certs = null;
            try
            {
                store.Open(OpenFlags.ReadOnly);
                certs = store.Certificates.Find(findType, findValue, false);
                if (certs.Count == 1)
                {
                    // Re-wrap the handle so the returned cert survives ResetAllCertificates below.
                    return new X509Certificate2(certs[0].Handle);
                }
                if (throwIfMultipleOrNoMatch)
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(CreateCertificateLoadException(
                        storeName, storeLocation, findType, findValue, target, certs.Count));
                }
                else
                {
                    return null;
                }
            }
            finally
            {
                ResetAllCertificates(certs);
                store.Dispose();
            }
        }

        // Picks the "not found" vs "multiple found" message, with or without a target endpoint.
        private static Exception CreateCertificateLoadException(StoreName storeName, StoreLocation storeLocation,
            X509FindType findType, object findValue, EndpointAddress target, int certCount)
        {
            if (certCount == 0)
            {
                if (target == null)
                {
                    return new InvalidOperationException(SR.Format(SR.CannotFindCert, storeName, storeLocation, findType, findValue));
                }
                return new InvalidOperationException(SR.Format(SR.CannotFindCertForTarget, storeName, storeLocation, findType, findValue, target));
            }
            if (target == null)
            {
                return new InvalidOperationException(SR.Format(SR.FoundMultipleCerts, storeName, storeLocation, findType, findValue));
            }
            return new InvalidOperationException(SR.Format(SR.FoundMultipleCertsForTarget, storeName, storeLocation, findType, findValue, target));
        }

        // Splits "domain\user" or "user@domain" usernames into (user, domain) when the
        // credential has a username but no explicit domain.
        internal static void FixNetworkCredential(ref NetworkCredential credential)
        {
            if (credential == null)
            {
                return;
            }
            string username = credential.UserName;
            string domain = credential.Domain;
            if (!string.IsNullOrEmpty(username) && string.IsNullOrEmpty(domain))
            {
                // do the splitting only if there is exactly 1 \ or exactly 1 @
                string[] partsWithSlashDelimiter = username.Split('\\');
                string[] partsWithAtDelimiter = username.Split('@');
                if (partsWithSlashDelimiter.Length == 2 && partsWithAtDelimiter.Length == 1)
                {
                    if (!string.IsNullOrEmpty(partsWithSlashDelimiter[0]) && !string.IsNullOrEmpty(partsWithSlashDelimiter[1]))
                    {
                        credential = new NetworkCredential(partsWithSlashDelimiter[1], credential.Password, partsWithSlashDelimiter[0]);
                    }
                }
                else if (partsWithSlashDelimiter.Length == 1 && partsWithAtDelimiter.Length == 2)
                {
                    if (!string.IsNullOrEmpty(partsWithAtDelimiter[0]) && !string.IsNullOrEmpty(partsWithAtDelimiter[1]))
                    {
                        credential = new NetworkCredential(partsWithAtDelimiter[0], credential.Password, partsWithAtDelimiter[1]);
                    }
                }
            }
        }

#if !FEATURE_NETNATIVE // NegotiateStream
        // Rejects anonymous callers unless explicitly allowed.
        public static void ValidateAnonymityConstraint(WindowsIdentity identity, bool allowUnauthenticatedCallers)
        {
            if (!allowUnauthenticatedCallers && identity.User.IsWellKnown(WellKnownSidType.AnonymousSid))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperWarning(
                    new SecurityTokenValidationException(SR.Format(SR.AnonymousLogonsAreNotAllowed)));
            }
        }
#endif // !FEATURE_NETNATIVE

        // This is the workaround, Since store.Certificates returns a full collection
        // of certs in store. These are holding native resources.
        internal static void ResetAllCertificates(X509Certificate2Collection certificates)
        {
            if (certificates != null)
            {
                for (int i = 0; i < certificates.Count; ++i)
                {
                    ResetCertificate(certificates[i]);
                }
            }
        }

        internal static void ResetCertificate(X509Certificate2 certificate)
        {
            // Check that Dispose() and Reset() do the same thing
            certificate.Dispose();
        }
    }

    // Process-wide unique id generator producing "uuid-<guid>-<counter>" values by default.
    internal struct SecurityUniqueId
    {
        private static long s_nextId = 0;
        private static string s_commonPrefix = "uuid-" + Guid.NewGuid().ToString() + "-";

        private long _id;
        private string _prefix;
        private string _val; // lazily built string form, cached after first access

        private SecurityUniqueId(string prefix, long id)
        {
            _id = id;
            _prefix = prefix;
            _val = null;
        }

        public static SecurityUniqueId Create()
        {
            return Create(s_commonPrefix);
        }

        public static SecurityUniqueId Create(string prefix)
        {
            // Interlocked.Increment keeps the counter unique across threads.
            return new SecurityUniqueId(prefix, Interlocked.Increment(ref s_nextId));
        }

        public string Value
        {
            get
            {
                if (_val == null)
                    _val = _prefix + _id.ToString(CultureInfo.InvariantCulture);

                return _val;
            }
        }
    }

    internal static class EmptyReadOnlyCollection<T>
    {
        public static ReadOnlyCollection<T> Instance = new ReadOnlyCollection<T>(new List<T>());
    }

    // Schedules a timed operation on the I/O thread pool and completes this async
    // result (with any non-fatal exception) when it finishes.
    internal class OperationWithTimeoutAsyncResult : TraceAsyncResult
    {
        private static readonly Action<object> s_scheduledCallback = new Action<object>(OnScheduled);
        private TimeoutHelper _timeoutHelper;
        private Action<TimeSpan> _operationWithTimeout;

        public OperationWithTimeoutAsyncResult(Action<TimeSpan> operationWithTimeout, TimeSpan timeout, AsyncCallback callback, object state)
            : base(callback, state)
        {
            _operationWithTimeout = operationWithTimeout;
            _timeoutHelper = new TimeoutHelper(timeout);
            ActionItem.Schedule(s_scheduledCallback, this);
        }

        private static void OnScheduled(object state)
        {
            OperationWithTimeoutAsyncResult thisResult = (OperationWithTimeoutAsyncResult)state;
            Exception completionException = null;
            try
            {
                using (thisResult.CallbackActivity == null ? null : ServiceModelActivity.BoundOperation(thisResult.CallbackActivity))
                {
                    // Run the operation with whatever time remains of the original timeout.
                    thisResult._operationWithTimeout(thisResult._timeoutHelper.RemainingTime());
                }
            }
#pragma warning suppress 56500 // covered by FxCOP
            catch (Exception e)
            {
                if (Fx.IsFatal(e))
                {
                    throw;
                }
                completionException = e;
            }
            // A non-null completionException is rethrown to the caller from End().
            thisResult.Complete(false, completionException);
        }

        public static void End(IAsyncResult result)
        {
            End<OperationWithTimeoutAsyncResult>(result);
        }
    }
}
//
// SCSharp.UI.PlayCustomScreen
//
// Authors:
//	Chris Toshok (toshok@gmail.com)
//
// Copyright 2006-2010 Chris Toshok
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using System;
using System.IO;
using System.Threading;
using System.Collections.Generic;

using SdlDotNet.Core;
using SdlDotNet.Graphics;
using SdlDotNet.Input;
using System.Drawing;

namespace SCSharp.UI
{
	/// <summary>
	/// The "Play Custom" scenario-selection screen: lets the player browse
	/// the maps/ directory, pick a scenario (.scm/.scx), choose a game type
	/// template and configure player/race slots before starting a game.
	/// </summary>
	public class PlayCustomScreen : UIScreen
	{
		public PlayCustomScreen (Mpq mpq)
			: base (mpq, "glue\\PalNl", Builtins.rez_GluCustmBin)
		{
		}

		/* indices into the GluAll string table for the various format strings */
		const int MAPSIZE_FORMAT_INDEX = 32;
		const int MAPDIM_FORMAT_INDEX = 31; // XXX we don't use this one yet..
		const int TILESET_FORMAT_INDEX = 33;
		const int NUMPLAYERS_FORMAT_INDEX = 30;
		const int HUMANSLOT_FORMAT_INDEX = 37;
		const int COMPUTERSLOT_FORMAT_INDEX = 35;

		/* indices into Elements for the UI widgets defined by the .bin resource */
		const int FILELISTBOX_ELEMENT_INDEX = 7;
		const int CURRENTDIR_ELEMENT_INDEX = 8;
		const int MAPTITLE_ELEMENT_INDEX = 9;
		const int MAPDESCRIPTION_ELEMENT_INDEX = 10;
		const int MAPSIZE_ELEMENT_INDEX = 11;
		const int MAPTILESET_ELEMENT_INDEX = 12;
		const int MAPPLAYERS1_ELEMENT_INDEX = 14;
		const int MAPPLAYERS2_ELEMENT_INDEX = 15;
		const int OK_ELEMENT_INDEX = 16;
		const int CANCEL_ELEMENT_INDEX = 17;
		const int GAMETYPECOMBO_ELEMENT_INDEX = 20;
		const int GAMESUBTYPE_LABEL_ELEMENT_INDEX = 19;
		const int GAMESUBTYPE_COMBO_ELEMENT_INDEX = 21;

		/* the per-player combo boxes are laid out consecutively starting here */
		const int PLAYER1_COMBOBOX_PLAYER = 22;
		const int PLAYER1_COMBOBOX_RACE = 30;

		const int max_players = 8;

		string mapdir;          // root maps/ directory under the install dir
		string curdir;          // directory currently shown in the file list

		Mpq selectedScenario;   // archive for the currently selected map, or null
		Chk selectedChk;        // parsed scenario data for the selected map, or null
		Got selectedGot;        // currently selected game-type template

		ListBoxElement file_listbox;
		ComboBoxElement gametype_combo;

		// Fills a race combo with the fixed set of choices, defaulting to Random.
		void InitializeRaceCombo (ComboBoxElement combo)
		{
			combo.AddItem ("Zerg"); /* XXX these should all come from some string constant table someplace */
			combo.AddItem ("Terran");
			combo.AddItem ("Protoss");
			combo.AddItem ("Random", true);
		}

		// Fills a player combo with Closed/Computer, defaulting to Computer.
		void InitializePlayerCombo (ComboBoxElement combo)
		{
			combo.AddItem (GlobalResources.Instance.GluAllTbl.Strings[130]); /* Closed */
			combo.AddItem (GlobalResources.Instance.GluAllTbl.Strings[128], true); /* Computer */
		}

		string[] files;        // all "*.sc*" files in curdir (listbox shows .scm/.scx only)
		string[] directories;  // entries preceding the files in the listbox; [0] may be "Up One Level"
		Got[] templates;       // game-type templates, sorted by list position

		/// <summary>
		/// Rebuilds the file listbox from curdir: subdirectories first
		/// (wrapped in brackets, with an optional "Up One Level" entry),
		/// followed by the .scm/.scx map files.
		/// </summary>
		void PopulateFileList ()
		{
			file_listbox.Clear ();

			string[] dir = Directory.GetDirectories (curdir);

			List<string> dirs = new List<string>();

			if (curdir != mapdir) {
				dirs.Add ("Up One Level");
			}

			foreach (string d in dir) {
				string dl = Path.GetFileName (d).ToLower ();

				if (curdir == mapdir) {
					// hide the broodwar maps unless we're running BW, and
					// never show the replays directory at the top level.
					if (!Game.Instance.IsBroodWar && dl == "broodwar")
						continue;
					if (dl == "replays")
						continue;
				}

				dirs.Add (d);
			}

			directories = dirs.ToArray();

			files = Directory.GetFiles (curdir, "*.sc*");

			Elements[CURRENTDIR_ELEMENT_INDEX].Text = Path.GetFileName (curdir);

			for (int i = 0; i < directories.Length; i ++)
				file_listbox.AddItem (String.Format ("[{0}]", Path.GetFileName (directories[i])));

			for (int i = 0; i < files.Length; i ++) {
				string lower = files[i].ToLower();
				if (lower.EndsWith (".scm") || lower.EndsWith (".scx"))
					file_listbox.AddItem (Path.GetFileName (files[i]));
			}
		}

		/// <summary>
		/// Loads the game-type templates from the MPQ and fills the game-type
		/// combo.  Only templates that allow computer players and have no
		/// fixed teams are shown.
		/// </summary>
		void PopulateGameTypes ()
		{
			List<Got> templateList = new List<Got>();

			// FIX: the StreamReader (and the MPQ resource stream it wraps)
			// was previously never disposed; wrap it in a using so the
			// stream is released as soon as the template list is read.
			using (StreamReader sr = new StreamReader ((Stream)mpq.GetResource ("templates\\templates.lst"))) {
				string l;
				while ((l = sr.ReadLine ()) != null) {
					string t = l.Replace ("\"", "");

					Got got = (Got)mpq.GetResource ("templates\\" + t);

					if (got == null)
						continue;

					if (got.ComputerPlayersAllowed && got.NumberOfTeams == 0) {
						Console.WriteLine ("adding template {0}:{1}", got.UIGameTypeName, got.UISubtypeLabel);
						templateList.Add (got);
					}
				}
			}

			templates = new Got[templateList.Count];
			templateList.CopyTo (templates, 0);

			Array.Sort (templates, delegate (Got g1, Got g2) { return g1.ListPosition - g2.ListPosition; });

			/* fill in the game type menu.  we only show the templates that
			   allow computer players, have 0 teams */
			foreach (Got got in templates)
				gametype_combo.AddItem (got.UIGameTypeName);

			gametype_combo.SelectedIndex = 0;
			GameTypeSelectionChanged (gametype_combo.SelectedIndex);
		}

		/// <summary>
		/// One-time UI setup once the screen's resources are loaded: hides
		/// unused widgets, initializes the player/race combos, populates the
		/// game-type and file lists, and wires up event handlers.
		/// </summary>
		protected override void ResourceLoader ()
		{
			base.ResourceLoader ();

			for (int i = 0; i < Elements.Count; i ++)
				Console.WriteLine ("{0}: {1} '{2}'", i, Elements[i].Type, Elements[i].Text);

			/* these don't ever show up in the UI, that i know of... */
			Elements[GAMESUBTYPE_LABEL_ELEMENT_INDEX].Visible = false;
			Elements[GAMESUBTYPE_COMBO_ELEMENT_INDEX].Visible = false;

			/* initialize all the race combo boxes */
			for (int i = 0; i < max_players; i ++) {
				InitializePlayerCombo ((ComboBoxElement)Elements[PLAYER1_COMBOBOX_PLAYER + i]);
				InitializeRaceCombo ((ComboBoxElement)Elements[PLAYER1_COMBOBOX_RACE + i]);
			}

			file_listbox = (ListBoxElement)Elements[FILELISTBOX_ELEMENT_INDEX];
			gametype_combo = (ComboBoxElement)Elements[GAMETYPECOMBO_ELEMENT_INDEX];

			/* initially populate the map list by scanning the maps/
			   directory in the starcraftdir */
			mapdir = Path.Combine (Game.Instance.RootDirectory, "maps");
			curdir = mapdir;

			PopulateGameTypes ();
			PopulateFileList ();

			file_listbox.SelectionChanged += FileListSelectionChanged;
			gametype_combo.SelectionChanged += GameTypeSelectionChanged;

			Elements[OK_ELEMENT_INDEX].Activate +=
				delegate () {
					if (selectedScenario == null) {
						// the selected entry is a directory, switch to it
						if (curdir != mapdir && file_listbox.SelectedIndex == 0)
							curdir = Directory.GetParent (curdir).FullName;
						else
							curdir = directories[file_listbox.SelectedIndex];
						PopulateFileList ();
					}
					else {
						Game.Instance.SwitchToScreen (new GameScreen (mpq, selectedScenario, selectedChk, selectedGot));
					}
				};

			Elements[CANCEL_ELEMENT_INDEX].Activate +=
				delegate () {
					Game.Instance.SwitchToScreen (new RaceSelectionScreen (mpq));
				};

			/* make sure the PLAYER1 player combo reads
			 * the player's name and is desensitized */
			((ComboBoxElement)Elements[PLAYER1_COMBOBOX_PLAYER]).AddItem (/*XXX player name*/"toshok");
			Elements[PLAYER1_COMBOBOX_PLAYER].Sensitive = false;
		}

		/// <summary>
		/// Updates the slot-count labels and the visibility/defaults of the
		/// per-player combo rows to match the selected template and map.
		/// </summary>
		void UpdatePlayersDisplay ()
		{
			if (selectedGot.UseMapSettings) {
				// "use map settings" templates show human and computer
				// slot counts separately.
				string slotString;

				slotString = GlobalResources.Instance.GluAllTbl.Strings[HUMANSLOT_FORMAT_INDEX];
				slotString = slotString.Replace ("%c", " "); /* should probably be a tab.. */
				slotString = slotString.Replace ("%s",
								 (selectedChk == null
								  ? ""
								  : String.Format ("{0}", selectedChk.NumHumanSlots)));
				Elements[MAPPLAYERS1_ELEMENT_INDEX].Text = slotString;
				Elements[MAPPLAYERS1_ELEMENT_INDEX].Visible = true;

				slotString = GlobalResources.Instance.GluAllTbl.Strings[COMPUTERSLOT_FORMAT_INDEX];
				slotString = slotString.Replace ("%c", " "); /* should probably be a tab.. */
				slotString = slotString.Replace ("%s",
								 (selectedChk == null
								  ? ""
								  : String.Format ("{0}", selectedChk.NumComputerSlots)));
				Elements[MAPPLAYERS2_ELEMENT_INDEX].Text = slotString;
				Elements[MAPPLAYERS2_ELEMENT_INDEX].Visible = true;
			}
			else {
				// otherwise just show the total number of players.
				string numPlayersString = GlobalResources.Instance.GluAllTbl.Strings[NUMPLAYERS_FORMAT_INDEX];
				numPlayersString = numPlayersString.Replace ("%c", " "); /* should probably be a tab.. */
				numPlayersString = numPlayersString.Replace ("%s",
									     (selectedChk == null
									      ? ""
									      : String.Format ("{0}", selectedChk.NumPlayers)));
				Elements[MAPPLAYERS1_ELEMENT_INDEX].Text = numPlayersString;
				Elements[MAPPLAYERS1_ELEMENT_INDEX].Visible = true;
				Elements[MAPPLAYERS2_ELEMENT_INDEX].Visible = false;
			}

			int i = 0;
			if (selectedChk != null) {
				for (i = 0; i < max_players; i ++) {
					if (selectedGot.UseMapSettings) {
						if (i >= selectedChk.NumComputerSlots + 1)
							break;
					}
					else {
						if (i >= selectedChk.NumPlayers)
							break;
					}

					// slot 0 is the human player; the rest default to
					// Computer (index 1) / Random race (index 3).
					if (i > 0)
						((ComboBoxElement)Elements[PLAYER1_COMBOBOX_PLAYER + i]).SelectedIndex = 1;
					((ComboBoxElement)Elements[PLAYER1_COMBOBOX_RACE + i]).SelectedIndex = 3;

					Elements[PLAYER1_COMBOBOX_PLAYER + i].Visible = true;
					Elements[PLAYER1_COMBOBOX_RACE + i].Visible = true;
				}
			}

			// hide the unused slot rows.
			for (int j = i; j < max_players; j ++) {
				Elements[PLAYER1_COMBOBOX_PLAYER + j].Visible = false;
				Elements[PLAYER1_COMBOBOX_RACE + j].Visible = false;
			}
		}

		void GameTypeSelectionChanged (int selectedIndex)
		{
			/* the display of the number of players
			 * changes depending upon the template */
			selectedGot = templates[selectedIndex];
			UpdatePlayersDisplay ();
		}

		/// <summary>
		/// Selection handler for the file listbox: loads the scenario's
		/// chk data for a map selection (or clears it for a directory) and
		/// refreshes the map-info labels.
		/// </summary>
		void FileListSelectionChanged (int selectedIndex)
		{
			string map_path = Path.Combine (curdir, file_listbox.SelectedItem);

			if (selectedScenario !=null)
				selectedScenario.Dispose ();

			// directory entries occupy the first directories.Length rows.
			if (selectedIndex < directories.Length) {
				selectedScenario = null;
				selectedChk = null;
			}
			else {
				selectedScenario = new MpqArchive (map_path);
				selectedChk = (Chk)selectedScenario.GetResource ("staredit\\scenario.chk");
			}

			Elements[MAPTITLE_ELEMENT_INDEX].Text = selectedChk == null ? "" : selectedChk.Name;
			Elements[MAPDESCRIPTION_ELEMENT_INDEX].Text = selectedChk == null ? "" : selectedChk.Description;

			string mapSizeString = GlobalResources.Instance.GluAllTbl.Strings[MAPSIZE_FORMAT_INDEX];
			//			string mapDimString = GlobalResources.Instance.GluAllTbl.Strings[MAPDIM_FORMAT_INDEX];
			string tileSetString = GlobalResources.Instance.GluAllTbl.Strings[TILESET_FORMAT_INDEX];

			mapSizeString = mapSizeString.Replace ("%c", " "); /* should probably be a tab.. */
			mapSizeString = mapSizeString.Replace ("%s",
							       (selectedChk == null
								? ""
								: String.Format ("{0}x{1}", selectedChk.Width, selectedChk.Height)));

			tileSetString = tileSetString.Replace ("%c", " "); /* should probably be a tab.. */
			tileSetString = tileSetString.Replace ("%s",
							       (selectedChk == null
								? ""
								: String.Format ("{0}", selectedChk.Tileset)));

			Elements[MAPSIZE_ELEMENT_INDEX].Text = mapSizeString;
			Elements[MAPTILESET_ELEMENT_INDEX].Text = tileSetString;

			UpdatePlayersDisplay ();
		}

		// Route up/down arrows to the file listbox; everything else goes to
		// the base screen handling.
		public override void KeyboardDown (KeyboardEventArgs args)
		{
			if (args.Key == Key.DownArrow
			    || args.Key == Key.UpArrow) {
				file_listbox.KeyboardDown (args);
			}
			else
				base.KeyboardDown (args);
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Data.Common;
using System.Data.ProviderBase;
using System.Diagnostics;
using System.Threading;

namespace System.Data.OleDb
{
    using SysTx = Transactions;

    // Pool-aware DbConnection implementation for OLE DB.  The outer
    // OleDbConnection is a stable handle; the actual connection state lives
    // in an _innerConnection object that is swapped atomically as the
    // connection moves through its state machine
    // (Closed -> Connecting -> Open -> OpenBusy -> Closed, etc.).
    public sealed partial class OleDbConnection : DbConnection
    {
        private static readonly DbConnectionFactory _connectionFactory = OleDbConnectionFactory.SingletonInstance;
        private DbConnectionOptions _userConnectionOptions;
        private DbConnectionPoolGroup _poolGroup;
        private DbConnectionInternal _innerConnection;
        private int _closeCount;          // used to distinguish between different uses of this object, so we don't have to maintain a list of it's children

        public OleDbConnection() : base()
        {
            // No finalizable state on the outer object; cleanup belongs to
            // the inner connection.
            GC.SuppressFinalize(this);
            _innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
        }

        // Copy Constructor
        private void CopyFrom(OleDbConnection connection)
        { // V1.2.3300
            ADP.CheckArgumentNull(connection, "connection");
            _userConnectionOptions = connection.UserConnectionOptions;
            _poolGroup = connection.PoolGroup;

            // Match the original connection's behavior for whether the connection was never opened,
            // but ensure Clone is in the closed state.
            if (DbConnectionClosedNeverOpened.SingletonInstance == connection._innerConnection)
            {
                _innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
            }
            else
            {
                _innerConnection = DbConnectionClosedPreviouslyOpened.SingletonInstance;
            }
        }

        internal DbConnectionFactory ConnectionFactory
        {
            get
            {
                return _connectionFactory;
            }
        }

        // Effective (parsed) connection options from the pool group; null
        // until a connection string has been set.
        internal DbConnectionOptions ConnectionOptions
        {
            get
            {
                System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = PoolGroup;
                return ((null != poolGroup) ? poolGroup.ConnectionOptions : null);
            }
        }

        // Returns the user's connection string, hiding the password when the
        // current inner-connection state requires it.
        private string ConnectionString_Get()
        {
            bool hidePassword = InnerConnection.ShouldHidePassword;
            DbConnectionOptions connectionOptions = UserConnectionOptions;
            return ((null != connectionOptions) ? connectionOptions.UsersConnectionString(hidePassword) : "");
        }

        private void ConnectionString_Set(string value)
        {
            DbConnectionPoolKey key = new DbConnectionPoolKey(value);

            ConnectionString_Set(key);
        }

        // Atomically installs a new connection string: only allowed while
        // closed; transitions Closed -> ClosedBusy to block a concurrent
        // Open, then resets to the never-opened closed state.
        private void ConnectionString_Set(DbConnectionPoolKey key)
        {
            DbConnectionOptions connectionOptions = null;
            System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = ConnectionFactory.GetConnectionPoolGroup(key, null, ref connectionOptions);
            DbConnectionInternal connectionInternal = InnerConnection;
            bool flag = connectionInternal.AllowSetConnectionString;
            if (flag)
            {
                //try {
                // NOTE: There's a race condition with multiple threads changing
                //   ConnectionString and any thread throws an exception
                // Closed->Busy: prevent Open during set_ConnectionString
                flag = SetInnerConnectionFrom(DbConnectionClosedBusy.SingletonInstance, connectionInternal);
                if (flag)
                {
                    _userConnectionOptions = connectionOptions;
                    _poolGroup = poolGroup;
                    _innerConnection = DbConnectionClosedNeverOpened.SingletonInstance;
                }
                //}
                //catch {
                //    // recover from exceptions to avoid sticking in busy state
                //    SetInnerConnectionFrom(connectionInternal, DbConnectionClosedBusy.SingletonInstance);
                //    throw;
                //}
            }
            if (!flag)
            {
                throw ADP.OpenConnectionPropertySet(ADP.ConnectionString, connectionInternal.State);
            }
        }

        internal DbConnectionInternal InnerConnection
        {
            get
            {
                return _innerConnection;
            }
        }

        internal System.Data.ProviderBase.DbConnectionPoolGroup PoolGroup
        {
            get
            {
                return _poolGroup;
            }
            set
            {
                // when a poolgroup expires and the connection eventually activates, the pool entry will be replaced
                Debug.Assert(null != value, "null poolGroup");
                _poolGroup = value;
            }
        }

        // Options exactly as the user supplied them (pre-pool-group
        // normalization).
        internal DbConnectionOptions UserConnectionOptions
        {
            get
            {
                return _userConnectionOptions;
            }
        }

        internal void AddWeakReference(object value, int tag)
        {
            InnerConnection.AddWeakReference(value, tag);
        }

        protected override DbCommand CreateDbCommand()
        {
            DbCommand command = null;
            DbProviderFactory providerFactory = ConnectionFactory.ProviderFactory;
            command = providerFactory.CreateCommand();
            command.Connection = this;
            return command;
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                _userConnectionOptions = null;
                _poolGroup = null;
                Close();
            }
            DisposeMe(disposing);
            base.Dispose(disposing); // notify base classes
        }

        partial void RepairInnerConnection();

        //// NOTE: This is just a private helper because OracleClient V1.1 shipped
        //// with a different argument name and it's a breaking change to not use
        //// the same argument names in V2.0 (VB Named Parameter Binding--Ick)
        //private void EnlistDistributedTransactionHelper(System.EnterpriseServices.ITransaction transaction) {
        //    SysTx.Transaction indigoTransaction = null;

        //    if (null != transaction) {
        //        indigoTransaction = SysTx.TransactionInterop.GetTransactionFromDtcTransaction((SysTx.IDtcTransaction)transaction);
        //    }

        //    RepairInnerConnection();
        //    // NOTE: since transaction enlistment involves round trips to the
        //    // server, we don't want to lock here, we'll handle the race conditions
        //    // elsewhere.
        //    InnerConnection.EnlistTransaction(indigoTransaction);

        //    // NOTE: If this outer connection were to be GC'd while we're
        //    // enlisting, the pooler would attempt to reclaim the inner connection
        //    // while we're attempting to enlist; not sure how likely that is but
        //    // we should consider a GC.KeepAlive(this) here.
        //    GC.KeepAlive(this);
        //}

        public override void EnlistTransaction(SysTx.Transaction transaction)
        {
            // If we're currently enlisted in a transaction and we were called
            // on the EnlistTransaction method (Whidbey) we're not allowed to
            // enlist in a different transaction.

            DbConnectionInternal innerConnection = InnerConnection;

            // NOTE: since transaction enlistment involves round trips to the
            // server, we don't want to lock here, we'll handle the race conditions
            // elsewhere.
            SysTx.Transaction enlistedTransaction = innerConnection.EnlistedTransaction;
            if (enlistedTransaction != null)
            {
                // Allow calling enlist if already enlisted (no-op)
                if (enlistedTransaction.Equals(transaction))
                {
                    return;
                }

                // Allow enlisting in a different transaction if the enlisted transaction has completed.
                if (enlistedTransaction.TransactionInformation.Status == SysTx.TransactionStatus.Active)
                {
                    throw ADP.TransactionPresent();
                }
            }
            RepairInnerConnection();
            InnerConnection.EnlistTransaction(transaction);

            // NOTE: If this outer connection were to be GC'd while we're
            // enlisting, the pooler would attempt to reclaim the inner connection
            // while we're attempting to enlist; not sure how likely that is but
            // we should consider a GC.KeepAlive(this) here.
            GC.KeepAlive(this);
        }

        public override DataTable GetSchema()
        {
            return this.GetSchema(DbMetaDataCollectionNames.MetaDataCollections, null);
        }

        public override DataTable GetSchema(string collectionName)
        {
            return this.GetSchema(collectionName, null);
        }

        public override DataTable GetSchema(string collectionName, string[] restrictionValues)
        {
            // NOTE: This is virtual because not all providers may choose to support
            //       returning schema data
            return InnerConnection.GetSchema(ConnectionFactory, PoolGroup, this, collectionName, restrictionValues);
        }

        internal void NotifyWeakReference(int message)
        {
            InnerConnection.NotifyWeakReference(message);
        }

        // Sanity-checks that a usable connection string is present while the
        // connection is in the Connecting state.
        internal void PermissionDemand()
        {
            Debug.Assert(DbConnectionClosedConnecting.SingletonInstance == _innerConnection, "not connecting");

            System.Data.ProviderBase.DbConnectionPoolGroup poolGroup = PoolGroup;
            DbConnectionOptions connectionOptions = ((null != poolGroup) ? poolGroup.ConnectionOptions : null);
            if ((null == connectionOptions) || connectionOptions.IsEmpty)
            {
                throw ADP.NoConnectionString();
            }
            DbConnectionOptions userConnectionOptions = UserConnectionOptions;
            Debug.Assert(null != userConnectionOptions, "null UserConnectionOptions");
        }

        internal void RemoveWeakReference(object value)
        {
            InnerConnection.RemoveWeakReference(value);
        }

        // OpenBusy->Closed (previously opened)
        // Connecting->Open
        internal void SetInnerConnectionEvent(DbConnectionInternal to)
        {
            // Set's the internal connection without verifying that it's a specific value
            Debug.Assert(null != _innerConnection, "null InnerConnection");
            Debug.Assert(null != to, "to null InnerConnection");

            // Compare only the Open bit of the states so intermediate "busy"
            // flavors of Closed/Open don't trigger spurious StateChange events.
            ConnectionState originalState = _innerConnection.State & ConnectionState.Open;
            ConnectionState currentState = to.State & ConnectionState.Open;

            if ((originalState != currentState) && (ConnectionState.Closed == currentState))
            {
                // Increment the close count whenever we switch to Closed
                unchecked
                { _closeCount++; }
            }

            _innerConnection = to;

            if (ConnectionState.Closed == originalState && ConnectionState.Open == currentState)
            {
                OnStateChange(DbConnectionInternal.StateChangeOpen);
            }
            else if (ConnectionState.Open == originalState && ConnectionState.Closed == currentState)
            {
                OnStateChange(DbConnectionInternal.StateChangeClosed);
            }
            else
            {
                // NOTE(review): this branch asserts in debug builds but still
                // raises a StateChange event in release builds if the state
                // actually changed.
                Debug.Assert(false, "unexpected state switch");
                if (originalState != currentState)
                {
                    OnStateChange(new StateChangeEventArgs(originalState, currentState));
                }
            }
        }

        // Closed->Connecting: prevent set_ConnectionString during Open
        // Open->OpenBusy: guarantee internal connection is returned to correct pool
        // Closed->ClosedBusy: prevent Open during set_ConnectionString
        internal bool SetInnerConnectionFrom(DbConnectionInternal to, DbConnectionInternal from)
        {
            // Set's the internal connection, verifying that it's a specific value before doing so.
            Debug.Assert(null != _innerConnection, "null InnerConnection");
            Debug.Assert(null != from, "from null InnerConnection");
            Debug.Assert(null != to, "to null InnerConnection");

            // CAS: only succeeds if the inner connection is still 'from',
            // which is how concurrent Open/set_ConnectionString races are
            // resolved without a lock.
            bool result = (from == Interlocked.CompareExchange<DbConnectionInternal>(ref _innerConnection, to, from));
            return result;
        }

        // ClosedBusy->Closed (never opened)
        // Connecting->Closed (exception during open, return to previous closed state)
        internal void SetInnerConnectionTo(DbConnectionInternal to)
        {
            // Set's the internal connection without verifying that it's a specific value
            Debug.Assert(null != _innerConnection, "null InnerConnection");
            Debug.Assert(null != to, "to null InnerConnection");
            _innerConnection = to;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;

namespace System.Net.WebSockets
{
    /// <summary>
    /// Client-side WebSocket.  Lifecycle is tracked by a small atomic state
    /// machine (Created -> Connecting -> Connected -> Disposed); the real
    /// transport work is delegated to the platform-specific
    /// <c>WebSocketHandle</c> created during ConnectAsync.
    /// </summary>
    public sealed partial class ClientWebSocket : WebSocket
    {
        private enum InternalState
        {
            Created = 0,
            Connecting = 1,
            Connected = 2,
            Disposed = 3
        }

        private readonly ClientWebSocketOptions _options;
        private WebSocketHandle _innerWebSocket;
        private readonly CancellationTokenSource _cts;

        // NOTE: this is really an InternalState value, but Interlocked doesn't support
        //       operations on values of enum types.
        private int _state;

        public ClientWebSocket()
        {
            if (NetEventSource.Log.IsEnabled())
            {
                NetEventSource.Enter(NetEventSource.ComponentType.WebSocket, this, ".ctor", null);
            }

            WebSocketHandle.CheckPlatformSupport();

            _state = (int)InternalState.Created;
            _options = new ClientWebSocketOptions();
            _cts = new CancellationTokenSource();

            if (NetEventSource.Log.IsEnabled())
            {
                NetEventSource.Exit(NetEventSource.ComponentType.WebSocket, this, ".ctor", null);
            }
        }

        #region Properties

        public ClientWebSocketOptions Options
        {
            get
            {
                return _options;
            }
        }

        // The following pass through to the inner handle once it exists,
        // returning null before a connection has been established.
        public override WebSocketCloseStatus? CloseStatus
        {
            get
            {
                if (_innerWebSocket.IsValid)
                {
                    return _innerWebSocket.CloseStatus;
                }
                return null;
            }
        }

        public override string CloseStatusDescription
        {
            get
            {
                if (_innerWebSocket.IsValid)
                {
                    return _innerWebSocket.CloseStatusDescription;
                }
                return null;
            }
        }

        public override string SubProtocol
        {
            get
            {
                if (_innerWebSocket.IsValid)
                {
                    return _innerWebSocket.SubProtocol;
                }
                return null;
            }
        }

        public override WebSocketState State
        {
            get
            {
                // state == Connected or Disposed
                if (_innerWebSocket.IsValid)
                {
                    return _innerWebSocket.State;
                }

                switch ((InternalState)_state)
                {
                    case InternalState.Created:
                        return WebSocketState.None;
                    case InternalState.Connecting:
                        return WebSocketState.Connecting;
                    default:
                        // We only get here if disposed before connecting
                        Debug.Assert((InternalState)_state == InternalState.Disposed);
                        return WebSocketState.Closed;
                }
            }
        }

        #endregion Properties

        /// <summary>
        /// Validates <paramref name="uri"/> (must be an absolute ws:// or
        /// wss:// URI), transitions Created -> Connecting exactly once, and
        /// begins the handshake.
        /// </summary>
        /// <exception cref="ArgumentNullException">uri is null.</exception>
        /// <exception cref="ArgumentException">uri is relative or has the wrong scheme.</exception>
        /// <exception cref="ObjectDisposedException">this instance has been disposed.</exception>
        /// <exception cref="InvalidOperationException">ConnectAsync was already called.</exception>
        public Task ConnectAsync(Uri uri, CancellationToken cancellationToken)
        {
            if (uri == null)
            {
                // FIX: use nameof for refactor-safe parameter names (was the
                // string literal "uri"; same runtime value).
                throw new ArgumentNullException(nameof(uri));
            }
            if (!uri.IsAbsoluteUri)
            {
                throw new ArgumentException(SR.net_uri_NotAbsolute, nameof(uri));
            }
            if (uri.Scheme != UriScheme.Ws && uri.Scheme != UriScheme.Wss)
            {
                throw new ArgumentException(SR.net_WebSockets_Scheme, nameof(uri));
            }

            // Check that we have not started already.
            var priorState = (InternalState)Interlocked.CompareExchange(ref _state, (int)InternalState.Connecting, (int)InternalState.Created);
            if (priorState == InternalState.Disposed)
            {
                throw new ObjectDisposedException(GetType().FullName);
            }
            else if (priorState != InternalState.Created)
            {
                throw new InvalidOperationException(SR.net_WebSockets_AlreadyStarted);
            }
            _options.SetToReadOnly();

            return ConnectAsyncCore(uri, cancellationToken);
        }

        private async Task ConnectAsyncCore(Uri uri, CancellationToken cancellationToken)
        {
            _innerWebSocket = WebSocketHandle.Create();

            try
            {
                // Change internal state to 'connected' to enable the other methods
                if ((InternalState)Interlocked.CompareExchange(ref _state, (int)InternalState.Connected, (int)InternalState.Connecting) != InternalState.Connecting)
                {
                    // Aborted/Disposed during connect.
                    throw new ObjectDisposedException(GetType().FullName);
                }

                await _innerWebSocket.ConnectAsyncCore(uri, cancellationToken, _options).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                if (NetEventSource.Log.IsEnabled())
                {
                    NetEventSource.Exception(NetEventSource.ComponentType.WebSocket, this, "ConnectAsync", ex);
                }
                throw;
            }
        }

        /// <summary>
        /// Sends a Text or Binary frame.  Control message types (Close) must
        /// go through CloseAsync/CloseOutputAsync instead.
        /// </summary>
        public override Task SendAsync(ArraySegment<byte> buffer, WebSocketMessageType messageType, bool endOfMessage,
            CancellationToken cancellationToken)
        {
            ThrowIfNotConnected();

            if (!((messageType == WebSocketMessageType.Text) || (messageType == WebSocketMessageType.Binary)))
            {
                string errorMessage = SR.Format(
                        SR.net_WebSockets_Argument_InvalidMessageType,
                        "Close",
                        "SendAsync",
                        "Binary",
                        "Text",
                        "CloseOutputAsync");
                throw new ArgumentException(errorMessage, nameof(messageType));
            }

            WebSocketValidate.ValidateArraySegment<byte>(buffer, nameof(buffer));

            return _innerWebSocket.SendAsync(buffer, messageType, endOfMessage, cancellationToken);
        }

        public override Task<WebSocketReceiveResult> ReceiveAsync(ArraySegment<byte> buffer, CancellationToken cancellationToken)
        {
            ThrowIfNotConnected();
            WebSocketValidate.ValidateArraySegment<byte>(buffer, nameof(buffer));
            return _innerWebSocket.ReceiveAsync(buffer, cancellationToken);
        }

        public override Task CloseAsync(WebSocketCloseStatus closeStatus, string statusDescription,
            CancellationToken cancellationToken)
        {
            ThrowIfNotConnected();
            WebSocketValidate.ValidateCloseStatus(closeStatus, statusDescription);
            return _innerWebSocket.CloseAsync(closeStatus, statusDescription, cancellationToken);
        }

        public override Task CloseOutputAsync(WebSocketCloseStatus closeStatus, string statusDescription,
            CancellationToken cancellationToken)
        {
            ThrowIfNotConnected();
            WebSocketValidate.ValidateCloseStatus(closeStatus, statusDescription);
            return _innerWebSocket.CloseOutputAsync(closeStatus, statusDescription, cancellationToken);
        }

        public override void Abort()
        {
            if ((InternalState)_state == InternalState.Disposed)
            {
                return;
            }
            if (_innerWebSocket.IsValid)
            {
                _innerWebSocket.Abort();
            }
            Dispose();
        }

        public override void Dispose()
        {
            // Atomic transition to Disposed makes Dispose idempotent.
            var priorState = (InternalState)Interlocked.Exchange(ref _state, (int)InternalState.Disposed);
            if (priorState == InternalState.Disposed)
            {
                // No cleanup required.
                return;
            }
            _cts.Cancel(false);
            _cts.Dispose();
            if (_innerWebSocket.IsValid)
            {
                _innerWebSocket.Dispose();
            }
        }

        private void ThrowIfNotConnected()
        {
            if ((InternalState)_state == InternalState.Disposed)
            {
                throw new ObjectDisposedException(GetType().FullName);
            }
            else if ((InternalState)_state != InternalState.Connected)
            {
                throw new InvalidOperationException(SR.net_WebSockets_NotConnected);
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Linq; using System.Reflection; using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Build.Construction; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Host; using Microsoft.CodeAnalysis.Host.Mef; using Microsoft.CodeAnalysis.LanguageServices; using Roslyn.Utilities; namespace Microsoft.CodeAnalysis.MSBuild { /// <summary> /// An API for loading msbuild project files. /// </summary> public class MSBuildProjectLoader { // the workspace that the projects and solutions are intended to be loaded into. private readonly Workspace _workspace; // used to protect access to the following mutable state private readonly NonReentrantLock _dataGuard = new NonReentrantLock(); private ImmutableDictionary<string, string> _properties; private readonly Dictionary<string, string> _extensionToLanguageMap = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); /// <summary> /// Create a new instance of an <see cref="MSBuildProjectLoader"/>. /// </summary> public MSBuildProjectLoader(Workspace workspace, ImmutableDictionary<string, string> properties = null) { _workspace = workspace; _properties = properties ?? ImmutableDictionary<string, string>.Empty; } /// <summary> /// The MSBuild properties used when interpreting project files. /// These are the same properties that are passed to msbuild via the /property:&lt;n&gt;=&lt;v&gt; command line argument. /// </summary> public ImmutableDictionary<string, string> Properties { get { return _properties; } } /// <summary> /// Determines if metadata from existing output assemblies is loaded instead of opening referenced projects. 
/// If the referenced project is already opened, the metadata will not be loaded. /// If the metadata assembly cannot be found the referenced project will be opened instead. /// </summary> public bool LoadMetadataForReferencedProjects { get; set; } = false; /// <summary> /// Determines if unrecognized projects are skipped when solutions or projects are opened. /// /// A project is unrecognized if it either has /// a) an invalid file path, /// b) a non-existent project file, /// c) has an unrecognized file extension or /// d) a file extension associated with an unsupported language. /// /// If unrecognized projects cannot be skipped a corresponding exception is thrown. /// </summary> public bool SkipUnrecognizedProjects { get; set; } = true; /// <summary> /// Associates a project file extension with a language name. /// </summary> public void AssociateFileExtensionWithLanguage(string projectFileExtension, string language) { if (language == null) { throw new ArgumentNullException(nameof(language)); } if (projectFileExtension == null) { throw new ArgumentNullException(nameof(projectFileExtension)); } using (_dataGuard.DisposableWait()) { _extensionToLanguageMap[projectFileExtension] = language; } } private const string SolutionDirProperty = "SolutionDir"; private void SetSolutionProperties(string solutionFilePath) { // When MSBuild is building an individual project, it doesn't define $(SolutionDir). // However when building an .sln file, or when working inside Visual Studio, // $(SolutionDir) is defined to be the directory where the .sln file is located. // Some projects out there rely on $(SolutionDir) being set (although the best practice is to // use MSBuildProjectDirectory which is always defined). 
if (!string.IsNullOrEmpty(solutionFilePath)) { string solutionDirectory = Path.GetDirectoryName(solutionFilePath); if (!solutionDirectory.EndsWith(@"\", StringComparison.Ordinal)) { solutionDirectory += @"\"; } if (Directory.Exists(solutionDirectory)) { _properties = _properties.SetItem(SolutionDirProperty, solutionDirectory); } } } /// <summary> /// Loads the <see cref="SolutionInfo"/> for the specified solution file, including all projects referenced by the solution file and /// all the projects referenced by the project files. /// </summary> public async Task<SolutionInfo> LoadSolutionInfoAsync( string solutionFilePath, CancellationToken cancellationToken = default(CancellationToken)) { if (solutionFilePath == null) { throw new ArgumentNullException(nameof(solutionFilePath)); } var absoluteSolutionPath = this.GetAbsoluteSolutionPath(solutionFilePath, Directory.GetCurrentDirectory()); using (_dataGuard.DisposableWait(cancellationToken)) { this.SetSolutionProperties(absoluteSolutionPath); } VersionStamp version = default(VersionStamp); Microsoft.Build.Construction.SolutionFile solutionFile = Microsoft.Build.Construction.SolutionFile.Parse(absoluteSolutionPath); var reportMode = this.SkipUnrecognizedProjects ? ReportMode.Log : ReportMode.Throw; // a list to accumulate all the loaded projects var loadedProjects = new LoadState(null); // load all the projects foreach (var project in solutionFile.ProjectsInOrder) { cancellationToken.ThrowIfCancellationRequested(); if (project.ProjectType != SolutionProjectType.SolutionFolder) { var projectAbsolutePath = TryGetAbsolutePath(project.AbsolutePath, reportMode); if (projectAbsolutePath != null) { if (TryGetLoaderFromProjectPath(projectAbsolutePath, reportMode, out var loader)) { // projects get added to 'loadedProjects' as side-effect // never prefer metadata when loading solution, all projects get loaded if they can. 
var tmp = await GetOrLoadProjectAsync(projectAbsolutePath, loader, preferMetadata: false, loadedProjects: loadedProjects, cancellationToken: cancellationToken).ConfigureAwait(false); } } } } // construct workspace from loaded project infos return SolutionInfo.Create(SolutionId.CreateNewId(debugName: absoluteSolutionPath), version, absoluteSolutionPath, loadedProjects.Projects); } internal string GetAbsoluteSolutionPath(string path, string baseDirectory) { string absolutePath; try { absolutePath = GetAbsolutePath(path, baseDirectory); } catch (Exception) { throw new InvalidOperationException(string.Format(WorkspacesResources.Invalid_solution_file_path_colon_0, path)); } if (!File.Exists(absolutePath)) { throw new FileNotFoundException(string.Format(WorkspacesResources.Solution_file_not_found_colon_0, absolutePath)); } return absolutePath; } /// <summary> /// Loads the <see cref="ProjectInfo"/> from the specified project file and all referenced projects. /// The first <see cref="ProjectInfo"/> in the result corresponds to the specified project file. 
/// </summary>
public async Task<ImmutableArray<ProjectInfo>> LoadProjectInfoAsync(
    string projectFilePath,
    ImmutableDictionary<string, ProjectId> projectPathToProjectIdMap = null,
    CancellationToken cancellationToken = default(CancellationToken))
{
    if (projectFilePath == null)
    {
        throw new ArgumentNullException(nameof(projectFilePath));
    }

    // Both helpers use ReportMode.Throw, so a bad path or unknown loader raises here.
    this.TryGetAbsoluteProjectPath(projectFilePath, Directory.GetCurrentDirectory(), ReportMode.Throw, out var fullPath);
    this.TryGetLoaderFromProjectPath(projectFilePath, ReportMode.Throw, out var loader);

    var loadedProjects = new LoadState(projectPathToProjectIdMap);
    var id = await this.LoadProjectAsync(fullPath, loader, this.LoadMetadataForReferencedProjects, loadedProjects, cancellationToken).ConfigureAwait(false);

    // Projects were accumulated depth-first; reversing puts the requested project first.
    var result = loadedProjects.Projects.Reverse().ToImmutableArray();
    Debug.Assert(result[0].Id == id);
    return result;
}

/// <summary>
/// Accumulates the set of projects loaded during one load operation and maps
/// project file paths to stable <see cref="ProjectId"/>s.
/// </summary>
private class LoadState
{
    private Dictionary<ProjectId, ProjectInfo> _projectIdToProjectInfoMap = new Dictionary<ProjectId, ProjectInfo>();

    /// <summary>
    /// Used to memoize results of <see cref="ProjectAlreadyReferencesProject"/> calls.
    /// Reset any time internal state is changed.
    /// </summary>
    private Dictionary<ProjectId, Dictionary<ProjectId, bool>> _projectAlreadyReferencesProjectResultCache = new Dictionary<ProjectId, Dictionary<ProjectId, bool>>();

    // Path comparison uses the platform-appropriate comparer (PathUtilities.Comparer).
    private readonly Dictionary<string, ProjectId> _projectPathToProjectIdMap = new Dictionary<string, ProjectId>(PathUtilities.Comparer);

    public LoadState(IReadOnlyDictionary<string, ProjectId> projectPathToProjectIdMap)
    {
        if (projectPathToProjectIdMap != null)
        {
            _projectPathToProjectIdMap.AddRange(projectPathToProjectIdMap);
        }
    }

    public void Add(ProjectInfo info)
    {
        _projectIdToProjectInfoMap.Add(info.Id, info);
        //Memoized results of ProjectAlreadyReferencesProject may no longer be correct;
        //reset the cache.
_projectAlreadyReferencesProjectResultCache.Clear();
    }

    /// <summary>
    /// Returns true if the project identified by <paramref name="fromProject"/> has a reference (even indirectly)
    /// on the project identified by <paramref name="targetProject"/>.
    /// </summary>
    public bool ProjectAlreadyReferencesProject(ProjectId fromProject, ProjectId targetProject)
    {
        if ( !_projectAlreadyReferencesProjectResultCache.TryGetValue(fromProject, out var fromProjectMemo))
        {
            fromProjectMemo = new Dictionary<ProjectId, bool>();
            _projectAlreadyReferencesProjectResultCache.Add(fromProject, fromProjectMemo);
        }

        if ( !fromProjectMemo.TryGetValue(targetProject, out var answer))
        {
            // Recursive DFS over the reference graph; memoized per (from, target) pair.
            answer =
                _projectIdToProjectInfoMap.TryGetValue(fromProject, out var info) &&
                info.ProjectReferences.Any(pr =>
                    pr.ProjectId == targetProject ||
                    ProjectAlreadyReferencesProject(pr.ProjectId, targetProject)
                );
            fromProjectMemo.Add(targetProject, answer);
        }

        return answer;
    }

    public IEnumerable<ProjectInfo> Projects
    {
        get { return _projectIdToProjectInfoMap.Values; }
    }

    // Returns null when the path has not been seen yet.
    public ProjectId GetProjectId(string fullProjectPath)
    {
        _projectPathToProjectIdMap.TryGetValue(fullProjectPath, out var id);
        return id;
    }

    public ProjectId GetOrCreateProjectId(string fullProjectPath)
    {
        if (!_projectPathToProjectIdMap.TryGetValue(fullProjectPath, out var id))
        {
            id = ProjectId.CreateNewId(debugName: fullProjectPath);
            _projectPathToProjectIdMap.Add(fullProjectPath, id);
        }

        return id;
    }
}

// Returns the already-loaded project's id when present; otherwise loads it (and, transitively,
// its references) into 'loadedProjects'.
private async Task<ProjectId> GetOrLoadProjectAsync(string projectFilePath, IProjectFileLoader loader, bool preferMetadata, LoadState loadedProjects, CancellationToken cancellationToken)
{
    var projectId = loadedProjects.GetProjectId(projectFilePath);
    if (projectId == null)
    {
        projectId = await this.LoadProjectAsync(projectFilePath, loader, preferMetadata, loadedProjects, cancellationToken).ConfigureAwait(false);
    }

    return projectId;
}

// Loads one project file, resolves its documents and references, and adds the resulting
// ProjectInfo to 'loadedProjects' as a side effect. Returns the project's id.
private async Task<ProjectId> LoadProjectAsync(string projectFilePath, IProjectFileLoader loader, bool preferMetadata, LoadState
loadedProjects, CancellationToken cancellationToken)
{
    Debug.Assert(projectFilePath != null);
    Debug.Assert(loader != null);

    var projectId = loadedProjects.GetOrCreateProjectId(projectFilePath);
    var projectName = Path.GetFileNameWithoutExtension(projectFilePath);

    var projectFile = await loader.LoadProjectFileAsync(projectFilePath, _properties, cancellationToken).ConfigureAwait(false);
    if (projectFile.ErrorMessage != null)
    {
        ReportFailure(ReportMode.Log, GetMsbuildFailedMessage(projectFilePath, projectFile.ErrorMessage));

        // if we failed during load there won't be any project file info, so bail early with empty project.
        loadedProjects.Add(CreateEmptyProjectInfo(projectId, projectFilePath, loader.Language));
        return projectId;
    }

    var projectFileInfo1 = await projectFile.GetProjectFileInfoAsync(cancellationToken).ConfigureAwait(false);
    var projectFileInfo = projectFileInfo1 as ProjectFileInfo;
    if (projectFileInfo == null)
    {
        // FIX: the 'as' cast above was dereferenced unconditionally; if GetProjectFileInfoAsync
        // ever yields something that is not a ProjectFileInfo this threw NullReferenceException.
        // Treat it like a load failure, mirroring the ErrorMessage path above.
        ReportFailure(ReportMode.Log, GetMsbuildFailedMessage(projectFilePath, "project file info was unavailable"));
        loadedProjects.Add(CreateEmptyProjectInfo(projectId, projectFilePath, loader.Language));
        return projectId;
    }

    if (projectFileInfo.ErrorMessage != null)
    {
        // Non-fatal: the load produced partial info plus an error; log and continue.
        ReportFailure(ReportMode.Log, GetMsbuildFailedMessage(projectFilePath, projectFileInfo.ErrorMessage));
    }

    var projectDirectory = Path.GetDirectoryName(projectFilePath);
    var outputFilePath = projectFileInfo.OutputFilePath;
    var outputDirectory = Path.GetDirectoryName(outputFilePath);

    var version = GetProjectVersion(projectFilePath);

    // translate information from command line args
    var commandLineParser = _workspace.Services.GetLanguageServices(loader.Language).GetService<ICommandLineParserService>();
    var metadataService = _workspace.Services.GetService<IMetadataService>();
    var analyzerService = _workspace.Services.GetService<IAnalyzerService>();

    var commandLineArgs = commandLineParser.Parse(
        arguments: projectFileInfo.CommandLineArgs,
        baseDirectory: projectDirectory,
        isInteractive: false,
        sdkDirectory: RuntimeEnvironment.GetRuntimeDirectory());

    // we only support file paths in /r command line arguments
    var resolver = new WorkspaceMetadataFileReferenceResolver(metadataService, new RelativePathResolver(commandLineArgs.ReferencePaths,
commandLineArgs.BaseDirectory));

IEnumerable<MetadataReference> metadataReferences = null;
IEnumerable<AnalyzerReference> analyzerReferences = null;
try
{
    metadataReferences = commandLineArgs.ResolveMetadataReferences(resolver);

    var analyzerLoader = analyzerService.GetLoader();
    foreach (var path in commandLineArgs.AnalyzerReferences.Select(r => r.FilePath))
    {
        analyzerLoader.AddDependencyLocation(path);
    }
    analyzerReferences = commandLineArgs.ResolveAnalyzerReferences(analyzerLoader);
}
catch (Exception ex)
{
    // NOTE(review): deliberately best-effort — resolution failures leave the reference
    // lists null and only write to the console. Consider routing through
    // _workspace.OnWorkspaceFailed instead of Console.WriteLine.
    Console.WriteLine($"non fatal ResolveMetadataReferences fail {ex.Message} \n {ex.StackTrace}");
}

var defaultEncoding = commandLineArgs.Encoding;

// docs & additional docs
var docFileInfos = projectFileInfo.Documents.ToImmutableArrayOrEmpty();
var additionalDocFileInfos = projectFileInfo.AdditionalDocuments.ToImmutableArrayOrEmpty();

var docs = new List<DocumentInfo>();
try
{
    // check for duplicate documents
    var allDocFileInfos = docFileInfos.AddRange(additionalDocFileInfos);
    CheckDocuments(allDocFileInfos, projectFilePath, projectId);

    foreach (var docFileInfo in docFileInfos)
    {
        GetDocumentNameAndFolders(docFileInfo.LogicalPath, out var name, out var folders);

        docs.Add(DocumentInfo.Create(
            DocumentId.CreateNewId(projectId, debugName: docFileInfo.FilePath),
            name,
            folders,
            projectFile.GetSourceCodeKind(docFileInfo.FilePath),
            new FileTextLoader(docFileInfo.FilePath, defaultEncoding),
            docFileInfo.FilePath,
            docFileInfo.IsGenerated));
    }
}
catch (Exception ex)
{
    // NOTE(review): a failure here silently yields a project with a partial document
    // list; the exception is only echoed to the console.
    Console.WriteLine($"non fatal projectFileInfo.Documents fail {ex.Message}");
}

var additionalDocs = new List<DocumentInfo>();
foreach (var docFileInfo in additionalDocFileInfos)
{
    GetDocumentNameAndFolders(docFileInfo.LogicalPath, out var name, out var folders);

    // Additional documents (e.g. non-source items) are always treated as Regular kind.
    additionalDocs.Add(DocumentInfo.Create(
        DocumentId.CreateNewId(projectId, debugName: docFileInfo.FilePath),
        name,
        folders,
        SourceCodeKind.Regular,
        new FileTextLoader(docFileInfo.FilePath, defaultEncoding),
        docFileInfo.FilePath,
docFileInfo.IsGenerated));
}

// project references
var resolvedReferences = await ResolveProjectReferencesAsync(
    projectId, projectFilePath, projectFileInfo.ProjectReferences, preferMetadata, loadedProjects, cancellationToken).ConfigureAwait(false);

// add metadata references for project refs converted to metadata refs
// (metadataReferences may be null if resolution failed earlier — then the converted refs are dropped too)
if (metadataReferences != null)
    metadataReferences = metadataReferences.Concat(resolvedReferences.MetadataReferences);

// if the project file loader couldn't figure out an assembly name, make one using the project's file path.
var assemblyName = commandLineArgs.CompilationName;
if (string.IsNullOrWhiteSpace(assemblyName))
{
    assemblyName = GetAssemblyNameFromProjectPath(projectFilePath);
}

// make sure that doc-comments at least get parsed.
var parseOptions = commandLineArgs.ParseOptions;
if (parseOptions.DocumentationMode == DocumentationMode.None)
{
    parseOptions = parseOptions.WithDocumentationMode(DocumentationMode.Parse);
}

// add all the extra options that are really behavior overrides
var compOptions = commandLineArgs.CompilationOptions
    .WithXmlReferenceResolver(new XmlFileResolver(projectDirectory))
    .WithSourceReferenceResolver(new SourceFileResolver(ImmutableArray<string>.Empty, projectDirectory))
    // TODO: https://github.com/dotnet/roslyn/issues/4967
    .WithMetadataReferenceResolver(new WorkspaceMetadataFileReferenceResolver(metadataService, new RelativePathResolver(ImmutableArray<string>.Empty, projectDirectory)))
    .WithStrongNameProvider(new DesktopStrongNameProvider(ImmutableArray.Create(projectDirectory, outputFilePath)))
    .WithAssemblyIdentityComparer(DesktopAssemblyIdentityComparer.Default);

// Record the finished project; callers retrieve it via loadedProjects.Projects.
loadedProjects.Add(
    ProjectInfo.Create(
        projectId,
        version,
        projectName,
        assemblyName,
        loader.Language,
        projectFilePath,
        outputFilePath,
        compilationOptions: compOptions,
        parseOptions: parseOptions,
        documents: docs,
        projectReferences: resolvedReferences.ProjectReferences,
        metadataReferences: metadataReferences,
        analyzerReferences: analyzerReferences,
additionalDocuments: additionalDocs,
        isSubmission: false,
        hostObjectType: null));

return projectId;
}

// Builds the user-facing failure message for an MSBuild load problem.
// NOTE(review): these look like resource keys used as literal format strings —
// presumably placeholders for WorkspacesResources entries; verify before shipping.
private static string GetMsbuildFailedMessage(string projectFilePath, string message)
{
    if (string.IsNullOrWhiteSpace(message))
    {
        return string.Format("Msbuild_failed_when_processing_the_file_0 {0}", projectFilePath);
    }
    else
    {
        return string.Format( "Msbuild_failed_when_processing_the_file_{0}_with_message_{1}", projectFilePath, message);
    }
}

// Version-stamps a project by its file's last write time (UTC); falls back to a fresh stamp
// when the file is missing.
private static VersionStamp GetProjectVersion(string projectFilePath)
{
    if (!string.IsNullOrEmpty(projectFilePath) && File.Exists(projectFilePath))
    {
        return VersionStamp.Create(File.GetLastWriteTimeUtc(projectFilePath));
    }
    else
    {
        return VersionStamp.Create();
    }
}

// Produces a placeholder ProjectInfo (no documents or references) for a project that
// failed to load, using the language's default parse/compilation options.
private ProjectInfo CreateEmptyProjectInfo(ProjectId projectId, string projectFilePath, string language)
{
    var languageService = _workspace.Services.GetLanguageServices(language);
    var parseOptions = languageService.GetService<ISyntaxTreeFactoryService>().GetDefaultParseOptions();
    var compilationOptions = languageService.GetService<ICompilationFactoryService>().GetDefaultCompilationOptions();
    var projectName = Path.GetFileNameWithoutExtension(projectFilePath);
    var version = GetProjectVersion(projectFilePath);

    return ProjectInfo.Create(
        projectId,
        version,
        projectName,
        assemblyName: GetAssemblyNameFromProjectPath(projectFilePath),
        language: language,
        filePath: projectFilePath,
        outputFilePath: string.Empty,
        compilationOptions: compilationOptions,
        parseOptions: parseOptions,
        documents: SpecializedCollections.EmptyEnumerable<DocumentInfo>(),
        projectReferences: SpecializedCollections.EmptyEnumerable<ProjectReference>(),
        metadataReferences: SpecializedCollections.EmptyEnumerable<MetadataReference>(),
        analyzerReferences: SpecializedCollections.EmptyEnumerable<AnalyzerReference>(),
        additionalDocuments: SpecializedCollections.EmptyEnumerable<DocumentInfo>(),
        isSubmission: false,
        hostObjectType: null);
}

// Derives a usable assembly name from the project file name.
private static string GetAssemblyNameFromProjectPath(string projectFilePath)
{
    var assemblyName = Path.GetFileNameWithoutExtension(projectFilePath);

    // if this is still unreasonable, use a fixed name.
    if (string.IsNullOrWhiteSpace(assemblyName))
    {
        assemblyName = "assembly";
    }

    return assemblyName;
}

// Both '/' and '\' are accepted as folder separators in logical document paths.
private static readonly char[] s_directorySplitChars = new char[] { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar };

// Splits a document's logical path into its display name (last segment) and
// containing folder chain; an unsplittable path becomes the name with no folders.
private static void GetDocumentNameAndFolders(string logicalPath, out string name, out ImmutableArray<string> folders)
{
    var pathNames = logicalPath.Split(s_directorySplitChars, StringSplitOptions.RemoveEmptyEntries);
    if (pathNames.Length > 0)
    {
        if (pathNames.Length > 1)
        {
            folders = pathNames.Take(pathNames.Length - 1).ToImmutableArray();
        }
        else
        {
            folders = ImmutableArray.Create<string>();
        }

        name = pathNames[pathNames.Length - 1];
    }
    else
    {
        name = logicalPath;
        folders = ImmutableArray.Create<string>();
    }
}

// Emits a workspace warning for every document path that appears more than once
// in the project; duplicates are reported, not removed.
private void CheckDocuments(IEnumerable<DocumentFileInfo> docs, string projectFilePath, ProjectId projectId)
{
    var paths = new HashSet<string>();
    foreach (var doc in docs)
    {
        if (paths.Contains(doc.FilePath))
        {
            _workspace.OnWorkspaceFailed(new ProjectDiagnostic(WorkspaceDiagnosticKind.Warning, string.Format(WorkspacesResources.Duplicate_source_file_0_in_project_1, doc.FilePath, projectFilePath), projectId));
        }

        paths.Add(doc.FilePath);
    }
}

// Holds the outcome of reference resolution: references kept as project refs
// and those converted to (or supplied as) metadata refs.
private class ResolvedReferences
{
    public readonly List<ProjectReference> ProjectReferences = new List<ProjectReference>();
    public readonly List<MetadataReference> MetadataReferences = new List<MetadataReference>();
}

// Resolves each project-to-project reference either to a loaded project or to its
// output assembly's metadata, loading referenced projects on demand.
private async Task<ResolvedReferences> ResolveProjectReferencesAsync(
    ProjectId thisProjectId, string thisProjectPath, IReadOnlyList<ProjectFileReference> projectFileReferences, bool preferMetadata, LoadState loadedProjects, CancellationToken cancellationToken)
{
    var resolvedReferences = new ResolvedReferences();
    var reportMode = this.SkipUnrecognizedProjects ?
ReportMode.Log : ReportMode.Throw;

foreach (var projectFileReference in projectFileReferences)
{
    if (TryGetAbsoluteProjectPath(projectFileReference.Path, Path.GetDirectoryName(thisProjectPath), reportMode, out var fullPath))
    {
        // if the project is already loaded, then just reference the one we have
        var existingProjectId = loadedProjects.GetProjectId(fullPath);
        if (existingProjectId != null)
        {
            resolvedReferences.ProjectReferences.Add(new ProjectReference(existingProjectId, projectFileReference.Aliases));
            continue;
        }

        // Probe for a loader without reporting; absence steers us toward metadata.
        TryGetLoaderFromProjectPath(fullPath, ReportMode.Ignore, out var loader);

        // get metadata if preferred or if loader is unknown
        if (preferMetadata || loader == null)
        {
            var projectMetadata = await this.GetProjectMetadata(fullPath, projectFileReference.Aliases, _properties, cancellationToken).ConfigureAwait(false);
            if (projectMetadata != null)
            {
                resolvedReferences.MetadataReferences.Add(projectMetadata);
                continue;
            }
        }

        // must load, so we really need loader
        if (TryGetLoaderFromProjectPath(fullPath, reportMode, out loader))
        {
            // load the project
            var projectId = await this.GetOrLoadProjectAsync(fullPath, loader, preferMetadata, loadedProjects, cancellationToken).ConfigureAwait(false);

            // If that other project already has a reference on us, this will cause a circularity.
            // This check doesn't need to be in the "already loaded" path above, since in any circularity this path
            // must be taken at least once.
if (loadedProjects.ProjectAlreadyReferencesProject(projectId, targetProject: thisProjectId))
            {
                // We'll try to make this metadata if we can
                // (breaking the cycle by referencing the output assembly instead;
                //  if no metadata exists the reference is silently dropped).
                var projectMetadata = await this.GetProjectMetadata(fullPath, projectFileReference.Aliases, _properties, cancellationToken).ConfigureAwait(false);
                if (projectMetadata != null)
                {
                    resolvedReferences.MetadataReferences.Add(projectMetadata);
                }

                continue;
            }
            else
            {
                resolvedReferences.ProjectReferences.Add(new ProjectReference(projectId, projectFileReference.Aliases));
                continue;
            }
        }
    }
    else
    {
        // Path couldn't be resolved; fall through using the raw reference path.
        fullPath = projectFileReference.Path;
    }

    // cannot find metadata and project cannot be loaded, so leave a project reference to a non-existent project.
    var id = loadedProjects.GetOrCreateProjectId(fullPath);
    resolvedReferences.ProjectReferences.Add(new ProjectReference(id, projectFileReference.Aliases));
}

return resolvedReferences;
}

/// <summary>
/// Gets a MetadataReference to a project's output assembly.
/// </summary>
private async Task<MetadataReference> GetProjectMetadata(string projectFilePath, ImmutableArray<string> aliases, IDictionary<string, string> globalProperties, CancellationToken cancellationToken)
{
    // use loader service to determine output file for project if possible
    string outputFilePath = null;

    try
    {
        outputFilePath = await ProjectFileLoader.GetOutputFilePathAsync(projectFilePath, globalProperties, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception e)
    {
        // Failure to determine the output path is reported but non-fatal; we return null below.
        _workspace.OnWorkspaceFailed( new WorkspaceDiagnostic(WorkspaceDiagnosticKind.Failure, e.Message));
    }

    if (outputFilePath != null && File.Exists(outputFilePath))
    {
        if (Workspace.TestHookStandaloneProjectsDoNotHoldReferences)
        {
            // Test hook: load the image into memory so the file on disk is not locked.
            var documentationService = _workspace.Services.GetService<IDocumentationProviderService>();
            var docProvider = documentationService.GetDocumentationProvider(outputFilePath);
            var metadata = AssemblyMetadata.CreateFromImage(File.ReadAllBytes(outputFilePath));

            return metadata.GetReference( documentation: docProvider, aliases: aliases, display:
outputFilePath);
        }
        else
        {
            var metadataService = _workspace.Services.GetService<IMetadataService>();
            return metadataService.GetReference(outputFilePath, new MetadataReferenceProperties(MetadataImageKind.Assembly, aliases));
        }
    }

    // No output assembly available.
    return null;
}

// Normalizes 'path' and verifies the project file exists; returns null (after
// reporting per 'mode') when the path is invalid or missing.
private string TryGetAbsolutePath(string path, ReportMode mode)
{
    try
    {
        path = Path.GetFullPath(path);
    }
    catch (Exception)
    {
        ReportFailure(mode, string.Format(WorkspacesResources.Invalid_project_file_path_colon_0, path));
        return null;
    }

    if (!File.Exists(path))
    {
        ReportFailure( mode, string.Format(WorkspacesResources.Project_file_not_found_colon_0, path), msg => new FileNotFoundException(msg));
        return null;
    }

    return path;
}

internal bool TryGetLoaderFromProjectPath(string projectFilePath, out IProjectFileLoader loader)
{
    return TryGetLoaderFromProjectPath(projectFilePath, ReportMode.Ignore, out loader);
}

// Finds an IProjectFileLoader for the project's file extension, falling back to a
// hard-wired C# loader factory when nothing else matches.
private bool TryGetLoaderFromProjectPath(string projectFilePath, ReportMode mode, out IProjectFileLoader loader)
{
    using (_dataGuard.DisposableWait())
    {
        // otherwise try to figure it out from extension
        var extension = Path.GetExtension(projectFilePath);
        if (extension.Length > 0 && extension[0] == '.')
        {
            extension = extension.Substring(1);
        }

        if (_extensionToLanguageMap.TryGetValue(extension, out var language))
        {
            if (_workspace.Services.SupportedLanguages.Contains(language))
            {
                loader = _workspace.Services.GetLanguageServices(language).GetService<IProjectFileLoader>();
            }
            else
            {
                loader = null;
                this.ReportFailure(mode, string.Format(WorkspacesResources.Cannot_open_project_0_because_the_language_1_is_not_supported, projectFilePath, language));
                return false;
            }
        }
        else
        {
            loader = ProjectFileLoader.GetLoaderForProjectFileExtension(_workspace, extension);

            // FIX: file extensions are case-insensitive on Windows; the original ordinal,
            // case-sensitive Equals("csproj") rejected e.g. ".CSPROJ" before the C# fallback below.
            if (loader == null && !extension.Equals("csproj", StringComparison.OrdinalIgnoreCase))
            {
                // this.ReportFailure(mode, string.Format(WorkspacesResources.Cannot_open_project_0_because_the_file_extension_1_is_not_associated_with_a_language, projectFilePath, Path.GetExtension(projectFilePath)));
                return false;
            }
        }

        // since we have both C# and VB loaders in this same library, it no longer indicates whether we have full language support available.
        if (loader != null)
        {
            language = loader.Language;

            // check for command line parser existing... if not then error.
            var commandLineParser = _workspace.Services.GetLanguageServices(language).GetService<ICommandLineParserService>();
            if (commandLineParser == null)
            {
                loader = null;
                //this.ReportFailure(mode,
                //    string.Format(WorkspacesResources.Cannot_open_project_0_because_the_language_1_is_not_supported, projectFilePath, language));
                return false;
            }
        }

        if (loader != null)
            return true;
    }

    if (loader == null)
    {
        // Fallback: construct the C# loader factory directly.
        // NOTE(review): 'factory as IProjectFileLoader' only succeeds if the factory type
        // itself implements IProjectFileLoader; otherwise 'service' (the created language
        // service) looks like the intended loader — confirm against the factory's definition.
        var factory = new Microsoft.CodeAnalysis.CSharp.CSharpProjectFileLoaderFactory();
        HostLanguageServices languageServices = CommandLineHack.LanguageServices ?? default(HostLanguageServices);
        ILanguageService service = factory.CreateLanguageService(languageServices);
        // [ExportLanguageServiceFactory(typeof(IProjectFileLoader), LanguageNames.CSharp)]
        // language = factory.Language;
        loader = factory as IProjectFileLoader;
    }

    return (loader != null);
}

// Try-pattern variant of GetAbsolutePath that also verifies the file exists;
// reports per 'mode' instead of throwing when mode allows.
private bool TryGetAbsoluteProjectPath(string path, string baseDirectory, ReportMode mode, out string absolutePath)
{
    try
    {
        absolutePath = GetAbsolutePath(path, baseDirectory);
    }
    catch (Exception)
    {
        ReportFailure(mode, string.Format(WorkspacesResources.Invalid_project_file_path_colon_0, path));
        absolutePath = null;
        return false;
    }

    if (!File.Exists(absolutePath))
    {
        ReportFailure( mode, string.Format(WorkspacesResources.Project_file_not_found_colon_0, absolutePath), msg => new FileNotFoundException(msg));
        return false;
    }

    return true;
}

private static string GetAbsolutePath(string path, string baseDirectoryPath)
{
    return Path.GetFullPath(FileUtilities.ResolveRelativePath(path, baseDirectoryPath) ??
path);
}

// How a recoverable load problem should be surfaced.
private enum ReportMode
{
    Throw,
    Log,
    Ignore
}

// Dispatches a failure message according to 'mode': throw (optionally via a custom
// exception factory), log to the workspace, or swallow.
private void ReportFailure(ReportMode mode, string message, Func<string, Exception> createException = null)
{
    switch (mode)
    {
        case ReportMode.Throw:
            if (createException != null)
            {
                throw createException(message);
            }
            else
            {
                throw new InvalidOperationException(message);
            }

        case ReportMode.Log:
            _workspace.OnWorkspaceFailed(new WorkspaceDiagnostic(WorkspaceDiagnosticKind.Failure, message));
            break;

        case ReportMode.Ignore:
        default:
            break;
    }
}
}
}
//------------------------------------------------------------------------------
// <copyright file="XmlElement.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <owner current="true" primary="true">[....]</owner>
//------------------------------------------------------------------------------

using System;
using System.Xml.Schema;
using System.Xml.XPath;
using System.Collections;
using System.Diagnostics;
using System.Globalization;

namespace System.Xml
{
    // Represents an element.
    public class XmlElement : XmlLinkedNode
    {
        XmlName name;                       // interned (prefix, localName, ns) triple; also carries schema info
        XmlAttributeCollection attributes;  // lazily created on first access to Attributes
        XmlLinkedNode lastChild; // == this for empty elements otherwise it is the last child

        internal XmlElement( XmlName name, bool empty, XmlDocument doc ): base( doc ) {
            Debug.Assert(name!=null);
            this.parentNode = null;

            // Name validation is skipped while the document is being loaded
            // (the reader has already validated names).
            if ( !doc.IsLoading ) {
                XmlDocument.CheckName( name.Prefix );
                XmlDocument.CheckName( name.LocalName );
            }
            if (name.LocalName.Length == 0)
                throw new ArgumentException(Res.GetString(Res.Xdom_Empty_LocalName));

            this.name = name;
            if (empty) {
                // Sentinel: lastChild == this marks an empty element (see IsEmpty).
                this.lastChild = this;
            }
        }

        protected internal XmlElement( string prefix, string localName, string namespaceURI, XmlDocument doc )
        : this( doc.AddXmlName( prefix, localName, namespaceURI, null ), true, doc ) {
        }

        internal XmlName XmlName {
            get { return name; }
            set { name = value; }
        }

        // Creates a duplicate of this node.
public override XmlNode CloneNode(bool deep) {
    Debug.Assert( OwnerDocument != null );
    XmlDocument doc = OwnerDocument;

    // Temporarily flip IsLoading so CreateElement skips name re-validation
    // (the names on an existing element are already valid — see the constructor).
    bool OrigLoadingStatus = doc.IsLoading;
    doc.IsLoading = true;
    XmlElement element = doc.CreateElement( Prefix, LocalName, NamespaceURI );
    doc.IsLoading = OrigLoadingStatus;
    if ( element.IsEmpty != this.IsEmpty )
        element.IsEmpty = this.IsEmpty;

    if (HasAttributes) {
        foreach( XmlAttribute attr in Attributes ) {
            XmlAttribute newAttr = (XmlAttribute)(attr.CloneNode(true));
            // Preserve the "defaulted from DTD, not explicitly specified" flag.
            if (attr is XmlUnspecifiedAttribute && attr.Specified == false)
                ( ( XmlUnspecifiedAttribute )newAttr).SetSpecified(false);
            element.Attributes.InternalAppendAttribute( newAttr );
        }
    }
    if (deep)
        element.CopyChildren( doc, this, deep );

    return element;
}

// Gets the name of the node.
public override string Name {
    get { return name.Name;}
}

// Gets the name of the current node without the namespace prefix.
public override string LocalName {
    get { return name.LocalName;}
}

// Gets the namespace URI of this node.
public override string NamespaceURI {
    get { return name.NamespaceURI;}
}

// Gets or sets the namespace prefix of this node.
public override string Prefix {
    get { return name.Prefix;}
    // XmlName objects are interned per document, so a prefix change swaps the whole name.
    set { name = name.OwnerDocument.AddXmlName( value, LocalName, NamespaceURI, SchemaInfo ); }
}

// Gets the type of the current node.
public override XmlNodeType NodeType {
    get { return XmlNodeType.Element;}
}

public override XmlNode ParentNode {
    get { return this.parentNode; }
}

// Gets the XmlDocument that contains this node.
public override XmlDocument OwnerDocument {
    get {
        return name.OwnerDocument;
    }
}

internal override bool IsContainer {
    get { return true;}
}

//the function is provided only at Load time to speed up Load process
internal override XmlNode AppendChildForLoad(XmlNode newChild, XmlDocument doc) {
    XmlNodeChangedEventArgs args = doc.GetInsertEventArgsForLoad( newChild, this );

    if (args != null)
        doc.BeforeEvent( args );

    XmlLinkedNode newNode = (XmlLinkedNode)newChild;

    // Children form a circular singly-linked list; lastChild.next is the first child.
    if (lastChild == null
        || lastChild == this) { // if LastNode == null
        newNode.next = newNode;
        lastChild = newNode; // LastNode = newNode;
        newNode.SetParentForLoad(this);
    }
    else {
        XmlLinkedNode refNode = lastChild; // refNode = LastNode;
        newNode.next = refNode.next;
        refNode.next = newNode;
        lastChild = newNode; // LastNode = newNode;
        if (refNode.IsText
            && newNode.IsText) {
            // Adjacent text nodes are nested rather than parented individually.
            NestTextNodes(refNode, newNode);
        }
        else {
            newNode.SetParentForLoad(this);
        }
    }

    if (args != null)
        doc.AfterEvent( args );

    return newNode;
}

// Gets or sets whether the element does not have any children.
public bool IsEmpty {
    get {
        // lastChild == this is the "empty element" sentinel set by the constructor.
        return lastChild == this;
    }

    set {
        if (value) {
            if (lastChild != this) {
                RemoveAllChildren();
                lastChild = this;
            }
        }
        else {
            if (lastChild == this) {
                lastChild = null;
            }
        }
    }
}

internal override XmlLinkedNode LastNode {
    get {
        return lastChild == this ? null : lastChild;
    }

    set {
        lastChild = value;
    }
}

internal override bool IsValidChildType( XmlNodeType type ) {
    switch (type) {
        case XmlNodeType.Element:
        case XmlNodeType.Text:
        case XmlNodeType.EntityReference:
        case XmlNodeType.Comment:
        case XmlNodeType.Whitespace:
        case XmlNodeType.SignificantWhitespace:
        case XmlNodeType.ProcessingInstruction:
        case XmlNodeType.CDATA:
            return true;

        default:
            return false;
    }
}

// Gets a XmlAttributeCollection containing the list of attributes for this node.
public override XmlAttributeCollection Attributes {
    get {
        // Lazily created with double-checked locking on the document's lock object.
        if (attributes == null) {
            lock ( OwnerDocument.objLock ) {
                if ( attributes == null ) {
                    attributes = new XmlAttributeCollection(this);
                }
            }
        }

        return attributes;
    }
}

// Gets a value indicating whether the current node
// has any attributes.
public virtual bool HasAttributes {
    get {
        // Checks the field directly to avoid forcing creation of the collection.
        if ( this.attributes == null )
            return false;
        else
            return this.attributes.Count > 0;
    }
}

// Returns the value for the attribute with the specified name.
// Returns String.Empty (not null) when the attribute is absent, per DOM.
public virtual string GetAttribute(string name) {
    XmlAttribute attr = GetAttributeNode(name);
    if (attr != null)
        return attr.Value;
    return String.Empty;
}

// Sets the value of the attribute
// with the specified name.
public virtual void SetAttribute(string name, string value) {
    XmlAttribute attr = GetAttributeNode(name);
    if (attr == null) {
        attr = OwnerDocument.CreateAttribute(name);
        attr.Value = value;
        Attributes.InternalAppendAttribute( attr );
    }
    else {
        attr.Value = value;
    }
}

// Removes an attribute by name.
public virtual void RemoveAttribute(string name) {
    if (HasAttributes)
        Attributes.RemoveNamedItem(name);
}

// Returns the XmlAttribute with the specified name.
public virtual XmlAttribute GetAttributeNode(string name) {
    if (HasAttributes)
        return Attributes[name];
    return null;
}

// Adds the specified XmlAttribute.
public virtual XmlAttribute SetAttributeNode(XmlAttribute newAttr) {
    // An attribute can belong to at most one element.
    if ( newAttr.OwnerElement != null )
        throw new InvalidOperationException( Res.GetString(Res.Xdom_Attr_InUse) );
    return(XmlAttribute) Attributes.SetNamedItem(newAttr);
}

// Removes the specified XmlAttribute.
public virtual XmlAttribute RemoveAttributeNode(XmlAttribute oldAttr) {
    if (HasAttributes)
        return(XmlAttribute) Attributes.Remove(oldAttr);
    return null;
}

// Returns a XmlNodeList containing
// a list of all descendant elements that match the specified name.
public virtual XmlNodeList GetElementsByTagName(string name) {
    // XmlElementList is a live list: it reflects later tree mutations.
    return new XmlElementList( this, name );
}

//
// DOM Level 2
//

// Returns the value for the attribute with the specified LocalName and NamespaceURI.
// Returns String.Empty (not null) when the attribute is absent, per DOM.
public virtual string GetAttribute(string localName, string namespaceURI) {
    XmlAttribute attr = GetAttributeNode( localName, namespaceURI );
    if (attr != null)
        return attr.Value;
    return String.Empty;
}

// Sets the value of the attribute with the specified name
// and namespace.
public virtual string SetAttribute(string localName, string namespaceURI, string value) {
    XmlAttribute attr = GetAttributeNode( localName, namespaceURI );
    if (attr == null) {
        // Created with an empty prefix; serialization chooses/declares one as needed.
        attr = OwnerDocument.CreateAttribute( string.Empty, localName, namespaceURI );
        attr.Value = value;
        Attributes.InternalAppendAttribute( attr );
    }
    else {
        attr.Value = value;
    }

    return value;
}

// Removes an attribute specified by LocalName and NamespaceURI.
public virtual void RemoveAttribute(string localName, string namespaceURI) {
    //Debug.Assert(namespaceURI != null);
    RemoveAttributeNode( localName, namespaceURI );
}

// Returns the XmlAttribute with the specified LocalName and NamespaceURI.
public virtual XmlAttribute GetAttributeNode(string localName, string namespaceURI) {
    //Debug.Assert(namespaceURI != null);
    if (HasAttributes)
        return Attributes[ localName, namespaceURI ];
    return null;
}

// Adds the specified XmlAttribute.
// Unlike the name-only overload, this returns the existing node when present
// instead of throwing.
public virtual XmlAttribute SetAttributeNode(string localName, string namespaceURI) {
    XmlAttribute attr = GetAttributeNode( localName, namespaceURI );
    if (attr == null) {
        attr = OwnerDocument.CreateAttribute( string.Empty, localName, namespaceURI );
        Attributes.InternalAppendAttribute( attr );
    }
    return attr;
}

// Removes the XmlAttribute specified by LocalName and NamespaceURI.
public virtual XmlAttribute RemoveAttributeNode(string localName, string namespaceURI) {
    //Debug.Assert(namespaceURI != null);
    if (HasAttributes) {
        XmlAttribute attr = GetAttributeNode( localName, namespaceURI );
        // Attributes.Remove tolerates a null argument, so a miss simply returns null.
        Attributes.Remove( attr );
        return attr;
    }
    return null;
}

// Returns a XmlNodeList containing
// a list of all descendant elements that match the specified name.
public virtual XmlNodeList GetElementsByTagName(string localName, string namespaceURI) {
    //Debug.Assert(namespaceURI != null);
    // Live list: reflects later tree mutations.
    return new XmlElementList( this, localName, namespaceURI );
}

// Determines whether the current node has the specified attribute.
public virtual bool HasAttribute(string name) {
    return GetAttributeNode(name) != null;
}

// Determines whether the current node has the specified
// attribute from the specified namespace.
public virtual bool HasAttribute(string localName, string namespaceURI) {
    return GetAttributeNode(localName, namespaceURI) != null;
}

// Saves the current node to the specified XmlWriter.
public override void WriteTo(XmlWriter w) {
    if (GetType() == typeof(XmlElement)) {
        // Use the non-recursive version (for XmlElement only)
        // to avoid stack overflow on deeply nested documents.
        WriteElementTo(w, this);
    }
    else {
        // Use the (potentially) recursive version
        WriteStartElement(w);

        if (IsEmpty) {
            w.WriteEndElement();
        }
        else {
            WriteContentTo(w);
            w.WriteFullEndElement();
        }
    }
}

// This method is copied from System.Xml.Linq.ElementWriter.WriteElement but adapted to DOM
// Iterative pre-order traversal: descends into first children, then walks back up
// emitting end tags until a next sibling exists.
private static void WriteElementTo(XmlWriter writer, XmlElement e) {
    XmlNode root = e;
    XmlNode n = e;
    while (true) {
        e = n as XmlElement;

        // Only use the inlined write logic for XmlElement, not for derived classes
        if (e != null && e.GetType() == typeof(XmlElement)) {
            // Write the element
            e.WriteStartElement(writer);

            // Write the element's content
            if (e.IsEmpty) {
                // No content; use a short end element <a />
                writer.WriteEndElement();
            }
            else if (e.lastChild == null) {
                // No actual content; use a full end element <a></a>
                writer.WriteFullEndElement();
            }
            else {
                // There are child node(s); move to first child
                n = e.FirstChild;
                Debug.Assert(n != null);
                continue;
            }
        }
        else {
            // Use virtual dispatch (might recurse)
            n.WriteTo(writer);
        }

        // Go back to the parent after writing the last child
        while (n != root && n == n.ParentNode.LastChild) {
            n = n.ParentNode;
            Debug.Assert(n != null);
            writer.WriteFullEndElement();
        }

        if (n == root)
            break;

        n = n.NextSibling;
        Debug.Assert(n != null);
    }
}

// Writes the start of the element (and its attributes) to the specified writer
private void WriteStartElement(XmlWriter w) {
    w.WriteStartElement(Prefix, LocalName, NamespaceURI);

    if (HasAttributes) {
        XmlAttributeCollection attrs = Attributes;
        for (int i = 0; i < attrs.Count; i += 1) {
            XmlAttribute attr = attrs[i];
            attr.WriteTo(w);
        }
    }
}

// Saves all the children of the node to the specified XmlWriter.
// Writes each child node (not this element's own tags) to the writer, in
// document order, via the child's virtual WriteTo.
public override void WriteContentTo(XmlWriter w) {
    for (XmlNode node = FirstChild; node != null; node = node.NextSibling) {
        node.WriteTo(w);
    }
}

// Removes the attribute node with the specified index from the attribute collection.
// Returns the removed node, or null when the element has no attributes.
public virtual XmlNode RemoveAttributeAt(int i) {
    if (HasAttributes)
        return attributes.RemoveAt( i );
    return null;
}

// Removes all attributes from the element.
public virtual void RemoveAllAttributes() {
    if (HasAttributes) {
        attributes.RemoveAll();
    }
}

// Removes all the children and/or attributes
// of the current node.
public override void RemoveAll() {
    //remove all the children
    base.RemoveAll();
    //remove all the attributes
    RemoveAllAttributes();
}

// Removes only child nodes, leaving attributes intact (used by the InnerXml
// and InnerText setters below).
internal void RemoveAllChildren() {
    base.RemoveAll();
}

// NOTE(review): returns the internal 'name' field — presumably the interned
// XmlName also carries the post-validation schema info; confirm against the
// declaration of 'name' (outside this view).
public override IXmlSchemaInfo SchemaInfo {
    get {
        return name;
    }
}

// Gets or sets the markup representing just
// the children of this node.
// The setter discards existing children and re-parses the given fragment.
public override string InnerXml {
    get {
        return base.InnerXml;
    }
    set {
        RemoveAllChildren();
        XmlLoader loader = new XmlLoader();
        loader.LoadInnerXmlElement( this, value );
    }
}

// Gets or sets the concatenated values of the
// node and all its children.
public override string InnerText {
    get {
        return base.InnerText;
    }
    set {
        XmlLinkedNode linkedNode = LastNode;
        // Fast path: a single text-node child is updated in place instead of
        // being replaced. 'linkedNode.next == linkedNode' means the circular
        // sibling list has exactly one entry.
        if (linkedNode != null && //there is one child
            linkedNode.NodeType == XmlNodeType.Text && //which is text node
            linkedNode.next == linkedNode ) // and it is the only child
        {
            //this branch is for perf reason, event fired when TextNode.Value is changed.
            linkedNode.Value = value;
        }
        else {
            RemoveAllChildren();
            AppendChild( OwnerDocument.CreateTextNode( value ) );
        }
    }
}

// The last child of a parent has no next sibling; 'next' would otherwise
// wrap around the circular sibling list back to the first child.
public override XmlNode NextSibling {
    get {
        if (this.parentNode != null
            && this.parentNode.LastNode != this)
            return next;
        return null;
    }
}

internal override void SetParent(XmlNode node) {
    this.parentNode = node;
}

// XPath integration: this node maps to the Element node type.
internal override XPathNodeType XPNodeType {
    get {
        return XPathNodeType.Element;
    }
}

internal override string XPLocalName {
    get {
        return LocalName;
    }
}

// Returns the value of the named attribute for XPath evaluation.
// Namespace declarations (xmlns) are not exposed as attributes here;
// a missing attribute yields the empty string per XPath semantics.
internal override string GetXPAttribute( string localName, string ns ) {
    if ( ns == OwnerDocument.strReservedXmlns )
        return null;
    XmlAttribute attr = GetAttributeNode( localName, ns );
    if ( attr != null )
        return attr.Value;
    return string.Empty;
}
}
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
using FileHelpers.Options;

namespace FileHelpers
{
    /// <summary>
    /// Base class for all Field Types.
    /// Implements all the basic functionality of a field in a typed file.
    /// </summary>
    public abstract class FieldBase
        : ICloneable
    {
        #region " Private & Internal Fields "

        // --------------------------------------------------------------
        // WARNING !!!
        // Remember to add each of these fields to the clone method !!
        // --------------------------------------------------------------

        /// <summary>
        /// type of object to be created, eg DateTime
        /// </summary>
        public Type FieldType { get; private set; }

        /// <summary>
        /// Provider to convert to and from text
        /// </summary>
        public ConverterBase Converter { get; private set; }

        /// <summary>
        /// Number of extra characters used, delimiters and quote characters
        /// </summary>
        internal virtual int CharsToDiscard
        {
            get { return 0; }
        }

        /// <summary>
        /// Field type of an array or it is just fieldType.
        /// What actual object will be created
        /// </summary>
        internal Type FieldTypeInternal { get; set; }

        /// <summary>
        /// Is this field an array?
        /// </summary>
        public bool IsArray { get; private set; }

        /// <summary>
        /// Array must have this many entries
        /// </summary>
        public int ArrayMinLength { get; set; }

        /// <summary>
        /// Array may have this many entries, if equal to ArrayMinLength then
        /// it is a fixed length array
        /// </summary>
        public int ArrayMaxLength { get; set; }

        /// <summary>
        /// Seems to be duplicate of FieldTypeInternal except it is ONLY set
        /// for an array
        /// </summary>
        internal Type ArrayType { get; set; }

        /// <summary>
        /// Do we process this field but not store the value
        /// </summary>
        public bool Discarded { get; set; }

        /// <summary>
        /// Unused!
        /// </summary>
        internal bool TrailingArray { get; set; }

        /// <summary>
        /// Value to use if input is null or empty
        /// </summary>
        internal object NullValue { get; set; }

        /// <summary>
        /// Are we a simple string field we can just assign to
        /// </summary>
        internal bool IsStringField { get; set; }

        /// <summary>
        /// Details about the extraction criteria
        /// </summary>
        internal FieldInfo FieldInfo { get; set; }

        /// <summary>
        /// indicates whether we trim leading and/or trailing whitespace
        /// </summary>
        public TrimMode TrimMode { get; set; }

        /// <summary>
        /// Character to chop off front and / rear of the string
        /// </summary>
        internal char[] TrimChars { get; set; }

        /// <summary>
        /// The field may not be present on the input data (line not long enough)
        /// </summary>
        public bool IsOptional { get; set; }

        /// <summary>
        /// The next field along is optional, optimise processing next records
        /// </summary>
        internal bool NextIsOptional
        {
            get
            {
                if (Parent.FieldCount > ParentIndex + 1)
                    return Parent.Fields[ParentIndex + 1].IsOptional;
                return false;
            }
        }

        /// <summary>
        /// Am I the first field in an array list
        /// </summary>
        internal bool IsFirst
        {
            get { return ParentIndex == 0; }
        }

        /// <summary>
        /// Am I the last field in the array list
        /// </summary>
        internal bool IsLast
        {
            get { return ParentIndex == Parent.FieldCount - 1; }
        }

        /// <summary>
        /// Set from the FieldInNewLIneAtribute. This field begins on a new
        /// line of the file
        /// </summary>
        internal bool InNewLine { get; set; }

        /// <summary>
        /// Order of the field in the file layout
        /// </summary>
        internal int? FieldOrder { get; set; }

        /// <summary>
        /// Can null be assigned to this value type, for example not int or
        /// DateTime
        /// </summary>
        internal bool IsNullableType { get; private set; }

        /// <summary>
        /// Name of the field without extra characters (eg property)
        /// </summary>
        internal string FieldFriendlyName { get; set; }

        /// <summary>
        /// The field must be not be empty
        /// </summary>
        public bool IsNotEmpty { get; set; }

        /// <summary>
        /// Caption of the field displayed in header row (see EngineBase.GetFileHeader)
        /// </summary>
        internal string FieldCaption { get; set; }

        // --------------------------------------------------------------
        // WARNING !!!
        // Remember to add each of these fields to the clone method !!
        // --------------------------------------------------------------

        /// <summary>
        /// Fieldname of the field we are storing
        /// </summary>
        internal string FieldName
        {
            get { return FieldInfo.Name; }
        }

        /* private static readonly char[] mWhitespaceChars = new[] {
               '\t', '\n', '\v', '\f', '\r', ' ', '\x00a0', '\u2000', '\u2001', '\u2002', '\u2003', '\u2004',
               '\u2005', '\u2006', '\u2007', '\u2008', '\u2009', '\u200a', '\u200b', '\u3000', '\ufeff'
        */

        #endregion

        #region " CreateField "

        /// <summary>
        /// Check the Attributes on the field and return a structure containing
        /// the settings for this file.
        /// </summary>
        /// <param name="fi">Information about this field</param>
        /// <param name="recordAttribute">Type of record we are reading</param>
        /// <returns>Null if not used</returns>
        public static FieldBase CreateField(FieldInfo fi, TypedRecordAttribute recordAttribute)
        {
            // If ignored, return null
#pragma warning disable 612,618 // disable obsolete warning
            if (fi.IsDefined(typeof (FieldNotInFileAttribute), true) ||
                fi.IsDefined(typeof (FieldIgnoredAttribute), true) ||
                fi.IsDefined(typeof (FieldHiddenAttribute), true))
#pragma warning restore 612,618
                return null;

            FieldBase res = null;

            var attributes = (FieldAttribute[]) fi.GetCustomAttributes(typeof (FieldAttribute), true);

            // CHECK USAGE ERRORS !!!

            // Fixed length record and no attributes at all
            if (recordAttribute is FixedLengthRecordAttribute &&
                attributes.Length == 0) {
                throw new BadUsageException("The field: '" + fi.Name +
                                            "' must be marked the FieldFixedLength attribute because the record class is marked with FixedLengthRecord.");
            }

            // At most one FieldAttribute per field
            if (attributes.Length > 1) {
                throw new BadUsageException("The field: '" + fi.Name +
                                            "' has a FieldFixedLength and a FieldDelimiter attribute.");
            }

            // Alignment only makes sense for fixed-length output
            if (recordAttribute is DelimitedRecordAttribute &&
                fi.IsDefined(typeof (FieldAlignAttribute), false)) {
                throw new BadUsageException("The field: '" + fi.Name +
                                            "' can't be marked with FieldAlign attribute, it is only valid for fixed length records and are used only for write purpose.");
            }

            // Array-length constraints only apply to array fields
            if (fi.FieldType.IsArray == false &&
                fi.IsDefined(typeof (FieldArrayLengthAttribute), false)) {
                throw new BadUsageException("The field: '" + fi.Name +
                                            "' can't be marked with FieldArrayLength attribute is only valid for array fields.");
            }

            // PROCESS IN NORMAL CONDITIONS
            if (attributes.Length > 0) {
                FieldAttribute fieldAttb = attributes[0];

                if (fieldAttb is FieldFixedLengthAttribute) {
                    // Fixed Field
                    if (recordAttribute is DelimitedRecordAttribute) {
                        throw new BadUsageException("The field: '" + fi.Name +
                                                    "' can't be marked with FieldFixedLength attribute, it is only for the FixedLengthRecords not for delimited ones.");
                    }

                    var attbFixedLength = (FieldFixedLengthAttribute) fieldAttb;
                    var attbAlign = Attributes.GetFirst<FieldAlignAttribute>(fi);

                    res = new FixedLengthField(fi, attbFixedLength.Length, attbAlign);
                    ((FixedLengthField) res).FixedMode = ((FixedLengthRecordAttribute) recordAttribute).FixedMode;
                }
                else if (fieldAttb is FieldDelimiterAttribute) {
                    // Delimited Field
                    if (recordAttribute is FixedLengthRecordAttribute) {
                        throw new BadUsageException("The field: '" + fi.Name +
                                                    "' can't be marked with FieldDelimiter attribute, it is only for DelimitedRecords not for fixed ones.");
                    }
                    res = new DelimitedField(fi, ((FieldDelimiterAttribute) fieldAttb).Delimiter);
                }
                else {
                    throw new BadUsageException(
                        "Custom field attributes are not currently supported. Unknown attribute: " +
                        fieldAttb.GetType().Name + " on field: " + fi.Name);
                }
            }
            else // attributes.Length == 0
            {
                // No per-field attribute: delimited records fall back to the
                // record-level separator; other record kinds leave res null.
                var delimitedRecordAttribute = recordAttribute as DelimitedRecordAttribute;

                if (delimitedRecordAttribute != null)
                    res = new DelimitedField(fi, delimitedRecordAttribute.Separator);
            }

            if (res != null) {
                // FieldDiscarded
                res.Discarded = fi.IsDefined(typeof (FieldValueDiscardedAttribute), false);

                // FieldTrim
                Attributes.WorkWithFirst<FieldTrimAttribute>(fi,
                    (x) => {
                        res.TrimMode = x.TrimMode;
                        res.TrimChars = x.TrimChars;
                    });

                // FieldQuoted
                Attributes.WorkWithFirst<FieldQuotedAttribute>(fi,
                    (x) => {
                        if (res is FixedLengthField) {
                            throw new BadUsageException(
                                "The field: '" + fi.Name +
                                "' can't be marked with FieldQuoted attribute, it is only for the delimited records.");
                        }

                        ((DelimitedField) res).QuoteChar = x.QuoteChar;
                        ((DelimitedField) res).QuoteMode = x.QuoteMode;
                        ((DelimitedField) res).QuoteMultiline = x.QuoteMultiline;
                    });

                // FieldOrder
                Attributes.WorkWithFirst<FieldOrderAttribute>(fi, x => res.FieldOrder = x.Order);

                // FieldCaption
                Attributes.WorkWithFirst<FieldCaptionAttribute>(fi, x => res.FieldCaption = x.Caption);

                // FieldOptional
                res.IsOptional = fi.IsDefined(typeof(FieldOptionalAttribute), false);

                // FieldInNewLine
                res.InNewLine = fi.IsDefined(typeof(FieldInNewLineAttribute), false);

                // FieldNotEmpty
                res.IsNotEmpty = fi.IsDefined(typeof(FieldNotEmptyAttribute), false);

                // FieldArrayLength
                if (fi.FieldType.IsArray) {
                    res.IsArray = true;
                    res.ArrayType = fi.FieldType.GetElementType();

                    // MinValue indicates that there is no FieldArrayLength in the array
                    res.ArrayMinLength = int.MinValue;
                    res.ArrayMaxLength = int.MaxValue;

                    Attributes.WorkWithFirst<FieldArrayLengthAttribute>(fi,
                        (x) => {
                            res.ArrayMinLength = x.MinLength;
                            res.ArrayMaxLength = x.MaxLength;

                            if (res.ArrayMaxLength < res.ArrayMinLength ||
                                res.ArrayMinLength < 0 ||
                                res.ArrayMaxLength <= 0) {
                                throw new BadUsageException("The field: " + fi.Name +
                                                            " has invalid length values in the [FieldArrayLength] attribute.");
                            }
                        });
                }
            }

            // NOTE(review): the code below dereferences res unconditionally.
            // If recordAttribute is neither Delimited nor FixedLength and the
            // field has no FieldAttribute, res is still null here and this
            // throws NullReferenceException instead of returning the
            // documented null — confirm whether such record types can occur.
            if (fi.IsDefined(typeof (CompilerGeneratedAttribute), false)) {
                // Auto-property backing fields look like "<Name>k__BackingField";
                // recover the friendly property name between '<' and '>'.
                if (fi.Name.EndsWith("__BackingField") &&
                    fi.Name.StartsWith("<") &&
                    fi.Name.Contains(">"))
                    res.FieldFriendlyName = fi.Name.Substring(1, fi.Name.IndexOf(">") - 1);
                res.IsAutoProperty = true;
                var prop = fi.DeclaringType.GetProperty(res.FieldFriendlyName);
                if (prop != null) {
                    Attributes.WorkWithFirst<FieldOrderAttribute>(prop, x => res.FieldOrder = x.Order);
                }
            }
            if (string.IsNullOrEmpty(res.FieldFriendlyName))
                res.FieldFriendlyName = res.FieldName;

            return res;
        }

        // Owning record layout and this field's position within it.
        internal RecordOptions Parent { get; set; }
        internal int ParentIndex { get; set; }

        /// <summary>
        /// Extracts the property name from a compiler-generated auto-property
        /// backing field name, or returns "" when the field is not one.
        /// </summary>
        internal static string AutoPropertyName(FieldInfo fi)
        {
            if (fi.IsDefined(typeof(CompilerGeneratedAttribute), false)) {
                if (fi.Name.EndsWith("__BackingField") &&
                    fi.Name.StartsWith("<") &&
                    fi.Name.Contains(">"))
                    return fi.Name.Substring(1, fi.Name.IndexOf(">") - 1);
            }
            return "";
        }

        // True when this field is the backing field of an auto-property.
        internal bool IsAutoProperty { get; set; }

        #endregion

        #region " Constructor "

        /// <summary>
        /// Create a field base without any configuration
        /// </summary>
        internal FieldBase()
        {
            IsNullableType = false;
            TrimMode = TrimMode.None;
            FieldOrder = null;
            InNewLine = false;
            //NextIsOptional = false;
            IsOptional = false;
            TrimChars = null;
            NullValue = null;
            TrailingArray = false;
            IsArray = false;
            IsNotEmpty = false;
        }

        /// <summary>
        /// Create a field base from a fieldinfo object
        /// Verify the settings against the actual field to ensure it will work.
        /// </summary>
        /// <param name="fi">Field Info Object</param>
        internal FieldBase(FieldInfo fi)
            : this()
        {
            FieldInfo = fi;
            FieldType = FieldInfo.FieldType;

            // For arrays, conversion operates on the element type
            if (FieldType.IsArray)
                FieldTypeInternal = FieldType.GetElementType();
            else
                FieldTypeInternal = FieldType;

            IsStringField = FieldTypeInternal == typeof (string);

            object[] attribs = fi.GetCustomAttributes(typeof (FieldConverterAttribute), true);

            if (attribs.Length > 0) {
                var conv = (FieldConverterAttribute) attribs[0];
                this.Converter = conv.Converter;
                conv.ValidateTypes(FieldInfo);
            }
            else
                this.Converter = ConvertHelpers.GetDefaultConverter(fi.Name, FieldType);

            if (this.Converter != null)
                this.Converter.mDestinationType = FieldTypeInternal;

            attribs = fi.GetCustomAttributes(typeof (FieldNullValueAttribute), true);

            if (attribs.Length > 0) {
                NullValue = ((FieldNullValueAttribute) attribs[0]).NullValue;
                // mNullValueOnWrite = ((FieldNullValueAttribute) attribs[0]).NullValueOnWrite;

                if (NullValue != null) {
                    // The substitute null value must be assignable to the field
                    if (!FieldTypeInternal.IsAssignableFrom(NullValue.GetType())) {
                        throw new BadUsageException("The NullValue is of type: " + NullValue.GetType().Name +
                                                    " that is not asignable to the field " + FieldInfo.Name +
                                                    " of type: " + FieldTypeInternal.Name);
                    }
                }
            }

            IsNullableType = FieldTypeInternal.IsValueType &&
                             FieldTypeInternal.IsGenericType &&
                             FieldTypeInternal.GetGenericTypeDefinition() == typeof (Nullable<>);
        }

        #endregion

        #region " MustOverride (String Handling) "

        /// <summary>
        /// Extract the string from the underlying data, removes quotes
        /// characters for example
        /// </summary>
        /// <param name="line">Line to parse data from</param>
        /// <returns>Slightly processed string from the data</returns>
        internal abstract ExtractedInfo ExtractFieldString(LineInfo line);

        /// <summary>
        /// Create a text block containing the field from definition
        /// </summary>
        /// <param name="sb">Append string to output</param>
        /// <param name="fieldValue">Field we are adding</param>
        /// <param name="isLast">Indicates if we are processing last field</param>
        internal abstract void CreateFieldString(StringBuilder sb, object fieldValue, bool isLast);

        /// <summary>
        /// Convert a field value to a string representation
        /// </summary>
        /// <param name="fieldValue">Object containing data</param>
        /// <returns>String representation of field</returns>
        internal string CreateFieldString(object fieldValue)
        {
            if (this.Converter == null) {
                if (fieldValue == null)
                    return string.Empty;
                else
                    return fieldValue.ToString();
            }
            else
                return this.Converter.FieldToString(fieldValue);
        }

        #endregion

        #region " ExtractValue "

        /// <summary>
        /// Get the data out of the records
        /// </summary>
        /// <param name="line">Line handler containing text</param>
        /// <returns>The parsed value (or array of values) for this field</returns>
        internal object ExtractFieldValue(LineInfo line)
        {
            //-> extract only what I need

            if (InNewLine) {
                // Any trailing characters, terminate
                if (line.EmptyFromPos() == false) {
                    throw new BadUsageException(line,
                        "Text '" + line.CurrentString +
                        "' found before the new line of the field: " + FieldInfo.Name +
                        " (this is not allowed when you use [FieldInNewLine])");
                }

                line.ReLoad(line.mReader.ReadNextLine());

                if (line.mLineStr == null) {
                    throw new BadUsageException(line,
                        "End of stream found parsing the field " + FieldInfo.Name +
                        ". Please check the class record.");
                }
            }

            if (IsArray == false) {
                ExtractedInfo info = ExtractFieldString(line);

                // A custom-extracted string means the subclass already
                // advanced the position; otherwise move past the raw span.
                if (info.mCustomExtractedString == null)
                    line.mCurrentPos = info.ExtractedTo + 1;

                line.mCurrentPos += CharsToDiscard; //total;

                if (Discarded)
                    return GetDiscardedNullValue();
                else
                    return AssignFromString(info, line).Value;
            }
            else {
                if (ArrayMinLength <= 0)
                    ArrayMinLength = 0;

                int i = 0;

                var res = new ArrayList(Math.Max(ArrayMinLength, 10));

                // Keep consuming values until the line is exhausted or the
                // maximum array length is reached.
                while (line.mCurrentPos - CharsToDiscard < line.mLineStr.Length &&
                       i < ArrayMaxLength) {
                    ExtractedInfo info = ExtractFieldString(line);

                    if (info.mCustomExtractedString == null)
                        line.mCurrentPos = info.ExtractedTo + 1;

                    line.mCurrentPos += CharsToDiscard;

                    try {
                        var value = AssignFromString(info, line);

                        // A leading null value on an otherwise finished line
                        // means an empty array, not a one-null array.
                        if (value.NullValueUsed &&
                            i == 0 &&
                            line.IsEOL())
                            break;

                        res.Add(value.Value);
                    }
                    catch (NullValueNotFoundException) {
                        if (i == 0)
                            break;
                        else
                            throw;
                    }
                    i++;
                }

                if (res.Count < ArrayMinLength) {
                    throw new InvalidOperationException(
                        string.Format(
                            "Line: {0} Column: {1} Field: {2}. The array has only {3} values, less than the minimum length of {4}",
                            line.mReader.LineNumber.ToString(),
                            line.mCurrentPos.ToString(),
                            FieldInfo.Name,
                            res.Count,
                            ArrayMinLength));
                }
                else if (IsLast && line.IsEOL() == false) {
                    throw new InvalidOperationException(
                        string.Format(
                            "Line: {0} Column: {1} Field: {2}. The array has more values than the maximum length of {3}",
                            line.mReader.LineNumber,
                            line.mCurrentPos,
                            FieldInfo.Name,
                            ArrayMaxLength));
                }

                // TODO: is there a reason we go through all the array processing then discard it
                if (Discarded)
                    return null;
                else
                    return res.ToArray(ArrayType);
            }
        }

        #region " AssignFromString "

        // Result of parsing one value: the value itself plus a flag saying
        // whether the null-substitute was used (drives array termination).
        private struct AssignResult
        {
            public object Value;
            public bool NullValueUsed;
        }

        /// <summary>
        /// Create field object after extracting the string from the underlying
        /// input data
        /// </summary>
        /// <param name="fieldString">Information extracted?</param>
        /// <param name="line">Underlying input data</param>
        /// <returns>Object to assign to field</returns>
        private AssignResult AssignFromString(ExtractedInfo fieldString, LineInfo line)
        {
            object val;

            var extractedString = fieldString.ExtractedString();

            try {
                if (IsNotEmpty && String.IsNullOrEmpty(extractedString)) {
                    throw new InvalidOperationException("The value is empty and must be populated.");
                }
                else if (this.Converter == null) {
                    // No converter: strings are trimmed per TrimMode, other
                    // types go through Convert.ChangeType.
                    if (IsStringField)
                        val = TrimString(extractedString);
                    else {
                        extractedString = extractedString.Trim();

                        if (extractedString.Length == 0) {
                            return new AssignResult {
                                Value = GetNullValue(line),
                                NullValueUsed = true
                            };
                        }
                        else
                            val = Convert.ChangeType(extractedString, FieldTypeInternal, null);
                    }
                }
                else {
                    var trimmedString = extractedString.Trim();

                    if (this.Converter.CustomNullHandling == false &&
                        trimmedString.Length == 0) {
                        return new AssignResult {
                            Value = GetNullValue(line),
                            NullValueUsed = true
                        };
                    }
                    else {
                        if (TrimMode == TrimMode.Both)
                            val = this.Converter.StringToField(trimmedString);
                        else
                            val = this.Converter.StringToField(TrimString(extractedString));

                        if (val == null) {
                            return new AssignResult {
                                Value = GetNullValue(line),
                                NullValueUsed = true
                            };
                        }
                    }
                }

                return new AssignResult {
                    Value = val
                };
            }
            catch (ConvertException ex) {
                // Enrich the converter's exception with location info
                ex.FieldName = FieldInfo.Name;
                ex.LineNumber = line.mReader.LineNumber;
                ex.ColumnNumber = fieldString.ExtractedFrom + 1;
                throw;
            }
            catch (BadUsageException) {
                throw;
            }
            catch (Exception ex) {
                // Built-in converters get a plain ConvertException; custom
                // converters get a message naming the converter type.
                if (this.Converter == null ||
                    this.Converter.GetType().Assembly == typeof (FieldBase).Assembly) {
                    throw new ConvertException(extractedString,
                        FieldTypeInternal,
                        FieldInfo.Name,
                        line.mReader.LineNumber,
                        fieldString.ExtractedFrom + 1,
                        ex.Message,
                        ex);
                }
                else {
                    throw new ConvertException(extractedString,
                        FieldTypeInternal,
                        FieldInfo.Name,
                        line.mReader.LineNumber,
                        fieldString.ExtractedFrom + 1,
                        "Your custom converter: " + this.Converter.GetType().Name + " throws an " +
                        ex.GetType().Name + " with the message: " + ex.Message,
                        ex);
                }
            }
        }

        // Applies the configured TrimMode to the extracted text.
        // NOTE(review): TrimChars is never consulted here — whitespace-only
        // trimming is performed; confirm whether custom trim characters are
        // handled elsewhere.
        private String TrimString(string extractedString)
        {
            switch (TrimMode) {
                case TrimMode.None:
                    return extractedString;

                case TrimMode.Both:
                    return extractedString.Trim();

                case TrimMode.Left:
                    return extractedString.TrimStart();

                case TrimMode.Right:
                    return extractedString.TrimEnd();

                default:
                    throw new Exception("Trim mode invalid in FieldBase.TrimString -> " + TrimMode.ToString());
            }
        }

        /// <summary>
        /// Convert a null value into a representation,
        /// allows for a null value override
        /// </summary>
        /// <param name="line">input line to read, used for error messages</param>
        /// <returns>Null value for object</returns>
        private object GetNullValue(LineInfo line)
        {
            if (NullValue == null) {
                if (FieldTypeInternal.IsValueType) {
                    if (IsNullableType)
                        return null;

                    // Non-nullable value type with no [FieldNullValue]: fail
                    string msg = "Not value found for the value type field: '" + FieldInfo.Name + "' Class: '" +
                                 FieldInfo.DeclaringType.Name + "'. " + Environment.NewLine +
                                 "You must use the [FieldNullValue] attribute because this is a value type and can't be null or use a Nullable Type instead of the current type.";

                    throw new NullValueNotFoundException(line, msg);
                }
                else
                    return null;
            }
            else
                return NullValue;
        }

        /// <summary>
        /// Get the null value that represent a discarded value
        /// </summary>
        /// <returns>null value of discard?</returns>
        private object GetDiscardedNullValue()
        {
            if (NullValue == null) {
                if (FieldTypeInternal.IsValueType) {
                    if (IsNullableType)
                        return null;

                    string msg = "The field: '" + FieldInfo.Name + "' Class: '" +
                                 FieldInfo.DeclaringType.Name +
                                 "' is from a value type: " + FieldInfo.FieldType.Name +
                                 " and is discarded (null) you must provide a [FieldNullValue] attribute.";

                    throw new BadUsageException(msg);
                }
                else
                    return null;
            }
            else
                return NullValue;
        }

        #endregion

        #region " CreateValueForField "

        /// <summary>
        /// Convert a field value into a write able value
        /// </summary>
        /// <param name="fieldValue">object value to convert</param>
        /// <returns>converted value</returns>
        public object CreateValueForField(object fieldValue)
        {
            object val = null;

            if (fieldValue == null) {
                if (NullValue == null) {
                    if (FieldTypeInternal.IsValueType &&
                        Nullable.GetUnderlyingType(FieldTypeInternal) == null) {
                        throw new BadUsageException(
                            "Null Value found. You must specify a FieldNullValueAttribute in the " + FieldInfo.Name +
                            " field of type " + FieldTypeInternal.Name + ", because this is a ValueType.");
                    }
                    else
                        val = null;
                }
                else
                    val = NullValue;
            }
            else if (FieldTypeInternal == fieldValue.GetType())
                val = fieldValue;
            else {
                if (this.Converter == null)
                    val = Convert.ChangeType(fieldValue, FieldTypeInternal, null);
                else {
                    try {
                        // Nullable<T> fields accept a bare T without conversion
                        if (Nullable.GetUnderlyingType(FieldTypeInternal) != null &&
                            Nullable.GetUnderlyingType(FieldTypeInternal) == fieldValue.GetType())
                            val = fieldValue;
                        else
                            val = Convert.ChangeType(fieldValue, FieldTypeInternal, null);
                    }
                    catch {
                        // Last resort: round-trip through the converter's
                        // string representation
                        val = Converter.StringToField(fieldValue.ToString());
                    }
                }
            }

            return val;
        }

        #endregion

        #endregion

        #region " AssignToString "

        /// <summary>
        /// convert field to string value and assign to a string builder
        /// buffer for output
        /// </summary>
        /// <param name="sb">buffer to collect record</param>
        /// <param name="fieldValue">value to convert</param>
        internal void AssignToString(StringBuilder sb, object fieldValue)
        {
            if (this.InNewLine == true)
                sb.Append(StringHelper.NewLine);

            if (IsArray) {
                if (fieldValue == null) {
                    if (0 < this.ArrayMinLength) {
                        throw new InvalidOperationException(
                            string.Format("Field: {0}. The array is null, but the minimum length is {1}",
                                FieldInfo.Name,
                                ArrayMinLength));
                    }
                    return;
                }

                var array = (IList) fieldValue;

                if (array.Count < this.ArrayMinLength) {
                    throw new InvalidOperationException(
                        string.Format("Field: {0}. The array has {1} values, but the minimum length is {2}",
                            FieldInfo.Name,
                            array.Count,
                            ArrayMinLength));
                }

                if (array.Count > this.ArrayMaxLength) {
                    throw new InvalidOperationException(
                        string.Format("Field: {0}. The array has {1} values, but the maximum length is {2}",
                            FieldInfo.Name,
                            array.Count,
                            ArrayMaxLength));
                }

                for (int i = 0; i < array.Count; i++) {
                    object val = array[i];
                    CreateFieldString(sb, val, IsLast && i == array.Count - 1);
                }
            }
            else
                CreateFieldString(sb, fieldValue, IsLast);
        }

        #endregion

        /// <summary>
        /// Copy the field object
        /// </summary>
        /// <returns>a complete copy of the Field object</returns>
        object ICloneable.Clone()
        {
            var res = CreateClone();

            res.FieldType = FieldType;
            res.Converter = this.Converter;
            res.FieldTypeInternal = FieldTypeInternal;
            res.IsArray = IsArray;
            res.ArrayType = ArrayType;
            res.ArrayMinLength = ArrayMinLength;
            res.ArrayMaxLength = ArrayMaxLength;
            res.TrailingArray = TrailingArray;
            res.NullValue = NullValue;
            res.IsStringField = IsStringField;
            res.FieldInfo = FieldInfo;
            res.TrimMode = TrimMode;
            res.TrimChars = TrimChars;
            res.IsOptional = IsOptional;
            //res.NextIsOptional = NextIsOptional;
            res.InNewLine = InNewLine;
            res.FieldOrder = FieldOrder;
            res.IsNullableType = IsNullableType;
            res.Discarded = Discarded;
            res.FieldFriendlyName = FieldFriendlyName;
            res.IsNotEmpty = IsNotEmpty;
            res.FieldCaption = FieldCaption;
            res.Parent = Parent;
            res.ParentIndex = ParentIndex;
            return res;
        }

        /// <summary>
        /// Add the extra details that derived classes create
        /// </summary>
        /// <returns>field clone of right type</returns>
        protected abstract FieldBase CreateClone();
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using log4net; using Mono.Addins; using Nini.Config; using OpenMetaverse; using OpenSim.Framework; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Services.Connectors.Hypergrid; using OpenSim.Services.Interfaces; using System; using System.Collections.Generic; using System.Reflection; using GridRegion = OpenSim.Services.Interfaces.GridRegion; namespace OpenSim.Region.CoreModules.Framework.EntityTransfer { [Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "HGEntityTransferModule")] public class HGEntityTransferModule : EntityTransferModule, INonSharedRegionModule, IEntityTransferModule, IUserAgentVerificationModule { protected string m_AccountName; protected List<AvatarAttachment> m_Attachs; protected List<AvatarAppearance> m_ExportedAppearances; protected bool m_RestrictAppearanceAbroad; private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); private GatekeeperServiceConnector m_GatekeeperConnector; private int m_levelHGTeleport = 0; private IUserAgentService m_UAS; protected List<AvatarAppearance> ExportedAppearance { get { if (m_ExportedAppearances != null) return m_ExportedAppearances; m_ExportedAppearances = new List<AvatarAppearance>(); m_Attachs = new List<AvatarAttachment>(); string[] names = m_AccountName.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries); foreach (string name in names) { string[] parts = name.Trim().Split(); if (parts.Length != 2) { m_log.WarnFormat("[HG ENTITY TRANSFER MODULE]: Wrong user account name format {0}. 
Specify 'First Last'", name); return null; } UserAccount account = Scene.UserAccountService.GetUserAccount(UUID.Zero, parts[0], parts[1]); if (account == null) { m_log.WarnFormat("[HG ENTITY TRANSFER MODULE]: Unknown account {0}", m_AccountName); return null; } AvatarAppearance a = Scene.AvatarService.GetAppearance(account.PrincipalID); if (a != null) m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Successfully retrieved appearance for {0}", name); foreach (AvatarAttachment att in a.GetAttachments()) { InventoryItemBase item = new InventoryItemBase(att.ItemID, account.PrincipalID); item = Scene.InventoryService.GetItem(item); if (item != null) a.SetAttachment(att.AttachPoint, att.ItemID, item.AssetID); else m_log.WarnFormat("[HG ENTITY TRANSFER MODULE]: Unable to retrieve item {0} from inventory {1}", att.ItemID, name); } m_ExportedAppearances.Add(a); m_Attachs.AddRange(a.GetAttachments()); } return m_ExportedAppearances; } } #region ISharedRegionModule public override string Name { get { return "HGEntityTransferModule"; } } public override void AddRegion(Scene scene) { base.AddRegion(scene); if (m_Enabled) { scene.RegisterModuleInterface<IUserAgentVerificationModule>(this); scene.EventManager.OnIncomingSceneObject += OnIncomingSceneObject; } } public override void Initialise(IConfigSource source) { IConfig moduleConfig = source.Configs["Modules"]; if (moduleConfig != null) { string name = moduleConfig.GetString("EntityTransferModule", ""); if (name == Name) { IConfig transferConfig = source.Configs["EntityTransfer"]; if (transferConfig != null) { m_levelHGTeleport = transferConfig.GetInt("LevelHGTeleport", 0); m_RestrictAppearanceAbroad = transferConfig.GetBoolean("RestrictAppearanceAbroad", false); if (m_RestrictAppearanceAbroad) { m_AccountName = transferConfig.GetString("AccountForAppearance", string.Empty); if (m_AccountName == string.Empty) m_log.WarnFormat("[HG ENTITY TRANSFER MODULE]: RestrictAppearanceAbroad is on, but no account has been given for avatar 
appearance!"); } } InitialiseCommon(source); m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: {0} enabled.", Name); } } } public override void RegionLoaded(Scene scene) { base.RegionLoaded(scene); if (m_Enabled) { m_GatekeeperConnector = new GatekeeperServiceConnector(scene.AssetService); m_UAS = scene.RequestModuleInterface<IUserAgentService>(); if (m_UAS == null) m_UAS = new UserAgentServiceConnector(m_ThisHomeURI); } } public override void RemoveRegion(Scene scene) { base.RemoveRegion(scene); if (m_Enabled) scene.UnregisterModuleInterface<IUserAgentVerificationModule>(this); } protected override void OnNewClient(IClientAPI client) { client.OnTeleportHomeRequest += TriggerTeleportHome; client.OnTeleportLandmarkRequest += RequestTeleportLandmark; client.OnConnectionClosed += new Action<IClientAPI>(OnConnectionClosed); } private void OnIncomingSceneObject(SceneObjectGroup so) { if (!so.IsAttachment) return; if (so.AttachedAvatar == UUID.Zero || Scene.UserManagementModule.IsLocalGridUser(so.AttachedAvatar)) return; // foreign user AgentCircuitData aCircuit = Scene.AuthenticateHandler.GetAgentCircuitData(so.AttachedAvatar); if (aCircuit != null && (aCircuit.teleportFlags & (uint)Constants.TeleportFlags.ViaHGLogin) != 0) { if (aCircuit.ServiceURLs != null && aCircuit.ServiceURLs.ContainsKey("AssetServerURI")) { string url = aCircuit.ServiceURLs["AssetServerURI"].ToString(); m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Incoming attachment {0} for HG user {1} with asset server {2}", so.Name, so.AttachedAvatar, url); Dictionary<UUID, sbyte> ids = new Dictionary<UUID, sbyte>(); HGUuidGatherer uuidGatherer = new HGUuidGatherer(Scene.AssetService, url); uuidGatherer.GatherAssetUuids(so, ids); foreach (KeyValuePair<UUID, sbyte> kvp in ids) uuidGatherer.FetchAsset(kvp.Key); } } } #endregion ISharedRegionModule #region HG overrides of IEntiryTransferModule /// <summary> /// Tries to teleport agent to landmark. 
        /// </summary>
        /// <param name="remoteClient">Client requesting the teleport.</param>
        /// <param name="lm">Decoded landmark; lm.Gatekeeper is empty for local-grid landmarks.</param>
        public override void RequestTeleportLandmark(IClientAPI remoteClient, AssetLandmark lm)
        {
            m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Teleporting agent via landmark to {0} region {1} position {2}", (lm.Gatekeeper == string.Empty) ? "local" : lm.Gatekeeper, lm.RegionID, lm.Position);

            // Landmark within this grid: defer to the base (non-HG) implementation.
            if (lm.Gatekeeper == string.Empty)
            {
                base.RequestTeleportLandmark(remoteClient, lm);
                return;
            }

            GridRegion info = Scene.GridService.GetRegionByUUID(UUID.Zero, lm.RegionID);

            // Local region?
            if (info != null)
            {
                ((Scene)(remoteClient.Scene)).RequestTeleportLocation(remoteClient, info.RegionHandle, lm.Position,
                    Vector3.Zero, (uint)(Constants.TeleportFlags.SetLastToTarget | Constants.TeleportFlags.ViaLandmark));
            }
            else
            {
                // Foreign region: ask the landmark's gatekeeper to resolve the destination.
                Scene scene = (Scene)(remoteClient.Scene);
                GatekeeperServiceConnector gConn = new GatekeeperServiceConnector();
                GridRegion gatekeeper = new GridRegion();
                gatekeeper.ServerURI = lm.Gatekeeper;
                string homeURI = Scene.GetAgentHomeURI(remoteClient.AgentId);

                string message;
                GridRegion finalDestination = gConn.GetHyperlinkRegion(gatekeeper, new UUID(lm.RegionID), remoteClient.AgentId, homeURI, out message);

                if (finalDestination != null)
                {
                    ScenePresence sp = scene.GetScenePresence(remoteClient.AgentId);
                    IEntityTransferModule transferMod = scene.RequestModuleInterface<IEntityTransferModule>();

                    if (transferMod != null && sp != null)
                    {
                        // A non-null message from the gatekeeper is informational; show it
                        // to the user but proceed with the teleport anyway.
                        if (message != null)
                            sp.ControllingClient.SendAgentAlertMessage(message, true);

                        transferMod.DoTeleport(
                            sp, gatekeeper, finalDestination, lm.Position, Vector3.UnitX,
                            (uint)(Constants.TeleportFlags.SetLastToTarget | Constants.TeleportFlags.ViaLandmark));
                    }
                }
                else
                {
                    remoteClient.SendTeleportFailed(message);
                }
            }
        }

        /// <summary>
        /// Teleports the given user home: local users are delegated to the base
        /// implementation, foreign users have their home region resolved through
        /// the user agent service at their HomeURI and are teleported via HG.
        /// </summary>
        /// <returns>false when the home region or agent could not be resolved.</returns>
        public override bool TeleportHome(UUID id, IClientAPI client)
        {
            m_log.DebugFormat(
                "[ENTITY TRANSFER MODULE]: Request to teleport {0} {1} home", client.Name, client.AgentId);

            // Let's find out if this is a foreign user or a local user
            IUserManagement uMan = Scene.RequestModuleInterface<IUserManagement>();
            if (uMan != null && uMan.IsLocalGridUser(id))
            {
                // local grid user
                m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: User is local");
                return base.TeleportHome(id, client);
            }

            // Foreign user wants to go home
            //
            AgentCircuitData aCircuit = ((Scene)(client.Scene)).AuthenticateHandler.GetAgentCircuitData(client.CircuitCode);
            if (aCircuit == null || (aCircuit != null && !aCircuit.ServiceURLs.ContainsKey("HomeURI")))
            {
                client.SendTeleportFailed("Your information has been lost");
                m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Unable to locate agent's gateway information");
                return false;
            }

            IUserAgentService userAgentService = new UserAgentServiceConnector(aCircuit.ServiceURLs["HomeURI"].ToString());
            Vector3 position = Vector3.UnitY, lookAt = Vector3.UnitY;

            GridRegion finalDestination = null;
            try
            {
                // Remote call to the user's home grid; may throw if that grid is unreachable.
                finalDestination = userAgentService.GetHomeRegion(aCircuit.AgentID, out position, out lookAt);
            }
            catch (Exception e)
            {
                m_log.Debug("[HG ENTITY TRANSFER MODULE]: GetHomeRegion call failed ", e);
            }

            if (finalDestination == null)
            {
                client.SendTeleportFailed("Your home region could not be found");
                m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Agent's home region not found");
                return false;
            }

            ScenePresence sp = ((Scene)(client.Scene)).GetScenePresence(client.AgentId);
            if (sp == null)
            {
                client.SendTeleportFailed("Internal error");
                m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Agent not found in the scene where it is supposed to be");
                return false;
            }

            GridRegion homeGatekeeper = MakeRegion(aCircuit);

            m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: teleporting user {0} {1} home to {2} via {3}:{4}",
                aCircuit.firstname, aCircuit.lastname, finalDestination.RegionName, homeGatekeeper.ServerURI, homeGatekeeper.RegionName);

            DoTeleport(
                sp, homeGatekeeper, finalDestination, position, lookAt,
                (uint)(Constants.TeleportFlags.SetLastToTarget | Constants.TeleportFlags.ViaHome));
            return true;
        }

        public override void TriggerTeleportHome(UUID id, IClientAPI client)
        {
            TeleportHome(id, client);
        }

        /// <summary>
        /// After the agent has left this region: on a hypergrid logout, also log
        /// the foreign visitor out of this grid's presence and grid-user services.
        /// </summary>
        protected override void AgentHasMovedAway(ScenePresence sp, bool logout)
        {
            base.AgentHasMovedAway(sp, logout);
            if (logout)
            {
                // Log them out of this grid
                Scene.PresenceService.LogoutAgent(sp.ControllingClient.SessionId);
                string userId = Scene.UserManagementModule.GetUserUUI(sp.UUID);
                Scene.GridUserService.LoggedOut(userId, UUID.Zero, Scene.RegionInfo.RegionID, sp.AbsolutePosition, sp.Lookat);
            }
        }

        /// <summary>
        /// Creates the agent at the destination. For hyperlink destinations the
        /// login goes through the user's home user agent service (HG login);
        /// otherwise falls through to the base implementation.
        /// </summary>
        protected override bool CreateAgent(ScenePresence sp, GridRegion reg, GridRegion finalDestination, AgentCircuitData agentCircuit, uint teleportFlags, out string reason, out bool logout)
        {
            m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: CreateAgent {0} {1}", reg.ServerURI, finalDestination.ServerURI);
            reason = string.Empty;
            logout = false;
            int flags = Scene.GridService.GetRegionFlags(Scene.RegionInfo.ScopeID, reg.RegionID);
            if (flags == -1 /* no region in DB */ || (flags & (int)OpenSim.Framework.RegionFlags.Hyperlink) != 0)
            {
                // this user is going to another grid
                // for local users, check if HyperGrid teleport is allowed, based on user level
                if (Scene.UserManagementModule.IsLocalGridUser(sp.UUID) && sp.UserLevel < m_levelHGTeleport)
                {
                    m_log.WarnFormat("[HG ENTITY TRANSFER MODULE]: Unable to HG teleport agent due to insufficient UserLevel.");
                    reason = "Hypergrid teleport not allowed";
                    return false;
                }

                if (agentCircuit.ServiceURLs.ContainsKey("HomeURI"))
                {
                    string userAgentDriver = agentCircuit.ServiceURLs["HomeURI"].ToString();
                    IUserAgentService connector;

                    // Reuse the in-process user agent service when the user's home is this grid.
                    if (userAgentDriver.Equals(m_ThisHomeURI) && m_UAS != null)
                        connector = m_UAS;
                    else
                        connector = new UserAgentServiceConnector(userAgentDriver);

                    GridRegion source = new GridRegion(Scene.RegionInfo);
                    source.RawServerURI = m_GatekeeperURI;

                    bool success = connector.LoginAgentToGrid(source, agentCircuit, reg, finalDestination, false, out reason);
                    logout = success; // flag for later logout from this grid; this is an HG TP

                    if
(success) sp.Scene.EventManager.TriggerTeleportStart(sp.ControllingClient, reg, finalDestination, teleportFlags, logout); return success; } else { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Agent does not have a HomeURI address"); return false; } } return base.CreateAgent(sp, reg, finalDestination, agentCircuit, teleportFlags, out reason, out logout); } protected override GridRegion GetFinalDestination(GridRegion region, UUID agentID, string agentHomeURI, out string message) { int flags = Scene.GridService.GetRegionFlags(Scene.RegionInfo.ScopeID, region.RegionID); m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: region {0} flags: {1}", region.RegionName, flags); message = null; if ((flags & (int)OpenSim.Framework.RegionFlags.Hyperlink) != 0) { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Destination region is hyperlink"); GridRegion real_destination = m_GatekeeperConnector.GetHyperlinkRegion(region, region.RegionID, agentID, agentHomeURI, out message); if (real_destination != null) m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: GetFinalDestination: ServerURI={0}", real_destination.ServerURI); else m_log.WarnFormat("[HG ENTITY TRANSFER MODULE]: GetHyperlinkRegion of region {0} from Gatekeeper {1} failed: {2}", region.RegionID, region.ServerURI, message); return real_destination; } return region; } protected override bool NeedsClosing(float drawdist, uint oldRegionX, uint newRegionX, uint oldRegionY, uint newRegionY, GridRegion reg) { if (base.NeedsClosing(drawdist, oldRegionX, newRegionX, oldRegionY, newRegionY, reg)) return true; int flags = Scene.GridService.GetRegionFlags(Scene.RegionInfo.ScopeID, reg.RegionID); if (flags == -1 /* no region in DB */ || (flags & (int)OpenSim.Framework.RegionFlags.Hyperlink) != 0) return true; return false; } protected override bool ValidateGenericConditions(ScenePresence sp, GridRegion reg, GridRegion finalDestination, uint teleportFlags, out string reason) { reason = "Please wear your grid's allowed appearance before 
teleporting to another grid"; if (!m_RestrictAppearanceAbroad) return true; // The rest is only needed for controlling appearance int flags = Scene.GridService.GetRegionFlags(Scene.RegionInfo.ScopeID, reg.RegionID); if (flags == -1 /* no region in DB */ || (flags & (int)OpenSim.Framework.RegionFlags.Hyperlink) != 0) { // this user is going to another grid if (Scene.UserManagementModule.IsLocalGridUser(sp.UUID)) { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: RestrictAppearanceAbroad is ON. Checking generic appearance"); // Check wearables for (int i = 0; i < AvatarWearable.MAX_WEARABLES; i++) { for (int j = 0; j < sp.Appearance.Wearables[i].Count; j++) { if (sp.Appearance.Wearables[i] == null) continue; bool found = false; foreach (AvatarAppearance a in ExportedAppearance) if (a.Wearables[i] != null) { found = true; break; } if (!found) { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Wearable not allowed to go outside {0}", i); return false; } found = false; foreach (AvatarAppearance a in ExportedAppearance) if (sp.Appearance.Wearables[i][j].AssetID == a.Wearables[i][j].AssetID) { found = true; break; } if (!found) { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Wearable not allowed to go outside {0}", i); return false; } } } // Check attachments foreach (AvatarAttachment att in sp.Appearance.GetAttachments()) { bool found = false; foreach (AvatarAttachment att2 in m_Attachs) { if (att2.AssetID == att.AssetID) { found = true; break; } } if (!found) { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Attachment not allowed to go outside {0}", att.AttachPoint); return false; } } } } reason = string.Empty; return true; } //protected override bool UpdateAgent(GridRegion reg, GridRegion finalDestination, AgentData agentData, ScenePresence sp) //{ // int flags = Scene.GridService.GetRegionFlags(Scene.RegionInfo.ScopeID, reg.RegionID); // if (flags == -1 /* no region in DB */ || (flags & (int)OpenSim.Data.RegionFlags.Hyperlink) != 0) // { // // this user is going to 
another grid // if (m_RestrictAppearanceAbroad && Scene.UserManagementModule.IsLocalGridUser(agentData.AgentID)) // { // // We need to strip the agent off its appearance // m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: RestrictAppearanceAbroad is ON. Sending generic appearance"); // // Delete existing npc attachments // Scene.AttachmentsModule.DeleteAttachmentsFromScene(sp, false); // // XXX: We can't just use IAvatarFactoryModule.SetAppearance() yet since it doesn't transfer attachments // AvatarAppearance newAppearance = new AvatarAppearance(ExportedAppearance, true); // sp.Appearance = newAppearance; // // Rez needed npc attachments // Scene.AttachmentsModule.RezAttachments(sp); // IAvatarFactoryModule module = Scene.RequestModuleInterface<IAvatarFactoryModule>(); // //module.SendAppearance(sp.UUID); // module.RequestRebake(sp, false); // Scene.AttachmentsModule.CopyAttachments(sp, agentData); // agentData.Appearance = sp.Appearance; // } // } // foreach (AvatarAttachment a in agentData.Appearance.GetAttachments()) // m_log.DebugFormat("[XXX]: {0}-{1}", a.ItemID, a.AssetID); // return base.UpdateAgent(reg, finalDestination, agentData, sp); //} #endregion HG overrides of IEntiryTransferModule #region IUserAgentVerificationModule public bool VerifyClient(AgentCircuitData aCircuit, string token) { if (aCircuit.ServiceURLs.ContainsKey("HomeURI")) { string url = aCircuit.ServiceURLs["HomeURI"].ToString(); IUserAgentService security = new UserAgentServiceConnector(url); return security.VerifyClient(aCircuit.SessionID, token); } else { m_log.DebugFormat( "[HG ENTITY TRANSFER MODULE]: Agent {0} {1} does not have a HomeURI OH NO!", aCircuit.firstname, aCircuit.lastname); } return false; } private void OnConnectionClosed(IClientAPI obj) { if (obj.SceneAgent.IsChildAgent) return; // Let's find out if this is a foreign user or a local user IUserManagement uMan = Scene.RequestModuleInterface<IUserManagement>(); // UserAccount account = 
Scene.UserAccountService.GetUserAccount(Scene.RegionInfo.ScopeID, obj.AgentId); if (uMan != null && uMan.IsLocalGridUser(obj.AgentId)) { // local grid user m_UAS.LogoutAgent(obj.AgentId, obj.SessionId); return; } AgentCircuitData aCircuit = ((Scene)(obj.Scene)).AuthenticateHandler.GetAgentCircuitData(obj.CircuitCode); if (aCircuit != null && aCircuit.ServiceURLs != null && aCircuit.ServiceURLs.ContainsKey("HomeURI")) { string url = aCircuit.ServiceURLs["HomeURI"].ToString(); IUserAgentService security = new UserAgentServiceConnector(url); security.LogoutAgent(obj.AgentId, obj.SessionId); //m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: Sent logout call to UserAgentService @ {0}", url); } else { m_log.DebugFormat("[HG ENTITY TRANSFER MODULE]: HomeURI not found for agent {0} logout", obj.AgentId); } } #endregion IUserAgentVerificationModule private GridRegion MakeRegion(AgentCircuitData aCircuit) { GridRegion region = new GridRegion(); Uri uri = null; if (!aCircuit.ServiceURLs.ContainsKey("HomeURI") || (aCircuit.ServiceURLs.ContainsKey("HomeURI") && !Uri.TryCreate(aCircuit.ServiceURLs["HomeURI"].ToString(), UriKind.Absolute, out uri))) return null; region.ExternalHostName = uri.Host; region.HttpPort = (uint)uri.Port; region.ServerURI = aCircuit.ServiceURLs["HomeURI"].ToString(); region.RegionName = string.Empty; region.InternalEndPoint = new System.Net.IPEndPoint(System.Net.IPAddress.Parse("0.0.0.0"), (int)0); return region; } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using Xunit;
using Tests.HashSet_HashSetTestSupport;
using Tests.HashSet_SetCollectionRelationshipTests;
using Tests.HashSet_SetCollectionComparerTests;
using Tests.HashSet_SetCollectionDuplicateItemTests;

namespace Tests
{
    /// <summary>
    /// Tests for HashSet{T}.IntersectWith. Each test drives a shared scenario
    /// builder (SetupTestN) to construct a set and an 'other' collection, calls
    /// IntersectWith, and verifies the resulting set contents and comparer.
    /// </summary>
    public class HashSet_IntersectWithTests
    {
        #region Set/Collection Relationship Tests (tests 1-42)

        //Test 1: other is null
        [Fact]
        public static void IsIntersectWith_Test1()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest1(out hashSet, out other);
            Assert.Throws<ArgumentNullException>(() => hashSet.IntersectWith(other)); //"ArgumentNullException expected."
        }

        //Test 2: other is empty and set is empty
        [Fact]
        public static void IsIntersectWith_Test2()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest2(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 3: Set/Collection Relationship Test 3: other is empty and set is single-item
        [Fact]
        public static void IsIntersectWith_Test3()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest3(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 4: Set/Collection Relationship Test 4: other is empty and set is multi-item
        [Fact]
        public static void IsIntersectWith_Test4()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest4(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 5: Set/Collection Relationship Test 5: other is single-item and set is empty
        [Fact]
        public static void IsIntersectWith_Test5()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest5(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 6: Set/Collection Relationship Test 6: other is single-item and set is single-item with a different item
        [Fact]
        public static void IsIntersectWith_Test6()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest6(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 7: Set/Collection Relationship Test 7: other is single-item and set is single-item with the same item
        [Fact]
        public static void IsIntersectWith_Test7()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest7(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(-23) }, hashSet.Comparer);
        }

        //Test 8: Set/Collection Relationship Test 8: other is single-item and set is multi-item where set and other are disjoint
        [Fact]
        public static void IsIntersectWith_Test8()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest8(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 9: Set/Collection Relationship Test 9: other is single-item and set is multi-item where set contains other
        [Fact]
        public static void IsIntersectWith_Test9()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest9(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(234) }, hashSet.Comparer);
        }

        //Test 10: Set/Collection Relationship Test 10: other is multi-item and set is empty
        [Fact]
        public static void IsIntersectWith_Test10()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest10(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 11: Set/Collection Relationship Test 11: other is multi-item and set is single-item and set and other are disjoint
        [Fact]
        public static void IsIntersectWith_Test11()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest11(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 12: Set/Collection Relationship Test 12: other is multi-item and set is single-item and other contains set
        [Fact]
        public static void IsIntersectWith_Test12()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest12(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(0) }, hashSet.Comparer);
        }

        //Test 13: Set/Collection Relationship Test 13: other is multi-item and set is multi-item and set and other disjoint
        [Fact]
        public static void IsIntersectWith_Test13()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest13(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 14: Set/Collection Relationship Test 14: other is multi-item and set is multi-item and set and other overlap but are non-comparable
        [Fact]
        public static void IsIntersectWith_Test14()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest14(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(-23), new Item(234) }, hashSet.Comparer);
        }

        //Test 15: Set/Collection Relationship Test 15: other is multi-item and set is multi-item and other is a proper subset of set
        [Fact]
        public static void IsIntersectWith_Test15()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest15(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(-23), new Item(0), new Item(234) }, hashSet.Comparer);
        }

        //Test 16: Set/Collection Relationship Test 16: other is multi-item and set is multi-item and set is a proper subset of other
        [Fact]
        public static void IsIntersectWith_Test16()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest16(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(0), new Item(23), new Item(222) }, hashSet.Comparer);
        }

        //Test 17: Set/Collection Relationship Test 17: other is multi-item and set is multi-item and set and other are equal
        [Fact]
        public static void IsIntersectWith_Test17()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest17(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(-23), new Item(0), new Item(23), new Item(222), new Item(234) }, hashSet.Comparer);
        }

        //Test 18: Set/Collection Relationship Test 18: other is set and set is empty
        [Fact]
        public static void IsIntersectWith_Test18()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest18(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 19: Set/Collection Relationship Test 19: other is set and set is single-item and set contains set
        [Fact]
        public static void IsIntersectWith_Test19()
        {
            HashSet<IEnumerable> hashSet;
            IEnumerable<IEnumerable> other;
            SetCollectionRelationshipTests.SetupTest19(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<IEnumerable>.VerifyHashSet(hashSet, new IEnumerable[] { other }, hashSet.Comparer);
        }

        //Test 20: Set/Collection Relationship Test 20: other is set and set is single-item and set does not contain set
        [Fact]
        public static void IsIntersectWith_Test20()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest20(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(22) }, hashSet.Comparer);
        }

        //Test 21: Set/Collection Relationship Test 21: other is set and set is multi-item and set contains set
        [Fact]
        public static void IsIntersectWith_Test21()
        {
            List<int> item1 = new List<int>(new int[] { 1, 3, 5, -2 });
            List<int> item2 = new List<int>(new int[] { 1, -3, 5, -2 });
            HashSet<IEnumerable> hashSet;
            IEnumerable<IEnumerable> other;
            SetCollectionRelationshipTests.SetupTest21(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<IEnumerable>.VerifyHashSet(hashSet, new IEnumerable[] { other, item1, item2 }, hashSet.Comparer);
        }

        //Test 22: Set/Collection Relationship Test 22: other is set and set is multi-item and set does not contain set
        [Fact]
        public static void IsIntersectWith_Test22()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest22(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(-2222), new Item(-2), new Item(22) }, hashSet.Comparer);
        }

        //Test 23: Set/Collection Relationship Test 23: item is only item in other: Item is the set and item is in the set
        [Fact]
        public static void IsIntersectWith_Test23()
        {
            HashSet<IEnumerable> hashSet;
            IEnumerable<IEnumerable> other;
            SetCollectionRelationshipTests.SetupTest23(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<IEnumerable>.VerifyHashSet(hashSet, new IEnumerable[] { hashSet }, hashSet.Comparer);
        }

        //Test 24: Set/Collection Relationship Test 24: item is only item in other: Item is the set and item is not in the set
        [Fact]
        public static void IsIntersectWith_Test24()
        {
            HashSet<IEnumerable> hashSet;
            IEnumerable<IEnumerable> other;
            SetCollectionRelationshipTests.SetupTest24(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<IEnumerable>.VerifyHashSet(hashSet, new IEnumerable[0], hashSet.Comparer);
        }

        //Test 25: Set/Collection Relationship Test 25: item is only item in other: Item is Default<T> and in set. T is a numeric type
        [Fact]
        public static void IsIntersectWith_Test25()
        {
            HashSet<int> hashSet;
            IEnumerable<int> other;
            SetCollectionRelationshipTests.SetupTest25(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<int>.VerifyHashSet(hashSet, new int[] { 0 }, hashSet.Comparer);
        }

        //Test 26: Set/Collection Relationship Test 26: item is only item in other: Item is Default<T> and in set. T is a reference type
        [Fact]
        public static void IsIntersectWith_Test26()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest26(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { null }, hashSet.Comparer);
        }

        //Test 27: Set/Collection Relationship Test 27: item is only item in other: Item is Default<T> and not in set. T is a numeric type
        [Fact]
        public static void IsIntersectWith_Test27()
        {
            HashSet<int> hashSet;
            IEnumerable<int> other;
            SetCollectionRelationshipTests.SetupTest27(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<int>.VerifyHashSet(hashSet, new int[0], hashSet.Comparer);
        }

        //Test 28: Set/Collection Relationship Test 28: item is only item in other: Item is Default<T> and not in set. T is a reference type
        [Fact]
        public static void IsIntersectWith_Test28()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest28(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 29: Set/Collection Relationship Test 29: item is only item in other: Item is equal to an item in set but different.
        [Fact]
        public static void IsIntersectWith_Test29()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest29(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(1) }, hashSet.Comparer);
        }

        //Test 30: Set/Collection Relationship Test 30: item is only item in other: Item shares hash value with unequal item in set
        [Fact]
        public static void IsIntersectWith_Test30()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest30(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 31: Set/Collection Relationship Test 31: item is only item in other: Item was previously in set but not currently
        [Fact]
        public static void IsIntersectWith_Test31()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest31(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 32: Set/Collection Relationship Test 32: item is only item in other: Item was previously removed from set but in it currently
        [Fact]
        public static void IsIntersectWith_Test32()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest32(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(1) }, hashSet.Comparer);
        }

        //Test 33: Set/Collection Relationship Test 33: item is one of the items in other: Item is the set and item is in the set
        [Fact]
        public static void IsIntersectWith_Test33()
        {
            HashSet<IEnumerable> hashSet;
            IEnumerable<IEnumerable> other;
            SetCollectionRelationshipTests.SetupTest33(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<IEnumerable>.VerifyHashSet(hashSet, new IEnumerable[] { hashSet }, hashSet.Comparer);
        }

        //Test 34: Set/Collection Relationship Test 34: item is one of the items in other: Item is the set and item is not in the set
        [Fact]
        public static void IsIntersectWith_Test34()
        {
            HashSet<IEnumerable> hashSet;
            IEnumerable<IEnumerable> other;
            SetCollectionRelationshipTests.SetupTest34(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<IEnumerable>.VerifyHashSet(hashSet, new IEnumerable[0], hashSet.Comparer);
        }

        //Test 35: Set/Collection Relationship Test 35: item is one of the items in other: Item is Default<T> and in set. T is a numeric type
        [Fact]
        public static void IsIntersectWith_Test35()
        {
            HashSet<int> hashSet;
            IEnumerable<int> other;
            SetCollectionRelationshipTests.SetupTest35(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<int>.VerifyHashSet(hashSet, new int[] { 0 }, hashSet.Comparer);
        }

        //Test 36: Set/Collection Relationship Test 36: item is one of the items in other: Item is Default<T> and in set. T is a reference type
        [Fact]
        public static void IsIntersectWith_Test36()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest36(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { null }, hashSet.Comparer);
        }

        //Test 37: Set/Collection Relationship Test 37: item is one of the items in other: Item is Default<T> and not in set. T is a numeric type
        [Fact]
        public static void IsIntersectWith_Test37()
        {
            HashSet<int> hashSet;
            IEnumerable<int> other;
            SetCollectionRelationshipTests.SetupTest37(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<int>.VerifyHashSet(hashSet, new int[0], hashSet.Comparer);
        }

        //Test 38: Set/Collection Relationship Test 38: item is one of the items in other: Item is Default<T> and not in set. T is a reference type
        [Fact]
        public static void IsIntersectWith_Test38()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest38(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 39: Set/Collection Relationship Test 39: item is one of the items in other: Item is equal to an item in set but different.
        [Fact]
        public static void IsIntersectWith_Test39()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest39(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(1) }, hashSet.Comparer);
        }

        //Test 40: Set/Collection Relationship Test 40: item is one of the items in other: Item shares hash value with unequal item in set
        [Fact]
        public static void IsIntersectWith_Test40()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest40(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 41: Set/Collection Relationship Test 41: item is one of the items in other: Item was previously in set but not currently
        [Fact]
        public static void IsIntersectWith_Test41()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest41(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[0], hashSet.Comparer);
        }

        //Test 42: Set/Collection Relationship Test 42: item is one of the items in other: Item was previously removed from set but in it currently
        [Fact]
        public static void IsIntersectWith_Test42()
        {
            HashSet<Item> hashSet;
            IEnumerable<Item> other;
            SetCollectionRelationshipTests.SetupTest42(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<Item>.VerifyHashSet(hashSet, new Item[] { new Item(1) }, hashSet.Comparer);
        }

        #endregion

        #region Set/Collection Comparer Tests (tests 43-57)

        //Test 43: Set/Collection Comparer Test 1: Item is in collection: item same as element in set by default comparer, different by sets comparer - set contains item that is equal by sets comparer
        [Fact]
        public static void IsIntersectWith_Test43()
        {
            HashSet<ValueItem> hashSet;
            IEnumerable<ValueItem> other;
            ValueItem item3 = new ValueItem(9999, -2);
            SetCollectionComparerTests.SetupTest1(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer);
        }

        //Test 44: Set/Collection Comparer Test 2: Item is in collection: item same as element in set by default comparer, different by sets comparer - set does not contain item that is equal by sets comparer
        [Fact]
        public static void IsIntersectWith_Test44()
        {
            HashSet<ValueItem> hashSet;
            IEnumerable<ValueItem> other;
            SetCollectionComparerTests.SetupTest2(out hashSet, out other);
            hashSet.IntersectWith(other);
            HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer);
        }

        //Test 45: Set/Collection Comparer Test 3: Item is in collection: item same as element in set by sets comparer, different by default comparer - set contains item that is equal by default comparer
        [Fact]
        public static void IsIntersectWith_Test45()
        {
            HashSet<ValueItem> hashSet;
            IEnumerable<ValueItem> other;
            ValueItem item3 = new ValueItem(9999, -2);
            SetCollectionComparerTests.SetupTest3(out hashSet, out other);
            hashSet.IntersectWith(other);
HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 46: Set/Collection Comparer Test 4: Item is in collection: item same as element in set by sets comparer, different by default comparer - set does not contain item that is equal by default comparer [Fact] public static void IsIntersectWith_Test46() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest4(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 48: Set/Collection Comparer Test 6: Item is only item in collection: item same as element in set by default comparer, different by sets comparer - set contains item that is equal by sets comparer [Fact] public static void IsIntersectWith_Test48() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest6(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 49: Set/Collection Comparer Test 7: Item is only item in collection: item same as element in set by default comparer, different by sets comparer - set does not contain item that is equal by sets comparer [Fact] public static void IsIntersectWith_Test49() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionComparerTests.SetupTest7(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 50: Set/Collection Comparer Test 8: Item is only item in collection: item same as element in set by sets comparer, different by default comparer - set contains item that is equal by default comparer [Fact] public static void IsIntersectWith_Test50() { HashSet<ValueItem> hashSet; 
IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest8(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 51: Set/Collection Comparer Test 9: Item is only item in collection: item same as element in set by sets comparer, different by default comparer - set does not contain item that is equal by default comparer [Fact] public static void IsIntersectWith_Test51() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest9(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 52: Set/Collection Comparer Test 10: Item is only item in collection: item contains set and item in set with GetSetComparer<T> as comparer [Fact] public static void IsIntersectWith_Test52() { HashSet<IEnumerable> inneritem = new HashSet<IEnumerable>(); HashSet<HashSet<IEnumerable>> hashSet; IEnumerable<HashSet<IEnumerable>> other; SetCollectionComparerTests.SetupTest10(out hashSet, out other); inneritem.Add(hashSet); hashSet.IntersectWith(other); HashSet<IEnumerable>[] expected = new HashSet<IEnumerable>[] { inneritem }; HashSet<IEnumerable>[] actual = new HashSet<IEnumerable>[1]; hashSet.CopyTo(actual, 0, 1); Assert.Equal(1, hashSet.Count); //"Should be equal" HashSetTestSupport.HashSetContains(actual, expected); } //Test 53: Set/Collection Comparer Test 11: Item is collection: item same as element in set by default comparer, different by sets comparer - set contains item that is equal by sets comparer [Fact] public static void IsIntersectWith_Test53() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest11(out hashSet, out other); 
hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 54: Set/Collection Comparer Test 12: Item is collection: item same as element in set by default comparer, different by sets comparer - set does not contain item that is equal by sets comparer [Fact] public static void IsIntersectWith_Test54() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionComparerTests.SetupTest12(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 55: Set/Collection Comparer Test 13: Item is collection: item same as element in set by sets comparer, different by default comparer - set contains item that is equal by default comparer [Fact] public static void IsIntersectWith_Test55() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest13(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 56: Set/Collection Comparer Test 14: Item is collection: item same as element in set by sets comparer, different by default comparer - set does not contain item that is equal by default comparer [Fact] public static void IsIntersectWith_Test56() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionComparerTests.SetupTest14(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 57: Set/Collection Comparer Test 15: Item is collection: item contains set and item in set with GetSetComparer<T> as comparer [Fact] public static void IsIntersectWith_Test57() { ValueItem itemn4 = new ValueItem(-4, -4); ValueItem itemn3 = new ValueItem(-3, -3); 
ValueItem itemn2 = new ValueItem(-2, -2); ValueItem itemn1 = new ValueItem(-1, -1); ValueItem item1 = new ValueItem(1, 1); ValueItem item2 = new ValueItem(2, 2); ValueItem item3 = new ValueItem(3, 3); ValueItem item4 = new ValueItem(4, 4); HashSet<IEnumerable> itemhs1 = new HashSet<IEnumerable>(new ValueItem[] { item1, item2, item3, item4 }); HashSet<IEnumerable> itemhs2 = new HashSet<IEnumerable>(new ValueItem[] { itemn1, itemn2, itemn3, itemn4 }); HashSet<IEnumerable> inneritem = new HashSet<IEnumerable>(); HashSet<HashSet<IEnumerable>> hashSet; IEnumerable<HashSet<IEnumerable>> other; SetCollectionComparerTests.SetupTest15(out hashSet, out other); inneritem.Add(hashSet); hashSet.IntersectWith(other); HashSet<IEnumerable>[] expected = new HashSet<IEnumerable>[] { itemhs1, itemhs2, inneritem }; HashSet<IEnumerable>[] actual = new HashSet<IEnumerable>[3]; hashSet.CopyTo(actual, 0, 3); Assert.Equal(3, hashSet.Count); //"Should be equal" HashSetTestSupport.HashSetContains(actual, expected); } #endregion #region Set/Collection Duplicate Item Tests (tests 58-73) //Test 58: Set/Collection Duplicate Item Test 1: other collection is multi-item with duplicates, set is empty [Fact] public static void IsIntersectWith_Test58() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest1(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 59: Set/Collection Duplicate Item Test 2: other collection is multi-item with duplicates, set contains a single item not in other [Fact] public static void IsIntersectWith_Test59() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest2(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 60: Set/Collection Duplicate Item Test 3: other collection is 
multi-item with duplicates, set contains a single item that is in other but not a duplicate in other [Fact] public static void IsIntersectWith_Test60() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item3 = new ValueItem(9999, -2); SetCollectionDuplicateItemTests.SetupTest3(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item3 }, hashSet.Comparer); } //Test 61: Set/Collection Duplicate Item Test 4: other collection is multi-item with duplicates, set contains a single item that is a duplicate in other [Fact] public static void IsIntersectWith_Test61() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem item2 = new ValueItem(4, 4); SetCollectionDuplicateItemTests.SetupTest4(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { item2 }, hashSet.Comparer); } //Test 62: Set/Collection Duplicate Item Test 5: other collection is multi-item with duplicates, set is multi-item as well, set and other are disjoint [Fact] public static void IsIntersectWith_Test62() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest5(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 63: Set/Collection Duplicate Item Test 6: other collection is multi-item with duplicates, set is multi-item as well, set and other overlap but are non-comparable, the overlap contains duplicate items from other [Fact] public static void IsIntersectWith_Test63() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem itemo2 = new ValueItem(4, 4); ValueItem itemo3 = new ValueItem(9999, -2); ValueItem itemo4 = new ValueItem(99, -2); ValueItem items2 = new ValueItem(-4, -4); ValueItem items4 = new ValueItem(-99, 2); SetCollectionDuplicateItemTests.SetupTest6(out 
hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { items2, items4, itemo2, itemo3, itemo4 }, hashSet.Comparer); } //Test 64: Set/Collection Duplicate Item Test 7: other collection is multi-item with duplicates, set is multi-item as well, set and other overlap but are non-comparable, the overlap does not contain duplicate items from other [Fact] public static void IsIntersectWith_Test64() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem itemo3 = new ValueItem(9999, -2); ValueItem items2 = new ValueItem(-4, -4); ValueItem items4 = new ValueItem(-99, 2); SetCollectionDuplicateItemTests.SetupTest7(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { items2, items4, itemo3 }, hashSet.Comparer); } //Test 65: Set/Collection Duplicate Item Test 8: other collection is multi-item with duplicates, set is multi-item as well, other is a proper subset of set [Fact] public static void IsIntersectWith_Test65() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem itemo1 = new ValueItem(34, -5); ValueItem itemo2 = new ValueItem(4, 4); ValueItem itemo3 = new ValueItem(9999, -2); ValueItem itemo4 = new ValueItem(99, -2); SetCollectionDuplicateItemTests.SetupTest8(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { itemo1, itemo2, itemo3, itemo4 }, hashSet.Comparer); } //Test 66: Set/Collection Duplicate Item Test 9: other collection is multi-item with duplicates, set is multi-item as well, set is a proper subset of other, set contains duplicate items from other [Fact] public static void IsIntersectWith_Test66() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem items1 = new ValueItem(-34, 5); ValueItem items2 = new ValueItem(-4, -4); ValueItem items3 = new ValueItem(-9999, 2); ValueItem items4 = new ValueItem(-99, 
2); SetCollectionDuplicateItemTests.SetupTest9(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { items1, items2, items3, items4 }, hashSet.Comparer); } //Test 67: Set/Collection Duplicate Item Test 10: other collection is multi-item with duplicates, set is multi-item as well, set is a proper subset of other, set does not contain duplicate items from other [Fact] public static void IsIntersectWith_Test67() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem items1 = new ValueItem(-34, 5); ValueItem items2 = new ValueItem(-4, -4); ValueItem items3 = new ValueItem(-9999, 2); ValueItem items4 = new ValueItem(-99, 2); SetCollectionDuplicateItemTests.SetupTest10(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { items1, items2, items3, items4 }, hashSet.Comparer); } //Test 68: Set/Collection Duplicate Item Test 11: other collection is multi-item with duplicates, set is multi-item as well, set and other are equal [Fact] public static void IsIntersectWith_Test68() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; ValueItem itemo1 = new ValueItem(34, -5); ValueItem itemo2 = new ValueItem(4, 4); ValueItem itemo3 = new ValueItem(9999, -2); ValueItem itemo4 = new ValueItem(99, -2); ValueItem items1 = new ValueItem(-34, 5); ValueItem items2 = new ValueItem(-4, -4); ValueItem items3 = new ValueItem(-9999, 2); ValueItem items4 = new ValueItem(-99, 2); SetCollectionDuplicateItemTests.SetupTest11(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { items1, items2, items3, items4, itemo1, itemo2, itemo3, itemo4 }, hashSet.Comparer); } //Test 69: Set/Collection Duplicate Item Test 12: other contains duplicates by sets comparer but not by default comparer [Fact] public static void IsIntersectWith_Test69() { HashSet<ValueItem> hashSet; 
IEnumerable<ValueItem> other; ValueItem items2 = new ValueItem(-4, -4); SetCollectionDuplicateItemTests.SetupTest12(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[] { items2 }, hashSet.Comparer); } //Test 70: Set/Collection Duplicate Item Test 13: other contains duplicates by default comparer but not by sets comparer [Fact] public static void IsIntersectWith_Test70() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest13(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 71: Set/Collection Duplicate Item Test 14: set contains duplicate items by default comparer, those items also in other [Fact] public static void IsIntersectWith_Test71() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest14(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 72: Set/Collection Duplicate Item Test 15: set contains duplicate items by default comparer, one of those items also in other [Fact] public static void IsIntersectWith_Test72() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest15(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } //Test 73: Set/Collection Duplicate Item Test 16: set contains duplicate items by default comparer, those items not in other [Fact] public static void IsIntersectWith_Test73() { HashSet<ValueItem> hashSet; IEnumerable<ValueItem> other; SetCollectionDuplicateItemTests.SetupTest16(out hashSet, out other); hashSet.IntersectWith(other); HashSetTestSupport<ValueItem>.VerifyHashSet(hashSet, new ValueItem[0], hashSet.Comparer); } #endregion } }
using System.Data;
using NUnit.Framework;

namespace SequelocityDotNet.Tests.MySql.DatabaseCommandExtensionsTests
{
    /// <summary>
    /// Integration tests for the ExecuteToMap DatabaseCommand extension, run against a
    /// live MySQL database (connection string resolved via ConnectionStringsNames).
    /// ExecuteToMap executes the command and invokes a caller-supplied delegate once
    /// per data record, collecting the delegate's return values into a list. These
    /// tests also verify the extension's side effects: DbCommand disposal, optional
    /// connection keep-alive, and the pre/post/unhandled-exception event handlers.
    /// NOTE(review): these tests require a reachable MySQL instance — they are
    /// integration tests, not unit tests.
    /// </summary>
    [TestFixture]
    public class ExecuteToMapTests
    {
        // Projection target produced by the record-mapping delegates below.
        public class SuperHero
        {
            public long SuperHeroId;
            public string SuperHeroName;
        }

        [Test]
        public void Should_Call_The_DataRecordCall_Action_For_Each_Record_In_The_Result_Set()
        {
            // Arrange: build a temp table with exactly two rows and select them back.
            const string sql = @" DROP TEMPORARY TABLE IF EXISTS SuperHero; CREATE TEMPORARY TABLE SuperHero ( SuperHeroId INT NOT NULL AUTO_INCREMENT, SuperHeroName VARCHAR(120) NOT NULL, PRIMARY KEY ( SuperHeroId ) ); INSERT INTO SuperHero ( SuperHeroName ) VALUES ( 'Superman' ); INSERT INTO SuperHero ( SuperHeroName ) VALUES ( 'Batman' ); SELECT SuperHeroId, SuperHeroName FROM SuperHero; ";

            // Act: the mapping delegate should run once per record in the result set.
            var superHeroes = Sequelocity.GetDatabaseCommand( ConnectionStringsNames.MySqlConnectionString )
                .SetCommandText( sql )
                .ExecuteToMap( record =>
                {
                    var obj = new SuperHero
                    {
                        SuperHeroId = record.GetValue( 0 ).ToLong(),
                        SuperHeroName = record.GetValue( 1 ).ToString()
                    };

                    return obj;
                } );

            // Assert: both inserted rows were mapped.
            Assert.That( superHeroes.Count == 2 );
        }

        [Test]
        public void Should_Null_The_DbCommand_By_Default()
        {
            // Arrange
            const string sql = @" DROP TEMPORARY TABLE IF EXISTS SuperHero; CREATE TEMPORARY TABLE SuperHero ( SuperHeroId INT NOT NULL AUTO_INCREMENT, SuperHeroName VARCHAR(120) NOT NULL, PRIMARY KEY ( SuperHeroId ) ); INSERT INTO SuperHero ( SuperHeroName ) VALUES ( 'Superman' ); INSERT INTO SuperHero ( SuperHeroName ) VALUES ( 'Batman' ); SELECT SuperHeroId, SuperHeroName FROM SuperHero; ";

            var databaseCommand = Sequelocity.GetDatabaseCommand( ConnectionStringsNames.MySqlConnectionString )
                .SetCommandText( sql );

            // Act: mapped result is intentionally unused; the test inspects command state.
            var superHeroes = databaseCommand.ExecuteToMap( record =>
            {
                var obj = new SuperHero
                {
                    SuperHeroId = record.GetValue( 0 ).ToLong(),
                    SuperHeroName = record.GetValue( 1 ).ToString()
                };

                return obj;
            } );

            // Assert: by default ExecuteToMap disposes/nulls the underlying DbCommand.
            Assert.IsNull( databaseCommand.DbCommand );
        }

        [Test]
        public void Should_Keep_The_Database_Connection_Open_If_keepConnectionOpen_Parameter_Was_True()
        {
            // Arrange
            const string sql = @" DROP TEMPORARY TABLE IF EXISTS SuperHero; CREATE TEMPORARY TABLE SuperHero ( SuperHeroId INT NOT NULL AUTO_INCREMENT, SuperHeroName VARCHAR(120) NOT NULL, PRIMARY KEY ( SuperHeroId ) ); INSERT INTO SuperHero ( SuperHeroName ) VALUES ( 'Superman' ); INSERT INTO SuperHero ( SuperHeroName ) VALUES ( 'Batman' ); SELECT SuperHeroId, SuperHeroName FROM SuperHero; ";

            var databaseCommand = Sequelocity.GetDatabaseCommand( ConnectionStringsNames.MySqlConnectionString )
                .SetCommandText( sql );

            // Act: second argument (keepConnectionOpen) = true keeps the connection alive.
            var superHeroes = databaseCommand.ExecuteToMap( record =>
            {
                var obj = new SuperHero
                {
                    SuperHeroId = record.GetValue( 0 ).ToLong(),
                    SuperHeroName = record.GetValue( 1 ).ToString()
                };

                return obj;
            }, true );

            // Assert
            Assert.That( databaseCommand.DbCommand.Connection.State == ConnectionState.Open );

            // Cleanup: the connection was deliberately left open, so dispose explicitly.
            databaseCommand.Dispose();
        }

        [Test]
        public void Should_Call_The_DatabaseCommandPreExecuteEventHandler()
        {
            // Arrange: register a pre-execute hook that records it was invoked.
            bool wasPreExecuteEventHandlerCalled = false;

            Sequelocity.ConfigurationSettings.EventHandlers.DatabaseCommandPreExecuteEventHandlers.Add( command => wasPreExecuteEventHandlerCalled = true );

            // Act
            var superHeroes = Sequelocity.GetDatabaseCommand( ConnectionStringsNames.MySqlConnectionString )
                .SetCommandText( "SELECT 1 as SuperHeroId, 'Superman' as SuperHeroName" )
                .ExecuteToMap( record =>
                {
                    var obj = new SuperHero
                    {
                        SuperHeroId = record.GetValue( 0 ).ToLong(),
                        SuperHeroName = record.GetValue( 1 ).ToString()
                    };

                    return obj;
                } );

            // Assert
            Assert.IsTrue( wasPreExecuteEventHandlerCalled );
        }

        [Test]
        public void Should_Call_The_DatabaseCommandPostExecuteEventHandler()
        {
            // Arrange: register a post-execute hook that records it was invoked.
            bool wasPostExecuteEventHandlerCalled = false;

            Sequelocity.ConfigurationSettings.EventHandlers.DatabaseCommandPostExecuteEventHandlers.Add( command => wasPostExecuteEventHandlerCalled = true );

            // Act
            var superHeroes = Sequelocity.GetDatabaseCommand( ConnectionStringsNames.MySqlConnectionString )
                .SetCommandText( "SELECT 1 as SuperHeroId, 'Superman' as SuperHeroName" )
                .ExecuteToMap( record =>
                {
                    var obj = new SuperHero
                    {
                        SuperHeroId = record.GetValue( 0 ).ToLong(),
                        SuperHeroName = record.GetValue( 1 ).ToString()
                    };

                    return obj;
                } );

            // Assert
            Assert.IsTrue( wasPostExecuteEventHandlerCalled );
        }

        [Test]
        public void Should_Call_The_DatabaseCommandUnhandledExceptionEventHandler()
        {
            // Arrange: register an unhandled-exception hook, then run malformed SQL.
            bool wasUnhandledExceptionEventHandlerCalled = false;

            Sequelocity.ConfigurationSettings.EventHandlers.DatabaseCommandUnhandledExceptionEventHandlers.Add( ( exception, command ) =>
            {
                wasUnhandledExceptionEventHandlerCalled = true;
            } );

            // Act: "asdf;lkj" is intentionally invalid SQL so MySQL raises an exception.
            TestDelegate action = () => Sequelocity.GetDatabaseCommand( ConnectionStringsNames.MySqlConnectionString )
                .SetCommandText( "asdf;lkj" )
                .ExecuteToMap( record =>
                {
                    var obj = new SuperHero
                    {
                        SuperHeroId = record.GetValue( 0 ).ToLong(),
                        SuperHeroName = record.GetValue( 1 ).ToString()
                    };

                    return obj;
                } );

            // Assert: the exception still propagates AND the hook observed it.
            Assert.Throws<global::MySql.Data.MySqlClient.MySqlException>( action );

            Assert.IsTrue( wasUnhandledExceptionEventHandlerCalled );
        }
    }
}
using System;
using Microsoft.SPOT;

namespace Microsoft.SPOT.Platform.Tests
{
    /// <summary>
    /// Assertion helpers for exercising the .NET Micro Framework BitConverter API.
    /// Each public helper calls one BitConverter member with the supplied input,
    /// compares the result against the expected value, and throws TestFailException
    /// (with the API name and the actual/expected values rendered as hex) on
    /// mismatch. The *Throw variants instead assert that the call raises exactly
    /// the expected exception type. The private hex/compare utilities exist because
    /// the Micro Framework lacks the desktop formatting conveniences.
    /// </summary>
    class Helper
    {
        // Maps a single hex digit value (0..15) to its uppercase character.
        // NOTE(review): "Digitial" is a typo for "Digital"; private, so harmless.
        static string DigitialToHex(int x)
        {
            switch (x)
            {
                case 0: return "0";
                case 1: return "1";
                case 2: return "2";
                case 3: return "3";
                case 4: return "4";
                case 5: return "5";
                case 6: return "6";
                case 7: return "7";
                case 8: return "8";
                case 9: return "9";
                case 10: return "A";
                case 11: return "B";
                case 12: return "C";
                case 13: return "D";
                case 14: return "E";
                case 15: return "F";
                default:
                    // Out-of-range digit indicates a bug in this helper's callers.
                    throw new Exception();
            }
        }

        // Renders one byte as two uppercase hex characters (high nibble first).
        static string ByteToHex(byte b)
        {
            return DigitialToHex(b / 16) + DigitialToHex(b % 16);
        }

        // Renders a 64-bit value as 16 hex characters, most significant byte first.
        static string LongToHex(long l)
        {
            string sHex = string.Empty;
            for (int i = 0; i < 8; i++)
            {
                // Prepend so the low byte (processed first) ends up on the right.
                sHex = ByteToHex((byte)(l & 0xff)) + sHex;
                l >>= 8;
            }
            return sHex;
        }

        // Element-wise equality of two byte arrays (length must match too).
        static bool CompareByteArray(byte[] left, byte[] right)
        {
            if (left.Length != right.Length)
            {
                return false;
            }
            for (int i = 0; i < left.Length; i++)
            {
                if (left[i] != right[i])
                {
                    return false;
                }
            }
            return true;
        }

        // Renders a byte array as dash-separated hex pairs, e.g. "0A-FF-00".
        static string ByteArrayToHex(byte[] bs)
        {
            string sHex = string.Empty;
            foreach (byte b in bs)
            {
                if (sHex.Length > 0)
                {
                    sHex += "-";
                }
                sHex += ByteToHex(b);
            }
            return sHex;
        }

        // Asserts BitConverter.DoubleToInt64Bits(input) == expected.
        static public void DoubleToLongBits(double input, long expected)
        {
            long ret = BitConverter.DoubleToInt64Bits(input);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.DoubleToInt64Bits",
                    LongToHex(ret), LongToHex(expected), input);
            }
        }

        // --- GetBytes helpers: one per primitive overload; all follow the same
        // --- pattern of comparing the produced byte array against `expected`.

        static public void GetBytesBool(bool input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<bool>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesChar(char input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<char>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesDouble(double input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<double>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesSingle(float input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<float>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesInt64(long input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<long>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesInt32(int input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<int>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesInt16(short input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<short>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesUInt64(ulong input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<ulong>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesUInt32(uint input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<uint>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        static public void GetBytesUInt16(ushort input, byte[] expected)
        {
            byte[] ret = BitConverter.GetBytes(input);
            if (!CompareByteArray(ret, expected))
            {
                throw new TestFailException(
                    "BitConverter.GetBytes<ushort>",
                    ByteArrayToHex(ret), ByteArrayToHex(expected), input);
            }
        }

        // Asserts BitConverter.Int64BitsToDouble(input) == expected.
        // NOTE(review): direct `!=` on double is intentional here — bit-level
        // round-tripping should be exact (though NaN inputs would always "fail").
        static public void LongBitsToDouble(long input, double expected)
        {
            double ret = BitConverter.Int64BitsToDouble(input);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.Int64BitsToDouble",
                    ret, expected, LongToHex(input));
            }
        }

        // --- BAToXxx helpers: assert BitConverter.ToXxx(bytes, index) == expected.
        // --- BAToXxxThrow helpers: assert the call throws exactly the given type.

        static public void BAToBool(byte[] bytes, int index, bool expected)
        {
            bool ret = BitConverter.ToBoolean(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToBoolean",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToBoolThrow(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToBoolean(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            // Fails if no exception was thrown, or the wrong type was thrown.
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToBoolean",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToChar(byte[] bytes, int index, char expected)
        {
            char ret = BitConverter.ToChar(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToChar",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToCharThrow(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToChar(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToChar",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToDouble(byte[] bytes, int index, double expected)
        {
            double ret = BitConverter.ToDouble(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToDouble",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToDoubleThrow(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToDouble(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToDouble",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToInt16(byte[] bytes, int index, short expected)
        {
            short ret = BitConverter.ToInt16(bytes, index);
            if (ret != expected)
            {
                // NOTE(review): `index` is passed twice here, unlike every sibling
                // helper — looks like a copy/paste slip; confirm against
                // TestFailException's params before changing.
                throw new TestFailException(
                    "BitConverter.ToInt16",
                    ret, expected, ByteArrayToHex(bytes), index, index);
            }
        }

        static public void BAToInt16Throw(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToInt16(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToInt16",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToInt32(byte[] bytes, int index, int expected)
        {
            int ret = BitConverter.ToInt32(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToInt32",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToInt32Throw(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToInt32(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToInt32",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToInt64(byte[] bytes, int index, long expected)
        {
            long ret = BitConverter.ToInt64(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToInt64",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToInt64Throw(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToInt64(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToInt64",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToSingle(byte[] bytes, int index, float expected)
        {
            float ret = BitConverter.ToSingle(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToSingle",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToSingleThrow(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToSingle(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToSingle",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToUInt16(byte[] bytes, int index, ushort expected)
        {
            ushort ret = BitConverter.ToUInt16(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToUInt16",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToUInt16Throw(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToUInt16(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToUInt16",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToUInt32(byte[] bytes, int index, uint expected)
        {
            uint ret = BitConverter.ToUInt32(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToUInt32",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToUInt32Throw(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToUInt32(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToUInt32",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToUInt64(byte[] bytes, int index, ulong expected)
        {
            ulong ret = BitConverter.ToUInt64(bytes, index);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToUInt64",
                    ret, expected, ByteArrayToHex(bytes), index);
            }
        }

        static public void BAToUInt64Throw(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToUInt64(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToUInt64",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        // Asserts BitConverter.ToString(bytes) produces the expected dash-hex string.
        static public void WriteByteArray(byte[] bytes, string expected)
        {
            string ret = BitConverter.ToString(bytes);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToString",
                    ret, expected, ByteArrayToHex(bytes));
            }
        }

        // Builds a multi-line rendering (rows of 20 bytes joined by CRLF) via the
        // ToString(array, start, length) overload and compares against `expected`.
        static public void WriteMultiLineByteArray(byte[] bytes, string expected)
        {
            string ret = string.Empty;
            const int rowSize = 20;
            int i;
            // Full rows of rowSize bytes each.
            for (i = 0; i < bytes.Length - rowSize; i += rowSize)
            {
                if (ret.Length > 0)
                {
                    ret += "\r\n";
                }
                ret += BitConverter.ToString(bytes, i, rowSize);
            }
            // Final (possibly short) row uses the no-length overload to the end.
            if (ret.Length > 0)
            {
                ret += "\r\n";
            }
            ret += BitConverter.ToString(bytes, i);
            if (ret != expected)
            {
                throw new TestFailException(
                    "BitConverter.ToString(2)",
                    ret, expected, ByteArrayToHex(bytes));
            }
        }

        // Asserts BitConverter.ToString(bytes) throws exactly expectedExceptionType.
        static public void ToStringThrow(byte[] bytes, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToString(bytes);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToString",
                    exception, expectedExceptionType, ByteArrayToHex(bytes));
            }
        }

        // Overload: asserts ToString(bytes, index) throws expectedExceptionType.
        static public void ToStringThrow(byte[] bytes, int index, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToString(bytes, index);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToString",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index);
            }
        }

        // Overload: asserts ToString(bytes, index, length) throws expectedExceptionType.
        static public void ToStringThrow(byte[] bytes, int index, int length, Type expectedExceptionType)
        {
            Exception exception = null;
            try
            {
                BitConverter.ToString(bytes, index, length);
            }
            catch (Exception e)
            {
                exception = e;
            }
            if (exception == null || exception.GetType() != expectedExceptionType)
            {
                throw new TestFailException(
                    "BitConverter.ToString",
                    exception, expectedExceptionType, ByteArrayToHex(bytes), index, length);
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem.Extensions;
using Microsoft.VisualStudio.LanguageServices.Implementation.Venus;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.TextManager.Interop;
using Roslyn.Utilities;
using VsTextSpan = Microsoft.VisualStudio.TextManager.Interop.TextSpan;

namespace Microsoft.VisualStudio.LanguageServices.Implementation.TaskList
{
    /// <summary>
    /// Receives build ("external") errors that a project system reports through the
    /// <see cref="IVsReportExternalErrors"/> and <see cref="IVsLanguageServiceBuildErrorReporter2"/>
    /// COM interfaces, converts them to <see cref="DiagnosticData"/>, and hands them to the
    /// workspace's <see cref="ExternalErrorDiagnosticUpdateSource"/>.
    /// </summary>
    internal class ProjectExternalErrorReporter : IVsReportExternalErrors, IVsLanguageServiceBuildErrorReporter2
    {
        // Tags attached to non-compiler diagnostics (telemetry only).
        internal static readonly IReadOnlyList<string> CustomTags = ImmutableArray.Create(WellKnownDiagnosticTags.Telemetry);
        // Tags attached to diagnostics whose id looks like a C#/VB compiler id (CSxxxx / BCxxxx).
        internal static readonly IReadOnlyList<string> CompilerDiagnosticCustomTags = ImmutableArray.Create(WellKnownDiagnosticTags.Compiler, WellKnownDiagnosticTags.Telemetry);

        private readonly ProjectId _projectId;
        private readonly string _errorCodePrefix;
        private readonly VisualStudioWorkspaceImpl _workspace;
        private readonly ExternalErrorDiagnosticUpdateSource _diagnosticProvider;

        /// <summary>
        /// Creates a reporter bound to one project.
        /// </summary>
        /// <param name="projectId">Workspace id of the project whose build errors are reported.</param>
        /// <param name="errorCodePrefix">Prefix (e.g. "CS"/"BC") prepended to numeric error ids.</param>
        /// <param name="serviceProvider">Used to resolve the MEF-exported workspace and update source.</param>
        public ProjectExternalErrorReporter(ProjectId projectId, string errorCodePrefix, IServiceProvider serviceProvider)
        {
            _projectId = projectId;
            _errorCodePrefix = errorCodePrefix;
            _diagnosticProvider = serviceProvider.GetMefService<ExternalErrorDiagnosticUpdateSource>();
            _workspace = serviceProvider.GetMefService<VisualStudioWorkspaceImpl>();

            Debug.Assert(_diagnosticProvider != null);
            Debug.Assert(_workspace != null);
        }

        /// <summary>
        /// Drains the COM error enumerator, partitioning each error into a per-document bucket
        /// (when it names a file belonging to this project) or a project-level bucket, then
        /// forwards both buckets to the diagnostic update source in one call.
        /// </summary>
        public int AddNewErrors(IVsEnumExternalErrors pErrors)
        {
            var projectErrors = new HashSet<DiagnosticData>();
            var documentErrorsMap = new Dictionary<DocumentId, HashSet<DiagnosticData>>();

            var errors = new ExternalError[1];
            uint fetched;
            while (pErrors.Next(1, errors, out fetched) == VSConstants.S_OK && fetched == 1)
            {
                var error = errors[0];

                DiagnosticData diagnostic;
                if (error.bstrFileName != null)
                {
                    // CreateDocumentDiagnosticItem returns null when the file is not part of
                    // this project; such errors fall through to the project-level bucket.
                    diagnostic = CreateDocumentDiagnosticItem(error);
                    if (diagnostic != null)
                    {
                        var diagnostics = documentErrorsMap.GetOrAdd(diagnostic.DocumentId, _ => new HashSet<DiagnosticData>());
                        diagnostics.Add(diagnostic);
                        continue;
                    }

                    projectErrors.Add(CreateProjectDiagnosticItem(error));
                }
                else
                {
                    projectErrors.Add(CreateProjectDiagnosticItem(error));
                }
            }

            _diagnosticProvider.AddNewErrors(_projectId, projectErrors, documentErrorsMap);
            return VSConstants.S_OK;
        }

        /// <summary>Removes all previously reported errors for this project.</summary>
        public int ClearAllErrors()
        {
            _diagnosticProvider.ClearErrors(_projectId);
            return VSConstants.S_OK;
        }

        /// <summary>Not implemented; no known caller. Always returns E_NOTIMPL.</summary>
        public int GetErrors(out IVsEnumExternalErrors pErrors)
        {
            pErrors = null;
            Debug.Fail("This is not implemented, because no one called it.");
            return VSConstants.E_NOTIMPL;
        }

        // Project-level diagnostic: no document id, so location fields are zeroed.
        private DiagnosticData CreateProjectDiagnosticItem(ExternalError error)
        {
            return GetDiagnosticData(error);
        }

        /// <summary>
        /// Builds a document-scoped diagnostic for <paramref name="error"/>, or null when the
        /// error's file does not belong to this project. For Venus (contained) documents the
        /// error position is mapped from the primary (surface) buffer into the secondary buffer.
        /// </summary>
        private DiagnosticData CreateDocumentDiagnosticItem(ExternalError error)
        {
            var hostProject = _workspace.GetHostProject(_projectId);
            if (!hostProject.ContainsFile(error.bstrFileName))
            {
                return null;
            }

            var hostDocument = hostProject.GetCurrentDocumentFromPath(error.bstrFileName);

            var line = error.iLine;
            var column = error.iCol;
            var containedDocument = hostDocument as ContainedDocument;
            if (containedDocument != null)
            {
                // Map the zero-length span at (line, column) through the buffer coordinator;
                // failure HRESULTs surface as exceptions.
                var span = new VsTextSpan
                {
                    iStartLine = line,
                    iStartIndex = column,
                    iEndLine = line,
                    iEndIndex = column,
                };

                var spans = new VsTextSpan[1];
                Marshal.ThrowExceptionForHR(containedDocument.ContainedLanguage.BufferCoordinator.MapPrimaryToSecondarySpan(
                    span,
                    spans));

                line = spans[0].iStartLine;
                column = spans[0].iStartIndex;
            }

            return GetDiagnosticData(error, hostDocument.Id, line, column);
        }

        /// <summary>
        /// Single-position variant of error reporting; delegates to <see cref="ReportError2"/>
        /// using the same coordinates for start and end.
        /// </summary>
        public int ReportError(string bstrErrorMessage, string bstrErrorId, [ComAliasName("VsShell.VSTASKPRIORITY")]VSTASKPRIORITY nPriority, int iLine, int iColumn, string bstrFileName)
        {
            ReportError2(bstrErrorMessage, bstrErrorId, nPriority, iLine, iColumn, iLine, iColumn, bstrFileName);
            return VSConstants.S_OK;
        }

        // TODO: Use PreserveSig instead of throwing these exceptions for common cases.
        /// <summary>
        /// Reports one build error with an explicit span. Throws (deliberately — the project
        /// system catches these and falls back to its own error list) when the error has no
        /// position or its file is not part of this project.
        /// </summary>
        /// <exception cref="ArgumentException">End position precedes start, or an unknown priority.</exception>
        /// <exception cref="NotImplementedException">Signals "punt back to the project system".</exception>
        public void ReportError2(string bstrErrorMessage, string bstrErrorId, [ComAliasName("VsShell.VSTASKPRIORITY")]VSTASKPRIORITY nPriority, int iStartLine, int iStartColumn, int iEndLine, int iEndColumn, string bstrFileName)
        {
            // Validate the span only when an end position was actually supplied (>= 0).
            if ((iEndLine >= 0 && iEndColumn >= 0) &&
                ((iEndLine < iStartLine) ||
                 (iEndLine == iStartLine && iEndColumn < iStartColumn)))
            {
                throw new ArgumentException(ServicesVSResources.EndPositionMustBeGreaterThanStart);
            }

            // We only handle errors that have positions. For the rest, we punt back to the
            // project system.
            if (iStartLine < 0 || iStartColumn < 0)
            {
                throw new NotImplementedException();
            }

            var hostProject = _workspace.GetHostProject(_projectId);
            if (!hostProject.ContainsFile(bstrFileName))
            {
                throw new NotImplementedException();
            }

            var hostDocument = hostProject.GetCurrentDocumentFromPath(bstrFileName);

            // VS task priority maps directly onto diagnostic severity.
            var priority = (VSTASKPRIORITY)nPriority;
            DiagnosticSeverity severity;
            switch (priority)
            {
                case VSTASKPRIORITY.TP_HIGH:
                    severity = DiagnosticSeverity.Error;
                    break;
                case VSTASKPRIORITY.TP_NORMAL:
                    severity = DiagnosticSeverity.Warning;
                    break;
                case VSTASKPRIORITY.TP_LOW:
                    severity = DiagnosticSeverity.Info;
                    break;
                default:
                    throw new ArgumentException(ServicesVSResources.NotAValidValue, "nPriority");
            }

            // The same span is used for both mapped and original coordinates here; only the
            // original side carries the file path.
            var diagnostic = GetDiagnosticData(
                hostDocument.Id, bstrErrorId, bstrErrorMessage, severity,
                null, iStartLine, iStartColumn, iEndLine, iEndColumn,
                bstrFileName, iStartLine, iStartColumn, iEndLine, iEndColumn);

            _diagnosticProvider.AddNewErrors(hostDocument.Id, diagnostic);
        }

        /// <summary>Removes all previously reported errors for this project.</summary>
        public int ClearErrors()
        {
            _diagnosticProvider.ClearErrors(_projectId);
            return VSConstants.S_OK;
        }

        // Formats e.g. prefix "CS" + id 1002 as "CS1002".
        private string GetErrorId(ExternalError error)
        {
            return string.Format("{0}{1:0000}", _errorCodePrefix, error.iErrorID);
        }

        private static int GetWarningLevel(DiagnosticSeverity severity)
        {
            return severity == DiagnosticSeverity.Error ? 0 : 1;
        }

        private static DiagnosticSeverity GetDiagnosticSeverity(ExternalError error)
        {
            return error.fError != 0 ? DiagnosticSeverity.Error : DiagnosticSeverity.Warning;
        }

        /// <summary>
        /// Converts an <see cref="ExternalError"/> into <see cref="DiagnosticData"/>. When a
        /// document id is supplied, <paramref name="line"/>/<paramref name="column"/> are the
        /// Venus-mapped coordinates; without one, all location fields are zeroed.
        /// </summary>
        private DiagnosticData GetDiagnosticData(
            ExternalError error, DocumentId id = null, int line = 0, int column = 0)
        {
            if (id != null)
            {
                // save error line/column (surface buffer location) as mapped line/column so that we can display
                // right location on closed Venus file.
                return GetDiagnosticData(
                    id, GetErrorId(error), error.bstrText, GetDiagnosticSeverity(error),
                    null, error.iLine, error.iCol, error.iLine, error.iCol,
                    error.bstrFileName, line, column, line, column);
            }

            return GetDiagnosticData(
                id, GetErrorId(error), error.bstrText, GetDiagnosticSeverity(error),
                null, 0, 0, 0, 0, null, 0, 0, 0, 0);
        }

        // True for ids of the form CSnnnn / BCnnnn (case-insensitive, numeric suffix).
        private static bool IsCompilerDiagnostic(string errorId)
        {
            if (!string.IsNullOrEmpty(errorId) && errorId.Length > 2)
            {
                var prefix = errorId.Substring(0, 2);
                if (prefix.Equals("CS", StringComparison.OrdinalIgnoreCase) || prefix.Equals("BC", StringComparison.OrdinalIgnoreCase))
                {
                    var suffix = errorId.Substring(2);
                    int id;
                    return int.TryParse(suffix, out id);
                }
            }

            return false;
        }

        private static IReadOnlyList<string> GetCustomTags(string errorId)
        {
            return IsCompilerDiagnostic(errorId) ? CompilerDiagnosticCustomTags : CustomTags;
        }

        /// <summary>
        /// Assembles the final <see cref="DiagnosticData"/> with build category, tags, and both
        /// mapped and original locations.
        /// </summary>
        private DiagnosticData GetDiagnosticData(
            DocumentId id, string errorId, string message, DiagnosticSeverity severity,
            string mappedFilePath, int mappedStartLine, int mappedStartColumn, int mappedEndLine, int mappedEndColumn,
            string originalFilePath, int originalStartLine, int originalStartColumn, int originalEndLine, int originalEndColumn)
        {
            return new DiagnosticData(
                id: errorId,
                category: WellKnownDiagnosticTags.Build,
                message: message,
                title: message,
                enuMessageForBingSearch: message, // Unfortunately, there is no way to get ENU text for this since this is an external error.
                severity: severity,
                defaultSeverity: severity,
                isEnabledByDefault: true,
                warningLevel: GetWarningLevel(severity),
                customTags: GetCustomTags(errorId),
                properties: DiagnosticData.PropertiesForBuildDiagnostic,
                workspace: _workspace,
                projectId: _projectId,
                location: new DiagnosticDataLocation(id,
                    sourceSpan: null,
                    originalFilePath: originalFilePath,
                    originalStartLine: originalStartLine,
                    originalStartColumn: originalStartColumn,
                    originalEndLine: originalEndLine,
                    originalEndColumn: originalEndColumn,
                    mappedFilePath: mappedFilePath,
                    mappedStartLine: mappedStartLine,
                    mappedStartColumn: mappedStartColumn,
                    mappedEndLine: mappedEndLine,
                    mappedEndColumn: mappedEndColumn));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using Xunit;

namespace System.Linq.Parallel.Tests
{
    /// <summary>
    /// Tests for <c>ParallelEnumerable.ToDictionary</c>: key/element selectors, custom
    /// comparers, duplicate-key failures, cancellation, exception wrapping, and argument
    /// validation. Sources come from <c>UnorderedSources.Ranges</c>; *_Longrunning variants
    /// rerun the same body with large counts under [OuterLoop].
    /// </summary>
    public class ToDictionaryTests
    {
        // Keys are x * 2, values are the source elements; seen tracks that every source
        // element appears exactly once.
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ParallelQuery<int> query = labeled.Item;
            IntegerRangeSet seen = new IntegerRangeSet(0, count);
            Assert.All(query.ToDictionary(x => x * 2),
                p => { seen.Add(p.Key / 2); Assert.Equal(p.Key, p.Value * 2); });
            seen.AssertComplete();
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary(labeled, count);
        }

        // Element selector doubles the value; keys are the source elements.
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_ElementSelector(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ParallelQuery<int> query = labeled.Item;
            IntegerRangeSet seen = new IntegerRangeSet(0, count);
            Assert.All(query.ToDictionary(x => x, y => y * 2),
                p => { seen.Add(p.Key); Assert.Equal(p.Key * 2, p.Value); });
            seen.AssertComplete();
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_ElementSelector_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_ElementSelector(labeled, count);
        }

        // Comparer modulus (count * 2) is wide enough that doubled keys never collide.
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_CustomComparator(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ParallelQuery<int> query = labeled.Item;
            IntegerRangeSet seen = new IntegerRangeSet(0, count);
            Assert.All(query.ToDictionary(x => x * 2, new ModularCongruenceComparer(count * 2)),
                p => { seen.Add(p.Key / 2); Assert.Equal(p.Key, p.Value * 2); });
            seen.AssertComplete();
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_CustomComparator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_CustomComparator(labeled, count);
        }

        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_ElementSelector_CustomComparator(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ParallelQuery<int> query = labeled.Item;
            IntegerRangeSet seen = new IntegerRangeSet(0, count);
            Assert.All(query.ToDictionary(x => x, y => y * 2, new ModularCongruenceComparer(count)),
                p => { seen.Add(p.Key); Assert.Equal(p.Key * 2, p.Value); });
            seen.AssertComplete();
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_ElementSelector_CustomComparator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_ElementSelector_CustomComparator(labeled, count);
        }

        // Mod-2 comparer: more than two elements must collide (ArgumentException inside
        // AggregateException); one or two elements are all distinct mod 2; empty succeeds.
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_UniqueKeys_CustomComparator(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ParallelQuery<int> query = labeled.Item;
            if (count > 2)
            {
                AggregateException e = Assert.Throws<AggregateException>(() => query.ToDictionary(x => x, new ModularCongruenceComparer(2)));
                Assert.IsType<ArgumentException>(e.InnerException);
            }
            else if (count == 1 || count == 2)
            {
                IntegerRangeSet seen = new IntegerRangeSet(0, count);
                foreach (KeyValuePair<int, int> entry in query.ToDictionary(x => x, new ModularCongruenceComparer(2)))
                {
                    seen.Add(entry.Key);
                    Assert.Equal(entry.Key, entry.Value);
                }
                seen.AssertComplete();
            }
            else
            {
                Assert.Empty(query.ToDictionary(x => x, new ModularCongruenceComparer(2)));
            }
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_UniqueKeys_CustomComparator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_UniqueKeys_CustomComparator(labeled, count);
        }

        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_ElementSelector_UniqueKeys_CustomComparator(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ParallelQuery<int> query = labeled.Item;
            if (count > 2)
            {
                AggregateException e = Assert.Throws<AggregateException>(() => query.ToDictionary(x => x, y => y, new ModularCongruenceComparer(2)));
                Assert.IsType<ArgumentException>(e.InnerException);
            }
            else if (count == 1 || count == 2)
            {
                IntegerRangeSet seen = new IntegerRangeSet(0, count);
                foreach (KeyValuePair<int, int> entry in query.ToDictionary(x => x, y => y, new ModularCongruenceComparer(2)))
                {
                    seen.Add(entry.Key);
                    Assert.Equal(entry.Key, entry.Value);
                }
                seen.AssertComplete();
            }
            else
            {
                Assert.Empty(query.ToDictionary(x => x, y => y, new ModularCongruenceComparer(2)));
            }
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_ElementSelector_UniqueKeys_CustomComparator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_ElementSelector_UniqueKeys_CustomComparator(labeled, count);
        }

        [Fact]
        public static void ToDictionary_DuplicateKeys()
        {
            AggregateException e = Assert.Throws<AggregateException>(() => ParallelEnumerable.Repeat(0, 2).ToDictionary(x => x));
            Assert.IsType<ArgumentException>(e.InnerException);
        }

        // NOTE(review): this test ignores the labeled source and always uses Repeat(0, 2);
        // the previously declared local `query` was dead and has been removed. The Theory
        // parameters must stay: MemberData supplies them and the Longrunning variant calls
        // through with them.
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_DuplicateKeys_ElementSelector(Labeled<ParallelQuery<int>> labeled, int count)
        {
            AggregateException e = Assert.Throws<AggregateException>(() => ParallelEnumerable.Repeat(0, 2).ToDictionary(x => x, y => y));
            Assert.IsType<ArgumentException>(e.InnerException);
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_DuplicateKeys_ElementSelector_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_DuplicateKeys_ElementSelector(labeled, count);
        }

        [Fact]
        public static void ToDictionary_DuplicateKeys_CustomComparator()
        {
            AggregateException e = Assert.Throws<AggregateException>(() => ParallelEnumerable.Repeat(0, 2).ToDictionary(x => x, new ModularCongruenceComparer(2)));
            Assert.IsType<ArgumentException>(e.InnerException);
        }

        // NOTE(review): same as above — the labeled source is ignored by design here, and the
        // dead local `query` has been removed.
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 0, 1, 2, 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_DuplicateKeys_ElementSelector_CustomComparator(Labeled<ParallelQuery<int>> labeled, int count)
        {
            AggregateException e = Assert.Throws<AggregateException>(() => ParallelEnumerable.Repeat(0, 2).ToDictionary(x => x, y => y, new ModularCongruenceComparer(2)));
            Assert.IsType<ArgumentException>(e.InnerException);
        }

        [Theory]
        [OuterLoop]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1024 * 4, 1024 * 16 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_DuplicateKeys_ElementSelector_CustomComparator_Longrunning(Labeled<ParallelQuery<int>> labeled, int count)
        {
            ToDictionary_DuplicateKeys_ElementSelector_CustomComparator(labeled, count);
        }

        // Cancellation raised from inside the comparer must surface as OperationCanceledException.
        [Fact]
        public static void ToDictionary_OperationCanceledException()
        {
            AssertThrows.EventuallyCanceled((source, canceler) => source.ToDictionary(x => x, new CancelingEqualityComparer<int>(canceler)));
            AssertThrows.EventuallyCanceled((source, canceler) => source.ToDictionary(x => x, y => y, new CancelingEqualityComparer<int>(canceler)));
        }

        // Cancellation via a token other than the query's must be wrapped in AggregateException.
        [Fact]
        public static void ToDictionary_AggregateException_Wraps_OperationCanceledException()
        {
            AssertThrows.OtherTokenCanceled((source, canceler) => source.ToDictionary(x => x, new CancelingEqualityComparer<int>(canceler)));
            AssertThrows.OtherTokenCanceled((source, canceler) => source.ToDictionary(x => x, y => y, new CancelingEqualityComparer<int>(canceler)));
            AssertThrows.SameTokenNotCanceled((source, canceler) => source.ToDictionary(x => x, new CancelingEqualityComparer<int>(canceler)));
            AssertThrows.SameTokenNotCanceled((source, canceler) => source.ToDictionary(x => x, y => y, new CancelingEqualityComparer<int>(canceler)));
        }

        [Fact]
        public static void ToDictionary_OperationCanceledException_PreCanceled()
        {
            AssertThrows.AlreadyCanceled(source => source.ToDictionary(x => x));
            AssertThrows.AlreadyCanceled(source => source.ToDictionary(x => x, EqualityComparer<int>.Default));

            AssertThrows.AlreadyCanceled(source => source.ToDictionary(x => x, y => y));
            AssertThrows.AlreadyCanceled(source => source.ToDictionary(x => x, y => y, EqualityComparer<int>.Default));
        }

        // Exceptions thrown by selectors or comparers must be wrapped (DeliberateTestException).
        [Theory]
        [MemberData(nameof(UnorderedSources.Ranges), new[] { 1 }, MemberType = typeof(UnorderedSources))]
        public static void ToDictionary_AggregateException(Labeled<ParallelQuery<int>> labeled, int count)
        {
            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary((Func<int, int>)(x => { throw new DeliberateTestException(); })));
            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary((Func<int, int>)(x => { throw new DeliberateTestException(); }), y => y));
            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary(x => x, (Func<int, int>)(y => { throw new DeliberateTestException(); })));

            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary((Func<int, int>)(x => { throw new DeliberateTestException(); }), EqualityComparer<int>.Default));
            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary((Func<int, int>)(x => { throw new DeliberateTestException(); }), y => y, EqualityComparer<int>.Default));
            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary(x => x, (Func<int, int>)(y => { throw new DeliberateTestException(); }), EqualityComparer<int>.Default));

            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary(x => x, new FailingEqualityComparer<int>()));
            Functions.AssertThrowsWrapped<DeliberateTestException>(() => labeled.Item.ToDictionary(x => x, y => y, new FailingEqualityComparer<int>()));
        }

        [Fact]
        public static void ToDictionary_ArgumentNullException()
        {
            Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).ToDictionary(x => x));
            Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).ToDictionary(x => x, EqualityComparer<int>.Default));
            Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).ToDictionary(x => x, y => y));
            Assert.Throws<ArgumentNullException>(() => ((ParallelQuery<int>)null).ToDictionary(x => x, y => y, EqualityComparer<int>.Default));

            Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Empty<int>().ToDictionary((Func<int, int>)null));
            Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Empty<int>().ToDictionary((Func<int, int>)null, EqualityComparer<int>.Default));
            Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Empty<int>().ToDictionary((Func<int, int>)null, y => y));
            Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Empty<int>().ToDictionary((Func<int, int>)null, y => y, EqualityComparer<int>.Default));
            Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Empty<int>().ToDictionary(x => x, (Func<int, int>)null));
            Assert.Throws<ArgumentNullException>(() => ParallelEnumerable.Empty<int>().ToDictionary(x => x, (Func<int, int>)null, EqualityComparer<int>.Default));
        }
    }
}
/* Copyright (c) Microsoft Corporation All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT. See the Apache Version 2.0 License for specific language governing permissions and limitations under the License. */ using System; using System.Collections.Generic; using System.IO; using System.Reflection; using System.Data.Linq; using System.Data.Linq.Mapping; using System.Xml; using System.Xml.Serialization; using Microsoft.Research.Dryad; namespace Microsoft.Research.Dryad.GraphManager { public class QueryPlanParser { public VertexInfo.IOType GetIoType(string type) { if (type == "File") return VertexInfo.IOType.FILELIST; if (type == "PartitionedFile") return VertexInfo.IOType.PARTITIONEDFILE; if (type == "FileDirectory") return VertexInfo.IOType.FILEDIRECTORY; if (type == "FileWildcard") return VertexInfo.IOType.FILEWILDCARD; if (type == "TidyFS") return VertexInfo.IOType.STREAM; if (type == "Dsc") return VertexInfo.IOType.STREAM; if (type == "Hdfs") return VertexInfo.IOType.HDFS_STREAM; if (type == "AzureBlob") return VertexInfo.IOType.AZUREBLOB; if (type == "FilePrefix") return VertexInfo.IOType.FILEPREFIX; throw new LinqToDryadException(String.Format("Unknown IoType: {0}", type)); } public Vertex.Type GetVertexType(string type) { if (type == "InputTable") return Vertex.Type.INPUTTABLE; if (type == "OutputTable") return Vertex.Type.OUTPUTTABLE; if (type == "Where") return Vertex.Type.WHERE; if (type == "Join") return Vertex.Type.JOIN; if (type == "Fork") return Vertex.Type.FORK; if (type == "Tee") return Vertex.Type.TEE; if (type == 
"Concat") return Vertex.Type.CONCAT; if (type == "Super") return Vertex.Type.SUPER; if (type == "Apply") return Vertex.Type.SUPER; return Vertex.Type.UNKNOWN; } public Predecessor.ChannelType GetChannelType(string type) { if (type == "DiskFile") return Predecessor.ChannelType.DISKFILE; if (type == "TCPPipe") return Predecessor.ChannelType.TCPPIPE; if (type == "MemoryFIFO") return Predecessor.ChannelType.MEMORYFIFO; throw new LinqToDryadException(String.Format("Unknown ChannelType: {0}", type)); } public Predecessor.ConnectionOperator GetConnectionOperator(string type) { if (type == "Pointwise") return Predecessor.ConnectionOperator.POINTWISE; if (type == "CrossProduct") return Predecessor.ConnectionOperator.CROSSPRODUCT; throw new LinqToDryadException(String.Format("Unknown ConnectionOperator: {0}", type)); } public Predecessor.AffinityConstraint GetAffinityConstraint(string type) { if (type == "UseDefault") return Predecessor.AffinityConstraint.UseDefault; if (type == "HardConstraint") return Predecessor.AffinityConstraint.HardConstraint; if (type == "OptimizationConstraint") return Predecessor.AffinityConstraint.OptimizationConstraint; if (type == "Preference") return Predecessor.AffinityConstraint.Preference; if (type == "DontCare") return Predecessor.AffinityConstraint.DontCare; throw new LinqToDryadException(String.Format("Unknown AffinityConstraint: {0}", type)); } public DynamicManager.Type GetDynamicManagerType(string type) { if (type == "None") return DynamicManager.Type.NONE; if (type == "Splitter") return DynamicManager.Type.SPLITTER; if (type == "PartialAggregator") return DynamicManager.Type.PARTIALAGGR; if (type == "FullAggregator") return DynamicManager.Type.FULLAGGR; if (type == "HashDistributor") return DynamicManager.Type.HASHDISTRIBUTOR; if (type == "RangeDistributor") return DynamicManager.Type.RANGEDISTRIBUTOR; if (type == "Broadcast") return DynamicManager.Type.BROADCAST; throw new LinqToDryadException(String.Format("Unknown DynamicManager: 
{0}", type)); } public static bool SplitEntryIntoAssemblyClassMethod(string entry, out string _assembly, out string _class, out string _method) { _assembly = ""; _class = ""; _method = ""; int indexBang = entry.IndexOf("!"); int indexPeriod = entry.LastIndexOf("."); if (indexBang == -1 || indexPeriod == -1 || indexPeriod <= indexBang) { return false; } _assembly = entry.Substring(0, indexBang); _class = entry.Substring(indexBang + 1, indexPeriod - indexBang - 1); _method = entry.Substring(indexPeriod + 1); return true; } private void ParseQueryXmlLinqToDryad(XmlDocument queryPlanDoc, Query query) { XmlElement root = queryPlanDoc.DocumentElement; // // Query globals // query.queryPlan = new SortedDictionary<int, Vertex>(); query.compilerversion = root.SelectSingleNode("DryadLinqVersion").InnerText; query.clusterName = root.SelectSingleNode("ClusterName").InnerText; query.visualization = root.SelectSingleNode("Visualization").InnerText; // Compression scheme for intermediate data XmlNode compressionNode = root.SelectSingleNode("IntermediateDataCompression"); if (compressionNode != null) { query.intermediateDataCompression = Convert.ToInt32(compressionNode.InnerText); } // // XmlExecHost arguments // XmlNodeList nodes = root.SelectSingleNode("XmlExecHostArgs").ChildNodes; query.xmlExecHostArgs = new string[nodes.Count]; for (int index=0; index<nodes.Count; index++) { query.xmlExecHostArgs[index] = nodes[index].InnerText; } // // Get Speculative duplication flag - default is enabled (true) // XmlNode duplicationNode = root.SelectSingleNode("EnableSpeculativeDuplication"); if (duplicationNode != null) { bool dupFlag; if (bool.TryParse(duplicationNode.InnerText, out dupFlag)) { query.enableSpeculativeDuplication = dupFlag; } } nodes = root.SelectSingleNode("QueryPlan").ChildNodes; // // Need to remember the conection operator for use when the // predecessors are being created. 
// string[] vertexConnectionOperator = new string[nodes.Count]; for (int index=0; index<nodes.Count; index++) { vertexConnectionOperator[index] = ""; } for (int index=0; index<nodes.Count; index++) { Vertex vertex = new Vertex(); // // Vertex globals // string uniqueId = nodes[index].SelectSingleNode("UniqueId").InnerText; string vertexType = nodes[index].SelectSingleNode("Type").InnerText; string name = nodes[index].SelectSingleNode("Name").InnerText; string partitions = nodes[index].SelectSingleNode("Partitions").InnerText; string channelType = nodes[index].SelectSingleNode("ChannelType").InnerText; // // Need to remember the conection operator for use when the // predecessors are being created. // vertexConnectionOperator[index] = nodes[index].SelectSingleNode("ConnectionOperator").InnerText; vertex.uniqueId = Convert.ToInt32(uniqueId); vertex.name = name; vertex.type = GetVertexType(vertexType); vertex.partitions = Convert.ToInt32(partitions); XmlNode dynamicManager = nodes[index].SelectSingleNode("DynamicManager"); string dmType = dynamicManager.SelectSingleNode("Type").InnerText; vertex.dynamicManager = new DynamicManager(); vertex.dynamicManager.type = GetDynamicManagerType(dmType); if (vertex.dynamicManager.type == DynamicManager.Type.FULLAGGR) { string levels = dynamicManager.SelectSingleNode("AggregationLevels").InnerText; vertex.dynamicManager.aggregationLevels = Convert.ToInt32(levels); } if (vertex.dynamicManager.type == DynamicManager.Type.RANGEDISTRIBUTOR) { string sampleRate = dynamicManager.SelectSingleNode("SampleRate").InnerText; string vertexId = dynamicManager.SelectSingleNode("VertexId").InnerText; vertex.dynamicManager.sampleRate = Convert.ToDouble(sampleRate); vertex.dynamicManager.splitVertexId = Convert.ToInt32(vertexId); } else { XmlNodeList entries = dynamicManager.SelectNodes("Entry"); vertex.dynamicManager.assemblyNames = new string[entries.Count]; vertex.dynamicManager.classNames = new string[entries.Count]; 
vertex.dynamicManager.methodNames = new string[entries.Count]; for (int entryIndex = 0; entryIndex < entries.Count; entryIndex++) { vertex.dynamicManager.assemblyNames[entryIndex] = entries[entryIndex].SelectSingleNode("AssemblyName").InnerText; vertex.dynamicManager.classNames[entryIndex] = entries[entryIndex].SelectSingleNode("ClassName").InnerText; vertex.dynamicManager.methodNames[entryIndex] = entries[entryIndex].SelectSingleNode("MethodName").InnerText; } } if (vertex.type == Vertex.Type.INPUTTABLE) { XmlNode storageSet = nodes[index].SelectSingleNode("StorageSet"); string ioType = storageSet.SelectSingleNode("Type").InnerText; vertex.info = new VertexInfo(); vertex.info.ioType = GetIoType(ioType); XmlNode recordType = storageSet.SelectSingleNode("RecordType"); if (recordType != null) { vertex.info.recordType = recordType.InnerXml; } XmlNodeList storageUris = storageSet.SelectNodes("SourceURI"); vertex.info.sources = new string[storageUris.Count]; for (int indexStorageUri=0; indexStorageUri<storageUris.Count; indexStorageUri++) { vertex.info.sources[indexStorageUri] = storageUris[indexStorageUri].InnerText; } } else if ( vertex.type == Vertex.Type.OUTPUTTABLE ) { XmlNode storageSet = nodes[index].SelectSingleNode("StorageSet"); string ioType = storageSet.SelectSingleNode("Type").InnerText; vertex.info = new VertexInfo(); vertex.info.ioType = GetIoType(ioType); string source = storageSet.SelectSingleNode("SinkURI").InnerText; vertex.info.sources = new string[1] { source }; if (vertex.info.ioType == VertexInfo.IOType.PARTITIONEDFILE ) { vertex.info.partitionUncPath = storageSet.SelectSingleNode("PartitionUncPath").InnerText; } XmlNode temporary = storageSet.SelectSingleNode("IsTemporary"); if (temporary != null) { if (bool.TryParse(temporary.InnerXml, out vertex.info.isTemporary) == false) { throw new LinqToDryadException(String.Format("Invalid value for IsTemporary: {0}", temporary.InnerXml)); } } else { vertex.info.isTemporary = false; } XmlNode recordType = 
storageSet.SelectSingleNode("RecordType"); if (recordType != null) { vertex.info.recordType = recordType.InnerXml; } XmlNode outputCompressionScheme = storageSet.SelectSingleNode("OutputCompressionScheme"); if (outputCompressionScheme != null) { if (int.TryParse(outputCompressionScheme.InnerXml, out vertex.info.compressionScheme) == false) { throw new LinqToDryadException(String.Format("Invalid value for OutputCompressionScheme: {0}", outputCompressionScheme.InnerXml)); } } else { vertex.info.compressionScheme = 0; // TODO: Change to Enum } } else /* JOIN etc. */ { XmlNode entry = nodes[index].SelectSingleNode("Entry"); vertex.info = new VertexInfo(); vertex.info.assemblyName = entry.SelectSingleNode("AssemblyName").InnerText; vertex.info.className = entry.SelectSingleNode("ClassName").InnerText; vertex.info.methodName = entry.SelectSingleNode("MethodName").InnerText; } // // everybody except inputs have children // if (vertex.type != Vertex.Type.INPUTTABLE) { XmlNodeList children = nodes[index].SelectSingleNode("Children").ChildNodes; vertex.info.predecessors = new Predecessor[children.Count]; for (int indexChild = 0; indexChild < children.Count; indexChild++) { int childId = Convert.ToInt32(children[indexChild].SelectSingleNode("UniqueId").InnerText); string childConstraint = children[indexChild].SelectSingleNode("AffinityConstraint").InnerText; vertex.info.predecessors[indexChild] = new Predecessor(); vertex.info.predecessors[indexChild].uniqueId = childId; vertex.info.predecessors[indexChild].connectionOperator = GetConnectionOperator(vertexConnectionOperator[childId]); vertex.info.predecessors[indexChild].channelType = GetChannelType(channelType); vertex.info.predecessors[indexChild].constraint = GetAffinityConstraint(childConstraint); } } // // In this parser the only way to have an optional Tag is immediately before consuming // a close of an outer tag. So we have to look for machines tag here. 
//
// In this parser the only way to have an optional tag is immediately before consuming
// a close of an outer tag; Machines is optional, so it is looked for here, just before
// the vertex element closes. Input/output tables never carry a Machines list.
//
if ((vertex.type != Vertex.Type.INPUTTABLE) && (vertex.type != Vertex.Type.OUTPUTTABLE))
{
    XmlNode machinesRoot = nodes[index].SelectSingleNode("Machines");
    if (machinesRoot != null)
    {
        XmlNodeList machines = machinesRoot.ChildNodes;
        vertex.machines = new string[machines.Count];
        for (int indexMachine = 0; indexMachine < machines.Count; indexMachine++)
        {
            vertex.machines[indexMachine] = machines[indexMachine].SelectSingleNode("Machine").InnerText;
        }
    }
}

//
// Add the vertex
//
query.queryPlan.Add(vertex.uniqueId, vertex);
}
}

/// <summary>
/// Loads the query plan XML file at <paramref name="queryPlanFileName"/> and parses it
/// into <paramref name="query"/>. All failures are logged via DryadLogger and reported
/// by returning false; this method never lets an exception escape to the caller.
/// </summary>
/// <param name="queryPlanFileName">Path of the query plan XML document to load.</param>
/// <param name="query">Receives the parsed query plan.</param>
/// <returns>True on success, false if the plan could not be loaded or parsed.</returns>
public bool ParseQueryXml(string queryPlanFileName, Query query)
{
    XmlNode version = null;
    XmlDocument queryPlanDoc = new XmlDocument();

    //
    // Load query plan document
    //
    try
    {
        queryPlanDoc.Load(queryPlanFileName);
    }
    catch (Exception e)
    {
        DryadLogger.LogCritical(String.Format("Failed to load query plan: {0}: {1}", queryPlanFileName, e.ToString()));
        return false;
    }

    //
    // Get DryadLinqVersion - absence used to indicate Argentia query plan
    //
    try
    {
        version = queryPlanDoc.DocumentElement.SelectSingleNode("DryadLinqVersion");
    }
    catch (System.Xml.XPath.XPathException e)
    {
        DryadLogger.LogCritical(String.Format("Failed to select node DryadLinqVersion from query plan: {0}: {1}", queryPlanFileName, e.ToString()));
        return false;
    }

    // SelectSingleNode returns null (no exception) when the element is simply absent.
    if (version == null)
    {
        DryadLogger.LogCritical(String.Format("Missing element 'DryadLinqVersion' in query plan: {0}", queryPlanFileName));
        return false;
    }

    //
    // Parse query plan XML doc into Query
    //
    try
    {
        ParseQueryXmlLinqToDryad(queryPlanDoc, query);
    }
    catch (Exception e)
    {
        DryadLogger.LogCritical(String.Format("Failed to parse query plan: {0}: {1}", queryPlanFileName, e.ToString()));
        return false;
    }
#if REMOVE_ARGENTIA
    // NOTE(review): this excluded block contains an 'else' with no matching 'if' in the
    // active code path; it would not compile if REMOVE_ARGENTIA were ever defined.
    else // If (version == null), Argentia query plan
    {
        // Add the namespace.
        XmlNamespaceManager nsmgr = new XmlNamespaceManager(queryPlanDoc.NameTable);
        nsmgr.AddNamespace("qp", "http://microsoft.com/PCP/Argentia/QueryPlan.xsd");
        version = queryPlanDoc.DocumentElement.SelectSingleNode("qp:RuntimeVersion", nsmgr);
        if (version != null)
        {
            ParseQueryXmlArgentia(queryPlanDoc, query);
        }
        else
        {
            DryadLogger.LogCritical(0, null, "Unknown query plan format.");
            return false;
        }
    }
#endif
    return true;
}
}
}
// // Copyright (c) 2008-2011, Kenneth Bell // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. 
//

using System;
using System.IO;
using System.Text;
using NUnit.Framework;

namespace DiscUtils.Fat
{
    /// <summary>
    /// Unit tests for <see cref="FatFileSystem"/>: floppy/partition formatting,
    /// directory operations, code-page handling, metadata lookups and
    /// read-only stream access.
    /// </summary>
    [TestFixture]
    public class FatFileSystemTest
    {
        // Smoke test: formatting an in-memory high-density floppy must not throw.
        [Test]
        public void FormatFloppy()
        {
            MemoryStream ms = new MemoryStream();
            FatFileSystem fs = FatFileSystem.FormatFloppy(ms, FloppyDiskType.HighDensity, "KBFLOPPY ");
        }

        // Cyrillic names under code page 855: stored uppercased, matched
        // case-insensitively, and still readable after the image is re-opened.
        [Test]
        public void Cyrillic()
        {
            string lowerDE = "\x0434"; // Cyrillic small letter DE
            string upperDE = "\x0414"; // Cyrillic capital letter DE

            MemoryStream ms = new MemoryStream();
            using (FatFileSystem fs = FatFileSystem.FormatFloppy(ms, FloppyDiskType.HighDensity, "KBFLOPPY "))
            {
                fs.FatOptions.FileNameEncoding = Encoding.GetEncoding(855);

                string name = lowerDE;
                fs.CreateDirectory(name);

                string[] dirs = fs.GetDirectories("");
                Assert.AreEqual(1, dirs.Length);
                Assert.AreEqual(upperDE, dirs[0]); // Uppercase

                // Lookups must be case-insensitive under this code page.
                Assert.IsTrue(fs.DirectoryExists(lowerDE));
                Assert.IsTrue(fs.DirectoryExists(upperDE));

                fs.CreateDirectory(lowerDE + lowerDE + lowerDE);
                Assert.AreEqual(2, fs.GetDirectories("").Length);

                fs.DeleteDirectory(lowerDE + lowerDE + lowerDE);
                Assert.AreEqual(1, fs.GetDirectories("").Length);
            }

            // Re-open via filesystem detection to prove the names persisted on disk.
            DiscFileSystem fs2 = FileSystemManager.DetectDefaultFileSystems(ms)[0].Open(
                ms,
                new FileSystemParameters { FileNameEncoding = Encoding.GetEncoding(855) });

            Assert.IsTrue(fs2.DirectoryExists(lowerDE));
            Assert.IsTrue(fs2.DirectoryExists(upperDE));
            Assert.AreEqual(1, fs2.GetDirectories("").Length);
        }

        // A code-page-855 graphic character has no upper-case mapping, so it must
        // round-trip through a directory name unchanged.
        [Test]
        public void DefaultCodepage()
        {
            string graphicChar = "\x255D";

            MemoryStream ms = new MemoryStream();
            FatFileSystem fs = FatFileSystem.FormatFloppy(ms, FloppyDiskType.HighDensity, "KBFLOPPY ");
            fs.FatOptions.FileNameEncoding = Encoding.GetEncoding(855);

            string name = graphicChar;
            fs.CreateDirectory(name);

            string[] dirs = fs.GetDirectories("");
            Assert.AreEqual(1, dirs.Length);
            Assert.AreEqual(graphicChar, dirs[0]); // Uppercase

            Assert.IsTrue(fs.DirectoryExists(graphicChar));
        }

        // Formats a 32 MB partition; a directory tree written through one instance
        // must be visible to a second instance opened over the same stream.
        [Test]
        public void FormatPartition()
        {
            MemoryStream ms = new MemoryStream();

            Geometry g = Geometry.FromCapacity(1024 * 1024 * 32);
            FatFileSystem fs = FatFileSystem.FormatPartition(ms, "KBPARTITION", g, 0, (int)g.TotalSectors, 13);

            fs.CreateDirectory(@"DIRB\DIRC");

            FatFileSystem fs2 = new FatFileSystem(ms);
            Assert.AreEqual(1, fs2.Root.GetDirectories().Length);
        }

        // Directory names are upper-cased on creation; re-creating an existing
        // path (folder\subflder twice) must be harmless.
        [Test]
        public void CreateDirectory()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");

            fs.CreateDirectory(@"UnItTeSt");
            Assert.AreEqual("UNITTEST", fs.Root.GetDirectories("UNITTEST")[0].Name);

            fs.CreateDirectory(@"folder\subflder");
            Assert.AreEqual("FOLDER", fs.Root.GetDirectories("FOLDER")[0].Name);

            fs.CreateDirectory(@"folder\subflder");
            Assert.AreEqual("SUBFLDER", fs.Root.GetDirectories("FOLDER")[0].GetDirectories("SUBFLDER")[0].Name);
        }

        // A freshly formatted filesystem over a writable stream reports CanWrite.
        [Test]
        public void CanWrite()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");
            Assert.AreEqual(true, fs.CanWrite);
        }

        // The volume label is preserved; a null label falls back to the default.
        [Test]
        public void Label()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");
            Assert.AreEqual("FLOPPY_IMG ", fs.VolumeLabel);

            fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, null);
            Assert.AreEqual("NO NAME ", fs.VolumeLabel);
        }

        // GetFileInfo must return an info object even for a path that doesn't exist.
        [Test]
        public void FileInfo()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");
            DiscFileInfo fi = fs.GetFileInfo(@"SOMEDIR\SOMEFILE.TXT");
            Assert.IsNotNull(fi);
        }

        // GetDirectoryInfo must return an info object even for a non-existent directory.
        [Test]
        public void DirectoryInfo()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");
            DiscDirectoryInfo fi = fs.GetDirectoryInfo(@"SOMEDIR");
            Assert.IsNotNull(fi);
        }

        // GetFileSystemInfo must return an info object even for a non-existent path.
        [Test]
        public void FileSystemInfo()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");
            DiscFileSystemInfo fi = fs.GetFileSystemInfo(@"SOMEDIR\SOMEFILE");
            Assert.IsNotNull(fi);
        }

        // The root directory always exists, has an empty name and no parent.
        [Test]
        public void Root()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");
            Assert.IsNotNull(fs.Root);
            Assert.IsTrue(fs.Root.Exists);
            Assert.IsEmpty(fs.Root.Name);
            Assert.IsNull(fs.Root.Parent);
        }

        // Enumerating an existing FILE as if it were a directory must throw
        // DirectoryNotFoundException (asserted via the NUnit 2 attribute below).
        [Test]
        [ExpectedException(typeof(DirectoryNotFoundException))]
        [Category("ThrowsException")]
        public void OpenFileAsDir()
        {
            FatFileSystem fs = FatFileSystem.FormatFloppy(new MemoryStream(), FloppyDiskType.HighDensity, "FLOPPY_IMG ");

            using (Stream s = fs.OpenFile("FOO.TXT", FileMode.Create, FileAccess.ReadWrite))
            {
                StreamWriter w = new StreamWriter(s);
                w.WriteLine("FOO - some sample text");
                w.Flush();
            }

            fs.GetFiles("FOO.TXT");
        }

        // A filesystem opened over a read-only view of the disk stream must still
        // be able to open and read files without attempting any writes.
        [Test]
        public void HonoursReadOnly()
        {
            SparseMemoryStream diskStream = new SparseMemoryStream();
            FatFileSystem fs = FatFileSystem.FormatFloppy(diskStream, FloppyDiskType.HighDensity, "FLOPPY_IMG ");

            fs.CreateDirectory(@"AAA");
            fs.CreateDirectory(@"BAR");
            using (Stream t = fs.OpenFile(@"BAR\AAA.TXT", FileMode.Create, FileAccess.ReadWrite))
            {
            }
            using (Stream s = fs.OpenFile(@"BAR\FOO.TXT", FileMode.Create, FileAccess.ReadWrite))
            {
                StreamWriter w = new StreamWriter(s);
                w.WriteLine("FOO - some sample text");
                w.Flush();
            }
            // Pre-set access times so reading later needn't update them.
            fs.SetLastAccessTimeUtc(@"BAR", new DateTime(1980, 1, 1));
            fs.SetLastAccessTimeUtc(@"BAR\FOO.TXT", new DateTime(1980, 1, 1));

            // Check we can access a file without any errors
            SparseStream roDiskStream = SparseStream.ReadOnly(diskStream, Ownership.None);
            FatFileSystem fatFs = new FatFileSystem(roDiskStream);
            using (Stream fileStream = fatFs.OpenFile(@"BAR\FOO.TXT", FileMode.Open))
            {
                fileStream.ReadByte();
            }
        }
    }
}
using System.Collections.Generic;
using System.Security;
using System.Text;

namespace System.IO
{
    /// <summary>
    ///     Interface to interact with the filesystem in an asynchronous fashion only.
    /// </summary>
    /// <remarks>
    ///     NOTE(review): every member below has a synchronous signature; confirm whether the
    ///     "asynchronous fashion only" wording from the original summary still describes the
    ///     intended contract.
    /// </remarks>
    public interface IFilesystem
    {
        #region Watching

        /// <summary>
        ///     Allows monitoring of folders.
        /// </summary>
        IFilesystemWatchdog Watchdog { get; }

        #endregion

        #region Directories

        /// <summary>
        ///     The current directory, used when relative paths are given to any of these methods.
        /// </summary>
        string CurrentDirectory { get; set; }

        /// <summary>
        ///     An object representing the current directory.
        /// </summary>
        IDirectoryInfo Current { get; }

        /// <summary>
        ///     The current root directories (="drives") of this filesystem.
        /// </summary>
        IEnumerable<IDirectoryInfo> Roots { get; }

        /// <summary>
        ///     Creates the given directory if it doesn't exist yet.
        /// </summary>
        /// <param name="path">The directory to create.</param>
        /// <returns>An <see cref="IDirectoryInfo" /> describing the directory at <paramref name="path" />.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        IDirectoryInfo CreateDirectory(string path);

        /// <summary>
        ///     Deletes an empty directory from a specified path.
        /// </summary>
        /// <param name="path">The directory to delete.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        void DeleteDirectory(string path);

        /// <summary>
        ///     Deletes the specified directory and, if indicated, any subdirectories and files
        ///     in the directory.
        /// </summary>
        /// <param name="path">The directory to delete.</param>
        /// <param name="recursive">True to also delete all contained files and subdirectories.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        void DeleteDirectory(string path, bool recursive);

        /// <summary>
        ///     Determines whether the given path refers to an existing directory on disk.
        /// </summary>
        /// <param name="path">The path to test.</param>
        /// <returns>True if the directory exists, false otherwise.</returns>
        bool DirectoryExists(string path);

        /// <summary>
        ///     Obtains information about the given directory.
        /// </summary>
        /// <param name="path">The directory to query.</param>
        /// <returns>An <see cref="IDirectoryInfo" /> for <paramref name="path" />.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        IDirectoryInfo GetDirectoryInfo(string path);

        /// <summary>
        ///     Returns an enumerable collection of file names in a specified path which match
        ///     the specified pattern (if a pattern is specified).
        /// </summary>
        /// <param name="path">The directory to search.</param>
        /// <param name="searchPattern">Optional wildcard pattern; null matches every file.</param>
        /// <param name="searchOption">Whether to search only <paramref name="path" /> or all of its subdirectories as well.</param>
        /// <param name="tolerateNonExistantPath">
        ///     When set to true, then this method will never throw a
        ///     <see cref="DirectoryNotFoundException" /> exception and instead return an empty enumeration
        /// </param>
        /// <returns>The matching file names.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        IReadOnlyList<string> EnumerateFiles(string path,
                                             string searchPattern = null,
                                             SearchOption searchOption = SearchOption.TopDirectoryOnly,
                                             bool tolerateNonExistantPath = false);

        /// <summary>
        ///     Returns an enumerable collection of directory names in a specified path.
        /// </summary>
        /// <param name="path">The directory to search.</param>
        /// <returns>The names of the directories in <paramref name="path" />.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        IReadOnlyList<string> EnumerateDirectories(string path);

        /// <summary>
        ///     Returns an enumerable collection of directory names in a specified path.
        /// </summary>
        /// <param name="path">The directory to search.</param>
        /// <param name="searchPattern">Wildcard pattern the returned names must match.</param>
        /// <returns>The matching directory names.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        IReadOnlyList<string> EnumerateDirectories(string path, string searchPattern);

        /// <summary>
        ///     Returns an enumerable collection of directory names in a specified path.
        /// </summary>
        /// <param name="path">The directory to search.</param>
        /// <param name="searchPattern">Wildcard pattern the returned names must match.</param>
        /// <param name="searchOption">Whether to search only <paramref name="path" /> or all of its subdirectories as well.</param>
        /// <returns>The matching directory names.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        IReadOnlyList<string> EnumerateDirectories(string path, string searchPattern, SearchOption searchOption);

        #endregion

        #region Files

        /// <summary>
        ///     Returns the creation time, in UTC, of the file at the given path.
        /// </summary>
        /// <param name="fullPath">The file to query.</param>
        /// <returns>The file's creation time in UTC.</returns>
        DateTime FileCreationTimeUtc(string fullPath);

        /// <summary>
        ///     Returns the last-access time, in UTC, of the file at the given path.
        /// </summary>
        /// <param name="fullPath">The file to query.</param>
        /// <returns>The file's last access time in UTC.</returns>
        DateTime FileLastAccessTimeUtc(string fullPath);

        /// <summary>
        ///     Returns the last-write time, in UTC, of the file at the given path.
        /// </summary>
        /// <param name="fullPath">The file to query.</param>
        /// <returns>The file's last write time in UTC.</returns>
        DateTime FileLastWriteTimeUtc(string fullPath);

        /// <summary>
        ///     Obtains information about the given file.
        /// </summary>
        /// <param name="fileName">The file to query.</param>
        /// <returns>An <see cref="IFileInfo" /> for <paramref name="fileName" />.</returns>
        /// <exception cref="ArgumentException"><paramref name="fileName" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="fileName" /> is null.</exception>
        /// <exception cref="PathTooLongException">The specified <paramref name="fileName" /> exceeds the system-defined maximum length.</exception>
        IFileInfo GetFileInfo(string fileName);

        /// <summary>
        ///     Determines whether the specified file exists.
        /// </summary>
        /// <param name="path">The file to check.</param>
        /// <returns>True if the file exists, false otherwise.</returns>
        bool FileExists(string path);

        /// <summary>
        ///     The length of a file (in bytes).
        /// </summary>
        /// <param name="path">The file to query.</param>
        /// <returns>The file's length in bytes.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        long FileLength(string path);

        /// <summary>
        ///     Whether or not the given file is readonly.
        /// </summary>
        /// <param name="path">The file to query.</param>
        /// <returns>True if the file is read-only, false otherwise.</returns>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        bool IsFileReadOnly(string path);

        /// <summary>
        ///     Creates a file in a particular path. If the file exists, it is replaced.
        ///     The file is opened with ReadWrite access and cannot be opened by another
        ///     application until it has been closed. An IOException is thrown if the
        ///     directory specified doesn't exist.
        ///     Your application must have Create, Read, and Write permissions to the file.
        /// </summary>
        /// <param name="path">The file to create.</param>
        /// <returns>A read/write stream over the newly created file.</returns>
        /// <exception cref="UnauthorizedAccessException">The caller does not have the required permission, or <paramref name="path" /> specified a file that is read-only.</exception>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="path" /> is null.</exception>
        /// <exception cref="PathTooLongException">The specified <paramref name="path" /> exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified <paramref name="path" /> is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="IOException">An I/O error occurred while creating the file.</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        Stream CreateFile(string path);

        /// <summary>
        ///     Opens the given file.
        /// </summary>
        /// <param name="path">The file to open.</param>
        /// <param name="mode">How the file should be opened or created.</param>
        /// <param name="access">The read/write access requested on the stream.</param>
        /// <param name="share">The access other handles may have while this one is open.</param>
        /// <returns>A stream over the file.</returns>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="path" /> is null.</exception>
        /// <exception cref="PathTooLongException">The specified <paramref name="path" /> exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified <paramref name="path" /> is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="UnauthorizedAccessException"><paramref name="path" /> specified a directory, or the caller does not have the required permission.</exception>
        /// <exception cref="FileNotFoundException">The file specified in <paramref name="path" /> was not found.</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        /// <exception cref="IOException">An I/O error occurred while opening the file.</exception>
        Stream Open(string path, FileMode mode, FileAccess access, FileShare share);

        /// <summary>
        ///     Opens the given file for reading.
        /// </summary>
        /// <param name="path">The file to open for reading.</param>
        /// <returns>A read-only stream over the file.</returns>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="path" /> is null.</exception>
        /// <exception cref="PathTooLongException">The specified <paramref name="path" /> exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified <paramref name="path" /> is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="UnauthorizedAccessException"><paramref name="path" /> specified a directory, or the caller does not have the required permission.</exception>
        /// <exception cref="FileNotFoundException">The file specified in <paramref name="path" /> was not found.</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        /// <exception cref="IOException">An I/O error occurred while opening the file.</exception>
        Stream OpenRead(string path);

        /// <summary>
        ///     Opens an existing file or creates a new file for writing.
        /// </summary>
        /// <param name="path">The file to open for writing.</param>
        /// <returns>A writable stream over the file.</returns>
        /// <exception cref="UnauthorizedAccessException">The caller does not have the required permission, or <paramref name="path" /> specified a read-only file or directory.</exception>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="path" /> is null.</exception>
        /// <exception cref="PathTooLongException">The specified path or file name exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified path is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        Stream OpenWrite(string path);

        /// <summary>
        ///     Creates a new file, writes the specified stream from its current position, and then
        ///     closes the file. If the target file already exists, it is overwritten.
        /// </summary>
        /// <remarks>
        ///     The given buffer may not be modified until the task has finished.
        ///     NOTE(review): the original remark references "the task" although this member is
        ///     declared synchronous — confirm against the implementations.
        /// </remarks>
        /// <param name="path">The file to write to.</param>
        /// <param name="stream">The data to write, read from its current position.</param>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> or <paramref name="stream" /> is null</exception>
        /// <exception cref="PathTooLongException">The specified path or file name exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified path is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="IOException">An I/O error occurred while opening the file.</exception>
        /// <exception cref="UnauthorizedAccessException"><paramref name="path" /> specified a read-only file or a directory, this operation is not supported on the current platform, or the caller does not have the required permission.</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        /// <exception cref="SecurityException">The caller does not have the required permission.</exception>
        void Write(string path, Stream stream);

        /// <summary>
        ///     Writes the given data to the given file.
        /// </summary>
        /// <remarks>
        ///     This method copies the given buffer before writing to the file on the I/O thread.
        /// </remarks>
        /// <param name="path">The file to write to.</param>
        /// <param name="bytes">The data to write.</param>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> or <paramref name="bytes" /> is null</exception>
        /// <exception cref="PathTooLongException">The specified <paramref name="path" /> exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified <paramref name="path" /> is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="IOException">An I/O error occurred while opening the file.</exception>
        /// <exception cref="UnauthorizedAccessException"><paramref name="path" /> specified a read-only file or a directory, this operation is not supported on the current platform, or the caller does not have the required permission.</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        /// <exception cref="SecurityException">The caller does not have the required permission.</exception>
        void WriteAllBytes(string path, byte[] bytes);

        /// <summary>
        ///     Creates a new file, writes the specified string to the file, and then closes the file.
        ///     If the target file already exists, it is overwritten.
        /// </summary>
        /// <param name="path">The file to write to.</param>
        /// <param name="contents">The string to write to the file.</param>
        void WriteAllText(string path, string contents);

        /// <summary>
        ///     Creates a new file, writes the specified string to the file using the specified encoding,
        ///     and then closes the file. If the target file already exists, it is overwritten.
        /// </summary>
        /// <param name="path">The file to write to.</param>
        /// <param name="contents">The string to write to the file.</param>
        /// <param name="encoding">The encoding to apply to the string.</param>
        void WriteAllText(string path, string contents, Encoding encoding);

        /// <summary>
        ///     Opens a binary file, reads the contents of the file into a byte array,
        ///     and then closes the file.
        /// </summary>
        /// <param name="path">The file to open for reading.</param>
        /// <returns>The file's entire contents.</returns>
        /// <exception cref="ArgumentException">path is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException">When <paramref name="path" /> is null</exception>
        /// <exception cref="PathTooLongException">The specified <paramref name="path" /> exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified <paramref name="path" /> is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="IOException">An I/O error occurred while opening the file.</exception>
        /// <exception cref="UnauthorizedAccessException"><paramref name="path" /> specified a directory, or the caller does not have the required permission.</exception>
        /// <exception cref="FileNotFoundException">The file specified in <paramref name="path" /> was not found.</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        /// <exception cref="SecurityException">The caller does not have the required permission.</exception>
        byte[] ReadAllBytes(string path);

        /// <summary>
        ///     Opens a text file, reads all the text in the file into a string, and then closes the file.
        /// </summary>
        /// <param name="path">The file to open for reading.</param>
        /// <returns>The file's entire content as text.</returns>
        string ReadAllText(string path);

        /// <summary>
        ///     Opens a file, reads all text in the file with the specified encoding, and then closes the file.
        /// </summary>
        /// <param name="path">The file to open for reading.</param>
        /// <param name="encoding">The encoding applied to the contents of the file.</param>
        /// <returns>The file's entire content as text.</returns>
        string ReadAllText(string path, Encoding encoding);

        /// <summary>
        ///     Opens a text file, reads all lines of the file, and then closes the file.
        /// </summary>
        /// <param name="path">The file to open for reading.</param>
        /// <returns>The file's lines.</returns>
        IReadOnlyList<string> ReadAllLines(string path);

        /// <summary>
        ///     Opens a file, reads all lines of the file with the specified encoding, and then closes the file.
        /// </summary>
        /// <param name="path">The file to open for reading.</param>
        /// <param name="encoding">The encoding applied to the contents of the file.</param>
        /// <returns>The file's lines.</returns>
        IReadOnlyList<string> ReadAllLines(string path, Encoding encoding);

        /// <summary>
        ///     Copies an existing file to a new file. Overwriting a file of the same name is not allowed.
        /// </summary>
        /// <param name="sourceFileName">The file to copy.</param>
        /// <param name="destFileName">The name of the destination file. This cannot be a directory or an existing file.</param>
        /// <exception cref="UnauthorizedAccessException">The caller does not have the required permission.</exception>
        /// <exception cref="ArgumentException"><paramref name="sourceFileName" /> or <paramref name="destFileName" /> is a zero-length string, contains only white space, contains invalid characters as defined by InvalidPathChars, or specifies a directory.</exception>
        /// <exception cref="PathTooLongException">The specified path or file name exceeds the system-defined maximum length.</exception>
        /// <exception cref="DirectoryNotFoundException">The path specified in <paramref name="sourceFileName" /> or <paramref name="destFileName" /> is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="FileNotFoundException"><paramref name="sourceFileName" /> was not found.</exception>
        /// <exception cref="IOException"><paramref name="destFileName" /> exists</exception>
        /// <exception cref="NotSupportedException"><paramref name="sourceFileName" /> or <paramref name="destFileName" /> is in an invalid format.</exception>
        void CopyFile(string sourceFileName, string destFileName);

        /// <summary>
        ///     Deletes the specified file.
        ///     If the file does not exist, Delete succeeds without throwing an exception.
        /// </summary>
        /// <param name="path">The name of the file to be deleted. Wildcard characters are not supported.</param>
        /// <exception cref="ArgumentException"><paramref name="path" /> is a zero-length string, contains only white space, or contains one or more invalid characters as defined by InvalidPathChars.</exception>
        /// <exception cref="ArgumentNullException"><paramref name="path" /> is null.</exception>
        /// <exception cref="DirectoryNotFoundException">The specified path is invalid (for example, it is on an unmapped drive).</exception>
        /// <exception cref="IOException">The specified file is in use, or there is an open handle on the file on Windows XP or earlier (e.g. from enumerating directories and files).</exception>
        /// <exception cref="NotSupportedException"><paramref name="path" /> is in an invalid format.</exception>
        /// <exception cref="PathTooLongException">The specified path or file name exceeds the system-defined maximum length.</exception>
        /// <exception cref="UnauthorizedAccessException">The caller does not have the required permission, the file is an executable file that is in use, path is a directory, or path specified a read-only file.</exception>
        void DeleteFile(string path);

        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using ICSharpCode.NRefactory.TypeSystem;
using ICSharpCode.NRefactory.TypeSystem.Implementation;
using Saltarelle.Compiler.JSModel.Expressions;

namespace Saltarelle.Compiler.Tests {
	/// <summary>
	/// Test double for <c>IRuntimeLibrary</c>. Every runtime-library operation is exposed as a
	/// settable <see cref="Func{T,TResult}"/> property so individual tests can substitute behavior;
	/// the defaults (installed by the constructor) emit recognizable placeholder invocations such as
	/// <c>$TypeIs(...)</c> or <c>$Cast(...)</c>, which tests then compare against expected script text.
	/// Because expected output is matched textually, the exact identifiers produced here are part of
	/// the contract with the test suite.
	/// </summary>
	public class MockRuntimeLibrary : IRuntimeLibrary {
		// The syntactic context in which a type reference is being generated. Each context gets its
		// own short prefix (see GetTypeContextShortName) so expected test output can tell them apart.
		private enum TypeContext {
			GenericArgument,
			TypeOf,
			CastTarget,
			GetDefaultValue,
			UseStaticMember,
			BindBaseCall,
		}

		// Maps a TypeContext to the short tag prepended to generated type names (e.g. "ct_C" for a
		// cast target referencing type C).
		private string GetTypeContextShortName(TypeContext c) {
			switch (c) {
				case TypeContext.GenericArgument: return "ga";
				case TypeContext.TypeOf:          return "to";
				case TypeContext.UseStaticMember: return "sm";
				case TypeContext.CastTarget:      return "ct";
				case TypeContext.GetDefaultValue: return "def";
				case TypeContext.BindBaseCall:    return "bind";
				default: throw new ArgumentException("c");
			}
		}

		/// <summary>
		/// Installs the default implementation for every hook: each one wraps its operands in a
		/// <c>$Name(...)</c> placeholder invocation, resolving type operands via
		/// <see cref="GetScriptType"/> with the appropriate <see cref="TypeContext"/>.
		/// </summary>
		public MockRuntimeLibrary() {
			GetTypeOf                                     = (t, c)          => GetScriptType(t, TypeContext.TypeOf, c.ResolveTypeParameter);
			InstantiateType                               = (t, c)          => GetScriptType(t, TypeContext.UseStaticMember, c.ResolveTypeParameter);
			InstantiateTypeForUseAsTypeArgumentInInlineCode = (t, c)        => GetScriptType(t, TypeContext.GenericArgument, c.ResolveTypeParameter);
			TypeIs                                        = (e, s, t, c)    => JsExpression.Invocation(JsExpression.Identifier("$TypeIs"), e, GetScriptType(t, TypeContext.CastTarget, c.ResolveTypeParameter));
			TryDowncast                                   = (e, s, d, c)    => JsExpression.Invocation(JsExpression.Identifier("$TryCast"), e, GetScriptType(d, TypeContext.CastTarget, c.ResolveTypeParameter));
			Downcast                                      = (e, s, d, c)    => JsExpression.Invocation(JsExpression.Identifier("$Cast"), e, GetScriptType(d, TypeContext.CastTarget, c.ResolveTypeParameter));
			Upcast                                        = (e, s, d, c)    => JsExpression.Invocation(JsExpression.Identifier("$Upcast"), e, GetScriptType(d, TypeContext.CastTarget, c.ResolveTypeParameter));
			ReferenceEquals                               = (a, b, c)       => JsExpression.Invocation(JsExpression.Identifier("$ReferenceEquals"), a, b);
			ReferenceNotEquals                            = (a, b, c)       => JsExpression.Invocation(JsExpression.Identifier("$ReferenceNotEquals"), a, b);
			// Generic method instantiation: the method expression followed by each type argument.
			InstantiateGenericMethod                      = (m, a, c)       => JsExpression.Invocation(JsExpression.Identifier("$InstantiateGenericMethod"), new[] { m }.Concat(a.Select(x => GetScriptType(x, TypeContext.GenericArgument, c.ResolveTypeParameter))));
			MakeException                                 = (e, c)          => JsExpression.Invocation(JsExpression.Identifier("$MakeException"), e);
			IntegerDivision                               = (n, d, c)       => JsExpression.Invocation(JsExpression.Identifier("$IntDiv"), n, d);
			FloatToInt                                    = (e, c)          => JsExpression.Invocation(JsExpression.Identifier("$Truncate"), e);
			Coalesce                                      = (a, b, c)       => JsExpression.Invocation(JsExpression.Identifier("$Coalesce"), a, b);
			Lift                                          = (e, c)          => JsExpression.Invocation(JsExpression.Identifier("$Lift"), e);
			FromNullable                                  = (e, c)          => JsExpression.Invocation(JsExpression.Identifier("$FromNullable"), e);
			LiftedBooleanAnd                              = (a, b, c)       => JsExpression.Invocation(JsExpression.Identifier("$LiftedBooleanAnd"), a, b);
			LiftedBooleanOr                               = (a, b, c)       => JsExpression.Invocation(JsExpression.Identifier("$LiftedBooleanOr"), a, b);
			Bind                                          = (f, t, c)       => JsExpression.Invocation(JsExpression.Identifier("$Bind"), f, t);
			BindFirstParameterToThis                      = (f, c)          => JsExpression.Invocation(JsExpression.Identifier("$BindFirstParameterToThis"), f);
			// dynamic has no script type to pass, so it gets a dedicated placeholder identifier.
			Default                                       = (t, c)          => t.Kind == TypeKind.Dynamic ? (JsExpression)JsExpression.Identifier("$DefaultDynamic") : JsExpression.Invocation(JsExpression.Identifier("$Default"), GetScriptType(t, TypeContext.GetDefaultValue, c.ResolveTypeParameter));
			CreateArray                                   = (t, dim, c)     => JsExpression.Invocation(JsExpression.Identifier("$CreateArray"), new[] { GetScriptType(t, TypeContext.GetDefaultValue, c.ResolveTypeParameter) }.Concat(dim));
			CloneDelegate                                 = (e, s, t, c)    => JsExpression.Invocation(JsExpression.Identifier("$CloneDelegate"), e);
			// Base calls encode: declaring type, "$"-prefixed member name, explicit type arguments
			// (empty array for non-generic methods), then the argument list.
			CallBase                                      = (m, a, c)       => JsExpression.Invocation(JsExpression.Identifier("$CallBase"), new[] { GetScriptType(m.DeclaringType, TypeContext.BindBaseCall, c.ResolveTypeParameter), JsExpression.String("$" + m.Name), JsExpression.ArrayLiteral(m is SpecializedMethod ? ((SpecializedMethod)m).TypeArguments.Select(x => GetScriptType(x, TypeContext.GenericArgument, c.ResolveTypeParameter)) : new JsExpression[0]), JsExpression.ArrayLiteral(a) });
			BindBaseCall                                  = (m, a, c)       => JsExpression.Invocation(JsExpression.Identifier("$BindBaseCall"), new[] { GetScriptType(m.DeclaringType, TypeContext.BindBaseCall, c.ResolveTypeParameter), JsExpression.String("$" + m.Name), JsExpression.ArrayLiteral(m is SpecializedMethod ? ((SpecializedMethod)m).TypeArguments.Select(x => GetScriptType(x, TypeContext.GenericArgument, c.ResolveTypeParameter)) : new JsExpression[0]), a });
			// d (dispose) may legitimately be absent for enumerators with nothing to clean up.
			MakeEnumerator                                = (yt, mn, gc, d, c) => JsExpression.Invocation(JsExpression.Identifier("$MakeEnumerator"), new[] { GetScriptType(yt, TypeContext.GenericArgument, c.ResolveTypeParameter), mn, gc, d ?? JsExpression.Null });
			MakeEnumerable                                = (yt, ge, c)     => JsExpression.Invocation(JsExpression.Identifier("$MakeEnumerable"), new[] { GetScriptType(yt, TypeContext.GenericArgument, c.ResolveTypeParameter), ge });
			GetMultiDimensionalArrayValue                 = (a, i, c)       => JsExpression.Invocation(JsExpression.Identifier("$MultidimArrayGet"), new[] { a }.Concat(i));
			SetMultiDimensionalArrayValue                 = (a, i, v, c)    => JsExpression.Invocation(JsExpression.Identifier("$MultidimArraySet"), new[] { a }.Concat(i).Concat(new[] { v }));
			// Non-generic Task has no type argument; a marker string stands in so tests can tell.
			CreateTaskCompletionSource                    = (t, c)          => JsExpression.Invocation(JsExpression.Identifier("$CreateTaskCompletionSource"), t != null ? GetScriptType(t, TypeContext.GenericArgument, c.ResolveTypeParameter) : JsExpression.String("non-generic"));
			SetAsyncResult                                = (t, v, c)       => JsExpression.Invocation(JsExpression.Identifier("$SetAsyncResult"), t, v ?? JsExpression.String("<<null>>"));
			SetAsyncException                             = (t, e, c)       => JsExpression.Invocation(JsExpression.Identifier("$SetAsyncException"), t, e);
			GetTaskFromTaskCompletionSource               = (t, c)          => JsExpression.Invocation(JsExpression.Identifier("$GetTask"), t);
			ApplyConstructor                              = (c, a, x)       => JsExpression.Invocation(JsExpression.Identifier("$ApplyConstructor"), c, a);
			ShallowCopy                                   = (s, t, c)       => JsExpression.Invocation(JsExpression.Identifier("$ShallowCopy"), s, t);
			GetMember                                     = (m, c)          => JsExpression.Invocation(JsExpression.Identifier("$GetMember"), GetScriptType(m.DeclaringType, TypeContext.TypeOf, c.ResolveTypeParameter), JsExpression.String(m.Name));
			GetExpressionForLocal                         = (n, a, t, c)    => JsExpression.Invocation(JsExpression.Identifier("$Local"), JsExpression.String(n), GetScriptType(t, TypeContext.TypeOf, c.ResolveTypeParameter), a);
		}

		// One settable hook per IRuntimeLibrary operation. Tests overwrite these to intercept or
		// alter the code the compiler generates for the corresponding runtime operation.
		public Func<IType, IRuntimeContext, JsExpression> GetTypeOf { get; set; }
		public Func<IType, IRuntimeContext, JsExpression> InstantiateType { get; set; }
		public Func<IType, IRuntimeContext, JsExpression> InstantiateTypeForUseAsTypeArgumentInInlineCode { get; set; }
		public Func<JsExpression, IType, IType, IRuntimeContext, JsExpression> TypeIs { get; set; }
		public Func<JsExpression, IType, IType, IRuntimeContext, JsExpression> TryDowncast { get; set; }
		public Func<JsExpression, IType, IType, IRuntimeContext, JsExpression> Downcast { get; set; }
		public Func<JsExpression, IType, IType, IRuntimeContext, JsExpression> Upcast { get; set; }
		public Func<JsExpression, IEnumerable<IType>, IRuntimeContext, JsExpression> InstantiateGenericMethod { get; set; }
		// 'new' intentionally hides static object.ReferenceEquals so the property name can match
		// the IRuntimeLibrary member.
		new public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> ReferenceEquals { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> ReferenceNotEquals { get; set; }
		public Func<JsExpression, IRuntimeContext, JsExpression> MakeException { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> IntegerDivision { get; set; }
		public Func<JsExpression, IRuntimeContext, JsExpression> FloatToInt { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> Coalesce { get; set; }
		public Func<JsExpression, IRuntimeContext, JsExpression> Lift { get; set; }
		public Func<JsExpression, IRuntimeContext, JsExpression> FromNullable { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> LiftedBooleanAnd { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> LiftedBooleanOr { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> Bind { get; set; }
		public Func<JsExpression, IRuntimeContext, JsExpression> BindFirstParameterToThis { get; set; }
		public Func<IType, IRuntimeContext, JsExpression> Default { get; set; }
		public Func<IType, IEnumerable<JsExpression>, IRuntimeContext, JsExpression> CreateArray { get; set; }
		public Func<JsExpression, IType, IType, IRuntimeContext, JsExpression> CloneDelegate { get; set; }
		public Func<IMethod, IEnumerable<JsExpression>, IRuntimeContext, JsExpression> CallBase { get; set; }
		public Func<IMethod, JsExpression, IRuntimeContext, JsExpression> BindBaseCall { get; set; }
		public Func<IType, JsExpression, JsExpression, JsExpression, IRuntimeContext, JsExpression> MakeEnumerator { get; set; }
		public Func<IType, JsExpression, IRuntimeContext, JsExpression> MakeEnumerable { get; set; }
		public Func<JsExpression, IEnumerable<JsExpression>, IRuntimeContext, JsExpression> GetMultiDimensionalArrayValue { get; set; }
		public Func<JsExpression, IEnumerable<JsExpression>, JsExpression, IRuntimeContext, JsExpression> SetMultiDimensionalArrayValue { get; set; }
		public Func<IType, IRuntimeContext, JsExpression> CreateTaskCompletionSource { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> SetAsyncResult { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> SetAsyncException { get; set; }
		public Func<JsExpression, IRuntimeContext, JsExpression> GetTaskFromTaskCompletionSource { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> ApplyConstructor { get; set; }
		public Func<JsExpression, JsExpression, IRuntimeContext, JsExpression> ShallowCopy { get; set; }
		public Func<IMember, IRuntimeContext, JsExpression> GetMember { get; set; }
		public Func<string, JsExpression, IType, IRuntimeContext, JsExpression> GetExpressionForLocal { get; set; }

		/// <summary>
		/// Produces a placeholder script reference for <paramref name="type"/>, tagged with the
		/// short name of <paramref name="context"/> so tests can see which context requested it.
		/// Handles closed generics (recursing into type arguments), open generics, arrays,
		/// anonymous types, plain type definitions, and type parameters (delegated to
		/// <paramref name="resolveTypeParameter"/>).
		/// </summary>
		private JsExpression GetScriptType(IType type, TypeContext context, Func<ITypeParameter, JsExpression> resolveTypeParameter) {
			string contextName = GetTypeContextShortName(context);
			if (type is ParameterizedType) {
				// Closed generic type: instantiate the generic definition with each type argument,
				// which is itself resolved in GenericArgument context.
				var pt = (ParameterizedType)type;
				return JsExpression.Invocation(JsExpression.Identifier(contextName + "_$InstantiateGenericType"), new[] { new JsTypeReferenceExpression(Common.CreateMockTypeDefinition(type.Name, Common.CreateMockAssembly())) }.Concat(pt.TypeArguments.Select(a => GetScriptType(a, TypeContext.GenericArgument, resolveTypeParameter))));
			}
			else if (type.TypeParameterCount > 0) {
				// This handles open generic types ( typeof(C<,>) )
				return new JsTypeReferenceExpression(Common.CreateMockTypeDefinition(contextName + "_" + type.GetDefinition().Name, Common.CreateMockAssembly()));
			}
			else if (type.Kind == TypeKind.Array) {
				return JsExpression.Invocation(JsExpression.Identifier(contextName + "_$Array"), GetScriptType(((ArrayType)type).ElementType, TypeContext.GenericArgument, resolveTypeParameter));
			}
			else if (type.Kind == TypeKind.Anonymous) {
				return JsExpression.Identifier(contextName + "_$Anonymous");
			}
			else if (type is ITypeDefinition) {
				return new JsTypeReferenceExpression(Common.CreateMockTypeDefinition(contextName + "_" + type.Name, Common.CreateMockAssembly()));
			}
			else if (type is ITypeParameter) {
				return resolveTypeParameter((ITypeParameter)type);
			}
			else {
				// NOTE(review): the '+' inside the literal looks like a typo — the message renders
				// as "Unsupported type + <type>"; probably meant "Unsupported type " + type.
				throw new ArgumentException("Unsupported type + " + type);
			}
		}

		// Explicit IRuntimeLibrary implementation: each member simply forwards to the matching
		// settable Func property above.
		JsExpression IRuntimeLibrary.TypeOf(IType type, IRuntimeContext context) {
			return GetTypeOf(type, context);
		}

		JsExpression IRuntimeLibrary.InstantiateType(IType type, IRuntimeContext context) {
			return InstantiateType(type, context);
		}

		JsExpression IRuntimeLibrary.InstantiateTypeForUseAsTypeArgumentInInlineCode(IType type, IRuntimeContext context) {
			return InstantiateTypeForUseAsTypeArgumentInInlineCode(type, context);
		}

		JsExpression IRuntimeLibrary.TypeIs(JsExpression expression, IType sourceType, IType targetType, IRuntimeContext context) {
			return TypeIs(expression, sourceType, targetType, context);
		}

		JsExpression IRuntimeLibrary.TryDowncast(JsExpression expression, IType sourceType, IType targetType, IRuntimeContext context) {
			return TryDowncast(expression, sourceType, targetType, context);
		}

		JsExpression IRuntimeLibrary.Downcast(JsExpression expression, IType sourceType, IType targetType, IRuntimeContext context) {
			return Downcast(expression, sourceType, targetType, context);
		}

		JsExpression IRuntimeLibrary.Upcast(JsExpression expression, IType sourceType, IType targetType, IRuntimeContext context) {
			return Upcast(expression, sourceType, targetType, context);
		}

		JsExpression IRuntimeLibrary.ReferenceEquals(JsExpression a, JsExpression b, IRuntimeContext context) {
			return ReferenceEquals(a, b, context);
		}

		JsExpression IRuntimeLibrary.ReferenceNotEquals(JsExpression a, JsExpression b, IRuntimeContext context) {
			return ReferenceNotEquals(a, b, context);
		}

		JsExpression IRuntimeLibrary.InstantiateGenericMethod(JsExpression type, IEnumerable<IType> typeArguments, IRuntimeContext context) {
			return InstantiateGenericMethod(type, typeArguments, context);
		}

		JsExpression IRuntimeLibrary.MakeException(JsExpression operand, IRuntimeContext context) {
			return MakeException(operand, context);
		}

		JsExpression IRuntimeLibrary.IntegerDivision(JsExpression numerator, JsExpression denominator, IRuntimeContext context) {
			return IntegerDivision(numerator, denominator, context);
		}

		JsExpression IRuntimeLibrary.FloatToInt(JsExpression operand, IRuntimeContext context) {
			return FloatToInt(operand, context);
		}

		JsExpression IRuntimeLibrary.Coalesce(JsExpression a, JsExpression b, IRuntimeContext context) {
			return Coalesce(a, b, context);
		}

		JsExpression IRuntimeLibrary.Lift(JsExpression expression, IRuntimeContext context) {
			return Lift(expression, context);
		}

		JsExpression IRuntimeLibrary.FromNullable(JsExpression expression, IRuntimeContext context) {
			return FromNullable(expression, context);
		}

		JsExpression IRuntimeLibrary.LiftedBooleanAnd(JsExpression a, JsExpression b, IRuntimeContext context) {
			return LiftedBooleanAnd(a, b, context);
		}

		JsExpression IRuntimeLibrary.LiftedBooleanOr(JsExpression a, JsExpression b, IRuntimeContext context) {
			return LiftedBooleanOr(a, b, context);
		}

		JsExpression IRuntimeLibrary.Bind(JsExpression function, JsExpression target, IRuntimeContext context) {
			return Bind(function, target, context);
		}

		JsExpression IRuntimeLibrary.BindFirstParameterToThis(JsExpression function, IRuntimeContext context) {
			return BindFirstParameterToThis(function, context);
		}

		JsExpression IRuntimeLibrary.Default(IType type, IRuntimeContext context) {
			return Default(type, context);
		}

		JsExpression IRuntimeLibrary.CreateArray(IType elementType, IEnumerable<JsExpression> size, IRuntimeContext context) {
			return CreateArray(elementType, size, context);
		}

		JsExpression IRuntimeLibrary.CloneDelegate(JsExpression source, IType sourceType, IType targetType, IRuntimeContext context) {
			return CloneDelegate(source, sourceType, targetType, context);
		}

		JsExpression IRuntimeLibrary.CallBase(IMethod method, IEnumerable<JsExpression> thisAndArguments, IRuntimeContext context) {
			return CallBase(method, thisAndArguments, context);
		}

		JsExpression IRuntimeLibrary.BindBaseCall(IMethod method, JsExpression @this, IRuntimeContext context) {
			return BindBaseCall(method, @this, context);
		}

		JsExpression IRuntimeLibrary.MakeEnumerator(IType yieldType, JsExpression moveNext, JsExpression getCurrent, JsExpression dispose, IRuntimeContext context) {
			return MakeEnumerator(yieldType, moveNext, getCurrent, dispose, context);
		}

		JsExpression IRuntimeLibrary.MakeEnumerable(IType yieldType, JsExpression getEnumerator, IRuntimeContext context) {
			return MakeEnumerable(yieldType, getEnumerator, context);
		}

		JsExpression IRuntimeLibrary.GetMultiDimensionalArrayValue(JsExpression array, IEnumerable<JsExpression> indices, IRuntimeContext context) {
			return GetMultiDimensionalArrayValue(array, indices, context);
		}

		JsExpression IRuntimeLibrary.SetMultiDimensionalArrayValue(JsExpression array, IEnumerable<JsExpression> indices, JsExpression value, IRuntimeContext context) {
			return SetMultiDimensionalArrayValue(array, indices, value, context);
		}

		JsExpression IRuntimeLibrary.CreateTaskCompletionSource(IType taskGenericArgument, IRuntimeContext context) {
			return CreateTaskCompletionSource(taskGenericArgument, context);
		}

		JsExpression IRuntimeLibrary.SetAsyncResult(JsExpression taskCompletionSource, JsExpression value, IRuntimeContext context) {
			return SetAsyncResult(taskCompletionSource, value, context);
		}

		JsExpression IRuntimeLibrary.SetAsyncException(JsExpression taskCompletionSource, JsExpression exception, IRuntimeContext context) {
			return SetAsyncException(taskCompletionSource, exception, context);
		}

		JsExpression IRuntimeLibrary.GetTaskFromTaskCompletionSource(JsExpression taskCompletionSource, IRuntimeContext context) {
			return GetTaskFromTaskCompletionSource(taskCompletionSource, context);
		}

		JsExpression IRuntimeLibrary.ApplyConstructor(JsExpression constructor, JsExpression argumentsArray, IRuntimeContext context) {
			return ApplyConstructor(constructor, argumentsArray, context);
		}

		JsExpression IRuntimeLibrary.ShallowCopy(JsExpression source, JsExpression target, IRuntimeContext context) {
			return ShallowCopy(source, target, context);
		}

		JsExpression IRuntimeLibrary.GetMember(IMember member, IRuntimeContext context) {
			return GetMember(member, context);
		}

		JsExpression IRuntimeLibrary.GetExpressionForLocal(string name, JsExpression accessor, IType type, IRuntimeContext context) {
			return GetExpressionForLocal(name, accessor, type, context);
		}
	}
}
#pragma warning disable 414

using System;
using System.Globalization;

// Test fixture for delegate-binding (Delegate.CreateDelegate / BindToMethodName) behavior.
// Each target method writes a unique tag into its 'out string' parameter so tests can verify
// WHICH overload was actually bound, since the overloads are otherwise behaviorally identical.

//Define all the standard delegates to be used
public delegate int iDi(int i, out string m);
public delegate int iDNi(int? i, out string m);
public delegate int iDI(I i, out string m);
public delegate int iDS(S s, out string m);
public delegate int iDNS(S? s, out string m);
public delegate int IDo(object o, out string m);
public delegate S SDi(int i, out string m);
public delegate S? NSDi(int i, out string m);
public delegate I IDi(int i, out string m);
public delegate int iDo(object o, out string m);
public delegate object oDi(int i, out string m);

//Define all the open instance delegates to be used (first parameter T is the receiver)
public delegate int iDi<T>(T t,int i, out string m);
public delegate int iDNi<T>(T t,int? i, out string m);
public delegate int iDI<T>(T t,I i, out string m);
public delegate int iDS<T>(T t,S s, out string m);
public delegate int iDNS<T>(T t,S? s, out string m);
public delegate int iDo<T>(T t,object o, out string m);
public delegate S SDi<T>(T t, int i, out string m);
public delegate S? NSDi<T>(T t, int i, out string m);
public delegate I IDi<T>(T t, int i, out string m);
public delegate int IDo<T>(T t,object o, out string m);
public delegate object oDi<T>(T t, int i, out string m);

//Define all the closed static delegates to be used
public delegate T tD<T>(out string m);
public delegate T tDi<T>(int i, out string m);

//@TODO - Are G<Foo> and G<Foo?> equivalent? Can you even specify a G<Foo?>??
//@TODO - Can you close over an out or ref parameter???
//@TODO - another case, ex. close this method static M(int? i) over a null argument
//@TODO - A delegate declared as D(S?) used as an open instance to bind to a method on S????? Probably just doesn't work as the type isn't really an S, might work to bind to methods on the Nullable<T> type, but that would be expected. Should check it to be sure.

//Define the custom types to be used

// Minimal interface implemented by S; lets tests bind through an interface-typed parameter.
public interface I{
	bool Equals(int i);
}

// Value type used as a delegate argument/return. The dummy fields inflate its size so that
// struct-copying paths in the binder stub are exercised.
public struct S : I{
	//Dummy fields to extend this value type and stress
	//the stub. We really don't care that they're not used.
	private double f1,f2,f3,f4,f5,f6,f7,f8,f9,f10;

	//An assignable field to be checked for correctness
	public int value;

	public S(int i){
		f1=0;f2=0;f3=0;f4=0;f5=0;f6=0;f7=0;f8=0;f9=0;f10=0;//@BUGBUG - It puzzles me to no end why there is a compiler error if I don't initialize these in the constructor
		value = i;
	}

	public bool Equals(int i){
		return (value==i);
	}

	//For later cleanliness
	public static bool operator ==(S s, int i){
		return s.Equals(i);
	}
	public static bool operator !=(S s, int i){
		// Fixed: removed stray empty statement (double semicolon) after the return.
		return !s.Equals(i);
	}
	public override bool Equals(object o){
		throw new Exception("this just exists to stop a compiler warning, don't call it");
	}
	public override int GetHashCode(){
		throw new Exception("this just exists to stop a compiler warning, don't call it");
	}
}

//Define the various delegate target methods

// Instance methods on a reference class.
// The out parameters are a crude tag to verify which method was actually called. Necessary
// because the other functionality of the methods is pretty much identical.
public class RefInst{
	#region Overloads for BindToMethodName ambiguity testing
	//These should appear in order from most general to most
	//specific or (@TODO) we should have additional tests that
	//vary the method order. This is to confirm that any
	//ambiguous matching logic in BindToMethodName isn't just
	//settling for the first "match" it sees. There should
	//be no ambiguity at all in matching.
	public int M(int? i, out string m){
		m = "one";
		if(i==null) throw new ArgumentNullException();
		else return (int)i;
	}
	public int M(S? s, out string m){
		m = "two";
		if(s==null) throw new ArgumentException();
		else return ((S)s).value;
	}
	public int M(I i, out string m){
		m = "three";
		if(i==null) throw new ArgumentNullException();
		if(!(i is S)) throw new ArgumentException();
		return ((S)i).value;
	}
	public int M(object o, out string m){
		m = "four";
		if(o == null) throw new ArgumentNullException();
		if(!(o is S)) throw new ArgumentException();
		return ((S)o).value;
	}
	public int M(S s, out string m){
		m = "five";
		return s.value;
	}
	public int M(int i, out string m){
		m = "six";
		return i;
	}
	#endregion

	#region Non-overloaded methods to allow for (easier) explicit method selection
	public int iMNi(int? i, out string m){
		m = "iMNi";
		if(i==null) throw new ArgumentNullException();
		else return (int)i;
	}
	public int iMNS(S? s, out string m){
		m = "iMNS";
		if(s==null) throw new ArgumentException();
		else return ((S)s).value;
	}
	public int iMI(I i, out string m){
		m = "iMI";
		if(i==null) throw new ArgumentNullException();
		if(!(i is S)) throw new ArgumentException();
		return ((S)i).value;
	}
	public int iMo(object o, out string m){
		m = "iMo";
		if(o == null) throw new ArgumentNullException();
		if(!(o is S)) throw new ArgumentException();
		return ((S)o).value;
	}
	public int iMS(S s, out string m){
		m = "iMS";
		return s.value;
	}
	public int iMi(int i, out string m){
		m = "iMi";
		return i;
	}
	#endregion

	// Factory-style targets covering each interesting return type (S, S?, I, object).
	public S SMi(int i, out string m){
		m = "SMi";
		return new S(i);
	}
	public S? NSMi(int i, out string m){
		m = "NSMi";
		return new S(i);
	}
	public I IMi(int i, out string m){
		m = "IMi";
		return new S(i);
	}
	public object oMi(int i, out string m){
		m = "oMi";
		return new S(i);
	}
}

// Static methods on a reference class; mirrors RefInst member-for-member.
// The out parameters are a crude tag to verify which method was actually called. Necessary
// because the other functionality of the methods is pretty much identical.
public class RefStat{
	#region Overloads for BindToMethodName ambiguity testing
	//These should appear in order from most general to most
	//specific or (@TODO) we should have additional tests that
	//vary the method order. This is to confirm that any
	//ambiguous matching logic in BindToMethodName isn't just
	//settling for the first "match" it sees. There should
	//be no ambiguity at all in matching.
	public static int M(int? i, out string m){
		m = "one";
		if(i==null) throw new ArgumentNullException();
		else return (int)i;
	}
	public static int M(S? s, out string m){
		m = "two";
		if(s==null) throw new ArgumentException();
		else return ((S)s).value;
	}
	public static int M(I i, out string m){
		m = "three";
		if(i==null) throw new ArgumentNullException();
		if(!(i is S)) throw new ArgumentException();
		return ((S)i).value;
	}
	public static int M(object o, out string m){
		m = "four";
		if(o == null) throw new ArgumentNullException();
		if(!(o is S)) throw new ArgumentException();
		return ((S)o).value;
	}
	public static int M(S s, out string m){
		m = "five";
		return s.value;
	}
	public static int M(int i, out string m){
		m = "six";
		return i;
	}
	#endregion

	#region Non-overloaded methods to allow for (easier) explicit method selection
	public static int iMNi(int? i, out string m){
		m = "iMNi";
		if(i==null) throw new ArgumentNullException();
		else return (int)i;
	}
	public static int iMNS(S? s, out string m){
		m = "iMNS";
		if(s==null) throw new ArgumentException();
		else return ((S)s).value;
	}
	public static int iMI(I i, out string m){
		m = "iMI";
		if(i==null) throw new ArgumentNullException();
		if(!(i is S)) throw new ArgumentException();
		return ((S)i).value;
	}
	public static int iMo(object o, out string m){
		m = "iMo";
		if(o == null) throw new ArgumentNullException();
		if(!(o is S)) throw new ArgumentException();
		return ((S)o).value;
	}
	public static int iMS(S s, out string m){
		m = "iMS";
		return s.value;
	}
	public static int iMi(int i, out string m){
		m = "iMi";
		return i;
	}
	#endregion

	// Factory-style targets covering each interesting return type (S, S?, I, object).
	public static S SMi(int i, out string m){
		m = "SMi";
		return new S(i);
	}
	public static S? NSMi(int i, out string m){
		m = "NSMi";
		return new S(i);
	}
	public static I IMi(int i, out string m){
		m = "IMi";
		return new S(i);
	}
	public static object oMi(int i, out string m){
		m = "oMi";
		return new S(i);
	}
}

public struct ValInst{
	//Instance methods on a value class
}

public struct ValStat{
	//Static methods on a value class
}

//Some reusable helper methods
public class Util{
	//Method to do quick culture invariant string comparisons (quick in the sense that I don't have to type of cultureinfo.invariantlsjflakjdlfjsldkjf 7000 times).
	public static bool Equals(string s1, string s2){
		return String.Equals(s1, s2, StringComparison.Ordinal);
	}
}

#pragma warning restore
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Text.Encodings.Web;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Html;

namespace OrchardCore.DisplayManagement.Liquid
{
    /// <summary>
    /// An <see cref="IHtmlContent"/> implementation that inherits from <see cref="TextWriter"/> to write to the ASP.NET ViewBufferTextWriter
    /// in an optimal way.
    /// Content is accumulated into pooled <see cref="StringBuilder"/> instances: the current builder is
    /// filled up to (but never beyond) its Capacity, then a fresh pooled builder is chained on
    /// (see <see cref="AllocateBuilder"/>), so no builder ever grows/reallocates. <see cref="WriteTo"/>
    /// replays all chained builders, in order, into the target writer.
    /// </summary>
    public class ViewBufferTextWriterContent : TextWriter, IHtmlContent
    {
        // Current builder being appended to (always _pooledBuilder.Builder); null after release.
        private StringBuilder _builder;
        // Pool handle for the current builder; disposed to return the builder to the pool.
        private StringBuilderPool _pooledBuilder;
        // Earlier, already-full builders, in write order; lazily allocated on first overflow.
        private List<StringBuilderPool> _previousPooledBuilders;
        // When true, WriteTo releases all pooled builders after rendering (single-shot content).
        private readonly bool _releaseOnWrite;

        public override Encoding Encoding => Encoding.UTF8;

        /// <summary>
        /// Creates the writer and rents the first pooled builder.
        /// </summary>
        /// <param name="releaseOnWrite">If true (default), buffers are returned to the pool at the end of <see cref="WriteTo"/>; pass false to allow rendering more than once.</param>
        public ViewBufferTextWriterContent(bool releaseOnWrite = true)
        {
            _pooledBuilder = StringBuilderPool.GetInstance();
            _builder = _pooledBuilder.Builder;
            _releaseOnWrite = releaseOnWrite;
        }

        protected override void Dispose(bool disposing)
        {
            base.Dispose(disposing);
            ReleasePooledBuffer();
        }

        // Returns the current and all previous builders to the pool. Idempotent: a second call
        // is a no-op because _pooledBuilder is nulled out.
        private void ReleasePooledBuffer()
        {
            if (_pooledBuilder != null)
            {
                _pooledBuilder.Dispose();
                _pooledBuilder = null;
                _builder = null;

                if (_previousPooledBuilders != null)
                {
                    foreach (var pooledBuilder in _previousPooledBuilders)
                    {
                        pooledBuilder.Dispose();
                    }

                    _previousPooledBuilders.Clear();
                    _previousPooledBuilders = null;
                }
            }
        }

        // Archives the (full) current builder into _previousPooledBuilders and rents a fresh one,
        // which becomes the new append target.
        private StringBuilder AllocateBuilder()
        {
            _previousPooledBuilders ??= new List<StringBuilderPool>();
            _previousPooledBuilders.Add(_pooledBuilder);

            _pooledBuilder = StringBuilderPool.GetInstance();
            _builder = _pooledBuilder.Builder;

            return _builder;
        }

        // Invoked when used as TextWriter to intercept what is supposed to be written
        public override void Write(string value)
        {
            if (value == null || value.Length == 0)
            {
                return;
            }

            if (_builder.Length + value.Length <= _builder.Capacity)
            {
                // Fast path: the whole string fits in the current builder without growing it.
                _builder.Append(value);
            }
            else
            {
                // The string doesn't fit in the buffer, rent more
                // Copy in capacity-limited slices, chaining a new builder each time one fills up.
                var index = 0;
                do
                {
                    var sizeToCopy = Math.Min(_builder.Capacity - _builder.Length, value.Length - index);

                    _builder.Append(value.AsSpan(index, sizeToCopy));

                    if (_builder.Length == _builder.Capacity)
                    {
                        AllocateBuilder();
                    }

                    index += sizeToCopy;
                } while (index < value.Length);
            }
        }

        public override void Write(char value)
        {
            // Chain a new builder first if the current one is full, so Append never grows it.
            if (_builder.Length >= _builder.Capacity)
            {
                AllocateBuilder();
            }

            _builder.Append(value);
        }

        public override void Write(char[] buffer)
        {
            if (buffer == null || buffer.Length == 0)
            {
                return;
            }

            if (_builder.Length + buffer.Length <= _builder.Capacity)
            {
                // Fast path: fits entirely in the current builder.
                _builder.Append(buffer);
            }
            else
            {
                // The string doesn't fit in the buffer, rent more
                var index = 0;
                do
                {
                    var sizeToCopy = Math.Min(_builder.Capacity - _builder.Length, buffer.Length - index);

                    _builder.Append(buffer.AsSpan(index, sizeToCopy));

                    if (_builder.Length == _builder.Capacity)
                    {
                        AllocateBuilder();
                    }

                    index += sizeToCopy;
                } while (index < buffer.Length);
            }
        }

        public override void Write(char[] buffer, int offset, int count)
        {
            if (buffer == null || buffer.Length == 0 || count == 0)
            {
                return;
            }

            if (_builder.Length + count <= _builder.Capacity)
            {
                _builder.Append(buffer, offset, count);
            }
            else
            {
                // The string doesn't fit in the buffer, rent more
                // 'index' counts chars copied relative to 'offset'.
                var index = 0;
                do
                {
                    var sizeToCopy = Math.Min(_builder.Capacity - _builder.Length, count - index);

                    _builder.Append(buffer.AsSpan(index + offset, sizeToCopy));

                    if (_builder.Length == _builder.Capacity)
                    {
                        AllocateBuilder();
                    }

                    index += sizeToCopy;
                } while (index < count);
            }
        }

        public override void Write(ReadOnlySpan<char> buffer)
        {
            if (buffer.Length == 0)
            {
                return;
            }

            if (_builder.Length + buffer.Length <= _builder.Capacity)
            {
                _builder.Append(buffer);
            }
            else
            {
                // The string doesn't fit in the buffer, rent more
                var index = 0;
                do
                {
                    var sizeToCopy = Math.Min(_builder.Capacity - _builder.Length, buffer.Length - index);

                    _builder.Append(buffer.Slice(index, sizeToCopy));

                    if (_builder.Length == _builder.Capacity)
                    {
                        AllocateBuilder();
                    }

                    index += sizeToCopy;
                } while (index < buffer.Length);
            }
        }

        public override void Write(StringBuilder value)
        {
            // Copy chunk by chunk to avoid materializing the StringBuilder as one big string.
            if (value != null)
            {
                foreach (var chunk in value.GetChunks())
                {
                    if (!chunk.IsEmpty)
                    {
                        Write(chunk.Span);
                    }
                }
            }
        }

        /// <summary>
        /// Renders the accumulated content to <paramref name="writer"/>: previous (full) builders
        /// first, in order, then the current one. The <paramref name="encoder"/> parameter is not
        /// used — content is written out verbatim (presumably already encoded upstream).
        /// </summary>
        /// <exception cref="InvalidOperationException">Thrown if the buffers were already released by a prior render (when constructed with releaseOnWrite: true) or by Dispose.</exception>
        public void WriteTo(TextWriter writer, HtmlEncoder encoder)
        {
            if (_builder == null)
            {
                throw new InvalidOperationException("Buffer has already been rendered");
            }

            if (_previousPooledBuilders != null)
            {
                foreach (var pooledBuilder in _previousPooledBuilders)
                {
                    foreach (var chunk in pooledBuilder.Builder.GetChunks())
                    {
                        if (!chunk.IsEmpty)
                        {
                            writer.Write(chunk.Span);
                        }
                    }
                }
            }

            foreach (var chunk in _builder.GetChunks())
            {
                if (!chunk.IsEmpty)
                {
                    writer.Write(chunk.Span);
                }
            }

            if (_releaseOnWrite)
            {
                ReleasePooledBuffer();
            }
        }

        public override Task FlushAsync()
        {
            // Override since the base implementation does unnecessary work
            return Task.CompletedTask;
        }

        #region Async Methods
        // All writes are in-memory, so the async overloads complete synchronously by delegating
        // to their synchronous counterparts and returning a completed task.

        public override Task WriteAsync(string value)
        {
            Write(value);
            return Task.CompletedTask;
        }

        public override Task WriteAsync(char value)
        {
            Write(value);
            return Task.CompletedTask;
        }

        public override Task WriteAsync(char[] buffer, int index, int count)
        {
            Write(buffer, index, count);
            return Task.CompletedTask;
        }

        public override Task WriteAsync(ReadOnlyMemory<char> buffer, CancellationToken cancellationToken = default)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return Task.FromCanceled(cancellationToken);
            }

            Write(buffer.Span);
            return Task.CompletedTask;
        }

        public override Task WriteAsync(StringBuilder value, CancellationToken cancellationToken = default)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return Task.FromCanceled(cancellationToken);
            }

            Write(value);
            return Task.CompletedTask;
        }

        public override Task WriteLineAsync(char value)
        {
            WriteLine(value);
            return Task.CompletedTask;
        }

        public override Task WriteLineAsync(string value)
        {
            WriteLine(value);
            return Task.CompletedTask;
        }

        public override Task WriteLineAsync(char[] buffer, int index, int count)
        {
            WriteLine(buffer, index, count);
            return Task.CompletedTask;
        }

        public override Task WriteLineAsync(ReadOnlyMemory<char> buffer, CancellationToken cancellationToken = default)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return Task.FromCanceled(cancellationToken);
            }

            WriteLine(buffer);
            return Task.CompletedTask;
        }

        public override Task WriteLineAsync(StringBuilder value, CancellationToken cancellationToken = default)
        {
            if (cancellationToken.IsCancellationRequested)
            {
                return Task.FromCanceled(cancellationToken);
            }

            WriteLine(value);
            return Task.CompletedTask;
        }

        #endregion
    }
}
#region Copyright
//
// This framework is based on log4j see http://jakarta.apache.org/log4j
// Copyright (C) The Apache Software Foundation. All rights reserved.
//
// This software is published under the terms of the Apache Software
// License version 1.1, a copy of which has been included with this
// distribution in the LICENSE.txt file.
//
#endregion

using System;
using System.Collections;
using System.Globalization;
using System.Reflection;
using System.Xml;

using log4net.Appender;
using log4net.Layout;
using log4net.Filter;
using log4net.helpers;
using log4net.spi;
using log4net.ObjectRenderer;

namespace log4net.Repository.Hierarchy
{
    /// <summary>
    /// Initializes the log4net environment using a DOM tree.
    /// </summary>
    /// <remarks>
    /// Configures a <see cref="Hierarchy"/> using an XML DOM tree.
    /// </remarks>
    public class DOMHierarchyConfigurator
    {
        // Behaviours when re-applying configuration to a hierarchy:
        // Merge keeps existing state, Overwrite resets the hierarchy first.
        private enum ConfigUpdateMode {Merge, Overwrite};

        #region Public Instance Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="DOMHierarchyConfigurator" /> class
        /// with the specified <see cref="Hierarchy" />.
        /// </summary>
        /// <param name="hierarchy">The hierarchy to build.</param>
        public DOMHierarchyConfigurator(Hierarchy hierarchy)
        {
            m_hierarchy = hierarchy;
            m_appenderBag = new Hashtable();
        }

        #endregion Public Instance Constructors

        #region Public Instance Methods

        /// <summary>
        /// Configures the log4net framework by parsing a DOM tree of XML elements.
        /// </summary>
        /// <param name="element">The root element to parse; must be a
        /// &lt;log4net&gt; element or the call is ignored with an error log.</param>
        public void Configure(XmlElement element)
        {
            if (element == null || m_hierarchy == null)
            {
                return;
            }

            string rootElementName = element.LocalName;

            if (rootElementName != CONFIGURATION_TAG)
            {
                LogLog.Error("DOMConfigurator: DOM element is - not a <" + CONFIGURATION_TAG + "> element.");
                return;
            }

            if (!LogLog.InternalDebugging)
            {
                // Look for a debug attribute to enable internal debug
                string debugAttrib = element.GetAttribute(INTERNAL_DEBUG_ATTR);
                LogLog.Debug("DOMConfigurator: "+INTERNAL_DEBUG_ATTR+" attribute [" + debugAttrib + "].");

                if (debugAttrib.Length>0 && debugAttrib != "null")
                {
                    LogLog.InternalDebugging = OptionConverter.ToBoolean(debugAttrib, true);
                }
                else
                {
                    LogLog.Debug("DOMConfigurator: Ignoring " + INTERNAL_DEBUG_ATTR + " attribute.");
                }

                // "configDebug" is the deprecated spelling of the same switch.
                string confDebug = element.GetAttribute(CONFIG_DEBUG_ATTR);
                if (confDebug.Length>0 && confDebug != "null")
                {
                    LogLog.Warn("DOMConfigurator: The \"" + CONFIG_DEBUG_ATTR + "\" attribute is deprecated.");
                    LogLog.Warn("DOMConfigurator: Use the \"" + INTERNAL_DEBUG_ATTR + "\" attribute instead.");
                    LogLog.InternalDebugging = OptionConverter.ToBoolean(confDebug, true);
                }
            }

            // Default mode is merge
            ConfigUpdateMode configUpdateMode = ConfigUpdateMode.Merge;

            // Look for the config update attribute
            string configUpdateModeAttrib = element.GetAttribute(CONFIG_UPDATE_MODE_ATTR);
            if (configUpdateModeAttrib != null && configUpdateModeAttrib.Length > 0)
            {
                // Parse the attribute
                try
                {
                    configUpdateMode = (ConfigUpdateMode)OptionConverter.ConvertStringTo(typeof(ConfigUpdateMode), configUpdateModeAttrib);
                }
                catch
                {
                    // Invalid value: keep the Merge default.
                    LogLog.Error("DOMConfigurator: Invalid " + CONFIG_UPDATE_MODE_ATTR + " attribute value [" + configUpdateModeAttrib + "]");
                }
            }

#if (!NETCF)
            LogLog.Debug("DOMConfigurator: Configuration update mode [" + configUpdateMode.ToString(CultureInfo.InvariantCulture) + "].");
#else
            LogLog.Debug("DOMConfigurator: Configuration update mode [" + configUpdateMode.ToString() + "].");
#endif

            // Only reset configuration if overwrite flag specified
            if (configUpdateMode == ConfigUpdateMode.Overwrite)
            {
                // Reset to original unset configuration
                m_hierarchy.ResetConfiguration();
                LogLog.Debug("DOMConfigurator: Configuration reset before reading config.");
            }

            /* Building Appender objects, placing them in a local namespace
               for future reference */

            /* Process all the top level elements */
            foreach (XmlNode currentNode in element.ChildNodes)
            {
                if (currentNode.NodeType == XmlNodeType.Element)
                {
                    XmlElement currentElement = (XmlElement)currentNode;

                    if (currentElement.LocalName == LOGGER_TAG)
                    {
                        ParseLogger(currentElement);
                    }
                    else if (currentElement.LocalName == CATEGORY_TAG)
                    {
                        // TODO: deprecated use of category
                        ParseLogger(currentElement);
                    }
                    else if (currentElement.LocalName == ROOT_TAG)
                    {
                        ParseRoot(currentElement);
                    }
                    else if (currentElement.LocalName == RENDERER_TAG)
                    {
                        ParseRenderer(currentElement);
                    }
                    else if (currentElement.LocalName == APPENDER_TAG)
                    {
                        // We ignore appenders in this pass. They will
                        // be found and loaded if they are referenced.
                    }
                    else
                    {
                        // Read the param tags and set properties on the hierarchy
                        SetParameter(currentElement, m_hierarchy);
                    }
                }
            }

            // Lastly set the hierarchy threshold
            string thresholdStr = element.GetAttribute(THRESHOLD_ATTR);
            LogLog.Debug("DOMConfigurator: Hierarchy Threshold [" + thresholdStr + "]");
            if (thresholdStr.Length > 0 && thresholdStr != "null")
            {
                Level thresholdLevel = (Level) ConvertStringTo(typeof(Level), thresholdStr);
                if (thresholdLevel != null)
                {
                    m_hierarchy.Threshold = thresholdLevel;
                }
                else
                {
                    LogLog.Warn("DOMConfigurator: Unable to set hierarchy threshold using value [" + thresholdStr + "] (with acceptable conversion types)");
                }
            }

            // Done reading config
        }

        #endregion Public Instance Methods

        #region Protected Instance Methods

        /// <summary>
        /// Parse appenders by IDREF.
        /// </summary>
        /// <param name="appenderRef">The appender ref element.</param>
        /// <returns>The instance of the appender that the ref refers to,
        /// or <c>null</c> when no matching appender element exists or its
        /// creation failed.</returns>
        protected IAppender FindAppenderByReference(XmlElement appenderRef)
        {
            string appenderName = appenderRef.GetAttribute(REF_ATTR);

            // Appenders are cached by name in m_appenderBag so each named
            // appender is only constructed once, no matter how often referenced.
            IAppender appender = (IAppender)m_appenderBag[appenderName];
            if (appender != null)
            {
                return appender;
            }
            else
            {
                // Find the element with that id
                XmlElement element = null;

                if (appenderName != null && appenderName.Length > 0)
                {
                    foreach (XmlNode node in appenderRef.OwnerDocument.GetElementsByTagName(APPENDER_TAG))
                    {
                        if (((XmlElement)node).GetAttribute("name") == appenderName)
                        {
                            element = (XmlElement)node;
                            break;
                        }
                    }
                }

                if (element == null)
                {
                    LogLog.Error("DOMConfigurator: No appender named [" + appenderName + "] could be found.");
                    return null;
                }
                else
                {
                    appender = ParseAppender(element);
                    if (appender != null)
                    {
                        m_appenderBag[appenderName] = appender;
                    }
                    return appender;
                }
            }
        }

        /// <summary>
        /// Parses an appender element.
        /// </summary>
        /// <param name="appenderElement">The appender element.</param>
        /// <returns>The appender instance or <c>null</c> when parsing failed.</returns>
        protected IAppender ParseAppender(XmlElement appenderElement)
        {
            string appenderName = appenderElement.GetAttribute(NAME_ATTR);
            string typeName = appenderElement.GetAttribute(TYPE_ATTR);

            LogLog.Debug("DOMConfigurator: Loading Appender [" + appenderName + "] type: [" + typeName + "]");
            try
            {
                // Instantiate the appender via its public default constructor.
                IAppender appender = (IAppender)SystemInfo.GetTypeFromString(typeName, true, true).GetConstructor(SystemInfo.EmptyTypes).Invoke(BindingFlags.Public | BindingFlags.Instance, null, new object[0], CultureInfo.InvariantCulture);
                appender.Name = appenderName;

                foreach (XmlNode currentNode in appenderElement.ChildNodes)
                {
                    /* We're only interested in Elements */
                    if (currentNode.NodeType == XmlNodeType.Element)
                    {
                        XmlElement currentElement = (XmlElement)currentNode;

                        // Look for the appender ref tag
                        if (currentElement.LocalName == APPENDER_REF_TAG)
                        {
                            string refName = currentElement.GetAttribute(REF_ATTR);
                            if (appender is IAppenderAttachable)
                            {
                                IAppenderAttachable aa = (IAppenderAttachable) appender;
                                LogLog.Debug("DOMConfigurator: Attaching appender named [" + refName + "] to appender named [" + appender.Name + "].");

                                IAppender a = FindAppenderByReference(currentElement);
                                if (a != null)
                                {
                                    aa.AddAppender(a);
                                }
                            }
                            else
                            {
                                LogLog.Error("DOMConfigurator: Requesting attachment of appender named ["+refName+ "] to appender named [" + appender.Name + "] which does not implement log4net.spi.IAppenderAttachable.");
                            }
                        }
                        else
                        {
                            // For all other tags we use standard set param method
                            SetParameter(currentElement, appender);
                        }
                    }
                }

                // Let the appender finalize its configuration, if it supports it.
                if (appender is IOptionHandler)
                {
                    ((IOptionHandler) appender).ActivateOptions();
                }

                LogLog.Debug("DOMConfigurator: Created Appender [" + appenderName + "]");
                return appender;
            }
            /* Yes, it's ugly. But all of these exceptions point to the same
               problem: we can't create an Appender */
            catch (Exception oops)
            {
                LogLog.Error("DOMConfigurator: Could not create Appender [" + appenderName + "] of type [" + typeName + "]. Reported error follows.", oops);
                return null;
            }
        }

        /// <summary>
        /// Parses a logger element.
        /// </summary>
        /// <param name="loggerElement">The logger element.</param>
        protected void ParseLogger(XmlElement loggerElement)
        {
            // Create a new log4net.Logger object from the <logger> element.
            string loggerName = loggerElement.GetAttribute(NAME_ATTR);

            LogLog.Debug("DOMConfigurator: Retrieving an instance of log4net.Repository.Logger for logger [" + loggerName + "].");
            // NOTE(review): `as Logger` yields null if GetLogger returns a
            // different ILogger implementation; the lock below would then throw
            // NullReferenceException — confirm GetLogger always returns Logger here.
            Logger log = m_hierarchy.GetLogger(loggerName) as Logger;

            // Setting up a logger needs to be an atomic operation, in order
            // to protect potential log operations while logger
            // configuration is in progress.
            lock(log)
            {
                bool additivity = OptionConverter.ToBoolean(loggerElement.GetAttribute(ADDITIVITY_ATTR), true);

                LogLog.Debug("DOMConfigurator: Setting [" + log.Name + "] additivity to [" + additivity + "].");
                log.Additivity = additivity;
                ParseChildrenOfLoggerElement(loggerElement, log, false);
            }
        }

        /// <summary>
        /// Parses the root logger element.
        /// </summary>
        /// <param name="rootElement">The root element.</param>
        protected void ParseRoot(XmlElement rootElement)
        {
            Logger root = m_hierarchy.Root;
            // logger configuration needs to be atomic
            lock(root)
            {
                ParseChildrenOfLoggerElement(rootElement, root, true);
            }
        }

        /// <summary>
        /// Parses the children of a logger element.
        /// </summary>
        /// <param name="catElement">The category element.</param>
        /// <param name="log">The logger instance.</param>
        /// <param name="isRoot">Flag to indicate if the logger is the root logger.</param>
        protected void ParseChildrenOfLoggerElement(XmlElement catElement, Logger log, bool isRoot)
        {
            // Remove all existing appenders from log. They will be
            // reconstructed if need be.
            log.RemoveAllAppenders();

            foreach (XmlNode currentNode in catElement.ChildNodes)
            {
                if (currentNode.NodeType == XmlNodeType.Element)
                {
                    XmlElement currentElement = (XmlElement) currentNode;

                    if (currentElement.LocalName == APPENDER_REF_TAG)
                    {
                        IAppender appender = FindAppenderByReference(currentElement);
                        string refName = currentElement.GetAttribute(REF_ATTR);
                        if (appender != null)
                        {
                            LogLog.Debug("DOMConfigurator: Adding appender named [" + refName + "] to logger [" + log.Name + "].");
                            log.AddAppender(appender);
                        }
                        else
                        {
                            LogLog.Error("DOMConfigurator: Appender named [" + refName + "] not found.");
                        }
                    }
                    else if (currentElement.LocalName == LEVEL_TAG || currentElement.LocalName == PRIORITY_TAG)
                    {
                        // "priority" is the deprecated synonym for "level".
                        ParseLevel(currentElement, log, isRoot);
                    }
                    else
                    {
                        SetParameter(currentElement, log);
                    }
                }
            }

            // Let the logger finalize its configuration, if it supports it.
            if (log is IOptionHandler)
            {
                ((IOptionHandler) log).ActivateOptions();
            }
        }

        /// <summary>
        /// Parses an object renderer.
        /// </summary>
        /// <param name="element">The renderer element.</param>
        protected void ParseRenderer(XmlElement element)
        {
            string renderingClassName = element.GetAttribute(RENDERING_TYPE_ATTR);
            string renderedClassName = element.GetAttribute(RENDERED_TYPE_ATTR);

            LogLog.Debug("DOMConfigurator: Rendering class [" + renderingClassName + "], Rendered class [" + renderedClassName + "].");
            IObjectRenderer renderer = (IObjectRenderer)OptionConverter.InstantiateByClassName(renderingClassName, typeof(IObjectRenderer), null);
            if (renderer == null)
            {
                LogLog.Error("DOMConfigurator: Could not instantiate renderer [" + renderingClassName + "].");
                return;
            }
            else
            {
                try
                {
                    // Map the rendered type to its renderer in the hierarchy map.
                    m_hierarchy.RendererMap.Put(SystemInfo.GetTypeFromString(renderedClassName, true, true), renderer);
                }
                catch(Exception e)
                {
                    LogLog.Error("DOMConfigurator: Could not find class [" + renderedClassName + "].", e);
                }
            }
        }

        /// <summary>
        /// Parses a level element.
        /// </summary>
        /// <param name="element">The level element.</param>
        /// <param name="log">The logger object to set the level on.</param>
        /// <param name="isRoot">Flag to indicate if the logger is the root logger.</param>
        protected void ParseLevel(XmlElement element, Logger log, bool isRoot)
        {
            string catName = log.Name;
            if (isRoot)
            {
                catName = "root";
            }

            string priStr = element.GetAttribute(VALUE_ATTR);
            LogLog.Debug("DOMConfigurator: Logger [" + catName + "] Level string is [" + priStr + "].");

            if (INHERITED == priStr)
            {
                if (isRoot)
                {
                    LogLog.Error("DOMConfigurator: Root level cannot be inherited. Ignoring directive.");
                }
                else
                {
                    // A null level means the effective level is inherited
                    // from the parent logger.
                    log.Level = null;
                }
            }
            else
            {
                log.Level = log.Hierarchy.LevelMap[priStr];
                if (log.Level == null)
                {
                    LogLog.Error("DOMConfigurator: Undefined level [" + priStr + "] on Logger [" + log.Name + "].");
                }
            }

            // NOTE(review): log.Level can be null at this point (inherited level
            // or undefined level name, per the branches above); the Debug call
            // below then throws NullReferenceException — confirm and guard.
            LogLog.Debug("DOMConfigurator: Logger [" + catName + "] level set to [name=\"" + log.Level.Name + "\",value=" + log.Level.Value + "].");
        }

        /// <summary>
        /// Sets a paramater on an object.
        /// </summary>
        /// <remarks>
        /// The parameter name must correspond to a writable property
        /// on the object. The value of the parameter is a string,
        /// therefore this function will attempt to set a string
        /// property first. If unable to set a string property it
        /// will inspect the property and its argument type. It will
        /// attempt to call a static method called 'Parse' on the
        /// type of the property. This method will take a single
        /// string argument and return a value that can be used to
        /// set the property.
        /// </remarks>
        /// <param name="element">The parameter element.</param>
        /// <param name="target">The object to set the parameter on.</param>
        protected void SetParameter(XmlElement element, object target)
        {
            // Get the property name
            string name = element.GetAttribute(NAME_ATTR);

            // If the name attribute does not exist then use the name of the element
            if (element.LocalName != PARAM_TAG || name == null || name.Length == 0)
            {
                name = element.LocalName;
            }

            // Look for the property on the target object
            Type targetType = target.GetType();
            Type propertyType = null;

            PropertyInfo propInfo = null;
            MethodInfo methInfo = null;

            // Try to find a writable property
            propInfo = targetType.GetProperty(name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.IgnoreCase);
            if (propInfo != null && propInfo.CanWrite)
            {
                // found a property
                propertyType = propInfo.PropertyType;
            }
            else
            {
                propInfo = null;

                // look for a method with the signature Add<property>(type)
                methInfo = targetType.GetMethod("Add" + name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.IgnoreCase);
                if (methInfo != null && methInfo.IsPublic && !methInfo.IsStatic)
                {
                    System.Reflection.ParameterInfo[] methParams = methInfo.GetParameters();
                    if (methParams.Length == 1)
                    {
                        propertyType = methParams[0].ParameterType;
                    }
                    else
                    {
                        methInfo = null;
                    }
                }
                else
                {
                    methInfo = null;
                }
            }

            if (propertyType == null)
            {
                LogLog.Error("DOMConfigurator: Cannot find Property [" + name + "] to set object on [" + target.ToString() + "]");
            }
            else
            {
                if (element.GetAttributeNode(VALUE_ATTR) != null)
                {
                    string propertyValue = element.GetAttribute(VALUE_ATTR);

                    // Fixup embedded non-printable chars
                    propertyValue = OptionConverter.ConvertSpecialChars(propertyValue);

#if !NETCF
                    try
                    {
                        // Expand environment variables in the string.
                        propertyValue = OptionConverter.SubstVars(propertyValue, Environment.GetEnvironmentVariables());
                    }
                    catch(System.Security.SecurityException)
                    {
                        // This security exception will occur if the caller does not have
                        // unrestricted environment permission. If this occurs the expansion
                        // will be skipped with the following warning message.
                        LogLog.Debug("DOMConfigurator: Security exception while trying to expand environment variables. Error Ignored. No Expansion.");
                    }
#endif

                    // Now try to convert the string value to an acceptable type
                    // to pass to this property.
                    object convertedValue = ConvertStringTo(propertyType, propertyValue);
                    if (convertedValue != null)
                    {
                        if (propInfo != null)
                        {
                            // Got a converted result
                            LogLog.Debug("DOMConfigurator: Setting Property [" + propInfo.Name + "] to " + convertedValue.GetType().Name + " value [" + convertedValue.ToString() + "]");

                            // Pass to the property
                            propInfo.SetValue(target, convertedValue, BindingFlags.SetProperty, null, null, CultureInfo.InvariantCulture);
                        }
                        else if (methInfo != null)
                        {
                            // Got a converted result
                            LogLog.Debug("DOMConfigurator: Setting Collection Property [" + methInfo.Name + "] to " + convertedValue.GetType().Name + " value [" + convertedValue.ToString() + "]");

                            // Pass to the property
                            methInfo.Invoke(target, BindingFlags.InvokeMethod, null, new object[] {convertedValue}, CultureInfo.InvariantCulture);
                        }
                    }
                    else
                    {
                        LogLog.Warn("DOMConfigurator: Unable to set property [" + name + "] on object [" + target + "] using value [" + propertyValue + "] (with acceptable conversion types)");
                    }
                }
                else
                {
                    // No value specified
                    Type defaultObjectType = null;
                    if (propertyType.IsClass && !propertyType.IsAbstract)
                    {
                        defaultObjectType = propertyType;
                    }

                    // Build the value object from the nested XML instead.
                    object createdObject = CreateObjectFromXml(element, defaultObjectType, propertyType);

                    if (createdObject == null)
                    {
                        LogLog.Error("DOMConfigurator: Failed to create object to set param: "+name);
                    }
                    else
                    {
                        if (propInfo != null)
                        {
                            // Got a converted result
                            LogLog.Debug("DOMConfigurator: Setting Property ["+ propInfo.Name +"] to object ["+ createdObject +"]");

                            // Pass to the property
                            propInfo.SetValue(target, createdObject, BindingFlags.SetProperty, null, null, CultureInfo.InvariantCulture);
                        }
                        else if (methInfo != null)
                        {
                            // Got a converted result
                            LogLog.Debug("DOMConfigurator: Setting Collection Property ["+ methInfo.Name +"] to object ["+ createdObject +"]");

                            // Pass to the property
                            methInfo.Invoke(target, BindingFlags.InvokeMethod, null, new object[] {createdObject}, CultureInfo.InvariantCulture);
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Converts a string value to a target type.
        /// </summary>
        /// <param name="type">The type of object to convert the string to.</param>
        /// <param name="value">The string value to use as the value of the object.</param>
        /// <returns>
        /// An object of type <paramref name="type"/> with value <paramref name="value"/> or
        /// <c>null</c> when the conversion could not be performed.
        /// </returns>
        protected object ConvertStringTo(Type type, string value)
        {
            // Hack to allow use of Level in property
            if (type.IsAssignableFrom(typeof(Level)))
            {
                // Property wants a level
                return m_hierarchy.LevelMap[value];
            }
            return OptionConverter.ConvertStringTo(type, value);
        }

        /// <summary>
        /// Creates an object as specified in XML.
        /// </summary>
        /// <param name="element">The XML element that contains the definition of the object.</param>
        /// <param name="defaultTargetType">The object type to use if not explicitly specified.</param>
        /// <param name="typeConstraint">The type that the returned object must be or must inherit from.</param>
        /// <returns>The object or <c>null</c></returns>
        protected object CreateObjectFromXml(XmlElement element, Type defaultTargetType, Type typeConstraint)
        {
            Type objectType = null;

            // Get the object type
            string objectTypeString = element.GetAttribute(TYPE_ATTR);
            if (objectTypeString == null || objectTypeString.Length == 0)
            {
                if (defaultTargetType == null)
                {
                    LogLog.Error("DOMConfigurator: Object type not specified. Cannot create object.");
                    return null;
                }
                else
                {
                    // Use the default object type
                    objectType = defaultTargetType;
                }
            }
            else
            {
                // Read the explicit object type
                try
                {
                    objectType = SystemInfo.GetTypeFromString(objectTypeString, true, true);
                }
                catch(Exception ex)
                {
                    LogLog.Error("DOMConfigurator: Failed to find type ["+objectTypeString+"]", ex);
                    return null;
                }
            }

            bool requiresConversion = false;

            // Got the object type. Check that it meets the typeConstraint
            if (typeConstraint != null)
            {
                if (!typeConstraint.IsAssignableFrom(objectType))
                {
                    // Check if there is an appropriate type converter
                    if (OptionConverter.CanConvertTypeTo(objectType, typeConstraint))
                    {
                        requiresConversion = true;
                    }
                    else
                    {
                        LogLog.Error("DOMConfigurator: Object type ["+objectType.FullName+"] is not assignable to type ["+typeConstraint.FullName+"]. There are no acceptable type convertions.");
                        return null;
                    }
                }
            }

            // Look for the default constructor
            ConstructorInfo constInfo = objectType.GetConstructor(SystemInfo.EmptyTypes);
            if (constInfo == null)
            {
                LogLog.Error("DOMConfigurator: Failed to find default constructor for type [" + objectType.FullName + "]");
                return null;
            }

            // Call the constructor
            object createdObject = constInfo.Invoke(BindingFlags.Public | BindingFlags.Instance, null, new object[0], CultureInfo.InvariantCulture);

            // Set any params on object
            foreach (XmlNode currentNode in element.ChildNodes)
            {
                if (currentNode.NodeType == XmlNodeType.Element)
                {
                    SetParameter((XmlElement)currentNode, createdObject);
                }
            }

            // Check if we need to call ActivateOptions
            if (createdObject is IOptionHandler)
            {
                ((IOptionHandler) createdObject).ActivateOptions();
            }

            // Ok object should be initialised
            if (requiresConversion)
            {
                // Convert the object type
                return OptionConverter.ConvertTypeTo(createdObject, typeConstraint);
            }
            else
            {
                // The object is of the correct type
                return createdObject;
            }
        }

        #endregion Protected Instance Methods

        #region Private Static Fields

        // String constants used while parsing the XML data
        private const string CONFIGURATION_TAG = "log4net";
        private const string RENDERER_TAG = "renderer";
        private const string APPENDER_TAG = "appender";
        private const string APPENDER_REF_TAG = "appender-ref";
        private const string PARAM_TAG = "param";

        // TODO: Deprecate use of category tags
        private const string CATEGORY_TAG = "category";
        // TODO: Deprecate use of priority tag
        private const string PRIORITY_TAG = "priority";

        private const string LOGGER_TAG = "logger";
        private const string NAME_ATTR = "name";
        private const string TYPE_ATTR = "type";
        private const string VALUE_ATTR = "value";
        private const string ROOT_TAG = "root";
        private const string LEVEL_TAG = "level";
        private const string REF_ATTR = "ref";
        private const string ADDITIVITY_ATTR = "additivity";
        private const string THRESHOLD_ATTR = "threshold";
        private const string CONFIG_DEBUG_ATTR = "configDebug";
        private const string INTERNAL_DEBUG_ATTR = "debug";
        private const string CONFIG_UPDATE_MODE_ATTR = "update";
        private const string RENDERING_TYPE_ATTR = "renderingClass";
        private const string RENDERED_TYPE_ATTR = "renderedClass";

        // flag used on the level element
        private const string INHERITED = "inherited";

        #endregion Private Static Fields

        #region Private Instance Fields

        /// <summary>
        /// key: appenderName, value: appender.
        /// </summary>
        private Hashtable m_appenderBag;

        /// <summary>
        /// The Hierarchy being configured.
        /// </summary>
        private readonly Hierarchy m_hierarchy;

        #endregion Private Instance Fields
    }
}
/********************************************************************++
Copyright (c) Microsoft Corporation.  All rights reserved.
--********************************************************************/

using System;
using System.IO;
using System.Security;
using System.Security.Cryptography;
using System.Runtime.InteropServices;
using System.Globalization;
using System.Management.Automation;
using System.Management.Automation.Internal;
using System.Text;

namespace Microsoft.PowerShell
{
    /// <summary>
    /// helper class for secure string related functionality
    /// </summary>
    internal static class SecureStringHelper
    {
        // Some random hex characters to identify the beginning of a
        // V2-exported SecureString.
        internal static string SecureStringExportHeader = "76492d1116743f0423413b16050a5345";

        /// <summary>
        /// Create a new SecureString based on the specified binary data.
        ///
        /// The binary data must be byte[] version of unicode char[],
        /// otherwise the results are unpredictable.
        /// </summary>
        /// <param name="data"> input data; its contents are zeroed out as a
        /// side effect so the plaintext does not linger in memory </param>
        /// <returns> a SecureString </returns>
        /// <exception cref="PSArgumentException">when data has an odd length</exception>
        private static SecureString New(byte[] data)
        {
            if ((data.Length % 2) != 0)
            {
                // If the data is not an even length, they supplied an invalid key
                String error = Serialization.InvalidKey;
                throw new PSArgumentException(error);
            }

            char ch;
            SecureString ss = new SecureString();

            //
            // each unicode char is 2 bytes.
            //
            int len = data.Length / 2;

            for (int i = 0; i < len; i++)
            {
                // little-endian: low byte first, high byte second
                ch = (char)(data[2 * i + 1] * 256 + data[2 * i]);
                ss.AppendChar(ch);

                //
                // zero out the data slots as soon as we use them
                //
                data[2 * i] = 0;
                data[2 * i + 1] = 0;
            }

            return ss;
        }

        /// <summary>
        /// get the contents of a SecureString as byte[]
        /// </summary>
        /// <param name="s"> input string </param>
        /// <returns> contents of s (char[]) converted to byte[];
        /// the caller is responsible for zeroing the returned array </returns>
        [ArchitectureSensitive]
        internal static byte[] GetData(SecureString s)
        {
            //
            // each unicode char is 2 bytes.
            //
            byte[] data = new byte[s.Length * 2];

            if (s.Length > 0)
            {
                IntPtr ptr = ClrFacade.SecureStringToCoTaskMemUnicode(s);

                try
                {
                    Marshal.Copy(ptr, data, 0, data.Length);
                }
                finally
                {
                    // Always zero and free the unmanaged copy of the plaintext.
                    Marshal.ZeroFreeCoTaskMemUnicode(ptr);
                }
            }

            return data;
        }

        /// <summary>
        /// Encode the specified byte[] as a unicode string.
        ///
        /// Currently we use simple hex encoding but this
        /// method can be changed to use a better encoding
        /// such as base64.
        /// </summary>
        /// <param name="data"> binary data to encode </param>
        /// <returns> a string representing encoded data </returns>
        internal static string ByteArrayToString(byte[] data)
        {
            StringBuilder sb = new StringBuilder();

            // two lowercase hex chars per byte, culture-invariant
            for (int i = 0; i < data.Length; i++)
            {
                sb.Append(data[i].ToString("x2", System.Globalization.CultureInfo.InvariantCulture));
            }

            return sb.ToString();
        }

        /// <summary>
        /// Convert a string obtained using ByteArrayToString()
        /// back to byte[] format.
        /// </summary>
        /// <param name="s"> encoded input string </param>
        /// <returns> bin data as byte[] </returns>
        internal static byte[] ByteArrayFromString(string s)
        {
            //
            // two hex chars per byte
            //
            int dataLen = s.Length / 2;
            byte[] data = new byte[dataLen];

            if (s.Length > 0)
            {
                for (int i = 0; i < dataLen; i++)
                {
                    data[i] = byte.Parse(s.Substring(2 * i, 2), NumberStyles.AllowHexSpecifier, System.Globalization.CultureInfo.InvariantCulture);
                }
            }

            return data;
        }

        /// <summary>
        /// return contents of the SecureString after encrypting
        /// using DPAPI and encoding the encrypted blob as a string
        /// </summary>
        /// <param name="input"> SecureString to protect </param>
        /// <returns> a string (see summary) </returns>
        internal static string Protect(SecureString input)
        {
            Utils.CheckSecureStringArg(input, "input");

            string output = "";
            byte[] data = null;
            byte[] protectedData = null;

            data = GetData(input);
            protectedData = ProtectedData.Protect(data, null, DataProtectionScope.CurrentUser);
            // Zero the clear-text copy as soon as the protected blob exists.
            for (int i = 0; i < data.Length; i++)
            {
                data[i] = 0;
            }

            output = ByteArrayToString(protectedData);

            return output;
        }

        /// <summary>
        /// Decrypts the specified string using DPAPI and return
        /// equivalent SecureString.
        ///
        /// The string must be obtained earlier by a call to Protect()
        /// </summary>
        /// <param name="input"> encrypted string </param>
        /// <returns> SecureString </returns>
        internal static SecureString Unprotect(string input)
        {
            Utils.CheckArgForNullOrEmpty(input, "input");
            if ((input.Length % 2) != 0)
            {
                // hex encoding always yields an even number of characters
                throw PSTraceSource.NewArgumentException("input", Serialization.InvalidEncryptedString, input);
            }

            byte[] data = null;
            byte[] protectedData = null;
            SecureString s;

            protectedData = ByteArrayFromString(input);

            data = ProtectedData.Unprotect(protectedData, null, DataProtectionScope.CurrentUser);

            s = New(data);

            return s;
        }

        /// <summary>
        /// return contents of the SecureString after encrypting
        /// using the specified key and encoding the encrypted blob as a string
        /// </summary>
        /// <param name="input"> input string to encrypt </param>
        /// <param name="key"> encryption key </param>
        /// <returns> a string (see summary) </returns>
        /// <remarks> the clear-text copy of the key is zeroed before returning </remarks>
        internal static EncryptionResult Encrypt(SecureString input, SecureString key)
        {
            EncryptionResult output = null;

            //
            // get clear text key from the SecureString key
            //
            byte[] keyBlob = GetData(key);

            //
            // encrypt the data
            //
            output = Encrypt(input, keyBlob);

            //
            // clear the clear text key
            //
            Array.Clear(keyBlob, 0, keyBlob.Length);

            return output;
        }

        /// <summary>
        /// return contents of the SecureString after encrypting
        /// using the specified key and encoding the encrypted blob as a string
        /// </summary>
        /// <param name="input"> input string to encrypt </param>
        /// <param name="key"> encryption key </param>
        /// <returns> a string (see summary) </returns>
        /// <remarks> a random IV is generated (see the three-argument overload) </remarks>
        internal static EncryptionResult Encrypt(SecureString input, byte[] key)
        {
            return Encrypt(input, key, null);
        }

        // Core AES encryption: encrypts the SecureString contents with the
        // given key and IV (random IV when null) and returns hex ciphertext
        // plus the base64 IV. NOTE(review): the Aes instance is not disposed.
        internal static EncryptionResult Encrypt(SecureString input, byte[] key, byte[] iv)
        {
            Utils.CheckSecureStringArg(input, "input");
            Utils.CheckKeyArg(key, "key");

            byte[] encryptedData = null;
            MemoryStream ms = null;
            ICryptoTransform encryptor = null;
            CryptoStream cs = null;

            //
            // prepare the crypto stuff. Initialization Vector is
            // randomized by default.
            //
            Aes aes = Aes.Create();
            if (iv == null)
                iv = aes.IV;

            encryptor = aes.CreateEncryptor(key, iv);

            ms = new MemoryStream();

            using (cs = new CryptoStream(ms, encryptor, CryptoStreamMode.Write))
            {
                //
                // get clear text data from the input SecureString
                //
                byte[] data = GetData(input);

                //
                // encrypt it
                //
                cs.Write(data, 0, data.Length);
                cs.FlushFinalBlock();

                //
                // clear the clear text data array
                //
                Array.Clear(data, 0, data.Length);

                //
                // convert the encrypted blob to a string
                //
                encryptedData = ms.ToArray();

                EncryptionResult output = new EncryptionResult(ByteArrayToString(encryptedData), Convert.ToBase64String(iv));

                return output;
            }
        }

        /// <summary>
        /// Decrypts the specified string using the specified key
        /// and return equivalent SecureString.
        ///
        /// The string must be obtained earlier by a call to Encrypt()
        /// </summary>
        /// <param name="input"> encrypted string </param>
        /// <param name="key"> encryption key </param>
        /// <param name="IV"> encryption initialization vector. If this is set to null, the method uses internally computed strong random number as IV </param>
        /// <returns> SecureString </returns>
        internal static SecureString Decrypt(string input, SecureString key, byte[] IV)
        {
            SecureString output = null;

            //
            // get clear text key from the SecureString key
            //
            byte[] keyBlob = GetData(key);

            //
            // decrypt the data
            //
            output = Decrypt(input, keyBlob, IV);

            //
            // clear the clear text key
            //
            Array.Clear(keyBlob, 0, keyBlob.Length);

            return output;
        }

        /// <summary>
        /// Decrypts the specified string using the specified key
        /// and return equivalent SecureString.
        ///
        /// The string must be obtained earlier by a call to Encrypt()
        /// </summary>
        /// <param name="input"> encrypted string </param>
        /// <param name="key"> encryption key </param>
        /// <param name="IV"> encryption initialization vector. If this is set to null, the method uses internally computed strong random number as IV </param>
        /// <returns> SecureString </returns>
        internal static SecureString Decrypt(string input, byte[] key, byte[] IV)
        {
            Utils.CheckArgForNullOrEmpty(input, "input");
            Utils.CheckKeyArg(key, "key");

            byte[] decryptedData = null;
            byte[] encryptedData = null;
            SecureString s = null;

            //
            // prepare the crypto stuff
            //
            // NOTE(review): when IV is null, aes.IV below is a freshly
            // generated random IV, so decryption cannot recover the original
            // plaintext unless the caller supplies the IV used at encryption
            // time — confirm callers always pass a non-null IV. The Aes
            // instance is also not disposed.
            Aes aes = Aes.Create();

            encryptedData = ByteArrayFromString(input);

            var decryptor = aes.CreateDecryptor(key, IV ?? aes.IV);

            MemoryStream ms = new MemoryStream(encryptedData);

            using (CryptoStream cs = new CryptoStream(ms, decryptor, CryptoStreamMode.Read))
            {
                byte[] tempDecryptedData = new byte[encryptedData.Length];

                int numBytesRead = 0;

                //
                // decrypt the data
                //
                // NOTE(review): Stream.Read may return fewer bytes than
                // requested even when more are available; a read loop (or
                // CopyTo) would be safer — confirm against CryptoStream's
                // Read behavior for this usage.
                numBytesRead = cs.Read(tempDecryptedData, 0, tempDecryptedData.Length);

                decryptedData = new byte[numBytesRead];

                for (int i = 0; i < numBytesRead; i++)
                {
                    decryptedData[i] = tempDecryptedData[i];
                }

                s = New(decryptedData);
                // Zero both clear-text buffers before returning.
                Array.Clear(decryptedData, 0, decryptedData.Length);
                Array.Clear(tempDecryptedData, 0, tempDecryptedData.Length);

                return s;
            }
        }
    }

    /// <summary>
    /// Helper class to return encryption results, and the IV used to
    /// do the encryption
    /// </summary>
    internal class EncryptionResult
    {
        internal EncryptionResult(string encrypted, string IV)
        {
            EncryptedData = encrypted;
            this.IV = IV;
        }

        /// <summary>
        /// Gets the encrypted data
        /// </summary>
        internal String EncryptedData { get; }

        /// <summary>
        /// Gets the IV used to encrypt the data
        /// </summary>
        internal String IV { get; }
    }

#if CORECLR

    // The DPAPIs implemented in this section are temporary workaround.
    // CoreCLR team will bring 'ProtectedData' type to Project K eventually.
    #region DPAPI

    // Scope selector for DPAPI protection: tie the blob to the current user
    // profile or to the local machine.
    internal enum DataProtectionScope
    {
        CurrentUser = 0x00,
        LocalMachine = 0x01
    }

    // Minimal managed wrapper over Win32 CryptProtectData/CryptUnprotectData,
    // standing in for System.Security.Cryptography.ProtectedData on CoreCLR.
    internal static class ProtectedData
    {
        /// <summary>
        /// Protect
        /// </summary>
        public static byte[] Protect(byte[] userData, byte[] optionalEntropy, DataProtectionScope scope)
        {
            if (userData == null)
                throw new ArgumentNullException("userData");

            // Handles start unallocated; freed in finally only if pinned.
            GCHandle pbDataIn = new GCHandle();
            GCHandle pOptionalEntropy = new GCHandle();
            CAPI.CRYPTOAPI_BLOB blob = new CAPI.CRYPTOAPI_BLOB();

            try
            {
                // Pin the input so native code can read it in place.
                pbDataIn = GCHandle.Alloc(userData, GCHandleType.Pinned);
                CAPI.CRYPTOAPI_BLOB dataIn = new CAPI.CRYPTOAPI_BLOB();
                dataIn.cbData = (uint)userData.Length;
                dataIn.pbData = pbDataIn.AddrOfPinnedObject();
                CAPI.CRYPTOAPI_BLOB entropy = new CAPI.CRYPTOAPI_BLOB();
                if (optionalEntropy != null)
                {
                    // Pin the optional entropy too.
                    pOptionalEntropy = GCHandle.Alloc(optionalEntropy, GCHandleType.Pinned);
                    entropy.cbData = (uint)optionalEntropy.Length;
                    entropy.pbData = pOptionalEntropy.AddrOfPinnedObject();
                }
                uint dwFlags = CAPI.CRYPTPROTECT_UI_FORBIDDEN;
                if (scope == DataProtectionScope.LocalMachine)
                    dwFlags |= CAPI.CRYPTPROTECT_LOCAL_MACHINE;
                unsafe
                {
                    if (!CAPI.CryptProtectData(new IntPtr(&dataIn),
                                                String.Empty,
                                                new IntPtr(&entropy),
                                                IntPtr.Zero,
                                                IntPtr.Zero,
                                                dwFlags,
                                                new IntPtr(&blob)))
                    {
                        int lastWin32Error = Marshal.GetLastWin32Error();

                        // One of the most common reasons that DPAPI operations fail is that the user
                        // profile is not loaded (for instance in the case of impersonation or running in a
                        // service.  In those cases, throw an exception that provides more specific details
                        // about what happened.
                        if (CAPI.ErrorMayBeCausedByUnloadedProfile(lastWin32Error))
                        {
                            throw new CryptographicException("Cryptography_DpApi_ProfileMayNotBeLoaded");
                        }
                        else
                        {
                            throw new CryptographicException(lastWin32Error);
                        }
                    }
                }

                // In some cases, the API would fail due to OOM but simply return a null pointer.
                if (blob.pbData == IntPtr.Zero)
                    throw new OutOfMemoryException();

                // Copy the native output blob into a managed array.
                byte[] encryptedData = new byte[(int)blob.cbData];
                Marshal.Copy(blob.pbData, encryptedData, 0, encryptedData.Length);

                return encryptedData;
            }
            finally
            {
                // Unpin inputs and zero+free the native output buffer.
                if (pbDataIn.IsAllocated)
                    pbDataIn.Free();
                if (pOptionalEntropy.IsAllocated)
                    pOptionalEntropy.Free();
                if (blob.pbData != IntPtr.Zero)
                {
                    CAPI.ZeroMemory(blob.pbData, blob.cbData);
                    CAPI.LocalFree(blob.pbData);
                }
            }
        }

        /// <summary>
        /// Unprotect
        /// </summary>
        public static byte[] Unprotect(byte[] encryptedData, byte[] optionalEntropy, DataProtectionScope scope)
        {
            if (encryptedData == null)
                throw new ArgumentNullException("encryptedData");

            // Handles start unallocated; freed in finally only if pinned.
            GCHandle pbDataIn = new GCHandle();
            GCHandle pOptionalEntropy = new GCHandle();
            CAPI.CRYPTOAPI_BLOB userData = new CAPI.CRYPTOAPI_BLOB();

            try
            {
                // Pin the ciphertext so native code can read it in place.
                pbDataIn = GCHandle.Alloc(encryptedData, GCHandleType.Pinned);
                CAPI.CRYPTOAPI_BLOB dataIn = new CAPI.CRYPTOAPI_BLOB();
                dataIn.cbData = (uint)encryptedData.Length;
                dataIn.pbData = pbDataIn.AddrOfPinnedObject();
                CAPI.CRYPTOAPI_BLOB entropy = new CAPI.CRYPTOAPI_BLOB();
                if (optionalEntropy != null)
                {
                    // Pin the optional entropy too.
                    pOptionalEntropy = GCHandle.Alloc(optionalEntropy, GCHandleType.Pinned);
                    entropy.cbData = (uint)optionalEntropy.Length;
                    entropy.pbData = pOptionalEntropy.AddrOfPinnedObject();
                }
                uint dwFlags = CAPI.CRYPTPROTECT_UI_FORBIDDEN;
                if (scope == DataProtectionScope.LocalMachine)
                    dwFlags |= CAPI.CRYPTPROTECT_LOCAL_MACHINE;
                unsafe
                {
                    if (!CAPI.CryptUnprotectData(new IntPtr(&dataIn),
                                                    IntPtr.Zero,
                                                    new IntPtr(&entropy),
                                                    IntPtr.Zero,
                                                    IntPtr.Zero,
                                                    dwFlags,
                                                    new IntPtr(&userData)))
                        throw new CryptographicException(Marshal.GetLastWin32Error());
                }

                // In some cases, the API would fail due to OOM but simply return a null pointer.
                if (userData.pbData == IntPtr.Zero)
                    throw new OutOfMemoryException();

                // Copy the native plaintext blob into a managed array.
                byte[] data = new byte[(int)userData.cbData];
                Marshal.Copy(userData.pbData, data, 0, data.Length);

                return data;
            }
            finally
            {
                // Unpin inputs and zero+free the native plaintext buffer.
                if (pbDataIn.IsAllocated)
                    pbDataIn.Free();
                if (pOptionalEntropy.IsAllocated)
                    pOptionalEntropy.Free();
                if (userData.pbData != IntPtr.Zero)
                {
                    CAPI.ZeroMemory(userData.pbData, userData.cbData);
                    CAPI.LocalFree(userData.pbData);
                }
            }
        }
    }

    // P/Invoke declarations and constants for the crypt32 DPAPI entry points.
    internal static class CAPI
    {
        internal const uint CRYPTPROTECT_UI_FORBIDDEN = 0x1;
        internal const uint CRYPTPROTECT_LOCAL_MACHINE = 0x4;
        internal const int E_FILENOTFOUND = unchecked((int)0x80070002); // File not found
        internal const int ERROR_FILE_NOT_FOUND = 2;                    // File not found

        // Native DATA_BLOB: byte count + pointer pair.
        [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
        internal struct CRYPTOAPI_BLOB
        {
            internal uint cbData;
            internal IntPtr pbData;
        }

        internal static bool ErrorMayBeCausedByUnloadedProfile(int errorCode)
        {
            // CAPI returns a file not found error if the user profile is not yet loaded
            return errorCode == E_FILENOTFOUND ||
                   errorCode == ERROR_FILE_NOT_FOUND;
        }

        [DllImport("CRYPT32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
        internal static extern bool CryptProtectData(
            [In] IntPtr pDataIn,
            [In] string szDataDescr,
            [In] IntPtr pOptionalEntropy,
            [In] IntPtr pvReserved,
            [In] IntPtr pPromptStruct,
            [In] uint dwFlags,
            [In, Out] IntPtr pDataBlob);

        [DllImport("CRYPT32.dll", CharSet = CharSet.Unicode, SetLastError = true)]
        internal static extern bool CryptUnprotectData(
            [In] IntPtr pDataIn,
            [In] IntPtr ppszDataDescr,
            [In] IntPtr pOptionalEntropy,
            [In] IntPtr pvReserved,
            [In] IntPtr pPromptStruct,
            [In] uint dwFlags,
            [In, Out] IntPtr pDataBlob);

        [DllImport("ntdll.dll", EntryPoint = "RtlZeroMemory", SetLastError = true)]
        internal static extern void ZeroMemory(IntPtr handle, uint length);

        [DllImport(PinvokeDllNames.LocalFreeDllName, SetLastError = true)]
        internal static extern IntPtr LocalFree(IntPtr handle);
    }

    #endregion DPAPI
#endif
}
using System;
using System.Reflection;
using System.CodeDom;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.IO;
using System.Collections;

namespace Stetic
{
	// Entry point for generating C# (CodeDom) source for a set of Stetic GUI
	// projects: the global "Gui" bootstrap class, per-widget build code and the
	// "ActionGroups" lookup class.
	internal static class CodeGenerator
	{
		// Generates the project code and writes each compilation unit to disk.
		// A unit with an empty name goes to 'file'; named units are written
		// next to it.
		public static void GenerateProjectCode (string file, CodeDomProvider provider, GenerationOptions options, ProjectBackend[] projects)
		{
			CodeGenerationResult res = GenerateProjectCode (options, projects);
			ICodeGenerator gen = provider.CreateGenerator ();
			string basePath = Path.GetDirectoryName (file);
			foreach (SteticCompilationUnit unit in res.Units) {
				string fname;
				if (unit.Name.Length == 0)
					fname = file;
				else
					fname = Path.Combine (basePath, unit.Name);
				StreamWriter fileStream = new StreamWriter (fname);
				try {
					gen.GenerateCodeFromCompileUnit (unit, fileStream, new CodeGeneratorOptions ());
				} finally {
					fileStream.Close ();
				}
			}
		}

		// Builds the CodeDom tree for all projects and returns it together with
		// any generation warnings. Does not touch the file system.
		public static CodeGenerationResult GenerateProjectCode (GenerationOptions options, ProjectBackend[] projects)
		{
			ArrayList warningList = new ArrayList ();

			List<SteticCompilationUnit> units = new List<SteticCompilationUnit> ();
			// The unnamed unit receives the global Gui/ActionGroups classes.
			SteticCompilationUnit globalUnit = new SteticCompilationUnit ("");
			units.Add (globalUnit);

			if (options == null)
				options = new GenerationOptions ();

			CodeNamespace globalNs = new CodeNamespace (options.GlobalNamespace);
			globalUnit.Namespaces.Add (globalNs);

			// Global class
			CodeTypeDeclaration globalType = new CodeTypeDeclaration ("Gui");
			globalType.Attributes = MemberAttributes.Private;
			globalType.TypeAttributes = TypeAttributes.NestedAssembly;
			globalNs.Types.Add (globalType);

			// Create the project initialization method
			// This method will only be added at the end if there
			// is actually something to initialize
			CodeMemberMethod initMethod = new CodeMemberMethod ();
			initMethod.Name = "Initialize";
			initMethod.ReturnType = new CodeTypeReference (typeof(void));
			initMethod.Attributes = MemberAttributes.Assembly | MemberAttributes.Static;
			initMethod.Parameters.Add (new CodeParameterDeclarationExpression (typeof(Gtk.Widget), "iconRenderer"));

			GeneratorContext initContext = new ProjectGeneratorContext (globalNs, globalType, initMethod.Statements, options);
			initContext.RootObject = new CodeArgumentReferenceExpression ("iconRenderer");

			// Generate icon factory creation
			foreach (ProjectBackend gp in projects) {
				if (gp.IconFactory.Icons.Count > 0)
					gp.IconFactory.GenerateBuildCode (initContext);
			}
			warningList.AddRange (initContext.Warnings);

			// Generate the code
			if (options.UsePartialClasses)
				CodeGeneratorPartialClass.GenerateProjectGuiCode (globalUnit, globalNs, globalType, options, units, projects, warningList);
			else
				CodeGeneratorInternalClass.GenerateProjectGuiCode (globalUnit, globalNs, globalType, options, units, projects, warningList);

			GenerateProjectActionsCode (globalNs, options, projects);

			// Final step. If there is some initialization code, add all needed infrastructure
			globalType.Members.Add (initMethod);

			CodeMemberField initField = new CodeMemberField (typeof(bool), "initialized");
			initField.Attributes = MemberAttributes.Private | MemberAttributes.Static;
			globalType.Members.Add (initField);

			CodeFieldReferenceExpression initVar = new CodeFieldReferenceExpression (
				new CodeTypeReferenceExpression (globalNs.Name + ".Gui"),
				"initialized"
			);

			// Wrap the generated statements in a run-once guard:
			// if (!initialized) { initialized = true; ... }
			CodeConditionStatement initCondition = new CodeConditionStatement ();
			initCondition.Condition = new CodeBinaryOperatorExpression (
				initVar,
				CodeBinaryOperatorType.IdentityEquality,
				new CodePrimitiveExpression (false)
			);
			initCondition.TrueStatements.Add (new CodeAssignStatement (
				initVar,
				new CodePrimitiveExpression (true)
			));
			initCondition.TrueStatements.AddRange (initMethod.Statements);
			initMethod.Statements.Clear ();
			initMethod.Statements.Add (initCondition);

			return new CodeGenerationResult (units.ToArray (), (string[]) warningList.ToArray (typeof(string)));
		}

		// Emits CodeAttachEventStatements connecting each declared signal of
		// 'wrapper' (and, recursively, its children and local action groups) to
		// its handler method on the target object.
		internal static void BindSignalHandlers (CodeExpression targetObjectVar, ObjectWrapper wrapper, Stetic.WidgetMap map, CodeStatementCollection statements, GenerationOptions options)
		{
			foreach (Signal signal in wrapper.Signals) {
				SignalDescriptor descriptor = signal.SignalDescriptor;
				CodeExpression createDelegate;

				if (options.UsePartialClasses) {
					// Partial classes: handler is a method of 'this'.
					createDelegate = new CodeDelegateCreateExpression (
						new CodeTypeReference (descriptor.HandlerTypeName),
						new CodeThisReferenceExpression (),
						signal.Handler);
				} else {
					// Otherwise bind by name at runtime via Delegate.CreateDelegate.
					createDelegate = new CodeMethodInvokeExpression (
						new CodeTypeReferenceExpression (typeof(Delegate)),
						"CreateDelegate",
						new CodeTypeOfExpression (descriptor.HandlerTypeName),
						targetObjectVar,
						new CodePrimitiveExpression (signal.Handler));
					createDelegate = new CodeCastExpression (descriptor.HandlerTypeName, createDelegate);
				}

				CodeAttachEventStatement cevent = new CodeAttachEventStatement (
					new CodeEventReferenceExpression (
						map.GetWidgetExp (wrapper),
						descriptor.Name),
					createDelegate);

				statements.Add (cevent);
			}

			Wrapper.Widget widget = wrapper as Wrapper.Widget;
			if (widget != null && widget.IsTopLevel) {
				// Bind local action signals
				foreach (Wrapper.ActionGroup grp in widget.LocalActionGroups) {
					foreach (Wrapper.Action ac in grp.Actions)
						BindSignalHandlers (targetObjectVar, ac, map, statements, options);
				}
			}

			Gtk.Container cont = wrapper.Wrapped as Gtk.Container;
			if (cont != null) {
				foreach (Gtk.Widget child in cont.AllChildren) {
					Stetic.Wrapper.Widget ww = Stetic.Wrapper.Widget.Lookup (child);
					if (ww != null)
						BindSignalHandlers (targetObjectVar, ww, map, statements, options);
				}
			}
		}

		// Builds the "ActionGroups" class with two GetActionGroup overloads
		// (by Type and by name), lazily creating one cached static field per
		// action group. With multiple projects, lookups are also keyed by file.
		static void GenerateProjectActionsCode (CodeNamespace cns, GenerationOptions options, params ProjectBackend[] projects)
		{
			bool multiProject = projects.Length > 1;

			CodeTypeDeclaration type = new CodeTypeDeclaration ("ActionGroups");
			type.Attributes = MemberAttributes.Private;
			type.TypeAttributes = TypeAttributes.NestedAssembly;
			cns.Types.Add (type);

			// Generate the global action group getter
			CodeMemberMethod met = new CodeMemberMethod ();
			met.Name = "GetActionGroup";
			type.Members.Add (met);
			met.Parameters.Add (new CodeParameterDeclarationExpression (typeof(Type), "type"));
			if (multiProject)
				met.Parameters.Add (new CodeParameterDeclarationExpression (typeof(string), "file"));
			met.ReturnType = new CodeTypeReference (typeof(Gtk.ActionGroup));
			met.Attributes = MemberAttributes.Public | MemberAttributes.Static;

			// Delegates to the string overload using type.FullName.
			CodeMethodInvokeExpression call = new CodeMethodInvokeExpression (
				new CodeMethodReferenceExpression (
					new CodeTypeReferenceExpression (cns.Name + ".ActionGroups"),
					"GetActionGroup"
				),
				new CodePropertyReferenceExpression (
					new CodeArgumentReferenceExpression ("type"),
					"FullName"
				)
			);
			if (multiProject)
				call.Parameters.Add (new CodeArgumentReferenceExpression ("file"));
			met.Statements.Add (new CodeMethodReturnStatement (call));

			// Generate the global action group getter (overload)
			met = new CodeMemberMethod ();
			met.Name = "GetActionGroup";
			type.Members.Add (met);
			met.Parameters.Add (new CodeParameterDeclarationExpression (typeof(string), "name"));
			if (multiProject)
				met.Parameters.Add (new CodeParameterDeclarationExpression (typeof(string), "file"));
			met.ReturnType = new CodeTypeReference (typeof(Gtk.ActionGroup));
			met.Attributes = MemberAttributes.Public | MemberAttributes.Static;

			CodeArgumentReferenceExpression cfile = new CodeArgumentReferenceExpression ("file");
			CodeArgumentReferenceExpression cid = new CodeArgumentReferenceExpression ("name");

			CodeStatementCollection projectCol = met.Statements;
			int n=1;

			foreach (ProjectBackend gp in projects) {
				CodeStatementCollection widgetCol;
				if (multiProject) {
					// Nest an if (file == <id>) per project; the false branch
					// chains to the next project's check.
					CodeConditionStatement pcond = new CodeConditionStatement ();
					pcond.Condition = new CodeBinaryOperatorExpression (
						cfile,
						CodeBinaryOperatorType.IdentityEquality,
						new CodePrimitiveExpression (gp.Id)
					);
					projectCol.Add (pcond);
					widgetCol = pcond.TrueStatements;
					projectCol = pcond.FalseStatements;
				} else {
					widgetCol = projectCol;
				}

				foreach (Wrapper.ActionGroup grp in gp.ActionGroups) {
					// One cached static field per group: group1, group2, ...
					string fname = "group" + (n++);
					CodeMemberField grpField = new CodeMemberField (typeof(Gtk.ActionGroup), fname);
					grpField.Attributes |= MemberAttributes.Static;
					type.Members.Add (grpField);

					CodeFieldReferenceExpression grpVar = new CodeFieldReferenceExpression (
						new CodeTypeReferenceExpression (cns.Name + ".ActionGroups"),
						fname
					);

					CodeConditionStatement pcond = new CodeConditionStatement ();
					pcond.Condition = new CodeBinaryOperatorExpression (
						cid,
						CodeBinaryOperatorType.IdentityEquality,
						new CodePrimitiveExpression (grp.Name)
					);
					widgetCol.Add (pcond);

					// If the group has not yet been created, create it
					CodeConditionStatement pcondGrp = new CodeConditionStatement ();
					pcondGrp.Condition = new CodeBinaryOperatorExpression (
						grpVar,
						CodeBinaryOperatorType.IdentityEquality,
						new CodePrimitiveExpression (null)
					);
					pcondGrp.TrueStatements.Add (
						new CodeAssignStatement (
							grpVar,
							new CodeObjectCreateExpression (grp.Name)
						)
					);
					pcond.TrueStatements.Add (pcondGrp);
					pcond.TrueStatements.Add (new CodeMethodReturnStatement (grpVar));
					widgetCol = pcond.FalseStatements;
				}
				widgetCol.Add (new CodeMethodReturnStatement (new CodePrimitiveExpression (null)));
			}

			if (met.Statements.Count == 0)
				met.Statements.Add (new CodeMethodReturnStatement (new CodePrimitiveExpression (null)));
		}

		// Returns the list of (type name, member name) pairs for which fields
		// must be generated/bound for the given widget tree.
		internal static List<ObjectBindInfo> GetFieldsToBind (ObjectWrapper wrapper)
		{
			List<ObjectBindInfo> tobind = new List<ObjectBindInfo> ();
			GetFieldsToBind (tobind, wrapper);
			return tobind;
		}

		// Recursive worker for GetFieldsToBind: collects bindable widgets and
		// actions, skipping top-level widgets, internal children and
		// unselectable widgets.
		static void GetFieldsToBind (List<ObjectBindInfo> tobind, ObjectWrapper wrapper)
		{
			string memberName = null;

			if (wrapper is Wrapper.Widget) {
				Wrapper.Widget ww = wrapper as Wrapper.Widget;
				if (!ww.IsTopLevel && ww.InternalChildProperty == null && !ww.Unselectable)
					memberName = ((Wrapper.Widget) wrapper).Wrapped.Name;
			}
			else if (wrapper is Wrapper.Action)
				memberName = ((Wrapper.Action) wrapper).Name;

			if (memberName != null) {
				ObjectBindInfo binfo = new ObjectBindInfo (wrapper.WrappedTypeName, memberName);
				tobind.Add (binfo);
			}

			Wrapper.ActionGroup agroup = wrapper as Wrapper.ActionGroup;
			if (agroup != null) {
				foreach (Wrapper.Action ac in agroup.Actions)
					GetFieldsToBind (tobind, ac);
			}

			Wrapper.Widget widget = wrapper as Wrapper.Widget;
			if (widget != null && widget.IsTopLevel) {
				// Generate fields for local actions
				foreach (Wrapper.ActionGroup grp in widget.LocalActionGroups) {
					GetFieldsToBind (tobind, grp);
				}
			}

			Gtk.Container cont = wrapper.Wrapped as Gtk.Container;
			if (cont != null) {
				foreach (Gtk.Widget child in cont.AllChildren) {
					Stetic.Wrapper.Widget ww = Stetic.Wrapper.Widget.Lookup (child);
					if (ww != null)
						GetFieldsToBind (tobind, ww);
				}
			}
		}

		// Emits the creation code for a widget tree into 'statements' and
		// returns the widget map produced by the generator context.
		public static WidgetMap GenerateCreationCode (CodeNamespace cns, CodeTypeDeclaration type, Gtk.Widget w, CodeExpression widgetVarExp, CodeStatementCollection statements, GenerationOptions options, ArrayList warnings)
		{
			statements.Add (new CodeCommentStatement ("Widget " + w.Name));
			GeneratorContext ctx = new ProjectGeneratorContext (cns, type, statements, options);
			Stetic.Wrapper.Widget ww = Stetic.Wrapper.Widget.Lookup (w);
			ctx.GenerateCreationCode (ww, widgetVarExp);
			ctx.EndGeneration ();
			warnings.AddRange (ctx.Warnings);
			return ctx.WidgetMap;
		}

		// Overload of the above for action groups.
		public static WidgetMap GenerateCreationCode (CodeNamespace cns, CodeTypeDeclaration type, Wrapper.ActionGroup grp, CodeExpression groupVarExp, CodeStatementCollection statements, GenerationOptions options, ArrayList warnings)
		{
			statements.Add (new CodeCommentStatement ("Action group " + grp.Name));
			GeneratorContext ctx = new ProjectGeneratorContext (cns, type, statements, options);
			ctx.GenerateCreationCode (grp, groupVarExp);
			ctx.EndGeneration ();
			warnings.AddRange (ctx.Warnings);
			return ctx.WidgetMap;
		}
	}

	// Generator context that places generated instances either in fields of the
	// partial class (UsePartialClasses) or in a "bindings" dictionary variable.
	class ProjectGeneratorContext: GeneratorContext
	{
		CodeTypeDeclaration type;

		public ProjectGeneratorContext (CodeNamespace cns, CodeTypeDeclaration type, CodeStatementCollection statements, GenerationOptions options): base (cns, "w", statements, options)
		{
			this.type = type;
		}

		// Returns the expression used to refer to a newly created instance,
		// declaring a backing field or bindings entry as a side effect.
		public override CodeExpression GenerateInstanceExpression (ObjectWrapper wrapper, CodeExpression newObject)
		{
			string typeName = wrapper.WrappedTypeName;
			string memberName = null;
			if (wrapper is Wrapper.Widget)
				memberName = ((Wrapper.Widget) wrapper).Wrapped.Name;
			else if (wrapper is Wrapper.Action)
				memberName = ((Wrapper.Action) wrapper).Name;

			if (memberName == null)
				return base.GenerateInstanceExpression (wrapper, newObject);

			if (Options.UsePartialClasses) {
				// Don't generate fields for top level widgets and for widgets accessible
				// through other widget's properties
				Wrapper.Widget ww = wrapper as Wrapper.Widget;
				if (ww == null || (!ww.IsTopLevel && ww.InternalChildProperty == null && !ww.Unselectable)) {
					type.Members.Add (
						new CodeMemberField (
							typeName,
							memberName
						)
					);
					CodeExpression var = new CodeFieldReferenceExpression (
						new CodeThisReferenceExpression (),
						memberName
					);
					Statements.Add (
						new CodeAssignStatement (
							var,
							newObject
						)
					);
					return var;
				} else
					return base.GenerateInstanceExpression (wrapper, newObject);
			} else {
				CodeExpression var = base.GenerateInstanceExpression (wrapper, newObject);
				// Record the instance in the runtime "bindings" map by name.
				Statements.Add (
					new CodeAssignStatement (
						new CodeIndexerExpression (
							new CodeVariableReferenceExpression ("bindings"),
							new CodePrimitiveExpression (memberName)
						),
						var
					)
				);
				return var;
			}
		}
	}

	// A CodeCompileUnit with an associated output file name; an empty name
	// means "write to the default project file".
	[Serializable]
	public class SteticCompilationUnit: CodeCompileUnit
	{
		string name;

		public SteticCompilationUnit (string name)
		{
			this.name = name;
		}

		public string Name {
			get { return name; }
			internal set { name = value; }
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using Xunit;

namespace System.Collections.Immutable.Tests
{
    // Tests for ImmutableDictionary<TKey, TValue>.Builder: construction,
    // mutation isolation from previously produced immutable snapshots,
    // comparer switching, and debugger support.
    public class ImmutableDictionaryBuilderTest : ImmutableDictionaryBuilderTestBase
    {
        [Fact]
        public void CreateBuilder()
        {
            // Default comparers when none are supplied.
            var builder = ImmutableDictionary.CreateBuilder<string, string>();
            Assert.Same(EqualityComparer<string>.Default, builder.KeyComparer);
            Assert.Same(EqualityComparer<string>.Default, builder.ValueComparer);

            // Key comparer only.
            builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
            Assert.Same(StringComparer.Ordinal, builder.KeyComparer);
            Assert.Same(EqualityComparer<string>.Default, builder.ValueComparer);

            // Both comparers.
            builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal, StringComparer.OrdinalIgnoreCase);
            Assert.Same(StringComparer.Ordinal, builder.KeyComparer);
            Assert.Same(StringComparer.OrdinalIgnoreCase, builder.ValueComparer);
        }

        [Fact]
        public void ToBuilder()
        {
            var builder = ImmutableDictionary<int, string>.Empty.ToBuilder();
            builder.Add(3, "3");
            builder.Add(5, "5");
            Assert.Equal(2, builder.Count);
            Assert.True(builder.ContainsKey(3));
            Assert.True(builder.ContainsKey(5));
            Assert.False(builder.ContainsKey(7));

            // Mutating the builder after ToImmutable must not affect the snapshot.
            var set = builder.ToImmutable();
            Assert.Equal(builder.Count, set.Count);
            builder.Add(8, "8");
            Assert.Equal(3, builder.Count);
            Assert.Equal(2, set.Count);
            Assert.True(builder.ContainsKey(8));
            Assert.False(set.ContainsKey(8));
        }

        [Fact]
        public void BuilderFromMap()
        {
            var set = ImmutableDictionary<int, string>.Empty.Add(1, "1");
            var builder = set.ToBuilder();
            Assert.True(builder.ContainsKey(1));
            builder.Add(3, "3");
            builder.Add(5, "5");
            Assert.Equal(3, builder.Count);
            Assert.True(builder.ContainsKey(3));
            Assert.True(builder.ContainsKey(5));
            Assert.False(builder.ContainsKey(7));

            var set2 = builder.ToImmutable();
            Assert.Equal(builder.Count, set2.Count);
            Assert.True(set2.ContainsKey(1));
            builder.Add(8, "8");
            Assert.Equal(4, builder.Count);
            Assert.Equal(3, set2.Count);
            Assert.True(builder.ContainsKey(8));

            // Neither the original map nor the earlier snapshot sees later additions.
            Assert.False(set.ContainsKey(8));
            Assert.False(set2.ContainsKey(8));
        }

        [Fact]
        public void SeveralChanges()
        {
            var mutable = ImmutableDictionary<int, string>.Empty.ToBuilder();
            var immutable1 = mutable.ToImmutable();
            Assert.Same(immutable1, mutable.ToImmutable()); //, "The Immutable property getter is creating new objects without any differences.");

            mutable.Add(1, "a");
            var immutable2 = mutable.ToImmutable();
            Assert.NotSame(immutable1, immutable2); //, "Mutating the collection did not reset the Immutable property.");
            Assert.Same(immutable2, mutable.ToImmutable()); //, "The Immutable property getter is creating new objects without any differences.");
            Assert.Equal(1, immutable2.Count);
        }

        [Fact]
        public void EnumerateBuilderWhileMutating()
        {
            var builder = ImmutableDictionary<int, string>.Empty
                .AddRange(Enumerable.Range(1, 10).Select(n => new KeyValuePair<int, string>(n, null)))
                .ToBuilder();
            Assert.Equal(
                Enumerable.Range(1, 10).Select(n => new KeyValuePair<int, string>(n, null)),
                builder);

            var enumerator = builder.GetEnumerator();
            Assert.True(enumerator.MoveNext());
            builder.Add(11, null);

            // Verify that a new enumerator will succeed.
            Assert.Equal(
                Enumerable.Range(1, 11).Select(n => new KeyValuePair<int, string>(n, null)),
                builder);

            // Try enumerating further with the previous enumerable now that we've changed the collection.
            Assert.Throws<InvalidOperationException>(() => enumerator.MoveNext());
            enumerator.Reset();
            enumerator.MoveNext(); // resetting should fix the problem.

            // Verify that by obtaining a new enumerator, we can enumerate all the contents.
            Assert.Equal(
                Enumerable.Range(1, 11).Select(n => new KeyValuePair<int, string>(n, null)),
                builder);
        }

        [Fact]
        public void BuilderReusesUnchangedImmutableInstances()
        {
            var collection = ImmutableDictionary<int, string>.Empty.Add(1, null);
            var builder = collection.ToBuilder();
            Assert.Same(collection, builder.ToImmutable()); // no changes at all.
            builder.Add(2, null);

            var newImmutable = builder.ToImmutable();
            Assert.NotSame(collection, newImmutable); // first ToImmutable with changes should be a new instance.
            Assert.Same(newImmutable, builder.ToImmutable()); // second ToImmutable without changes should be the same instance.
        }

        [Fact]
        public void AddRange()
        {
            var builder = ImmutableDictionary.Create<string, int>().ToBuilder();
            builder.AddRange(new Dictionary<string, int> { { "a", 1 }, { "b", 2 } });
            Assert.Equal(2, builder.Count);
            Assert.Equal(1, builder["a"]);
            Assert.Equal(2, builder["b"]);
        }

        [Fact]
        public void RemoveRange()
        {
            var builder = ImmutableDictionary.Create<string, int>()
                .AddRange(new Dictionary<string, int> { { "a", 1 }, { "b", 2 }, { "c", 3 } })
                .ToBuilder();
            Assert.Equal(3, builder.Count);
            builder.RemoveRange(new[] { "a", "b" });
            Assert.Equal(1, builder.Count);
            Assert.Equal(3, builder["c"]);
        }

        [Fact]
        public void Clear()
        {
            var builder = ImmutableDictionary.Create<string, int>().ToBuilder();
            builder.Add("five", 5);
            Assert.Equal(1, builder.Count);
            builder.Clear();
            Assert.Equal(0, builder.Count);
        }

        [Fact]
        public void ContainsValue()
        {
            var map = ImmutableDictionary.Create<string, int>().Add("five", 5);
            var builder = map.ToBuilder();
            Assert.True(builder.ContainsValue(5));
            Assert.False(builder.ContainsValue(4));
        }

        [Fact]
        public void KeyComparer()
        {
            // Switching the key comparer re-hashes the existing entries.
            var builder = ImmutableDictionary.Create<string, string>()
                .Add("a", "1").Add("B", "1").ToBuilder();
            Assert.Same(EqualityComparer<string>.Default, builder.KeyComparer);
            Assert.True(builder.ContainsKey("a"));
            Assert.False(builder.ContainsKey("A"));

            builder.KeyComparer = StringComparer.OrdinalIgnoreCase;
            Assert.Same(StringComparer.OrdinalIgnoreCase, builder.KeyComparer);
            Assert.Equal(2, builder.Count);
            Assert.True(builder.ContainsKey("a"));
            Assert.True(builder.ContainsKey("A"));
            Assert.True(builder.ContainsKey("b"));

            var set = builder.ToImmutable();
            Assert.Same(StringComparer.OrdinalIgnoreCase, set.KeyComparer);
            Assert.True(set.ContainsKey("a"));
            Assert.True(set.ContainsKey("A"));
            Assert.True(set.ContainsKey("b"));
        }

        [Fact]
        public void KeyComparerCollisions()
        {
            // First check where collisions have matching values.
            var builder = ImmutableDictionary.Create<string, string>()
                .Add("a", "1").Add("A", "1").ToBuilder();
            builder.KeyComparer = StringComparer.OrdinalIgnoreCase;
            Assert.Equal(1, builder.Count);
            Assert.True(builder.ContainsKey("a"));

            var set = builder.ToImmutable();
            Assert.Same(StringComparer.OrdinalIgnoreCase, set.KeyComparer);
            Assert.Equal(1, set.Count);
            Assert.True(set.ContainsKey("a"));

            // Now check where collisions have conflicting values.
            builder = ImmutableDictionary.Create<string, string>()
                .Add("a", "1").Add("A", "2").Add("b", "3").ToBuilder();
            AssertExtensions.Throws<ArgumentException>(null, () => builder.KeyComparer = StringComparer.OrdinalIgnoreCase);

            // Force all values to be considered equal.
            builder.ValueComparer = EverythingEqual<string>.Default;
            Assert.Same(EverythingEqual<string>.Default, builder.ValueComparer);
            builder.KeyComparer = StringComparer.OrdinalIgnoreCase;     // should not throw because values will be seen as equal.
            Assert.Equal(2, builder.Count);
            Assert.True(builder.ContainsKey("a"));
            Assert.True(builder.ContainsKey("b"));
        }

        [Fact]
        public void KeyComparerEmptyCollection()
        {
            var builder = ImmutableDictionary.Create<string, string>()
                .Add("a", "1").Add("B", "1").ToBuilder();
            Assert.Same(EqualityComparer<string>.Default, builder.KeyComparer);
            builder.KeyComparer = StringComparer.OrdinalIgnoreCase;
            Assert.Same(StringComparer.OrdinalIgnoreCase, builder.KeyComparer);
            var set = builder.ToImmutable();
            Assert.Same(StringComparer.OrdinalIgnoreCase, set.KeyComparer);
        }

        [Fact]
        public void GetValueOrDefaultOfConcreteType()
        {
            var empty = ImmutableDictionary.Create<string, int>().ToBuilder();
            var populated = ImmutableDictionary.Create<string, int>().Add("a", 5).ToBuilder();
            Assert.Equal(0, empty.GetValueOrDefault("a"));
            Assert.Equal(1, empty.GetValueOrDefault("a", 1));
            Assert.Equal(5, populated.GetValueOrDefault("a"));
            Assert.Equal(5, populated.GetValueOrDefault("a", 1));
        }

        [Fact]
        public void DebuggerAttributesValid()
        {
            DebuggerAttributes.ValidateDebuggerDisplayReferences(ImmutableDictionary.CreateBuilder<string, int>());
            ImmutableDictionary<int, string>.Builder builder = ImmutableDictionary.CreateBuilder<int, string>();
            builder.Add(1, "One");
            builder.Add(2, "Two");
            DebuggerAttributeInfo info = DebuggerAttributes.ValidateDebuggerTypeProxyProperties(builder);

            // The debugger proxy exposes the entries through a RootHidden property.
            PropertyInfo itemProperty = info.Properties.Single(pr => pr.GetCustomAttribute<DebuggerBrowsableAttribute>().State == DebuggerBrowsableState.RootHidden);
            KeyValuePair<int, string>[] items = itemProperty.GetValue(info.Instance) as KeyValuePair<int, string>[];
            Assert.Equal(builder, items);
        }

        [Fact]
        public static void TestDebuggerAttributes_Null()
        {
            // NOTE(review): this uses an ImmutableHashSet proxy in a dictionary
            // builder test suite — looks copied from the hash-set tests; confirm
            // whether ImmutableDictionary.CreateBuilder was intended here.
            Type proxyType = DebuggerAttributes.GetProxyType(ImmutableHashSet.Create<string>());
            TargetInvocationException tie = Assert.Throws<TargetInvocationException>(() => Activator.CreateInstance(proxyType, (object)null));
            Assert.IsType<ArgumentNullException>(tie.InnerException);
        }

        [Fact]
        public void ToImmutableDictionary()
        {
            ImmutableDictionary<int, int>.Builder builder = ImmutableDictionary.CreateBuilder<int, int>();
            builder.Add(0, 0);
            builder.Add(1, 1);
            builder.Add(2, 2);

            var dictionary = builder.ToImmutableDictionary();
            Assert.Equal(0, dictionary[0]);
            Assert.Equal(1, dictionary[1]);
            Assert.Equal(2, dictionary[2]);

            // Later mutations must not leak into the snapshot.
            builder[1] = 5;
            Assert.Equal(5, builder[1]);
            Assert.Equal(1, dictionary[1]);

            builder.Clear();
            Assert.True(builder.ToImmutableDictionary().IsEmpty);
            Assert.False(dictionary.IsEmpty);

            ImmutableDictionary<int, int>.Builder nullBuilder = null;
            AssertExtensions.Throws<ArgumentNullException>("builder", () => nullBuilder.ToImmutableDictionary());
        }

        // Base-class hooks mapping the shared test suite onto this concrete type.
        protected override IImmutableDictionary<TKey, TValue> GetEmptyImmutableDictionary<TKey, TValue>()
        {
            return ImmutableDictionary.Create<TKey, TValue>();
        }

        protected override IImmutableDictionary<string, TValue> Empty<TValue>(StringComparer comparer)
        {
            return ImmutableDictionary.Create<string, TValue>(comparer);
        }

        protected override bool TryGetKeyHelper<TKey, TValue>(IDictionary<TKey, TValue> dictionary, TKey equalKey, out TKey actualKey)
        {
            return ((ImmutableDictionary<TKey, TValue>.Builder)dictionary).TryGetKey(equalKey, out actualKey);
        }

        protected override IDictionary<TKey, TValue> GetBuilder<TKey, TValue>(IImmutableDictionary<TKey, TValue> basis)
        {
            return ((ImmutableDictionary<TKey, TValue>)(basis ?? GetEmptyImmutableDictionary<TKey, TValue>())).ToBuilder();
        }
    }
}
// // ScrollViewBackend.cs // // Author: // Lluis Sanchez Gual <lluis@xamarin.com> // // Copyright (c) 2012 Xamarin Inc // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using Xwt.Backends;

#if MONOMAC
using nint = System.Int32;
using nfloat = System.Single;
using CGRect = System.Drawing.RectangleF;
using CGPoint = System.Drawing.PointF;
using CGSize = System.Drawing.SizeF;
using MonoMac.AppKit;
using MonoMac.CoreGraphics;
#else
using AppKit;
using CoreGraphics;
#endif

namespace Xwt.Mac
{
	// Xwt scroll-view backend for macOS: bridges Xwt's IScrollViewBackend contract onto NSScrollView.
	public class ScrollViewBackend: ViewBackend<NSScrollView,IScrollViewEventSink>, IScrollViewBackend
	{
		IWidgetBackend child;                    // the single scrolled child widget
		ScrollPolicy verticalScrollPolicy;
		ScrollPolicy horizontalScrollPolicy;
		NormalClipView clipView;                 // only set on the non-custom-scrolling path

		public override void Initialize ()
		{
			ViewObject = new CustomScrollView ();
			Widget.HasHorizontalScroller = true;
			Widget.HasVerticalScroller = true;
			Widget.AutoresizesSubviews = true;
		}

		protected override Size GetNaturalSize ()
		{
			return EventSink.GetDefaultNaturalSize ();
		}

		// Installs the child widget. Widgets that implement their own scrolling get a CustomClipView
		// driven by ScrollAdjustmentBackend objects; everything else is hosted directly as the
		// scroll view's document view.
		public void SetChild (IWidgetBackend child)
		{
			this.child = child;
			ViewBackend backend = (ViewBackend) child;
			if (backend.EventSink.SupportsCustomScrolling ()) {
				var vs = new ScrollAdjustmentBackend (Widget, true);
				var hs = new ScrollAdjustmentBackend (Widget, false);
				// NOTE(review): this local intentionally shadows the NormalClipView field of the same name.
				CustomClipView clipView = new CustomClipView (hs, vs);
				Widget.ContentView = clipView;
				var dummy = new DummyClipView ();
				dummy.AddSubview (backend.Widget);
				backend.Widget.Frame = new CGRect (0, 0, clipView.Frame.Width, clipView.Frame.Height);
				clipView.DocumentView = dummy;
				backend.EventSink.SetScrollAdjustments (hs, vs);
				vertScroll = vs;
				horScroll = hs;
			} else {
				clipView = new NormalClipView ();
				clipView.Scrolled += OnScrolled;
				Widget.ContentView = clipView;
				Widget.DocumentView = backend.Widget;
				UpdateChildSize ();
			}
		}

		public ScrollPolicy VerticalScrollPolicy {
			get { return verticalScrollPolicy; }
			set {
				verticalScrollPolicy = value;
				Widget.HasVerticalScroller = verticalScrollPolicy != ScrollPolicy.Never;
			}
		}

		public ScrollPolicy HorizontalScrollPolicy {
			get { return horizontalScrollPolicy; }
			set {
				horizontalScrollPolicy = value;
				Widget.HasHorizontalScroller = horizontalScrollPolicy != ScrollPolicy.Never;
			}
		}

		IScrollControlBackend vertScroll;
		public IScrollControlBackend CreateVerticalScrollControl ()
		{
			// lazily created; SetChild may already have stored a ScrollAdjustmentBackend here
			if (vertScroll == null)
				vertScroll = new ScrollControlBackend (ApplicationContext, Widget, true);
			return vertScroll;
		}

		IScrollControlBackend horScroll;
		public IScrollControlBackend CreateHorizontalScrollControl ()
		{
			if (horScroll == null)
				horScroll = new ScrollControlBackend (ApplicationContext, Widget, false);
			return horScroll;
		}

		void OnScrolled (object o, EventArgs e)
		{
			// forward clip-view scroll notifications to the scroll controls, when they are ours
			if (vertScroll is ScrollControlBackend)
				((ScrollControlBackend)vertScroll).NotifyValueChanged ();
			if (horScroll is ScrollControlBackend)
				((ScrollControlBackend)horScroll).NotifyValueChanged ();
		}

		// NOTE(review): not implemented on this backend — always reports an empty rectangle.
		public Rectangle VisibleRect {
			get { return Rectangle.Zero; }
		}

		// NOTE(review): not implemented on this backend — border visibility is ignored.
		public bool BorderVisible {
			get { return false; }
			set { }
		}

		// Resizes the document view to the child's preferred size, never smaller than the visible area.
		// No-op on the custom-scrolling path (CustomClipView manages its own document size).
		void UpdateChildSize ()
		{
			if (child == null)
				return;

			if (Widget.ContentView is CustomClipView) {
			} else {
				NSView view = (NSView)Widget.DocumentView;
				ViewBackend c = (ViewBackend)child;
				Size s;
				if (horizontalScrollPolicy == ScrollPolicy.Never) {
					s = c.Frontend.Surface.GetPreferredSize (SizeConstraint.WithSize (Widget.ContentView.Frame.Width), SizeConstraint.Unconstrained);
				}
				else if (verticalScrollPolicy == ScrollPolicy.Never) {
					// NOTE(review): the vertical axis is constrained with the clip view's *Width* —
					// looks like a copy/paste slip (Height was probably intended); preserved as-is.
					// TODO confirm against upstream Xwt before changing.
					s = c.Frontend.Surface.GetPreferredSize (SizeConstraint.Unconstrained, SizeConstraint.WithSize (Widget.ContentView.Frame.Width));
				}
				else {
					s = c.Frontend.Surface.GetPreferredSize ();
				}
				var w = Math.Max (s.Width, Widget.ContentView.Frame.Width);
				var h = Math.Max (s.Height, Widget.ContentView.Frame.Height);
				view.Frame = new CGRect (view.Frame.X, view.Frame.Y, (nfloat)w, (nfloat)h);
			}
		}

		public void SetChildSize (Size s)
		{
			UpdateChildSize ();
		}

		public override Drawing.Color BackgroundColor {
			get {
				return Widget.BackgroundColor.ToXwtColor ();
			}
			set {
				base.BackgroundColor = value;
				Widget.BackgroundColor = value.ToNSColor ();
			}
		}
	}

	// NSScrollView subclass that carries the Xwt backend reference and uses a flipped
	// (top-left origin) coordinate system, matching Xwt's coordinate conventions.
	class CustomScrollView: NSScrollView, IViewObject
	{
		public NSView View {
			get { return this; }
		}
		public ViewBackend Backend { get; set; }

		public override bool IsFlipped {
			get { return true; }
		}
	}

	// Flipped host view used as the document view on the custom-scrolling path.
	class DummyClipView: NSView
	{
		public override bool IsFlipped {
			get { return true; }
		}
	}

	// Clip view used for widgets that do their own scrolling: it translates NSScrollView scroll
	// positions into Xwt ScrollAdjustment values (and back) through a per-axis pixel ratio.
	class CustomClipView: NSClipView
	{
		ScrollAdjustmentBackend hScroll;
		ScrollAdjustmentBackend vScroll;
		double currentX;                 // current scroll offset, in pixels, clamped to the document
		double currentY;
		float ratioX = 1, ratioY = 1;    // pixels per adjustment unit on each axis

		public CustomClipView (ScrollAdjustmentBackend hScroll, ScrollAdjustmentBackend vScroll)
		{
			this.hScroll = hScroll;
			this.vScroll = vScroll;
			CopiesOnScroll = false;
		}

		// Scroll position expressed in adjustment units (LowerValue-based), not pixels.
		public double CurrentX {
			get { return hScroll.LowerValue + (currentX / ratioX); }
			set { ScrollToPoint (new CGPoint ((nfloat)(value - hScroll.LowerValue) * ratioX, (nfloat)currentY)); }
		}

		public double CurrentY {
			get { return vScroll.LowerValue + (currentY / ratioY); }
			set { ScrollToPoint (new CGPoint ((nfloat)currentX, (nfloat)(value - vScroll.LowerValue) * ratioY)); }
		}

		public override bool IsFlipped {
			get { return true; }
		}

		public override void SetFrameSize (CGSize newSize)
		{
			base.SetFrameSize (newSize);
			// keep the hosted child (inside the DummyClipView) sized to the visible area
			var v = DocumentView.Subviews [0];
			v.Frame = new CGRect (v.Frame.X, v.Frame.Y, newSize.Width, newSize.Height);
		}

		public override void ScrollToPoint (CGPoint newOrigin)
		{
			base.ScrollToPoint (newOrigin);
			var v = DocumentView.Subviews [0];
			// clamp the requested origin to the document bounds
			currentX = newOrigin.X >= 0 ? newOrigin.X : 0;
			currentY = newOrigin.Y >= 0 ? newOrigin.Y : 0;
			if (currentX + v.Frame.Width > DocumentView.Frame.Width)
				currentX = DocumentView.Frame.Width - v.Frame.Width;
			if (currentY + v.Frame.Height > DocumentView.Frame.Height)
				currentY = DocumentView.Frame.Height - v.Frame.Height;
			// move the hosted child instead of the content, then publish the new position
			v.Frame = new CGRect ((nfloat)currentX, (nfloat)currentY, v.Frame.Width, v.Frame.Height);
			hScroll.NotifyValueChanged ();
			vScroll.NotifyValueChanged ();
		}

		// Recomputes the pixel/unit ratios and resizes the document to span the adjustment range.
		public void UpdateDocumentSize ()
		{
			var vr = DocumentVisibleRect ();
			ratioX = hScroll.PageSize != 0 ? (float)vr.Width / (float)hScroll.PageSize : 1;
			ratioY = vScroll.PageSize != 0 ? (float)vr.Height / (float)vScroll.PageSize : 1;
			DocumentView.Frame = new CGRect (0, 0, (nfloat)(hScroll.UpperValue - hScroll.LowerValue) * ratioX, (nfloat)(vScroll.UpperValue - vScroll.LowerValue) * ratioY);
		}
	}

	// Clip view for ordinary widgets; simply surfaces scroll notifications as a .NET event.
	class NormalClipView: NSClipView
	{
		public event EventHandler Scrolled;

		public override void ScrollToPoint (CGPoint newOrigin)
		{
			base.ScrollToPoint (newOrigin);
			if (Scrolled != null)
				Scrolled (this, EventArgs.Empty);
		}
	}
}
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
*/

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Fasterflect;
using QuantConnect.Algorithm;
using QuantConnect.Configuration;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Data.UniverseSelection;
using QuantConnect.Interfaces;
using QuantConnect.Lean.Engine.Alpha;
using QuantConnect.Lean.Engine.DataFeeds;
using QuantConnect.Lean.Engine.RealTime;
using QuantConnect.Lean.Engine.Results;
using QuantConnect.Lean.Engine.Server;
using QuantConnect.Lean.Engine.TransactionHandlers;
using QuantConnect.Logging;
using QuantConnect.Orders;
using QuantConnect.Packets;
using QuantConnect.Securities;
using QuantConnect.Util;
using QuantConnect.Securities.Option;
using QuantConnect.Securities.Volatility;
using QuantConnect.Util.RateLimit;

namespace QuantConnect.Lean.Engine
{
    /// <summary>
    /// Algorithm manager class executes the algorithm and generates and passes through the algorithm events.
    /// </summary>
    public class AlgorithmManager
    {
        private IAlgorithm _algorithm;          // attached by Run(); null until then
        private readonly object _lock;          // guards status transitions in SetStatus
        private readonly bool _liveMode;

        /// <summary>
        /// Publicly accessible algorithm status
        /// </summary>
        /// <remarks>Reports Running until an algorithm instance has been attached by Run.</remarks>
        public AlgorithmStatus State => _algorithm?.Status ?? AlgorithmStatus.Running;

        /// <summary>
        /// Public access to the currently running algorithm id.
        /// </summary>
        public string AlgorithmId { get; private set; }

        /// <summary>
        /// Provides the isolator with a function for verifying that we're not spending too much time in each
        /// algorithm manager time loop
        /// </summary>
        public AlgorithmTimeLimitManager TimeLimit { get; }

        /// <summary>
        /// Quit state flag for the running algorithm. When true the user has requested the backtest stops through a Quit() method.
        /// </summary>
        /// <seealso cref="QCAlgorithm.Quit(String)"/>
        public bool QuitState => State == AlgorithmStatus.Deleted;

        /// <summary>
        /// Gets the number of data points processed per second
        /// </summary>
        public long DataPoints { get; private set; }

        /// <summary>
        /// Initializes a new instance of the <see cref="AlgorithmManager"/> class
        /// </summary>
        /// <param name="liveMode">True if we're running in live mode, false for backtest mode</param>
        /// <param name="job">Provided by LEAN when creating a new algo manager. This is the job
        /// that the algo manager is about to execute. Research and other consumers can provide the
        /// default value of null</param>
        public AlgorithmManager(bool liveMode, AlgorithmNodePacket job = null)
        {
            AlgorithmId = "";
            _liveMode = liveMode;
            _lock = new object();

            // initialize the time limit manager
            TimeLimit = new AlgorithmTimeLimitManager(
                CreateTokenBucket(job?.Controls?.TrainingLimits),
                TimeSpan.FromMinutes(Config.GetDouble("algorithm-manager-time-loop-maximum", 20))
            );
        }

        /// <summary>
        /// Launch the algorithm manager to run this strategy
        /// </summary>
        /// <param name="job">Algorithm job</param>
        /// <param name="algorithm">Algorithm instance</param>
        /// <param name="synchronizer">Instance which implements <see cref="ISynchronizer"/>.
/// Used to stream the data</param>
        /// <param name="transactions">Transaction manager object</param>
        /// <param name="results">Result handler object</param>
        /// <param name="realtime">Realtime processing object</param>
        /// <param name="leanManager">ILeanManager implementation that is updated periodically with the IAlgorithm instance</param>
        /// <param name="alphas">Alpha handler used to process algorithm generated insights</param>
        /// <param name="token">Cancellation token</param>
        /// <remarks>Modify with caution</remarks>
        public void Run(AlgorithmNodePacket job, IAlgorithm algorithm, ISynchronizer synchronizer, ITransactionHandler transactions, IResultHandler results, IRealTimeHandler realtime, ILeanManager leanManager, IAlphaHandler alphas, CancellationToken token)
        {
            //Initialize:
            DataPoints = 0;
            _algorithm = algorithm;

            var backtestMode = (job.Type == PacketType.BacktestNode);
            var methodInvokers = new Dictionary<Type, MethodInvoker>();
            var marginCallFrequency = TimeSpan.FromMinutes(5);
            var nextMarginCallTime = DateTime.MinValue;
            var settlementScanFrequency = TimeSpan.FromMinutes(30);
            var nextSettlementScanTime = DateTime.MinValue;
            var time = algorithm.StartDate.Date;

            var pendingDelistings = new List<Delisting>();
            var splitWarnings = new List<Split>();

            //Initialize Properties:
            AlgorithmId = job.AlgorithmId;
            _algorithm.Status = AlgorithmStatus.Running;

            //Create the method accessors to push generic types into algorithm: Find all OnData events:

            // Algorithm 2.0 data accessors
            var hasOnDataTradeBars = AddMethodInvoker<TradeBars>(algorithm, methodInvokers);
            var hasOnDataQuoteBars = AddMethodInvoker<QuoteBars>(algorithm, methodInvokers);
            var hasOnDataOptionChains = AddMethodInvoker<OptionChains>(algorithm, methodInvokers);
            var hasOnDataTicks = AddMethodInvoker<Ticks>(algorithm, methodInvokers);

            // dividend and split events
            var hasOnDataDividends = AddMethodInvoker<Dividends>(algorithm, methodInvokers);
            var hasOnDataSplits = AddMethodInvoker<Splits>(algorithm, methodInvokers);
            var hasOnDataDelistings = AddMethodInvoker<Delistings>(algorithm, methodInvokers);
            var hasOnDataSymbolChangedEvents = AddMethodInvoker<SymbolChangedEvents>(algorithm, methodInvokers);

            //Go through the subscription types and create invokers to trigger the event handlers for each custom type:
            foreach (var config in algorithm.SubscriptionManager.Subscriptions)
            {
                //If type is a custom feed, check for a dedicated event handler
                if (config.IsCustomData)
                {
                    //Get the matching method for this event handler - e.g. public void OnData(Quandl data) { .. }
                    var genericMethod = (algorithm.GetType()).GetMethod("OnData", new[] { config.Type });

                    //If we already have this Type-handler then don't add it to invokers again.
                    if (methodInvokers.ContainsKey(config.Type)) continue;

                    if (genericMethod != null)
                    {
                        methodInvokers.Add(config.Type, genericMethod.DelegateForCallMethod());
                    }
                }
            }

            // Schedule a daily event for sampling at midnight every night
            algorithm.Schedule.On("Daily Sampling", algorithm.Schedule.DateRules.EveryDay(),
                algorithm.Schedule.TimeRules.Midnight, () =>
            {
                results.Sample(algorithm.UtcTime);
            });

            //Loop over the queues: get a data collection, then pass them all into relevant methods in the algorithm.
            Log.Trace("AlgorithmManager.Run(): Begin DataStream - Start: " + algorithm.StartDate + " Stop: " + algorithm.EndDate);
            foreach (var timeSlice in Stream(algorithm, synchronizer, results, token))
            {
                // reset our timer on each loop
                TimeLimit.StartNewTimeStep();

                //Check this backtest is still running:
                if (_algorithm.Status != AlgorithmStatus.Running && _algorithm.RunTimeError == null)
                {
                    Log.Error($"AlgorithmManager.Run(): Algorithm state changed to {_algorithm.Status} at {timeSlice.Time.ToStringInvariant()}");
                    break;
                }

                //Execute with TimeLimit Monitor:
                if (token.IsCancellationRequested)
                {
                    // NOTE(review): "CancellationRequestion" is a typo in the log message; it is a runtime
                    // string so it is preserved here — fix it upstream if the log text is not depended on.
                    Log.Error($"AlgorithmManager.Run(): CancellationRequestion at {timeSlice.Time.ToStringInvariant()}");
                    return;
                }

                // Update the ILeanManager
                leanManager.Update();

                time = timeSlice.Time;
                DataPoints += timeSlice.DataPointCount;

                if (backtestMode)
                {
                    // bankruptcy guard: a non-positive portfolio value terminates the backtest
                    if (algorithm.Portfolio.TotalPortfolioValue <= 0)
                    {
                        var logMessage = "AlgorithmManager.Run(): Portfolio value is less than or equal to zero, stopping algorithm.";
                        Log.Error(logMessage);
                        results.SystemDebugMessage(logMessage);
                        break;
                    }

                    // If backtesting, we need to check if there are realtime events in the past
                    // which didn't fire because at the scheduled times there was no data (i.e. markets closed)
                    // and fire them with the correct date/time.
                    realtime.ScanPastEvents(time);
                }

                //Set the algorithm and real time handler's time
                algorithm.SetDateTime(time);

                // the time pulse are just to advance algorithm time, lets shortcut the loop here
                if (timeSlice.IsTimePulse)
                {
                    continue;
                }

                // Update the current slice before firing scheduled events or any other task
                algorithm.SetCurrentSlice(timeSlice.Slice);

                if (timeSlice.Slice.SymbolChangedEvents.Count != 0)
                {
                    if (hasOnDataSymbolChangedEvents)
                    {
                        methodInvokers[typeof (SymbolChangedEvents)](algorithm, timeSlice.Slice.SymbolChangedEvents);
                    }
                    foreach (var symbol in timeSlice.Slice.SymbolChangedEvents.Keys)
                    {
                        // cancel all orders for the old symbol
                        foreach (var ticket in transactions.GetOpenOrderTickets(x => x.Symbol == symbol))
                        {
                            ticket.Cancel("Open order cancelled on symbol changed event");
                        }
                    }
                }

                if (timeSlice.SecurityChanges != SecurityChanges.None)
                {
                    foreach (var security in timeSlice.SecurityChanges.AddedSecurities)
                    {
                        security.IsTradable = true;

                        // uses TryAdd, so don't need to worry about duplicates here
                        algorithm.Securities.Add(security);
                    }

                    var activeSecurities = algorithm.UniverseManager.ActiveSecurities;
                    foreach (var security in timeSlice.SecurityChanges.RemovedSecurities)
                    {
                        if (!activeSecurities.ContainsKey(security.Symbol))
                        {
                            security.IsTradable = false;
                        }
                    }

                    leanManager.OnSecuritiesChanged(timeSlice.SecurityChanges);
                    realtime.OnSecuritiesChanged(timeSlice.SecurityChanges);
                    results.OnSecuritiesChanged(timeSlice.SecurityChanges);
                }

                //Update the securities properties: first before calling user code to avoid issues with data
                foreach (var update in timeSlice.SecuritiesUpdateData)
                {
                    var security = update.Target;

                    security.Update(update.Data, update.DataType, update.ContainsFillForwardData);

                    if (!update.IsInternalConfig)
                    {
                        // Send market price updates to the TradeBuilder
                        algorithm.TradeBuilder.SetMarketPrice(security.Symbol, security.Price);
                    }
                }

                //Update the securities properties with any universe data
                if (timeSlice.UniverseData.Count > 0)
                {
                    foreach (var kvp in timeSlice.UniverseData)
                    {
                        foreach (var data in kvp.Value.Data)
                        {
                            Security security;
                            if (algorithm.Securities.TryGetValue(data.Symbol, out security))
                            {
                                security.Cache.StoreData(new[] {data}, data.GetType());
                            }
                        }
                    }
                }

                // poke each cash object to update from the recent security data
                foreach (var cash in algorithm.Portfolio.CashBook.Values.Where(x => x.CurrencyConversion != null))
                {
                    cash.Update();
                }

                // security prices got updated
                algorithm.Portfolio.InvalidateTotalPortfolioValue();

                // process fill models on the updated data before entering algorithm, applies to all non-market orders
                transactions.ProcessSynchronousEvents();

                // fire real time events after we've updated based on the new data
                realtime.SetTime(timeSlice.Time);

                // process split warnings for options
                ProcessSplitSymbols(algorithm, splitWarnings, pendingDelistings);

                //Check if the user's signalled Quit: loop over data until day changes.
                if (_algorithm.Status != AlgorithmStatus.Running && _algorithm.RunTimeError == null)
                {
                    Log.Error($"AlgorithmManager.Run(): Algorithm state changed to {_algorithm.Status} at {timeSlice.Time.ToStringInvariant()}");
                    break;
                }
                if (algorithm.RunTimeError != null)
                {
                    Log.Error($"AlgorithmManager.Run(): Stopping, encountered a runtime error at {algorithm.UtcTime} UTC.");
                    return;
                }

                // perform margin calls, in live mode we can also use realtime to emit these
                // NOTE(review): the live-mode clause fires whenever the *next scheduled* call time is
                // still in the future relative to UtcNow — i.e. effectively every loop in live mode.
                // Looks intentional per the comment above, but worth confirming upstream.
                if (time >= nextMarginCallTime || (_liveMode && nextMarginCallTime > DateTime.UtcNow))
                {
                    // determine if there are possible margin call orders to be executed
                    bool issueMarginCallWarning;
                    var marginCallOrders = algorithm.Portfolio.MarginCallModel.GetMarginCallOrders(out issueMarginCallWarning);
                    if (marginCallOrders.Count != 0)
                    {
                        var executingMarginCall = false;
                        try
                        {
                            // tell the algorithm we're about to issue the margin call
                            algorithm.OnMarginCall(marginCallOrders);

                            executingMarginCall = true;

                            // execute the margin call orders
                            var executedTickets = algorithm.Portfolio.MarginCallModel.ExecuteMarginCall(marginCallOrders);
                            foreach (var ticket in executedTickets)
                            {
                                algorithm.Error($"{algorithm.Time.ToStringInvariant()} - Executed MarginCallOrder: {ticket.Symbol} - " +
                                    $"Quantity: {ticket.Quantity.ToStringInvariant()} @ {ticket.AverageFillPrice.ToStringInvariant()}"
                                );
                            }
                        }
                        catch (Exception err)
                        {
                            algorithm.SetRuntimeError(err, executingMarginCall ? "Portfolio.MarginCallModel.ExecuteMarginCall" : "OnMarginCall");
                            return;
                        }
                    }
                    // we didn't perform a margin call, but got the warning flag back, so issue the warning to the algorithm
                    else if (issueMarginCallWarning)
                    {
                        try
                        {
                            algorithm.OnMarginCallWarning();
                        }
                        catch (Exception err)
                        {
                            algorithm.SetRuntimeError(err, "OnMarginCallWarning");
                            return;
                        }
                    }

                    nextMarginCallTime = time + marginCallFrequency;
                }

                // perform check for settlement of unsettled funds
                if (time >= nextSettlementScanTime || (_liveMode && nextSettlementScanTime > DateTime.UtcNow))
                {
                    algorithm.Portfolio.ScanForCashSettlement(algorithm.UtcTime);

                    nextSettlementScanTime = time + settlementScanFrequency;
                }

                // before we call any events, let the algorithm know about universe changes
                if (timeSlice.SecurityChanges != SecurityChanges.None)
                {
                    try
                    {
                        var algorithmSecurityChanges = new SecurityChanges(timeSlice.SecurityChanges)
                        {
                            // by default for user code we want to filter out custom securities
                            FilterCustomSecurities = true,
                            // by default for user code we want to filter out internal securities
                            FilterInternalSecurities = true
                        };

                        algorithm.OnSecuritiesChanged(algorithmSecurityChanges);
                        algorithm.OnFrameworkSecuritiesChanged(algorithmSecurityChanges);
                    }
                    catch (Exception err)
                    {
                        algorithm.SetRuntimeError(err, "OnSecuritiesChanged");
                        return;
                    }
                }

                // apply dividends
                foreach (var dividend in timeSlice.Slice.Dividends.Values)
                {
                    Log.Debug($"AlgorithmManager.Run(): {algorithm.Time}: Applying Dividend: {dividend}");

                    Security security = null;
                    if (_liveMode && algorithm.Securities.TryGetValue(dividend.Symbol, out security))
                    {
                        Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Pre-Dividend: {dividend}. " +
                            $"Security Holdings: {security.Holdings.Quantity} Account Currency Holdings: " +
                            $"{algorithm.Portfolio.CashBook[algorithm.AccountCurrency].Amount}");
                    }

                    var mode = algorithm.SubscriptionManager.SubscriptionDataConfigService
                        .GetSubscriptionDataConfigs(dividend.Symbol)
                        .DataNormalizationMode();

                    // apply the dividend event to the portfolio
                    algorithm.Portfolio.ApplyDividend(dividend, _liveMode, mode);

                    if (_liveMode && security != null)
                    {
                        Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Post-Dividend: {dividend}. Security " +
                            $"Holdings: {security.Holdings.Quantity} Account Currency Holdings: " +
                            $"{algorithm.Portfolio.CashBook[algorithm.AccountCurrency].Amount}");
                    }
                }

                // apply splits
                foreach (var split in timeSlice.Slice.Splits.Values)
                {
                    try
                    {
                        // only process split occurred events (ignore warnings)
                        if (split.Type != SplitType.SplitOccurred)
                        {
                            continue;
                        }

                        Log.Debug($"AlgorithmManager.Run(): {algorithm.Time}: Applying Split for {split.Symbol}");

                        Security security = null;
                        if (_liveMode && algorithm.Securities.TryGetValue(split.Symbol, out security))
                        {
                            Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Pre-Split for {split}. Security Price: {security.Price} Holdings: {security.Holdings.Quantity}");
                        }

                        var mode = algorithm.SubscriptionManager.SubscriptionDataConfigService
                            .GetSubscriptionDataConfigs(split.Symbol)
                            .DataNormalizationMode();

                        // apply the split event to the portfolio
                        algorithm.Portfolio.ApplySplit(split, _liveMode, mode);

                        if (_liveMode && security != null)
                        {
                            Log.Trace($"AlgorithmManager.Run(): {algorithm.Time}: Post-Split for {split}. Security Price: {security.Price} Holdings: {security.Holdings.Quantity}");
                        }

                        // apply the split to open orders as well in raw mode, all other modes are split adjusted
                        if (_liveMode || mode == DataNormalizationMode.Raw)
                        {
                            // in live mode we always want to have our order match the order at the brokerage, so apply the split to the orders
                            var openOrders = transactions.GetOpenOrderTickets(ticket => ticket.Symbol == split.Symbol);
                            algorithm.BrokerageModel.ApplySplit(openOrders.ToList(), split);
                        }
                    }
                    catch (Exception err)
                    {
                        algorithm.SetRuntimeError(err, "Split event");
                        return;
                    }
                }

                //Update registered consolidators for this symbol index
                try
                {
                    if (timeSlice.ConsolidatorUpdateData.Count > 0)
                    {
                        var timeKeeper = algorithm.TimeKeeper;
                        foreach (var update in timeSlice.ConsolidatorUpdateData)
                        {
                            var localTime = timeKeeper.GetLocalTimeKeeper(update.Target.ExchangeTimeZone).LocalTime;
                            var consolidators = update.Target.Consolidators;
                            foreach (var consolidator in consolidators)
                            {
                                foreach (var dataPoint in update.Data)
                                {
                                    // only push data into consolidators on the native, subscribed to resolution
                                    if (EndTimeIsInNativeResolution(update.Target, dataPoint.EndTime))
                                    {
                                        consolidator.Update(dataPoint);
                                    }
                                }

                                // scan for time after we've pumped all the data through for this consolidator
                                consolidator.Scan(localTime);
                            }
                        }
                    }
                }
                catch (Exception err)
                {
                    algorithm.SetRuntimeError(err, "Consolidators update");
                    return;
                }

                // fire custom event handlers
                foreach (var update in timeSlice.CustomData)
                {
                    MethodInvoker methodInvoker;
                    if (!methodInvokers.TryGetValue(update.DataType, out methodInvoker))
                    {
                        continue;
                    }

                    try
                    {
                        foreach (var dataPoint in update.Data)
                        {
                            if (update.DataType.IsInstanceOfType(dataPoint))
                            {
                                methodInvoker(algorithm, dataPoint);
                            }
                        }
                    }
                    catch (Exception err)
                    {
                        algorithm.SetRuntimeError(err, "Custom Data");
                        return;
                    }
                }

                try
                {
                    // fire off the dividend and split events before pricing events
                    if (hasOnDataDividends && timeSlice.Slice.Dividends.Count != 0)
                    {
                        methodInvokers[typeof(Dividends)](algorithm, timeSlice.Slice.Dividends);
                    }
                    if (hasOnDataSplits && timeSlice.Slice.Splits.Count != 0)
                    {
                        methodInvokers[typeof(Splits)](algorithm, timeSlice.Slice.Splits);
                    }
                    if (hasOnDataDelistings && timeSlice.Slice.Delistings.Count != 0)
                    {
                        methodInvokers[typeof(Delistings)](algorithm, timeSlice.Slice.Delistings);
                    }
                }
                catch (Exception err)
                {
                    algorithm.SetRuntimeError(err, "Dividends/Splits/Delistings");
                    return;
                }

                // Only track pending delistings in non-live mode.
                if (!algorithm.LiveMode)
                {
                    // Keep this up to date even though we don't process delistings here anymore
                    foreach(var delisting in timeSlice.Slice.Delistings.Values)
                    {
                        if (delisting.Type == DelistingType.Warning)
                        {
                            // Store our delistings warnings because they are still used by ProcessSplitSymbols above
                            pendingDelistings.Add(delisting);
                        }
                        else
                        {
                            // If we have an actual delisting event, remove it from pending delistings
                            var index = pendingDelistings.FindIndex(x => x.Symbol == delisting.Symbol);
                            if (index != -1)
                            {
                                pendingDelistings.RemoveAt(index);
                            }
                        }
                    }
                }

                // run split logic after firing split events
                HandleSplitSymbols(timeSlice.Slice.Splits, splitWarnings);

                //After we've fired all other events in this second, fire the pricing events:
                try
                {
                    if (hasOnDataTradeBars && timeSlice.Slice.Bars.Count > 0) methodInvokers[typeof(TradeBars)](algorithm, timeSlice.Slice.Bars);
                    if (hasOnDataQuoteBars && timeSlice.Slice.QuoteBars.Count > 0) methodInvokers[typeof(QuoteBars)](algorithm, timeSlice.Slice.QuoteBars);
                    if (hasOnDataOptionChains && timeSlice.Slice.OptionChains.Count > 0) methodInvokers[typeof(OptionChains)](algorithm, timeSlice.Slice.OptionChains);
                    if (hasOnDataTicks && timeSlice.Slice.Ticks.Count > 0) methodInvokers[typeof(Ticks)](algorithm, timeSlice.Slice.Ticks);
                }
                catch (Exception err)
                {
                    algorithm.SetRuntimeError(err, "methodInvokers");
                    return;
                }

                try
                {
                    if (timeSlice.Slice.HasData)
                    {
                        // EVENT HANDLER v3.0 -- all data in a single event
                        algorithm.OnData(timeSlice.Slice);
                    }

                    // always turn the crank on this method to ensure universe selection models function properly on day changes w/out data
                    algorithm.OnFrameworkData(timeSlice.Slice);
                }
                catch (Exception err)
                {
                    algorithm.SetRuntimeError(err, "OnData");
                    return;
                }

                //If its the historical/paper trading models, wait until market orders have been "filled"
                // Manually trigger the event handler to prevent thread switch.
                transactions.ProcessSynchronousEvents();

                // sample alpha charts now that we've updated time/price information and after transactions
                // are processed so that insights closed because of new order based insights get updated
                alphas.ProcessSynchronousEvents();

                // send the alpha statistics to the result handler for storage/transmit with the result packets
                results.SetAlphaRuntimeStatistics(alphas.RuntimeStatistics);

                // Process any required events of the results handler such as sampling assets, equity, or stock prices.
                results.ProcessSynchronousEvents();

                // poke the algorithm at the end of each time step
                algorithm.OnEndOfTimeStep();

            } // End of ForEach feed.Bridge.GetConsumingEnumerable

            // stop timing the loops
            TimeLimit.StopEnforcingTimeLimit();

            //Stream over:: Send the final packet and fire final events:
            Log.Trace("AlgorithmManager.Run(): Firing On End Of Algorithm...");
            try
            {
                algorithm.OnEndOfAlgorithm();
            }
            catch (Exception err)
            {
                algorithm.SetRuntimeError(err, "OnEndOfAlgorithm");
                return;
            }

            // final processing now that the algorithm has completed
            alphas.ProcessSynchronousEvents();

            // send the final alpha statistics to the result handler for storage/transmit with the result packets
            results.SetAlphaRuntimeStatistics(alphas.RuntimeStatistics);

            // Process any required events of the results handler such as sampling assets, equity, or stock prices.
            results.ProcessSynchronousEvents(forceProcess: true);

            //Liquidate Holdings for Calculations:
            if (_algorithm.Status == AlgorithmStatus.Liquidated && _liveMode)
            {
                Log.Trace("AlgorithmManager.Run(): Liquidating algorithm holdings...");
                algorithm.Liquidate();
                results.LogMessage("Algorithm Liquidated");
                results.SendStatusUpdate(AlgorithmStatus.Liquidated);
            }

            //Manually stopped the algorithm
            if (_algorithm.Status == AlgorithmStatus.Stopped)
            {
                Log.Trace("AlgorithmManager.Run(): Stopping algorithm...");
                results.LogMessage("Algorithm Stopped");
                results.SendStatusUpdate(AlgorithmStatus.Stopped);
            }

            //Backtest deleted.
            if (_algorithm.Status == AlgorithmStatus.Deleted)
            {
                Log.Trace("AlgorithmManager.Run(): Deleting algorithm...");
                results.DebugMessage("Algorithm Id:(" + job.AlgorithmId + ") Deleted by request.");
                results.SendStatusUpdate(AlgorithmStatus.Deleted);
            }

            //Algorithm finished, send regardless of commands:
            results.SendStatusUpdate(AlgorithmStatus.Completed);
            SetStatus(AlgorithmStatus.Completed);

            //Take final samples:
            results.Sample(time);

        } // End of Run();

        /// <summary>
        /// Set the quit state.
        /// </summary>
        public void SetStatus(AlgorithmStatus state)
        {
            lock (_lock)
            {
                //We don't want anyone else to set our internal state to "Running".
                //This is controlled by the algorithm private variable only.
                //Algorithm could be null after it's initialized and they call Run on us
                if (state != AlgorithmStatus.Running && _algorithm != null)
                {
                    _algorithm.SetStatus(state);
                }
            }
        }

        // Streams warmup history followed by live/backtest data as TimeSlices.
        // NOTE(review): this method continues past the end of this chunk — only its head is visible here.
        private IEnumerable<TimeSlice> Stream(IAlgorithm algorithm, ISynchronizer synchronizer, IResultHandler results, CancellationToken cancellationToken)
        {
            bool setStartTime = false;
            var timeZone = algorithm.TimeZone;
            var history = algorithm.HistoryProvider;

            // fulfilling history requirements of volatility models in live mode
            if (algorithm.LiveMode)
            {
                ProcessVolatilityHistoryRequirements(algorithm);
            }

            // get the required history job from the algorithm
            DateTime?
lastHistoryTimeUtc = null; var historyRequests = algorithm.GetWarmupHistoryRequests().ToList(); // initialize variables for progress computation var warmUpStartTicks = DateTime.UtcNow.Ticks; var nextStatusTime = DateTime.UtcNow.AddSeconds(1); var minimumIncrement = algorithm.UniverseManager .Select(x => x.Value.UniverseSettings?.Resolution.ToTimeSpan() ?? algorithm.UniverseSettings.Resolution.ToTimeSpan()) .DefaultIfEmpty(Time.OneSecond) .Min(); minimumIncrement = minimumIncrement == TimeSpan.Zero ? Time.OneSecond : minimumIncrement; if (historyRequests.Count != 0) { // rewrite internal feed requests var subscriptions = algorithm.SubscriptionManager.Subscriptions.Where(x => !x.IsInternalFeed).ToList(); var minResolution = subscriptions.Count > 0 ? subscriptions.Min(x => x.Resolution) : Resolution.Second; foreach (var request in historyRequests) { Security security; if (algorithm.Securities.TryGetValue(request.Symbol, out security) && security.IsInternalFeed()) { if (request.Resolution < minResolution) { request.Resolution = minResolution; request.FillForwardResolution = request.FillForwardResolution.HasValue ? minResolution : (Resolution?) 
null; } } } // rewrite all to share the same fill forward resolution if (historyRequests.Any(x => x.FillForwardResolution.HasValue)) { minResolution = historyRequests.Where(x => x.FillForwardResolution.HasValue).Min(x => x.FillForwardResolution.Value); foreach (var request in historyRequests.Where(x => x.FillForwardResolution.HasValue)) { request.FillForwardResolution = minResolution; } } foreach (var request in historyRequests) { warmUpStartTicks = Math.Min(request.StartTimeUtc.Ticks, warmUpStartTicks); Log.Trace($"AlgorithmManager.Stream(): WarmupHistoryRequest: {request.Symbol}: Start: {request.StartTimeUtc} End: {request.EndTimeUtc} Resolution: {request.Resolution}"); } var timeSliceFactory = new TimeSliceFactory(timeZone); // make the history request and build time slices foreach (var slice in history.GetHistory(historyRequests, timeZone)) { TimeSlice timeSlice; try { // we need to recombine this slice into a time slice var paired = new List<DataFeedPacket>(); foreach (var symbol in slice.Keys) { var security = algorithm.Securities[symbol]; var data = slice[symbol]; var list = new List<BaseData>(); Type dataType; var ticks = data as List<Tick>; if (ticks != null) { list.AddRange(ticks); dataType = typeof(Tick); } else { list.Add(data); dataType = data.GetType(); } var config = algorithm.SubscriptionManager.SubscriptionDataConfigService .GetSubscriptionDataConfigs(symbol, includeInternalConfigs: true) .FirstOrDefault(subscription => dataType.IsAssignableFrom(subscription.Type)); if (config == null) { throw new Exception($"A data subscription for type '{dataType.Name}' was not found."); } paired.Add(new DataFeedPacket(security, config, list)); } timeSlice = timeSliceFactory.Create(slice.Time.ConvertToUtc(timeZone), paired, SecurityChanges.None, new Dictionary<Universe, BaseDataCollection>()); } catch (Exception err) { algorithm.SetRuntimeError(err, $"Warmup history request. 
Slice.Time {slice.Time}"); yield break; } if (timeSlice != null) { if (!setStartTime) { setStartTime = true; algorithm.Debug("Algorithm warming up..."); } if (DateTime.UtcNow > nextStatusTime) { // send some status to the user letting them know we're done history, but still warming up, // catching up to real time data nextStatusTime = DateTime.UtcNow.AddSeconds(1); var percent = (int)(100 * (timeSlice.Time.Ticks - warmUpStartTicks) / (double)(DateTime.UtcNow.Ticks - warmUpStartTicks)); results.SendStatusUpdate(AlgorithmStatus.History, $"Catching up to realtime {percent}%..."); } yield return timeSlice; lastHistoryTimeUtc = timeSlice.Time; } } } // if we're not live or didn't event request warmup, then set us as not warming up if (!algorithm.LiveMode || historyRequests.Count == 0) { algorithm.SetFinishedWarmingUp(); if (historyRequests.Count != 0) { algorithm.Debug("Algorithm finished warming up."); Log.Trace("AlgorithmManager.Stream(): Finished warmup"); } } foreach (var timeSlice in synchronizer.StreamData(cancellationToken)) { if (algorithm.LiveMode && algorithm.IsWarmingUp) { if (timeSlice.IsTimePulse) { continue; } // this is hand-over logic, we spin up the data feed first and then request // the history for warmup, so there will be some overlap between the data if (lastHistoryTimeUtc.HasValue) { // make sure there's no historical data, this only matters for the handover var hasHistoricalData = false; foreach (var data in timeSlice.Slice.Ticks.Values.SelectMany(x => x).Concat<BaseData>(timeSlice.Slice.Bars.Values)) { // check if any ticks in the list are on or after our last warmup point, if so, skip this data if (data.EndTime.ConvertToUtc(algorithm.Securities[data.Symbol].Exchange.TimeZone) >= lastHistoryTimeUtc) { hasHistoricalData = true; break; } } if (hasHistoricalData) { continue; } // prevent us from doing these checks every loop lastHistoryTimeUtc = null; } // in live mode wait to mark us as finished warming up when // the data feed has caught up to now 
// within the min increment
if (timeSlice.Time > DateTime.UtcNow.Subtract(minimumIncrement))
{
    algorithm.SetFinishedWarmingUp();
    algorithm.Debug("Algorithm finished warming up.");
    Log.Trace("AlgorithmManager.Stream(): Finished warmup");
}
else if (DateTime.UtcNow > nextStatusTime)
{
    // send some status to the user letting them know we're done history, but still warming up,
    // catching up to real time data
    nextStatusTime = DateTime.UtcNow.AddSeconds(1);
    var percent = (int) (100*(timeSlice.Time.Ticks - warmUpStartTicks)/(double) (DateTime.UtcNow.Ticks - warmUpStartTicks));
    results.SendStatusUpdate(AlgorithmStatus.History, $"Catching up to realtime {percent}%...");
}
                }
                yield return timeSlice;
            }
        }

        /// <summary>
        /// Helper method used to process securities volatility history requirements.
        /// For every security with a non-null volatility model, feeds the model the
        /// historical trade bars it declares it needs via GetHistoryRequirements.
        /// </summary>
        /// <remarks>Implemented as static to facilitate testing</remarks>
        /// <param name="algorithm">The algorithm instance</param>
        public static void ProcessVolatilityHistoryRequirements(IAlgorithm algorithm)
        {
            Log.Trace("ProcessVolatilityHistoryRequirements(): Updating volatility models with historical data...");

            foreach (var kvp in algorithm.Securities)
            {
                var security = kvp.Value;

                if (security.VolatilityModel != VolatilityModel.Null)
                {
                    // start: this is a work around to maintain retro compatibility
                    // did not want to add IVolatilityModel.SetSubscriptionDataConfigProvider
                    // to prevent breaking existing user models.
                    var baseType = security.VolatilityModel as BaseVolatilityModel;
                    baseType?.SetSubscriptionDataConfigProvider(
                        algorithm.SubscriptionManager.SubscriptionDataConfigService);
                    // end

                    var historyReq = security.VolatilityModel.GetHistoryRequirements(security, algorithm.UtcTime);

                    if (historyReq != null && algorithm.HistoryProvider != null)
                    {
                        var history = algorithm.HistoryProvider.GetHistory(historyReq, algorithm.TimeZone);
                        if (history != null)
                        {
                            foreach (var slice in history)
                            {
                                // single dictionary lookup instead of ContainsKey + indexer
                                if (slice.Bars.TryGetValue(security.Symbol, out var bar))
                                {
                                    security.VolatilityModel.Update(security, bar);
                                }
                            }
                        }
                    }
                }
            }

            Log.Trace("ProcessVolatilityHistoryRequirements(): finished.");
        }

        /// <summary>
        /// Adds a method invoker if the method exists to the method invokers dictionary
        /// </summary>
        /// <typeparam name="T">The data type to check for 'OnData(T data)</typeparam>
        /// <param name="algorithm">The algorithm instance</param>
        /// <param name="methodInvokers">The dictionary of method invokers</param>
        /// <param name="methodName">The name of the method to search for</param>
        /// <returns>True if the method existed and was added to the collection</returns>
        private bool AddMethodInvoker<T>(IAlgorithm algorithm, Dictionary<Type, MethodInvoker> methodInvokers, string methodName = "OnData")
        {
            var newSplitMethodInfo = algorithm.GetType().GetMethod(methodName, new[] {typeof (T)});
            if (newSplitMethodInfo != null)
            {
                methodInvokers.Add(typeof(T), newSplitMethodInfo.DelegateForCallMethod());
                return true;
            }
            return false;
        }

        /// <summary>
        /// Keeps track of split warnings so we can later liquidate option contracts
        /// </summary>
        private void HandleSplitSymbols(Splits newSplits, List<Split> splitWarnings)
        {
            foreach (var split in newSplits.Values)
            {
                if (split.Type != SplitType.Warning)
                {
                    Log.Trace($"AlgorithmManager.HandleSplitSymbols(): {_algorithm.Time} - Security split occurred: Split Factor: {split} Reference Price: {split.ReferencePrice}");
                    continue;
                }

                Log.Trace($"AlgorithmManager.HandleSplitSymbols(): {_algorithm.Time} - Security split warning: {split}");

                // only track one outstanding warning per symbol
                if (!splitWarnings.Any(x => x.Symbol == split.Symbol && x.Type == SplitType.Warning))
                {
                    splitWarnings.Add(split);
                }
            }
        }

        /// <summary>
        /// Liquidate option contact holdings who's underlying security has split
        /// </summary>
        private void ProcessSplitSymbols(IAlgorithm algorithm, List<Split> splitWarnings, List<Delisting> pendingDelistings)
        {
            // NOTE: This method assumes option contracts have the same core trading hours as their underlying contract
            //       This is a small performance optimization to prevent scanning every contract on every time step,
            //       instead we scan just the underlyings, thereby reducing the time footprint of this method by a factor
            //       of N, the number of derivative subscriptions
            for (int i = splitWarnings.Count - 1; i >= 0; i--)
            {
                var split = splitWarnings[i];
                var security = algorithm.Securities[split.Symbol];

                if (!security.IsTradable
                    && !algorithm.UniverseManager.ActiveSecurities.Keys.Contains(split.Symbol))
                {
                    Log.Debug($"AlgorithmManager.ProcessSplitSymbols(): {_algorithm.Time} - Removing split warning for {security.Symbol}");

                    // remove the warning from our list
                    splitWarnings.RemoveAt(i);
                    // Since we are storing the split warnings for a loop
                    // we need to check if the security was removed.
                    // When removed, it will be marked as non tradable but just in case
                    // we expect it not to be an active security either
                    continue;
                }

                var nextMarketClose = security.Exchange.Hours.GetNextMarketClose(security.LocalTime, false);

                // determine the latest possible time we can submit a MOC order
                var configs = algorithm.SubscriptionManager.SubscriptionDataConfigService
                    .GetSubscriptionDataConfigs(security.Symbol);

                if (configs.Count == 0)
                {
                    // should never happen at this point, if it does let's give some extra info
                    throw new Exception(
                        $"AlgorithmManager.ProcessSplitSymbols(): {_algorithm.Time} - No subscriptions found for {security.Symbol}" +
                        $", IsTradable: {security.IsTradable}" +
                        $", Active: {algorithm.UniverseManager.ActiveSecurities.Keys.Contains(split.Symbol)}");
                }

                var latestMarketOnCloseTimeRoundedDownByResolution = nextMarketClose.Subtract(MarketOnCloseOrder.SubmissionTimeBuffer)
                    .RoundDownInTimeZone(configs.GetHighestResolution().ToTimeSpan(), security.Exchange.TimeZone, configs.First().DataTimeZone);

                // we don't need to do anything until the market closes
                if (security.LocalTime < latestMarketOnCloseTimeRoundedDownByResolution) continue;

                // fetch all option derivatives of the underlying with holdings (excluding the canonical security)
                var derivatives = algorithm.Securities.Where(kvp => kvp.Key.HasUnderlying &&
                    kvp.Key.SecurityType.IsOption() &&
                    kvp.Key.Underlying == security.Symbol &&
                    !kvp.Key.Underlying.IsCanonical() &&
                    kvp.Value.HoldStock
                );

                foreach (var kvp in derivatives)
                {
                    var optionContractSymbol = kvp.Key;
                    var optionContractSecurity = (Option)kvp.Value;

                    if (pendingDelistings.Any(x => x.Symbol == optionContractSymbol
                        && x.Time.Date == optionContractSecurity.LocalTime.Date))
                    {
                        // if the option is going to be delisted today we skip sending the market on close order
                        continue;
                    }

                    // close any open orders
                    algorithm.Transactions.CancelOpenOrders(optionContractSymbol, "Canceled due to impending split. Separate MarketOnClose order submitted to liquidate position.");

                    var request = new SubmitOrderRequest(OrderType.MarketOnClose, optionContractSecurity.Type, optionContractSymbol,
                        -optionContractSecurity.Holdings.Quantity, 0, 0, algorithm.UtcTime,
                        "Liquidated due to impending split. Option splits are not currently supported."
                    );

                    // send MOC order to liquidate option contract holdings
                    algorithm.Transactions.AddOrder(request);

                    // mark option contract as not tradable
                    optionContractSecurity.IsTradable = false;

                    algorithm.Debug($"MarketOnClose order submitted for option contract '{optionContractSymbol}' due to impending {split.Symbol.Value} split event. " +
                        "Option splits are not currently supported.");
                }

                // remove the warning from our list
                splitWarnings.RemoveAt(i);
            }
        }

        /// <summary>
        /// Determines if a data point is in it's native, configured resolution
        /// </summary>
        private static bool EndTimeIsInNativeResolution(SubscriptionDataConfig config, DateTime dataPointEndTime)
        {
            if (config.Resolution == Resolution.Tick ||
                // time zones don't change seconds or milliseconds so we can
                // shortcut timezone conversions
                (config.Resolution == Resolution.Second || config.Resolution == Resolution.Minute) &&
                dataPointEndTime.Ticks % config.Increment.Ticks == 0)
            {
                return true;
            }

            var roundedDataPointEndTime = dataPointEndTime.RoundDownInTimeZone(config.Increment, config.ExchangeTimeZone, config.DataTimeZone);
            return dataPointEndTime == roundedDataPointEndTime;
        }

        /// <summary>
        /// Constructs the correct <see cref="ITokenBucket"/> instance per the provided controls.
        /// The provided controls will be null when the manager is initialized outside of LEAN
        /// (for example, from unit tests or Research) — TODO confirm all null-callers.
        /// </summary>
        private static ITokenBucket CreateTokenBucket(LeakyBucketControlParameters controls)
        {
            if (controls == null)
            {
                // this will only be null when the AlgorithmManager is being initialized outside of LEAN
                // for example, in unit tests that don't provide a job package as well as from Research
                // in each of the above cases, it seems best to not enforce the leaky bucket restrictions
                return TokenBucket.Null;
            }

            Log.Trace("AlgorithmManager.CreateTokenBucket(): Initializing LeakyBucket: " +
                $"Capacity: {controls.Capacity} " +
                $"RefillAmount: {controls.RefillAmount} " +
                $"TimeInterval: {controls.TimeIntervalMinutes}"
            );

            // these parameters view 'minutes' as the resource being rate limited. the capacity is the total
            // number of minutes available for burst operations and after controls.TimeIntervalMinutes time
            // has passed, we'll add controls.RefillAmount to the 'minutes' available, maxing at controls.Capacity
            return new LeakyBucket(
                controls.Capacity,
                controls.RefillAmount,
                TimeSpan.FromMinutes(controls.TimeIntervalMinutes)
            );
        }
    }
}
// /*
// * Copyright (c) 2016, Alachisoft. All Rights Reserved.
// *
// * Licensed under the Apache License, Version 2.0 (the "License");
// * you may not use this file except in compliance with the License.
// * You may obtain a copy of the License at
// *
// * http://www.apache.org/licenses/LICENSE-2.0
// *
// * Unless required by applicable law or agreed to in writing, software
// * distributed under the License is distributed on an "AS IS" BASIS,
// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// * See the License for the specific language governing permissions and
// * limitations under the License.
// */

// ---------------------------------------------------------------------------
// GENERATED CODE — produced by the Google ProtocolBuffers C# code generator
// from AuthenticationToken.proto (message with two optional fields:
// int32 status = 1, bytes token = 2). Do not hand-edit; regenerate from the
// .proto file instead.
// ---------------------------------------------------------------------------
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.ProtocolBuffers;
using pbc = global::Google.ProtocolBuffers.Collections;
using pbd = global::Google.ProtocolBuffers.Descriptors;
using scg = global::System.Collections.Generic;
namespace Alachisoft.NosDB.Common.Protobuf {
  namespace Proto {

    // Holder for the reflection descriptor of the AuthenticationToken message.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    public static partial class AuthenticationToken {

      #region Extension registration
      public static void RegisterAllExtensions(pb::ExtensionRegistry registry) {
      }
      #endregion
      #region Static variables
      internal static pbd::MessageDescriptor internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__Descriptor;
      internal static pb::FieldAccess.FieldAccessorTable<global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken, global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken.Builder> internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__FieldAccessorTable;
      #endregion
      #region Descriptor
      public static pbd::FileDescriptor Descriptor {
        get { return descriptor; }
      }
      private static pbd::FileDescriptor descriptor;

      static AuthenticationToken() {
        // Base64-encoded serialized FileDescriptorProto for AuthenticationToken.proto
        byte[] descriptorData = global::System.Convert.FromBase64String(
            string.Concat(
              "ChlBdXRoZW50aWNhdGlvblRva2VuLnByb3RvEiBBbGFjaGlzb2Z0Lk5vc0RC",
              "LkNvbW1vbi5Qcm90b2J1ZiI0ChNBdXRoZW50aWNhdGlvblRva2VuEg4KBnN0",
              "YXR1cxgBIAEoBRINCgV0b2tlbhgCIAEoDEJDCiRjb20uYWxhY2hpc29mdC5u",
              "b3NkYi5jb21tb24ucHJvdG9idWZCG0F1dGhlbnRpY2F0aW9uVG9rZW5Qcm90",
              "b2NvbA=="));
        pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) {
          descriptor = root;
          internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__Descriptor = Descriptor.MessageTypes[0];
          internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__FieldAccessorTable =
              new pb::FieldAccess.FieldAccessorTable<global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken, global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken.Builder>(internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__Descriptor,
                  new string[] { "Status", "Token", });
          return null;
        };
        pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
            new pbd::FileDescriptor[] { }, assigner);
      }
      #endregion

    }
  }
  #region Messages

  // Immutable message with builder; fields: Status (int32, tag 1), Token (bytes, tag 2).
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class AuthenticationToken : pb::GeneratedMessage<AuthenticationToken, AuthenticationToken.Builder> {
    private AuthenticationToken() { }
    private static readonly AuthenticationToken defaultInstance = new AuthenticationToken().MakeReadOnly();
    private static readonly string[] _authenticationTokenFieldNames = new string[] { "status", "token" };
    private static readonly uint[] _authenticationTokenFieldTags = new uint[] { 8, 18 };
    public static AuthenticationToken DefaultInstance {
      get { return defaultInstance; }
    }

    public override AuthenticationToken DefaultInstanceForType {
      get { return DefaultInstance; }
    }

    protected override AuthenticationToken ThisMessage {
      get { return this; }
    }

    public static pbd::MessageDescriptor Descriptor {
      get { return global::Alachisoft.NosDB.Common.Protobuf.Proto.AuthenticationToken.internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__Descriptor; }
    }

    protected override pb::FieldAccess.FieldAccessorTable<AuthenticationToken, AuthenticationToken.Builder> InternalFieldAccessors {
      get { return global::Alachisoft.NosDB.Common.Protobuf.Proto.AuthenticationToken.internal__static_Alachisoft_NosDB_Common_Protobuf_AuthenticationToken__FieldAccessorTable; }
    }

    public const int StatusFieldNumber = 1;
    private bool hasStatus;
    private int status_;
    public bool HasStatus {
      get { return hasStatus; }
    }
    public int Status {
      get { return status_; }
    }

    public const int TokenFieldNumber = 2;
    private bool hasToken;
    private pb::ByteString token_ = pb::ByteString.Empty;
    public bool HasToken {
      get { return hasToken; }
    }
    public pb::ByteString Token {
      get { return token_; }
    }

    public override bool IsInitialized {
      get {
        return true;
      }
    }

    public override void WriteTo(pb::ICodedOutputStream output) {
      CalcSerializedSize();
      string[] field_names = _authenticationTokenFieldNames;
      if (hasStatus) {
        output.WriteInt32(1, field_names[0], Status);
      }
      if (hasToken) {
        output.WriteBytes(2, field_names[1], Token);
      }
      UnknownFields.WriteTo(output);
    }

    private int memoizedSerializedSize = -1;
    public override int SerializedSize {
      get {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
        return CalcSerializedSize();
      }
    }

    private int CalcSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (hasStatus) {
        size += pb::CodedOutputStream.ComputeInt32Size(1, Status);
      }
      if (hasToken) {
        size += pb::CodedOutputStream.ComputeBytesSize(2, Token);
      }
      size += UnknownFields.SerializedSize;
      memoizedSerializedSize = size;
      return size;
    }

    public static AuthenticationToken ParseFrom(pb::ByteString data) {
      return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(byte[] data) {
      return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(global::System.IO.Stream input) {
      return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
    }
    public static AuthenticationToken ParseDelimitedFrom(global::System.IO.Stream input) {
      return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
    }
    public static AuthenticationToken ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
      return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(pb::ICodedInputStream input) {
      return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
    }
    public static AuthenticationToken ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
    }
    private AuthenticationToken MakeReadOnly() {
      return this;
    }

    public static Builder CreateBuilder() { return new Builder(); }
    public override Builder ToBuilder() { return CreateBuilder(this); }
    public override Builder CreateBuilderForType() { return new Builder(); }
    public static Builder CreateBuilder(AuthenticationToken prototype) {
      return new Builder(prototype);
    }

    // Mutable builder; copy-on-write against the read-only result instance.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    public sealed partial class Builder : pb::GeneratedBuilder<AuthenticationToken, Builder> {
      protected override Builder ThisBuilder {
        get { return this; }
      }
      public Builder() {
        result = DefaultInstance;
        resultIsReadOnly = true;
      }
      internal Builder(AuthenticationToken cloneFrom) {
        result = cloneFrom;
        resultIsReadOnly = true;
      }

      private bool resultIsReadOnly;
      private AuthenticationToken result;

      // Clones the shared read-only result before any mutation (copy-on-write).
      private AuthenticationToken PrepareBuilder() {
        if (resultIsReadOnly) {
          AuthenticationToken original = result;
          result = new AuthenticationToken();
          resultIsReadOnly = false;
          MergeFrom(original);
        }
        return result;
      }

      public override bool IsInitialized {
        get { return result.IsInitialized; }
      }

      protected override AuthenticationToken MessageBeingBuilt {
        get { return PrepareBuilder(); }
      }

      public override Builder Clear() {
        result = DefaultInstance;
        resultIsReadOnly = true;
        return this;
      }

      public override Builder Clone() {
        if (resultIsReadOnly) {
          return new Builder(result);
        } else {
          return new Builder().MergeFrom(result);
        }
      }

      public override pbd::MessageDescriptor DescriptorForType {
        get { return global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken.Descriptor; }
      }

      public override AuthenticationToken DefaultInstanceForType {
        get { return global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken.DefaultInstance; }
      }

      public override AuthenticationToken BuildPartial() {
        if (resultIsReadOnly) {
          return result;
        }
        resultIsReadOnly = true;
        return result.MakeReadOnly();
      }

      public override Builder MergeFrom(pb::IMessage other) {
        if (other is AuthenticationToken) {
          return MergeFrom((AuthenticationToken) other);
        } else {
          base.MergeFrom(other);
          return this;
        }
      }

      public override Builder MergeFrom(AuthenticationToken other) {
        if (other == global::Alachisoft.NosDB.Common.Protobuf.AuthenticationToken.DefaultInstance) return this;
        PrepareBuilder();
        if (other.HasStatus) {
          Status = other.Status;
        }
        if (other.HasToken) {
          Token = other.Token;
        }
        this.MergeUnknownFields(other.UnknownFields);
        return this;
      }

      public override Builder MergeFrom(pb::ICodedInputStream input) {
        return MergeFrom(input, pb::ExtensionRegistry.Empty);
      }

      public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
        PrepareBuilder();
        pb::UnknownFieldSet.Builder unknownFields = null;
        uint tag;
        string field_name;
        while (input.ReadTag(out tag, out field_name)) {
          // tag == 0 with a field name means text-format input; map name -> wire tag
          if(tag == 0 && field_name != null) {
            int field_ordinal = global::System.Array.BinarySearch(_authenticationTokenFieldNames, field_name, global::System.StringComparer.Ordinal);
            if(field_ordinal >= 0)
              tag = _authenticationTokenFieldTags[field_ordinal];
            else {
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
              continue;
            }
          }
          switch (tag) {
            case 0: {
              throw pb::InvalidProtocolBufferException.InvalidTag();
            }
            default: {
              if (pb::WireFormat.IsEndGroupTag(tag)) {
                if (unknownFields != null) {
                  this.UnknownFields = unknownFields.Build();
                }
                return this;
              }
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
              break;
            }
            case 8: {
              result.hasStatus = input.ReadInt32(ref result.status_);
              break;
            }
            case 18: {
              result.hasToken = input.ReadBytes(ref result.token_);
              break;
            }
          }
        }

        if (unknownFields != null) {
          this.UnknownFields = unknownFields.Build();
        }
        return this;
      }


      public bool HasStatus {
        get { return result.hasStatus; }
      }
      public int Status {
        get { return result.Status; }
        set { SetStatus(value); }
      }
      public Builder SetStatus(int value) {
        PrepareBuilder();
        result.hasStatus = true;
        result.status_ = value;
        return this;
      }
      public Builder ClearStatus() {
        PrepareBuilder();
        result.hasStatus = false;
        result.status_ = 0;
        return this;
      }

      public bool HasToken {
        get { return result.hasToken; }
      }
      public pb::ByteString Token {
        get { return result.Token; }
        set { SetToken(value); }
      }
      public Builder SetToken(pb::ByteString value) {
        pb::ThrowHelper.ThrowIfNull(value, "value");
        PrepareBuilder();
        result.hasToken = true;
        result.token_ = value;
        return this;
      }
      public Builder ClearToken() {
        PrepareBuilder();
        result.hasToken = false;
        result.token_ = pb::ByteString.Empty;
        return this;
      }
    }
    static AuthenticationToken() {
      object.ReferenceEquals(global::Alachisoft.NosDB.Common.Protobuf.Proto.AuthenticationToken.Descriptor, null);
    }
  }

  #endregion

}

#endregion Designer generated code
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Xunit;

namespace ShopifySharp.Tests
{
    [Trait("Category", "DraftOrder")]
    public class DraftOrder_Tests : IClassFixture<DraftOrder_Tests_Fixture>
    {
        private DraftOrder_Tests_Fixture Fixture { get; }

        public DraftOrder_Tests(DraftOrder_Tests_Fixture fixture)
        {
            this.Fixture = fixture;
        }

        [Fact]
        public async Task Counts_DraftOrders()
        {
            var count = await Fixture.Service.CountAsync();

            Assert.True(count > 0);
        }

        [Fact]
        public async Task Lists_DraftOrders()
        {
            var list = await Fixture.Service.ListAsync();

            Assert.True(list.Items.Count() > 0);
        }

        [Fact]
        public async Task Deletes_DraftOrders()
        {
            // skipAddToCreateList: true — this test deletes the object itself,
            // so the fixture must not try to delete it again on teardown
            var created = await Fixture.Create(true);
            bool threw = false;

            try
            {
                await Fixture.Service.DeleteAsync(created.Id.Value);
            }
            catch (ShopifyException ex)
            {
                Console.WriteLine($"{nameof(Deletes_DraftOrders)} failed. {ex.Message}");

                threw = true;
            }

            Assert.False(threw);
        }

        [Fact]
        public async Task Gets_DraftOrders()
        {
            var created = await Fixture.Create();
            created = await Fixture.Service.GetAsync(created.Id.Value);

            Assert.NotNull(created);
            Assert.True(created.Id.HasValue);
            Assert.Equal(Fixture.Note, created.Note);
            Assert.False(string.IsNullOrEmpty(created.InvoiceUrl), "InvoiceUrl should not be null or empty.");

            foreach (var item in created.LineItems)
            {
                Assert.Equal(Fixture.LineItemTitle, item.Title);
                Assert.Equal(Fixture.LineItemQuantity, item.Quantity);
                // was a duplicated Quantity assertion; the price was never checked
                Assert.Equal(Fixture.LineItemPrice, item.Price);
            }
        }

        [Fact]
        public async Task Creates_DraftOrders()
        {
            var created = await Fixture.Create();

            Assert.NotNull(created);
            Assert.True(created.Id.HasValue);
            Assert.Equal(Fixture.Note, created.Note);
            Assert.False(string.IsNullOrEmpty(created.InvoiceUrl), "InvoiceUrl should not be null or empty.");

            foreach (var item in created.LineItems)
            {
                Assert.Equal(Fixture.LineItemTitle, item.Title);
                Assert.Equal(Fixture.LineItemQuantity, item.Quantity);
                // was a duplicated Quantity assertion; the price was never checked
                Assert.Equal(Fixture.LineItemPrice, item.Price);
            }
        }

        [Fact]
        public async Task Updates_DraftOrders()
        {
            string newNote = $"New note value {Guid.NewGuid()}";
            var created = await Fixture.Create();
            long id = created.Id.Value;

            created.Note = newNote;
            created.Id = null;

            var updated = await Fixture.Service.UpdateAsync(id, created);

            // Reset the id so the Fixture can properly delete this object.
            created.Id = id;

            Assert.Equal(newNote, updated.Note);
        }

        [Fact(Skip = "Checkouts are disabled for ShopifySharp's dev store")]
        public async Task Sends_Invoice()
        {
            var created = await Fixture.Create();
            string to = "joshua@example.com";
            string subject = "Your draft order is ready";
            string message = "Pay pls";
            var result = await Fixture.Service.SendInvoiceAsync(created.Id.Value, new DraftOrderInvoice()
            {
                To = to,
                Subject = subject,
                CustomMessage = message,
            });

            Assert.False(String.IsNullOrEmpty(result.From), "`From` should not be null or empty");
            Assert.Equal(to, result.To);
            Assert.Equal(subject, result.Subject);
            Assert.Equal(message, result.CustomMessage);
        }

        [Fact(Skip = "Checkouts are disabled for ShopifySharp's dev store")]
        public async Task Completes_DraftOrder()
        {
            var created = await Fixture.Create();
            created = await Fixture.Service.CompleteAsync(created.Id.Value);

            Assert.NotNull(created.CompletedAt);
            Assert.Equal("completed", created.Status);
        }

        [Fact(Skip = "Checkouts are disabled for ShopifySharp's dev store")]
        public async Task Completes_DraftOrder_With_Pending_Payment()
        {
            var created = await Fixture.Create();
            created = await Fixture.Service.CompleteAsync(created.Id.Value, true);

            Assert.NotNull(created.CompletedAt);
            Assert.Equal("completed", created.Status);
        }
    }

    public class DraftOrder_Tests_Fixture : IAsyncLifetime
    {
        public DraftOrderService Service { get; } = new DraftOrderService(Utils.MyShopifyUrl, Utils.AccessToken);

        public List<DraftOrder> Created { get; } = new List<DraftOrder>();

        public string LineItemTitle = "Custom Draft Line Item";

        public decimal LineItemPrice = 15.00m;

        public int LineItemQuantity = 2;

        public string Note = "A note for the draft order.";

        public async Task InitializeAsync()
        {
            Service.SetExecutionPolicy(new LeakyBucketExecutionPolicy());

            // Create one for count, list, get, etc. tests.
            await Create();
        }

        public async Task DisposeAsync()
        {
            foreach (var obj in Created)
            {
                try
                {
                    await Service.DeleteAsync(obj.Id.Value);
                }
                catch (ShopifyException ex)
                {
                    if (ex.HttpStatusCode != HttpStatusCode.NotFound)
                    {
                        Console.WriteLine($"Failed to delete created DraftOrder with id {obj.Id.Value}. {ex.Message}");
                    }
                }
            }
        }

        /// <summary>
        /// Convenience function for running tests. Creates an object and automatically adds it to the queue for deleting after tests finish.
        /// </summary>
        public async Task<DraftOrder> Create(bool skipAddToCreateList = false)
        {
            var obj = await Service.CreateAsync(new DraftOrder()
            {
                LineItems = new List<DraftLineItem>()
                {
                    new DraftLineItem()
                    {
                        Title = LineItemTitle,
                        Price = LineItemPrice,
                        Quantity = LineItemQuantity,
                    }
                },
                Note = Note
            });

            if (!skipAddToCreateList)
            {
                Created.Add(obj);
            }

            return obj;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// Aliases used by the unsafe pointer casts below.
using INTPTR_INTCAST = System.Int32;
using INTPTR_INTPTRCAST = System.IntPtr;

namespace System.DirectoryServices
{
    using System;
    using System.Net;
    using System.Runtime.InteropServices;
    using System.Collections;
    using System.Diagnostics;
    using System.DirectoryServices.Interop;
    using System.Text;
    using System.Configuration;
    using System.Security.Permissions;
    using System.Globalization;

    /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection"]/*' />
    /// <devdoc>
    /// <para>Contains the instances of <see cref='System.DirectoryServices.SearchResult'/> returned during a
    /// query to the Active Directory hierarchy through <see cref='System.DirectoryServices.DirectorySearcher'/>.</para>
    /// </devdoc>
    public class SearchResultCollection : MarshalByRefObject, ICollection, IEnumerable, IDisposable
    {
        private IntPtr _handle;                                        // native IDirectorySearch search handle
        private string[] _properties;                                  // property names requested by the searcher
        private UnsafeNativeMethods.IDirectorySearch _searchObject;    // lazily obtained from _rootEntry
        private string _filter;
        private ArrayList _innerList;                                  // cached, fully-materialized results
        private bool _disposed;
        private DirectoryEntry _rootEntry; // clone of parent entry object
        // Sentinel column names under which ADSI surfaces the dirsync cookie and VLV response.
        private const string ADS_DIRSYNC_COOKIE = "fc8cb04d-311d-406c-8cb9-1ae8b843b418";
        private IntPtr _adsDirsynCookieName = Marshal.StringToCoTaskMemUni(ADS_DIRSYNC_COOKIE);
        private const string ADS_VLV_RESPONSE = "fc8cb04d-311d-406c-8cb9-1ae8b843b419";
        private IntPtr _adsVLVResponseName = Marshal.StringToCoTaskMemUni(ADS_VLV_RESPONSE);
        internal DirectorySearcher srch = null;

        ///<internalonly/>
        internal SearchResultCollection(DirectoryEntry root, IntPtr searchHandle, string[] propertiesLoaded, DirectorySearcher srch)
        {
            _handle = searchHandle;
            _properties = propertiesLoaded;
            _filter = srch.Filter;
            _rootEntry = root;
            this.srch = srch;
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.this"]/*' />
        /// <devdoc>
        /// <para>[To be supplied.]</para>
        /// </devdoc>
        public SearchResult this[int index]
        {
            get
            {
                return (SearchResult)InnerList[index];
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.Count"]/*' />
        /// <devdoc>
        /// <para>[To be supplied.]</para>
        /// </devdoc>
        public int Count
        {
            get
            {
                return InnerList.Count;
            }
        }

        ///<internalonly/>
        internal string Filter
        {
            get
            {
                return _filter;
            }
        }

        ///<internalonly/>
        // Materializes every result on first access by walking a fresh enumerator once,
        // then serves indexer/Count/Contains etc. from the cached list.
        private ArrayList InnerList
        {
            get
            {
                if (_innerList == null)
                {
                    _innerList = new ArrayList();
                    IEnumerator enumerator = new ResultsEnumerator(
                        this,
                        _rootEntry.GetUsername(),
                        _rootEntry.GetPassword(),
                        _rootEntry.AuthenticationType);
                    while (enumerator.MoveNext())
                        _innerList.Add(enumerator.Current);
                }

                return _innerList;
            }
        }

        ///<internalonly/>
        internal UnsafeNativeMethods.IDirectorySearch SearchObject
        {
            get
            {
                if (_searchObject == null)
                {
                    _searchObject = (UnsafeNativeMethods.IDirectorySearch)_rootEntry.AdsObject; // get it only once
                }

                return _searchObject;
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.Handle"]/*' />
        /// <devdoc>
        /// <para>Gets the handle returned by IDirectorySearch::ExecuteSearch, which was called
        /// by the DirectorySearcher that created this object.</para>
        /// </devdoc>
        public IntPtr Handle
        {
            get
            {
                //The handle is no longer valid since the object has been disposed.
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                return _handle;
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.PropertiesLoaded"]/*' />
        /// <devdoc>
        /// <para>Gets a read-only collection of the properties
        /// specified on <see cref='System.DirectoryServices.DirectorySearcher'/> before the
        /// search was executed.</para>
        /// </devdoc>
        public string[] PropertiesLoaded
        {
            get
            {
                return _properties;
            }
        }

        // Dirsync cookie fetched from the special ADSI column; see RetrieveDirectorySynchronizationCookie.
        internal byte[] DirsyncCookie
        {
            get
            {
                return RetrieveDirectorySynchronizationCookie();
            }
        }

        // VLV response fetched from the special ADSI column; see RetrieveVLVResponse.
        internal DirectoryVirtualListView VLVResponse
        {
            get
            {
                return RetrieveVLVResponse();
            }
        }

        internal unsafe byte[] RetrieveDirectorySynchronizationCookie()
        {
            if (_disposed)
                throw new ObjectDisposedException(GetType().Name);

            // get the dirsync cookie back
            AdsSearchColumn column = new AdsSearchColumn();
            AdsSearchColumn* pColumn = &column;
            SearchObject.GetColumn(Handle, _adsDirsynCookieName, (INTPTR_INTPTRCAST)pColumn);
            try
            {
                AdsValue* pValue = column.pADsValues;
                byte[] value = (byte[])new AdsValueHelper(*pValue).GetValue();

                return value;
            }
            finally
            {
                // Always release the column, even if value extraction throws;
                // failures from FreeColumn itself are deliberately ignored.
                try
                {
                    SearchObject.FreeColumn((INTPTR_INTPTRCAST)pColumn);
                }
                catch (COMException)
                {
                }
            }
        }

        internal unsafe DirectoryVirtualListView RetrieveVLVResponse()
        {
            if (_disposed)
                throw new ObjectDisposedException(GetType().Name);

            // get the vlv response back
            AdsSearchColumn column = new AdsSearchColumn();
            AdsSearchColumn* pColumn = &column;
            SearchObject.GetColumn(Handle, _adsVLVResponseName, (INTPTR_INTPTRCAST)pColumn);
            try
            {
                AdsValue* pValue = column.pADsValues;
                DirectoryVirtualListView value = (DirectoryVirtualListView)new AdsValueHelper(*pValue).GetVlvValue();
                return value;
            }
            finally
            {
                // Same free-on-all-paths pattern as the dirsync cookie retrieval above.
                try
                {
                    SearchObject.FreeColumn((INTPTR_INTPTRCAST)pColumn);
                }
                catch (COMException)
                {
                }
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.Dispose"]/*' />
        /// <devdoc>
        /// </devdoc>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.Dispose1"]/*' />
        /// <devdoc>
        /// </devdoc>
        protected virtual void Dispose(bool disposing)
        {
            if (!_disposed)
            {
                if (_handle != (IntPtr)0 && _searchObject != null && disposing)
                {
                    // NOTE: We can't call methods on SearchObject in the finalizer because it
                    // runs on a different thread. The IDirectorySearch object is STA, so COM must create
                    // a proxy stub to marshal the call back to the original thread. Unfortunately, the
                    // IDirectorySearch interface cannot be registered, because it is not automation
                    // compatible. Therefore the QI for IDirectorySearch on this thread fails, and we get
                    // an InvalidCastException. The conclusion is that the user simply must call Dispose
                    // on this object.
                    _searchObject.CloseSearchHandle(_handle);

                    _handle = (IntPtr)0;
                }

                if (disposing)
                    _rootEntry.Dispose();

                // The CoTaskMem column-name strings are safe to free from the finalizer thread.
                if (_adsDirsynCookieName != (IntPtr)0)
                    Marshal.FreeCoTaskMem(_adsDirsynCookieName);

                if (_adsVLVResponseName != (IntPtr)0)
                    Marshal.FreeCoTaskMem(_adsVLVResponseName);

                _disposed = true;
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for=".Finalize"]/*' />
        ~SearchResultCollection()
        {
            Dispose(false);      // finalizer is called => Dispose has not been called yet.
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.GetEnumerator"]/*' />
        /// <devdoc>
        /// <para>[To be supplied.]</para>
        /// </devdoc>
        public IEnumerator GetEnumerator()
        {
            // Two ResultsEnumerators can't exist at the same time over the
            // same object. Need to get a new handle, which means re-querying.
            return new ResultsEnumerator(
                this,
                _rootEntry.GetUsername(),
                _rootEntry.GetPassword(),
                _rootEntry.AuthenticationType);
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.Contains"]/*' />
        /// <devdoc>
        /// <para>[To be supplied.]</para>
        /// </devdoc>
        public bool Contains(SearchResult result)
        {
            return InnerList.Contains(result);
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.CopyTo"]/*' />
        /// <devdoc>
        /// <para>[To be supplied.]</para>
        /// </devdoc>
        public void CopyTo(SearchResult[] results, int index)
        {
            InnerList.CopyTo(results, index);
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.IndexOf"]/*' />
        /// <devdoc>
        /// <para>[To be supplied.]</para>
        /// </devdoc>
        public int IndexOf(SearchResult result)
        {
            return InnerList.IndexOf(result);
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.ICollection.IsSynchronized"]/*' />
        ///<internalonly/>
        bool ICollection.IsSynchronized
        {
            get
            {
                return false;
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.ICollection.SyncRoot"]/*' />
        ///<internalonly/>
        object ICollection.SyncRoot
        {
            get
            {
                return this;
            }
        }

        /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.ICollection.CopyTo"]/*' />
        /// <internalonly/>
        void ICollection.CopyTo(Array array, int index)
        {
            InnerList.CopyTo(array, index);
        }

        /// <devdoc>
        ///    <para> Supports a simple
        ///       ForEach-style iteration over a collection.</para>
        /// </devdoc>
        private class ResultsEnumerator : IEnumerator
        {
            private NetworkCredential _parentCredentials;
            private AuthenticationTypes _parentAuthenticationType;
            private SearchResultCollection _results;
            private bool _initialized;       // true once GetFirstRow has been issued
            private SearchResult _currentResult;
            private bool _eof;
            private bool _waitForResult = false;

            internal ResultsEnumerator(SearchResultCollection results, string parentUserName, string parentPassword, AuthenticationTypes parentAuthenticationType)
            {
                if (parentUserName != null && parentPassword != null)
                    _parentCredentials = new NetworkCredential(parentUserName, parentPassword);

                _parentAuthenticationType = parentAuthenticationType;
                _results = results;
                _initialized = false;

                // get the app configuration information
                //object o = PrivilegedConfigurationManager.GetSection("system.directoryservices");
                //if (o != null && o is bool)
                //{
                //    _waitForResult = (bool)o;
                //}
            }

            /// <devdoc>
            ///     Gets the current element in the collection.
            /// </devdoc>
            public SearchResult Current
            {
                get
                {
                    if (!_initialized || _eof)
                        throw new InvalidOperationException(SR.DSNoCurrentEntry);

                    // The row is read lazily and cached until the next MoveNext.
                    if (_currentResult == null)
                        _currentResult = GetCurrentResult();

                    return _currentResult;
                }
            }

            // Builds a SearchResult from the current row by iterating its columns
            // with GetNextColumnName/GetColumn, converting each ADSVALUE array to
            // managed objects. Column names and columns are freed on every path.
            private unsafe SearchResult GetCurrentResult()
            {
                SearchResult entry = new SearchResult(_parentCredentials, _parentAuthenticationType);
                int hr = 0;
                IntPtr pszColumnName = (IntPtr)0;
                hr = _results.SearchObject.GetNextColumnName(_results.Handle, (INTPTR_INTPTRCAST)(&pszColumnName));
                while (hr == 0)
                {
                    try
                    {
                        AdsSearchColumn column = new AdsSearchColumn();
                        AdsSearchColumn* pColumn = &column;
                        _results.SearchObject.GetColumn(_results.Handle, pszColumnName, (INTPTR_INTPTRCAST)pColumn);
                        try
                        {
                            int numValues = column.dwNumValues;
                            AdsValue* pValue = column.pADsValues;
                            object[] values = new object[numValues];
                            for (int i = 0; i < numValues; i++)
                            {
                                values[i] = new AdsValueHelper(*pValue).GetValue();
                                pValue++;
                            }

                            entry.Properties.Add(Marshal.PtrToStringUni(pszColumnName), new ResultPropertyValueCollection(values));
                        }
                        finally
                        {
                            try
                            {
                                _results.SearchObject.FreeColumn((INTPTR_INTPTRCAST)pColumn);
                            }
                            catch (COMException)
                            {
                            }
                        }
                    }
                    finally
                    {
                        SafeNativeMethods.FreeADsMem(pszColumnName);
                    }

                    hr = _results.SearchObject.GetNextColumnName(_results.Handle, (INTPTR_INTPTRCAST)(&pszColumnName));
                }

                return entry;
            }

            /// <include file='doc\SearchResultCollection.uex' path='docs/doc[@for="SearchResultCollection.ResultsEnumerator.MoveNext"]/*' />
            /// <devdoc>
            /// <para>Advances
            /// the enumerator to the next element of the collection
            /// and returns a Boolean value indicating whether a valid element is available.</para>
            /// </devdoc>
            public bool MoveNext()
            {
                DirectorySynchronization tempsync = null;
                DirectoryVirtualListView tempvlv = null;
                int errorCode = 0;

                if (_eof)
                    return false;

                _currentResult = null;
                if (!_initialized)
                {
                    int hr = _results.SearchObject.GetFirstRow(_results.Handle);

                    if (hr != UnsafeNativeMethods.S_ADS_NOMORE_ROWS)
                    {
                        //throw a clearer exception if the filter was invalid
                        if (hr == UnsafeNativeMethods.INVALID_FILTER)
                            throw new ArgumentException(String.Format(CultureInfo.CurrentCulture, SR.DSInvalidSearchFilter , _results.Filter));
                        if (hr != 0)
                            throw COMExceptionHelper.CreateFormattedComException(hr);

                        _eof = false;
                        _initialized = true;
                        return true;
                    }

                    _initialized = true;
                }

                while (true)
                {
                    // clear the last error first
                    CleanLastError();
                    errorCode = 0;

                    int hr = _results.SearchObject.GetNextRow(_results.Handle);
                    //  SIZE_LIMIT_EXCEEDED occurs when we supply too generic filter or small SizeLimit value.
                    if (hr == UnsafeNativeMethods.S_ADS_NOMORE_ROWS || hr == UnsafeNativeMethods.SIZE_LIMIT_EXCEEDED)
                    {
                        // need to make sure this is not the case that server actually still has record not returned yet
                        if (hr == UnsafeNativeMethods.S_ADS_NOMORE_ROWS)
                        {
                            hr = GetLastError(ref errorCode);
                            // get last error call failed, we need to bail out
                            if (hr != 0)
                                throw COMExceptionHelper.CreateFormattedComException(hr);
                        }

                        // not the case that server still has result, we are done here
                        if (errorCode != SafeNativeMethods.ERROR_MORE_DATA)
                        {
                            // get the dirsync cookie as we finished all the rows
                            // NOTE(review): the property getters here appear to capture state as a
                            // side effect; the assigned locals are otherwise unused — confirm before
                            // simplifying.
                            if (_results.srch.directorySynchronizationSpecified)
                                tempsync = _results.srch.DirectorySynchronization;

                            // get the vlv response as we finished all the rows
                            if (_results.srch.directoryVirtualListViewSpecified)
                                tempvlv = _results.srch.VirtualListView;

                            _results.srch.searchResult = null;

                            _eof = true;
                            _initialized = false;
                            return false;
                        }
                        else
                        {
                            // if user chooses to wait to continue the search
                            if (_waitForResult)
                            {
                                continue;
                            }
                            else
                            {
                                // Equivalent of HRESULT_FROM_WIN32: low word is the Win32 error,
                                // facility 7 (FACILITY_WIN32), severity bit set.
                                uint temp = (uint)errorCode;
                                temp = ((((temp) & 0x0000FFFF) | (7 << 16) | 0x80000000));
                                throw COMExceptionHelper.CreateFormattedComException((int)temp);
                            }
                        }
                    }

                    //throw a clearer exception if the filter was invalid
                    if (hr == UnsafeNativeMethods.INVALID_FILTER)
                        throw new ArgumentException(String.Format(CultureInfo.CurrentCulture, SR.DSInvalidSearchFilter , _results.Filter));
                    if (hr != 0)
                        throw COMExceptionHelper.CreateFormattedComException(hr);

                    _eof = false;
                    return true;
                }
            }

            /// <devdoc>
            ///     Resets the enumerator back to its initial position before the first element in the collection.
            /// </devdoc>
            public void Reset()
            {
                _eof = false;
                _initialized = false;
            }

            object IEnumerator.Current
            {
                get
                {
                    return Current;
                }
            }

            private void CleanLastError()
            {
                SafeNativeMethods.ADsSetLastError(SafeNativeMethods.ERROR_SUCCESS, null, null);
            }

            // Retrieves only the numeric ADSI error code; buffer lengths of 0 mean the
            // error/name text is not copied out.
            private int GetLastError(ref int errorCode)
            {
                StringBuilder errorBuffer = new StringBuilder();
                StringBuilder nameBuffer = new StringBuilder();
                errorCode = SafeNativeMethods.ERROR_SUCCESS;
                int hr = SafeNativeMethods.ADsGetLastError(out errorCode, errorBuffer, 0, nameBuffer, 0);
                return hr;
            }
        }
    }
}
#region License
/* The MIT License
 *
 * Copyright (c) 2011 Red Badger Consulting
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#endregion

namespace RedBadger.Xpf
{
#if WINDOWS_PHONE
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Reactive;
    using System.Reactive.Disposables;
    using System.Reactive.Linq;
    using System.Reactive.Subjects;

    using RedBadger.Xpf.Data;
    using RedBadger.Xpf.Internal;
#else
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Reactive;
    using System.Reactive.Disposables;
    using System.Reactive.Linq;
    using System.Reactive.Subjects;

    using RedBadger.Xpf.Data;
    using RedBadger.Xpf.Internal;
#endif

    /// <summary>
    ///     Represents an object that participates in the Reactive Property system.
    /// </summary>
    public class ReactiveObject : IReactiveObject
    {
        /// <summary>
        ///     The active binding (an <see cref = "IDisposable">IDisposable</see> subscription) for each bound property.
        /// </summary>
        private readonly Dictionary<IReactiveProperty, IDisposable> bindingsByProperty =
            new Dictionary<IReactiveProperty, IDisposable>();

        /// <summary>
        ///     The backing subject (stored as <see cref = "object">object</see>) holding each property's current value.
        /// </summary>
        private readonly Dictionary<IReactiveProperty, object> subjectsByProperty =
            new Dictionary<IReactiveProperty, object>();

        /// <summary>
        ///     Bind One Way (from the Source).
        /// </summary>
        /// <typeparam name = "T">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see> <see cref = "Type">Type</see></typeparam>
        /// <param name = "property">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see></param>
        /// <param name = "fromSource"><see cref = "IObservable{T}">IObservable</see> of updates from the source</param>
        public void Bind<T>(ReactiveProperty<T> property, IObservable<T> fromSource)
        {
            IDisposable subscription = fromSource.Subscribe(this.GetSubject(property));
            this.SetBinding(property, subscription);
        }

        /// <summary>
        ///     Bind One Way (to the Source).
        /// </summary>
        /// <typeparam name = "T">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see> <see cref = "Type">Type</see></typeparam>
        /// <param name = "property">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see></param>
        /// <param name = "toSource"><see cref = "IObserver{T}">IObserver</see> of updates for the Source</param>
        public void Bind<T>(ReactiveProperty<T> property, IObserver<T> toSource)
        {
            ISubject<T> subject = this.GetSubject(property);

            // A one-way-to-source binding initializes itself against the subject;
            // a plain observer is simply subscribed to it.
            var oneWayToSourceBinding = toSource as IOneWayToSourceBinding<T>;
            IDisposable subscription;
            if (oneWayToSourceBinding != null)
            {
                subscription = oneWayToSourceBinding.Initialize(subject);
            }
            else
            {
                subscription = subject.Subscribe(toSource);
            }

            this.SetBinding(property, subscription);
        }

        /// <summary>
        ///     Bind Two Way (from and to the Source)
        /// </summary>
        /// <typeparam name = "T">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see> <see cref = "Type">Type</see></typeparam>
        /// <param name = "property">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see></param>
        /// <param name = "source">A <see cref = "TwoWayBinding{T}">TwoWayBinding</see> containing both an <see cref = "IObservable{T}">IObservable</see> and <see cref = "IObserver{T}">IObserver</see></param>
        public void Bind<T>(ReactiveProperty<T> property, IDualChannel<T> source)
        {
            var twoWayBinding = source as TwoWayBinding<T>;
            if (twoWayBinding == null)
            {
                // Fall back to wiring the two channels up individually.
                this.Bind(property, source.Observable, source.Observer);
                return;
            }

            this.SetBinding(property, twoWayBinding.Initialize(this.GetSubject(property)));
        }

        /// <summary>
        ///     Bind Two Way (from and to the Source)
        /// </summary>
        /// <typeparam name = "T">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see> <see cref = "Type">Type</see></typeparam>
        /// <param name = "property">Target <see cref = "ReactiveProperty{T}">ReactiveProperty</see></param>
        /// <param name = "fromSource"><see cref = "IObservable{T}">IObservable</see> of updates from the source</param>
        /// <param name = "toSource"><see cref = "IObserver{T}">IObserver</see> of updates for the Source</param>
        public void Bind<T>(ReactiveProperty<T> property, IObservable<T> fromSource, IObserver<T> toSource)
        {
            ISubject<T> subject = this.GetSubject(property);
            IDisposable fromSubscription = fromSource.Subscribe(subject);
            IDisposable toSubscription = subject.Subscribe(toSource);
            this.SetBinding(property, new CompositeDisposable(fromSubscription, toSubscription));
        }

        /// <summary>
        ///     Clears the binding on the specified property.
        /// </summary>
        /// <param name = "property">The property who's binding you want to clear.</param>
        public void ClearBinding(IReactiveProperty property)
        {
            IDisposable existingBinding;
            if (!this.bindingsByProperty.TryGetValue(property, out existingBinding))
            {
                return;
            }

            this.bindingsByProperty.Remove(property);
            existingBinding.Dispose();
        }

        /// <summary>
        ///     Clears the local value and any binding on the specified property.
        /// </summary>
        /// <param name = "property">The property whose value and binding should be cleared.</param>
        public void ClearValue(IReactiveProperty property)
        {
            if (property == null)
            {
                throw new ArgumentNullException("property");
            }

            this.subjectsByProperty.Remove(property);
            this.ClearBinding(property);
        }

        /// <summary>
        ///     Gets an <see cref = "IObservable{T}">IObservable</see> over the specified property's values.
        /// </summary>
        public IObservable<T> GetObservable<T, TOwner>(ReactiveProperty<T> property) where TOwner : class, IReactiveObject
        {
            return this.GetSubject(property).AsObservable();
        }

        /// <summary>
        ///     Gets an <see cref = "IObserver{T}">IObserver</see> that pushes values into the specified property.
        /// </summary>
        public IObserver<T> GetObserver<T, TOwner>(ReactiveProperty<T> property) where TOwner : class, IReactiveObject
        {
            return this.GetSubject(property).AsObserver();
        }

        /// <summary>
        ///     Gets the current value of the specified property.
        /// </summary>
        public T GetValue<T>(ReactiveProperty<T> property)
        {
            if (property == null)
            {
                throw new ArgumentNullException("property");
            }

            return this.GetSubject(property).First();
        }

        /// <summary>
        ///     Sets the value of the specified property.
        /// </summary>
        public void SetValue<T>(ReactiveProperty<T> property, T newValue)
        {
            if (property == null)
            {
                throw new ArgumentNullException("property");
            }

            this.GetSubject(property).OnNext(newValue);
        }

        /// <summary>
        ///     Resolves all the deferred bindings for this object using the Data Context.
        /// </summary>
        /// <param name = "dataContext">The Data Context against which the binding should be resolved.</param>
        protected void ResolveDeferredBindings(object dataContext)
        {
            this.bindingsByProperty.Values.OfType<IBinding>()
                .Where(binding => binding.ResolutionMode == BindingResolutionMode.Deferred)
                .ForEach(deferredBinding => deferredBinding.Resolve(dataContext));
        }

        /// <summary>
        ///     Gets (or lazily creates) the subject backing the specified property.
        /// </summary>
        private ISubject<T> GetSubject<T>(ReactiveProperty<T> property)
        {
            object existing;
            if (this.subjectsByProperty.TryGetValue(property, out existing))
            {
                return (ISubject<T>)existing;
            }

            var subject = new ValueChangedBehaviorSubject<T>(property.DefaultValue);

            // Pair each value with its predecessor so a change notification carries
            // both the old and the new value, and only fires when they differ.
            IObservable<T> withInitial = subject.StartWith(property.DefaultValue);
            IObservable<T> successors = withInitial.Skip(1);
            withInitial.Zip(
                successors,
                (oldValue, newValue) => new ReactivePropertyChangeEventArgs<T>(property, oldValue, newValue))
                .Where(propertyChange => !object.Equals(propertyChange.OldValue, propertyChange.NewValue))
                .Subscribe(this.RaiseChanged);

            this.subjectsByProperty.Add(property, subject);
            return subject;
        }

        /// <summary>
        ///     Invokes the property's changed callback, if one is registered.
        /// </summary>
        private void RaiseChanged<T>(ReactivePropertyChangeEventArgs<T> reactivePropertyChange)
        {
            Action<IReactiveObject, ReactivePropertyChangeEventArgs<T>> callback =
                reactivePropertyChange.Property.ChangedCallback;
            if (callback != null)
            {
                callback(this, reactivePropertyChange);
            }
        }

        /// <summary>
        ///     Replaces any existing binding on the property with the given subscription.
        /// </summary>
        private void SetBinding(IReactiveProperty property, IDisposable binding)
        {
            this.ClearBinding(property);
            this.bindingsByProperty[property] = binding;
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Threading.Tasks; using Microsoft.CodeAnalysis.Text; using Roslyn.Test.Utilities; using Xunit; namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Recommendations { public class VoidKeywordRecommenderTests : KeywordRecommenderTests { [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAtRoot_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"$$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterClass_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"class C { } $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterGlobalStatement_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"System.Console.WriteLine(); $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterGlobalVariableDeclaration_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"int i = 0; $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInUsingAlias() { await VerifyAbsenceAsync( @"using Foo = $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterStackAlloc() { await VerifyAbsenceAsync( @"class C { int* foo = stackalloc $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInFixedStatement() { await VerifyKeywordAsync( @"fixed ($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInDelegateReturnType() { await VerifyKeywordAsync( @"public delegate $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInCastType() { await VerifyAbsenceAsync(AddInsideMethod( @"var str = 
(($$")); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInCastType2() { await VerifyAbsenceAsync(AddInsideMethod( @"var str = (($$)items) as string;")); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInEmptyStatement() { await VerifyAbsenceAsync(AddInsideMethod( @"$$")); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInTypeOf() { await VerifyKeywordAsync(AddInsideMethod( @"typeof($$")); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInCompilationUnit() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"$$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterExtern() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"extern alias Foo; $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterExtern_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"extern alias Foo; $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterUsing() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"using Foo; $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterUsing_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"using Foo; $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterNamespace() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"namespace N {} $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterTypeDeclaration() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"class C {} $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterDelegateDeclaration() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"delegate 
void Foo(); $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterMethod() { await VerifyKeywordAsync( @"class C { void Foo() {} $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterField() { await VerifyKeywordAsync( @"class C { int i; $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterProperty() { await VerifyKeywordAsync( @"class C { int i { get; } $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotBeforeUsing() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"$$ using Foo;"); } [WpfFact(Skip = "https://github.com/dotnet/roslyn/issues/9880"), Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotBeforeUsing_Interactive() { await VerifyAbsenceAsync(SourceCodeKind.Script, @"$$ using Foo;"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterAssemblyAttribute() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"[assembly: foo] $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterAssemblyAttribute_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"[assembly: foo] $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterRootAttribute() { await VerifyAbsenceAsync(@"[foo] $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedAttribute() { await VerifyKeywordAsync( @"class C { [foo] $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInsideStruct() { await VerifyKeywordAsync( @"struct S { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInsideInterface() { await VerifyKeywordAsync( @"interface I { $$"); } [Fact, Trait(Traits.Feature, 
Traits.Features.KeywordRecommending)] public async Task TestInsideClass() { await VerifyKeywordAsync( @"class C { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterPartial() { await VerifyAbsenceAsync(@"partial $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedPartial() { await VerifyKeywordAsync( @"class C { partial $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterAbstract() { await VerifyAbsenceAsync(@"abstract $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedAbstract() { await VerifyKeywordAsync( @"class C { abstract $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterInternal() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"internal $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterInternal_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"internal $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedInternal() { await VerifyKeywordAsync( @"class C { internal $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterPublic() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"public $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterPublic_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"public $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedPublic() { await VerifyKeywordAsync( @"class C { public $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterPrivate() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"private $$"); } [Fact, 
Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterPrivate_Script() { await VerifyKeywordAsync(SourceCodeKind.Script, @"private $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedPrivate() { await VerifyKeywordAsync( @"class C { private $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterProtected() { await VerifyAbsenceAsync( @"protected $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedProtected() { await VerifyKeywordAsync( @"class C { protected $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterSealed() { await VerifyAbsenceAsync(@"sealed $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedSealed() { await VerifyKeywordAsync( @"class C { sealed $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterStatic() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"static $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterStatic_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"static $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterStatic() { await VerifyKeywordAsync( @"class C { static $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterStaticPublic() { await VerifyAbsenceAsync(SourceCodeKind.Regular, @"static public $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterStaticPublic_Interactive() { await VerifyKeywordAsync(SourceCodeKind.Script, @"static public $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedStaticPublic() { await 
VerifyKeywordAsync( @"class C { static public $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterDelegate() { await VerifyKeywordAsync( @"delegate $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterAnonymousDelegate() { await VerifyAbsenceAsync(AddInsideMethod( @"var q = delegate $$")); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterEvent() { await VerifyAbsenceAsync( @"class C { event $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterVoid() { await VerifyAbsenceAsync( @"class C { void $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterNew() { await VerifyAbsenceAsync( @"new $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterNestedNew() { await VerifyKeywordAsync( @"class C { new $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeBlock() { await VerifyKeywordAsync(AddInsideMethod( @"unsafe { $$")); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeMethod() { await VerifyKeywordAsync( @"class C { unsafe void Foo() { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeClass() { await VerifyKeywordAsync( @"unsafe class C { void Foo() { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInParameter() { await VerifyAbsenceAsync( @"class C { void Foo($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeParameter1() { await VerifyKeywordAsync( @"class C { unsafe void Foo($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeParameter2() { await 
VerifyKeywordAsync( @"unsafe class C { void Foo($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInCast() { await VerifyAbsenceAsync( @"class C { void Foo() { hr = GetRealProcAddress(""CompareAssemblyIdentity"", ($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInCast2() { await VerifyAbsenceAsync( @"class C { void Foo() { hr = GetRealProcAddress(""CompareAssemblyIdentity"", ($$**)pfnCompareAssemblyIdentity);"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeCast() { await VerifyKeywordAsync( @"unsafe class C { void Foo() { hr = GetRealProcAddress(""CompareAssemblyIdentity"", ($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeCast2() { await VerifyKeywordAsync( @"unsafe class C { void Foo() { hr = GetRealProcAddress(""CompareAssemblyIdentity"", ($$**)pfnCompareAssemblyIdentity);"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeConversionOperator() { await VerifyKeywordAsync( @"class C { unsafe implicit operator int(C c) { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeOperator() { await VerifyKeywordAsync( @"class C { unsafe int operator ++(C c) { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeConstructor() { await VerifyKeywordAsync( @"class C { unsafe C() { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeDestructor() { await VerifyKeywordAsync( @"class C { unsafe ~C() { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeProperty() { await VerifyKeywordAsync( @"class C { unsafe int Foo { get { $$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task 
TestInUnsafeIndexer() { await VerifyKeywordAsync( @"class C { unsafe int this[int i] { get { $$"); } [WorkItem(538804, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/538804")] [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInDefault() { await VerifyAbsenceAsync(AddInsideMethod( @"default($$")); } [WorkItem(538804, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/538804")] [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInSizeOf() { await VerifyKeywordAsync(AddInsideMethod( @"sizeof($$")); } [WorkItem(544347, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/544347")] [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestInUnsafeDefaultExpression() { await VerifyKeywordAsync( @"unsafe class C { static void Method1(void* p1 = default($$"); } [WorkItem(544347, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/544347")] [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotInDefaultExpression() { await VerifyAbsenceAsync( @"class C { static void Method1(void* p1 = default($$"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestAfterAsync() { await VerifyKeywordAsync(@"class c { async $$ }"); } [Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)] public async Task TestNotAfterAsyncAsType() { await VerifyAbsenceAsync(@"class c { async async $$ }"); } } }
#region BSD License /* Copyright (c) 2004 - 2008 Matthew Holmes (matthew@wildfiregames.com), Dan Moorehead (dan05a@gmail.com), C.J. Adams-Collier (cjac@colliertech.org), Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
#endregion

using System;
using System.Collections;
using System.Collections.Specialized;
using System.IO;
using System.Reflection;
using System.Text.RegularExpressions;
using Prebuild.Core.Attributes;
using Prebuild.Core.Interfaces;
using Prebuild.Core.Nodes;
using Prebuild.Core.Utilities;

namespace Prebuild.Core.Targets
{
    /// <summary>
    /// Target that emits NAnt build files (one *.build file per project plus a
    /// solution-level combine file) from the prebuild solution model.
    /// </summary>
    [Target("nant")]
    public class NAntTarget : ITarget
    {
        #region Fields

        // Kernel driving the current Write()/Clean() pass; assigned on entry to
        // those methods and cleared to null when the pass finishes.
        private Kernel m_Kernel;

        #endregion

        #region Private Methods

        /// <summary>
        /// Normalizes a project-model path for use inside the generated build file.
        /// </summary>
        /// <param name="path">Path taken from the project model.</param>
        /// <returns>The normalized path.</returns>
        private static string PrependPath(string path)
        {
            string tmpPath = Helper.NormalizePath(path, '/');
            // NOTE(review): 'regex'/'match' feed only the commented-out condition
            // below, so this work is currently dead weight kept for history.
            Regex regex = new Regex(@"(\w):/(\w+)");
            Match match = regex.Match(tmpPath);
            //if(match.Success || tmpPath[0] == '.' || tmpPath[0] == '/')
            //{
            tmpPath = Helper.NormalizePath(tmpPath);
            //}
            //  else
            //  {
            //      tmpPath = Helper.NormalizePath("./" + tmpPath);
            //  }
            return tmpPath;
        }

        /// <summary>
        /// Resolves a reference to the path (or file name) that should be written
        /// into the generated build file.
        /// </summary>
        /// <param name="solution">Solution the referencing project belongs to.</param>
        /// <param name="currentProject">Referencing project (currently unused by this method).</param>
        /// <param name="refr">Reference to resolve.</param>
        /// <returns>Path of the referenced assembly, or a GAC-style file name ending in ".dll".</returns>
        private static string BuildReference(SolutionNode solution, ProjectNode currentProject, ReferenceNode refr)
        {
            // An explicit, non-empty path on the reference wins outright.
            if (!String.IsNullOrEmpty(refr.Path))
            {
                return refr.Path;
            }

            // Project-to-project reference: point at the sibling project's output.
            if (solution.ProjectsTable.ContainsKey(refr.Name))
            {
                ProjectNode projectRef = (ProjectNode) solution.ProjectsTable[refr.Name];
                string finalPath = Helper.NormalizePath(refr.Name + GetProjectExtension(projectRef), '/');
                return finalPath;
            }

            ProjectNode project = (ProjectNode) refr.Parent;

            // Do we have an explicit file reference?
            string fileRef = FindFileReference(refr.Name, project);
            if (fileRef != null)
            {
                return fileRef;
            }

            // Is there an explicit path in the project ref?
            // NOTE(review): only reachable when refr.Path is the empty string
            // (non-empty paths returned above) -- confirm whether this branch is dead.
            if (refr.Path != null)
            {
                return Helper.NormalizePath(refr.Path + "/" + refr.Name + GetProjectExtension(project), '/');
            }

            // No, it's an extensionless GAC ref, but nant needs the .dll extension anyway
            return refr.Name + ".dll";
        }

        /// <summary>
        /// Returns the reference file name, appending ".dll" when the name
        /// carries no recognized extension.
        /// </summary>
        /// <param name="refName">Assembly reference name, with or without extension.</param>
        /// <returns>The name guaranteed to carry a file extension.</returns>
        public static string GetRefFileName(string refName)
        {
            if (ExtensionSpecified(refName))
            {
                return refName;
            }
            else
            {
                return refName + ".dll";
            }
        }

        /// <summary>
        /// True when the name already ends in ".dll" or ".exe".
        /// NOTE(review): comparison is ordinal case-sensitive, so ".DLL"/".EXE"
        /// are not recognized -- confirm whether that is intentional.
        /// </summary>
        /// <param name="refName">Assembly reference name to inspect.</param>
        private static bool ExtensionSpecified(string refName)
        {
            return refName.EndsWith(".dll") || refName.EndsWith(".exe");
        }

        /// <summary>
        /// Maps a project's output type to its file extension:
        /// ".exe" for Exe/WinExe projects, ".dll" otherwise.
        /// </summary>
        /// <param name="project">Project whose output extension is wanted.</param>
        private static string GetProjectExtension(ProjectNode project)
        {
            string extension = ".dll";
            if (project.Type == ProjectType.Exe || project.Type == ProjectType.WinExe)
            {
                extension = ".exe";
            }
            return extension;
        }

        /// <summary>
        /// Searches the project's reference paths for the referenced file,
        /// trying the bare name, then name + ".dll", then name + ".exe".
        /// </summary>
        /// <param name="refName">Assembly reference name.</param>
        /// <param name="project">Project whose reference paths are searched.</param>
        /// <returns>Full path of the first match on disk, or null when not found.</returns>
        private static string FindFileReference(string refName, ProjectNode project)
        {
            foreach (ReferencePathNode refPath in project.ReferencePaths)
            {
                string fullPath = Helper.MakeFilePath(refPath.Path, refName);
                if (File.Exists(fullPath))
                {
                    return fullPath;
                }
                fullPath = Helper.MakeFilePath(refPath.Path, refName, "dll");
                if (File.Exists(fullPath))
                {
                    return fullPath;
                }
                fullPath = Helper.MakeFilePath(refPath.Path, refName, "exe");
                if (File.Exists(fullPath))
                {
                    return fullPath;
                }
            }
            return null;
        }

        /// <summary>
        /// Gets the XML doc file.
/// </summary> /// <param name="project">The project.</param> /// <param name="conf">The conf.</param> /// <returns></returns> public static string GetXmlDocFile(ProjectNode project, ConfigurationNode conf) { if (conf == null) { throw new ArgumentNullException("conf"); } if (project == null) { throw new ArgumentNullException("project"); } string docFile = (string)conf.Options["XmlDocFile"]; // if(docFile != null && docFile.Length == 0)//default to assembly name if not specified // { // return Path.GetFileNameWithoutExtension(project.AssemblyName) + ".xml"; // } return docFile; } private void WriteProject(SolutionNode solution, ProjectNode project) { string projFile = Helper.MakeFilePath(project.FullPath, project.Name + GetProjectExtension(project), "build"); StreamWriter ss = new StreamWriter(projFile); m_Kernel.CurrentWorkingDirectory.Push(); Helper.SetCurrentDir(Path.GetDirectoryName(projFile)); bool hasDoc = false; using (ss) { ss.WriteLine("<?xml version=\"1.0\" ?>"); ss.WriteLine("<project name=\"{0}\" default=\"build\">", project.Name); ss.WriteLine(" <target name=\"{0}\">", "build"); ss.WriteLine(" <echo message=\"Build Directory is ${project::get-base-directory()}/${build.dir}\" />"); ss.WriteLine(" <mkdir dir=\"${project::get-base-directory()}/${build.dir}\" />"); ss.WriteLine(" <copy todir=\"${project::get-base-directory()}/${build.dir}\" flatten=\"true\">"); ss.WriteLine(" <fileset basedir=\"${project::get-base-directory()}\">"); foreach (ReferenceNode refr in project.References) { if (refr.LocalCopy) { ss.WriteLine(" <include name=\"{0}", Helper.NormalizePath(Helper.MakePathRelativeTo(project.FullPath, BuildReference(solution, project, refr)) + "\" />", '/')); } } ss.WriteLine(" </fileset>"); ss.WriteLine(" </copy>"); if (project.ConfigFile != null && project.ConfigFile.Length!=0) { ss.Write(" <copy file=\"" + project.ConfigFile + "\" tofile=\"${project::get-base-directory()}/${build.dir}/${project::get-name()}"); if (project.Type == 
ProjectType.Library) { ss.Write(".dll.config\""); } else { ss.Write(".exe.config\""); } ss.WriteLine(" />"); } // Add the content files to just be copied ss.WriteLine(" {0}", "<copy todir=\"${project::get-base-directory()}/${build.dir}\">"); ss.WriteLine(" {0}", "<fileset basedir=\".\">"); foreach (string file in project.Files) { // Ignore if we aren't content if (project.Files.GetBuildAction(file) != BuildAction.Content) continue; // Create a include tag ss.WriteLine(" {0}", "<include name=\"" + Helper.NormalizePath(PrependPath(file), '/') + "\" />"); } ss.WriteLine(" {0}", "</fileset>"); ss.WriteLine(" {0}", "</copy>"); ss.Write(" <csc"); ss.Write(" target=\"{0}\"", project.Type.ToString().ToLower()); ss.Write(" debug=\"{0}\"", "${build.debug}"); foreach (ConfigurationNode conf in project.Configurations) { if (conf.Options.KeyFile != "") { ss.Write(" keyfile=\"{0}\"", conf.Options.KeyFile); break; } } foreach (ConfigurationNode conf in project.Configurations) { ss.Write(" unsafe=\"{0}\"", conf.Options.AllowUnsafe); break; } foreach (ConfigurationNode conf in project.Configurations) { ss.Write(" warnaserror=\"{0}\"", conf.Options.WarningsAsErrors); break; } foreach (ConfigurationNode conf in project.Configurations) { ss.Write(" define=\"{0}\"", conf.Options.CompilerDefines); break; } foreach (ConfigurationNode conf in project.Configurations) { ss.Write(" nostdlib=\"{0}\"", conf.Options["NoStdLib"]); break; } ss.Write(" main=\"{0}\"", project.StartupObject); foreach (ConfigurationNode conf in project.Configurations) { if (GetXmlDocFile(project, conf) != "") { ss.Write(" doc=\"{0}\"", "${project::get-base-directory()}/${build.dir}/" + GetXmlDocFile(project, conf)); hasDoc = true; } break; } ss.Write(" output=\"{0}", "${project::get-base-directory()}/${build.dir}/${project::get-name()}"); if (project.Type == ProjectType.Library) { ss.Write(".dll\""); } else { ss.Write(".exe\""); } if (project.AppIcon != null && project.AppIcon.Length != 0) { ss.Write(" 
win32icon=\"{0}\"", Helper.NormalizePath(project.AppIcon, '/')); } ss.WriteLine(">"); ss.WriteLine(" <resources prefix=\"{0}\" dynamicprefix=\"true\" >", project.RootNamespace); foreach (string file in project.Files) { switch (project.Files.GetBuildAction(file)) { case BuildAction.EmbeddedResource: ss.WriteLine(" {0}", "<include name=\"" + Helper.NormalizePath(PrependPath(file), '/') + "\" />"); break; default: if (project.Files.GetSubType(file) != SubType.Code && project.Files.GetSubType(file) != SubType.Settings) { ss.WriteLine(" <include name=\"{0}\" />", file.Substring(0, file.LastIndexOf('.')) + ".resx"); } break; } } //if (project.Files.GetSubType(file).ToString() != "Code") //{ // ps.WriteLine(" <EmbeddedResource Include=\"{0}\">", file.Substring(0, file.LastIndexOf('.')) + ".resx"); ss.WriteLine(" </resources>"); ss.WriteLine(" <sources failonempty=\"true\">"); foreach (string file in project.Files) { switch (project.Files.GetBuildAction(file)) { case BuildAction.Compile: ss.WriteLine(" <include name=\"" + Helper.NormalizePath(PrependPath(file), '/') + "\" />"); break; default: break; } } ss.WriteLine(" </sources>"); ss.WriteLine(" <references basedir=\"${project::get-base-directory()}\">"); ss.WriteLine(" <lib>"); ss.WriteLine(" <include name=\"${project::get-base-directory()}\" />"); foreach(ReferencePathNode refPath in project.ReferencePaths) { ss.WriteLine(" <include name=\"${project::get-base-directory()}/" + refPath.Path.TrimEnd('/', '\\') + "\" />"); } ss.WriteLine(" </lib>"); foreach (ReferenceNode refr in project.References) { string path = Helper.NormalizePath(Helper.MakePathRelativeTo(project.FullPath, BuildReference(solution, project, refr)), '/'); ss.WriteLine(" <include name=\"" + path + "\" />"); } ss.WriteLine(" </references>"); ss.WriteLine(" </csc>"); foreach (ConfigurationNode conf in project.Configurations) { if (!String.IsNullOrEmpty(conf.Options.OutputPath)) { string targetDir = Helper.NormalizePath(conf.Options.OutputPath, '/'); 
ss.WriteLine(" <echo message=\"Copying from [${project::get-base-directory()}/${build.dir}/] to [${project::get-base-directory()}/" + targetDir + "\" />"); ss.WriteLine(" <mkdir dir=\"${project::get-base-directory()}/" + targetDir + "\"/>"); ss.WriteLine(" <copy todir=\"${project::get-base-directory()}/" + targetDir + "\">"); ss.WriteLine(" <fileset basedir=\"${project::get-base-directory()}/${build.dir}/\" >"); ss.WriteLine(" <include name=\"*.dll\"/>"); ss.WriteLine(" <include name=\"*.exe\"/>"); ss.WriteLine(" <include name=\"*.mdb\" if='${build.debug}'/>"); ss.WriteLine(" <include name=\"*.pdb\" if='${build.debug}'/>"); ss.WriteLine(" </fileset>"); ss.WriteLine(" </copy>"); break; } } ss.WriteLine(" </target>"); ss.WriteLine(" <target name=\"clean\">"); ss.WriteLine(" <delete dir=\"${bin.dir}\" failonerror=\"false\" />"); ss.WriteLine(" <delete dir=\"${obj.dir}\" failonerror=\"false\" />"); ss.WriteLine(" </target>"); ss.WriteLine(" <target name=\"doc\" description=\"Creates documentation.\">"); if (hasDoc) { ss.WriteLine(" <property name=\"doc.target\" value=\"\" />"); ss.WriteLine(" <if test=\"${platform::is-unix()}\">"); ss.WriteLine(" <property name=\"doc.target\" value=\"Web\" />"); ss.WriteLine(" </if>"); ss.WriteLine(" <ndoc failonerror=\"false\" verbose=\"true\">"); ss.WriteLine(" <assemblies basedir=\"${project::get-base-directory()}\">"); ss.Write(" <include name=\"${build.dir}/${project::get-name()}"); if (project.Type == ProjectType.Library) { ss.WriteLine(".dll\" />"); } else { ss.WriteLine(".exe\" />"); } ss.WriteLine(" </assemblies>"); ss.WriteLine(" <summaries basedir=\"${project::get-base-directory()}\">"); ss.WriteLine(" <include name=\"${build.dir}/${project::get-name()}.xml\"/>"); ss.WriteLine(" </summaries>"); ss.WriteLine(" <referencepaths basedir=\"${project::get-base-directory()}\">"); ss.WriteLine(" <include name=\"${build.dir}\" />"); // foreach(ReferenceNode refr in project.References) // { // string path = 
Helper.NormalizePath(Helper.MakePathRelativeTo(project.FullPath, BuildReferencePath(solution, refr)), '/'); // if (path != "") // { // ss.WriteLine(" <include name=\"{0}\" />", path); // } // } ss.WriteLine(" </referencepaths>"); ss.WriteLine(" <documenters>"); ss.WriteLine(" <documenter name=\"MSDN\">"); ss.WriteLine(" <property name=\"OutputDirectory\" value=\"${project::get-base-directory()}/${build.dir}/doc/${project::get-name()}\" />"); ss.WriteLine(" <property name=\"OutputTarget\" value=\"${doc.target}\" />"); ss.WriteLine(" <property name=\"HtmlHelpName\" value=\"${project::get-name()}\" />"); ss.WriteLine(" <property name=\"IncludeFavorites\" value=\"False\" />"); ss.WriteLine(" <property name=\"Title\" value=\"${project::get-name()} SDK Documentation\" />"); ss.WriteLine(" <property name=\"SplitTOCs\" value=\"False\" />"); ss.WriteLine(" <property name=\"DefaulTOC\" value=\"\" />"); ss.WriteLine(" <property name=\"ShowVisualBasic\" value=\"True\" />"); ss.WriteLine(" <property name=\"AutoDocumentConstructors\" value=\"True\" />"); ss.WriteLine(" <property name=\"ShowMissingSummaries\" value=\"${build.debug}\" />"); ss.WriteLine(" <property name=\"ShowMissingRemarks\" value=\"${build.debug}\" />"); ss.WriteLine(" <property name=\"ShowMissingParams\" value=\"${build.debug}\" />"); ss.WriteLine(" <property name=\"ShowMissingReturns\" value=\"${build.debug}\" />"); ss.WriteLine(" <property name=\"ShowMissingValues\" value=\"${build.debug}\" />"); ss.WriteLine(" <property name=\"DocumentInternals\" value=\"False\" />"); ss.WriteLine(" <property name=\"DocumentPrivates\" value=\"False\" />"); ss.WriteLine(" <property name=\"DocumentProtected\" value=\"True\" />"); ss.WriteLine(" <property name=\"DocumentEmptyNamespaces\" value=\"${build.debug}\" />"); ss.WriteLine(" <property name=\"IncludeAssemblyVersion\" value=\"True\" />"); ss.WriteLine(" </documenter>"); ss.WriteLine(" </documenters>"); ss.WriteLine(" </ndoc>"); } ss.WriteLine(" </target>"); 
ss.WriteLine("</project>"); } m_Kernel.CurrentWorkingDirectory.Pop(); } private void WriteCombine(SolutionNode solution) { m_Kernel.Log.Write("Creating NAnt build files"); foreach (ProjectNode project in solution.Projects) { if (m_Kernel.AllowProject(project.FilterGroups)) { m_Kernel.Log.Write("...Creating project: {0}", project.Name); WriteProject(solution, project); } } m_Kernel.Log.Write(""); string combFile = Helper.MakeFilePath(solution.FullPath, solution.Name, "build"); StreamWriter ss = new StreamWriter(combFile); m_Kernel.CurrentWorkingDirectory.Push(); Helper.SetCurrentDir(Path.GetDirectoryName(combFile)); using (ss) { ss.WriteLine("<?xml version=\"1.0\" ?>"); ss.WriteLine("<project name=\"{0}\" default=\"build\">", solution.Name); ss.WriteLine(" <echo message=\"Using '${nant.settings.currentframework}' Framework\"/>"); ss.WriteLine(); //ss.WriteLine(" <property name=\"dist.dir\" value=\"dist\" />"); //ss.WriteLine(" <property name=\"source.dir\" value=\"source\" />"); ss.WriteLine(" <property name=\"bin.dir\" value=\"bin\" />"); ss.WriteLine(" <property name=\"obj.dir\" value=\"obj\" />"); ss.WriteLine(" <property name=\"doc.dir\" value=\"doc\" />"); ss.WriteLine(" <property name=\"project.main.dir\" value=\"${project::get-base-directory()}\" />"); // actually use active config out of prebuild.xml ss.WriteLine(" <property name=\"project.config\" value=\"{0}\" />", solution.ActiveConfig); foreach (ConfigurationNode conf in solution.Configurations) { ss.WriteLine(); ss.WriteLine(" <target name=\"{0}\" description=\"\">", conf.Name); ss.WriteLine(" <property name=\"project.config\" value=\"{0}\" />", conf.Name); ss.WriteLine(" <property name=\"build.debug\" value=\"{0}\" />", conf.Options["DebugInformation"].ToString().ToLower()); ss.WriteLine(" </target>"); ss.WriteLine(); } ss.WriteLine(" <target name=\"net-1.1\" description=\"Sets framework to .NET 1.1\">"); ss.WriteLine(" <property name=\"nant.settings.currentframework\" value=\"net-1.1\" />"); 
ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"net-2.0\" description=\"Sets framework to .NET 2.0\">"); ss.WriteLine(" <property name=\"nant.settings.currentframework\" value=\"net-2.0\" />"); ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"net-3.5\" description=\"Sets framework to .NET 3.5\">"); ss.WriteLine(" <property name=\"nant.settings.currentframework\" value=\"net-3.5\" />"); ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"mono-1.0\" description=\"Sets framework to mono 1.0\">"); ss.WriteLine(" <property name=\"nant.settings.currentframework\" value=\"mono-1.0\" />"); ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"mono-2.0\" description=\"Sets framework to mono 2.0\">"); ss.WriteLine(" <property name=\"nant.settings.currentframework\" value=\"mono-2.0\" />"); ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"mono-3.5\" description=\"Sets framework to mono 3.5\">"); ss.WriteLine(" <property name=\"nant.settings.currentframework\" value=\"mono-3.5\" />"); ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"init\" description=\"\">"); ss.WriteLine(" <call target=\"${project.config}\" />"); ss.WriteLine(" <property name=\"sys.os.platform\""); ss.WriteLine(" value=\"${platform::get-name()}\""); ss.WriteLine(" />"); ss.WriteLine(" <echo message=\"Platform ${sys.os.platform}\" />"); ss.WriteLine(" <property name=\"build.dir\" value=\"${bin.dir}/${project.config}\" />"); ss.WriteLine(" </target>"); ss.WriteLine(); // sdague - ok, this is an ugly hack, but what it lets // us do is native include of files into the nant // created files from all .nant/*include files. This // lets us keep using prebuild, but allows for // extended nant targets to do build and the like. 
try { Regex re = new Regex(".include$"); DirectoryInfo nantdir = new DirectoryInfo(".nant"); foreach (FileSystemInfo item in nantdir.GetFileSystemInfos()) { if (item is DirectoryInfo) { } else if (item is FileInfo) { if (re.Match(((FileInfo)item).FullName) != System.Text.RegularExpressions.Match.Empty) { Console.WriteLine("Including file: " + ((FileInfo)item).FullName); using (FileStream fs = new FileStream(((FileInfo)item).FullName, FileMode.Open, FileAccess.Read, FileShare.None)) { using (StreamReader sr = new StreamReader(fs)) { ss.WriteLine("<!-- included from {0} -->", ((FileInfo)item).FullName); while (sr.Peek() != -1) { ss.WriteLine(sr.ReadLine()); } ss.WriteLine(); } } } } } } catch { } // ss.WriteLine(" <include buildfile=\".nant/local.include\" />"); // ss.WriteLine(" <target name=\"zip\" description=\"\">"); // ss.WriteLine(" <zip zipfile=\"{0}-{1}.zip\">", solution.Name, solution.Version); // ss.WriteLine(" <fileset basedir=\"${project::get-base-directory()}\">"); // ss.WriteLine(" <include name=\"${project::get-base-directory()}/**/*.cs\" />"); // // ss.WriteLine(" <include name=\"${project.main.dir}/**/*\" />"); // ss.WriteLine(" </fileset>"); // ss.WriteLine(" </zip>"); // ss.WriteLine(" <echo message=\"Building zip target\" />"); // ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"clean\" description=\"\">"); ss.WriteLine(" <echo message=\"Deleting all builds from all configurations\" />"); //ss.WriteLine(" <delete dir=\"${dist.dir}\" failonerror=\"false\" />"); ss.WriteLine(" <delete failonerror=\"false\">"); ss.WriteLine(" <fileset basedir=\"${bin.dir}\">"); ss.WriteLine(" <include name=\"OpenSim*.dll\"/>"); ss.WriteLine(" <include name=\"OpenSim*.exe\"/>"); ss.WriteLine(" <include name=\"ScriptEngines/*\"/>"); ss.WriteLine(" <include name=\"Physics/*\"/>"); ss.WriteLine(" <exclude name=\"OpenSim.32BitLaunch.exe\"/>"); ss.WriteLine(" <exclude name=\"ScriptEngines/Default.lsl\"/>"); ss.WriteLine(" </fileset>"); 
ss.WriteLine(" </delete>"); ss.WriteLine(" <delete dir=\"${obj.dir}\" failonerror=\"false\" />"); foreach (ProjectNode project in solution.Projects) { string path = Helper.MakePathRelativeTo(solution.FullPath, project.FullPath); ss.Write(" <nant buildfile=\"{0}\"", Helper.NormalizePath(Helper.MakeFilePath(path, project.Name + GetProjectExtension(project), "build"), '/')); ss.WriteLine(" target=\"clean\" />"); } ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"build\" depends=\"init\" description=\"\">"); foreach (ProjectNode project in solution.ProjectsTableOrder) { string path = Helper.MakePathRelativeTo(solution.FullPath, project.FullPath); ss.Write(" <nant buildfile=\"{0}\"", Helper.NormalizePath(Helper.MakeFilePath(path, project.Name + GetProjectExtension(project), "build"), '/')); ss.WriteLine(" target=\"build\" />"); } ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine(" <target name=\"build-release\" depends=\"Release, init, build\" description=\"Builds in Release mode\" />"); ss.WriteLine(); ss.WriteLine(" <target name=\"build-debug\" depends=\"Debug, init, build\" description=\"Builds in Debug mode\" />"); ss.WriteLine(); //ss.WriteLine(" <target name=\"package\" depends=\"clean, doc, copyfiles, zip\" description=\"Builds in Release mode\" />"); ss.WriteLine(" <target name=\"package\" depends=\"clean, doc\" description=\"Builds all\" />"); ss.WriteLine(); ss.WriteLine(" <target name=\"doc\" depends=\"build-release\">"); ss.WriteLine(" <echo message=\"Generating all documentation from all builds\" />"); foreach (ProjectNode project in solution.Projects) { string path = Helper.MakePathRelativeTo(solution.FullPath, project.FullPath); ss.Write(" <nant buildfile=\"{0}\"", Helper.NormalizePath(Helper.MakeFilePath(path, project.Name + GetProjectExtension(project), "build"), '/')); ss.WriteLine(" target=\"doc\" />"); } ss.WriteLine(" </target>"); ss.WriteLine(); ss.WriteLine("</project>"); } m_Kernel.CurrentWorkingDirectory.Pop(); } 
/// <summary>
/// Deletes the generated NAnt build file for a single project.
/// </summary>
/// <param name="project">The project whose generated .build file should be removed.</param>
private void CleanProject(ProjectNode project)
{
    m_Kernel.Log.Write("...Cleaning project: {0}", project.Name);
    string projectFile = Helper.MakeFilePath(project.FullPath, project.Name + GetProjectExtension(project), "build");
    Helper.DeleteIfExists(projectFile);
}

/// <summary>
/// Deletes the generated NAnt build file for a solution and all of its projects.
/// </summary>
/// <param name="solution">The solution to clean.</param>
private void CleanSolution(SolutionNode solution)
{
    // FIX: the original message had no {0} placeholder even though
    // solution.Name was passed as a format argument, so the solution
    // name was silently dropped from the log output.
    m_Kernel.Log.Write("Cleaning NAnt build files for {0}", solution.Name);

    string slnFile = Helper.MakeFilePath(solution.FullPath, solution.Name, "build");
    Helper.DeleteIfExists(slnFile);

    foreach (ProjectNode project in solution.Projects)
    {
        CleanProject(project);
    }

    // Blank line in the log to separate this solution's output from the next.
    m_Kernel.Log.Write("");
}

#endregion

#region ITarget Members

/// <summary>
/// Writes the NAnt build files for every solution known to the kernel.
/// </summary>
/// <param name="kern">The processing kernel; must not be null.</param>
public void Write(Kernel kern)
{
    if (kern == null)
    {
        throw new ArgumentNullException("kern");
    }
    m_Kernel = kern;
    foreach (SolutionNode solution in kern.Solutions)
    {
        WriteCombine(solution);
    }
    // Release the kernel reference once writing is complete.
    m_Kernel = null;
}

/// <summary>
/// Removes the generated NAnt build files for every solution known to the kernel.
/// </summary>
/// <param name="kern">The processing kernel; must not be null.</param>
public virtual void Clean(Kernel kern)
{
    if (kern == null)
    {
        throw new ArgumentNullException("kern");
    }
    m_Kernel = kern;
    foreach (SolutionNode sol in kern.Solutions)
    {
        CleanSolution(sol);
    }
    // Release the kernel reference once cleaning is complete.
    m_Kernel = null;
}

/// <summary>
/// Gets the name under which this target is registered.
/// </summary>
/// <value>Always "nant".</value>
public string Name
{
    get
    {
        return "nant";
    }
}

#endregion
}
}
/*
	FluorineFx open source library 
	Copyright (C) 2007 Zoltan Csibi, zoltan@TheSilentGroup.com, FluorineFx.com 
	
	This library is free software; you can redistribute it and/or
	modify it under the terms of the GNU Lesser General Public
	License as published by the Free Software Foundation; either
	version 2.1 of the License, or (at your option) any later version.
	
	This library is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
	Lesser General Public License for more details.
	
	You should have received a copy of the GNU Lesser General Public
	License along with this library; if not, write to the Free Software
	Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
*/
using System;
using System.Collections;
#if !(NET_1_1)
using System.Collections.Generic;
#endif
using FluorineFx.Messaging.Api;
using FluorineFx.Messaging.Api.Event;
using FluorineFx.Messaging.Rtmp.Event;
using FluorineFx.Collections.Generic;

namespace FluorineFx.Messaging.Rtmp.SO
{
    /// <summary>
    /// Shared object event. Carries a named, versioned batch of
    /// <see cref="ISharedObjectEvent"/> changes for an RTMP shared object.
    /// </summary>
    [CLSCompliant(false)]
    public class SharedObjectMessage : BaseEvent, ISharedObjectMessage
    {
        /// <summary>
        /// Shared object event name.
        /// </summary>
        private string _name;
        /// <summary>
        /// Shared object events chain.
        /// </summary>
        private ConcurrentLinkedQueue<ISharedObjectEvent> _events = new ConcurrentLinkedQueue<ISharedObjectEvent>();
        /// <summary>
        /// Shared object version, used for synchronization purposes.
        /// </summary>
        private int _version = 0;
        /// <summary>
        /// Indicates whether shared object is persistent.
        /// </summary>
        private bool _persistent = false;

        /// <summary>
        /// Initializes a new instance of the SharedObjectMessage class with given name, version and persistence flag.
        /// </summary>
        /// <param name="name">Event name.</param>
        /// <param name="version">Shared object version.</param>
        /// <param name="persistent">Indicates whether shared object is persistent.</param>
        internal SharedObjectMessage(string name, int version, bool persistent)
            : this(null, name, version, persistent)
        {
        }

        /// <summary>
        /// Initializes a new instance of the SharedObjectMessage class with given listener, name, version and persistence flag.
        /// </summary>
        /// <param name="source">Event listener.</param>
        /// <param name="name">Event name.</param>
        /// <param name="version">Shared object version.</param>
        /// <param name="persistent">Indicates whether shared object is persistent.</param>
        internal SharedObjectMessage(IEventListener source, string name, int version, bool persistent)
            : base(EventType.SHARED_OBJECT, Constants.TypeSharedObject, source)
        {
            _name = name;
            _version = version;
            _persistent = persistent;
        }

        #region ISharedObjectMessage Members

        /// <summary>
        /// Gets shared object event name.
        /// </summary>
        public string Name
        {
            get { return _name; }
        }

        // Internal mutator: the public contract exposes Name as read-only.
        internal void SetName(string name)
        {
            _name = name;
        }

        /// <summary>
        /// Gets shared object version.
        /// </summary>
        public int Version
        {
            get { return _version; }
        }

        /// <summary>
        /// Gets a value indicating whether the shared object is persistent.
        /// </summary>
        public bool IsPersistent
        {
            get { return _persistent; }
        }

        // Internal mutator: the public contract exposes IsPersistent as read-only.
        internal void SetIsPersistent(bool persistent)
        {
            _persistent = persistent;
        }

        /// <summary>
        /// Add a shared object event.
        /// </summary>
        /// <param name="type">Event type.</param>
        /// <param name="key">Handler key.</param>
        /// <param name="value">Event value.</param>
        public void AddEvent(SharedObjectEventType type, string key, object value)
        {
            _events.Enqueue(new SharedObjectEvent(type, key, value));
        }

        /// <summary>
        /// Add a shared object event.
        /// </summary>
        /// <param name="sharedObjectEvent">Shared object event.</param>
        public void AddEvent(ISharedObjectEvent sharedObjectEvent)
        {
            _events.Enqueue(sharedObjectEvent);
        }

        /// <summary>
        /// Clear shared object. Removes all queued events.
        /// </summary>
        public void Clear()
        {
            _events.Clear();
        }

        /// <summary>
        /// Gets a value indicating whether the shared object is empty.
        /// </summary>
        public bool IsEmpty
        {
            get { return _events.Count == 0; }
        }

        #endregion

        /// <summary>
        /// Returns a set of ISharedObjectEvent objects containing informations what to change.
        /// Note: this returns the live internal queue, not a snapshot.
        /// </summary>
        public IQueue<ISharedObjectEvent> Events
        {
            get { return _events; }
        }

        /// <summary>
        /// Add a list of shared object events.
        /// </summary>
        /// <param name="events">List of shared object events.</param>
        public void AddEvents(IEnumerable<ISharedObjectEvent> events)
        {
            _events.AddRange(events);
        }

        #region IEvent Members

        /// <summary>
        /// Gets event context object. For shared object messages this is the event queue.
        /// </summary>
        public override object Object
        {
            get
            {
                return this.Events;
            }
        }

        #endregion

        /// <summary>
        /// Returns a string that represents the current event object fields.
        /// </summary>
        /// <param name="indentLevel">The indentation level used for tracing the header members.</param>
        /// <returns>A string that represents the current event object fields.</returns>
        protected override string ToStringFields(int indentLevel)
        {
            string sep = GetFieldSeparator(indentLevel);
            string value = base.ToStringFields(indentLevel);
            value += sep + "events = ";
            string sep2 = GetFieldSeparator(indentLevel + 1);
            // Each queued event is rendered on its own (deeper-indented) line.
            foreach (ISharedObjectEvent @event in _events)
            {
                //value += sep2 + @event.ToString();
                value += sep2 + "SOEvent(" + @event.Type.ToString() + ", " + @event.Key + ", " + BodyToString(@event.Value, indentLevel + 2) + ")";
            }
            return value;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.IO;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;

namespace System.Net
{
    // Base class for streams that delegate all operations to an inner stream.
    // When the inner stream is a NetworkStream, Begin* calls are routed through
    // the NetworkStream-typed reference (same object, cached cast) so its
    // specific overloads are used.
    internal class DelegatedStream : Stream
    {
        private readonly Stream _stream;
        private readonly NetworkStream _netStream;

        protected DelegatedStream(Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException(nameof(stream));

            _stream = stream;
            // _netStream aliases _stream when it is a NetworkStream; null otherwise.
            _netStream = stream as NetworkStream;
        }

        protected Stream BaseStream
        {
            get
            {
                return _stream;
            }
        }

        public override bool CanRead
        {
            get
            {
                return _stream.CanRead;
            }
        }

        public override bool CanSeek
        {
            get
            {
                return _stream.CanSeek;
            }
        }

        public override bool CanWrite
        {
            get
            {
                return _stream.CanWrite;
            }
        }

        public override long Length
        {
            get
            {
                if (!CanSeek)
                    throw new NotSupportedException(SR.SeekNotSupported);

                return _stream.Length;
            }
        }

        public override long Position
        {
            get
            {
                if (!CanSeek)
                    throw new NotSupportedException(SR.SeekNotSupported);

                return _stream.Position;
            }
            set
            {
                if (!CanSeek)
                    throw new NotSupportedException(SR.SeekNotSupported);

                _stream.Position = value;
            }
        }

        public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
        {
            if (!CanRead)
                throw new NotSupportedException(SR.ReadNotSupported);

            IAsyncResult result = null;
            // Prefer the NetworkStream overload when available.
            if (_netStream != null)
            {
                result = _netStream.BeginRead(buffer, offset, count, callback, state);
            }
            else
            {
                result = _stream.BeginRead(buffer, offset, count, callback, state);
            }
            return result;
        }

        public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
        {
            if (!CanWrite)
                throw new NotSupportedException(SR.WriteNotSupported);

            IAsyncResult result = null;
            // Prefer the NetworkStream overload when available.
            if (_netStream != null)
            {
                result = _netStream.BeginWrite(buffer, offset, count, callback, state);
            }
            else
            {
                result = _stream.BeginWrite(buffer, offset, count, callback, state);
            }
            return result;
        }

        //This calls close on the inner stream
        //however, the stream may not be actually closed, but simply flushed
        public override void Close()
        {
            _stream.Close();
        }

        public override int EndRead(IAsyncResult asyncResult)
        {
            if (!CanRead)
                throw new NotSupportedException(SR.ReadNotSupported);

            int read = _stream.EndRead(asyncResult);
            return read;
        }

        public override void EndWrite(IAsyncResult asyncResult)
        {
            if (!CanWrite)
                throw new NotSupportedException(SR.WriteNotSupported);

            _stream.EndWrite(asyncResult);
        }

        public override void Flush()
        {
            _stream.Flush();
        }

        public override Task FlushAsync(CancellationToken cancellationToken)
        {
            return _stream.FlushAsync(cancellationToken);
        }

        public override int Read(byte[] buffer, int offset, int count)
        {
            if (!CanRead)
                throw new NotSupportedException(SR.ReadNotSupported);

            int read = _stream.Read(buffer, offset, count);
            return read;
        }

        public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            if (!CanRead)
                throw new NotSupportedException(SR.ReadNotSupported);

            return _stream.ReadAsync(buffer, offset, count, cancellationToken);
        }

        public override long Seek(long offset, SeekOrigin origin)
        {
            if (!CanSeek)
                throw new NotSupportedException(SR.SeekNotSupported);

            long position = _stream.Seek(offset, origin);
            return position;
        }

        public override void SetLength(long value)
        {
            if (!CanSeek)
                throw new NotSupportedException(SR.SeekNotSupported);

            _stream.SetLength(value);
        }

        public override void Write(byte[] buffer, int offset, int count)
        {
            if (!CanWrite)
                throw new NotSupportedException(SR.WriteNotSupported);

            _stream.Write(buffer, offset, count);
        }

        public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            if (!CanWrite)
                throw new NotSupportedException(SR.WriteNotSupported);

            return _stream.WriteAsync(buffer, offset, count, cancellationToken);
        }
    }
}
//=============================================================================
// System  : Sandcastle Help File Builder Utilities
// File    : ReferenceItemCollection.cs
// Author  : Eric Woodruff  (Eric@EWoodruff.us)
// Updated : 07/27/2008
// Note    : Copyright 2006-2008, Eric Woodruff, All rights reserved
// Compiler: Microsoft Visual C#
//
// This file contains a collection class used to hold the reference item
// information.
//
// This code is published under the Microsoft Public License (Ms-PL).  A copy
// of the license should be distributed with the code.  It can also be found
// at the project website: http://SHFB.CodePlex.com.  This notice, the
// author's name, and all copyright notices must remain intact in all
// applications, documentation, and source files.
//
// Version     Date     Who  Comments
// ============================================================================
// 1.1.0.0  08/23/2006  EFW  Created the code
// 1.8.0.0  06/23/2008  EFW  Rewrote to support MSBuild project format
//=============================================================================

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing.Design;
using System.Globalization;
using System.IO;
using System.Text;
using System.Xml;

using Microsoft.Build.Evaluation;

using SandcastleBuilder.Utils.Design;

namespace SandcastleBuilder.Utils
{
    /// <summary>
    /// This collection class is used to hold the reference items for a project
    /// </summary>
    public class ReferenceItemCollection : BindingList<ReferenceItem>
    {
        #region Private data members
        //=====================================================================

        // MSBuild item type names for the three supported reference kinds.
        private const string ReferenceType = "Reference";
        private const string ProjectReferenceType = "ProjectReference";
        private const string COMReferenceType = "COMReference";

        // The owning project.
        private SandcastleProject projectFile;
        // Timestamp of the project XML the last time the collection was loaded.
        private DateTime timeOfLastDirty;
        // True while EnsureCurrent is repopulating the list; suppresses
        // ListChanged events (see OnListChanged below).
        private bool loadingItems;
        #endregion

        #region Constructor
        //=====================================================================
        /// <summary>
        /// Internal constructor
        /// </summary>
        /// <param name="project">The project that owns the collection</param>
        internal ReferenceItemCollection(SandcastleProject project)
        {
            projectFile = project;
        }
        #endregion

        #region Sort the collection
        //=====================================================================
        /// <summary>
        /// This is used to sort the collection in ascending order.
        /// </summary>
        public void Sort()
        {
            ((List<ReferenceItem>)base.Items).Sort(
                delegate(ReferenceItem x, ReferenceItem y)
                {
                    // Case-insensitive, culture-aware comparison on the
                    // reference name.
                    return String.Compare(x.Reference, y.Reference,
                        StringComparison.CurrentCultureIgnoreCase);
                });
        }
        #endregion

        #region Load the build items from the project
        //=====================================================================
        /// <summary>
        /// This is used to ensure that the collection has up to date
        /// information about the reference build items in the project.
        /// </summary>
        /// <param name="refresh">True to refresh if out of date or false
        /// to only load the references if not already done.</param>
        /// <remarks>The collection is only loaded when necessary</remarks>
        public void EnsureCurrent(bool refresh)
        {
            Project project = projectFile.MSBuildProject;

            // Skip the reload when the project XML hasn't changed since the
            // last load, or when not forcing a refresh and a load has already
            // happened (timeOfLastDirty is no longer its DateTime.MinValue
            // default).
            if(project.Xml.TimeLastChanged == timeOfLastDirty ||
              (!refresh && timeOfLastDirty != DateTime.MinValue))
                return;

            try
            {
                // Suppress ListChanged events while repopulating
                loadingItems = true;
                this.Clear();
                timeOfLastDirty = project.Xml.TimeLastChanged;

                var referenceGroup = project.GetItems(ReferenceType);

                foreach(var item in referenceGroup)
                    this.Add(new ReferenceItem(new ProjectElement(projectFile, item)));

                referenceGroup = project.GetItems(ProjectReferenceType);

                foreach(var item in referenceGroup)
                    this.Add(new ProjectReferenceItem(new ProjectElement(
                        projectFile, item)));

                referenceGroup = project.GetItems(COMReferenceType);

                foreach(var item in referenceGroup)
                    this.Add(new COMReferenceItem(new ProjectElement(projectFile, item)));
            }
            finally
            {
                loadingItems = false;
            }
        }
        #endregion

        #region Add and remove elements from the project
        //=====================================================================

        // Note that adding COM references is not supported in the standalone
        // GUI as I haven't found a way to get a list of COM objects and
        // generate the necessary metadata for the reference items.  It does
        // support COM references added via Visual Studio.

        /// <summary>
        /// Add a new GAC or file reference item to the collection
        /// </summary>
        /// <param name="referenceName">The reference name.  This will be the
        /// GAC name or the base filename for file reference.</param>
        /// <param name="hintPath">The hint path for file references.  For
        /// GAC references, this should be null.</param>
        /// <returns>The <see cref="ReferenceItem" /> added to the
        /// project.  If the named item already exists in the collection
        /// a reference to the existing item is returned.</returns>
        /// <remarks>The <see cref="ReferenceItem" /> constructor is internal
        /// so that we control creation of the items and can associate them
        /// with a project element.</remarks>
        public ReferenceItem AddReference(string referenceName, string hintPath)
        {
            ReferenceItem item = new ReferenceItem(new ProjectElement(
                projectFile, ReferenceType, referenceName));

            if(!String.IsNullOrEmpty(hintPath))
                item.HintPath = new FilePath(hintPath, projectFile);

            // De-duplicate: return the existing item if one matches
            int idx = base.IndexOf(item);

            if(idx == -1)
                base.Add(item);
            else
                item = base[idx];

            return item;
        }

        /// <summary>
        /// Add a new project reference item to the collection
        /// </summary>
        /// <param name="projectPath">The path to the project</param>
        /// <returns>The <see cref="ProjectReferenceItem" /> added to the
        /// project.  If the named item already exists in the collection
        /// a reference to the existing item is returned.</returns>
        public ProjectReferenceItem AddProjectReference(string projectPath)
        {
            ProjectReferenceItem item = new ProjectReferenceItem(
                new ProjectElement(projectFile, ProjectReferenceType, projectPath));

            // De-duplicate: return the existing item if one matches
            int idx = base.IndexOf(item);

            if(idx == -1)
                base.Add(item);
            else
                item = (ProjectReferenceItem)base[idx];

            return item;
        }

        /// <summary>
        /// Remove an item from the collection and from the project file
        /// </summary>
        /// <param name="index">The index of the item to remove</param>
        protected override void RemoveItem(int index)
        {
            ReferenceItem item = this[index];
            // Keep the MSBuild project file in sync with the collection
            item.ProjectElement.RemoveFromProjectFile();
            base.RemoveItem(index);
        }

        /// <summary>
        /// This is overridden to suppress the event when loading references
        /// </summary>
        /// <param name="e">The event arguments</param>
        protected override void OnListChanged(ListChangedEventArgs e)
        {
            if(!loadingItems)
                base.OnListChanged(e);
        }
        #endregion
    }
}
using System;
using System.Collections.Generic;
using System.Linq.Expressions;
using Arcade.Dsl.Implementation;

namespace Arcade.Dsl
{
    // ReSharper disable UnusedTypeParameter

    /// <summary>
    /// Fluent extension methods for building flow configurations.  Each method
    /// wraps the current <c>IFlowConfigurer</c> in a more specific configurer,
    /// forming a backward-linked chain (via <c>Previous</c>) that is later
    /// walked by the internal helpers at the bottom of this class.
    /// </summary>
    public static class ExtendsFlowConfigurer
    {
        // --- Untyped continuations -------------------------------------------------

        /// <summary>Continues the flow with an EBC (event-based component) of type TFlow.</summary>
        public static IFlowConfigurer ContinueWithEbc<TFlow>(this IFlowConfigurer value)
            where TFlow : IFlow
        {
            return new EbcFlowConfigurer(value.FlowName, value, typeof(TFlow));
        }

        /// <summary>Continues with an EBC created by the supplied factory rather than by the container.</summary>
        public static IFlowConfigurer ContinueWithEbc<TFlow>(this IFlowConfigurer value, Func<TFlow> createFlow)
            where TFlow : class, IFlow
        {
            return new SelfCreatingEbcFlowConfigurer(value.FlowName, value, typeof(TFlow), createFlow);
        }

        /// <summary>Continues with another flow referenced by name.</summary>
        public static IFlowConfigurer ContinueWithNamedFlow(this IFlowConfigurer value, string flowName)
        {
            return new ContinueWithNamedFlowConfigurer(value.FlowName, value, flowName);
        }

        // --- Typed continuations ---------------------------------------------------

        /// <summary>Continues the typed flow with an EBC mapping TIn to TOut.</summary>
        public static IFlowConfigurer<TOut> ContinueWithEbc<TFlow, TIn, TOut>(this IFlowConfigurer<TIn> value)
            where TFlow : IFlow<TIn, TOut>
        {
            return new EbcFlowConfigurer<TOut>(value.FlowName, value, typeof(TFlow));
        }

        /// <summary>Continues the typed flow with a factory-created EBC mapping TIn to TOut.</summary>
        public static IFlowConfigurer<TOut> ContinueWithEbc<TFlow, TIn, TOut>(this IFlowConfigurer<TIn> value, Func<TFlow> createFlow)
            where TFlow : class, IFlow<TIn, TOut>
        {
            return new SelfCreatingEbcFlowConfigurer<TOut>(value.FlowName, value, typeof(TFlow), createFlow);
        }

        /// <summary>Continues with a flow continuation component.</summary>
        public static IFlowConfigurer<TOut> ContinueWithContinuation<TContinuation, TIn, TOut>(this IFlowConfigurer<TIn> value)
            where TContinuation : IFlowContinuation<TIn, TOut>
        {
            return new ContinuationFlowConfigurer<TOut>(value.FlowName, value, typeof(TContinuation), typeof(IFlowContinuation<TIn, TOut>));
        }

        /// <summary>Continues with a factory-created flow continuation component.</summary>
        public static IFlowConfigurer<TOut> ContinueWithContinuation<TContinuation, TIn, TOut>(this IFlowConfigurer<TIn> value, Func<TContinuation> createContinuation)
            where TContinuation : class , IFlowContinuation<TIn, TOut>
        {
            return new SelfCreatingContinuationFlowConfigurer<TOut>(value.FlowName, value, typeof(TContinuation), typeof(IFlowContinuation<TIn, TOut>), createContinuation);
        }

        /// <summary>
        /// Continues with a continuation whose concrete type is inferred from the
        /// expression body (the expression itself is never compiled or invoked).
        /// </summary>
        public static IFlowConfigurer<TOut> ContinueWithContinuation<TIn, TOut>(this IFlowConfigurer<TIn> value, Expression<Func<IFlowContinuation<TIn, TOut>>> create)
        {
            return new ContinuationFlowConfigurer<TOut>(value.FlowName, value, create.Body.Type, typeof(IFlowContinuation<TIn, TOut>));
        }

        /// <summary>Continues with a plain function step.</summary>
        public static IFlowConfigurer<TOut> ContinueWithFunction<TIn, TOut>(this IFlowConfigurer<TIn> value, Func<TIn, TOut> function)
        {
            return new FunctionFlowConfigurer<TOut>(value.FlowName, value, function);
        }

        /// <summary>Continues the typed flow with another flow referenced by name.</summary>
        public static IFlowConfigurer<TOut> ContinueWithNamedFlow<TIn, TOut>(this IFlowConfigurer<TIn> value, string flowName)
        {
            return new ContinueWithNamedFlowConfigurer<TOut>(value.FlowName, value, flowName);
        }

        /// <summary>Continues an untyped flow with an output-only EBC.</summary>
        public static IFlowConfigurer<TOut> ContinueWithEbc<TFlow, TOut>(this IFlowConfigurer value)
            where TFlow : IOutflow<TOut>
        {
            return new EbcFlowConfigurer<TOut>(value.FlowName, value, typeof(TFlow));
        }

        /// <summary>Continues an untyped flow with a factory-created output-only EBC.</summary>
        public static IFlowConfigurer<TOut> ContinueWithEbc<TFlow, TOut>(this IFlowConfigurer value, Func<TFlow> createFlow)
            where TFlow : class, IOutflow<TOut>
        {
            return new SelfCreatingEbcFlowConfigurer<TOut>(value.FlowName, value, typeof(TFlow), createFlow);
        }

        /// <summary>Terminates the typed flow into a sink EBC (no further output).</summary>
        public static IFlowConfigurer ContinueWithEbc<TFlow, TIn>(this IFlowConfigurer<TIn> value)
            where TFlow : ISink<TIn>
        {
            return new EbcFlowConfigurer(value.FlowName, value, typeof(TFlow));
        }

        /// <summary>Terminates the typed flow into a factory-created sink EBC.</summary>
        public static IFlowConfigurer ContinueWithEbc<TFlow, TIn>(this IFlowConfigurer<TIn> value, Func<TFlow> createFlow)
            where TFlow : class, ISink<TIn>
        {
            return new SelfCreatingEbcFlowConfigurer(value.FlowName, value, typeof(TFlow), createFlow);
        }

        // --- Branching and joinpoints ----------------------------------------------

        /// <summary>Routes items matching the predicate into a side branch.</summary>
        public static IFlowConfigurer<TIn> BranchWhen<TIn>(this IFlowConfigurer<TIn> value, Func<TIn, bool> when, Func<IFlowConfigurer<TIn>, BranchEnd> sideBranch)
        {
            return new ConditionalFlowConfigurer<TIn>(value.FlowName, value, when, sideBranch);
        }

        /// <summary>Jumps to a named joinpoint when the predicate holds.</summary>
        public static IFlowConfigurer<TIn> GoToJoinpointIf<TIn>(this IFlowConfigurer<TIn> value, string joinpointName, Func<TIn, bool> when)
        {
            return new GoToFlowConfigurer<TIn>(value.FlowName, value, when, joinpointName);
        }

        /// <summary>Continues with an EBC that is also a named joinpoint target.</summary>
        public static IFlowConfigurer<TOut> JoinOrContinueWithEbc<TFlow, TIn, TOut>(this IFlowConfigurer<TIn> value, string joinpointName)
            where TFlow : IFlow<TIn, TOut>
        {
            var decorated = new EbcFlowConfigurer<TOut>(value.FlowName, value, typeof(TFlow));
            return new JoinpointFlowConfigurer<TOut>(decorated, joinpointName);
        }

        /// <summary>Continues with a function step that is also a named joinpoint target.</summary>
        public static IFlowConfigurer<TOut> JoinOrContinueWithFunction<TIn, TOut>(this IFlowConfigurer<TIn> value, Func<TIn, TOut> function, string joinpointName)
        {
            var decorated = new FunctionFlowConfigurer<TOut>(value.FlowName, value, function);
            return new JoinpointFlowConfigurer<TOut>(decorated, joinpointName);
        }

        /// <summary>Continues with a continuation that is also a named joinpoint target.</summary>
        public static IFlowConfigurer<TOut> JoinOrContinueWithContinuation<TContinuation, TIn, TOut>(this IFlowConfigurer<TIn> value, string joinpointName)
            where TContinuation : IFlowContinuation<TIn, TOut>
        {
            var decorated = new ContinuationFlowConfigurer<TOut>(value.FlowName, value, typeof (TContinuation), typeof (IFlowContinuation<TIn, TOut>));
            return new JoinpointFlowConfigurer<TOut>(decorated, joinpointName);
        }

        /// <summary>Waits on a named port and marks the step as a joinpoint target.</summary>
        public static IFlowConfigurer<TOut> JoinAtPort<TIn, TOut>(this IFlowConfigurer<TIn> value, string joinpointName, string portName)
        {
            var decorated = new WaitOnPortFlowConfigurer<TOut>(value.FlowName, value, portName);
            return new JoinpointFlowConfigurer<TOut>(decorated, joinpointName);
        }

        /// <summary>Fires a named trigger and marks the step as a joinpoint target.</summary>
        public static IFlowConfigurer<TOut> JoinAtTrigger<T, TOut>(this IFlowConfigurer<TOut> value, Func<TOut, T> selector, string joinpointName, string triggerName)
        {
            var decorated = new TriggerFlowConfigurer<TOut>(value.FlowName, value, triggerName, selector);
            return new JoinpointFlowConfigurer<TOut>(decorated, joinpointName);
        }

        // --- Scatter / gather ------------------------------------------------------

        /// <summary>
        /// Fans each element of the incoming enumerable out through the given
        /// sub-flow and collects the results back into an enumerable.
        /// </summary>
        public static IFlowConfigurer<TEnumerableOut> ScatterTo<TEnumerableIn, TIn, TOut, TEnumerableOut>(this IFlowConfigurer<TEnumerableIn> value, Func<IFlowConfigurer<TIn>, Gather> scatterOperation)
            where TEnumerableIn : IEnumerable<TIn>
            where TEnumerableOut : IEnumerable<TOut>
        {
            var startScatter = new ScatterOperationFlowConfigurer<TIn>(value.FlowName, value);
            var gatherFlowConfigurer = scatterOperation(startScatter).Last;
            return new ScatterFlowConfigurer<TEnumerableOut>(value.FlowName, value, startScatter, gatherFlowConfigurer);
        }

        /// <summary>Ends a scatter sub-flow, collecting its results.</summary>
        public static Gather Gather<TOut>(this IFlowConfigurer<TOut> value, TreatExceptionsWhenGathering treatExceptions = TreatExceptionsWhenGathering.FailFlow)
        {
            var gatherFlowConfigurer = new GatherFlowConfigurer<TOut>(value.FlowName, value, treatExceptions);
            return new Gather(gatherFlowConfigurer);
        }

        // --- Flow state ------------------------------------------------------------

        /// <summary>Stores a value derived from the message into flow state; passes the message on unchanged.</summary>
        public static IFlowConfigurer<TIn> WriteState<TIn, TState>(this IFlowConfigurer<TIn> value, Func<TIn, TState> selector)
        {
            return new WriteStateFlowConfigurer<TIn>(value.FlowName, value, selector, null);
        }

        /// <summary>Stores a derived value into flow state and maps the message to a new output.</summary>
        public static IFlowConfigurer<TOut> WriteState<TIn, TOut, TState>(this IFlowConfigurer<TIn> value, Func<TIn, TState> stateSelector, Func<TIn, TOut> outputSelector)
        {
            return new WriteStateFlowConfigurer<TOut>(value.FlowName, value, stateSelector, outputSelector);
        }

        /// <summary>Reads previously stored state and combines it with the current message.</summary>
        public static IFlowConfigurer<TOut> ReadState<TIn, TState, TOut>(this IFlowConfigurer<TIn> value, Func<TIn, TState, TOut> combine)
        {
            return new ReadStateFlowConfigurer<TOut>(value.FlowName, value, typeof (TState), combine);
        }

        // --- Terminators -----------------------------------------------------------

        /// <summary>Finalizes the chain into a runnable flow configuration.</summary>
        public static FlowConfiguration Exit(this IFlowConfigurer value)
        {
            return new FlowConfiguration(new FinalFlowConfigurer(value.FlowName, value));
        }

        /// <summary>Ends a side branch at flow exit (unnamed joinpoint).</summary>
        public static BranchEnd JoinOnExit<TOut>(this IFlowConfigurer<TOut> value)
        {
            return new BranchEnd(value, String.Empty);
        }

        /// <summary>Ends a side branch at the named joinpoint.</summary>
        public static BranchEnd JoinAt<TOut>(this IFlowConfigurer<TOut> value, string joinpointName)
        {
            return new BranchEnd(value, joinpointName);
        }

        /// <summary>Ends an untyped side branch at flow exit.</summary>
        public static BranchEnd JoinOnExit(this IFlowConfigurer value)
        {
            return new BranchEnd(value);
        }

        /// <summary>Ends an untyped side branch at the named joinpoint.</summary>
        public static BranchEnd JoinAt(this IFlowConfigurer value, string joinpointName)
        {
            return new BranchEnd(value, joinpointName);
        }

        /// <summary>Suspends the flow until input arrives on the named port.</summary>
        public static IFlowConfigurer<TOut> WaitOnPort<TIn, TOut>(this IFlowConfigurer<TIn> value, string portName)
        {
            return new WaitOnPortFlowConfigurer<TOut>(value.FlowName, value, portName);
        }

        /// <summary>Fires the named trigger with a value selected from the message.</summary>
        public static IFlowConfigurer<TOut> Trigger<T, TOut>(this IFlowConfigurer<TOut> value, string triggerName, Func<TOut, T> selector)
        {
            return new TriggerFlowConfigurer<TOut>(value.FlowName, value, triggerName, selector);
        }

        // --- Internal chain-walking helpers ----------------------------------------

        /// <summary>
        /// Walks the configurer chain backwards (via Previous) yielding every
        /// configurer matching the predicate; descends into conditional side
        /// branches. Stops before <paramref name="stopAt"/> when given.
        /// </summary>
        internal static IEnumerable<IFlowConfigurer> FindAll(this IFlowConfigurer value, Func<IFlowConfigurer, bool> where, IFlowConfigurer stopAt = null)
        {
            var current = value;
            do
            {
                if(stopAt != null && current.Equals(stopAt))
                    yield break;

                if (where(current))
                    yield return current;

                if (current is IConditionalFlowConfigurer)
                {
                    // Recurse into the side branch of a conditional, using the
                    // conditional itself as the stop marker.
                    var conditional = current as IConditionalFlowConfigurer;
                    var sub = conditional.JoinFlowConfigurer.FindAll(where, current);
                    foreach (var subFlowConfigurer in sub)
                    {
                        yield return subFlowConfigurer;
                    }
                }

                current = current.Previous;
            } while (current != null);
        }

        /// <summary>
        /// Returns the configurer in the chain whose Previous is the given
        /// root, i.e. the chain element immediately after the root.
        /// </summary>
        internal static IFlowConfigurer NextParentAfter(this IFlowConfigurer value, IFlowConfigurer root)
        {
            var current = value;
            while (current != null && current.Previous != null && !Equals(current.Previous, root))
            {
                current = current.Previous;
            }
            return current;
        }

        /// <summary>Returns the first configurer in the chain (the one with no Previous).</summary>
        internal static IFlowConfigurer GetRoot(this IFlowConfigurer value)
        {
            var current = value;
            while (current.Previous != null)
            {
                current = current.Previous;
            }
            return current;
        }
    }
    // ReSharper restore UnusedTypeParameter
}
using System;
using System.Drawing;
using System.Drawing.Drawing2D;

namespace Netron.Lithium
{
    /// <summary>
    /// Represents the connection between two connectors
    /// </summary>
    public class Connection : Entity
    {
        #region Fields
        /// <summary>
        /// the shape where the connection starts
        /// </summary>
        protected ShapeBase from;
        /// <summary>
        /// the shape where the connection ends
        /// </summary>
        protected ShapeBase to;
        /// <summary>
        /// the start and end points (caches; use the Start/End properties,
        /// which compute the current shape centers)
        /// </summary>
        protected Point start, end;
        /// <summary>
        /// the pen used to draw the connection,
        /// can switch depending on the hovering state e.g.
        /// </summary>
        protected Pen currentPen;
        #endregion

        #region Properties
        /// <summary>
        /// Gets or sets the shape where the connection starts
        /// </summary>
        public ShapeBase From
        {
            get{return from;}
            set{from = value;}
        }

        /// <summary>
        /// Gets or sets where the connection ends
        /// </summary>
        public ShapeBase To
        {
            get{return to;}
            set{to = value;}
        }

        /// <summary>
        /// Get the point where the connection starts (the center of the From shape)
        /// </summary>
        public Point Start
        {
            get
            {
                return new Point(from.X+from.Width/2,from.Y+from.Height/2);
            }
        }

        /// <summary>
        /// Gets the point where connection ends (the center of the To shape)
        /// </summary>
        public Point End
        {
            get
            {
                end = new Point(to.X+to.Width/2,to.Y+to.Height/2);
                return end;
            }
        }
        #endregion

        #region Constructors
        /// <summary>
        /// Default ctor
        /// </summary>
        public Connection()
        {
            // FIX: the original left currentPen null, which caused a
            // NullReferenceException in Paint when this ctor was used.
            // Default to the shared black pen, as the other ctors do.
            currentPen = blackPen;
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="from">the shape where the connection starts</param>
        /// <param name="to">the shape where the connection ends</param>
        public Connection(ShapeBase from, ShapeBase to)
        {
            this.from = from;
            this.to = to;
            currentPen = blackPen;
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="from">the shape where the connection starts</param>
        /// <param name="to">the shape where the connection ends</param>
        /// <param name="color">the color of the connection</param>
        public Connection(ShapeBase from, ShapeBase to, Color color) : this(from, to)
        {
            currentPen = new Pen(color, 1f);
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="from">the shape where the connection starts</param>
        /// <param name="to">the shape where the connection ends</param>
        /// <param name="color">the color of the connection</param>
        /// <param name="width">the (float) width of the connection (in pixels)</param>
        public Connection(ShapeBase from, ShapeBase to, Color color, float width) : this(from, to, color)
        {
            currentPen = new Pen(color, width);
        }
        #endregion

        #region Methods
        /// <summary>
        /// Paints the connection on the canvas
        /// The From part is always the child node while the To part is
        /// always the parent node.
        /// Hence;
        /// - vertical: Parent->Child &lt;=&gt; Top->Bottom
        /// - horizontal: Parent->Child &lt;=&gt; Left->Right
        /// </summary>
        /// <param name="g">the GDI+ graphics surface to draw on</param>
        public override void Paint(System.Drawing.Graphics g)
        {
            g.SmoothingMode = SmoothingMode.AntiAlias;
            PointF p1, p2, p3, p4; //intermediate points
            if(visible)
            {
                // hover/selection overrides the configured pen
                if(hovered || isSelected)
                    pen = redPen;
                else
                    pen = currentPen;
                switch(site.ConnectionType)
                {
                    case ConnectionType.Default:
                        // single straight segment between the facing edges
                        switch(site.LayoutDirection)
                        {
                            case TreeDirection.Vertical:
                                p1 = new PointF(from.Left + from.Width/2, from.Top);
                                p2 = new PointF(to.Left + to.Width/2, to.Bottom+5);
                                g.DrawLine(pen,p1,p2);
                                break;
                            case TreeDirection.Horizontal:
                                p1 = new PointF(from.Left, from.Top + from.Height/2);
                                p2 = new PointF(to.Right +4, to.Top + to.Height/2);
                                g.DrawLine(pen,p1,p2);
                                break;
                        }
                        break;
                    case ConnectionType.Traditional:
                        // three orthogonal segments with an elbow midway
                        switch(site.LayoutDirection)
                        {
                            case TreeDirection.Vertical:
                                p1 = new PointF(from.Left + from.Width/2, from.Top - (from.Top - to.Bottom)/2);
                                p2 = new PointF(to.Left + to.Width/2, from.Top - (from.Top - to.Bottom)/2);
                                g.DrawLine(pen, Start,p1);
                                g.DrawLine(pen, p1, p2);
                                g.DrawLine(pen, End, p2);
                                break;
                            case TreeDirection.Horizontal:
                                p1 = new PointF(to.Right + (from.Left - to.Right)/2, from.Top + from.Height/2);
                                p2 = new PointF(to.Right + (from.Left - to.Right)/2, to.Top + to.Height/2);
                                g.DrawLine(pen, Start,p1);
                                g.DrawLine(pen, p1, p2);
                                g.DrawLine(pen, End, p2);
                                break;
                        }
                        break;
                    case ConnectionType.Bezier:
                        // cubic bezier with control points at the midway line
                        switch(site.LayoutDirection)
                        {
                            case TreeDirection.Vertical:
                                p1 = new PointF(from.Left+from.Width/2,from.Top);
                                p2 = new PointF(from.Left + from.Width/2, from.Top - (from.Top - to.Bottom)/2);
                                p3 = new PointF(to.Left + to.Width/2, from.Top - (from.Top - to.Bottom)/2);
                                p4 = new PointF(to.Left+to.Width/2,to.Bottom);
                                g.DrawBezier(pen, p1, p2, p3, p4);
                                break;
                            case TreeDirection.Horizontal:
                                p1 = new PointF(to.Right, to.Top + to.Height/2);
                                p2 = new PointF(to.Right + (from.Left - to.Right)/2, to.Top + to.Height/2);
                                p3 = new PointF(to.Right + (from.Left - to.Right)/2, from.Top + from.Height/2);
                                p4 = new PointF(from.Left,from.Top + from.Height/2);
                                g.DrawBezier(pen, p1, p2, p3, p4);
                                break;
                        }
                        break;
                }
            }
        }

        /// <summary>
        /// Invalidates the connection (the union of both endpoint shapes)
        /// </summary>
        public override void Invalidate()
        {
            site.Invalidate(Rectangle.Union(from.rectangle,to.rectangle));
        }

        /// <summary>
        /// Tests if the mouse hits this connection
        /// </summary>
        /// <param name="p">the point to test</param>
        /// <returns>true if the point lies on (a neighborhood of) the connection</returns>
        public override bool Hit(Point p)
        {
            PointF p1,p2, p3, s;
            RectangleF r1, r2, r3;
            switch(site.ConnectionType)
            {
                case ConnectionType.Default:
                    #region The default Hit method
                    float o,u;
                    // FIX: the original read the 'start'/'end' fields here, but
                    // 'start' is never assigned anywhere and 'end' is only set
                    // as a side effect of the End getter, so hit-testing ran
                    // against (0,0)/stale points. Use the computed properties
                    // so the test follows the current shape positions.
                    p1 = Start;
                    p2 = End;

                    // p1 must be the leftmost point.
                    if (p1.X > p2.X)
                    {
                        s = p2;
                        p2 = p1;
                        p1 = s;
                    }
                    //this is specifically necessary when the layout works horizontally
                    //the method beneth will not return true as should be in this case
                    if(p1.Y==p2.Y)
                    {
                        p1.Y+=-3;
                        return new RectangleF(p1,new SizeF(p2.X-p1.X,6)).Contains(p);
                    }

                    r1 = new RectangleF(p1.X, p1.Y, 0, 0);
                    r2 = new RectangleF(p2.X, p2.Y, 0, 0);
                    r1.Inflate(3, 3);
                    r2.Inflate(3, 3);
                    //this is like a topological neighborhood
                    //the connection is shifted left and right
                    //and the point under consideration has to be in between.
                    if (RectangleF.Union(r1, r2).Contains(p))
                    {
                        if (p1.Y < p2.Y) //SWNE
                        {
                            o = r1.Left + (((r2.Left - r1.Left) * (p.Y - r1.Bottom)) / (r2.Bottom - r1.Bottom));
                            u = r1.Right + (((r2.Right - r1.Right) * (p.Y - r1.Top)) / (r2.Top - r1.Top));
                            return ((p.X > o) && (p.X < u));
                        }
                        else //NWSE
                        {
                            o = r1.Left + (((r2.Left - r1.Left) * (p.Y - r1.Top)) / (r2.Top - r1.Top));
                            u = r1.Right + (((r2.Right - r1.Right) * (p.Y - r1.Bottom)) / (r2.Bottom - r1.Bottom));
                            return ((p.X > o) && (p.X < u));
                        }
                    }
                    #endregion
                    break;
                case ConnectionType.Traditional:
                    #region The rectangular Hit method
                    switch(site.LayoutDirection)
                    {
                        case TreeDirection.Vertical:
                            p1 = new PointF(from.Left + from.Width/2-5, from.Top - (from.Top - to.Bottom)/2-5); //shift 5 to contain the connection
                            p2 = new PointF(to.Left + to.Width/2-5, from.Top - (from.Top - to.Bottom)/2-5);
                            p3 = new Point(to.Left+to.Width/2-5,to.Bottom-5);
                            r1 = new RectangleF(p1, new SizeF(10,(from.Top - to.Bottom)/2+5));
                            if(p1.X<p2.X)
                                r2 = new RectangleF(p1,new SizeF(p2.X-p1.X,10));
                            else
                                r2 = new RectangleF(p2,new SizeF(p1.X-p2.X,10));
                            r3 = new RectangleF(p3, new SizeF(10, (from.Top - to.Bottom)/2+5));
                            return r1.Contains(p.X,p.Y) || r2.Contains(p.X,p.Y) || r3.Contains(p.X,p.Y) ;
                        case TreeDirection.Horizontal:
                            // NOTE(review): the horizontal case computes its elbow
                            // points but never tests them, falling through to
                            // 'return false' — looks unfinished; left as-is.
                            p1 = new PointF(to.Right + (from.Left - to.Right)/2, from.Top + from.Height/2);
                            p2 = new PointF(to.Right + (from.Left - to.Right)/2, to.Top + to.Height/2);
                            break;
                    }
                    #endregion
                    break;
            }
            return false;
        }

        /// <summary>
        /// Moves the connection with the given shift
        /// (no-op: endpoints follow the shapes they are attached to)
        /// </summary>
        /// <param name="p">the shift vector</param>
        public override void Move(Point p)
        {
        }
        #endregion
    }
}
// // AsyncTests.Framework.TestContext // // Authors: // Martin Baulig (martin.baulig@gmail.com) // // Copyright 2012 Xamarin Inc. (http://www.xamarin.com) // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//
using System;
using System.Linq;
using System.Reflection;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
using NUnit.Framework;
using NUnit.Framework.Constraints;
using NUnit.Framework.SyntaxHelpers;

namespace AsyncTests.Framework
{
    /// <summary>
    /// Per-test execution context. Collects assertion failures and warnings
    /// while a test method runs, tracks disposables for automatic cleanup,
    /// and throws a single <c>TestErrorException</c> with all collected
    /// errors when the test method returns.
    /// </summary>
    public abstract class TestContext : IDisposable
    {
        // Lazily created: null means "none so far".
        List<TestError> errors;
        List<TestWarning> warnings;
        List<IDisposable> disposables;
        // Number of successful assertions since the last ClearErrors().
        int countAssertions;

        /// <summary>The fixture this context belongs to.</summary>
        public TestFixture Fixture {
            get; private set;
        }

        // The fixture instance the test methods are invoked on.
        internal object Instance {
            get; private set;
        }

        public ThreadingMode ThreadingMode {
            get; internal set;
        }

        public TestConfiguration Configuration {
            get; internal set;
        }

        protected TestContext (TestFixture fixture, object instance)
        {
            this.Fixture = fixture;
            this.Instance = instance;
            ThreadingMode = ThreadingMode.Default;
        }

        public virtual void Log (string message, params object[] args)
        {
            Fixture.Log (message, args);
        }

        /// <summary>
        /// Returns the current <see cref="Configuration"/> downcast to
        /// <typeparamref name="T"/>, asserting that it is set and of the
        /// requested type.
        /// </summary>
        public T GetConfiguration<T> () where T : TestConfiguration
        {
            var message = string.Format ("GetConfiguration({0})", typeof (T).FullName);
            // FIX: 'message' is already fully formatted; passing it again as a
            // format argument to "GetConfiguration({0})" produced a doubled
            // "GetConfiguration(GetConfiguration(...))" failure message.
            Assert (Configuration, Is.Not.Null, message);
            Assert (Configuration, Is.InstanceOfType (typeof (T)), message);
            return (T)Configuration;
        }

        // Resets all collected errors, warnings and the assertion counter.
        internal void ClearErrors ()
        {
            errors = null;
            warnings = null;
            countAssertions = 0;
        }

        internal void AddError (string name, Exception error)
        {
            if (errors == null)
                errors = new List<TestError> ();
            errors.Add (new TestError (name, null, error));
        }

        public bool HasErrors {
            get { return errors != null; }
        }

        internal IList<TestError> Errors {
            get {
                return HasErrors ? errors.AsReadOnly () : null;
            }
        }

        public bool HasWarnings {
            get { return warnings != null; }
        }

        public IList<TestWarning> Warnings {
            get {
                return HasWarnings ? warnings.AsReadOnly () : null;
            }
        }

        /// <summary>
        /// Throws a <c>TestErrorException</c> wrapping all collected errors,
        /// or does nothing when no errors were recorded.
        /// </summary>
        internal void CheckErrors (string message)
        {
            if (errors == null)
                return;
            throw new TestErrorException (message, errors.ToArray ());
        }

        /// <summary>
        /// Invokes a test method, passing this context and — when the method
        /// declares a second parameter — the cancellation token.
        /// </summary>
        protected internal Task Invoke (TestCase test, CancellationToken cancellationToken)
        {
            object[] args;
            if (test.Method.GetParameters ().Length == 1)
                args = new object[] { this };
            else
                args = new object[] { this, cancellationToken };
            return Invoke_internal (test.Name, test.Method, Instance, args);
        }

        protected internal async Task Invoke_internal (string name, MethodInfo method, object instance, object[] args)
        {
            ClearErrors ();
            try {
                var retval = method.Invoke (instance, args);
                var task = retval as Task;
                // Await asynchronous test methods; synchronous ones return null
                // or a non-Task value and complete immediately.
                if (task != null)
                    await task;
            } finally {
                AutoDispose ();
            }
            // Raise all non-fatal expectation failures collected during the run.
            CheckErrors (name);
        }

        #region Assertions

        /*
         * By default, Expect() is non-fatal.  Multiple failed expectations will be
         * collected and a TestErrorException will be thrown when the test method
         * returns.
         *
         * Use Assert() to immediately abort the test method or set 'AlwaysFatal = true'.
         *
         */

        public bool AlwaysFatal {
            get; set;
        }

        public bool Expect (object actual, Constraint constraint)
        {
            return Expect (false, actual, constraint, null, null);
        }

        public bool Expect (object actual, Constraint constraint, string message)
        {
            return Expect (false, actual, constraint, message, null);
        }

        public bool Expect (object actual, Constraint constraint, string message, params object[] args)
        {
            return Expect (false, actual, constraint, message, args);
        }

        /// <summary>
        /// Core assertion: records a failure (and optionally throws when
        /// <paramref name="fatal"/> or <see cref="AlwaysFatal"/> is set) if
        /// <paramref name="actual"/> does not satisfy the constraint.
        /// </summary>
        /// <returns><c>true</c> when the constraint matched.</returns>
        public bool Expect (bool fatal, object actual, Constraint constraint, string message, params object[] args)
        {
            if (constraint.Matches (actual)) {
                ++countAssertions;
                return true;
            }
            using (var writer = new TextMessageWriter (message, args)) {
                constraint.WriteMessageTo (writer);
                var error = new AssertionException (writer.ToString ());

                // FIX: removed a stray double semicolon and simplified the
                // null/empty check via string.IsNullOrEmpty.
                string text = string.Empty;
                if (!string.IsNullOrEmpty (message)) {
                    if (args != null)
                        text = string.Format (message, args);
                    else
                        text = message;
                }

                AddError (text, error);
                if (AlwaysFatal || fatal)
                    throw error;
                return false;
            }
        }

        public void Assert (object actual, Constraint constraint)
        {
            Expect (true, actual, constraint, null, null);
        }

        public void Assert (object actual, Constraint constraint, string message)
        {
            Expect (true, actual, constraint, message, null);
        }

        public void Assert (object actual, Constraint constraint, string message, params object[] args)
        {
            Expect (true, actual, constraint, message, args);
        }

        public bool Expect (bool condition, string message, params object[] args)
        {
            return Expect (false, condition, Is.True, message, args);
        }

        public bool Expect (bool condition, string message)
        {
            return Expect (false, condition, Is.True, message, null);
        }

        public bool Expect (bool condition)
        {
            return Expect (false, condition, Is.True, null, null);
        }

        public void Assert (bool condition, string message, params object[] args)
        {
            Expect (true, condition, Is.True, message, args);
        }

        public void Assert (bool condition, string message)
        {
            Expect (true, condition, Is.True, message, null);
        }

        public void Assert (bool condition)
        {
            Expect (true, condition, Is.True, null, null);
        }

        public void Warning (string message, params object[] args)
        {
            Warning (string.Format (message, args));
        }

        public void Warning (string message)
        {
            if (warnings == null)
                warnings = new List<TestWarning> ();
            warnings.Add (new TestWarning (message));
        }

        #endregion

        #region Disposing

        /// <summary>
        /// Registers a disposable to be disposed when the current test
        /// method completes.  Null is silently ignored.
        /// </summary>
        public void AutoDispose (IDisposable disposable)
        {
            if (disposable == null)
                return;
            if (disposables == null)
                disposables = new List<IDisposable> ();
            disposables.Add (disposable);
        }

        // Disposes all registered disposables; a failing Dispose() is recorded
        // as a test error rather than aborting the cleanup of the others.
        void AutoDispose ()
        {
            if (disposables == null)
                return;
            foreach (var disposable in disposables) {
                try {
                    disposable.Dispose ();
                } catch (Exception ex) {
                    AddError ("Auto-dispose failed", ex);
                }
            }
            disposables = null;
        }

        ~TestContext ()
        {
            Dispose (false);
        }

        public void Dispose ()
        {
            Dispose (true);
            GC.SuppressFinalize (this);
        }

        protected virtual void Dispose (bool disposing)
        {
        }

        #endregion
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gax = Google.Api.Gax;
using sys = System;

namespace Google.Ads.GoogleAds.V8.Resources
{
    /// <summary>Resource name for the <c>WebpageView</c> resource.</summary>
    public sealed partial class WebpageViewName : gax::IResourceName, sys::IEquatable<WebpageViewName>
    {
        /// <summary>The possible contents of <see cref="WebpageViewName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>
            /// A resource name with pattern <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>.
            /// </summary>
            CustomerAdGroupCriterion = 1,
        }

        // NOTE: the ad-group and criterion IDs share a single path segment,
        // joined by '~'; ParseSplitHelper splits them apart when parsing.
        private static gax::PathTemplate s_customerAdGroupCriterion = new gax::PathTemplate("customers/{customer_id}/webpageViews/{ad_group_id_criterion_id}");

        /// <summary>Creates a <see cref="WebpageViewName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="WebpageViewName"/> containing the provided
        /// <paramref name="unparsedResourceName"/>.
        /// </returns>
        public static WebpageViewName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new WebpageViewName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="WebpageViewName"/> with the pattern
        /// <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="WebpageViewName"/> constructed from the provided ids.</returns>
        public static WebpageViewName FromCustomerAdGroupCriterion(string customerId, string adGroupId, string criterionId) =>
            new WebpageViewName(ResourceNameType.CustomerAdGroupCriterion, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="WebpageViewName"/> with pattern
        /// <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="WebpageViewName"/> with pattern
        /// <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>.
        /// </returns>
        public static string Format(string customerId, string adGroupId, string criterionId) =>
            FormatCustomerAdGroupCriterion(customerId, adGroupId, criterionId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="WebpageViewName"/> with pattern
        /// <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="WebpageViewName"/> with pattern
        /// <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>.
        /// </returns>
        public static string FormatCustomerAdGroupCriterion(string customerId, string adGroupId, string criterionId) =>
            s_customerAdGroupCriterion.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), $"{(gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)))}");

        /// <summary>Parses the given resource name string into a new <see cref="WebpageViewName"/> instance.</summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description><c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c></description>
        /// </item>
        /// </list>
        /// </remarks>
        /// <param name="webpageViewName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <returns>The parsed <see cref="WebpageViewName"/> if successful.</returns>
        public static WebpageViewName Parse(string webpageViewName) => Parse(webpageViewName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="WebpageViewName"/> instance; optionally allowing
        /// an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description><c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c></description>
        /// </item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="webpageViewName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="WebpageViewName"/> if successful.</returns>
        public static WebpageViewName Parse(string webpageViewName, bool allowUnparsed) =>
            TryParse(webpageViewName, allowUnparsed, out WebpageViewName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="WebpageViewName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description><c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c></description>
        /// </item>
        /// </list>
        /// </remarks>
        /// <param name="webpageViewName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="WebpageViewName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string webpageViewName, out WebpageViewName result) =>
            TryParse(webpageViewName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="WebpageViewName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item>
        /// <description><c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c></description>
        /// </item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="webpageViewName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="WebpageViewName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string webpageViewName, bool allowUnparsed, out WebpageViewName result)
        {
            gax::GaxPreconditions.CheckNotNull(webpageViewName, nameof(webpageViewName));
            gax::TemplatedResourceName resourceName;
            if (s_customerAdGroupCriterion.TryParseName(webpageViewName, out resourceName))
            {
                // Split the combined "{ad_group_id}~{criterion_id}" segment.
                string[] split1 = ParseSplitHelper(resourceName[1], new char[] { '~', });
                if (split1 == null)
                {
                    result = null;
                    return false;
                }
                result = FromCustomerAdGroupCriterion(resourceName[0], split1[0], split1[1]);
                return true;
            }
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(webpageViewName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        // Splits 's' on the given separators in order; returns null when a
        // separator is missing or any resulting part would be empty.
        private static string[] ParseSplitHelper(string s, char[] separators)
        {
            string[] result = new string[separators.Length + 1];
            int i0 = 0;
            for (int i = 0; i <= separators.Length; i++)
            {
                int i1 = i < separators.Length ? s.IndexOf(separators[i], i0) : s.Length;
                if (i1 < 0 || i1 == i0)
                {
                    return null;
                }
                result[i] = s.Substring(i0, i1 - i0);
                i0 = i1 + 1;
            }
            return result;
        }

        private WebpageViewName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string adGroupId = null, string criterionId = null, string customerId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            AdGroupId = adGroupId;
            CriterionId = criterionId;
            CustomerId = customerId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="WebpageViewName"/> class from the component parts of pattern
        /// <c>customers/{customer_id}/webpageViews/{ad_group_id}~{criterion_id}</c>
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
        public WebpageViewName(string customerId, string adGroupId, string criterionId) : this(ResourceNameType.CustomerAdGroupCriterion, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>AdGroup</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string AdGroupId { get; }

        /// <summary>
        /// The <c>Criterion</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CriterionId { get; }

        /// <summary>
        /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CustomerId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.CustomerAdGroupCriterion: return s_customerAdGroupCriterion.Expand(CustomerId, $"{AdGroupId}~{CriterionId}");
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as WebpageViewName);

        /// <inheritdoc/>
        public bool Equals(WebpageViewName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(WebpageViewName a, WebpageViewName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(WebpageViewName a, WebpageViewName b) => !(a == b);
    }

    public partial class WebpageView
    {
        /// <summary>
        /// <see cref="WebpageViewName"/>-typed view over the <see cref="ResourceName"/> resource name property.
        /// </summary>
        internal WebpageViewName ResourceNameAsWebpageViewName
        {
            get => string.IsNullOrEmpty(ResourceName) ? null : WebpageViewName.Parse(ResourceName, allowUnparsed: true);
            set => ResourceName = value?.ToString() ?? "";
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.Windows.Forms;
using System.Runtime.InteropServices;

namespace Raccoom.Win32
{
    /// <summary>
    /// Static helper methods around the Windows Shell COM interfaces:
    /// WParam word extraction, IStream/IStorage binding, drag &amp; drop
    /// object retrieval and info-tip queries.
    /// </summary>
    internal static class ShellHelper
    {
        #region Low/High Word

        /// <summary>
        /// Retrieves the High Word of a WParam of a WindowMessage
        /// </summary>
        /// <param name="ptr">The pointer to the WParam</param>
        /// <returns>The unsigned integer for the High Word</returns>
        public static uint HiWord(IntPtr ptr)
        {
            if (((uint)ptr & 0x80000000) == 0x80000000)
                return ((uint)ptr >> 16);
            else
                return ((uint)ptr >> 16) & 0xffff;
        }

        /// <summary>
        /// Retrieves the Low Word of a WParam of a WindowMessage
        /// </summary>
        /// <param name="ptr">The pointer to the WParam</param>
        /// <returns>The unsigned integer for the Low Word</returns>
        public static uint LoWord(IntPtr ptr)
        {
            return (uint)ptr & 0xffff;
        }

        #endregion

        #region IStream/IStorage

        /// <summary>
        /// Binds the item (via its parent folder) to an IStream.
        /// </summary>
        /// <param name="item">The shell item to open.</param>
        /// <param name="streamPtr">Receives the raw interface pointer, or IntPtr.Zero on failure.</param>
        /// <param name="stream">Receives the typed IStream wrapper, or null on failure.</param>
        /// <returns>true if the stream was obtained.</returns>
        public static bool GetIStream(ShellItem item, out IntPtr streamPtr, out IStream stream)
        {
            if (item.ParentItem.ShellFolder.BindToStorage(
                        item.PIDLRel.Ptr,
                        IntPtr.Zero,
                        ref ShellAPI.IID_IStream,
                        out streamPtr) == ShellAPI.S_OK)
            {
                stream = (IStream)Marshal.GetTypedObjectForIUnknown(streamPtr, typeof(IStream));
                return true;
            }
            else
            {
                stream = null;
                streamPtr = IntPtr.Zero;
                return false;
            }
        }

        /// <summary>
        /// Binds the item (via its parent folder) to an IStorage.
        /// </summary>
        /// <param name="item">The shell item to open.</param>
        /// <param name="storagePtr">Receives the raw interface pointer, or IntPtr.Zero on failure.</param>
        /// <param name="storage">Receives the typed IStorage wrapper, or null on failure.</param>
        /// <returns>true if the storage was obtained.</returns>
        public static bool GetIStorage(ShellItem item, out IntPtr storagePtr, out IStorage storage)
        {
            if (item.ParentItem.ShellFolder.BindToStorage(
                        item.PIDLRel.Ptr,
                        IntPtr.Zero,
                        ref ShellAPI.IID_IStorage,
                        out storagePtr) == ShellAPI.S_OK)
            {
                storage = (IStorage)Marshal.GetTypedObjectForIUnknown(storagePtr, typeof(IStorage));
                return true;
            }
            else
            {
                storage = null;
                storagePtr = IntPtr.Zero;
                return false;
            }
        }

        #endregion

        #region Drag/Drop

        /// <summary>
        /// This method will use the GetUIObjectOf method of IShellFolder to obtain the IDataObject of a
        /// ShellItem.
        /// </summary>
        /// <param name="items">The item for which to obtain the IDataObject</param>
        /// <returns>the raw IDataObject pointer for the ShellItems, or IntPtr.Zero on failure</returns>
        public static IntPtr GetIDataObject(ShellItem[] items)
        {
            // Items sharing a parent are addressed relative to it; a root item
            // acts as its own parent.
            ShellItem parent = items[0].ParentItem != null ? items[0].ParentItem : items[0];

            IntPtr[] pidls = new IntPtr[items.Length];
            for (int i = 0; i < items.Length; i++)
                pidls[i] = items[i].PIDLRel.Ptr;

            IntPtr dataObjectPtr;
            if (parent.ShellFolder.GetUIObjectOf(
                    IntPtr.Zero,
                    (uint)pidls.Length,
                    pidls,
                    ref ShellAPI.IID_IDataObject,
                    IntPtr.Zero,
                    out dataObjectPtr) == ShellAPI.S_OK)
            {
                return dataObjectPtr;
            }
            else
            {
                return IntPtr.Zero;
            }
        }

        /// <summary>
        /// Obtains the IDropTarget of a ShellItem via its parent folder.
        /// </summary>
        public static bool GetIDropTarget(ShellItem item, out IntPtr dropTargetPtr, out Raccoom.Win32.IDropTarget dropTarget)
        {
            ShellItem parent = item.ParentItem != null ? item.ParentItem : item;

            if (parent.ShellFolder.GetUIObjectOf(
                    IntPtr.Zero,
                    1,
                    new IntPtr[] { item.PIDLRel.Ptr },
                    ref ShellAPI.IID_IDropTarget,
                    IntPtr.Zero,
                    out dropTargetPtr) == ShellAPI.S_OK)
            {
                dropTarget = (Raccoom.Win32.IDropTarget)Marshal.GetTypedObjectForIUnknown(dropTargetPtr, typeof(Raccoom.Win32.IDropTarget));
                return true;
            }
            else
            {
                dropTarget = null;
                dropTargetPtr = IntPtr.Zero;
                return false;
            }
        }

        /// <summary>
        /// Creates the shell's drag-image helper (CLSID_DragDropHelper).
        /// </summary>
        public static bool GetIDropTargetHelper(out IntPtr helperPtr, out IDropTargetHelper dropHelper)
        {
            if (ShellAPI.CoCreateInstance(
                    ref ShellAPI.CLSID_DragDropHelper,
                    IntPtr.Zero,
                    ShellAPI.CLSCTX.INPROC_SERVER,
                    ref ShellAPI.IID_IDropTargetHelper,
                    out helperPtr) == ShellAPI.S_OK)
            {
                dropHelper = (IDropTargetHelper)Marshal.GetTypedObjectForIUnknown(helperPtr, typeof(IDropTargetHelper));
                return true;
            }
            else
            {
                dropHelper = null;
                helperPtr = IntPtr.Zero;
                return false;
            }
        }

        /// <summary>
        /// Probes which drop effects (Copy/Move/Link) the item's IDropTarget
        /// would accept for the current OLE clipboard contents.
        /// </summary>
        /// <param name="item">The potential drop target item.</param>
        /// <returns>The union of accepted effects, or DragDropEffects.None.</returns>
        public static DragDropEffects CanDropClipboard(ShellItem item)
        {
            IntPtr dataObject;
            // FIX: check the HRESULT instead of assuming the clipboard call succeeded.
            if (ShellAPI.OleGetClipboard(out dataObject) != ShellAPI.S_OK)
                return DragDropEffects.None;

            DragDropEffects retVal = DragDropEffects.None;
            try
            {
                IntPtr targetPtr;
                Raccoom.Win32.IDropTarget target;
                if (GetIDropTarget(item, out targetPtr, out target))
                {
                    #region Check Copy
                    DragDropEffects effects = DragDropEffects.Copy;
                    if (target.DragEnter(
                            dataObject,
                            ShellAPI.MK.CONTROL,
                            new ShellAPI.POINT(0, 0),
                            ref effects) == ShellAPI.S_OK)
                    {
                        if (effects == DragDropEffects.Copy)
                            retVal |= DragDropEffects.Copy;

                        target.DragLeave();
                    }
                    #endregion

                    #region Check Move
                    effects = DragDropEffects.Move;
                    if (target.DragEnter(
                            dataObject,
                            ShellAPI.MK.SHIFT,
                            new ShellAPI.POINT(0, 0),
                            ref effects) == ShellAPI.S_OK)
                    {
                        if (effects == DragDropEffects.Move)
                            retVal |= DragDropEffects.Move;

                        target.DragLeave();
                    }
                    #endregion

                    #region Check Link
                    effects = DragDropEffects.Link;
                    if (target.DragEnter(
                            dataObject,
                            ShellAPI.MK.ALT,
                            new ShellAPI.POINT(0, 0),
                            ref effects) == ShellAPI.S_OK)
                    {
                        if (effects == DragDropEffects.Link)
                            retVal |= DragDropEffects.Link;

                        target.DragLeave();
                    }
                    #endregion

                    Marshal.ReleaseComObject(target);
                    Marshal.Release(targetPtr);
                }
            }
            finally
            {
                // FIX: OleGetClipboard hands the caller a referenced IDataObject;
                // it was previously leaked.
                if (dataObject != IntPtr.Zero)
                    Marshal.Release(dataObject);
            }

            return retVal;
        }

        #endregion

        #region QueryInfo

        /// <summary>
        /// Obtains the IQueryInfo (info-tip) interface of a ShellItem.
        /// </summary>
        public static bool GetIQueryInfo(ShellItem item, out IntPtr iQueryInfoPtr, out IQueryInfo iQueryInfo)
        {
            ShellItem parent = item.ParentItem != null ? item.ParentItem : item;

            if (parent.ShellFolder.GetUIObjectOf(
                    IntPtr.Zero,
                    1,
                    new IntPtr[] { item.PIDLRel.Ptr },
                    ref ShellAPI.IID_IQueryInfo,
                    IntPtr.Zero,
                    out iQueryInfoPtr) == ShellAPI.S_OK)
            {
                iQueryInfo = (IQueryInfo)Marshal.GetTypedObjectForIUnknown(iQueryInfoPtr, typeof(IQueryInfo));
                return true;
            }
            else
            {
                iQueryInfo = null;
                iQueryInfoPtr = IntPtr.Zero;
                return false;
            }
        }

        #endregion
    }
}
using System; using System.Reflection; using Orleans; using Orleans.Runtime; using Orleans.Serialization; using Orleans.Utilities; using TestExtensions; using Xunit; namespace UnitTests.Serialization { [TestCategory("BVT"), TestCategory("Serialization")] public class ILBasedExceptionSerializerTests { private readonly ILSerializerGenerator serializerGenerator = new ILSerializerGenerator(); private readonly SerializationTestEnvironment environment; public ILBasedExceptionSerializerTests() { this.environment = SerializationTestEnvironment.Initialize(null, typeof(ILBasedSerializer)); } /// <summary> /// Tests that <see cref="ILBasedExceptionSerializer"/> supports distinct field selection for serialization /// versus copy operations. /// </summary> [Fact] public void ExceptionSerializer_SimpleException() { // Throw an exception so that is has a stack trace. var expected = GetNewException(); this.TestExceptionSerialization(expected); } private ILExceptionSerializerTestException TestExceptionSerialization(ILExceptionSerializerTestException expected) { var writer = new SerializationContext(this.environment.SerializationManager) { StreamWriter = new BinaryTokenStreamWriter() }; // Deep copies should be reference-equal. 
Assert.Equal( expected, SerializationManager.DeepCopyInner(expected, new SerializationContext(this.environment.SerializationManager)), ReferenceEqualsComparer.Instance); this.environment.SerializationManager.Serialize(expected, writer.StreamWriter); var reader = new DeserializationContext(this.environment.SerializationManager) { StreamReader = new BinaryTokenStreamReader(writer.StreamWriter.ToByteArray()) }; var actual = (ILExceptionSerializerTestException) this.environment.SerializationManager.Deserialize(null, reader.StreamReader); Assert.Equal(expected.BaseField.Value, actual.BaseField.Value, StringComparer.Ordinal); Assert.Equal(expected.SubClassField, actual.SubClassField, StringComparer.Ordinal); Assert.Equal(expected.OtherField.Value, actual.OtherField.Value, StringComparer.Ordinal); // Check for referential equality in the two fields which happened to be reference-equals. Assert.Equal(actual.BaseField, actual.OtherField, ReferenceEqualsComparer.Instance); return actual; } /// <summary> /// Tests that <see cref="ILBasedExceptionSerializer"/> supports reference cycles. /// </summary> [Fact] public void ExceptionSerializer_ReferenceCycle() { // Throw an exception so that is has a stack trace. var expected = GetNewException(); // Create a reference cycle at the top level. expected.SomeObject = expected; var actual = this.TestExceptionSerialization(expected); Assert.Equal(actual, actual.SomeObject); } /// <summary> /// Tests that <see cref="ILBasedExceptionSerializer"/> supports reference cycles. /// </summary> [Fact] public void ExceptionSerializer_NestedReferenceCycle() { // Throw an exception so that is has a stack trace. var exception = GetNewException(); var expected = new Outer { SomeFunObject = exception.OtherField, Object = exception, }; // Create a reference cycle. 
exception.SomeObject = expected; var writer = new SerializationContext(this.environment.SerializationManager) { StreamWriter = new BinaryTokenStreamWriter() }; this.environment.SerializationManager.Serialize(expected, writer.StreamWriter); var reader = new DeserializationContext(this.environment.SerializationManager) { StreamReader = new BinaryTokenStreamReader(writer.StreamWriter.ToByteArray()) }; var actual = (Outer)this.environment.SerializationManager.Deserialize(null, reader.StreamReader); Assert.Equal(expected.Object.BaseField.Value, actual.Object.BaseField.Value, StringComparer.Ordinal); Assert.Equal(expected.Object.SubClassField, actual.Object.SubClassField, StringComparer.Ordinal); Assert.Equal(expected.Object.OtherField.Value, actual.Object.OtherField.Value, StringComparer.Ordinal); // Check for referential equality in the fields which happened to be reference-equals. Assert.Equal(actual.Object.BaseField, actual.Object.OtherField, ReferenceEqualsComparer.Instance); Assert.Equal(actual, actual.Object.SomeObject, ReferenceEqualsComparer.Instance); Assert.Equal(actual.SomeFunObject, actual.Object.OtherField, ReferenceEqualsComparer.Instance); } private static ILExceptionSerializerTestException GetNewException() { ILExceptionSerializerTestException expected; try { var baseField = new SomeFunObject { Value = Guid.NewGuid().ToString() }; var res = new ILExceptionSerializerTestException { BaseField = baseField, SubClassField = Guid.NewGuid().ToString(), OtherField = baseField, }; throw res; } catch (ILExceptionSerializerTestException exception) { expected = exception; } return expected; } /// <summary> /// Tests that <see cref="ILBasedExceptionSerializer"/> supports distinct field selection for serialization /// versus copy operations. 
/// </summary>
[Fact]
public void ExceptionSerializer_UnknownException()
{
    var expected = GetNewException();

    // Serializer configured with a working type resolver: it can name and resolve
    // the concrete exception type.
    var knowsException = new ILBasedExceptionSerializer(this.serializerGenerator, new TypeSerializer(new CachedTypeResolver()));
    var writer = new SerializationContext(this.environment.SerializationManager)
    {
        StreamWriter = new BinaryTokenStreamWriter()
    };
    knowsException.Serialize(expected, writer, null);

    // Deep copies should be reference-equal.
    var copyContext = new SerializationContext(this.environment.SerializationManager);
    Assert.Equal(expected, knowsException.DeepCopy(expected, copyContext), ReferenceEqualsComparer.Instance);

    // Create a deserializer which doesn't know about the expected exception type.
    var reader = new DeserializationContext(this.environment.SerializationManager)
    {
        StreamReader = new BinaryTokenStreamReader(writer.StreamWriter.ToByteArray())
    };

    // Ensure that the deserialized object has the fallback type.
    var doesNotKnowException = new ILBasedExceptionSerializer(this.serializerGenerator, new TestTypeSerializer(new CachedTypeResolver()));
    var untypedActual = doesNotKnowException.Deserialize(null, reader);
    Assert.IsType<RemoteNonDeserializableException>(untypedActual);

    // Ensure that the original type name is preserved correctly.
    var actualDeserialized = (RemoteNonDeserializableException) untypedActual;
    Assert.Equal(RuntimeTypeNameFormatter.Format(typeof(ILExceptionSerializerTestException)), actualDeserialized.OriginalTypeName);

    // Re-serialize the deserialized object using the serializer which does not have access to the original type.
    writer = new SerializationContext(this.environment.SerializationManager)
    {
        StreamWriter = new BinaryTokenStreamWriter()
    };
    doesNotKnowException.Serialize(untypedActual, writer, null);
    reader = new DeserializationContext(this.environment.SerializationManager)
    {
        StreamReader = new BinaryTokenStreamReader(writer.StreamWriter.ToByteArray())
    };

    // Deserialize the round-tripped object and verify that it has the original type
    // and that all properties are correct.
    untypedActual = knowsException.Deserialize(null, reader);
    Assert.IsType<ILExceptionSerializerTestException>(untypedActual);
    var actual = (ILExceptionSerializerTestException) untypedActual;
    Assert.Equal(expected.BaseField.Value, actual.BaseField.Value, StringComparer.Ordinal);
    Assert.Equal(expected.SubClassField, actual.SubClassField, StringComparer.Ordinal);
    Assert.Equal(expected.OtherField.Value, actual.OtherField.Value, StringComparer.Ordinal);

    // Check for referential equality in the two fields which happened to be reference-equals.
    Assert.Equal(actual.BaseField, actual.OtherField, ReferenceEqualsComparer.Instance);
}

/// <summary>Container used to build a reference cycle that is nested one level deep.</summary>
private class Outer
{
    public SomeFunObject SomeFunObject { get; set; }

    public ILExceptionSerializerTestException Object { get; set; }
}

/// <summary>Simple payload type; Value holds an opaque GUID string in these tests.</summary>
private class SomeFunObject
{
    public string Value { get; set; }
}

/// <summary>
/// Base exception contributing an inherited field, so tests cover fields declared
/// on a base class as well as on the subclass.
/// </summary>
private class BaseException : Exception
{
    public SomeFunObject BaseField { get; set; }
}

/// <summary>
/// Test exception. BaseField and OtherField are intended to alias the same object
/// (see GetNewException), and SomeObject is used to form reference cycles.
/// </summary>
[Serializable]
private class ILExceptionSerializerTestException : BaseException
{
    public string SubClassField { get; set; }

    public SomeFunObject OtherField { get; set; }

    public object SomeObject { get; set; }
}

/// <summary>
/// A <see cref="TypeSerializer"/> which never resolves any type name, simulating a
/// party that does not have the exception type available locally.
/// </summary>
private class TestTypeSerializer : TypeSerializer
{
    // Mimics a failed type load: throws when the caller demands success, otherwise
    // returns null so the fallback (RemoteNonDeserializableException) path is taken.
    internal override Type GetTypeFromName(string assemblyQualifiedTypeName, bool throwOnError)
    {
        if (throwOnError) throw new TypeLoadException($"Type {assemblyQualifiedTypeName} could not be loaded");
        return null;
    }

    public TestTypeSerializer(ITypeResolver typeResolver) : base(typeResolver)
    {
    }
}
}
}
using System; using System.IO; using System.Collections; using System.Collections.Generic; using System.Globalization; using talib = TicTacTec.TA.Library; namespace indicatortestgenerator { class MainClass { public static void Main (string[] args) { List<double> openPrices = new List<double>(); List<double> closingPrices = new List<double>(); List<double> highPrices = new List<double>(); List<double> lowPrices = new List<double>(); List<double> volume = new List<double>(); // read the source data into an array to use for all the indicators using (var reader = new StreamReader (@"/home/eugened/Development/local/indicator-test-generator/indicator-test-generator/JSETOPI.2013.data")) { string line = null; while((line = reader.ReadLine()) != null) { string[] parts = line.Split (new char[]{ ',' }); // format is date, O, H, L, C, V // we will use close prices for all these tests openPrices.Add (Convert.ToDouble (parts [1].Replace (".", ","))); highPrices.Add (Convert.ToDouble (parts [2].Replace (".", ","))); lowPrices.Add (Convert.ToDouble (parts [3].Replace (".", ","))); closingPrices.Add (Convert.ToDouble (parts [4].Replace(".", ","))); volume.Add(Convert.ToDouble(parts[5].Replace (".", ","))); } } // now we need to create an output file for each indicator // SMA using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/sma_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.SmaLookback (10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Sma(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // EMA using (var writer = new StreamWriter 
(@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/ema_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.EmaLookback (10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Ema(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // WMA using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/wma_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.WmaLookback (10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Wma(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // DEMA using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/dema_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.DemaLookback (10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Dema(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // TEMA using (var writer = new StreamWriter 
(@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/tema_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.TemaLookback (10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Tema(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Variance using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/variance_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.VarianceLookback (10, 1.0); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Variance(0, dataLength, closingPrices.ToArray(), 10, 1.0, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Standard Deviation using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/stddev_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.StdDevLookback (10, 1.0); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.StdDev(0, dataLength, closingPrices.ToArray(), 10, 1.0, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // 
Bollinger Bands using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/bb_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.SmaLookback (10); int dataLength = closingPrices.Count - 1; double[] outDataUpper = new double[dataLength - lookback + 1]; double[] outDataMiddle = new double[dataLength - lookback + 1]; double[] outDataLower = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Bbands(0, dataLength, closingPrices.ToArray(), 10, 2, 2, talib.Core.MAType.Sma, out outBeginIndex, out outNBElement, outDataUpper, outDataMiddle, outDataLower); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { for (int i= 0; i< outDataMiddle.Length;i++) { writer.WriteLine ("{0}, {1}, {2}", outDataUpper[i].ToString(CultureInfo.InvariantCulture), outDataMiddle[i].ToString(CultureInfo.InvariantCulture), outDataLower[i].ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // MACD using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/macd_12_26_9_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MacdLookback (12, 26, 9); int dataLength = closingPrices.Count - 1; double[] outMACD = new double[dataLength - lookback + 1]; double[] outMACDSignal = new double[dataLength - lookback + 1]; double[] outMACDHist = new double[dataLength - lookback + 1]; talib.Core.SetUnstablePeriod (talib.Core.FuncUnstId.FuncUnstAll, 0); talib.Core.RetCode retCode =talib.Core.Macd(0, dataLength, closingPrices.ToArray(), 12, 26, 9, out outBeginIndex, out outNBElement, outMACD, outMACDSignal, outMACDHist); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { for (int i= 0; i< outMACD.Length;i++) { writer.WriteLine ("{0}, {1}, {2}", outMACD[i].ToString(CultureInfo.InvariantCulture), outMACDSignal[i].ToString(CultureInfo.InvariantCulture), 
outMACDHist[i].ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Aroon using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/aroon_25_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AroonLookback (25); int dataLength = closingPrices.Count - 1; double[] outAroonDown = new double[dataLength - lookback + 1]; double[] outAroonUp = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Aroon(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), 25, out outBeginIndex, out outNBElement, outAroonDown, outAroonUp); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { for (int i= 0; i< outAroonUp.Length;i++) { writer.WriteLine ("{0}, {1}", outAroonUp[i].ToString(CultureInfo.InvariantCulture), outAroonDown[i].ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // AroonOsc using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/aroonosc_25_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AroonOscLookback (25); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.AroonOsc(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), 25, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { for (int i= 0; i< outData.Length;i++) { writer.WriteLine ("{0}", outData[i].ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } //True Range using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/truerange_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.TrueRangeLookback (); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; 
talib.Core.RetCode retCode =talib.Core.TrueRange(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Average True Range using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/atr_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AtrLookback (14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Atr(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), 14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Accumulation / Distribution line using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/adl_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AdLookback(); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Ad(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), volume.ToArray(), out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Chaikin Oscillator using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/chaikinosc_3_10_expectedresult.data")) { int outBeginIndex 
= 0; int outNBElement = 0; int lookback = talib.Core.AdOscLookback(3, 10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.AdOsc(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), volume.ToArray(), 3, 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // On Balance Volume using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/obv_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.ObvLookback(); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.Obv(0, dataLength, closingPrices.ToArray(), volume.ToArray(), out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // AvgPrice using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/avgprice_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AvgPriceLookback(); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.AvgPrice(0, dataLength, openPrices.ToArray(), highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // AvgPrice using (var writer = new 
StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/medprice_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MedPriceLookback(); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.MedPrice(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // PLUS_DM using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/plusdm_1_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.PlusDMLookback(1); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.PlusDM(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(),1, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // PLUS_DM using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/plusdm_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.PlusDMLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.PlusDM(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } 
writer.Flush (); } // MINUS_DM using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/minusdm_1_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MinusDMLookback(1); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.MinusDM(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(),1, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // MINUS_DM using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/minusdm_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MinusDMLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.MinusDM(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // PLUS_DI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/plusdi_1_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.PlusDILookback(1); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.PlusDI(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), 1, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item 
in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // PLUS_DI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/plusdi_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.PlusDILookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.PlusDI(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // MINUS_DI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/minusdi_1_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MinusDILookback(1); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback + 1]; talib.Core.RetCode retCode =talib.Core.MinusDI(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), 1, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // MINUS_DI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/minusdi_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MinusDILookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.MinusDI(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), 
closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // DX using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/dx_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.DxLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Dx(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ADX using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/adx_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AdxLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Adx(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ADXR using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/adxr_1_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AdxrLookback(1); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode 
retCode =talib.Core.Adxr(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),1, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ADXR using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/adxr_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.AdxrLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Adxr(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // TypicalPrice using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/typprice_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.TypPriceLookback(); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.TypPrice(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // RSI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/rsi_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.RsiLookback(14); int 
dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Rsi(0, dataLength, closingPrices.ToArray(), 14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ROC using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/roc_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.RocLookback(10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Roc(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ROCP using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/rocp_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.RocPLookback(10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.RocP(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ROCR using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/rocr_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.RocRLookback(10); int dataLength = closingPrices.Count 
- 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.RocR(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // ROCR using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/rocr100_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.RocR100Lookback(10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.RocR100(0, dataLength, closingPrices.ToArray(), 10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // MFI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/mfi_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MfiLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Mfi(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), volume.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // SAR using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/sar_002_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.SarLookback(0.02, 
0.20); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Sar(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(),0.02, 0.20, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Linear Regression using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/linear_regression_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.LinearRegLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.LinearReg(0, dataLength, closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Linear Regression Slope using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/linear_regression_slope_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.LinearRegSlopeLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.LinearRegSlope(0, dataLength, closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Linear Regression Intercept using (var writer = new StreamWriter 
(@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/linear_regression_intercept_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.LinearRegInterceptLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.LinearRegIntercept(0, dataLength, closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Linear Regression Angle using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/linear_regression_angle_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.LinearRegAngleLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.LinearRegAngle(0, dataLength, closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // TSF using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/tsf_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.TsfLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Tsf(0, dataLength, closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine 
(item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // KAMA using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/kama_30_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.KamaLookback(30); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Kama(0, dataLength, closingPrices.ToArray(),30, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // TRIMA using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/trima_30_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.TrimaLookback(30); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Trima(0, dataLength, closingPrices.ToArray(),30, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // WILLR using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/willr_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.WillRLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.WillR(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) 
{ writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // STOCH using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/stoch_5_3_3_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.StochLookback(5, 3, talib.Core.MAType.Sma, 3, talib.Core.MAType.Sma); int dataLength = closingPrices.Count - 1; double[] outSlowK = new double[dataLength - lookback +1]; double[] outSlowD = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Stoch(0, dataLength, highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(), 5,3, talib.Core.MAType.Sma, 3, talib.Core.MAType.Sma, out outBeginIndex, out outNBElement, outSlowK, outSlowD); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { for (var i=0;i< outSlowK.Length;i++) { writer.WriteLine ("{0}, {1}", outSlowK[i].ToString(CultureInfo.InvariantCulture), outSlowD[i].ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // STOCHRSI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/stochrsi_14_5_3_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.StochRsiLookback(14, 5, 3, talib.Core.MAType.Sma); int dataLength = closingPrices.Count - 1; double[] outFastK = new double[dataLength - lookback +1]; double[] outFastD = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.StochRsi(0, dataLength, closingPrices.ToArray(), 14,5,3, talib.Core.MAType.Sma, out outBeginIndex, out outNBElement, outFastK, outFastD); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { for (var i=0;i< outFastK.Length;i++) { writer.WriteLine ("{0}, {1}", outFastK[i].ToString(CultureInfo.InvariantCulture), outFastD[i].ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // Momentum using (var writer = new StreamWriter 
(@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/mom_10_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.MomLookback(10); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Mom(0, dataLength, closingPrices.ToArray(),10, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } // CCI using (var writer = new StreamWriter (@"/home/eugened/Development/go/src/github.com/thetruetrade/gotrade/testdata/cci_14_expectedresult.data")) { int outBeginIndex = 0; int outNBElement = 0; int lookback = talib.Core.CciLookback(14); int dataLength = closingPrices.Count - 1; double[] outData = new double[dataLength - lookback +1]; talib.Core.RetCode retCode =talib.Core.Cci(0, dataLength,highPrices.ToArray(), lowPrices.ToArray(), closingPrices.ToArray(),14, out outBeginIndex, out outNBElement, outData); if (retCode == TicTacTec.TA.Library.Core.RetCode.Success) { foreach (var item in outData) { writer.WriteLine (item.ToString(CultureInfo.InvariantCulture)); } } writer.Flush (); } } } }
// -----------------------------------------------------------------------
// <copyright file="ObservableDictionaryModificationTests.cs" company="Joerg Battermann">
// Copyright (c) 2017 Joerg Battermann. All rights reserved.
// </copyright>
// <author>Joerg Battermann</author>
// <summary></summary>
// -----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using Xunit;

namespace JB.Collections.Reactive.Tests
{
    /// <summary>
    /// Tests for the modifying operations of <c>ObservableDictionary{TKey,TValue}</c>:
    /// Add / AddOrUpdate / AddRange, the key indexer, Remove / RemoveRange,
    /// the Try* variants, Clear and Reset.
    /// </summary>
    public class ObservableDictionaryModificationTests
    {
        /// <summary>Add(key, value) inserts a new entry and exposes it via Count, Keys and Values.</summary>
        [Fact]
        public void AddAddsItem()
        {
            // given
            var key = 1;
            var value = "One";

            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                observableDictionary.Add(key, value);

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(key, value);
                observableDictionary.Keys.Should().Contain(key);
                observableDictionary.Values.Should().Contain(value);
            }
        }

        /// <summary>AddOrUpdate behaves like Add for a key that is not yet present.</summary>
        [Fact]
        public void AddOrUpdateAddsNewItem()
        {
            // given
            var key = 1;
            var value = "One";

            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                observableDictionary.AddOrUpdate(key, value);

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
            }
        }

        /// <summary>AddOrUpdate tolerates re-assigning the value an existing key already has.</summary>
        [Fact]
        public void AddOrUpdateAllowsUpdateForExistingKeyWithSameValue()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                observableDictionary.AddOrUpdate(1, "One");

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
            }
        }

        /// <summary>Updating an existing key to default(string) (null) must not throw.</summary>
        [Fact]
        public void AddOrUpdateShouldAllowUpdateWithDefaultValue()
        {
            // given
            var initialKvPs = new List<KeyValuePair<string, string>>()
            {
                new KeyValuePair<string, string>("1", "One Value")
            };

            using (var observableDictionary = new ObservableDictionary<string, string>(initialKvPs))
            {
                // when
                Action action = () => observableDictionary.AddOrUpdate("1", default(string));

                // then
                action.Should().NotThrow<ArgumentNullException>();

                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain("1", default(string));
            }
        }

        /// <summary>Adding a fresh key with a default(string) (null) value must not throw.</summary>
        [Fact]
        public void AddOrUpdateShouldAllowAddWithDefaultValue()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                Action action = () => observableDictionary.AddOrUpdate("1", default(string));

                // then
                action.Should().NotThrow<ArgumentNullException>();

                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain("1", default(string));
            }
        }

        /// <summary>A null key is rejected with an ArgumentNullException and the dictionary stays empty.</summary>
        [Fact]
        public void AddOrUpdateThrowsOnNullKey()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                Action action = () => observableDictionary.AddOrUpdate(null, null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>AddOrUpdate replaces the value of an already-present key.</summary>
        [Fact]
        public void AddOrUpdateUpdatesExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                observableDictionary.AddOrUpdate(1, "Two");

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "Two");
            }
        }

        /// <summary>AddRange inserts every supplied pair, for empty through large batches.</summary>
        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        public void AddRangeOfKeyValuePairsAddsItems(int amountOfItemsToAdd)
        {
            // given
            var keyValuePairs = Enumerable.Range(0, amountOfItemsToAdd)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                observableDictionary.AddRange(keyValuePairs);

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(amountOfItemsToAdd);
                foreach (var keyValuePair in keyValuePairs)
                {
                    observableDictionary.Should().Contain(keyValuePair);
                }
            }
        }

        /// <summary>
        /// AddRange rejects a batch containing keys already present, but still applies the addable
        /// remainder. NOTE(review): despite the name, the duplicate (existing) keys trigger the throw.
        /// </summary>
        [Fact]
        public void AddRangeOfKeyValuePairsThrowsOnNonExistingKeys()
        {
            // given
            var keyValuePairs = Enumerable.Range(0, 2)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs))
            {
                // when
                Action invalidRemoveRangeForNonExistingKey = () => observableDictionary.AddRange(
                    new List<KeyValuePair<int, string>>
                    {
                        new KeyValuePair<int, string>(0, "#0"),
                        new KeyValuePair<int, string>(1, "One"),
                        new KeyValuePair<int, string>(2, "Two")
                    });

                // then
                invalidRemoveRangeForNonExistingKey
                    .Should().Throw<ArgumentOutOfRangeException>()
                    .WithMessage("The following key(s) are already in this dictionary and cannot be added to it: 0, 1\r\nParameter name: items");

                // the non-conflicting pair (2, "Two") was added, the originals were left untouched
                observableDictionary.Count.Should().Be(3);
                observableDictionary.Should().Contain(0, "#0");
                observableDictionary.Should().Contain(1, "#1");
                observableDictionary.Should().Contain(2, "Two");
            }
        }

        /// <summary>A null batch is rejected with an ArgumentNullException.</summary>
        [Fact]
        public void AddRangeOfKeyValuePairsThrowsOnNullItems()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                Action action = () => observableDictionary.AddRange(null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: items");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>Add accepts a default(string) (null) value.</summary>
        [Fact]
        public void AddShouldNotThrowOnDefaultValue()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                Action action = () => observableDictionary.Add("1", default(string));

                // then
                action.Should().NotThrow<ArgumentNullException>();

                observableDictionary.Count.Should().Be(1);
            }
        }

        /// <summary>Add rejects a null key.</summary>
        [Fact]
        public void AddShouldThrowOnNullKey()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                Action action = () => observableDictionary.Add(null, null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>Clear removes every entry.</summary>
        [Fact]
        public void ClearClearsDictionary()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One"),
                new KeyValuePair<int, string>(2, "Two")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                observableDictionary.Clear();

                // then
                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>this[key] returns the stored value for an existing key.</summary>
        [Fact]
        public void KeyIndexerGetGetsValueForExistingKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            // when
            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // then
                observableDictionary[1].Should().Be("One");
            }
        }

        /// <summary>this[key] throws KeyNotFoundException for an unknown key.</summary>
        [Fact]
        public void KeyIndexerGetShouldThrowForNonExistingKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            // when
            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                Action action = () =>
                {
                    var value = observableDictionary[2];
                };

                // then
                action.Should().Throw<KeyNotFoundException>();
            }
        }

        /// <summary>this[null] (get) throws ArgumentNullException.</summary>
        [Fact]
        public void KeyIndexerGetShouldThrowForNullKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<string, string>>()
            {
                new KeyValuePair<string, string>("1", "One")
            };

            // when
            using (var observableDictionary = new ObservableDictionary<string, string>(initialKvPs))
            {
                // when
                Action action = () =>
                {
                    var value = observableDictionary[null];
                };

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");
            }
        }

        /// <summary>this[null] (set) throws ArgumentNullException.</summary>
        [Fact]
        public void KeyIndexerSetShouldThrowForNullKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<string, string>>()
            {
                new KeyValuePair<string, string>("1", "One")
            };

            // when
            using (var observableDictionary = new ObservableDictionary<string, string>(initialKvPs))
            {
                // when
                Action action = () =>
                {
                    observableDictionary[null] = "Two";
                };

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");
            }
        }

        /// <summary>this[key] = value adds a new entry when the key is not yet present.</summary>
        [Fact]
        public void KeyIndexerSetAddsNewItem()
        {
            // given
            var key = 1;
            var value = "One";

            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                observableDictionary[key] = value;

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
                observableDictionary.Keys.Should().Contain(1);
                observableDictionary.Values.Should().Contain("One");
            }
        }

        /// <summary>this[key] = value overwrites the value of an existing key.</summary>
        [Fact]
        public void KeyIndexerSetUpdatesValueForExistingKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                observableDictionary[1] = "Two";

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "Two");
                observableDictionary.Keys.Should().Contain(1);
                observableDictionary.Values.Should().Contain("Two");
                observableDictionary.Values.Should().NotContain("One");
            }
        }

        /// <summary>Remove(key) deletes an existing entry from the dictionary, its Keys and its Values.</summary>
        [Fact]
        public void RemoveOfKeyRemovesExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                observableDictionary.Remove(1);

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(0);
                observableDictionary.Should().NotContain(1, "One");
                observableDictionary.Keys.Should().NotContain(1);
observableDictionary.Values.Should().NotContain("One");
            }
        }

        /// <summary>Remove of an unknown key must not throw.</summary>
        [Fact]
        public void RemoveOfKeyShouldNotThrowOnNonExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One"),
                new KeyValuePair<int, string>(2, "Two")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                Action invalidRemoveRangeForNonExistingKey = () => observableDictionary.Remove(10);

                // then
                invalidRemoveRangeForNonExistingKey
                    .Should().NotThrow<ArgumentOutOfRangeException>();

                observableDictionary.Count.Should().Be(2);
            }
        }

        /// <summary>Remove of an unknown key returns false and leaves the contents untouched.</summary>
        [Fact]
        public void RemoveOfKeyShouldReportBackCorrespondinglyOnNonExistingItems()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One"),
                new KeyValuePair<int, string>(2, "Two")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                var removalResult = observableDictionary.Remove(10);

                // then
                removalResult.Should().Be(false);
                observableDictionary.Count.Should().Be(2);
            }
        }

        /// <summary>Remove rejects a null key.</summary>
        [Fact]
        public void RemoveOfKeyThrowsOnNullKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<string, string>>()
            {
                new KeyValuePair<string, string>("1", "One"),
                new KeyValuePair<string, string>("2", "Two")
            };

            using (var observableDictionary = new ObservableDictionary<string, string>(initialKvPs))
            {
                // when
                Action action = () => observableDictionary.Remove((string)null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");

                observableDictionary.Count.Should().Be(2);
            }
        }

        /// <summary>RemoveRange(keys) removes exactly the requested existing keys.</summary>
        [Theory]
        [InlineData(0, 0)]
        [InlineData(1, 0)]
        [InlineData(1, 1)]
        [InlineData(10, 5)]
        public void RemoveRangeOfKeysRemovesItems(int initialAmountOfItems, int amountsOfItemsToRemove)
        {
            // given
            var keyValuePairs = Enumerable.Range(0, initialAmountOfItems)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();
            var keysToRemove = Enumerable.Range(0, amountsOfItemsToRemove).ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs))
            {
                // when
                observableDictionary.RemoveRange(keysToRemove);

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(initialAmountOfItems - amountsOfItemsToRemove);
                foreach (var removedKey in keysToRemove)
                {
                    observableDictionary.Should().NotContainKey(removedKey);
                }
            }
        }

        /// <summary>RemoveRange(keys) throws when any key is not in the dictionary.</summary>
        [Fact]
        public void RemoveRangeOfKeysThrowsOnNonExistingItems()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                Action invalidRemoveRangeForNonExistingKey = () => observableDictionary.RemoveRange(new List<int>() { 10 });

                // then
                invalidRemoveRangeForNonExistingKey
                    .Should().Throw<ArgumentOutOfRangeException>()
                    .WithMessage("The following key(s) are not in this dictionary and cannot be removed from it: 10\r\nParameter name: keys");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>RemoveRange rejects a null key collection.</summary>
        [Fact]
        public void RemoveRangeOfKeysThrowsOnNullKeys()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                Action action = () => observableDictionary.RemoveRange((List<int>)null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: keys");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>RemoveRange(pairs) removes exactly the requested existing key/value pairs.</summary>
        [Theory]
        [InlineData(0, 0)]
        [InlineData(1, 0)]
        [InlineData(1, 1)]
        [InlineData(10, 5)]
        public void RemoveRangeOfKeyValuePairsRemovesItems(int initialAmountOfItems, int amountsOfItemsToRemove)
        {
            // given
            var keyValuePairs = Enumerable.Range(0, initialAmountOfItems)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();
            var keyValuePairsToRemove = Enumerable.Range(0, amountsOfItemsToRemove)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToDictionary(keyValuePair => keyValuePair.Key, keyValuePair => keyValuePair.Value)
                .ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs))
            {
                // when
                observableDictionary.RemoveRange(keyValuePairsToRemove);

                // then check whether all items have been accounted for
                observableDictionary.Count.Should().Be(initialAmountOfItems - amountsOfItemsToRemove);
                foreach (var removedKeyValuePair in keyValuePairsToRemove)
                {
                    observableDictionary.Should().NotContain(removedKeyValuePair);
                }
            }
        }

        /// <summary>RemoveRange(pairs) throws when a key exists but its value does not match.</summary>
        [Fact]
        public void RemoveRangeOfKeyValuePairsThrowsOnExistingItemWhenValueIsDifferent()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>() { new KeyValuePair<int, string>(1, "One") };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                Action invalidRemoveRangeForNonExistingKey = () => observableDictionary.RemoveRange(
                    new List<KeyValuePair<int, string>>() { new KeyValuePair<int, string>(1, "Two") });

                // then
                invalidRemoveRangeForNonExistingKey
                    .Should().Throw<ArgumentOutOfRangeException>()
                    .WithMessage("The following key/value pair(s) are not in this dictionary and cannot be removed from it: [1, Two]\r\nParameter name: items");

                observableDictionary.Count.Should().Be(1);
            }
        }

        /// <summary>RemoveRange(pairs) throws when a pair is not in the dictionary at all.</summary>
        [Fact]
        public void RemoveRangeOfKeyValuePairsThrowsOnNonExistingItems()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                Action invalidRemoveRangeForNonExistingKey = () => observableDictionary.RemoveRange(
                    new List<KeyValuePair<int, string>>() { new KeyValuePair<int, string>(10, "Ten") });

                // then
                invalidRemoveRangeForNonExistingKey
                    .Should().Throw<ArgumentOutOfRangeException>()
                    .WithMessage("The following key/value pair(s) are not in this dictionary and cannot be removed from it: [10, Ten]\r\nParameter name: items");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>RemoveRange(pairs) rejects a null collection.</summary>
        [Fact]
        public void RemoveRangeOfKeyValuePairsThrowsOnNullItems()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                Action action = () => observableDictionary.RemoveRange((List<KeyValuePair<int, string>>)null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: items");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>Reset leaves the dictionary's contents unchanged.</summary>
        [Fact]
        public void ResetDoesNotModifyDictionary()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One"),
                new KeyValuePair<int, string>(2, "Two")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                observableDictionary.Reset();

                // then
                observableDictionary.Count.Should().Be(2);
                observableDictionary.Should().Contain(1, "One");
                observableDictionary.Should().Contain(2, "Two");
            }
        }

        /// <summary>TryAdd returns true and adds the entry when the key is new.</summary>
        [Fact]
        public void TryAddAddsNonExistingNewItem()
        {
            // given
            var key = 1;
            var value = "One";

            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                var tryAddResult = observableDictionary.TryAdd(key, value);

                // then check whether all items have been accounted for
                tryAddResult.Should().Be(true);
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
            }
        }

        /// <summary>TryAdd returns false and keeps the original value when the key already exists.</summary>
        [Fact]
        public void TryAddDoesNotAddExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                var tryAddResult = observableDictionary.TryAdd(1, "Two");

                // then check whether all items have been accounted for
                tryAddResult.Should().Be(false);
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
            }
        }

        /// <summary>TryAddRange adds what it can and reports the colliding pairs back to the caller.</summary>
        [Theory]
        [InlineData(1, 1)]
        [InlineData(5, 10)]
        [InlineData(99, 100)]
        [InlineData(100, 100)]
        public void TryAddRangeAddsNonExistingItemsAndReportsNonAddedBack(int amountOfInitialItems, int amountOfItemsToAdd)
        {
            // given
            var initialKeyValuePairs = Enumerable.Range(0, amountOfInitialItems)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();
            var keyValuePairsToAdd = Enumerable.Range(0, amountOfItemsToAdd)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
.ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKeyValuePairs))
            {
                // when
                IDictionary<int, string> itemsThatCouldNotBeAdded;
                var tryAddResult = observableDictionary.TryAddRange(keyValuePairsToAdd, out itemsThatCouldNotBeAdded);

                // then check whether all items have been accounted for
                tryAddResult.Should().Be(false);
                itemsThatCouldNotBeAdded.Should().NotBeNull();
                itemsThatCouldNotBeAdded.Should().NotBeEmpty();

                observableDictionary.Count.Should().Be(amountOfInitialItems + amountOfItemsToAdd - itemsThatCouldNotBeAdded.Count);

                foreach (var keyValuePair in keyValuePairsToAdd.Except(itemsThatCouldNotBeAdded))
                {
                    observableDictionary.Should().Contain(keyValuePair);
                }

                foreach (var keyValuePair in initialKeyValuePairs.Intersect(keyValuePairsToAdd))
                {
                    itemsThatCouldNotBeAdded.Should().Contain(keyValuePair);
                }
            }
        }

        /// <summary>TryAddRange adds every pair and reports nothing back when no key collides.</summary>
        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        public void TryAddRangeAddsNonExistingNewItems(int amountOfItemsToAdd)
        {
            // given
            var keyValuePairs = Enumerable.Range(0, amountOfItemsToAdd)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                IDictionary<int, string> nonAddedKeyValuePairs;
                var tryAddResult = observableDictionary.TryAddRange(keyValuePairs, out nonAddedKeyValuePairs);

                // then check whether all items have been accounted for
                tryAddResult.Should().Be(true);
                nonAddedKeyValuePairs.Should().NotBeNull();
                nonAddedKeyValuePairs.Should().BeEmpty();

                observableDictionary.Count.Should().Be(amountOfItemsToAdd);
                foreach (var keyValuePair in keyValuePairs)
                {
                    observableDictionary.Should().Contain(keyValuePair);
                }
            }
        }

        /// <summary>ContainsKey returns false for an unknown key.</summary>
        [Fact]
        public void ContainsKeyShouldReturnFalseForNonExistingKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                var result = observableDictionary.ContainsKey(2);

                // then
                result.Should().Be(false);
            }
        }

        /// <summary>ContainsKey returns true for a known key.</summary>
        [Fact]
        public void ContainsKeyShouldReturnTrueForExistingKey()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                var result = observableDictionary.ContainsKey(1);

                // then
                result.Should().Be(true);
            }
        }

        /// <summary>ContainsKey rejects a null key.</summary>
        [Fact]
        public void ContainsKeyThrowsOnNullKey()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                Action retrieval = () => observableDictionary.ContainsKey((string)null);

                // then
                retrieval
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");
            }
        }

        /// <summary>TryGetValue returns false and a default value for an unknown key.</summary>
        [Fact]
        public void TryGetDoesNotRetrieveNonExistingValue()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                string retrievedValue;
                var tryGetResult = observableDictionary.TryGetValue(2, out retrievedValue);

                // then check whether all items have been accounted for
                tryGetResult.Should().Be(false);
                retrievedValue.Should().Be(default(string));
            }
        }

        /// <summary>TryGetValue returns true and the stored value for a known key.</summary>
        [Fact]
        public void TryGetRetrievesExistingValue()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                string retrievedValue;
                var tryGetResult = observableDictionary.TryGetValue(1, out retrievedValue);

                // then check whether all items have been accounted for
                tryGetResult.Should().Be(true);
                retrievedValue.Should().Be("One");
            }
        }

        /// <summary>TryGetValue rejects a null key.</summary>
        [Fact]
        public void TryGetThrowsOnNullKey()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                string value;
                Action retrieval = () => observableDictionary.TryGetValue((string)null, out value);

                // then
                retrieval
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>TryRemove returns false and changes nothing for an unknown key.</summary>
        [Fact]
        public void TryRemoveOfKeyDoesNotRemoveNonExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                var removalResult = observableDictionary.TryRemove(2);

                // then check whether all items have been accounted for
                removalResult.Should().Be(false);
                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
            }
        }

        /// <summary>TryRemove of an unknown key must not throw.</summary>
        [Fact]
        public void TryRemoveOfKeyShouldNotThrowOnNonExistingItem()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<int, string>())
            {
                // when
                Action invalidRemoveRangeForNonExistingKey = () => observableDictionary.TryRemove(10);

                // then
                invalidRemoveRangeForNonExistingKey
                    .Should().NotThrow<ArgumentOutOfRangeException>();

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>TryRemove rejects a null key.</summary>
        [Fact]
        public void TryRemoveOfKeyThrowsOnNullKey()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                Action action = () => observableDictionary.TryRemove((string)null);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>TryRemove returns true and removes an existing entry.</summary>
        [Fact]
        public void TryRemoveOfKeyValuePairRemovesExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                var removalResult = observableDictionary.TryRemove(1);

                // then check whether all items have been accounted for
                removalResult.Should().Be(true);
                observableDictionary.Count.Should().Be(0);
                observableDictionary.Should().NotContain(1, "One");
            }
        }

        /// <summary>TryRemove with value retrieval returns false and a default value for an unknown key.</summary>
        [Fact]
        public void TryRemoveOfKeyWithValueRetrievalDoesNotRemoveNonExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                string value;
                var removalResult = observableDictionary.TryRemove(2, out value);

                // then check whether all items have been accounted for
                removalResult.Should().Be(false);
                value.Should().Be(default(string));

                observableDictionary.Count.Should().Be(1);
                observableDictionary.Should().Contain(1, "One");
            }
        }

        /// <summary>TryRemove with value retrieval returns true and hands back the removed value.</summary>
        [Fact]
        public void TryRemoveOfKeyWithValueRetrievalRemovesExistingItem()
        {
            // given
            var initialKvPs = new List<KeyValuePair<int, string>>()
            {
                new KeyValuePair<int, string>(1, "One")
            };

            using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs))
            {
                // when
                string value;
                var removalResult = observableDictionary.TryRemove(1, out value);

                // then check whether all items have been accounted for
                removalResult.Should().Be(true);
                value.Should().Be("One");

                observableDictionary.Count.Should().Be(0);
                observableDictionary.Should().NotContain(1, "One");
            }
        }

        /// <summary>TryRemove with value retrieval rejects a null key.</summary>
        [Fact]
        public void TryRemoveOfKeyWithValueRetrievalThrowsOnNullKey()
        {
            // given
            using (var observableDictionary = new ObservableDictionary<string, string>())
            {
                // when
                string value;
                Action action = () => observableDictionary.TryRemove((string)null, out value);

                // then
                action
                    .Should().Throw<ArgumentNullException>()
                    .WithMessage("Value cannot be null.\r\nParameter name: key");

                observableDictionary.Count.Should().Be(0);
            }
        }

        /// <summary>
        /// TryRemoveRange removes what it can and reports the keys it could not remove back,
        /// including when more keys are requested than exist.
        /// </summary>
        [Theory]
        [InlineData(0, 10)]
        [InlineData(1, 10)]
        [InlineData(100, 101)]
        public void TryRemoveRangeOfKeysForCornerCasesRemovesExistingItemsAndReportsNonremovablesBack(int initialAmountOfItems, int amountOfItemsToRemove)
        {
            // given
            var keyValuePairs = Enumerable.Range(0, initialAmountOfItems)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();
            var keyValuePairsToRemove = Enumerable.Range(0, amountOfItemsToRemove)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();
            var keysForKeyValuePairsToRemove = keyValuePairsToRemove.Select(kvp => kvp.Key).ToList();

            using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs))
            {
                // when
                IList<int> nonRemovedKeys;
                var tryRemoveResult = observableDictionary.TryRemoveRange(keysForKeyValuePairsToRemove, out nonRemovedKeys);

                // then check whether all items have been accounted for
                tryRemoveResult.Should().Be(false);
                nonRemovedKeys.Should().NotBeNull();
                nonRemovedKeys.Should().NotBeEmpty();

                observableDictionary.Count.Should().Be(initialAmountOfItems - amountOfItemsToRemove + nonRemovedKeys.Count);

                // check whether everything that was reported as removable is removed
                foreach (var keyValuePair in keysForKeyValuePairsToRemove.Except(nonRemovedKeys))
                {
                    observableDictionary.Should().NotContainKey(keyValuePair);
                }

                foreach (var keyValuePair in nonRemovedKeys)
                {
                    observableDictionary.Should().NotContainKey(keyValuePair);
                }
            }
        }

        /// <summary>TryRemoveRange removes all requested keys without throwing when all of them exist.</summary>
        [Theory]
        [InlineData(0)]
        [InlineData(1)]
        [InlineData(10)]
        [InlineData(100)]
        public void TryRemoveRangeOfKeysRemovesExistingItems(int initialAmountOfItems)
        {
            // given
            var keyValuePairs = Enumerable.Range(0, initialAmountOfItems)
                .Select(i => new KeyValuePair<int, string>(i, $"#{i}"))
                .ToList();
            var keysForKeyValuePairs = keyValuePairs.Select(kvp => kvp.Key);

            using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs))
            {
                // when
                IList<int> nonRemovables = new List<int>();
                Action action = () => observableDictionary.TryRemoveRange(keysForKeyValuePairs, out nonRemovables);

                // then check whether all items have been accounted for
                action.Should().NotThrow();

                observableDictionary.Count.Should().Be(0);
                nonRemovables.Should().NotBeNull();
                nonRemovables.Should().BeEmpty();
            }
        }

        [Fact]
        public void TryRemoveRangeOfKeysRemovesExistingItemsAndReportsNonremovablesBack()
        {
            // given
            var keyValuePairs = Enumerable.Range(0, 100)
                .Select(i => new KeyValuePair<int,
string>(i, $"#{i}")) .ToList(); var keysForKeyValuePairs = keyValuePairs.Select(kvp => kvp.Key).ToList(); var keyValuePairsToRemove = Enumerable.Range(50, 100) .Select(i => new KeyValuePair<int, string>(i, $"#{i}")) .ToList(); var keysForKeyValuePairsToRemove = keyValuePairsToRemove.Select(kvp => kvp.Key).ToList(); using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs)) { // when IList<int> nonRemovedKeys; var tryRemoveResult = observableDictionary.TryRemoveRange(keysForKeyValuePairsToRemove, out nonRemovedKeys); // then check whether all items have been accounted for tryRemoveResult.Should().Be(false); nonRemovedKeys.Should().NotBeNull(); nonRemovedKeys.Should().NotBeEmpty(); nonRemovedKeys.Count.Should().Be(50); observableDictionary.Count.Should().Be(50); // check whether everything that was reported as removable is removed foreach (var keyValuePair in keysForKeyValuePairsToRemove.Except(nonRemovedKeys)) { observableDictionary.Should().NotContainKey(keyValuePair); } foreach (var keyValuePair in nonRemovedKeys) { observableDictionary.Should().NotContainKey(keyValuePair); } // and check whether all other one(s) are still there, too foreach (var keyValuePair in keysForKeyValuePairs.Except(keysForKeyValuePairsToRemove)) { observableDictionary.Should().ContainKey(keyValuePair); } } } [Fact] public void TryRemoveRangeOfKeysThrowsOnNullKeys() { // given using (var observableDictionary = new ObservableDictionary<int, string>()) { // when Action action = () => observableDictionary.TryRemoveRange(null, out IList<int> _); // then check whether all items have been accounted for action .Should().Throw<ArgumentNullException>() .WithMessage("Value cannot be null.\r\nParameter name: keys"); } } [Theory] [InlineData(0, 10)] [InlineData(1, 10)] [InlineData(100, 101)] public void TryRemoveRangeOfKeyValuePairsForCornerCasesRemovesExistingItemsAndReportsNonremovablesBack(int initialAmountOfItems, int amountOfItemsToRemove) { // given var keyValuePairs 
= Enumerable.Range(0, initialAmountOfItems) .Select(i => new KeyValuePair<int, string>(i, $"#{i}")) .ToList(); var keyValuePairsToRemove = Enumerable.Range(0, amountOfItemsToRemove) .Select(i => new KeyValuePair<int, string>(i, $"#{i}")) .ToList(); using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs)) { // when IDictionary<int, string> nonRemovedKeyValuePairs; var tryRemoveResult = observableDictionary.TryRemoveRange(keyValuePairsToRemove, out nonRemovedKeyValuePairs); // then check whether all items have been accounted for tryRemoveResult.Should().Be(false); nonRemovedKeyValuePairs.Should().NotBeNull(); nonRemovedKeyValuePairs.Should().NotBeEmpty(); observableDictionary.Count.Should().Be(initialAmountOfItems - amountOfItemsToRemove + nonRemovedKeyValuePairs.Count); // check whether everything that was reported as removable is removed foreach (var keyValuePair in keyValuePairsToRemove.Except(nonRemovedKeyValuePairs)) { observableDictionary.Should().NotContain(keyValuePair); } foreach (var keyValuePair in nonRemovedKeyValuePairs) { observableDictionary.Should().NotContain(keyValuePair); } } } [Theory] [InlineData(0)] [InlineData(1)] [InlineData(10)] [InlineData(100)] public void TryRemoveRangeOfKeyValuePairsRemovesExistingItems(int initialAmountOfItems) { // given var keyValuePairs = Enumerable.Range(0, initialAmountOfItems) .Select(i => new KeyValuePair<int, string>(i, $"#{i}")) .ToList(); using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs)) { // when IDictionary<int, string> nonRemovables; var removalResult = observableDictionary.TryRemoveRange(keyValuePairs, out nonRemovables); // then check whether all items have been accounted for removalResult.Should().Be(true); observableDictionary.Count.Should().Be(0); nonRemovables.Should().NotBeNull(); nonRemovables.Should().BeEmpty(); } } [Fact] public void TryRemoveRangeOfKeyValuePairsRemovesExistingItemsAndReportsNonremovablesBack() { // given var 
keyValuePairs = Enumerable.Range(0, 100) .Select(i => new KeyValuePair<int, string>(i, $"#{i}")) .ToList(); var keyValuePairsToRemove = Enumerable.Range(50, 100) .Select(i => new KeyValuePair<int, string>(i, $"#{i}")) .ToList(); using (var observableDictionary = new ObservableDictionary<int, string>(keyValuePairs)) { // when IDictionary<int, string> nonRemovedKeyValuePairs; var tryRemoveResult = observableDictionary.TryRemoveRange(keyValuePairsToRemove, out nonRemovedKeyValuePairs); // then check whether all items have been accounted for tryRemoveResult.Should().Be(false); nonRemovedKeyValuePairs.Should().NotBeNull(); nonRemovedKeyValuePairs.Should().NotBeEmpty(); nonRemovedKeyValuePairs.Count.Should().Be(50); observableDictionary.Count.Should().Be(50); // check whether everything that was reported as removable is removed foreach (var keyValuePair in keyValuePairsToRemove.Except(nonRemovedKeyValuePairs)) { observableDictionary.Should().NotContain(keyValuePair); } foreach (var keyValuePair in nonRemovedKeyValuePairs) { observableDictionary.Should().NotContain(keyValuePair); } // and check whether all other one(s) are still there, too foreach (var keyValuePair in keyValuePairs.Except(keyValuePairsToRemove)) { observableDictionary.Should().Contain(keyValuePair); } } } [Fact] public void TryRemoveRangeOfKeyValuePairsThrowsOnNullKeyValuePairs() { // given using (var observableDictionary = new ObservableDictionary<int, string>()) { // when Action action = () => observableDictionary.TryRemoveRange(null, out IDictionary<int, string> _); // then check whether all items have been accounted for action .Should().Throw<ArgumentNullException>() .WithMessage("Value cannot be null.\r\nParameter name: items"); } } [Fact] public void TryUpdateDoesNotUpdateNonExistingItem() { // given var initialKvPs = new List<KeyValuePair<int, string>>() { new KeyValuePair<int, string>(1, "One") }; using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs)) { // when var 
updateResult = observableDictionary.TryUpdate(2, "One"); // then updateResult.Should().Be(false); observableDictionary.Count.Should().Be(1); observableDictionary.Should().Contain(1, "One"); } } [Fact] public void TryUpdateThrowsOnNullKey() { // given using (var observableDictionary = new ObservableDictionary<string, string>()) { // when Action action = () => observableDictionary.TryUpdate((string) null, "Null"); // then action .Should().Throw<ArgumentNullException>() .WithMessage("Value cannot be null.\r\nParameter name: key"); observableDictionary.Count.Should().Be(0); } } [Fact] public void TryUpdateUpdatesExistingItem() { // given var initialKvPs = new List<KeyValuePair<int, string>>() { new KeyValuePair<int, string>(1, "One") }; using (var observableDictionary = new ObservableDictionary<int, string>(initialKvPs)) { // when var updateResult = observableDictionary.TryUpdate(1, "Two"); // then updateResult.Should().Be(true); observableDictionary.Count.Should().Be(1); observableDictionary.Should().Contain(1, "Two"); observableDictionary.Keys.Should().Contain(1); observableDictionary.Values.Should().NotContain("One"); observableDictionary.Values.Should().Contain("Two"); } } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Compute
{
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using System.Net;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;
    using Microsoft.Rest.Serialization;
    using Newtonsoft.Json;
    using Microsoft.Rest.Azure;
    using Models;

    /// <summary>
    /// AvailabilitySetsOperations operations.
    /// </summary>
    internal partial class AvailabilitySetsOperations : IServiceOperations<ComputeManagementClient>, IAvailabilitySetsOperations
    {
        /// <summary>
        /// Initializes a new instance of the AvailabilitySetsOperations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        internal AvailabilitySetsOperations(ComputeManagementClient client)
        {
            if (client == null)
            {
                throw new ArgumentNullException("client");
            }
            this.Client = client;
        }

        /// <summary>
        /// Gets a reference to the ComputeManagementClient
        /// </summary>
        public ComputeManagementClient Client { get; private set; }

        // NOTE(review): the <param name='name'> doc text below appears copy-pasted
        // from 'parameters' by the generator; fix it in the AutoRest source, not here.
        /// <summary>
        /// The operation to create or update the availability set.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='name'>
        /// Parameters supplied to the Create Availability Set operation.
        /// </param>
        /// <param name='parameters'>
        /// Parameters supplied to the Create Availability Set operation.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<AvailabilitySet>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string name, AvailabilitySet parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            if (name == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "name");
            }
            if (parameters == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
            }
            // NOTE(review): redundant guard — parameters was already validated
            // non-null above (generated code; left as emitted).
            if (parameters != null)
            {
                parameters.Validate();
            }
            if (this.Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            string apiVersion = "2016-03-30";
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("name", name);
                tracingParameters.Add("parameters", parameters);
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "CreateOrUpdate", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{name}").ToString();
            _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName));
            _url = _url.Replace("{name}", Uri.EscapeDataString(name));
            _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
            List<string> _queryParameters = new List<string>();
            if (apiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(apiVersion)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += "?" + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            HttpRequestMessage _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("PUT");
            _httpRequest.RequestUri = new Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            if(parameters != null)
            {
                _requestContent = SafeJsonConvert.SerializeObject(parameters, this.Client.SerializationSettings);
                _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
                _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
            }
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    // Best effort: replace the generic exception with the service's
                    // CloudError payload when the error body parses as JSON.
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<AvailabilitySet>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = SafeJsonConvert.DeserializeObject<AvailabilitySet>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// The operation to delete the availability set.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='availabilitySetName'>
        /// The name of the availability set.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string availabilitySetName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            if (availabilitySetName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "availabilitySetName");
            }
            if (this.Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            string apiVersion = "2016-03-30";
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("availabilitySetName", availabilitySetName);
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "Delete", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}").ToString();
            _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName));
            _url = _url.Replace("{availabilitySetName}", Uri.EscapeDataString(availabilitySetName));
            _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
            List<string> _queryParameters = new List<string>();
            if (apiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(apiVersion)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += "?" + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            HttpRequestMessage _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("DELETE");
            _httpRequest.RequestUri = new Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200 && (int)_statusCode != 204)
            {
                // NOTE(review): unlike CreateOrUpdate/Get, the generated Delete path does
                // not attempt to parse the error body into a CloudError — the raw content
                // is attached to the exception as-is.
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// The operation to get the availability set.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='availabilitySetName'>
        /// The name of the availability set.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<AvailabilitySet>> GetWithHttpMessagesAsync(string resourceGroupName, string availabilitySetName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            if (availabilitySetName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "availabilitySetName");
            }
            if (this.Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            string apiVersion = "2016-03-30";
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("availabilitySetName", availabilitySetName);
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}").ToString();
            _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName));
            _url = _url.Replace("{availabilitySetName}", Uri.EscapeDataString(availabilitySetName));
            _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
            List<string> _queryParameters = new List<string>();
            if (apiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(apiVersion)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += "?" + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            HttpRequestMessage _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<AvailabilitySet>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = SafeJsonConvert.DeserializeObject<AvailabilitySet>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// The operation to list the availability sets.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<IEnumerable<AvailabilitySet>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            if (this.Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            string apiVersion = "2016-03-30";
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("apiVersion", apiVersion);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
            }
            // Construct URL
            var _baseUrl = this.Client.BaseUri.AbsoluteUri;
            var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets").ToString();
            _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName));
            _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId));
            List<string> _queryParameters = new List<string>();
            if (apiVersion != null)
            {
                _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(apiVersion)));
            }
            if (_queryParameters.Count > 0)
            {
                _url += "?" + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            HttpRequestMessage _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new Uri(_url);
            // Set Headers
            if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString());
            }
            if (this.Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (this.Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<IEnumerable<AvailabilitySet>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    // The wire format is a Page<AvailabilitySet>; presumably Page<T>
                    // implements IEnumerable<T> so it can be exposed through the
                    // IEnumerable<AvailabilitySet> response body — verify in Models.
                    _result.Body = SafeJsonConvert.DeserializeObject<Page<AvailabilitySet>>(_responseContent, this.Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.",
_responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Lists all available virtual machine sizes that can be used to create a new /// virtual machine in an existing availability set. /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='availabilitySetName'> /// The name of the availability set. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<IEnumerable<VirtualMachineSize>>> ListAvailableSizesWithHttpMessagesAsync(string resourceGroupName, string availabilitySetName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (availabilitySetName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "availabilitySetName"); } if (this.Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } string apiVersion = "2016-03-30"; // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("availabilitySetName", availabilitySetName); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "ListAvailableSizes", 
tracingParameters); } // Construct URL var _baseUrl = this.Client.BaseUri.AbsoluteUri; var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/availabilitySets/{availabilitySetName}/vmSizes").ToString(); _url = _url.Replace("{resourceGroupName}", Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{availabilitySetName}", Uri.EscapeDataString(availabilitySetName)); _url = _url.Replace("{subscriptionId}", Uri.EscapeDataString(this.Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (apiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", Uri.EscapeDataString(apiVersion))); } if (_queryParameters.Count > 0) { _url += "?" + string.Join("&", _queryParameters); } // Create HTTP transport objects HttpRequestMessage _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new Uri(_url); // Set Headers if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", Guid.NewGuid().ToString()); } if (this.Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (this.Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, 
cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, this.Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<IEnumerable<VirtualMachineSize>>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = 
SafeJsonConvert.DeserializeObject<Page<VirtualMachineSize>>(_responseContent, this.Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System.Collections.Specialized;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Overlays;
using osu.Game.Users;
using osu.Game.Users.Drawables;
using osuTK;

namespace osu.Game.Screens.OnlinePlay.Lounge.Components
{
    /// <summary>
    /// Displays the host of an online-play room alongside a capped row of recent participant
    /// avatars, a total participant count, and a "+N" overflow circle when more participants
    /// exist than can be shown (see <see cref="NumberOfCircles"/>).
    /// </summary>
    public class DrawableRoomParticipantsList : OnlinePlayComposite
    {
        // Diameter shared by each participant avatar and the overflow-count circle.
        private const float avatar_size = 36;

        // Horizontal flow holding the visible participant avatars (excludes the host avatar).
        private FillFlowContainer<CircularAvatar> avatarFlow;

        private CircularAvatar hostAvatar;
        private LinkFlowContainer hostText;
        private HiddenUserCount hiddenUsers;
        private OsuSpriteText totalCount;

        public DrawableRoomParticipantsList()
        {
            AutoSizeAxes = Axes.X;
            Height = 60;
        }

        [BackgroundDependencyLoader]
        private void load(OverlayColourProvider colours)
        {
            InternalChildren = new Drawable[]
            {
                // Sheared, rounded backdrop behind the whole list.
                new Container
                {
                    RelativeSizeAxes = Axes.Both,
                    Masking = true,
                    CornerRadius = 10,
                    Shear = new Vector2(0.2f, 0),
                    Child = new Box
                    {
                        RelativeSizeAxes = Axes.Both,
                        Colour = colours.Background4,
                    }
                },
                new FillFlowContainer
                {
                    RelativeSizeAxes = Axes.Y,
                    AutoSizeAxes = Axes.X,
                    Children = new Drawable[]
                    {
                        // Left section: host avatar + "hosted by <user>" text.
                        new FillFlowContainer
                        {
                            RelativeSizeAxes = Axes.Y,
                            AutoSizeAxes = Axes.X,
                            Spacing = new Vector2(8),
                            Padding = new MarginPadding { Left = 8, Right = 16 },
                            Children = new Drawable[]
                            {
                                hostAvatar = new CircularAvatar
                                {
                                    Anchor = Anchor.CentreLeft,
                                    Origin = Anchor.CentreLeft,
                                },
                                hostText = new LinkFlowContainer
                                {
                                    Anchor = Anchor.CentreLeft,
                                    Origin = Anchor.CentreLeft,
                                    AutoSizeAxes = Axes.Both
                                }
                            }
                        },
                        // Right section: participant count, avatar row and overflow circle,
                        // on its own (darker) sheared backdrop.
                        new Container
                        {
                            RelativeSizeAxes = Axes.Y,
                            AutoSizeAxes = Axes.X,
                            Children = new Drawable[]
                            {
                                new Container
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Masking = true,
                                    CornerRadius = 10,
                                    Shear = new Vector2(0.2f, 0),
                                    Child = new Box
                                    {
                                        RelativeSizeAxes = Axes.Both,
                                        Colour = colours.Background3,
                                    }
                                },
                                new FillFlowContainer
                                {
                                    Anchor = Anchor.Centre,
                                    Origin = Anchor.Centre,
                                    AutoSizeAxes = Axes.Both,
                                    Direction = FillDirection.Horizontal,
                                    Spacing = new Vector2(4),
                                    Padding = new MarginPadding { Left = 8, Right = 16 },
                                    Children = new Drawable[]
                                    {
                                        new SpriteIcon
                                        {
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                            Size = new Vector2(16),
                                            Icon = FontAwesome.Solid.User,
                                        },
                                        totalCount = new OsuSpriteText
                                        {
                                            Font = OsuFont.Default.With(weight: FontWeight.Bold),
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                        },
                                        avatarFlow = new FillFlowContainer<CircularAvatar>
                                        {
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                            AutoSizeAxes = Axes.Both,
                                            Direction = FillDirection.Horizontal,
                                            Spacing = new Vector2(4),
                                            Margin = new MarginPadding { Left = 4 },
                                        },
                                        hiddenUsers = new HiddenUserCount
                                        {
                                            Anchor = Anchor.CentreLeft,
                                            Origin = Anchor.CentreLeft,
                                        }
                                    }
                                }
                            }
                        },
                    }
                }
            };
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();

            // All three bindables are inherited from OnlinePlayComposite; binding with
            // runOnceImmediately=true populates the initial state.
            RecentParticipants.BindCollectionChanged(onParticipantsChanged, true);

            ParticipantCount.BindValueChanged(_ =>
            {
                updateHiddenUsers();
                totalCount.Text = ParticipantCount.Value.ToString();
            }, true);

            Host.BindValueChanged(onHostChanged, true);
        }

        private int numberOfCircles = 4;

        /// <summary>
        /// The maximum number of circles visible (including the "hidden count" circle in the overflow case).
        /// </summary>
        public int NumberOfCircles
        {
            get => numberOfCircles;
            set
            {
                numberOfCircles = value;

                if (LoadState < LoadState.Loaded)
                    return;

                // Reinitialising the list looks janky, but this is unlikely to be used in a setting where it's visible.
                clearUsers();
                foreach (var u in RecentParticipants)
                    addUser(u);

                updateHiddenUsers();
            }
        }

        // Incrementally mirrors changes of the RecentParticipants collection into avatarFlow.
        private void onParticipantsChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            switch (e.Action)
            {
                case NotifyCollectionChangedAction.Add:
                    foreach (var added in e.NewItems.OfType<User>())
                        addUser(added);
                    break;

                case NotifyCollectionChangedAction.Remove:
                    foreach (var removed in e.OldItems.OfType<User>())
                        removeUser(removed);
                    break;

                case NotifyCollectionChangedAction.Reset:
                    clearUsers();
                    break;

                case NotifyCollectionChangedAction.Replace:
                case NotifyCollectionChangedAction.Move:
                    // Easiest is to just reinitialise the whole list. These are unlikely to ever be use cases.
                    clearUsers();
                    foreach (var u in RecentParticipants)
                        addUser(u);
                    break;
            }

            updateHiddenUsers();
        }

        // Number of circles currently on screen: visible avatars plus the overflow circle if shown.
        private int displayedCircles => avatarFlow.Count + (hiddenUsers.Count > 0 ? 1 : 0);

        // Adds an avatar only while there is room under the NumberOfCircles cap.
        private void addUser(User user)
        {
            if (displayedCircles < NumberOfCircles)
                avatarFlow.Add(new CircularAvatar { User = user });
        }

        // Removal may leave a free slot; updateHiddenUsers (called by onParticipantsChanged) backfills it.
        private void removeUser(User user)
        {
            avatarFlow.RemoveAll(a => a.User == user);
        }

        private void clearUsers()
        {
            avatarFlow.Clear();
            updateHiddenUsers();
        }

        // Recomputes the "+N" overflow count, then trims or backfills the avatar row so that
        // exactly NumberOfCircles circles (at most) are displayed.
        private void updateHiddenUsers()
        {
            int hiddenCount = 0;
            if (RecentParticipants.Count > NumberOfCircles)
                // +1 because the overflow circle itself occupies one of the NumberOfCircles slots.
                hiddenCount = ParticipantCount.Value - NumberOfCircles + 1;

            hiddenUsers.Count = hiddenCount;

            if (displayedCircles > NumberOfCircles)
                avatarFlow.Remove(avatarFlow.Last());
            else if (displayedCircles < NumberOfCircles)
            {
                // Backfill with a recent participant that is not already displayed, if any.
                var nextUser = RecentParticipants.FirstOrDefault(u => avatarFlow.All(a => a.User != u));
                if (nextUser != null) addUser(nextUser);
            }
        }

        // Updates the host avatar and rebuilds the "hosted by <user>" link text.
        private void onHostChanged(ValueChangedEvent<User> host)
        {
            hostAvatar.User = host.NewValue;
            hostText.Clear();

            if (host.NewValue != null)
            {
                hostText.AddText("hosted by ");
                hostText.AddUserLink(host.NewValue);
            }
        }

        // A single circular participant avatar on a themed background disc.
        private class CircularAvatar : CompositeDrawable
        {
            public User User
            {
                get => avatar.User;
                set => avatar.User = value;
            }

            private readonly UpdateableAvatar avatar = new UpdateableAvatar(showUsernameTooltip: true) { RelativeSizeAxes = Axes.Both };

            [BackgroundDependencyLoader]
            private void load(OverlayColourProvider colours)
            {
                Size = new Vector2(avatar_size);

                InternalChild = new CircularContainer
                {
                    RelativeSizeAxes = Axes.Both,
                    Masking = true,
                    Children = new Drawable[]
                    {
                        new Box
                        {
                            Colour = colours.Background5,
                            RelativeSizeAxes = Axes.Both,
                        },
                        avatar
                    }
                };
            }
        }

        // The "+N" circle shown when more participants exist than visible avatar slots.
        // Hidden entirely while Count is zero.
        public class HiddenUserCount : CompositeDrawable
        {
            public int Count
            {
                get => count;
                set
                {
                    count = value;
                    countText.Text = $"+{count}";

                    if (count > 0)
                        Show();
                    else
                        Hide();
                }
            }

            private int count;

            private readonly SpriteText countText = new OsuSpriteText
            {
                Anchor = Anchor.Centre,
                Origin = Anchor.Centre,
                Font = OsuFont.Default.With(weight: FontWeight.Bold),
            };

            [BackgroundDependencyLoader]
            private void load(OverlayColourProvider colours)
            {
                Size = new Vector2(avatar_size);
                Alpha = 0;

                InternalChild = new CircularContainer
                {
                    RelativeSizeAxes = Axes.Both,
                    Masking = true,
                    Children = new Drawable[]
                    {
                        new Box
                        {
                            RelativeSizeAxes = Axes.Both,
                            Colour = colours.Background5,
                        },
                        countText
                    }
                };
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Reflection.Metadata.Ecma335;
using System.Reflection.Metadata.Tests;
using System.Reflection.PortableExecutable;
using Xunit;

namespace System.Reflection.Metadata.Decoding.Tests
{
    /// <summary>
    /// Tests for <see cref="SignatureDecoder{TType, TGenericContext}"/>: raw-blob decoding of
    /// types, method specifications, method/field/local/property signatures, and error paths.
    /// </summary>
    public partial class SignatureDecoderTests
    {
        [Fact]
        public unsafe void VerifyMultipleOptionalModifiers()
        {
            // Type 1: int32 modopt([mscorlib]System.Runtime.CompilerServices.IsLong) modopt([mscorlib]System.Runtime.CompilerServices.CallConvCdecl)
            // Type 2: char*
            // Type 3: uint32
            // Type 4: char modopt([mscorlib]System.Runtime.CompilerServices.IsConst)*
            var testSignature = new byte[] { 0x20, 0x45, 0x20, 0x69, 0x08, 0x0F, 0x03, 0x09, 0x0F, 0x20, 0x55, 0x03 };
            var types = new string[]
            {
                "int32 modopt(100001A) modopt(1000011)",
                "char*",
                "uint32",
                "char modopt(1000015)*"
            };

            fixed (byte* testSignaturePtr = &testSignature[0])
            {
                var signatureBlob = new BlobReader(testSignaturePtr, testSignature.Length);
                var provider = new OpaqueTokenTypeProvider();
                var decoder = new SignatureDecoder<string, DisassemblingGenericContext>(provider, metadataReader: null, genericContext: null);

                foreach (string typeString in types)
                {
                    // Verify that each type is decoded as expected
                    Assert.Equal(typeString, decoder.DecodeType(ref signatureBlob));
                }

                // And that nothing is left over to decode
                Assert.True(signatureBlob.RemainingBytes == 0);
                Assert.Throws<BadImageFormatException>(() => decoder.DecodeType(ref signatureBlob));
            }
        }

        [Theory]
        [InlineData(new string[] { "int32", "string" },
            new byte[] { 0x0A /*GENERICINST*/, 2 /*count*/, 0x8 /*I4*/, 0xE /*STRING*/ })]
        public unsafe void DecodeValidMethodSpecificationSignature(string[] expectedTypes, byte[] testSignature)
        {
            fixed (byte* testSignaturePtr = &testSignature[0])
            {
                var signatureBlob = new BlobReader(testSignaturePtr, testSignature.Length);
                var provider = new OpaqueTokenTypeProvider();
                var decoder = new SignatureDecoder<string, DisassemblingGenericContext>(provider, metadataReader: null, genericContext: null);

                IEnumerable<string> actualTypes = decoder.DecodeMethodSpecificationSignature(ref signatureBlob);
                Assert.Equal(expectedTypes, actualTypes);

                // The whole blob must be consumed, with nothing left to decode.
                Assert.True(signatureBlob.RemainingBytes == 0);
                Assert.Throws<BadImageFormatException>(() => decoder.DecodeType(ref signatureBlob));
            }
        }

        [Theory]
        [InlineData(new byte[] { 0 })] // bad header
        [InlineData(new byte[] { 0x0A /*GENERICINST*/, 0 /*count*/ })] // no type parameters
        public unsafe void DecodeInvalidMethodSpecificationSignature(byte[] testSignature)
        {
            fixed (byte* testSignaturePtr = &testSignature[0])
            {
                var signatureBlob = new BlobReader(testSignaturePtr, testSignature.Length);
                var provider = new OpaqueTokenTypeProvider();
                var decoder = new SignatureDecoder<string, DisassemblingGenericContext>(provider, metadataReader: null, genericContext: null);

                // FIX: the malformed blob must actually be decoded and the failure asserted.
                // Previously this test constructed the decoder but never invoked it, so it
                // passed vacuously regardless of decoder behavior.
                Assert.Throws<BadImageFormatException>(() => decoder.DecodeMethodSpecificationSignature(ref signatureBlob));
            }
        }

        [Fact]
        public void DecodeVarArgsDefAndRef()
        {
            // Round-trips this test assembly through the PE reader and checks that both the
            // vararg method definition and its member reference decode with the expected
            // calling convention and parameter lists.
            using (FileStream stream = File.OpenRead(typeof(VarArgsToDecode).GetTypeInfo().Assembly.Location))
            using (var peReader = new PEReader(stream))
            {
                MetadataReader metadataReader = peReader.GetMetadataReader();
                TypeDefinitionHandle typeDefHandle = TestMetadataResolver.FindTestType(metadataReader, typeof(VarArgsToDecode));
                TypeDefinition typeDef = metadataReader.GetTypeDefinition(typeDefHandle);
                MethodDefinition methodDef = metadataReader.GetMethodDefinition(typeDef.GetMethods().First());

                Assert.Equal("VarArgsCallee", metadataReader.GetString(methodDef.Name));
                var provider = new OpaqueTokenTypeProvider();
                MethodSignature<string> defSignature = methodDef.DecodeSignature(provider, null);
                Assert.Equal(SignatureCallingConvention.VarArgs, defSignature.Header.CallingConvention);
                Assert.Equal(1, defSignature.RequiredParameterCount);
                // The definition only carries the required (pre-__arglist) parameters.
                Assert.Equal(new[] { "int32" }, defSignature.ParameterTypes);

                int refCount = 0;

                foreach (MemberReferenceHandle memberRefHandle in metadataReader.MemberReferences)
                {
                    MemberReference memberRef = metadataReader.GetMemberReference(memberRefHandle);

                    if (metadataReader.StringComparer.Equals(memberRef.Name, "VarArgsCallee"))
                    {
                        Assert.Equal(MemberReferenceKind.Method, memberRef.GetKind());
                        MethodSignature<string> refSignature = memberRef.DecodeMethodSignature(provider, null);
                        Assert.Equal(SignatureCallingConvention.VarArgs, refSignature.Header.CallingConvention);
                        Assert.Equal(1, refSignature.RequiredParameterCount);
                        // The call-site reference additionally carries the variadic arguments.
                        Assert.Equal(new[] { "int32", "bool", "string", "float64" }, refSignature.ParameterTypes);
                        refCount++;
                    }
                }

                Assert.Equal(1, refCount);
            }
        }

        // Fixture type providing a vararg definition plus a call site for DecodeVarArgsDefAndRef.
        private static class VarArgsToDecode
        {
            public static void VarArgsCallee(int i, __arglist)
            {
            }

            public static void VarArgsCaller()
            {
                VarArgsCallee(1, __arglist(true, "hello", 0.42));
            }
        }

        // Test as much as we can with simple C# examples inline below.
        [Fact]
        public void SimpleSignatureProviderCoverage()
        {
            using (FileStream stream = File.OpenRead(typeof(SignaturesToDecode<>).GetTypeInfo().Assembly.Location))
            using (var peReader = new PEReader(stream))
            {
                MetadataReader reader = peReader.GetMetadataReader();
                var provider = new DisassemblingTypeProvider();
                TypeDefinitionHandle typeHandle = TestMetadataResolver.FindTestType(reader, typeof(SignaturesToDecode<>));
                Assert.Equal("System.Reflection.Metadata.Decoding.Tests.SignatureDecoderTests/SignaturesToDecode`1", provider.GetTypeFromHandle(reader, genericContext: null, handle: typeHandle));

                TypeDefinition type = reader.GetTypeDefinition(typeHandle);
                Dictionary<string, string> expectedFields = GetExpectedFieldSignatures();
                ImmutableArray<string> genericTypeParameters = type.GetGenericParameters().Select(h => reader.GetString(reader.GetGenericParameter(h).Name)).ToImmutableArray();

                var genericTypeContext = new DisassemblingGenericContext(genericTypeParameters, ImmutableArray<string>.Empty);

                foreach (var fieldHandle in type.GetFields())
                {
                    FieldDefinition field = reader.GetFieldDefinition(fieldHandle);
                    string fieldName = reader.GetString(field.Name);
                    string expected;
                    Assert.True(expectedFields.TryGetValue(fieldName, out expected), "Unexpected field: " + fieldName);
                    Assert.Equal(expected, field.DecodeSignature(provider, genericTypeContext));
                }

                Dictionary<string, string> expectedMethods = GetExpectedMethodSignatures();

                foreach (var methodHandle in type.GetMethods())
                {
                    MethodDefinition method = reader.GetMethodDefinition(methodHandle);

                    ImmutableArray<string> genericMethodParameters = method.GetGenericParameters().Select(h => reader.GetString(reader.GetGenericParameter(h).Name)).ToImmutableArray();
                    var genericMethodContext = new DisassemblingGenericContext(genericTypeParameters, genericMethodParameters);

                    string methodName = reader.GetString(method.Name);
                    string expected;
                    Assert.True(expectedMethods.TryGetValue(methodName, out expected), "Unexpected method: " + methodName);
                    MethodSignature<string> signature = method.DecodeSignature(provider, genericMethodContext);
                    Assert.True(signature.Header.Kind == SignatureKind.Method);

                    // Only the generic method fixture should report a generic parameter.
                    if (methodName.StartsWith("Generic"))
                    {
                        Assert.Equal(1, signature.GenericParameterCount);
                    }
                    else
                    {
                        Assert.Equal(0, signature.GenericParameterCount);
                    }

                    Assert.True(signature.GenericParameterCount <= 1 && (methodName != "GenericMethodParameter" || signature.GenericParameterCount == 1));
                    Assert.Equal(expected, provider.GetFunctionPointerType(signature));
                }

                Dictionary<string, string> expectedProperties = GetExpectedPropertySignatures();

                foreach (var propertyHandle in type.GetProperties())
                {
                    PropertyDefinition property = reader.GetPropertyDefinition(propertyHandle);
                    string propertyName = reader.GetString(property.Name);
                    string expected;
                    Assert.True(expectedProperties.TryGetValue(propertyName, out expected), "Unexpected property: " + propertyName);
                    MethodSignature<string> signature = property.DecodeSignature(provider, genericTypeContext);
                    Assert.True(signature.Header.Kind == SignatureKind.Property);
                    Assert.Equal(expected, provider.GetFunctionPointerType(signature));
                }

                Dictionary<string, string> expectedEvents = GetExpectedEventSignatures();

                foreach (var eventHandle in type.GetEvents())
                {
                    EventDefinition @event = reader.GetEventDefinition(eventHandle);
                    string eventName = reader.GetString(@event.Name);
                    string expected;
                    Assert.True(expectedEvents.TryGetValue(eventName, out expected), "Unexpected event: " + eventName);
                    Assert.Equal(expected, provider.GetTypeFromHandle(reader, genericTypeContext, @event.Type));
                }

                Assert.Equal("[System.Collections]System.Collections.Generic.List`1<!T>", provider.GetTypeFromHandle(reader, genericTypeContext, handle: type.BaseType));
            }
        }

        // Fixture type whose members exercise every signature type code covered by the
        // expected-signature dictionaries below.
        public unsafe class SignaturesToDecode<T> : List<T>
        {
            public sbyte SByte;
            public byte Byte;
            public short Int16;
            public ushort UInt16;
            public int Int32;
            public uint UInt32;
            public long Int64;
            public ulong UInt64;
            public string String;
            public object Object;
            public float Single;
            public double Double;
            public IntPtr IntPtr;
            public UIntPtr UIntPtr;
            public bool Boolean;
            public char Char;
            public volatile int ModifiedType;
            public int* Pointer;
            public int[] SZArray;
            public int[,] Array;
            public void ByReference(ref int i) { }
            public T GenericTypeParameter;
            public U GenericMethodParameter<U>() { throw null; }
            public List<int> GenericInstantiation;
            public struct Nested { }
            public Nested Property { get { throw null; } }
            public event EventHandler<EventArgs> Event { add { } remove { } }
        }

        [Fact]
        public void PinnedAndUnpinnedLocals()
        {
            using (FileStream stream = File.OpenRead(typeof(PinnedAndUnpinnedLocalsToDecode).GetTypeInfo().Assembly.Location))
            using (var peReader = new PEReader(stream))
            {
                MetadataReader reader = peReader.GetMetadataReader();
                var provider = new DisassemblingTypeProvider();
                TypeDefinitionHandle typeDefHandle = TestMetadataResolver.FindTestType(reader, typeof(PinnedAndUnpinnedLocalsToDecode));
                TypeDefinition typeDef = reader.GetTypeDefinition(typeDefHandle);
                MethodDefinition methodDef = reader.GetMethodDefinition(typeDef.GetMethods().First());

                Assert.Equal("DoSomething", reader.GetString(methodDef.Name));

                MethodBodyBlock body = peReader.GetMethodBody(methodDef.RelativeVirtualAddress);
                StandaloneSignature localSignature = reader.GetStandaloneSignature(body.LocalSignature);

                ImmutableArray<string> localTypes = localSignature.DecodeLocalSignature(provider, genericContext: null);

                // Compiler can generate temporaries or re-order so just check the ones we expect are there.
                // (They could get optimized away too. If that happens in practice, change this test to use hard-coded signatures.)
                Assert.Contains("uint8& pinned", localTypes);
                Assert.Contains("uint8[]", localTypes);
            }
        }

        // Fixture for PinnedAndUnpinnedLocals: the fixed statement forces a pinned local.
        public static class PinnedAndUnpinnedLocalsToDecode
        {
            public static unsafe int DoSomething()
            {
                byte[] bytes = new byte[] { 1, 2, 3 };
                fixed (byte* bytePtr = bytes)
                {
                    return *bytePtr;
                }
            }
        }

        [Fact]
        public void WrongSignatureType()
        {
            // Feeding a signature blob of one kind to a decoder entry point of another kind
            // must raise BadImageFormatException rather than produce garbage.
            using (FileStream stream = File.OpenRead(typeof(VarArgsToDecode).GetTypeInfo().Assembly.Location))
            using (var peReader = new PEReader(stream))
            {
                MetadataReader reader = peReader.GetMetadataReader();
                var provider = new DisassemblingTypeProvider();
                var decoder = new SignatureDecoder<string, DisassemblingGenericContext>(provider, reader, genericContext: null);

                BlobReader fieldSignature = reader.GetBlobReader(reader.GetFieldDefinition(MetadataTokens.FieldDefinitionHandle(1)).Signature);
                BlobReader methodSignature = reader.GetBlobReader(reader.GetMethodDefinition(MetadataTokens.MethodDefinitionHandle(1)).Signature);
                BlobReader propertySignature = reader.GetBlobReader(reader.GetPropertyDefinition(MetadataTokens.PropertyDefinitionHandle(1)).Signature);

                Assert.Throws<BadImageFormatException>(() => decoder.DecodeMethodSignature(ref fieldSignature));
                Assert.Throws<BadImageFormatException>(() => decoder.DecodeFieldSignature(ref methodSignature));
                Assert.Throws<BadImageFormatException>(() => decoder.DecodeLocalSignature(ref propertySignature));
            }
        }

        private static Dictionary<string, string> GetExpectedFieldSignatures()
        {
            // Field name -> signature
            return new Dictionary<string, string>()
            {
                { "SByte", "int8" },
                { "Byte", "uint8" },
                { "Int16", "int16" },
                { "UInt16", "uint16" },
                { "Int32", "int32" },
                { "UInt32", "uint32" },
                { "Int64", "int64" },
                { "UInt64", "uint64" },
                { "String", "string" },
                { "Object", "object" },
                { "Single", "float32" },
                { "Double", "float64" },
                { "IntPtr", "native int" },
                { "UIntPtr", "native uint" },
                { "Boolean", "bool" },
                { "Char", "char" },
                { "ModifiedType", "int32 modreq([System.Runtime]System.Runtime.CompilerServices.IsVolatile)" },
                { "Pointer", "int32*" },
                { "SZArray", "int32[]" },
                { "Array", "int32[0...,0...]" },
                { "GenericTypeParameter", "!T" },
                { "GenericInstantiation", "[System.Collections]System.Collections.Generic.List`1<int32>" },
            };
        }

        private static Dictionary<string, string> GetExpectedMethodSignatures()
        {
            // method name -> signature
            return new Dictionary<string, string>()
            {
                { "ByReference", "method void *(int32&)" },
                { "GenericMethodParameter", "method !!U *()" },
                { ".ctor", "method void *()" },
                { "get_Property", "method System.Reflection.Metadata.Decoding.Tests.SignatureDecoderTests/SignaturesToDecode`1/Nested<!T> *()" },
                { "add_Event", "method void *([System.Runtime]System.EventHandler`1<[System.Runtime]System.EventArgs>)" },
                { "remove_Event", "method void *([System.Runtime]System.EventHandler`1<[System.Runtime]System.EventArgs>)" },
            };
        }

        private static Dictionary<string, string> GetExpectedPropertySignatures()
        {
            // property name -> signature
            return new Dictionary<string, string>()
            {
                { "Property", "method System.Reflection.Metadata.Decoding.Tests.SignatureDecoderTests/SignaturesToDecode`1/Nested<!T> *()" },
            };
        }

        private static Dictionary<string, string> GetExpectedEventSignatures()
        {
            // event name -> signature
            return new Dictionary<string, string>()
            {
                { "Event", "[System.Runtime]System.EventHandler`1<[System.Runtime]System.EventArgs>" },
            };
        }

        [Theory]
        [InlineData(new byte[] { 0x12 /*CLASS*/, 0x06 /*encoded type spec*/ })] // not def or ref
        [InlineData(new byte[] { 0x11 /*VALUETYPE*/, 0x06 /*encoded type spec*/})] // not def or ref
        [InlineData(new byte[] { 0x60 })] // Bad type code
        public unsafe void BadTypeSignature(byte[] signature)
        {
            fixed (byte* bytes = signature)
            {
                BlobReader reader = new BlobReader(bytes, signature.Length);
                Assert.Throws<BadImageFormatException>(() => new SignatureDecoder<string, DisassemblingGenericContext>(new OpaqueTokenTypeProvider(), metadataReader: null, genericContext: null).DecodeType(ref reader));
            }
        }

        [Theory]
        [InlineData("method void *()", new byte[] { 0x1B /*FNPTR*/, 0 /*default calling convention*/, 0 /*parameters count*/, 0x1 /* return type (VOID)*/ })]
        [InlineData("int32[...]", new byte[] { 0x14 /*ARRAY*/, 0x8 /*I4*/, 1 /*rank*/, 0 /*sizes*/, 0 /*lower bounds*/ })]
        [InlineData("int32[...,...,...]", new byte[] { 0x14 /*ARRAY*/, 0x8 /*I4*/, 3 /*rank*/, 0 /*sizes*/, 0/*lower bounds*/ })]
        [InlineData("int32[-1...1]", new byte[] { 0x14 /*ARRAY*/, 0x8 /*I4*/, 1 /*rank*/, 1 /*sizes*/, 3 /*size*/, 1 /*lower bounds*/, 0x7F /*lower bound (compressed -1)*/ })]
        [InlineData("int32[1...]", new byte[] { 0x14 /*ARRAY*/, 0x8 /*I4*/, 1 /*rank*/, 0 /*sizes*/, 1 /*lower bounds*/, 2 /*lower bound (compressed +1)*/ })]
        public unsafe void ExoticTypeSignature(string expected, byte[] signature)
        {
            fixed (byte* bytes = signature)
            {
                BlobReader reader = new BlobReader(bytes, signature.Length);
                Assert.Equal(expected, new SignatureDecoder<string, DisassemblingGenericContext>(new OpaqueTokenTypeProvider(), metadataReader: null, genericContext: null).DecodeType(ref reader));
            }
        }

        [Fact]
        public void ProviderCannotBeNull()
        {
            Assert.Throws<ArgumentNullException>("provider", () => new SignatureDecoder<int, object>(provider: null, metadataReader: null, genericContext: null));
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ServiceStack.Common;
using ServiceStack.Common.Web;
using ServiceStack.Logging;
using ServiceStack.Markdown;
using ServiceStack.ServiceHost;
using ServiceStack.Text;
using ServiceStack.WebHost.Endpoints;
using ServiceStack.WebHost.EndPoints.Support.Markdown;

namespace ServiceStack.WebHost.EndPoints.Formats
{
    // Where a markdown file was found, which determines how it is registered.
    public enum MarkdownPageType
    {
        ContentPage = 1,
        ViewPage = 2,
        SharedViewPage = 3,
    }

    /// <summary>
    /// View engine that discovers *.md / *.markdown files, compiles them into
    /// MarkdownPage instances and renders them (optionally inside a
    /// default.shtml template) as HTML or raw Markdown/plain text.
    /// NOTE(review): page/template dictionaries are plain Dictionary instances
    /// mutated at registration time — not synchronized; appears to assume
    /// single-threaded startup registration. Confirm before concurrent use.
    /// </summary>
    public class MarkdownFormat : IViewEngine
    {
        private static readonly ILog Log = LogManager.GetLogger(typeof(MarkdownFormat));

        private const string ErrorPageNotFound = "Could not find Markdown page '{0}'";

        public static string TemplateName = "default.shtml";
        public static string TemplatePlaceHolder = "<!--@Body-->";
        public static string WebHostUrlPlaceHolder = "~/";

        public static MarkdownFormat Instance = new MarkdownFormat();

        // ~/View - Dynamic Pages
        public Dictionary<string, MarkdownPage> ViewPages = new Dictionary<string, MarkdownPage>(
            StringComparer.CurrentCultureIgnoreCase);

        // ~/View/Shared - Dynamic Shared Pages
        public Dictionary<string, MarkdownPage> ViewSharedPages = new Dictionary<string, MarkdownPage>(
            StringComparer.CurrentCultureIgnoreCase);

        //Content Pages outside of ~/View
        public Dictionary<string, MarkdownPage> ContentPages = new Dictionary<string, MarkdownPage>(
            StringComparer.CurrentCultureIgnoreCase);

        // Template file path -> prepared template (shared by all pages using it).
        public Dictionary<string, MarkdownTemplate> PageTemplates = new Dictionary<string, MarkdownTemplate>(
            StringComparer.CurrentCultureIgnoreCase);

        public Type MarkdownBaseType { get; set; }
        public Dictionary<string, Type> MarkdownGlobalHelpers { get; set; }
        // Pluggable page discovery; defaults to FindMarkdownPages below.
        public Func<string, IEnumerable<MarkdownPage>> FindMarkdownPagesFn { get; set; }

        private readonly MarkdownSharp.Markdown markdown;

        public IAppHost AppHost { get; set; }

        // Literal token -> replacement applied to template contents on load.
        public Dictionary<string, string> MarkdownReplaceTokens { get; set; }

        public MarkdownFormat()
        {
            markdown = new MarkdownSharp.Markdown();
            this.MarkdownBaseType = typeof(MarkdownViewBase);
            this.MarkdownGlobalHelpers = new Dictionary<string, Type>();
            this.FindMarkdownPagesFn = FindMarkdownPages;
            this.MarkdownReplaceTokens = new Dictionary<string, string>();
        }

        /// <summary>
        /// Hooks this engine into the app host: scans the markdown search path,
        /// registers an HTML provider, a catch-all handler for content pages,
        /// and content-type serializers for text/markdown and text/plain.
        /// </summary>
        public void Register(IAppHost appHost)
        {
            this.AppHost = appHost;
            this.MarkdownReplaceTokens = appHost.Config.MarkdownReplaceTokens ?? new Dictionary<string, string>();
            // Only overrides "~/" if the host already declared that token.
            if (!appHost.Config.WebHostUrl.IsNullOrEmpty() && this.MarkdownReplaceTokens.ContainsKey("~/"))
                this.MarkdownReplaceTokens["~/"] = appHost.Config.WebHostUrl.WithTrailingSlash();

            RegisterMarkdownPages(appHost.Config.MarkdownSearchPath);

            //Render HTML
            appHost.HtmlProviders.Add((requestContext, dto, httpRes) =>
            {
                var httpReq = requestContext.Get<IHttpRequest>();
                MarkdownPage markdownPage;
                if ((markdownPage = GetViewPageByResponse(dto, httpReq)) == null)
                    return false;

                ReloadModifiedPageAndTemplates(markdownPage);

                return ProcessMarkdownPage(httpReq, markdownPage, dto, httpRes);
            });

            appHost.CatchAllHandlers.Add((httpMethod, pathInfo, filePath) =>
            {
                MarkdownPage markdownPage;
                if (filePath == null || (markdownPage = GetContentPage(filePath.WithoutExtension())) == null) return null;
                return new MarkdownHandler
                {
                    MarkdownFormat = this,
                    MarkdownPage = markdownPage,
                    RequestName = "MarkdownPage",
                    PathInfo = pathInfo,
                    FilePath = filePath
                };
            });

            appHost.ContentTypeFilters.Register(ContentType.MarkdownText, SerializeToStream, null);
            appHost.ContentTypeFilters.Register(ContentType.PlainText, SerializeToStream, null);
            appHost.Config.IgnoreFormatsInMetadata.Add(ContentType.MarkdownText.ToContentFormat());
            appHost.Config.IgnoreFormatsInMetadata.Add(ContentType.PlainText.ToContentFormat());
        }

        /// <summary>
        /// Renders a view page to the response. "?format=markdown|text|plain"
        /// switches to raw markdown output; a "bare" format modifier skips the
        /// surrounding template. Returns true (page was handled).
        /// </summary>
        public bool ProcessMarkdownPage(IHttpRequest httpReq, MarkdownPage markdownPage, object dto, IHttpResponse httpRes)
        {
            httpRes.AddHeaderLastModified(markdownPage.GetLastModified());

            var renderInTemplate = true;
            var renderHtml = true;
            string format;
            if (httpReq != null && (format = httpReq.QueryString["format"]) != null)
            {
                renderHtml = !(format.StartsWithIgnoreCase("markdown")
                    || format.StartsWithIgnoreCase("text")
                    || format.StartsWithIgnoreCase("plain"));
                renderInTemplate = !httpReq.GetFormatModifier().StartsWithIgnoreCase("bare");
            }

            if (!renderHtml)
            {
                httpRes.ContentType = ContentType.PlainText;
            }

            var markup = RenderDynamicPage(markdownPage, markdownPage.Name, dto, renderHtml, renderInTemplate);
            var markupBytes = markup.ToUtf8Bytes();
            httpRes.OutputStream.Write(markupBytes, 0, markupBytes.Length);

            return true;
        }

        /// <summary>
        /// Re-reads the page and its (directive and default) templates from disk
        /// when the file's last-write time is newer than the cached copy.
        /// </summary>
        public void ReloadModifiedPageAndTemplates(MarkdownPage markdownPage)
        {
            var lastWriteTime = File.GetLastWriteTime(markdownPage.FilePath);
            if (lastWriteTime > markdownPage.LastModified)
            {
                markdownPage.Reload();
            }

            MarkdownTemplate template;
            if (markdownPage.DirectiveTemplatePath != null
                && this.PageTemplates.TryGetValue(markdownPage.DirectiveTemplatePath, out template))
            {
                lastWriteTime = File.GetLastWriteTime(markdownPage.DirectiveTemplatePath);
                if (lastWriteTime > template.LastModified)
                    ReloadTemplate(template);
            }
            if (markdownPage.TemplatePath != null
                && this.PageTemplates.TryGetValue(markdownPage.TemplatePath, out template))
            {
                lastWriteTime = File.GetLastWriteTime(markdownPage.TemplatePath);
                if (lastWriteTime > template.LastModified)
                    ReloadTemplate(template);
            }
        }

        // Reload a template from disk, re-applying the replace tokens.
        private void ReloadTemplate(MarkdownTemplate template)
        {
            var contents = File.ReadAllText(template.FilePath);
            foreach (var markdownReplaceToken in MarkdownReplaceTokens)
            {
                contents = contents.Replace(markdownReplaceToken.Key, markdownReplaceToken.Value);
            }
            template.Reload(contents);
        }

        /// <summary>
        /// Render Markdown for text/markdown and text/plain ContentTypes
        /// </summary>
        public void SerializeToStream(IRequestContext requestContext, object dto, Stream stream)
        {
            MarkdownPage markdownPage;
            if ((markdownPage = GetViewPageByResponse(dto, requestContext.Get<IHttpRequest>())) == null)
                throw new InvalidDataException(ErrorPageNotFound.FormatWith(GetPageName(dto, requestContext)));

            ReloadModifiedPageAndTemplates(markdownPage);

            const bool renderHtml = false; //i.e. render Markdown
            var markup = RenderStaticPage(markdownPage, renderHtml);
            var markupBytes = markup.ToUtf8Bytes();
            stream.Write(markupBytes, 0, markupBytes.Length);
        }

        // Best-effort page name for error messages: explicit TemplateName,
        // else the response DTO type name, else the request operation name.
        public string GetPageName(object dto, IRequestContext requestContext)
        {
            var httpRequest = requestContext != null ? requestContext.Get<IHttpRequest>() : null;

            var httpResult = dto as IHttpResult;
            if (httpResult != null)
            {
                if (httpResult.TemplateName != null) return httpResult.TemplateName;
                dto = httpResult.Response;
            }

            if (dto != null) return dto.GetType().Name;

            return httpRequest != null ? httpRequest.OperationName : null;
        }

        // Resolve the view page for a response DTO using the same precedence
        // as GetPageName above.
        public MarkdownPage GetViewPageByResponse(object dto, IHttpRequest httpRequest)
        {
            var httpResult = dto as IHttpResult;
            if (httpResult != null)
            {
                //If TemplateName was specified don't look for anything else.
                if (httpResult.TemplateName != null)
                    return GetViewPage(httpResult.TemplateName);

                dto = httpResult.Response;
            }

            if (dto != null)
            {
                var responseTypeName = dto.GetType().Name;
                var markdownPage = GetViewPage(responseTypeName);
                if (markdownPage != null) return markdownPage;
            }

            return httpRequest != null ? GetViewPage(httpRequest.OperationName) : null;
        }

        // ~/Views pages take precedence over ~/Views/Shared pages.
        public MarkdownPage GetViewPage(string pageName)
        {
            MarkdownPage markdownPage;

            ViewPages.TryGetValue(pageName, out markdownPage);
            if (markdownPage != null) return markdownPage;

            ViewSharedPages.TryGetValue(pageName, out markdownPage);
            return markdownPage;
        }

        public MarkdownPage GetContentPage(string pageFilePath)
        {
            MarkdownPage markdownPage;
            ContentPages.TryGetValue(pageFilePath, out markdownPage);
            return markdownPage;
        }

        // Caches of directory -> template-path lookups so GetTemplatePath only
        // walks each directory chain once.
        readonly Dictionary<string,string> templatePathsFound = new Dictionary<string, string>(StringComparer.InvariantCultureIgnoreCase);
        readonly HashSet<string> templatePathsNotFound = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);

        public void RegisterMarkdownPages(string dirPath)
        {
            foreach (var page in FindMarkdownPagesFn(dirPath))
            {
                AddPage(page);
            }
        }

        /// <summary>
        /// Default page discovery: every *.md / *.markdown file under dirPath,
        /// classified by whether it lives under Views/ or Views/Shared/.
        /// </summary>
        public IEnumerable<MarkdownPage> FindMarkdownPages(string dirPath)
        {
            var di = new DirectoryInfo(dirPath);
            var markDownFiles = di.GetMatchingFiles("*.md")
                .Concat(di.GetMatchingFiles("*.markdown"));

            var viewPath = Path.Combine(di.FullName, "Views");
            var viewSharedPath = Path.Combine(viewPath, "Shared");

            foreach (var markDownFile in markDownFiles)
            {
                var fileInfo = new FileInfo(markDownFile);
                var pageName = fileInfo.Name.WithoutExtension();
                var pageContents = File.ReadAllText(markDownFile);

                // Check Shared first: it is a sub-path of viewPath.
                var pageType = MarkdownPageType.ContentPage;
                if (fileInfo.FullName.StartsWithIgnoreCase(viewSharedPath))
                    pageType = MarkdownPageType.SharedViewPage;
                else if (fileInfo.FullName.StartsWithIgnoreCase(viewPath))
                    pageType = MarkdownPageType.ViewPage;

                var templatePath = GetTemplatePath(fileInfo.DirectoryName);

                yield return new MarkdownPage(this, markDownFile, pageName, pageContents, pageType)
                {
                    TemplatePath = templatePath,
                    LastModified = fileInfo.LastWriteTime,
                };
            }
        }

        // Walk up from fileDirPath looking for a default.shtml template,
        // memoizing both hits and misses.
        private string GetTemplatePath(string fileDirPath)
        {
            if (templatePathsNotFound.Contains(fileDirPath)) return null;

            var templateDirPath = fileDirPath;
            string templatePath;
            while (templateDirPath != null && !File.Exists(Path.Combine(templateDirPath, TemplateName)))
            {
                if (templatePathsFound.TryGetValue(templateDirPath, out templatePath))
                    return templatePath;

                templateDirPath = templateDirPath.ParentDirectory();
            }

            if (templateDirPath != null)
            {
                templatePath = Path.Combine(templateDirPath, TemplateName);
                templatePathsFound[templateDirPath] = templatePath;
                return templatePath;
            }

            templatePathsNotFound.Add(fileDirPath);
            return null;
        }

        public void RegisterMarkdownPage(MarkdownPage markdownPage)
        {
            AddPage(markdownPage);
        }

        /// <summary>
        /// Compiles a page and files it into the appropriate dictionary,
        /// then loads its template if not already cached. Prepare() failures
        /// are logged, not rethrown, so one bad page doesn't stop registration.
        /// </summary>
        public void AddPage(MarkdownPage page)
        {
            try
            {
                page.Prepare();
                switch (page.PageType)
                {
                    case MarkdownPageType.ViewPage:
                        ViewPages.Add(page.Name, page);
                        break;
                    case MarkdownPageType.SharedViewPage:
                        ViewSharedPages.Add(page.Name, page);
                        break;
                    case MarkdownPageType.ContentPage:
                        ContentPages.Add(page.FilePath.WithoutExtension(), page);
                        break;
                }
            }
            catch (Exception ex)
            {
                Log.Error("AddViewPage() page.Prepare(): " + ex.Message, ex);
            }

            var templatePath = page.TemplatePath;
            if (page.TemplatePath == null) return;

            if (PageTemplates.ContainsKey(templatePath)) return;

            AddTemplate(templatePath, File.ReadAllText(templatePath));
        }

        /// <summary>
        /// Registers a template after applying replace tokens; returns null
        /// (after logging) if the template fails to Prepare(). Note the failed
        /// template is still left in PageTemplates.
        /// </summary>
        public MarkdownTemplate AddTemplate(string templatePath, string templateContents)
        {
            var templateFile = new FileInfo(templatePath);
            var templateName = templateFile.FullName.WithoutExtension();

            foreach (var markdownReplaceToken in MarkdownReplaceTokens)
            {
                templateContents = templateContents.Replace(markdownReplaceToken.Key, markdownReplaceToken.Value);
            }

            var template = new MarkdownTemplate(templatePath, templateName, templateContents)
            {
                LastModified = templateFile.LastWriteTime,
            };
            PageTemplates.Add(templatePath, template);
            try
            {
                template.Prepare();
                return template;
            }
            catch (Exception ex)
            {
                Log.Error("AddViewPage() template.Prepare(): " + ex.Message, ex);
                return null;
            }
        }

        public string Transform(string template)
        {
            return markdown.Transform(template);
        }

        // Pass-through when renderHtml is false (raw markdown output).
        public string Transform(string template, bool renderHtml)
        {
            return renderHtml ? markdown.Transform(template) : template;
        }

        public string RenderStaticPageHtml(string filePath)
        {
            return RenderStaticPage(filePath, true);
        }

        public string RenderStaticPage(string filePath, bool renderHtml)
        {
            if (filePath == null)
                throw new ArgumentNullException("filePath");

            filePath = filePath.WithoutExtension();

            MarkdownPage markdownPage;
            if (!ContentPages.TryGetValue(filePath, out markdownPage))
                throw new InvalidDataException(ErrorPageNotFound.FormatWith(filePath));

            return RenderStaticPage(markdownPage, renderHtml);
        }

        private string RenderStaticPage(MarkdownPage markdownPage, bool renderHtml)
        {
            //TODO: Optimize if contains no dynamic elements
            return RenderDynamicPage(markdownPage, new Dictionary<string, object>(), renderHtml, true);
        }

        /// <summary>
        /// Wraps rendered page output in its template (directive template takes
        /// precedence over the directory default); returns the page unchanged
        /// if no template applies.
        /// </summary>
        private string RenderInTemplateIfAny(MarkdownPage markdownPage, Dictionary<string, object> scopeArgs, string pageHtml)
        {
            MarkdownTemplate markdownTemplate = null;

            var directiveTemplatePath = markdownPage.DirectiveTemplatePath;
            if (directiveTemplatePath != null)
            {
                if (!PageTemplates.TryGetValue(directiveTemplatePath, out markdownTemplate))
                {
                    if (!File.Exists(directiveTemplatePath))
                        throw new FileNotFoundException("Could not find template: " + directiveTemplatePath);

                    var templateContents = File.ReadAllText(directiveTemplatePath);
                    markdownTemplate = AddTemplate(directiveTemplatePath, templateContents);
                }
            }

            if (markdownTemplate == null)
            {
                var templatePath = markdownPage.TemplatePath;
                if (templatePath == null) return pageHtml;

                markdownTemplate = PageTemplates[templatePath];
            }

            if (scopeArgs != null)
                scopeArgs[MarkdownTemplate.BodyPlaceHolder] = pageHtml;

            var htmlPage = markdownTemplate.RenderToString(scopeArgs);

            return htmlPage;
        }

        public string RenderDynamicPageHtml(string pageName, object model)
        {
            return RenderDynamicPage(pageName, model, true);
        }

        public string RenderDynamicPageHtml(string pageName)
        {
            return RenderDynamicPage(GetViewPage(pageName), new Dictionary<string, object>(), true, true);
        }

        public string RenderDynamicPageHtml(string pageName, Dictionary<string, object> scopeArgs)
        {
            return RenderDynamicPage(GetViewPage(pageName), scopeArgs, true, true);
        }

        public string RenderPartial(string pageName, object model, bool renderHtml)
        {
            return RenderDynamicPage(GetViewPage(pageName), pageName, model, renderHtml, true);
        }

        public string RenderDynamicPage(string pageName, object model, bool renderHtml)
        {
            return RenderDynamicPage(GetViewPage(pageName), pageName, model, renderHtml, true);
        }

        private string RenderDynamicPage(MarkdownPage markdownPage, string pageName, object model, bool renderHtml, bool renderTemplate)
        {
            if (markdownPage == null)
                throw new InvalidDataException(ErrorPageNotFound.FormatWith(pageName));

            // Model is exposed to the page under the well-known ModelName key.
            var scopeArgs = new Dictionary<string, object> { { MarkdownPage.ModelName, model } };

            return RenderDynamicPage(markdownPage, scopeArgs, renderHtml, renderTemplate);
        }

        public string RenderDynamicPage(MarkdownPage markdownPage, Dictionary<string, object> scopeArgs, bool renderHtml, bool renderTemplate)
        {
            scopeArgs = scopeArgs ?? new Dictionary<string, object>();

            var htmlPage = markdownPage.RenderToString(scopeArgs, renderHtml);
            if (!renderTemplate) return htmlPage;

            var html = RenderInTemplateIfAny(
                markdownPage, scopeArgs, htmlPage);

            return html;
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;

namespace Milo.Core
{
    /// <summary>
    /// Property data collection: a strongly-typed, null-rejecting wrapper over
    /// a <see cref="List{T}"/> of <see cref="PropertyData"/> that also
    /// implements the non-generic <see cref="IList"/> for legacy callers.
    /// </summary>
    public class PropertyDataCollection : IList<PropertyData>, IList
    {
        /// <summary>
        /// The page list.
        /// </summary>
        private readonly List<PropertyData> _propertyList;

        /// <summary>
        /// Stable object identity handed out via <see cref="SyncRoot"/>.
        /// FIX: the previous implementation returned _propertyList.ToArray(),
        /// i.e. a brand-new array on every access, so two callers locking on
        /// SyncRoot would lock different objects and get no mutual exclusion.
        /// </summary>
        private readonly object _syncRoot = new object();

        /// <summary>
        /// Initializes a new instance of the <see cref="Milo.Core.PropertyDataCollection"/> class.
        /// </summary>
        public PropertyDataCollection ()
        {
            _propertyList = new List<PropertyData> ();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="Milo.Core.PropertyDataCollection"/> class.
        /// </summary>
        /// <param name="collection">Collection.</param>
        public PropertyDataCollection (IEnumerable<PropertyData> collection)
        {
            _propertyList = new List<PropertyData> (collection);
        }

        /// <summary>
        /// Gets or sets the element at the specified index.
        /// </summary>
        /// <param name="index">The index.</param>
        /// <returns></returns>
        /// <exception cref="System.ArgumentNullException">value;Cannot add null object to PropertyDataCollection</exception>
        public PropertyData this[int index]
        {
            get { return _propertyList[index]; }
            set
            {
                if (value == null)
                {
                    throw new ArgumentNullException("value", "Cannot add null object to PropertyDataCollection");
                }
                _propertyList[index] = value;
            }
        }

        /// <summary>
        /// Gets the number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1" />.
        /// </summary>
        /// <returns>The number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1" />.</returns>
        public int Count
        {
            get { return _propertyList.Count; }
        }

        /// <summary>
        /// Gets an object that can be used to synchronize access to the <see cref="T:System.Collections.ICollection" />.
        /// Always returns the same object for the lifetime of this instance,
        /// as the ICollection.SyncRoot contract requires.
        /// </summary>
        /// <returns>An object that can be used to synchronize access to the <see cref="T:System.Collections.ICollection" />.</returns>
        public object SyncRoot
        {
            get { return _syncRoot; }
        }

        /// <summary>
        /// Gets a value indicating whether access to the <see cref="T:System.Collections.ICollection" /> is synchronized (thread safe).
        /// </summary>
        /// <returns>true if access to the <see cref="T:System.Collections.ICollection" /> is synchronized (thread safe); otherwise, false.</returns>
        public bool IsSynchronized
        {
            get { return false; }
        }

        /// <summary>
        /// Gets a value indicating whether the <see cref="T:System.Collections.Generic.ICollection`1" /> is read-only.
        /// </summary>
        /// <returns>true if the <see cref="T:System.Collections.Generic.ICollection`1" /> is read-only; otherwise, false.</returns>
        public bool IsReadOnly
        {
            get { return false; }
        }

        /// <summary>
        /// Gets a value indicating whether the <see cref="T:System.Collections.IList" /> has a fixed size.
        /// </summary>
        /// <returns>true if the <see cref="T:System.Collections.IList" /> has a fixed size; otherwise, false.</returns>
        public bool IsFixedSize
        {
            get { return false; }
        }

        /// <summary>
        /// Clear this instance.
        /// </summary>
        public void Clear ()
        {
            _propertyList.Clear ();
        }

        /// <summary>
        /// Inserts the specified index.
        /// </summary>
        /// <param name="index">The index.</param>
        /// <param name="page">The page.</param>
        /// <exception cref="System.ArgumentNullException">value;Cannot add null object to PropertyDataCollection</exception>
        public void Insert(int index, PropertyData page)
        {
            if (page == null)
            {
                throw new ArgumentNullException("value", "Cannot add null object to PropertyDataCollection");
            }
            _propertyList.Insert(index, page);
        }

        /// <summary>
        /// Removes the <see cref="T:System.Collections.Generic.IList`1" /> item at the specified index.
        /// </summary>
        /// <param name="index">The zero-based index of the item to remove.</param>
        public void RemoveAt(int index)
        {
            _propertyList.RemoveAt(index);
        }

        /// <summary>
        /// Gets or sets the element at the specified index.
        /// </summary>
        /// <param name="index">The index.</param>
        /// <returns></returns>
        /// <exception cref="System.ArgumentException">Must be of type PropertyData</exception>
        object IList.this[int index]
        {
            get { return this[index]; }
            set
            {
                PropertyData propertyData = value as PropertyData;
                if (propertyData == null)
                {
                    throw new ArgumentException("Must be of type PropertyData");
                }
                this[index] = propertyData;
            }
        }

        /// <summary>
        /// Removes the range.
        /// </summary>
        /// <param name="index">The index.</param>
        /// <param name="count">The count.</param>
        public void RemoveRange(int index, int count)
        {
            _propertyList.RemoveRange(index, count);
        }

        /// <summary>
        /// Sorts the specified comparer.
        /// </summary>
        /// <param name="comparer">The comparer.</param>
        public void Sort(IComparer<PropertyData> comparer)
        {
            _propertyList.Sort(comparer);
        }

        /// <summary>
        /// Copy the PropertyDataCollection into a new variable.
        /// Note: this is a shallow copy — the PropertyData elements are shared.
        /// </summary>
        public PropertyDataCollection Copy ()
        {
            return new PropertyDataCollection (_propertyList);
        }

        /// <summary>
        /// Adds the specified property.
        /// </summary>
        /// <param name="page">The property.</param>
        /// <exception cref="System.ArgumentNullException">value;Cannot add null object to PropertyDataCollection</exception>
        public void Add(PropertyData page)
        {
            if (page == null)
            {
                throw new ArgumentNullException("value", "Cannot add null object to PropertyDataCollection");
            }
            _propertyList.Add(page);
        }

        /// <summary>
        /// Determines whether the <see cref="T:System.Collections.Generic.ICollection`1" /> contains a specific value.
        /// </summary>
        /// <param name="item">The object to locate in the <see cref="T:System.Collections.Generic.ICollection`1" />.</param>
        /// <returns>
        /// true if <paramref name="item" /> is found in the <see cref="T:System.Collections.Generic.ICollection`1" />; otherwise, false.
        /// </returns>
        public bool Contains(PropertyData item)
        {
            return _propertyList.Contains(item);
        }

        /// <summary>
        /// Removes the specified property.
        /// </summary>
        /// <param name="page">The property.</param>
        /// <returns></returns>
        public bool Remove(PropertyData page)
        {
            return _propertyList.Remove(page);
        }

        /// <summary>
        /// Copies the elements of the <see cref="T:System.Collections.ICollection" /> to an <see cref="T:System.Array" />, starting at a particular <see cref="T:System.Array" /> index.
        /// </summary>
        /// <param name="array">The one-dimensional <see cref="T:System.Array" /> that is the destination of the elements copied from <see cref="T:System.Collections.ICollection" />. The <see cref="T:System.Array" /> must have zero-based indexing.</param>
        /// <param name="index">The zero-based index in <paramref name="array" /> at which copying begins.</param>
        void ICollection.CopyTo(Array array, int index)
        {
            _propertyList.ToArray().CopyTo(array, index);
        }

        /// <summary>
        /// Copies the automatic.
        /// </summary>
        /// <param name="array">The array.</param>
        /// <param name="index">The index.</param>
        public void CopyTo(PropertyData[] array, int index)
        {
            _propertyList.CopyTo(array, index);
        }

        /// <summary>
        /// Determines the index of a specific item in the <see cref="T:System.Collections.Generic.IList`1" />.
        /// </summary>
        /// <param name="item">The object to locate in the <see cref="T:System.Collections.Generic.IList`1" />.</param>
        /// <returns>
        /// The index of <paramref name="item" /> if found in the list; otherwise, -1.
        /// </returns>
        public int IndexOf(PropertyData item)
        {
            return _propertyList.IndexOf(item);
        }

        /// <summary>
        /// Adds an item to the <see cref="T:System.Collections.IList" />.
        /// </summary>
        /// <param name="value">The object to add to the <see cref="T:System.Collections.IList" />.</param>
        /// <returns>
        /// The position into which the new element was inserted, or -1 to indicate that the item was not inserted into the collection,
        /// </returns>
        /// <exception cref="System.ArgumentException">Must be of type PropertyData;value</exception>
        public int Add(object value)
        {
            PropertyData property = value as PropertyData;
            if (property == null)
            {
                throw new ArgumentException("Must be of type PropertyData", "value");
            }

            Add(property);
            return _propertyList.Count - 1;
        }

        /// <summary>
        /// Determines whether the <see cref="T:System.Collections.IList" /> contains a specific value.
        /// </summary>
        /// <param name="value">The object to locate in the <see cref="T:System.Collections.IList" />.</param>
        /// <returns>
        /// true if the <see cref="T:System.Object" /> is found in the <see cref="T:System.Collections.IList" />; otherwise, false.
        /// </returns>
        public bool Contains(object value)
        {
            PropertyData propertyData = value as PropertyData;
            return propertyData != null && Contains(propertyData);
        }

        /// <summary>
        /// Determines the index of a specific item in the <see cref="T:System.Collections.IList" />.
        /// </summary>
        /// <param name="value">The object to locate in the <see cref="T:System.Collections.IList" />.</param>
        /// <returns>
        /// The index of <paramref name="value" /> if found in the list; otherwise, -1.
        /// </returns>
        public int IndexOf(object value)
        {
            PropertyData propertyData = value as PropertyData;
            return propertyData == null ? -1 : IndexOf(propertyData);
        }

        /// <summary>
        /// Inserts an item to the <see cref="T:System.Collections.IList" /> at the specified index.
        /// </summary>
        /// <param name="index">The zero-based index at which <paramref name="value" /> should be inserted.</param>
        /// <param name="value">The object to insert into the <see cref="T:System.Collections.IList" />.</param>
        /// <exception cref="System.ArgumentException">Must be of type PropertyData;value</exception>
        public void Insert(int index, object value)
        {
            PropertyData propertyData = value as PropertyData;
            if (propertyData == null)
            {
                throw new ArgumentException("Must be of type PropertyData", "value");
            }

            Insert(index, propertyData);
        }

        /// <summary>
        /// Removes the first occurrence of a specific object from the <see cref="T:System.Collections.IList" />.
        /// Silently ignores values that are not PropertyData.
        /// </summary>
        /// <param name="value">The object to remove from the <see cref="T:System.Collections.IList" />.</param>
        public void Remove(object value)
        {
            PropertyData propertyData = value as PropertyData;
            if (propertyData == null)
            {
                return;
            }

            Remove(propertyData);
        }

        /// <summary>
        /// Returns an enumerator that iterates through a collection.
        /// </summary>
        /// <returns>
        /// An <see cref="T:System.Collections.IEnumerator" /> object that can be used to iterate through the collection.
        /// </returns>
        IEnumerator IEnumerable.GetEnumerator()
        {
            return _propertyList.GetEnumerator();
        }

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>
        /// A <see cref="T:System.Collections.Generic.IEnumerator`1" /> that can be used to iterate through the collection.
        /// </returns>
        public IEnumerator<PropertyData> GetEnumerator()
        {
            return _propertyList.GetEnumerator();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Reflection;
using Xunit;

namespace System.Linq.Expressions.Tests
{
    /// <summary>
    /// Tests for Expression.PostIncrementAssign: the expression's value is the
    /// operand's value BEFORE the increment, and the incremented value is
    /// stored back into the operand (variable, property, or array element).
    /// Shared data sources and helper types come from IncDecAssignTests.
    /// </summary>
    public class PostIncrementAssignTests : IncDecAssignTests
    {
        // Post-increment evaluates to the pre-increment value.
        [Theory]
        [PerCompilationType(nameof(Int16sAndIncrements))]
        [PerCompilationType(nameof(NullableInt16sAndIncrements))]
        [PerCompilationType(nameof(UInt16sAndIncrements))]
        [PerCompilationType(nameof(NullableUInt16sAndIncrements))]
        [PerCompilationType(nameof(Int32sAndIncrements))]
        [PerCompilationType(nameof(NullableInt32sAndIncrements))]
        [PerCompilationType(nameof(UInt32sAndIncrements))]
        [PerCompilationType(nameof(NullableUInt32sAndIncrements))]
        [PerCompilationType(nameof(Int64sAndIncrements))]
        [PerCompilationType(nameof(NullableInt64sAndIncrements))]
        [PerCompilationType(nameof(UInt64sAndIncrements))]
        [PerCompilationType(nameof(NullableUInt64sAndIncrements))]
        [PerCompilationType(nameof(DecimalsAndIncrements))]
        [PerCompilationType(nameof(NullableDecimalsAndIncrements))]
        [PerCompilationType(nameof(SinglesAndIncrements))]
        [PerCompilationType(nameof(NullableSinglesAndIncrements))]
        [PerCompilationType(nameof(DoublesAndIncrements))]
        [PerCompilationType(nameof(NullableDoublesAndIncrements))]
        public void ReturnsCorrectValues(Type type, object value, object _, bool useInterpreter)
        {
            ParameterExpression variable = Expression.Variable(type);
            BlockExpression block = Expression.Block(
                new[] { variable },
                Expression.Assign(variable, Expression.Constant(value, type)),
                Expression.PostIncrementAssign(variable)
                );
            Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(Expression.Constant(value, type), block)).Compile(useInterpreter)());
        }

        // After the expression completes, the variable holds the incremented value.
        [Theory]
        [PerCompilationType(nameof(Int16sAndIncrements))]
        [PerCompilationType(nameof(NullableInt16sAndIncrements))]
        [PerCompilationType(nameof(UInt16sAndIncrements))]
        [PerCompilationType(nameof(NullableUInt16sAndIncrements))]
        [PerCompilationType(nameof(Int32sAndIncrements))]
        [PerCompilationType(nameof(NullableInt32sAndIncrements))]
        [PerCompilationType(nameof(UInt32sAndIncrements))]
        [PerCompilationType(nameof(NullableUInt32sAndIncrements))]
        [PerCompilationType(nameof(Int64sAndIncrements))]
        [PerCompilationType(nameof(NullableInt64sAndIncrements))]
        [PerCompilationType(nameof(UInt64sAndIncrements))]
        [PerCompilationType(nameof(NullableUInt64sAndIncrements))]
        [PerCompilationType(nameof(DecimalsAndIncrements))]
        [PerCompilationType(nameof(NullableDecimalsAndIncrements))]
        [PerCompilationType(nameof(SinglesAndIncrements))]
        [PerCompilationType(nameof(NullableSinglesAndIncrements))]
        [PerCompilationType(nameof(DoublesAndIncrements))]
        [PerCompilationType(nameof(NullableDoublesAndIncrements))]
        public void AssignsCorrectValues(Type type, object value, object result, bool useInterpreter)
        {
            ParameterExpression variable = Expression.Variable(type);
            LabelTarget target = Expression.Label(type);
            BlockExpression block = Expression.Block(
                new[] { variable },
                Expression.Assign(variable, Expression.Constant(value, type)),
                Expression.PostIncrementAssign(variable),
                Expression.Return(target, variable),
                Expression.Label(target, Expression.Default(type))
                );
            Assert.True(Expression.Lambda<Func<bool>>(Expression.Equal(Expression.Constant(result, type), block)).Compile(useInterpreter)());
        }

        // Incrementing NaN stays NaN (both the returned and stored value).
        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void SingleNanToNan(bool useInterpreter)
        {
            TestPropertyClass<float> instance = new TestPropertyClass<float>();
            instance.TestInstance = float.NaN;
            Assert.True(float.IsNaN(
                Expression.Lambda<Func<float>>(
                    Expression.PostIncrementAssign(
                        Expression.Property(
                            Expression.Constant(instance),
                            typeof(TestPropertyClass<float>),
                            "TestInstance"
                            )
                        )
                    ).Compile(useInterpreter)()
                ));
            Assert.True(float.IsNaN(instance.TestInstance));
        }

        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void DoubleNanToNan(bool useInterpreter)
        {
            TestPropertyClass<double> instance = new TestPropertyClass<double>();
            instance.TestInstance = double.NaN;
            Assert.True(double.IsNaN(
                Expression.Lambda<Func<double>>(
                    Expression.PostIncrementAssign(
                        Expression.Property(
                            Expression.Constant(instance),
                            typeof(TestPropertyClass<double>),
                            "TestInstance"
                            )
                        )
                    ).Compile(useInterpreter)()
                ));
            Assert.True(double.IsNaN(instance.TestInstance));
        }

        // Incrementing a value at its type's MaxValue throws OverflowException.
        [Theory]
        [PerCompilationType(nameof(IncrementOverflowingValues))]
        public void OverflowingValuesThrow(object value, bool useInterpreter)
        {
            ParameterExpression variable = Expression.Variable(value.GetType());
            Action overflow = Expression.Lambda<Action>(
                Expression.Block(
                    typeof(void),
                    new[] { variable },
                    Expression.Assign(variable, Expression.Constant(value)),
                    Expression.PostIncrementAssign(variable)
                    )
                ).Compile(useInterpreter);
            Assert.Throws<OverflowException>(overflow);
        }

        // Types with no increment semantics are rejected at factory time.
        [Theory]
        [MemberData(nameof(UnincrementableAndUndecrementableTypes))]
        public void InvalidOperandType(Type type)
        {
            ParameterExpression variable = Expression.Variable(type);
            Assert.Throws<InvalidOperationException>(() => Expression.PostIncrementAssign(variable));
        }

        // With a user-supplied method the post-increment still returns the
        // pre-increment value ("hello") and stores the method's result.
        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void MethodCorrectResult(bool useInterpreter)
        {
            ParameterExpression variable = Expression.Variable(typeof(string));
            BlockExpression block = Expression.Block(
                new[] { variable },
                Expression.Assign(variable, Expression.Constant("hello")),
                Expression.PostIncrementAssign(variable, typeof(IncDecAssignTests).GetTypeInfo().GetDeclaredMethod("SillyMethod"))
                );
            Assert.Equal("hello", Expression.Lambda<Func<string>>(block).Compile(useInterpreter)());
        }

        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void MethodCorrectAssign(bool useInterpreter)
        {
            ParameterExpression variable = Expression.Variable(typeof(string));
            LabelTarget target = Expression.Label(typeof(string));
            BlockExpression block = Expression.Block(
                new[] { variable },
                Expression.Assign(variable, Expression.Constant("hello")),
                Expression.PostIncrementAssign(variable, typeof(IncDecAssignTests).GetTypeInfo().GetDeclaredMethod("SillyMethod")),
                Expression.Return(target, variable),
                Expression.Label(target, Expression.Default(typeof(string)))
                );
            Assert.Equal("Eggplant", Expression.Lambda<Func<string>>(block).Compile(useInterpreter)());
        }

        [Fact]
        public void IncorrectMethodType()
        {
            Expression variable = Expression.Variable(typeof(int));
            MethodInfo method = typeof(IncDecAssignTests).GetTypeInfo().GetDeclaredMethod("SillyMethod");
            Assert.Throws<InvalidOperationException>(() => Expression.PostIncrementAssign(variable, method));
        }

        [Fact]
        public void IncorrectMethodParameterCount()
        {
            Expression variable = Expression.Variable(typeof(string));
            MethodInfo method = typeof(object).GetTypeInfo().GetDeclaredMethod("ReferenceEquals");
            Assert.Throws<ArgumentException>(() => Expression.PostIncrementAssign(variable, method));
        }

        [Fact]
        public void IncorrectMethodReturnType()
        {
            Expression variable = Expression.Variable(typeof(int));
            MethodInfo method = typeof(IncDecAssignTests).GetTypeInfo().GetDeclaredMethod("GetString");
            Assert.Throws<ArgumentException>(() => Expression.PostIncrementAssign(variable, method));
        }

        // Post-increment works against static properties, instance properties
        // and array elements, not just locals.
        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void StaticMemberAccessCorrect(bool useInterpreter)
        {
            TestPropertyClass<long>.TestStatic = 2L;
            Assert.Equal(
                2L,
                Expression.Lambda<Func<long>>(
                    Expression.PostIncrementAssign(
                        Expression.Property(null, typeof(TestPropertyClass<long>), "TestStatic")
                        )
                    ).Compile(useInterpreter)()
                );
            Assert.Equal(3L, TestPropertyClass<long>.TestStatic);
        }

        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void InstanceMemberAccessCorrect(bool useInterpreter)
        {
            TestPropertyClass<int> instance = new TestPropertyClass<int>();
            instance.TestInstance = 2;
            Assert.Equal(
                2,
                Expression.Lambda<Func<int>>(
                    Expression.PostIncrementAssign(
                        Expression.Property(
                            Expression.Constant(instance),
                            typeof(TestPropertyClass<int>),
                            "TestInstance"
                            )
                        )
                    ).Compile(useInterpreter)()
                );
            Assert.Equal(3, instance.TestInstance);
        }

        [Theory]
        [ClassData(typeof(CompilationTypes))]
        public void ArrayAccessCorrect(bool useInterpreter)
        {
            int[] array = new int[1];
            array[0] = 2;
            Assert.Equal(
                2,
                Expression.Lambda<Func<int>>(
                    Expression.PostIncrementAssign(
                        Expression.ArrayAccess(Expression.Constant(array), Expression.Constant(0))
                        )
                    ).Compile(useInterpreter)()
                );
            Assert.Equal(3, array[0]);
        }

        // PostIncrementAssign nodes are reducible to simpler expression trees.
        [Fact]
        public void CanReduce()
        {
            ParameterExpression variable = Expression.Variable(typeof(int));
            UnaryExpression op = Expression.PostIncrementAssign(variable);
            Assert.True(op.CanReduce);
            Assert.NotSame(op, op.ReduceAndCheck());
        }

        [Fact]
        public void NullOperand()
        {
            Assert.Throws<ArgumentNullException>("expression", () => Expression.PostIncrementAssign(null));
        }

        [Fact]
        public void UnwritableOperand()
        {
            Assert.Throws<ArgumentException>("expression", () => Expression.PostIncrementAssign(Expression.Constant(1)));
        }

        [Fact]
        public void UnreadableOperand()
        {
            Expression value = Expression.Property(null, typeof(Unreadable<int>), "WriteOnly");
            Assert.Throws<ArgumentException>("expression", () => Expression.PostIncrementAssign(value));
        }

        // Update() returns the same node when given the same operand,
        // and a new node otherwise.
        [Fact]
        public void UpdateSameOperandSameNode()
        {
            UnaryExpression op = Expression.PostIncrementAssign(Expression.Variable(typeof(int)));
            Assert.Same(op, op.Update(op.Operand));
        }

        [Fact]
        public void UpdateDiffOperandDiffNode()
        {
            UnaryExpression op = Expression.PostIncrementAssign(Expression.Variable(typeof(int)));
            Assert.NotSame(op, op.Update(Expression.Variable(typeof(int))));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
**
**
** Purpose: This class will encapsulate a byte and provide an
** Object representation of it.
**
** ===========================================================*/

using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;

namespace System
{
    // The Byte class extends the Value class and
    // provides object representation of the byte primitive type.
    //
    [System.Runtime.InteropServices.StructLayout(LayoutKind.Sequential)]
    [Serializable]
    [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
    public struct Byte : IComparable, IFormattable, IComparable<Byte>, IEquatable<Byte>, IConvertible
    {
        // The single wrapped value. The field name is part of the binary
        // serialization contract for this type.
        private byte m_value; // Do not rename (binary serialization)

        // The maximum value that a Byte may represent: 255.
        public const byte MaxValue = (byte)0xFF;

        // The minimum value that a Byte may represent: 0.
        public const byte MinValue = 0;

        // Compares this object to another object, returning an integer that
        // indicates the relationship.
        // Returns a value less than zero if this object is less than value;
        // null is considered to be less than any instance.
        // If object is not of type byte, this method throws an ArgumentException.
        //
        public int CompareTo(Object value)
        {
            if (value == null)
            {
                return 1;
            }
            if (!(value is Byte))
            {
                throw new ArgumentException(SR.Arg_MustBeByte);
            }

            // Both operands are in 0..255, so the int subtraction cannot
            // overflow and its sign encodes the ordering.
            return m_value - (((Byte)value).m_value);
        }

        // Strongly-typed comparison; same sign convention as CompareTo(Object).
        public int CompareTo(Byte value)
        {
            return m_value - value;
        }

        // Determines whether two Byte objects are equal.
        public override bool Equals(Object obj)
        {
            if (!(obj is Byte))
            {
                return false;
            }
            return m_value == ((Byte)obj).m_value;
        }

        // Strongly-typed equality; avoids the boxing of Equals(Object).
        [NonVersionable]
        public bool Equals(Byte obj)
        {
            return m_value == obj;
        }

        // Gets a hash code for this instance. The value itself is a perfect
        // hash over the 0..255 range.
        public override int GetHashCode()
        {
            return m_value;
        }

        // Parses using NumberStyles.Integer and the current culture's format info.
        [Pure]
        public static byte Parse(String s)
        {
            return Parse(s, NumberStyles.Integer, null);
        }

        [Pure]
        public static byte Parse(String s, NumberStyles style)
        {
            UInt32.ValidateParseStyleInteger(style);
            return Parse(s, style, null);
        }

        [Pure]
        public static byte Parse(String s, IFormatProvider provider)
        {
            return Parse(s, NumberStyles.Integer, provider);
        }

        // Parses an unsigned byte from a String in the given style. If
        // a NumberFormatInfo isn't specified, the current culture's
        // NumberFormatInfo is assumed.
        // Throws OverflowException when the parsed value is outside 0..255
        // (the Int32 parse is range-checked below) or when the underlying
        // Int32 parse itself overflows.
        [Pure]
        public static byte Parse(String s, NumberStyles style, IFormatProvider provider)
        {
            UInt32.ValidateParseStyleInteger(style);
            int i = 0;
            try
            {
                // Parse as a full Int32 first, then narrow with an explicit
                // range check so the exception carries a Byte-specific message.
                i = FormatProvider.ParseInt32(s, style, provider);
            }
            catch (OverflowException e)
            {
                throw new OverflowException(SR.Overflow_Byte, e);
            }

            if (i < MinValue || i > MaxValue) throw new OverflowException(SR.Overflow_Byte);
            return (byte)i;
        }

        // Non-throwing variant of Parse(String); result is 0 on failure.
        public static bool TryParse(String s, out Byte result)
        {
            return TryParse(s, NumberStyles.Integer, null /* NumberFormatInfo.CurrentInfo */, out result);
        }

        // Non-throwing variant of Parse(String, NumberStyles, IFormatProvider).
        // Note: an invalid style still throws (ValidateParseStyleInteger);
        // only bad input text and out-of-range values return false.
        public static bool TryParse(String s, NumberStyles style, IFormatProvider provider, out Byte result)
        {
            UInt32.ValidateParseStyleInteger(style);
            result = 0;
            int i;
            if (!FormatProvider.TryParseInt32(s, style, provider, out i))
            {
                return false;
            }
            if (i < MinValue || i > MaxValue)
            {
                return false;
            }
            result = (byte)i;
            return true;
        }

        [Pure]
        public override String ToString()
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return FormatProvider.FormatInt32(m_value, null, null);
        }

        [Pure]
        public String ToString(String format)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            return FormatProvider.FormatInt32(m_value, format, null);
        }

        [Pure]
        public String ToString(IFormatProvider provider)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            // A null provider is deliberately tolerated (current culture is used).
            // if (provider == null)
            // throw new ArgumentNullException("provider");
            return FormatProvider.FormatInt32(m_value, null, provider);
        }

        [Pure]
        public String ToString(String format, IFormatProvider provider)
        {
            Contract.Ensures(Contract.Result<String>() != null);
            // if (provider == null)
            // throw new ArgumentNullException("provider");
            return FormatProvider.FormatInt32(m_value, format, provider);
        }

        //
        // IConvertible implementation
        //
        [Pure]
        public TypeCode GetTypeCode()
        {
            return TypeCode.Byte;
        }

        // All widening conversions delegate to Convert; ToSByte and ToChar may
        // throw OverflowException for values their target type cannot hold.
        bool IConvertible.ToBoolean(IFormatProvider provider)
        {
            return Convert.ToBoolean(m_value);
        }

        char IConvertible.ToChar(IFormatProvider provider)
        {
            return Convert.ToChar(m_value);
        }

        sbyte IConvertible.ToSByte(IFormatProvider provider)
        {
            return Convert.ToSByte(m_value);
        }

        byte IConvertible.ToByte(IFormatProvider provider)
        {
            return m_value;
        }

        short IConvertible.ToInt16(IFormatProvider provider)
        {
            return Convert.ToInt16(m_value);
        }

        ushort IConvertible.ToUInt16(IFormatProvider provider)
        {
            return Convert.ToUInt16(m_value);
        }

        int IConvertible.ToInt32(IFormatProvider provider)
        {
            return Convert.ToInt32(m_value);
        }

        uint IConvertible.ToUInt32(IFormatProvider provider)
        {
            return Convert.ToUInt32(m_value);
        }

        long IConvertible.ToInt64(IFormatProvider provider)
        {
            return Convert.ToInt64(m_value);
        }

        ulong IConvertible.ToUInt64(IFormatProvider provider)
        {
            return Convert.ToUInt64(m_value);
        }

        float IConvertible.ToSingle(IFormatProvider provider)
        {
            return Convert.ToSingle(m_value);
        }

        double IConvertible.ToDouble(IFormatProvider provider)
        {
            return Convert.ToDouble(m_value);
        }

        Decimal IConvertible.ToDecimal(IFormatProvider provider)
        {
            return Convert.ToDecimal(m_value);
        }

        // Byte-to-DateTime has no meaningful conversion; always throws.
        DateTime IConvertible.ToDateTime(IFormatProvider provider)
        {
            throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "Byte", "DateTime"));
        }

        Object IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return Convert.DefaultToType((IConvertible)this, type, provider);
        }
    }
}
#region Using Statements
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.Xna.Framework;
using JigLibX.Physics;
using JigLibX.Geometry;
using JigLibX.Collision;
using JigLibX.Utils;
using System.Collections.ObjectModel;
#endregion

namespace JigLibX.Collision
{
    /// <summary>
    /// The user of CollisionSystem creates an object derived from
    /// CollisionFunctor and passes it in to
    /// DetectCollisions. For every collision detected
    /// the functor gets called so that the user can decide if they want
    /// to keep the collision.
    /// </summary>
    public abstract class CollisionFunctor
    {
#if WINDOWS_PHONE
        /// <summary>
        /// Skins are passed back because there may be more than one skin
        /// per body, and the user can always get the body from the skin.
        /// </summary>
        /// <param name="collDetectInfo"></param>
        /// <param name="dirToBody0"></param>
        /// <param name="pointInfos"></param>
        /// <param name="numCollPts">Number of valid entries in pointInfos.</param>
        public abstract void CollisionNotify(ref CollDetectInfo collDetectInfo, ref Vector3 dirToBody0, SmallCollPointInfo[] pointInfos, int numCollPts);
#else
        /// <summary>
        /// Skins are passed back because there may be more than one skin
        /// per body, and the user can always get the body from the skin.
        /// </summary>
        /// <param name="collDetectInfo"></param>
        /// <param name="dirToBody0"></param>
        /// <param name="pointInfos"></param>
        /// <param name="numCollPts">Number of valid entries in pointInfos.</param>
        public unsafe abstract void CollisionNotify(ref CollDetectInfo collDetectInfo, ref Vector3 dirToBody0, SmallCollPointInfo* pointInfos, int numCollPts);
#endif
    }

    /// <summary>
    /// The user can create an object derived from this and pass it in
    /// to CollisionSystem.DetectCollisions to indicate whether a pair
    /// of skins should be considered.
    /// </summary>
    public abstract class CollisionSkinPredicate2
    {
        /// <summary>
        /// Decides whether a pair of skins should be considered for collision
        /// or not.
        /// </summary>
        /// <param name="skin0"></param>
        /// <param name="skin1"></param>
        /// <returns>True if the pair should be considered otherwise false.</returns>
        public abstract bool ConsiderSkinPair(CollisionSkin skin0, CollisionSkin skin1);
    }

    /// <summary>
    /// The user can create an object derived from this and pass it in
    /// to the ray/segment intersection functions to indicate whether certain
    /// skins should be considered.
    /// </summary>
    public abstract class CollisionSkinPredicate1
    {
        /// <summary>
        /// Decides whether a CollisionSkin should be considered while
        /// doing SegmentIntersecting tests or not.
        /// </summary>
        /// <param name="skin0">Skin to be considered.</param>
        /// <returns>True if the skin should be considered otherwise false.</returns>
        public abstract bool ConsiderSkin(CollisionSkin skin0);
    }

    /// <summary>
    /// A skin can ask to get a callback when a collision is detected; this will be called.
    /// If it returns false, the contact points will not be generated.
    /// </summary>
    /// <param name="skin0">The skin that had the callback on</param>
    /// <param name="skin1">The other skin that we have collided with (may be null, though that would be odd...)</param>
    /// <returns>False to inhibit contact point generation</returns>
    public delegate bool CollisionCallbackFn(CollisionSkin skin0, CollisionSkin skin1);

    /// <summary>
    /// Interface to a class that will contain a list of all the
    /// collision objects in the world, and it will provide ways of
    /// detecting collisions between other objects and these collision
    /// objects.
/// </summary>
    public abstract class CollisionSystem
    {
        // Maps a packed pair of skin type ids (type0 << 16 | type1) to the
        // narrow-phase functor registered for that pair; populated by
        // RegisterCollDetectFunctor under both key orderings.
        private Dictionary<int,DetectFunctor> detectionFunctors = new Dictionary<int,DetectFunctor>();

        private bool useSweepTests = false;
        private MaterialTable materialTable = new MaterialTable();

        // Built-in narrow-phase detectors; static, so shared by every
        // CollisionSystem instance.
        private static CollDetectBoxBox boxBoxCollDetector = new CollDetectBoxBox();
        private static CollDetectBoxStaticMesh boxStaticMeshCollDetector = new CollDetectBoxStaticMesh();
        private static CollDetectCapsuleBox capsuleBoxCollDetector = new CollDetectCapsuleBox();
        private static CollDetectCapsuleCapsule capsuleCapsuleCollDetector = new CollDetectCapsuleCapsule();
        private static CollDetectSphereCapsule sphereCapsuleCollDetector = new CollDetectSphereCapsule();
        private static CollDetectSphereBox sphereBoxCollDetector = new CollDetectSphereBox();
        private static CollDetectSphereSphere sphereSphereCollDetector = new CollDetectSphereSphere();
        private static CollDetectBoxHeightmap boxHeightmapCollDetector = new CollDetectBoxHeightmap();
        private static CollDetectSphereHeightmap sphereHeightmapCollDetector = new CollDetectSphereHeightmap();
        private static CollDetectCapsuleHeightmap capsuleHeightmapCollDetector = new CollDetectCapsuleHeightmap();
        private static CollDetectSphereStaticMesh sphereStaticMeshCollDetector = new CollDetectSphereStaticMesh();
        private static CollDetectCapsuleStaticMesh capsuleStaticMeshCollDetector = new CollDetectCapsuleStaticMesh();
        private static CollDetectBoxPlane boxPlaneCollDetector = new CollDetectBoxPlane();
        private static CollDetectSpherePlane spherePlaneCollDetector = new CollDetectSpherePlane();
        private static CollDetectCapsulePlane capsulePlaneCollDetector = new CollDetectCapsulePlane();

        /// <summary>
        /// Constructor initializes the default DetectFunctors. Other DetectFunctors can be added with
        /// RegisterCollDetectFunctor.
        /// </summary>
        public CollisionSystem()
        {
            RegisterCollDetectFunctor(boxBoxCollDetector);
            RegisterCollDetectFunctor(boxStaticMeshCollDetector);
            RegisterCollDetectFunctor(capsuleBoxCollDetector);
            RegisterCollDetectFunctor(capsuleCapsuleCollDetector);
            RegisterCollDetectFunctor(sphereBoxCollDetector);
            RegisterCollDetectFunctor(sphereSphereCollDetector);
            RegisterCollDetectFunctor(sphereCapsuleCollDetector);
            RegisterCollDetectFunctor(boxHeightmapCollDetector);
            RegisterCollDetectFunctor(sphereHeightmapCollDetector);
            RegisterCollDetectFunctor(capsuleHeightmapCollDetector);
            RegisterCollDetectFunctor(sphereStaticMeshCollDetector);
            RegisterCollDetectFunctor(capsuleStaticMeshCollDetector);
            RegisterCollDetectFunctor(boxPlaneCollDetector);
            RegisterCollDetectFunctor(spherePlaneCollDetector);
            RegisterCollDetectFunctor(capsulePlaneCollDetector);
        }

        /// <summary>
        /// Don't add skins whilst doing detection!
        /// </summary>
        /// <param name="collisionSkin"></param>
        public abstract void AddCollisionSkin(CollisionSkin collisionSkin);

        /// <summary>
        /// Don't remove skins whilst doing detection!
        /// </summary>
        /// <param name="collisionSkin"></param>
        public abstract bool RemoveCollisionSkin(CollisionSkin collisionSkin);

        /// <summary>
        /// CollisionSkins
        /// </summary>
        public abstract ReadOnlyCollection<CollisionSkin> CollisionSkins { get; }

        /// <summary>
        /// Whenever a skin changes position it will call this to let us
        /// update our internal state.
        /// </summary>
        /// <param name="skin"></param>
        public abstract void CollisionSkinMoved(CollisionSkin skin);

        /// <summary>
        /// Detects all collisions between the body and all the registered
        /// collision skins (which should have already had their
        /// positions/bounding volumes etc updated). For each potential
        /// pair of skins then the predicate (if it exists) will be called
        /// to see whether or not to continue. If the skins are closer
        /// than collTolerance (+ve value means report objects that aren't
        /// quite colliding) then the functor will get called.
        /// You can't just loop over all your bodies calling this, because
        /// that will double-detect collisions. Use DetectAllCollisions for
        /// that.
        /// </summary>
        public abstract void DetectCollisions(Body body, CollisionFunctor collisionFunctor, CollisionSkinPredicate2 collisionPredicate, float collTolerance);

        /// <summary>
        /// As DetectCollisions but detects for all bodies, testing each pair
        /// only once
        /// </summary>
        /// <param name="bodies"></param>
        /// <param name="collisionFunctor"></param>
        /// <param name="collisionPredicate"></param>
        /// <param name="collTolerance"></param>
        public abstract void DetectAllCollisions(List<Body> bodies, CollisionFunctor collisionFunctor, CollisionSkinPredicate2 collisionPredicate, float collTolerance);

        /// <summary>
        /// type0/1 could be from tCollisionSkinType or they could be
        /// larger values. The collision detection table will get extended
        /// as necessary. You only need to register the function once
        /// (i.e. not for type0, type1 then type1, type 0).
        /// </summary>
        /// <param name="f"></param>
        public void RegisterCollDetectFunctor(DetectFunctor f)
        {
            // Register under both (Type0,Type1) and (Type1,Type0) orderings so
            // lookup order never matters; an existing registration for a key
            // is kept (first registration wins).
            int key01 = f.Type0 << 16 | f.Type1;
            int key10 = f.Type1 << 16 | f.Type0;
            if (!detectionFunctors.ContainsKey(key01))
                detectionFunctors.Add(key01,f);
            if (!detectionFunctors.ContainsKey(key10))
                detectionFunctors.Add(key10,f);
        }

        /// <summary>
        /// Get the previously registered function for the pair type. May
        /// return null when no functor was registered for the pair.
        /// </summary>
        /// <param name="type0"></param>
        /// <param name="type1"></param>
        /// <returns>DetectFunctor, or null if the pair type is unknown.</returns>
        public DetectFunctor GetCollDetectFunctor(int type0, int type1)
        {
            DetectFunctor functor;
            // One lookup suffices: RegisterCollDetectFunctor stored both orderings.
            int key01 = type0 << 16 | type1;
            if (detectionFunctors.TryGetValue(key01, out functor))
                return functor;
            else
                return null;
        }

        /// <summary>
        /// Intersect a segment with the world. If non-zero the predicate
        /// allows certain skins to be excluded
        /// </summary>
        /// <param name="fracOut"></param>
        /// <param name="skinOut"></param>
        /// <param name="posOut"></param>
        /// <param name="normalOut"></param>
        /// <param name="seg"></param>
        /// <param name="collisionPredicate"></param>
        /// <returns>bool</returns>
        public abstract bool SegmentIntersect(out float fracOut, out CollisionSkin skinOut, out Vector3 posOut, out Vector3 normalOut, Segment seg, CollisionSkinPredicate1 collisionPredicate);

        /// <summary>
        /// Gets or Sets whether collision tests should use sweep or overlap
        /// </summary>
        public bool UseSweepTests
        {
            get { return useSweepTests; }
            set { useSweepTests = value; }
        }

        /// <summary>
        /// Gets the current MaterialTable of the CollisionSystem.
        /// </summary>
        public MaterialTable MaterialTable
        {
            get { return materialTable; }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Globalization;
using Xunit;

namespace System.Tests
{
    // xUnit test suite for System.UInt32: construction, bounds, comparison,
    // equality, formatting (ToString) and parsing (Parse/TryParse).
    public static class UInt32Tests
    {
        [Fact]
        public static void Ctor_Empty()
        {
            // Default construction yields zero.
            var i = new uint();
            Assert.Equal((uint)0, i);
        }

        [Fact]
        public static void Ctor_Value()
        {
            uint i = 41;
            Assert.Equal((uint)41, i);
        }

        [Fact]
        public static void MaxValue()
        {
            Assert.Equal(0xFFFFFFFF, uint.MaxValue);
        }

        [Fact]
        public static void MinValue()
        {
            Assert.Equal((uint)0, uint.MinValue);
        }

        [Theory]
        [InlineData((uint)234, (uint)234, 0)]
        [InlineData((uint)234, uint.MinValue, 1)]
        [InlineData((uint)234, (uint)0, 1)]
        [InlineData((uint)234, (uint)123, 1)]
        [InlineData((uint)234, (uint)456, -1)]
        [InlineData((uint)234, uint.MaxValue, -1)]
        [InlineData((uint)234, null, 1)]
        public static void CompareTo(uint i, object value, int expected)
        {
            // Only the sign of CompareTo is contractual, hence Math.Sign.
            if (value is uint)
            {
                Assert.Equal(expected, Math.Sign(i.CompareTo((uint)value)));
            }
            // The non-generic IComparable path also accepts null (compares greater).
            IComparable comparable = i;
            Assert.Equal(expected, Math.Sign(comparable.CompareTo(value)));
        }

        [Fact]
        public static void CompareTo_ObjectNotUInt_ThrowsArgumentException()
        {
            IComparable comparable = (uint)234;
            AssertExtensions.Throws<ArgumentException>(null, () => comparable.CompareTo("a")); // Obj is not a uint
            AssertExtensions.Throws<ArgumentException>(null, () => comparable.CompareTo(234)); // Obj is not a uint
        }

        [Theory]
        [InlineData((uint)789, (uint)789, true)]
        [InlineData((uint)788, (uint)0, false)]
        [InlineData((uint)0, (uint)0, true)]
        [InlineData((uint)789, null, false)]
        [InlineData((uint)789, "789", false)]
        [InlineData((uint)789, 789, false)]
        public static void Equals(uint i1, object obj, bool expected)
        {
            if (obj is uint)
            {
                uint i2 = (uint)obj;
                Assert.Equal(expected, i1.Equals(i2));
                // Equal values must hash equally; uint's hash is its own value.
                Assert.Equal(expected, i1.GetHashCode().Equals(i2.GetHashCode()));
                Assert.Equal((int)i1, i1.GetHashCode());
            }
            // Boxed comparisons: a uint never equals null, a string, or an int.
            Assert.Equal(expected, i1.Equals(obj));
        }

        public static IEnumerable<object[]> ToString_TestData()
        {
            NumberFormatInfo emptyFormat = NumberFormatInfo.CurrentInfo;
            yield return new object[] { (uint)0, "G", emptyFormat, "0" };
            yield return new object[] { (uint)4567, "G", emptyFormat, "4567" };
            yield return new object[] { uint.MaxValue, "G", emptyFormat, "4294967295" };

            yield return new object[] { (uint)0x2468, "x", emptyFormat, "2468" };
            yield return new object[] { (uint)2468, "N", emptyFormat, string.Format("{0:N}", 2468.00) };

            // Custom separators exercise culture-sensitive "N" formatting.
            NumberFormatInfo customFormat = new NumberFormatInfo();
            customFormat.NegativeSign = "#";
            customFormat.NumberDecimalSeparator = "~";
            customFormat.NumberGroupSeparator = "*";
            yield return new object[] { (uint)2468, "N", customFormat, "2*468~00" };
        }

        [Theory]
        [MemberData(nameof(ToString_TestData))]
        public static void ToString(uint i, string format, IFormatProvider provider, string expected)
        {
            // Format is case insensitive
            string upperFormat = format.ToUpperInvariant();
            string lowerFormat = format.ToLowerInvariant();

            string upperExpected = expected.ToUpperInvariant();
            string lowerExpected = expected.ToLowerInvariant();

            bool isDefaultProvider = (provider == null || provider == NumberFormatInfo.CurrentInfo);
            if (string.IsNullOrEmpty(format) || format.ToUpperInvariant() == "G")
            {
                if (isDefaultProvider)
                {
                    Assert.Equal(upperExpected, i.ToString());
                    Assert.Equal(upperExpected, i.ToString((IFormatProvider)null));
                }
                Assert.Equal(upperExpected, i.ToString(provider));
            }
            if (isDefaultProvider)
            {
                Assert.Equal(upperExpected, i.ToString(upperFormat));
                Assert.Equal(lowerExpected, i.ToString(lowerFormat));
                Assert.Equal(upperExpected, i.ToString(upperFormat, null));
                Assert.Equal(lowerExpected, i.ToString(lowerFormat, null));
            }
            Assert.Equal(upperExpected, i.ToString(upperFormat, provider));
            Assert.Equal(lowerExpected, i.ToString(lowerFormat, provider));
        }

        [Fact]
        public static void ToString_InvalidFormat_ThrowsFormatException()
        {
            uint i = 123;
            Assert.Throws<FormatException>(() => i.ToString("Y")); // Invalid format
            Assert.Throws<FormatException>(() => i.ToString("Y", null)); // Invalid format
        }

        public static IEnumerable<object[]> Parse_Valid_TestData()
        {
            NumberStyles defaultStyle = NumberStyles.Integer;
            NumberFormatInfo emptyFormat = new NumberFormatInfo();

            NumberFormatInfo customFormat = new NumberFormatInfo();
            customFormat.CurrencySymbol = "$";

            yield return new object[] { "0", defaultStyle, null, (uint)0 };
            yield return new object[] { "123", defaultStyle, null, (uint)123 };
            yield return new object[] { "+123", defaultStyle, null, (uint)123 };
            yield return new object[] { " 123 ", defaultStyle, null, (uint)123 };
            yield return new object[] { "4294967295", defaultStyle, null, 4294967295 };

            yield return new object[] { "12", NumberStyles.HexNumber, null, (uint)0x12 };
            yield return new object[] { "1000", NumberStyles.AllowThousands, null, (uint)1000 };

            yield return new object[] { "123", defaultStyle, emptyFormat, (uint)123 };

            yield return new object[] { "123", NumberStyles.Any, emptyFormat, (uint)123 };
            yield return new object[] { "12", NumberStyles.HexNumber, emptyFormat, (uint)0x12 };
            yield return new object[] { "abc", NumberStyles.HexNumber, emptyFormat, (uint)0xabc };
            yield return new object[] { "ABC", NumberStyles.HexNumber, emptyFormat, (uint)0xabc };
            yield return new object[] { "$1,000", NumberStyles.Currency, customFormat, (uint)1000 };
        }

        [Theory]
        [MemberData(nameof(Parse_Valid_TestData))]
        public static void Parse(string value, NumberStyles style, IFormatProvider provider, uint expected)
        {
            uint result;
            // If no style is specified, use the (String) or (String, IFormatProvider) overload
            if (style == NumberStyles.Integer)
            {
                Assert.True(uint.TryParse(value, out result));
                Assert.Equal(expected, result);

                Assert.Equal(expected, uint.Parse(value));

                // If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
                if (provider != null)
                {
                    Assert.Equal(expected, uint.Parse(value, provider));
                }
            }

            // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
            Assert.True(uint.TryParse(value, style, provider ?? new NumberFormatInfo(), out result));
            Assert.Equal(expected, result);

            // If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
            if (provider == null)
            {
                Assert.Equal(expected, uint.Parse(value, style));
            }
            Assert.Equal(expected, uint.Parse(value, style, provider ?? new NumberFormatInfo()));
        }

        public static IEnumerable<object[]> Parse_Invalid_TestData()
        {
            NumberStyles defaultStyle = NumberStyles.Integer;

            NumberFormatInfo customFormat = new NumberFormatInfo();
            customFormat.CurrencySymbol = "$";
            customFormat.NumberDecimalSeparator = ".";

            yield return new object[] { null, defaultStyle, null, typeof(ArgumentNullException) };
            yield return new object[] { "", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { " \t \n \r ", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "Garbage", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "abc", defaultStyle, null, typeof(FormatException) }; // Hex value
            yield return new object[] { "1E23", defaultStyle, null, typeof(FormatException) }; // Exponent
            yield return new object[] { "(123)", defaultStyle, null, typeof(FormatException) }; // Parentheses
            yield return new object[] { 100.ToString("C0"), defaultStyle, null, typeof(FormatException) }; // Currency
            yield return new object[] { 1000.ToString("N0"), defaultStyle, null, typeof(FormatException) }; // Thousands
            yield return new object[] { 678.90.ToString("F2"), defaultStyle, null, typeof(FormatException) }; // Decimal
            yield return new object[] { "+-123", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "-+123", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "+abc", NumberStyles.HexNumber, null, typeof(FormatException) };
            yield return new object[] { "-abc", NumberStyles.HexNumber, null, typeof(FormatException) };

            yield return new object[] { "- 123", defaultStyle, null, typeof(FormatException) };
            yield return new object[] { "+ 123", defaultStyle, null, typeof(FormatException) };

            yield return new object[] { "abc", NumberStyles.None, null, typeof(FormatException) }; // Hex value
            yield return new object[] { " 123 ", NumberStyles.None, null, typeof(FormatException) }; // Trailing and leading whitespace

            yield return new object[] { "678.90", defaultStyle, customFormat, typeof(FormatException) }; // Decimal

            yield return new object[] { "-1", defaultStyle, null, typeof(OverflowException) }; // < min value
            yield return new object[] { "4294967296", defaultStyle, null, typeof(OverflowException) }; // > max value
            yield return new object[] { "(123)", NumberStyles.AllowParentheses, null, typeof(OverflowException) }; // Parentheses = negative
        }

        [Theory]
        [MemberData(nameof(Parse_Invalid_TestData))]
        public static void Parse_Invalid(string value, NumberStyles style, IFormatProvider provider, Type exceptionType)
        {
            uint result;
            // If no style is specified, use the (String) or (String, IFormatProvider) overload
            if (style == NumberStyles.Integer)
            {
                Assert.False(uint.TryParse(value, out result));
                Assert.Equal(default(uint), result);

                Assert.Throws(exceptionType, () => uint.Parse(value));

                // If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
                if (provider != null)
                {
                    Assert.Throws(exceptionType, () => uint.Parse(value, provider));
                }
            }

            // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
            Assert.False(uint.TryParse(value, style, provider ?? new NumberFormatInfo(), out result));
            Assert.Equal(default(uint), result);

            // If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
            if (provider == null)
            {
                Assert.Throws(exceptionType, () => uint.Parse(value, style));
            }
            Assert.Throws(exceptionType, () => uint.Parse(value, style, provider ?? new NumberFormatInfo()));
        }

        [Theory]
        [InlineData(NumberStyles.HexNumber | NumberStyles.AllowParentheses)]
        [InlineData(unchecked((NumberStyles)0xFFFFFC00))]
        public static void TryParse_InvalidNumberStyle_ThrowsArgumentException(NumberStyles style)
        {
            // An invalid style throws (rather than returning false) from both
            // TryParse and Parse, and must leave the out value untouched.
            uint result = 0;
            Assert.Throws<ArgumentException>(() => uint.TryParse("1", style, null, out result));
            Assert.Equal(default(uint), result);

            Assert.Throws<ArgumentException>(() => uint.Parse("1", style));
            Assert.Throws<ArgumentException>(() => uint.Parse("1", style, null));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Runtime.InteropServices;
using System.ComponentModel;

namespace System.DirectoryServices.ActiveDirectory
{
    // Represents an inter-site transport (IP/RPC or SMTP) container in the
    // Active Directory configuration partition. Wraps a cached DirectoryEntry
    // for "CN=IP,..." or "CN=SMTP,...,CN=Inter-Site Transports,CN=Sites,<config>".
    public class ActiveDirectoryInterSiteTransport : IDisposable
    {
        private DirectoryContext _context = null;
        private DirectoryEntry _cachedEntry = null;
        private ActiveDirectoryTransportType _transport;
        private bool _disposed = false;

        // Lazy-load flags: the link/bridge collections are populated on first
        // access and cached thereafter.
        private bool _linkRetrieved = false;
        private bool _bridgeRetrieved = false;

        private ReadOnlySiteLinkCollection _siteLinkCollection = new ReadOnlySiteLinkCollection();
        private ReadOnlySiteLinkBridgeCollection _bridgeCollection = new ReadOnlySiteLinkBridgeCollection();

        // Internal: instances are created via FindByTransportType, which has
        // already validated the context and bound the entry.
        internal ActiveDirectoryInterSiteTransport(DirectoryContext context, ActiveDirectoryTransportType transport, DirectoryEntry entry)
        {
            _context = context;
            _transport = transport;
            _cachedEntry = entry;
        }

        // Binds to the requested transport container (IP or SMTP) under the
        // configuration naming context and returns a wrapper for it.
        public static ActiveDirectoryInterSiteTransport FindByTransportType(DirectoryContext context, ActiveDirectoryTransportType transport)
        {
            if (context == null)
                throw new ArgumentNullException("context");

            // if target is not specified, then we determine the target from the logon credential, so if it is a local user context, it should fail
            if ((context.Name == null) && (!context.isRootDomain()))
            {
                throw new ArgumentException(SR.ContextNotAssociatedWithDomain, "context");
            }

            // more validation for the context, if the target is not null, then it should be either forest name or server name
            if (context.Name != null)
            {
                if (!(context.isRootDomain() || context.isServer() || context.isADAMConfigSet()))
                    throw new ArgumentException(SR.NotADOrADAM, "context");
            }

            // NOTE(review): the argument name passed here is "value" although the
            // parameter is named "transport" — looks like a copy/paste; confirm
            // before changing (kept as-is to preserve the thrown message).
            if (transport < ActiveDirectoryTransportType.Rpc || transport > ActiveDirectoryTransportType.Smtp)
                throw new InvalidEnumArgumentException("value", (int)transport, typeof(ActiveDirectoryTransportType));

            // work with copy of the context
            context = new DirectoryContext(context);

            // bind to the rootdse to get the configurationnamingcontext
            DirectoryEntry de;

            try
            {
                de = DirectoryEntryManager.GetDirectoryEntry(context, WellKnownDN.RootDSE);
                string config = (string)PropertyManager.GetPropertyValue(context, de, PropertyManager.ConfigurationNamingContext);
                string containerDN = "CN=Inter-Site Transports,CN=Sites," + config;
                if (transport == ActiveDirectoryTransportType.Rpc)
                    containerDN = "CN=IP," + containerDN;
                else
                    containerDN = "CN=SMTP," + containerDN;
                de = DirectoryEntryManager.GetDirectoryEntry(context, containerDN);
            }
            catch (COMException e)
            {
                throw ExceptionHelper.GetExceptionFromCOMException(context, e);
            }
            catch (ActiveDirectoryObjectNotFoundException)
            {
                // this is the case where the context is a config set and we could not find an ADAM instance in that config set
                throw new ActiveDirectoryOperationException(SR.Format(SR.ADAMInstanceNotFoundInConfigSet, context.Name));
            }

            try
            {
                // Validate the container actually exists by forcing a fetch of
                // the "options" attribute.
                de.RefreshCache(new string[] { "options" });
            }
            catch (COMException e)
            {
                // 0x80072030 = ERROR_DS_NO_SUCH_OBJECT: the transport container was not found.
                if (e.ErrorCode == unchecked((int)0x80072030))
                {
                    // if it is ADAM and transport type is SMTP, throw NotSupportedException.
                    DirectoryEntry tmpDE = DirectoryEntryManager.GetDirectoryEntry(context, WellKnownDN.RootDSE);
                    if (Utils.CheckCapability(tmpDE, Capability.ActiveDirectoryApplicationMode) && transport == ActiveDirectoryTransportType.Smtp)
                    {
                        throw new NotSupportedException(SR.NotSupportTransportSMTP);
                    }

                    throw new ActiveDirectoryObjectNotFoundException(SR.Format(SR.TransportNotFound, transport.ToString()), typeof(ActiveDirectoryInterSiteTransport), transport.ToString());
                }
                else
                    throw ExceptionHelper.GetExceptionFromCOMException(context, e);
            }

            return new ActiveDirectoryInterSiteTransport(context, transport, de);
        }

        public ActiveDirectoryTransportType TransportType
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                return _transport;
            }
        }

        // Maps bit 0 of the "options" attribute
        // (NTDSTRANSPORT_OPT_IGNORE_SCHEDULES).
        public bool IgnoreReplicationSchedule
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                int option = 0;
                try
                {
                    if (_cachedEntry.Properties.Contains("options"))
                        option = (int)_cachedEntry.Properties["options"][0];
                }
                catch (COMException e)
                {
                    throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
                }

                // NTDSTRANSPORT_OPT_IGNORE_SCHEDULES ( 1 << 0 ) Schedules disabled
                if ((option & 0x1) != 0)
                    return true;
                else
                    return false;
            }
            set
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                int option = 0;
                try
                {
                    if (_cachedEntry.Properties.Contains("options"))
                        option = (int)_cachedEntry.Properties["options"][0];

                    // NTDSTRANSPORT_OPT_IGNORE_SCHEDULES ( 1 << 0 ) Schedules disabled
                    if (value)
                        option |= 0x1;
                    else
                        option &= (~(0x1));

                    // Change is only cached here; Save() commits it to the directory.
                    _cachedEntry.Properties["options"].Value = option;
                }
                catch (COMException e)
                {
                    throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
                }
            }
        }

        // Maps bit 1 of the "options" attribute, inverted: the bit set means
        // bridges are REQUIRED, i.e. site links are NOT all bridged.
        public bool BridgeAllSiteLinks
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                int option = 0;
                try
                {
                    if (_cachedEntry.Properties.Contains("options"))
                        option = (int)_cachedEntry.Properties["options"][0];
                }
                catch (COMException e)
                {
                    throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
                }

                // NTDSTRANSPORT_OPT_BRIDGES_REQUIRED (1 << 1 ) siteLink bridges are required
                // That is to say, if this bit is set, it means that all site links are not bridged and user needs to create specific bridge
                if ((option & 0x2) != 0)
                    return false;
                else
                    return true;
            }
            set
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                int option = 0;
                try
                {
                    if (_cachedEntry.Properties.Contains("options"))
                        option = (int)_cachedEntry.Properties["options"][0];

                    // NTDSTRANSPORT_OPT_BRIDGES_REQUIRED (1 << 1 ) siteLink bridges are required, all site links are not bridged
                    // That is to say, if this bit is set, it means that all site links are not bridged and user needs to create specific bridge
                    // if this bit is not set, all the site links are bridged
                    if (value)
                        option &= (~(0x2));
                    else
                        option |= 0x2;

                    // Change is only cached here; Save() commits it to the directory.
                    _cachedEntry.Properties["options"].Value = option;
                }
                catch (COMException e)
                {
                    throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
                }
            }
        }

        // Lazily enumerates the siteLink children of the transport container;
        // the result is cached until the object is disposed.
        public ReadOnlySiteLinkCollection SiteLinks
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                if (!_linkRetrieved)
                {
                    _siteLinkCollection.Clear();

                    ADSearcher adSearcher = new ADSearcher(_cachedEntry, "(&(objectClass=siteLink)(objectCategory=SiteLink))", new string[] { "cn" }, SearchScope.OneLevel);
                    SearchResultCollection results = null;

                    try
                    {
                        results = adSearcher.FindAll();
                    }
                    catch (COMException e)
                    {
                        throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
                    }

                    try
                    {
                        foreach (SearchResult result in results)
                        {
                            DirectoryEntry connectionEntry = result.GetDirectoryEntry();
                            string cn = (string)PropertyManager.GetSearchResultPropertyValue(result, PropertyManager.Cn);
                            ActiveDirectorySiteLink link = new ActiveDirectorySiteLink(_context, cn, _transport, true, connectionEntry);
                            _siteLinkCollection.Add(link);
                        }
                    }
                    finally
                    {
                        // SearchResultCollection holds native resources; must dispose.
                        results.Dispose();
                    }

                    _linkRetrieved = true;
                }
                return _siteLinkCollection;
            }
        }

        // Lazily enumerates the siteLinkBridge children of the transport
        // container; the result is cached until the object is disposed.
        public ReadOnlySiteLinkBridgeCollection SiteLinkBridges
        {
            get
            {
                if (_disposed)
                    throw new ObjectDisposedException(GetType().Name);

                if (!_bridgeRetrieved)
                {
                    _bridgeCollection.Clear();

                    ADSearcher adSearcher = new ADSearcher(_cachedEntry, "(&(objectClass=siteLinkBridge)(objectCategory=SiteLinkBridge))", new string[] { "cn" }, SearchScope.OneLevel);
                    SearchResultCollection results = null;

                    try
                    {
                        results = adSearcher.FindAll();
                    }
                    catch (COMException e)
                    {
                        throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
                    }

                    try
                    {
                        foreach (SearchResult result in results)
                        {
                            DirectoryEntry connectionEntry = result.GetDirectoryEntry();
                            string cn = (string)PropertyManager.GetSearchResultPropertyValue(result, PropertyManager.Cn);
                            ActiveDirectorySiteLinkBridge bridge = new ActiveDirectorySiteLinkBridge(_context, cn, _transport, true);
                            bridge.cachedEntry = connectionEntry;
                            _bridgeCollection.Add(bridge);
                        }
                    }
                    finally
                    {
                        // SearchResultCollection holds native resources; must dispose.
                        results.Dispose();
                    }

                    _bridgeRetrieved = true;
                }
                return _bridgeCollection;
            }
        }

        // Commits any cached property changes (see the option-bit setters above)
        // back to the directory.
        public void Save()
        {
            if (_disposed)
                throw new ObjectDisposedException(GetType().Name);

            try
            {
                _cachedEntry.CommitChanges();
            }
            catch (COMException e)
            {
                throw ExceptionHelper.GetExceptionFromCOMException(_context, e);
            }
        }

        // Returns a NEW DirectoryEntry for the same path; the caller owns and
        // must dispose it (the internal cached entry is not handed out).
        public DirectoryEntry GetDirectoryEntry()
        {
            if (_disposed)
                throw new ObjectDisposedException(GetType().Name);

            return DirectoryEntryManager.GetDirectoryEntryInternal(_context, _cachedEntry.Path);
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        public override string ToString()
        {
            if (_disposed)
                throw new ObjectDisposedException(GetType().Name);

            return _transport.ToString();
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing)
            {
                // free other state (managed objects)
                if (_cachedEntry != null)
                    _cachedEntry.Dispose();
            }

            // free your own state (unmanaged objects)
            _disposed = true;
        }
    }
}
using Lucene.Net.Support;
using System.Collections.Generic;

namespace Lucene.Net.Index
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Analyzer = Lucene.Net.Analysis.Analyzer;
    using Directory = Lucene.Net.Store.Directory;
    using Query = Lucene.Net.Search.Query;

    /// <summary>
    /// Tracks changes made through a delegated <see cref="Index.IndexWriter"/> so that
    /// <see cref="Search.ControlledRealTimeReopenThread{T}"/> can ensure specific
    /// changes are visible.  Create this class (passing your
    /// <see cref="Index.IndexWriter"/>) and hand it to
    /// <see cref="Search.ControlledRealTimeReopenThread{T}"/>.  Be sure to route all
    /// changes through this wrapper, otherwise
    /// <see cref="Search.ControlledRealTimeReopenThread{T}"/> won't know about them.
    /// <para/>
    /// @lucene.experimental
    /// </summary>
    public class TrackingIndexWriter
    {
        // Writer that every operation below is delegated to.
        private readonly IndexWriter _writer;

        // Monotonically increasing generation stamp.  It is advanced externally
        // via GetAndIncrementGeneration(); each mutating call returns the value
        // current once the delegated operation has completed.
        private readonly AtomicInt64 _indexingGen = new AtomicInt64(1);

        /// <summary>
        /// Creates a <see cref="TrackingIndexWriter"/> wrapping the
        /// provided <see cref="Index.IndexWriter"/>.
        /// </summary>
        public TrackingIndexWriter(IndexWriter writer)
        {
            _writer = writer;
        }

        // Generation observed as of when the delegated call finished.
        private long Stamp()
        {
            return _indexingGen.Get();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.UpdateDocument(Term, IEnumerable{IIndexableField}, Analyzer)"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long UpdateDocument(Term t, IEnumerable<IIndexableField> d, Analyzer a)
        {
            _writer.UpdateDocument(t, d, a);
            return Stamp();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.UpdateDocument(Term, IEnumerable{IIndexableField})"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long UpdateDocument(Term t, IEnumerable<IIndexableField> d)
        {
            _writer.UpdateDocument(t, d);
            return Stamp();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.UpdateDocuments(Term, IEnumerable{IEnumerable{IIndexableField}}, Analyzer)"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long UpdateDocuments(Term t, IEnumerable<IEnumerable<IIndexableField>> docs, Analyzer a)
        {
            _writer.UpdateDocuments(t, docs, a);
            return Stamp();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.UpdateDocuments(Term, IEnumerable{IEnumerable{IIndexableField}})"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long UpdateDocuments(Term t, IEnumerable<IEnumerable<IIndexableField>> docs)
        {
            _writer.UpdateDocuments(t, docs);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.DeleteDocuments(Term)"/> and
        /// returns the generation that reflects this change.
        /// </summary>
        public virtual long DeleteDocuments(Term t)
        {
            _writer.DeleteDocuments(t);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.DeleteDocuments(Term[])"/> and
        /// returns the generation that reflects this change.
        /// </summary>
        public virtual long DeleteDocuments(params Term[] terms)
        {
            _writer.DeleteDocuments(terms);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.DeleteDocuments(Query)"/> and
        /// returns the generation that reflects this change.
        /// </summary>
        public virtual long DeleteDocuments(Query q)
        {
            _writer.DeleteDocuments(q);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.DeleteDocuments(Query[])"/> and
        /// returns the generation that reflects this change.
        /// </summary>
        public virtual long DeleteDocuments(params Query[] queries)
        {
            _writer.DeleteDocuments(queries);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.DeleteAll()"/> and returns the
        /// generation that reflects this change.
        /// </summary>
        public virtual long DeleteAll()
        {
            _writer.DeleteAll();
            return Stamp();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.AddDocument(IEnumerable{IIndexableField}, Analyzer)"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long AddDocument(IEnumerable<IIndexableField> d, Analyzer a)
        {
            _writer.AddDocument(d, a);
            return Stamp();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.AddDocuments(IEnumerable{IEnumerable{IIndexableField}}, Analyzer)"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long AddDocuments(IEnumerable<IEnumerable<IIndexableField>> docs, Analyzer a)
        {
            _writer.AddDocuments(docs, a);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.AddDocument(IEnumerable{IIndexableField})"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long AddDocument(IEnumerable<IIndexableField> d)
        {
            _writer.AddDocument(d);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.AddDocuments(IEnumerable{IEnumerable{IIndexableField}})"/>
        /// and returns the generation that reflects this change.
        /// </summary>
        public virtual long AddDocuments(IEnumerable<IEnumerable<IIndexableField>> docs)
        {
            _writer.AddDocuments(docs);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.AddIndexes(Directory[])"/> and
        /// returns the generation that reflects this change.
        /// </summary>
        public virtual long AddIndexes(params Directory[] dirs)
        {
            _writer.AddIndexes(dirs);
            return Stamp();
        }

        /// <summary>
        /// Calls <see cref="IndexWriter.AddIndexes(IndexReader[])"/> and
        /// returns the generation that reflects this change.
        /// </summary>
        public virtual long AddIndexes(params IndexReader[] readers)
        {
            _writer.AddIndexes(readers);
            return Stamp();
        }

        /// <summary>
        /// Return the current generation being indexed.
        /// </summary>
        public virtual long Generation
        {
            get { return Stamp(); }
        }

        /// <summary>
        /// Return the wrapped <see cref="Index.IndexWriter"/>.
        /// </summary>
        public virtual IndexWriter IndexWriter
        {
            get { return _writer; }
        }

        /// <summary>
        /// Return and increment current gen.
        /// <para/>
        /// @lucene.internal
        /// </summary>
        public virtual long GetAndIncrementGeneration()
        {
            return _indexingGen.GetAndIncrement();
        }

        /// <summary>
        /// Calls
        /// <see cref="IndexWriter.TryDeleteDocument(IndexReader, int)"/> and
        /// returns the generation that reflects this change, or -1 if the
        /// delete did not succeed.
        /// </summary>
        public virtual long TryDeleteDocument(IndexReader reader, int docID)
        {
            return _writer.TryDeleteDocument(reader, docID) ? Stamp() : -1;
        }
    }
}
#region S# License
/******************************************************************************************
NOTICE!!!  This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.

Project: SampleITCH.SampleITCHPublic
File: MainWindow.xaml.cs
Created: 2015, 11, 11, 2:32 PM

Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace SampleITCH
{
	using System;
	using System.ComponentModel;
	using System.Windows;

	using Ecng.Common;
	using Ecng.Xaml;

	using StockSharp.BusinessEntities;
	using StockSharp.ITCH;
	using StockSharp.Localization;
	using StockSharp.Logging;
	using StockSharp.Messages;
	using StockSharp.Xaml;

	/// <summary>
	/// Main window of the ITCH connector sample: creates an <see cref="ItchTrader"/>,
	/// wires its events to the securities/trades/order-log child windows and
	/// manages connect/disconnect from the UI.
	/// </summary>
	public partial class MainWindow
	{
		// Current connection state, toggled by ChangeConnectStatus.
		private bool _isConnected;
		// True once the Trader event handlers have been attached (done on first connect only).
		private bool _initialized;

		// The connector instance; created in the constructor, disposed in OnClosing.
		public ItchTrader Trader;

		// Hideable child windows that display the incoming market data.
		private readonly SecuritiesWindow _securitiesWindow = new SecuritiesWindow();
		private readonly TradesWindow _tradesWindow = new TradesWindow();
		private readonly OrdersLogWindow _orderLogWindow = new OrdersLogWindow();
		private readonly LogManager _logManager = new LogManager();

		/// <summary>
		/// Initializes the window, the child windows, logging and the connector.
		/// </summary>
		public MainWindow()
		{
			InitializeComponent();

			// Child windows hide instead of closing so they can be re-shown later.
			_orderLogWindow.MakeHideable();
			_tradesWindow.MakeHideable();
			_securitiesWindow.MakeHideable();

			Title = Title.Put("ITCH");

			Instance = this;

			// Log both to a file and to the on-screen monitor.
			_logManager.Listeners.Add(new FileLogListener { LogDirectory = "StockSharp_ITCH" });
			_logManager.Listeners.Add(new GuiLogListener(Monitor));

			// create connector
			Trader = new ItchTrader
			{
				LogLevel = LogLevels.Debug,
				CreateDepthFromOrdersLog = true
			};

			_logManager.Sources.Add(Trader);

			// Expose the adapter settings in the property grid.
			Settings.SelectedObject = Trader.MarketDataAdapter;
		}

		/// <summary>
		/// Tears down child windows and disposes the connector when the main window closes.
		/// </summary>
		protected override void OnClosing(CancelEventArgs e)
		{
			_orderLogWindow.DeleteHideable();
			_tradesWindow.DeleteHideable();
			_securitiesWindow.DeleteHideable();

			_securitiesWindow.Close();
			_tradesWindow.Close();
			_orderLogWindow.Close();

			if (Trader != null)
				Trader.Dispose();

			base.OnClosing(e);
		}

		// Singleton-style access to the running window, set in the constructor.
		public static MainWindow Instance { get; private set; }

		/// <summary>
		/// Handles the Connect/Disconnect button: validates credentials, wires the
		/// connector events on first use, then connects or disconnects.
		/// </summary>
		private void ConnectClick(object sender, RoutedEventArgs e)
		{
			if (!_isConnected)
			{
				// Credentials must be filled in before connecting.
				if (Trader.Login.IsEmpty())
				{
					MessageBox.Show(this, LocalizedStrings.Str2974);
					return;
				}
				else if (Trader.Password.IsEmpty())
				{
					MessageBox.Show(this, LocalizedStrings.Str2975);
					return;
				}

				if (!_initialized)
				{
					_initialized = true;

					// configure reconnection working time
					Trader.ReConnectionSettings.WorkingTime = ExchangeBoard.Forts.WorkingTime;

					Trader.Restored += () => this.GuiAsync(() =>
					{
						// update gui labels
						ChangeConnectStatus(true);
						MessageBox.Show(this, LocalizedStrings.Str2958);
					});

					// subscribe on connection successfully event
					Trader.Connected += () =>
					{
						// set flag (connection is established)
						_isConnected = true;

						// update gui labels
						this.GuiAsync(() => ChangeConnectStatus(true));
					};

					Trader.Disconnected += () => this.GuiAsync(() => ChangeConnectStatus(false));

					// subscribe on connection error event
					Trader.ConnectionError += error => this.GuiAsync(() =>
					{
						// update gui labels
						ChangeConnectStatus(false);
						MessageBox.Show(this, error.ToString(), LocalizedStrings.Str2959);
					});

					// subscribe on error event
					Trader.Error += error =>
						this.GuiAsync(() => MessageBox.Show(this, error.ToString(), LocalizedStrings.Str2955));

					// subscribe on error of market data subscription event
					Trader.MarketDataSubscriptionFailed += (security, msg, error) =>
						this.GuiAsync(() => MessageBox.Show(this, error.ToString(), LocalizedStrings.Str2956Params.Put(msg.DataType, security)));

					// Route incoming data into the child windows' grids.
					Trader.NewSecurities += _securitiesWindow.SecurityPicker.Securities.AddRange;
					Trader.NewTrades += _tradesWindow.TradeGrid.Trades.AddRange;
					Trader.NewOrderLogItems += _orderLogWindow.OrderLogGrid.LogItems.AddRange;

					// Guard so the order-log subscription below is sent only once,
					// even if LookupSecuritiesResult fires multiple times.
					var subscribed = false;

					//if (AllDepths.IsChecked == true)
					{
						Trader.LookupSecuritiesResult += (error, securities) =>
						{
							if (subscribed)
								return;

							subscribed = true;

							// Subscribe to the order log once securities are known.
							Trader.SendInMessage(new MarketDataMessage
							{
								IsSubscribe = true,
								DataType = MarketDataTypes.OrderLog,
								TransactionId = Trader.TransactionIdGenerator.GetNextId(),
							});
						};
					}

					// set market data provider
					_securitiesWindow.SecurityPicker.MarketDataProvider = Trader;

					ShowSecurities.IsEnabled = ShowTrades.IsEnabled = ShowOrdersLog.IsEnabled = true;
				}

				Trader.Connect();
			}
			else
			{
				Trader.Disconnect();
			}
		}

		/// <summary>
		/// Updates the connection flag and the Connect button caption.
		/// </summary>
		private void ChangeConnectStatus(bool isConnected)
		{
			_isConnected = isConnected;
			ConnectBtn.Content = isConnected ? LocalizedStrings.Disconnect : LocalizedStrings.Connect;
		}

		private void ShowSecuritiesClick(object sender, RoutedEventArgs e)
		{
			ShowOrHide(_securitiesWindow);
		}

		private void ShowTradesClick(object sender, RoutedEventArgs e)
		{
			ShowOrHide(_tradesWindow);
		}

		private void ShowOrdersLogClick(object sender, RoutedEventArgs e)
		{
			ShowOrHide(_orderLogWindow);
		}

		/// <summary>
		/// Toggles the visibility of the given child window.
		/// </summary>
		private static void ShowOrHide(Window window)
		{
			if (window == null)
				throw new ArgumentNullException(nameof(window));

			if (window.Visibility == Visibility.Visible)
				window.Hide();
			else
				window.Show();
		}
	}
}