context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
using YAF.Lucene.Net.Store;
using YAF.Lucene.Net.Support;
using YAF.Lucene.Net.Util.Packed;
using System;
using System.Diagnostics;
namespace YAF.Lucene.Net.Codecs.Lucene41
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Encode all values in normal area with fixed bit width,
/// which is determined by the max value in this block.
/// </summary>
internal sealed class ForUtil
{
    /// <summary>
    /// Special number of bits per value used whenever all values to encode are equal.
    /// </summary>
    // Sentinel written as the block header byte; a real block always has numBits in [1, 32].
    private static readonly int ALL_VALUES_EQUAL = 0;

    /// <summary>
    /// Upper limit of the number of bytes that might be required to stored
    /// <see cref="Lucene41PostingsFormat.BLOCK_SIZE"/> encoded values.
    /// </summary>
    // Worst case: 32 bits (4 bytes) per value, i.e. no compression at all.
    public static readonly int MAX_ENCODED_SIZE = Lucene41PostingsFormat.BLOCK_SIZE * 4;

    /// <summary>
    /// Upper limit of the number of values that might be decoded in a single call to
    /// <see cref="ReadBlock(IndexInput, byte[], int[])"/>. Although values after
    /// <see cref="Lucene41PostingsFormat.BLOCK_SIZE"/> are garbage, it is necessary to allocate value buffers
    /// whose size is >= MAX_DATA_SIZE to avoid <see cref="IndexOutOfRangeException"/>s.
    /// </summary>
    public static readonly int MAX_DATA_SIZE = LoadMaxDataSize();

    // Computes MAX_DATA_SIZE by probing every supported (version, format, bitsPerValue)
    // combination and keeping the largest decoder output size for one block.
    private static int LoadMaxDataSize() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
    {
        int maxDataSize = 0;
        for (int version = PackedInt32s.VERSION_START; version <= PackedInt32s.VERSION_CURRENT; version++)
        {
            foreach (PackedInt32s.Format format in PackedInt32s.Format.Values/* Enum.GetValues(typeof(PackedInts.Format))*/)
            {
                for (int bpv = 1; bpv <= 32; ++bpv)
                {
                    if (!format.IsSupported(bpv))
                    {
                        continue;
                    }
                    PackedInt32s.IDecoder decoder = PackedInt32s.GetDecoder(format, version, bpv);
                    int iterations = ComputeIterations(decoder);
                    // A decoder may emit slightly more than BLOCK_SIZE values per full run of iterations.
                    maxDataSize = Math.Max(maxDataSize, iterations * decoder.ByteValueCount);
                }
            }
        }
        return maxDataSize;
    }

    /// <summary>
    /// Compute the number of iterations required to decode <see cref="Lucene41PostingsFormat.BLOCK_SIZE"/>
    /// values with the provided <see cref="PackedInt32s.IDecoder"/>.
    /// </summary>
    // Ceiling division: enough iterations so that iterations * ByteValueCount >= BLOCK_SIZE.
    private static int ComputeIterations(PackedInt32s.IDecoder decoder)
    {
        return (int)Math.Ceiling((float)Lucene41PostingsFormat.BLOCK_SIZE / decoder.ByteValueCount);
    }

    /// <summary>
    /// Compute the number of bytes required to encode a block of values that require
    /// <paramref name="bitsPerValue"/> bits per value with format <paramref name="format"/>.
    /// </summary>
    private static int EncodedSize(PackedInt32s.Format format, int packedIntsVersion, int bitsPerValue)
    {
        long byteCount = format.ByteCount(packedIntsVersion, Lucene41PostingsFormat.BLOCK_SIZE, bitsPerValue);
        // A single block can never exceed int range; the cast below relies on this.
        Debug.Assert(byteCount >= 0 && byteCount <= int.MaxValue, byteCount.ToString());
        return (int)byteCount;
    }

    // All four tables are indexed by bitsPerValue (1..32); slot 0 is unused.
    private readonly int[] encodedSizes;
    private readonly PackedInt32s.IEncoder[] encoders;
    private readonly PackedInt32s.IDecoder[] decoders;
    private readonly int[] iterations;

    /// <summary>
    /// Create a new <see cref="ForUtil"/> instance and save state into <paramref name="out"/>.
    /// </summary>
    // Writer-side constructor: chooses the fastest format per bit width and persists the
    // chosen (format, bitsPerValue) pairs so the reader-side constructor can reproduce them.
    internal ForUtil(float acceptableOverheadRatio, DataOutput @out)
    {
        @out.WriteVInt32(PackedInt32s.VERSION_CURRENT);
        encodedSizes = new int[33];
        encoders = new PackedInt32s.IEncoder[33];
        decoders = new PackedInt32s.IDecoder[33];
        iterations = new int[33];
        for (int bpv = 1; bpv <= 32; ++bpv)
        {
            PackedInt32s.FormatAndBits formatAndBits = PackedInt32s.FastestFormatAndBits(Lucene41PostingsFormat.BLOCK_SIZE, bpv, acceptableOverheadRatio);
            Debug.Assert(formatAndBits.Format.IsSupported(formatAndBits.BitsPerValue));
            Debug.Assert(formatAndBits.BitsPerValue <= 32);
            encodedSizes[bpv] = EncodedSize(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
            encoders[bpv] = PackedInt32s.GetEncoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
            decoders[bpv] = PackedInt32s.GetDecoder(formatAndBits.Format, PackedInt32s.VERSION_CURRENT, formatAndBits.BitsPerValue);
            iterations[bpv] = ComputeIterations(decoders[bpv]);
            // Pack format id (high bits) and bitsPerValue-1 (low 5 bits) into a single vint.
            @out.WriteVInt32(formatAndBits.Format.Id << 5 | (formatAndBits.BitsPerValue - 1));
        }
    }

    /// <summary>
    /// Restore a <see cref="ForUtil"/> from a <see cref="DataInput"/>.
    /// </summary>
    // Reader-side constructor: decodes the (format, bitsPerValue) pairs written by the
    // writer-side constructor above.
    internal ForUtil(DataInput @in)
    {
        int packedIntsVersion = @in.ReadVInt32();
        PackedInt32s.CheckVersion(packedIntsVersion);
        encodedSizes = new int[33];
        encoders = new PackedInt32s.IEncoder[33];
        decoders = new PackedInt32s.IDecoder[33];
        iterations = new int[33];
        for (int bpv = 1; bpv <= 32; ++bpv)
        {
            var code = @in.ReadVInt32();
            // Inverse of the packing in the writer constructor: high bits = format id,
            // low 5 bits = bitsPerValue - 1.
            var formatId = (int)((uint)code >> 5);
            var bitsPerValue = (code & 31) + 1;
            PackedInt32s.Format format = PackedInt32s.Format.ById(formatId);
            Debug.Assert(format.IsSupported(bitsPerValue));
            encodedSizes[bpv] = EncodedSize(format, packedIntsVersion, bitsPerValue);
            encoders[bpv] = PackedInt32s.GetEncoder(format, packedIntsVersion, bitsPerValue);
            decoders[bpv] = PackedInt32s.GetDecoder(format, packedIntsVersion, bitsPerValue);
            iterations[bpv] = ComputeIterations(decoders[bpv]);
        }
    }

    /// <summary>
    /// Write a block of data (<c>For</c> format).
    /// </summary>
    /// <param name="data"> The data to write. </param>
    /// <param name="encoded"> A buffer to use to encode data. </param>
    /// <param name="out"> The destination output. </param>
    /// <exception cref="System.IO.IOException"> If there is a low-level I/O error. </exception>
    internal void WriteBlock(int[] data, byte[] encoded, IndexOutput @out)
    {
        if (IsAllEqual(data))
        {
            // Degenerate block: header byte 0 followed by the single repeated value.
            @out.WriteByte((byte)(sbyte)ALL_VALUES_EQUAL);
            @out.WriteVInt32(data[0]);
            return;
        }
        int numBits = BitsRequired(data);
        Debug.Assert(numBits > 0 && numBits <= 32, numBits.ToString());
        PackedInt32s.IEncoder encoder = encoders[numBits];
        int iters = iterations[numBits];
        Debug.Assert(iters * encoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
        int encodedSize = encodedSizes[numBits];
        Debug.Assert(iters * encoder.ByteBlockCount >= encodedSize);
        // Header byte is the bit width; the decoder uses it to pick the matching tables.
        @out.WriteByte((byte)numBits);
        encoder.Encode(data, 0, encoded, 0, iters);
        @out.WriteBytes(encoded, encodedSize);
    }

    /// <summary>
    /// Read the next block of data (<c>For</c> format).
    /// </summary>
    /// <param name="in"> The input to use to read data. </param>
    /// <param name="encoded"> A buffer that can be used to store encoded data. </param>
    /// <param name="decoded"> Where to write decoded data. </param>
    /// <exception cref="System.IO.IOException"> If there is a low-level I/O error. </exception>
    internal void ReadBlock(IndexInput @in, byte[] encoded, int[] decoded)
    {
        int numBits = @in.ReadByte();
        Debug.Assert(numBits <= 32, numBits.ToString());
        if (numBits == ALL_VALUES_EQUAL)
        {
            // Degenerate block written by WriteBlock: one vint repeated BLOCK_SIZE times.
            int value = @in.ReadVInt32();
            Arrays.Fill(decoded, 0, Lucene41PostingsFormat.BLOCK_SIZE, value);
            return;
        }
        int encodedSize = encodedSizes[numBits];
        @in.ReadBytes(encoded, 0, encodedSize);
        PackedInt32s.IDecoder decoder = decoders[numBits];
        int iters = iterations[numBits];
        Debug.Assert(iters * decoder.ByteValueCount >= Lucene41PostingsFormat.BLOCK_SIZE);
        decoder.Decode(encoded, 0, decoded, 0, iters);
    }

    /// <summary>
    /// Skip the next block of data.
    /// </summary>
    /// <param name="in"> The input where to read data. </param>
    /// <exception cref="System.IO.IOException"> If there is a low-level I/O error. </exception>
    internal void SkipBlock(IndexInput @in)
    {
        int numBits = @in.ReadByte();
        if (numBits == ALL_VALUES_EQUAL)
        {
            // Consume (and discard) the repeated value.
            @in.ReadVInt32();
            return;
        }
        Debug.Assert(numBits > 0 && numBits <= 32, numBits.ToString());
        int encodedSize = encodedSizes[numBits];
        // The encoded size is fixed per bit width, so the block can be skipped without decoding.
        @in.Seek(@in.GetFilePointer() + encodedSize);
    }

    // Returns true when every value of the block equals data[0], enabling the
    // ALL_VALUES_EQUAL fast path in WriteBlock. Only the first BLOCK_SIZE entries matter.
    private static bool IsAllEqual(int[] data)
    {
        int v = data[0];
        for (int i = 1; i < Lucene41PostingsFormat.BLOCK_SIZE; ++i)
        {
            if (data[i] != v)
            {
                return false;
            }
        }
        return true;
    }

    /// <summary>
    /// Compute the number of bits required to serialize any of the longs in
    /// <paramref name="data"/>.
    /// </summary>
    private static int BitsRequired(int[] data)
    {
        long or = 0;
        for (int i = 0; i < Lucene41PostingsFormat.BLOCK_SIZE; ++i)
        {
            // Values must be non-negative; the OR of all values bounds the required bit width.
            Debug.Assert(data[i] >= 0);
            or |= (uint)data[i];
        }
        return PackedInt32s.BitsRequired(or);
    }
}
}
| |
/*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2
* Contact: devcenter@docusign.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace DocuSign.eSign.Model
{
/// <summary>
/// IntegratedUserInfoList — a page of <see cref="UserInfo"/> results with paging metadata.
/// (Swagger-generated model; see file header.)
/// </summary>
[DataContract]
public partial class IntegratedUserInfoList : IEquatable<IntegratedUserInfoList>, IValidatableObject
{
    /// <summary>
    /// Initializes a new instance of the <see cref="IntegratedUserInfoList" /> class.
    /// </summary>
    public IntegratedUserInfoList()
    {
        // Empty Constructor
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="IntegratedUserInfoList" /> class.
    /// </summary>
    /// <param name="AllUsersSelected">.</param>
    /// <param name="EndPosition">The last position in the result set. .</param>
    /// <param name="NextUri">The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. .</param>
    /// <param name="PreviousUri">The URI to the previous chunk of records based on the search request. If the first records are shown, this is null. .</param>
    /// <param name="ResultSetSize">The number of results returned in this response. .</param>
    /// <param name="StartPosition">Starting position of the current result set..</param>
    /// <param name="TotalSetSize">The total number of items available in the result set. This will always be greater than or equal to the value of the property returning the results in the in the response..</param>
    /// <param name="Users">.</param>
    public IntegratedUserInfoList(string AllUsersSelected = default(string), string EndPosition = default(string), string NextUri = default(string), string PreviousUri = default(string), string ResultSetSize = default(string), string StartPosition = default(string), string TotalSetSize = default(string), List<UserInfo> Users = default(List<UserInfo>))
    {
        this.AllUsersSelected = AllUsersSelected;
        this.EndPosition = EndPosition;
        this.NextUri = NextUri;
        this.PreviousUri = PreviousUri;
        this.ResultSetSize = ResultSetSize;
        this.StartPosition = StartPosition;
        this.TotalSetSize = TotalSetSize;
        this.Users = Users;
    }
    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="allUsersSelected", EmitDefaultValue=false)]
    public string AllUsersSelected { get; set; }
    /// <summary>
    /// The last position in the result set.
    /// </summary>
    /// <value>The last position in the result set. </value>
    [DataMember(Name="endPosition", EmitDefaultValue=false)]
    public string EndPosition { get; set; }
    /// <summary>
    /// The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null.
    /// </summary>
    /// <value>The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. </value>
    [DataMember(Name="nextUri", EmitDefaultValue=false)]
    public string NextUri { get; set; }
    /// <summary>
    /// The URI to the previous chunk of records based on the search request. If the first records are shown, this is null.
    /// </summary>
    /// <value>The URI to the previous chunk of records based on the search request. If the first records are shown, this is null.</value>
    // NOTE: the generated doc previously said "postal code for the billing address" — a
    // copy/paste artifact from another model; the wire name "previousUri" is unchanged.
    [DataMember(Name="previousUri", EmitDefaultValue=false)]
    public string PreviousUri { get; set; }
    /// <summary>
    /// The number of results returned in this response.
    /// </summary>
    /// <value>The number of results returned in this response. </value>
    [DataMember(Name="resultSetSize", EmitDefaultValue=false)]
    public string ResultSetSize { get; set; }
    /// <summary>
    /// Starting position of the current result set.
    /// </summary>
    /// <value>Starting position of the current result set.</value>
    [DataMember(Name="startPosition", EmitDefaultValue=false)]
    public string StartPosition { get; set; }
    /// <summary>
    /// The total number of items available in the result set. This will always be greater than or equal to the value of the property returning the results in the in the response.
    /// </summary>
    /// <value>The total number of items available in the result set. This will always be greater than or equal to the value of the property returning the results in the in the response.</value>
    [DataMember(Name="totalSetSize", EmitDefaultValue=false)]
    public string TotalSetSize { get; set; }
    /// <summary>
    ///
    /// </summary>
    /// <value></value>
    [DataMember(Name="users", EmitDefaultValue=false)]
    public List<UserInfo> Users { get; set; }
    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        var sb = new StringBuilder();
        sb.Append("class IntegratedUserInfoList {\n");
        sb.Append("  AllUsersSelected: ").Append(AllUsersSelected).Append("\n");
        sb.Append("  EndPosition: ").Append(EndPosition).Append("\n");
        sb.Append("  NextUri: ").Append(NextUri).Append("\n");
        sb.Append("  PreviousUri: ").Append(PreviousUri).Append("\n");
        sb.Append("  ResultSetSize: ").Append(ResultSetSize).Append("\n");
        sb.Append("  StartPosition: ").Append(StartPosition).Append("\n");
        sb.Append("  TotalSetSize: ").Append(TotalSetSize).Append("\n");
        sb.Append("  Users: ").Append(Users).Append("\n");
        sb.Append("}\n");
        return sb.ToString();
    }
    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }
    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        return this.Equals(obj as IntegratedUserInfoList);
    }
    /// <summary>
    /// Returns true if IntegratedUserInfoList instances are equal
    /// </summary>
    /// <param name="other">Instance of IntegratedUserInfoList to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(IntegratedUserInfoList other)
    {
        // credit: http://stackoverflow.com/a/10454552/677735
        if (other == null)
            return false;
        return
            (
                this.AllUsersSelected == other.AllUsersSelected ||
                this.AllUsersSelected != null &&
                this.AllUsersSelected.Equals(other.AllUsersSelected)
            ) &&
            (
                this.EndPosition == other.EndPosition ||
                this.EndPosition != null &&
                this.EndPosition.Equals(other.EndPosition)
            ) &&
            (
                this.NextUri == other.NextUri ||
                this.NextUri != null &&
                this.NextUri.Equals(other.NextUri)
            ) &&
            (
                this.PreviousUri == other.PreviousUri ||
                this.PreviousUri != null &&
                this.PreviousUri.Equals(other.PreviousUri)
            ) &&
            (
                this.ResultSetSize == other.ResultSetSize ||
                this.ResultSetSize != null &&
                this.ResultSetSize.Equals(other.ResultSetSize)
            ) &&
            (
                this.StartPosition == other.StartPosition ||
                this.StartPosition != null &&
                this.StartPosition.Equals(other.StartPosition)
            ) &&
            (
                this.TotalSetSize == other.TotalSetSize ||
                this.TotalSetSize != null &&
                this.TotalSetSize.Equals(other.TotalSetSize)
            ) &&
            (
                this.Users == other.Users ||
                // BUGFIX: SequenceEqual throws ArgumentNullException when its argument is
                // null; guard other.Users so a (non-null, null) pair compares as not-equal
                // instead of throwing (matches the null-safe handling of the string fields).
                this.Users != null && other.Users != null &&
                this.Users.SequenceEqual(other.Users)
            );
    }
    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // credit: http://stackoverflow.com/a/263416/677735
        unchecked // Overflow is fine, just wrap
        {
            int hash = 41;
            // Suitable nullity checks etc, of course :)
            if (this.AllUsersSelected != null)
                hash = hash * 59 + this.AllUsersSelected.GetHashCode();
            if (this.EndPosition != null)
                hash = hash * 59 + this.EndPosition.GetHashCode();
            if (this.NextUri != null)
                hash = hash * 59 + this.NextUri.GetHashCode();
            if (this.PreviousUri != null)
                hash = hash * 59 + this.PreviousUri.GetHashCode();
            if (this.ResultSetSize != null)
                hash = hash * 59 + this.ResultSetSize.GetHashCode();
            if (this.StartPosition != null)
                hash = hash * 59 + this.StartPosition.GetHashCode();
            if (this.TotalSetSize != null)
                hash = hash * 59 + this.TotalSetSize.GetHashCode();
            if (this.Users != null)
                hash = hash * 59 + this.Users.GetHashCode();
            return hash;
        }
    }
    /// <summary>
    /// Validates this instance (no model-level constraints are generated for this type).
    /// </summary>
    /// <param name="validationContext">Validation context</param>
    /// <returns>Validation results (always empty)</returns>
    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        yield break;
    }
}
}
| |
using System;
using ClosedXML.Excel;
using ClosedXML.Excel.Caching;
using NUnit.Framework;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace ClosedXML_Tests.Excel.Caching
{
[TestFixture]
public class BaseRepositoryTests
{
    // Verifies that storing a second entity under an existing key returns the
    // originally stored instance, not the new one (repository deduplicates by key).
    [Test]
    public void DifferentEntitiesWithSameKeyStoredOnce()
    {
        // Arrange
        int key = 12345;
        var entity1 = new SampleEntity(key);
        var entity2 = new SampleEntity(key);
        var sampleRepository = this.CreateSampleRepository();
        // Act
        var storedEntity1 = sampleRepository.Store(key, entity1);
        var storedEntity2 = sampleRepository.Store(key, entity2);
        // Assert
        Assert.AreSame(entity1, storedEntity1);
        Assert.AreSame(entity1, storedEntity2);
        Assert.AreNotSame(entity2, storedEntity2);
    }
    // Verifies the repository holds entries weakly: once no strong reference remains,
    // the stored entity becomes collectable. Disabled in DEBUG because the JIT keeps
    // locals alive longer there, which would make the WeakReference check unreliable.
    [Test]
    public void NonUsedReferencesAreGCed()
    {
#if !DEBUG
        // Arrange
        int key = 12345;
        var sampleRepository = this.CreateSampleRepository();
        // Act
        var storedEntityRef1 = new WeakReference(sampleRepository.Store(key, new SampleEntity(key)));
        int count = 0;
        do
        {
            // Poll up to 10 times, giving the GC a chance to collect between attempts.
            Thread.Sleep(50);
            GC.Collect();
            count++;
        } while (storedEntityRef1.IsAlive && count < 10);
        // Assert
        if (count == 10)
            Assert.Fail("storedEntityRef1 was not GCed");
        Assert.IsFalse(sampleRepository.Any());
#else
        Assert.Ignore("Can't run in DEBUG");
#endif
    }
    // Same weak-reference behavior as above, but under concurrent stores of many
    // entities whose strong references are dropped before the GC pass.
    [Test]
    public void NonUsedReferencesAreGCed2()
    {
#if !DEBUG
        // Arrange
        int countUnique = 30;
        int repeatCount = 1000;
        SampleEntity[] entities = new SampleEntity[countUnique * repeatCount];
        for (int i = 0; i < countUnique; i++)
        {
            for (int j = 0; j < repeatCount; j++)
            {
                entities[i * repeatCount + j] = new SampleEntity(i);
            }
        }
        var sampleRepository = this.CreateSampleRepository();
        // Act
        Parallel.ForEach(entities, new ParallelOptions { MaxDegreeOfParallelism = 8 },
            e => sampleRepository.Store(e.Key, e));
        Thread.Sleep(50);
        GC.Collect();
        var storedEntries = sampleRepository.ToList();
        // Assert
        Assert.AreEqual(0, storedEntries.Count);
#else
        Assert.Ignore("Can't run in DEBUG");
#endif
    }
    // Verifies that concurrent Store calls with overlapping keys never create
    // duplicate entries: exactly one entry per unique key survives.
    [Test]
    public void ConcurrentAddingCausesNoDuplication()
    {
        // Arrange
        int countUnique = 30;
        int repeatCount = 1000;
        SampleEntity[] entities = new SampleEntity[countUnique * repeatCount];
        for (int i = 0; i < countUnique; i++)
        {
            for (int j = 0; j < repeatCount; j++)
            {
                entities[i * repeatCount + j] = new SampleEntity(i);
            }
        }
        var sampleRepository = this.CreateSampleRepository();
        // Act
        Parallel.ForEach(entities, new ParallelOptions { MaxDegreeOfParallelism = 8 },
            e => sampleRepository.Store(e.Key, e));
        var storedEntries = sampleRepository.ToList();
        // Assert
        Assert.AreEqual(countUnique, storedEntries.Count);
        Assert.NotNull(entities); // To protect them from GC
    }
    // Verifies Replace moves an entry to a new key: old key gone, new key present,
    // and the same entity instance is returned under the new key.
    [Test]
    public void ReplaceKeyInRepository()
    {
        // Arrange
        int key1 = 12345;
        int key2 = 54321;
        var entity = new SampleEntity(key1);
        var sampleRepository = this.CreateSampleRepository();
        var storedEntity1 = sampleRepository.Store(key1, entity);
        // Act
        sampleRepository.Replace(key1, key2);
        bool containsOld = sampleRepository.ContainsKey(key1, out var _);
        bool containsNew = sampleRepository.ContainsKey(key2, out var _);
        var storedEntity2 = sampleRepository.GetOrCreate(key2);
        // Assert
        Assert.IsFalse(containsOld);
        Assert.IsTrue(containsNew);
        Assert.AreSame(entity, storedEntity1);
        Assert.AreSame(entity, storedEntity2);
    }
    // Verifies Replace is safe under parallel use: each replaced entity is the one
    // subsequently returned by GetOrCreate for its new key. Uses EditableRepository
    // because the entity's Key must be mutable to mirror the key change.
    [Test]
    public void ConcurrentReplaceKeyInRepository()
    {
        var sampleRepository = new EditableRepository();
        int[] keys = Enumerable.Range(0, 1000).ToArray();
        keys.ForEach(key => sampleRepository.GetOrCreate(key));
        Parallel.ForEach(keys, key =>
        {
            var val1 = sampleRepository.Replace(key, key + 2000);
            val1.Key = key + 2000;
            var val2 = sampleRepository.GetOrCreate(key + 2000);
            Assert.AreSame(val1, val2);
        });
    }
    // Verifies Replace with a non-existing source key is a no-op that leaves
    // existing entries untouched.
    [Test]
    public void ReplaceNonExistingKeyInRepository()
    {
        int key1 = 100;
        int key2 = 200;
        int key3 = 300;
        var entity = new SampleEntity(key1);
        var sampleRepository = this.CreateSampleRepository();
        sampleRepository.Store(key1, entity);
        sampleRepository.Replace(key2, key3);
        var all = sampleRepository.ToList();
        Assert.AreEqual(1, all.Count);
        Assert.AreSame(entity, all.First());
    }
    // Factory used by the tests above; kept as a method so subclasses could override creation.
    private SampleRepository CreateSampleRepository()
    {
        return new SampleRepository();
    }
    /// <summary>
    /// Class under testing
    /// </summary>
    internal class SampleRepository : XLRepositoryBase<int, SampleEntity>
    {
        public SampleRepository() : base(key => new SampleEntity(key))
        {
        }
    }
    // Immutable-key entity used by most tests.
    public class SampleEntity
    {
        public int Key { get; private set; }
        public SampleEntity(int key)
        {
            Key = key;
        }
    }
    /// <summary>
    /// Class under testing
    /// </summary>
    internal class EditableRepository : XLRepositoryBase<int, EditableEntity>
    {
        public EditableRepository() : base(key => new EditableEntity(key))
        {
        }
    }
    // Mutable-key entity used by the concurrent Replace test.
    public class EditableEntity
    {
        public int Key { get; set; }
        public EditableEntity(int key)
        {
            Key = key;
        }
    }
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using java.lang;
using java.util;
using stab.query;
using cnatural.helpers;
namespace cnatural.parser {
// Immutable result of preprocessing: the original character buffer plus the parsed
// input-section parts that partition it. Constructor is package-private — instances
// are created by Preprocessor.preprocess() only.
public class PreprocessedText {
    PreprocessedText(char[] text, Iterable<InputSectionPart> inputSectionParts) {
        this.Text = text;
        this.InputSectionParts = inputSectionParts;
    }
    // NOTE(review): the trailing '^' appears to be stab syntax for an auto-property
    // with a compiler-generated backing field — confirm against the stab language docs.
    public char[] Text^;
    public Iterable<InputSectionPart> InputSectionParts^;
}
public class Preprocessor {
private PreprocessorScanner scanner;
private PreprocessorLexicalUnit lexicalUnit;
private int[] warnings;
// Creates a preprocessor over the given source text; errors are reported through
// codeErrorManager. The warnings buffer (initial capacity 16) is grown on demand
// by parsePragma.
public Preprocessor(CodeErrorManager codeErrorManager, char[] text) {
    this.scanner = new PreprocessorScanner(codeErrorManager, text);
    this.Symbols = new HashSet<String>();
    this.warnings = new int[16];
}
// Name of the file being preprocessed; forwarded to the scanner, which uses it
// when reporting errors.
public String Filename {
    get {
        return scanner.Filename;
    }
    set {
        scanner.Filename = value;
    }
}
// The raw source characters held by the scanner; all section parts index into this buffer.
public char[] Text {
    get {
        return scanner.Text;
    }
}
public HashSet<String> Symbols^;
// Entry point: parses the whole source into input-section parts and wraps the
// result. fail=true so a malformed directive at top level raises an error.
public final PreprocessedText preprocess() {
    var parts = new ArrayList<InputSectionPart>();
    parseSource(scanner.Position, false, parts, true);
    return new PreprocessedText(this.Text, parts);
}
// Main dispatch loop. Consumes lexical units starting at 'position', appending
// section parts to 'parts'. Plain source runs become SourceCodeSectionParts;
// '#'-directives are dispatched to the dedicated parse* methods.
//   skippedSection — true inside an inactive #if/#elif/#else branch
//   fail           — whether an unknown directive is an error (true at top level)
//                    or ends this sub-section (false inside regions/conditionals)
// Returns the position reached when parsing stops (EOF or an unhandled directive).
private int parseSource(int position, bool skippedSection, ArrayList<InputSectionPart> parts, bool fail) {
    int line = scanner.Line;
    nextLexicalUnit(skippedSection);
    for (; ; ) {
        switch (lexicalUnit) {
        case EndOfStream:
            return position;
        case SourceCode:
            // Coalesce consecutive SourceCode units into a single section part.
            int endPosition = scanner.Position;
            while (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.SourceCode) {
                endPosition = scanner.Position;
            }
            parts.add(new SourceCodeSectionPart(position, endPosition - position, line));
            position = endPosition;
            break;
        case NumberSign:
            line = scanner.Line + 1;
            parseOptionalWhitespace(skippedSection);
            // Dispatch on the directive keyword following '#'.
            switch (lexicalUnit) {
            case Define:
                position = parseDefinition(position, skippedSection, true, parts);
                break;
            case Undef:
                position = parseDefinition(position, skippedSection, false, parts);
                break;
            case Region:
                position = parseRegion(position, skippedSection, parts);
                break;
            case Error:
                position = parseDiagnostic(position, skippedSection, true, parts);
                break;
            case Warning:
                position = parseDiagnostic(position, skippedSection, false, parts);
                break;
            case Line:
                position = parseLine(position, skippedSection, parts);
                break;
            case Pragma:
                position = parsePragma(position, skippedSection, parts);
                break;
            case If:
                position = parseConditional(position, skippedSection, parts);
                break;
            default:
                // #elif/#else/#endif/#endregion: an error at top level, otherwise they
                // terminate this sub-section and are handled by the caller.
                if (fail) {
                    throw scanner.error(ParseErrorId.MalformedPreprocessorDirective);
                } else {
                    return position;
                }
            }
            line = scanner.Line - 1;
            break;
        default:
            throw scanner.error(ParseErrorId.InternalError);
        }
    }
}
// Parses '#define SYMBOL' (define=true) or '#undef SYMBOL' (define=false).
// Directive keywords are valid symbol names too, hence the explicit cases below
// mapping each keyword token back to its spelling. Updates the Symbols set only
// when the directive is in an active (non-skipped) section.
private int parseDefinition(int position, bool skippedSection, bool define, ArrayList<InputSectionPart> parts) {
    parseWhitespace(skippedSection);
    String symbol;
    int symbolOffset = scanner.Position;
    switch (nextLexicalUnit(skippedSection)) {
    case Symbol:
        symbol = getSymbol(symbolOffset, scanner.Position - symbolOffset);
        break;
    case Define:
        symbol = "define";
        break;
    case Undef:
        symbol = "undef";
        break;
    case Warning:
        symbol = "warning";
        break;
    case Region:
        symbol = "region";
        break;
    case Pragma:
        symbol = "pragma";
        break;
    case Line:
        symbol = "line";
        break;
    case Error:
        symbol = "error";
        break;
    case Endregion:
        symbol = "endregion";
        break;
    case Endif:
        symbol = "endif";
        break;
    case Else:
        symbol = "else";
        break;
    default:
        throw scanner.error(ParseErrorId.PreprocessorSymbolExpected);
    }
    int line = scanner.Line;
    parseNewLine(true);
    parts.add(new DefinitionSectionPart(position, scanner.Position - position, line, define, symbol));
    position = scanner.Position;
    nextLexicalUnit(skippedSection);
    if (!skippedSection) {
        // Only active sections mutate the defined-symbol set.
        if (define) {
            Symbols.add(symbol);
        } else {
            Symbols.remove(symbol);
        }
    }
    return position;
}
// Parses '#region [message]' ... '#endregion [message]'. The body is parsed
// recursively (fail=false so the matching #endregion terminates it); both the
// start and end messages (if present) are captured verbatim.
private int parseRegion(int position, bool skippedSection, ArrayList<InputSectionPart> parts) {
    var smessage = "";
    if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
        int messageOffset = scanner.Position;
        scanner.scanMessage();
        smessage = new String(this.Text, messageOffset, scanner.Position - messageOffset);
        nextLexicalUnit(skippedSection);
    }
    checkNewLine();
    var subParts = new ArrayList<InputSectionPart>();
    parseSource(scanner.Position, skippedSection, subParts, false);
    if (lexicalUnit != PreprocessorLexicalUnit.Endregion) {
        throw scanner.error(ParseErrorId.EndregionExpected);
    }
    String emessage = "";
    if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
        int messageOffset = scanner.Position;
        scanner.scanMessage();
        emessage = new String(this.Text, messageOffset, scanner.Position - messageOffset);
        nextLexicalUnit(skippedSection);
    }
    int line = scanner.Line;
    checkWhitespaceOrNewLine();
    parts.add(new RegionSectionPart(position, scanner.Position - position, line, subParts, smessage, emessage));
    position = scanner.Position;
    nextLexicalUnit(skippedSection);
    return position;
}
// Parses '#error [message]' (error=true) or '#warning [message]' (error=false).
// In a skipped section the directive is consumed without scanning the message.
private int parseDiagnostic(int position, bool skippedSection, bool error, ArrayList<InputSectionPart> parts) {
    if (skippedSection) {
        int p = ignoreDirective(position);
        parts.add(new DiagnosticSectionPart(position, p - position, scanner.Line, error, ""));
        return p;
    }
    var message = "";
    if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
        int messageOffset = scanner.Position;
        scanner.scanMessage();
        message = new String(this.Text, messageOffset, scanner.Position - messageOffset);
        nextLexicalUnit(skippedSection);
    }
    int line = scanner.Line;
    checkWhitespaceOrNewLine();
    parts.add(new DiagnosticSectionPart(position, scanner.Position - position, line, error, message));
    position = scanner.Position;
    nextLexicalUnit(skippedSection);
    return position;
}
// Parses '#line <digits> ["filename"]' | '#line default' | '#line hidden'.
// In a skipped section the directive is consumed without validation.
private int parseLine(int position, bool skippedSection, ArrayList<InputSectionPart> parts) {
    if (skippedSection) {
        int p = ignoreDirective(position);
        parts.add(new LineSectionPart(position, p - position, scanner.Line, false));
        return p;
    }
    parseWhitespace(skippedSection);
    int arg1Offset = scanner.Position;
    switch (nextLexicalUnit(skippedSection)) {
    case DecimalDigits:
        // '#line <number> ["filename"]'
        int line;
        try {
            line = Integer.parseInt(new String(this.Text, arg1Offset, scanner.Position - arg1Offset));
        } catch (NumberFormatException e) {
            throw scanner.error(ParseErrorId.InvalidNumber);
        }
        var filename = "";
        if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
            int filenameOffset = scanner.Position;
            if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.Filename) {
                // Strip the surrounding quotes (+1 / -2).
                filename = new String(this.Text, filenameOffset + 1, scanner.Position - filenameOffset - 2);
                nextLexicalUnit(skippedSection);
            }
        }
        parseNewLine(false);
        parts.add(new LineSectionPart(position, scanner.Position - position, line, filename));
        break;
    case Symbol:
        // '#line default' or '#line hidden'; anything else is an error.
        var hidden = false;
        switch (this.Text[arg1Offset]) {
        case 'd':
            if (!isIdentifier("default", arg1Offset, scanner.Position - arg1Offset)) {
                throw scanner.error(ParseErrorId.DecimalDigitsExpected);
            }
            break;
        case 'h':
            if (!isIdentifier("hidden", arg1Offset, scanner.Position - arg1Offset)) {
                throw scanner.error(ParseErrorId.DecimalDigitsExpected);
            }
            hidden = true;
            break;
        default:
            throw scanner.error(ParseErrorId.MalformedPreprocessorDirective);
        }
        line = scanner.Line;
        parseNewLine(true);
        parts.add(new LineSectionPart(position, scanner.Position - position, line, hidden));
        break;
    default:
        throw scanner.error(ParseErrorId.DecimalDigitsExpected);
    }
    position = scanner.Position;
    nextLexicalUnit(skippedSection);
    return position;
}
// Parses '#pragma warning (disable|restore) [n, n, ...]'. The collected warning
// numbers are accumulated into the shared 'warnings' buffer (grown by doubling)
// and emitted as the PragmaSectionPart's iterable. In a skipped section the
// directive is consumed without validation.
private int parsePragma(int position, bool skippedSection, ArrayList<InputSectionPart> parts) {
    if (skippedSection) {
        int p = ignoreDirective(position);
        parts.add(new PragmaSectionPart(position, p - position, scanner.Line, false, Query.emptyInt()));
        return p;
    }
    parseWhitespace(skippedSection);
    if (nextLexicalUnit(skippedSection) != PreprocessorLexicalUnit.Warning) {
        throw scanner.error(ParseErrorId.WarningExpected);
    }
    parseWhitespace(skippedSection);
    int actionOffset = scanner.Position;
    if (nextLexicalUnit(skippedSection) != PreprocessorLexicalUnit.Symbol) {
        throw scanner.error(ParseErrorId.MalformedPreprocessorDirective);
    }
    // Action must spell exactly "disable" or "restore" (first char pre-dispatches).
    var restore = false;
    switch (this.Text[actionOffset]) {
    case 'd':
        if (!isIdentifier("disable", actionOffset, scanner.Position - actionOffset)) {
            throw scanner.error(ParseErrorId.MalformedPreprocessorDirective);
        }
        break;
    case 'r':
        if (!isIdentifier("restore", actionOffset, scanner.Position - actionOffset)) {
            throw scanner.error(ParseErrorId.MalformedPreprocessorDirective);
        }
        restore = true;
        break;
    default:
        throw scanner.error(ParseErrorId.MalformedPreprocessorDirective);
    }
    int nwarnings = 0;
    if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
        int numberOffset = scanner.Position;
        if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.DecimalDigits) {
            int warning;
            try {
                warning = Integer.parseInt(new String(this.Text, numberOffset, scanner.Position - numberOffset));
            } catch (NumberFormatException e) {
                throw scanner.error(ParseErrorId.InvalidNumber);
            }
            warnings[nwarnings++] = warning;
            // Comma-separated list of further warning numbers.
            do {
                parseOptionalWhitespace(skippedSection);
                if (lexicalUnit == PreprocessorLexicalUnit.Comma) {
                    numberOffset = scanner.Position;
                    if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
                        numberOffset = scanner.Position;
                        nextLexicalUnit(skippedSection);
                    }
                    if (lexicalUnit != PreprocessorLexicalUnit.DecimalDigits) {
                        throw scanner.error(ParseErrorId.DecimalDigitsExpected);
                    }
                    try {
                        warning = Integer.parseInt(new String(this.Text, numberOffset, scanner.Position - numberOffset));
                    } catch (NumberFormatException e) {
                        throw scanner.error(ParseErrorId.InvalidNumber);
                    }
                    // NOTE(review): sizeof(array) appears to be stab's array-length
                    // operator — grow the buffer by doubling when it is full.
                    if (nwarnings == sizeof(warnings)) {
                        var t = new int[nwarnings * 2];
                        System.arraycopy(warnings, 0, t, 0, nwarnings);
                        warnings = t;
                    }
                    warnings[nwarnings++] = warning;
                }
            } while (lexicalUnit == PreprocessorLexicalUnit.DecimalDigits);
        }
    }
    int line = scanner.Line;
    parseNewLine(false);
    // Copy the collected prefix into a right-sized array for the section part.
    var t = new int[nwarnings];
    System.arraycopy(warnings, 0, t, 0, nwarnings);
    parts.add(new PragmaSectionPart(position, scanner.Position - position, line, restore, Query.asIterable(t)));
    position = scanner.Position;
    nextLexicalUnit(skippedSection);
    return position;
}
// Parses an #if / #elif / #else / #endif conditional chain starting at the
// current directive token. Each branch body is parsed recursively via
// parseSource; once one branch has evaluated to true every later branch is
// skipped. Returns the offset just past the #endif line.
private int parseConditional(int position, bool skippedSection, ArrayList<InputSectionPart> parts) {
    var sectionFound = false; // true once some branch evaluated to true
    do {
        var lu = lexicalUnit; // If, Elif or Else for this iteration
        int line = scanner.Line;
        var skip = sectionFound; // branches after the first match are skipped
        var eval = false;
        if (lu == PreprocessorLexicalUnit.Else) {
            // #else carries no expression to evaluate
            nextLexicalUnit(skippedSection);
        } else {
            parseWhitespace(skippedSection);
            int expressionOffset = scanner.Position;
            nextLexicalUnit(skippedSection);
            eval = parseExpression(expressionOffset, skippedSection);
            skip |= !eval;
            if (!sectionFound) {
                sectionFound = eval;
            }
        }
        // Everything inside an already-skipped outer section stays skipped.
        skip |= skippedSection;
        parseNewLine(false);
        var subParts = new ArrayList<InputSectionPart>();
        int newOffset = parseSource(scanner.Position, skip, subParts, false);
        switch (lu) {
        case If:
            parts.add(new IfSectionPart(position, newOffset - position, line, subParts, skip, eval));
            break;
        case Elif:
            parts.add(new ElifSectionPart(position, newOffset - position, line, subParts, skip, eval));
            break;
        case Else:
            parts.add(new ElseSectionPart(position, newOffset - position, line, subParts, skip));
            break;
        }
        position = newOffset;
        // After a branch body only #elif, #else or #endif may follow.
        switch (lexicalUnit) {
        case Elif:
        case Else:
        case Endif:
            break;
        default:
            throw scanner.error(ParseErrorId.EndifExpected);
        }
    } while (lexicalUnit != PreprocessorLexicalUnit.Endif);
    int line = scanner.Line;
    parseNewLine(true);
    parts.add(new EndifSectionPart(position, scanner.Position - position, line));
    position = scanner.Position;
    nextLexicalUnit(skippedSection);
    return position;
}
// Entry point of the preprocessor expression grammar; delegates to the
// lowest-precedence level (||). position is the offset of the first token.
private bool parseExpression(int position, bool skippedSection) {
    return parseOrExpression(position, skippedSection);
}
// Parses a chain of '||'-separated and-expressions and returns the combined
// value. Every operand is scanned and evaluated (the scanner must advance),
// so there is no short-circuit at the token level.
private bool parseOrExpression(int position, bool skippedSection) {
    var result = parseAndExpression(position, skippedSection);
    while (true) {
        if (lexicalUnit == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
        if (lexicalUnit != PreprocessorLexicalUnit.Or) {
            return result;
        }
        position = scanner.Position;
        if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
        var operand = parseAndExpression(position, skippedSection);
        result = result || operand;
    }
}
// Parses a chain of '&&'-separated equality-expressions and returns the
// combined value. Every operand is scanned and evaluated so the scanner
// always advances past the whole expression.
private bool parseAndExpression(int position, bool skippedSection) {
    var result = parseEqualityExpression(position, skippedSection);
    while (true) {
        if (lexicalUnit == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
        if (lexicalUnit != PreprocessorLexicalUnit.And) {
            return result;
        }
        position = scanner.Position;
        if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
        var operand = parseEqualityExpression(position, skippedSection);
        result = result && operand;
    }
}
// Parses '==' / '!=' comparisons between unary expressions. Note: the right
// operand is parsed by a recursive call to this method, so chained
// comparisons associate to the right (a == b == c parses as a == (b == c)).
private bool parseEqualityExpression(int position, bool skippedSection) {
    var leftEval = parseUnaryExpression(position, skippedSection);
    for (; ; ) {
        if (lexicalUnit == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
        bool rightEval = false;
        switch (lexicalUnit) {
        case Equal:
            position = scanner.Position;
            if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
                position = scanner.Position;
                nextLexicalUnit(skippedSection);
            }
            rightEval = parseEqualityExpression(position, skippedSection);
            leftEval = leftEval == rightEval;
            break;
        case NotEqual:
            position = scanner.Position;
            if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
                position = scanner.Position;
                nextLexicalUnit(skippedSection);
            }
            rightEval = parseEqualityExpression(position, skippedSection);
            leftEval = leftEval != rightEval;
            break;
        default:
            // No comparison operator follows: the expression ends here.
            return leftEval;
        }
    }
}
// Parses any number of leading '!' operators followed by a primary
// expression. An even count of '!' cancels out, an odd count negates.
private bool parseUnaryExpression(int position, bool skippedSection) {
    int notCount = 0;
    while (lexicalUnit == PreprocessorLexicalUnit.Not) {
        notCount++;
        position = scanner.Position;
        if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
    }
    var value = parsePrimaryExpression(position, skippedSection);
    if (notCount % 2 == 1) {
        return !value;
    }
    return value;
}
// Parses a primary expression: the literals true/false, a symbol (which
// evaluates to whether it is currently defined), or a parenthesized
// sub-expression.
private bool parsePrimaryExpression(int position, bool skippedSection) {
    var result = false;
    switch (lexicalUnit) {
    case Symbol:
        String symbol = getSymbol(position, scanner.Position - position);
        if (symbol.equals("true")) {
            result = true;
        } else if (!symbol.equals("false")) {
            // Any other identifier is true iff the symbol is defined.
            result = this.Symbols.contains(symbol);
        }
        break;
    case LeftParenthesis:
        position = scanner.Position;
        if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
            position = scanner.Position;
            nextLexicalUnit(skippedSection);
        }
        result = parseExpression(position, skippedSection);
        if (lexicalUnit == PreprocessorLexicalUnit.WhiteSpace) {
            nextLexicalUnit(skippedSection);
        }
        if (lexicalUnit != PreprocessorLexicalUnit.RightParenthesis) {
            throw scanner.error(ParseErrorId.CloseParenthesisExpected);
        }
        break;
    default:
        throw scanner.error(ParseErrorId.PreprocessorSymbolExpected);
    }
    // Consume the final token of this primary expression.
    nextLexicalUnit(skippedSection);
    return result;
}
// Skips the rest of an ignored directive up to and including the end of the
// line. Returns the offset of the first character after the directive (or
// the end of the stream).
private int ignoreDirective(int position) {
    while (true) {
        switch (nextLexicalUnit(true)) {
        case EndOfStream:
            return scanner.Position;
        case NewLine:
            position = scanner.Position;
            nextLexicalUnit(true);
            return position;
        }
    }
}
// Verifies that a directive is properly terminated: optional whitespace and
// an optional single-line comment, then a new line or end of stream. When
// advance is true the current token is consumed first.
private void parseNewLine(bool advance) {
    if (advance) {
        nextLexicalUnit(false);
    }
    if (lexicalUnit == PreprocessorLexicalUnit.WhiteSpace) {
        nextLexicalUnit(false);
    }
    if (lexicalUnit == PreprocessorLexicalUnit.SingleLineComment) {
        nextLexicalUnit(false);
    }
    checkWhitespaceOrNewLine();
}
// Advances one token and, if that token is whitespace, consumes it too.
private void parseOptionalWhitespace(bool skippedSection) {
    if (nextLexicalUnit(skippedSection) == PreprocessorLexicalUnit.WhiteSpace) {
        nextLexicalUnit(skippedSection);
    }
}
// Advances one token and requires it to be whitespace; throws otherwise.
private void parseWhitespace(bool skippedSection) {
    if (nextLexicalUnit(skippedSection) != PreprocessorLexicalUnit.WhiteSpace) {
        throw scanner.error(ParseErrorId.WhitespaceExpected);
    }
}
// Requires the current token to terminate a line: end of stream or new line.
// (Name kept for compatibility; whitespace itself is not accepted here.)
private void checkWhitespaceOrNewLine() {
    if (lexicalUnit != PreprocessorLexicalUnit.EndOfStream && lexicalUnit != PreprocessorLexicalUnit.NewLine) {
        throw scanner.error(ParseErrorId.NewLineExpected);
    }
}
// Requires the current token to be a new line; throws otherwise.
private void checkNewLine() {
    if (lexicalUnit != PreprocessorLexicalUnit.NewLine) {
        throw scanner.error(ParseErrorId.NewLineExpected);
    }
}
// Returns the identifier starting at offset with the given length, decoding
// any unicode escape sequences (\u + 4 hex digits, \U + 8 hex digits).
private String getSymbol(int offset, int length) {
    var hasUnicode = false;
    // Fast-path probe: most symbols contain no escape sequence at all.
    for (int i = 0; i < length; i++) {
        if (this.Text[offset + i] == '\\') {
            hasUnicode = true;
            break;
        }
    }
    if (hasUnicode) {
        var sb = new StringBuilder();
        for (int i = 0; i < length; i++) {
            char c = this.Text[offset + i];
            if (c == '\\') {
                i++; // step onto the 'u'/'U' escape introducer
                // NOTE(review): code points above 0xFFFF from \U escapes are
                // truncated by this (char) cast -- confirm intended.
                sb.append((char)scanUnicodeEscapeSequence(this.Text, offset + i));
                // Skip the hex digits: 4 for \u, 8 for \U. The loop's own i++
                // then moves past the last digit.
                if (this.Text[offset + i] == 'u') {
                    i += 4;
                } else {
                    i += 8;
                }
            } else {
                sb.append(c);
            }
        }
        return sb.toString();
    } else {
        return new String(this.Text, offset, length);
    }
}
// Decodes the unicode escape sequence at buffer[offset]: 'u' followed by 4
// hexadecimal digits, or 'U' followed by 8. Returns the decoded code point.
// Throws when the introducer is neither 'u' nor 'U' or a digit is not hex.
// (The previous version duplicated the digit-accumulation loop for each
// introducer; the loops were identical except for the digit count.)
private int scanUnicodeEscapeSequence(char[] buffer, int offset) {
    int digits;
    switch (buffer[offset]) {
    case 'u':
        digits = 4;
        break;
    case 'U':
        digits = 8;
        break;
    default:
        throw scanner.error(ParseErrorId.InvalidEscapeSequence);
    }
    var result = 0;
    for (int i = 1; i <= digits; i++) {
        int value;
        if ((value = ParserHelper.scanHexDigit(buffer[offset + i])) == -1) {
            throw scanner.error(ParseErrorId.HexadecimalDigitExpected);
        }
        result = result * 16 + value;
    }
    return result;
}
// Returns true when the source text at [offset, offset + length) matches
// identifier exactly.
private bool isIdentifier(String identifier, int offset, int length) {
    if (length != identifier.length()) {
        return false;
    }
    int i = 0;
    while (i < length) {
        if (identifier[i] != this.Text[offset + i]) {
            return false;
        }
        i++;
    }
    return true;
}
// Advances the scanner one token and caches the result in lexicalUnit.
private PreprocessorLexicalUnit nextLexicalUnit(bool skippedSection) {
    return lexicalUnit = scanner.nextLexicalUnit(skippedSection);
}
}
}
| |
using Microsoft.Maker.Devices.TextDisplay.AsyncHelpers;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Xml.Linq;
using Windows.Devices.Gpio;
namespace Microsoft.Maker.Devices.TextDisplay
{
/// <summary>
/// Text display driver for HD44780-compatible character LCDs wired directly
/// to GPIO pins. Only the 4-bit data bus mode is currently implemented;
/// configuring all eight data pins selects 8-bit mode, which throws
/// <see cref="NotImplementedException"/> during chip initialization.
/// </summary>
class HD44780GpioDriver : TextDisplayBase
{
    /// <summary>HD44780 "clear display" instruction.</summary>
    private const short InstructionCode_ClearLcd = 1;

    /// <summary>Instruction moving the cursor to the second display line (set DDRAM address 0x40).</summary>
    private const short InstructionCode_NewLine = 192;

    /// <summary>Width of the data bus between the host and the LCD controller.</summary>
    private enum BitMode
    {
        Four,
        Eight
    };

    /// <summary>Destination register selected via the RS pin.</summary>
    private enum Register
    {
        Data,
        Instruction
    }

    private GpioController gpio = null;
    private GpioPin registerSelectPin = null;
    private GpioPin enablePin = null;

    // Data pins ordered least-significant bit first (D0..D7 in 8-bit mode,
    // D4..D7 in 4-bit mode).
    private GpioPin[] dataPins = null;
    private BitMode bitMode = BitMode.Eight;

    // Serializes message writes so concurrent callers cannot interleave
    // instruction/data sequences on the bus.
    private readonly AsyncSemaphore lcdLock = new AsyncSemaphore(1);

    public HD44780GpioDriver(TextDisplayConfig config) :
        base(config)
    {
        this.gpio = GpioController.GetDefault();
    }

    /// <summary>
    /// Opens the GPIO pins named in the driver configuration and runs the LCD
    /// initialization sequence. "RsPin", "EnablePin" and "D4Pin".."D7Pin" are
    /// required; when "D0Pin".."D3Pin" are also present the driver uses the
    /// full 8-bit bus, otherwise 4-bit mode. Configuration errors are logged
    /// and swallowed, leaving the driver uninitialized.
    /// </summary>
    /// <exception cref="NullReferenceException">No GPIO controller exists on this device.</exception>
    protected override async Task InitializeInternal(IDictionary<string, string> driverConfigurationValues)
    {
        if (null == this.gpio)
            throw new NullReferenceException();

        try
        {
            int[] dataPinNumbers = null;

            if (driverConfigurationValues.ContainsKey("RsPin") &&
                driverConfigurationValues.ContainsKey("EnablePin") &&
                driverConfigurationValues.ContainsKey("D4Pin") &&
                driverConfigurationValues.ContainsKey("D5Pin") &&
                driverConfigurationValues.ContainsKey("D6Pin") &&
                driverConfigurationValues.ContainsKey("D7Pin"))
            {
                int rsPin = Convert.ToInt32(driverConfigurationValues["RsPin"]);
                int enablePin = Convert.ToInt32(driverConfigurationValues["EnablePin"]);
                int d4Pin = Convert.ToInt32(driverConfigurationValues["D4Pin"]);
                int d5Pin = Convert.ToInt32(driverConfigurationValues["D5Pin"]);
                int d6Pin = Convert.ToInt32(driverConfigurationValues["D6Pin"]);
                int d7Pin = Convert.ToInt32(driverConfigurationValues["D7Pin"]);

                if (driverConfigurationValues.ContainsKey("D0Pin") &&
                    driverConfigurationValues.ContainsKey("D1Pin") &&
                    driverConfigurationValues.ContainsKey("D2Pin") &&
                    driverConfigurationValues.ContainsKey("D3Pin"))
                {
                    // All eight data lines are wired: use the 8-bit bus.
                    dataPinNumbers = new int[8];

                    int d0Pin = Convert.ToInt32(driverConfigurationValues["D0Pin"]);
                    int d1Pin = Convert.ToInt32(driverConfigurationValues["D1Pin"]);
                    int d2Pin = Convert.ToInt32(driverConfigurationValues["D2Pin"]);
                    int d3Pin = Convert.ToInt32(driverConfigurationValues["D3Pin"]);

                    dataPinNumbers[0] = d0Pin;
                    dataPinNumbers[1] = d1Pin;
                    dataPinNumbers[2] = d2Pin;
                    dataPinNumbers[3] = d3Pin;
                    dataPinNumbers[4] = d4Pin;
                    dataPinNumbers[5] = d5Pin;
                    dataPinNumbers[6] = d6Pin;
                    dataPinNumbers[7] = d7Pin;

                    this.bitMode = BitMode.Eight;
                }
                else
                {
                    // Only the upper nibble is wired: use 4-bit mode.
                    dataPinNumbers = new int[4];
                    dataPinNumbers[0] = d4Pin;
                    dataPinNumbers[1] = d5Pin;
                    dataPinNumbers[2] = d6Pin;
                    dataPinNumbers[3] = d7Pin;

                    this.bitMode = BitMode.Four;
                }

                this.initializePins(enablePin, rsPin, dataPinNumbers);
                await this.initializeChip();
            }
        }
        catch (FormatException)
        {
            Debug.WriteLine("HD44780GpioDriver: Pin config is invalid");
        }
        catch (OverflowException)
        {
            Debug.WriteLine("HD44780GpioDriver: Pin config is invalid");
        }
        catch (FileLoadException)
        {
            // OpenPin throws FileLoadException when a pin is already reserved
            // by another consumer.
            Debug.WriteLine("HD44780GpioDriver: Pin is already open");
        }
    }

    /// <summary>
    /// Clears the display and releases all GPIO pins.
    /// NOTE(review): does not take <see cref="lcdLock"/>, so disposing while a
    /// write is in flight can interleave with it -- confirm callers dispose
    /// only after all writes complete.
    /// </summary>
    protected override async Task DisposeInternal()
    {
        await this.writeValue(Register.Instruction, InstructionCode_ClearLcd);
        await this.wait(TimeSpan.FromMilliseconds(1.64));

        this.registerSelectPin.Dispose();
        this.enablePin.Dispose();
        foreach (GpioPin p in this.dataPins)
        {
            p.Dispose();
        }
    }

    /// <summary>
    /// Clears the display and writes <paramref name="message"/> to it.
    /// '\n' moves output to the next display line. Overlong lines and excess
    /// lines are still sent to the controller but reported via debug output.
    /// </summary>
    protected async override Task WriteMessageInternal(string message)
    {
        await lcdLock.WaitAsync();
        try
        {
            Debug.WriteLine("HD44780GpioDriver: Writing Message - " + message);

            await this.writeValue(Register.Instruction, InstructionCode_ClearLcd);
            await this.wait(TimeSpan.FromMilliseconds(1.64));

            int lineChars = 0;
            int lines = 1;
            foreach (char c in message)
            {
                if (this.Width == lineChars)
                {
                    Debug.WriteLine("HD44780GpioDriver: Message overran on line " + lines);
                }

                if (this.Height < lines)
                {
                    Debug.WriteLine("HD44780GpioDriver: Message contains too many lines");
                }

                if (c == '\n')
                {
                    await this.writeValue(Register.Instruction, InstructionCode_NewLine);
                    lines++;
                    lineChars = 0;
                    continue;
                }

                await this.writeValue(Register.Data, (short)c);
                lineChars++;
                await this.wait(TimeSpan.FromMilliseconds(1));
            }

            Debug.WriteLine("HD44780GpioDriver: Message write complete");
        }
        finally
        {
            lcdLock.Release();
        }
    }

    /// <summary>
    /// Opens the enable, register-select and data pins and configures them all
    /// as outputs. <paramref name="dataPins"/> is ordered LSB first.
    /// </summary>
    private void initializePins(int enablePin, int registerSelectPin, int[] dataPins)
    {
        this.registerSelectPin = this.gpio.OpenPin(registerSelectPin);
        this.registerSelectPin.SetDriveMode(GpioPinDriveMode.Output);

        this.enablePin = this.gpio.OpenPin(enablePin);
        this.enablePin.SetDriveMode(GpioPinDriveMode.Output);

        this.dataPins = new GpioPin[dataPins.Length];
        for (int i = 0; i < dataPins.Length; i++)
        {
            this.dataPins[i] = this.gpio.OpenPin(dataPins[i]);
            this.dataPins[i].SetDriveMode(GpioPinDriveMode.Output);
        }
    }

    /// <summary>
    /// Runs the HD44780 power-on initialization for the configured bus width.
    /// The 4-bit sequence and wait times follow
    /// http://www.taoli.ece.ufl.edu/teaching/4744/labs/lab7/LCD_V1.pdf
    /// </summary>
    /// <exception cref="NotImplementedException">8-bit mode is not implemented.</exception>
    public async Task initializeChip()
    {
        switch(this.bitMode)
        {
            case BitMode.Four:
                // Power-on: force the controller into 4-bit mode.
                this.writeBits(Register.Instruction, 0x03);
                await this.wait(TimeSpan.FromMilliseconds(15));
                this.writeBits(Register.Instruction, 0x03);
                await this.wait(TimeSpan.FromMilliseconds(4.1));
                this.writeBits(Register.Instruction, 0x03);
                await this.wait(TimeSpan.FromMilliseconds(4.1));
                this.writeBits(Register.Instruction, 0x02);
                await this.wait(TimeSpan.FromMilliseconds(4.1));

                // Function set: number of lines and font.
                this.writeBits(Register.Instruction, 0x02);
                await this.wait(TimeSpan.FromMilliseconds(0.4));
                this.writeBits(Register.Instruction, 0x08);
                await this.wait(TimeSpan.FromMilliseconds(0.4));

                // Display on, cursor off.
                this.writeBits(Register.Instruction, 0x00);
                await this.wait(TimeSpan.FromMilliseconds(0.4));
                this.writeBits(Register.Instruction, 0x0C);
                await this.wait(TimeSpan.FromMilliseconds(0.4));

                // Entry mode: increment cursor to the right when writing.
                this.writeBits(Register.Instruction, 0x00);
                await this.wait(TimeSpan.FromMilliseconds(0.4));
                this.writeBits(Register.Instruction, 0x06);
                await this.wait(TimeSpan.FromMilliseconds(0.4));
                break;

            case BitMode.Eight:
                throw new NotImplementedException();
        }
    }

    /// <summary>
    /// Writes an 8-bit value to the selected register, splitting it into two
    /// nibbles (high nibble first) when the bus is in 4-bit mode.
    /// </summary>
    private async Task writeValue(Register register, short value)
    {
        switch(this.bitMode)
        {
            case BitMode.Four:
                byte value4bits = (byte)value;
                value4bits >>= 4;     // high nibble first
                this.writeBits(register, value4bits);
                value4bits = (byte)value;
                value4bits &= 0x0F;   // then the low nibble
                this.writeBits(register, value4bits);
                break;
            case BitMode.Eight:
                this.writeBits(register, value);
                break;
        }

        await this.wait(TimeSpan.FromMilliseconds(0.04));
    }

    /// <summary>
    /// Puts <paramref name="bits"/> onto the data lines for the given
    /// register: E is raised, RS selects data/instruction, the data pins are
    /// driven LSB first, then E is dropped to latch the value into the LCD.
    /// </summary>
    private void writeBits(Register register, short bits)
    {
        switch (register)
        {
            case Register.Data:
                this.enablePin.Write(GpioPinValue.High);
                this.registerSelectPin.Write(GpioPinValue.High);
                break;
            case Register.Instruction:
                this.enablePin.Write(GpioPinValue.High);
                this.registerSelectPin.Write(GpioPinValue.Low);
                break;
        }

        int numberOfBits = 0;
        switch(this.bitMode)
        {
            case BitMode.Four:
                numberOfBits = 4;
                break;
            case BitMode.Eight:
                numberOfBits = 8;
                break;
        }

        // Drive each data line from the corresponding bit of the value, least
        // significant bit first. (Replaces the previous binary-string round
        // trip through Convert.ToString/PadLeft/Array.Reverse.)
        for (int i = 0; i < numberOfBits; i++)
        {
            GpioPinValue level = ((bits >> i) & 1) != 0 ? GpioPinValue.High : GpioPinValue.Low;
            this.dataPins[i].Write(level);
        }

        // The falling edge on E latches the bus into the controller.
        switch (register)
        {
            case Register.Data:
                this.enablePin.Write(GpioPinValue.Low);
                this.registerSelectPin.Write(GpioPinValue.High);
                break;
            case Register.Instruction:
                this.enablePin.Write(GpioPinValue.Low);
                this.registerSelectPin.Write(GpioPinValue.Low);
                break;
        }
    }

    /// <summary>Asynchronously waits for the given duration.</summary>
    private async Task wait(TimeSpan duration)
    {
        await Task.Delay(duration);
    }
}
}
| |
using System;
using System.Collections;
using System.Text;
using System.IO;
namespace NfxLab.MicroFramework.External
{
/// <summary>
/// This class encodes and decodes JSON strings.
/// Spec. details, see http://www.json.org/
///
/// JSON uses Arrays and Objects. These correspond here to the datatypes ArrayList and Hashtable.
/// All numbers are parsed to doubles.
/// Pulled from http://techblog.procurios.nl/k/618/news/view/14605/14863/How-do-I-write-my-own-parser-for-JSON.html
/// </summary>
///
public class JSON
{
    // Token identifiers returned by LookAhead/NextToken.
    public const int TOKEN_NONE = 0;
    public const int TOKEN_CURLY_OPEN = 1;
    public const int TOKEN_CURLY_CLOSE = 2;
    public const int TOKEN_SQUARED_OPEN = 3;
    public const int TOKEN_SQUARED_CLOSE = 4;
    public const int TOKEN_COLON = 5;
    public const int TOKEN_COMMA = 6;
    public const int TOKEN_STRING = 7;
    public const int TOKEN_NUMBER = 8;
    public const int TOKEN_TRUE = 9;
    public const int TOKEN_FALSE = 10;
    public const int TOKEN_NULL = 11;

    // Initial capacity of the StringBuilders used while parsing/serializing.
    private const int BUILDER_CAPACITY = 500;

    /// <summary>
    /// Reads a config.js file and decodes the JSON payload embedded in it.
    /// </summary>
    /// <param name="file">Path of the config.js file.</param>
    /// <returns>The decoded value (see <see cref="JsonDecode(string)"/>).</returns>
    public static object JsonDecodeFromFile(string file)
    {
        using (FileStream fs = new FileStream(file, FileMode.Open))
        {
            using (StreamReader sr = new StreamReader(fs))
            {
                // config.js is used by both the backend and frontend for configuration.
                // The frontend requires a js var declaration in it, so strip the var components
                // from the string.
                // NOTE(review): assumes exactly 11 leading characters of js prefix and one
                // trailing character -- confirm against the actual config.js format.
                string configString = sr.ReadToEnd();
                configString = configString.Substring(11, configString.Length - 12);
                bool success = true;
                return JsonDecode(configString, ref success);
            }
        }
    }

    /// <summary>
    /// Parses the string json into a value.
    /// </summary>
    /// <param name="json">A JSON string.</param>
    /// <returns>An ArrayList, a Hashtable, a double, a string, null, true, or false</returns>
    public static object JsonDecode(string json)
    {
        bool success = true;
        return JsonDecode(json, ref success);
    }

    /// <summary>
    /// Parses the string json into a value; and fills 'success' with whether the parse succeeded.
    /// </summary>
    /// <param name="json">A JSON string.</param>
    /// <param name="success">Successful parse?</param>
    /// <returns>An ArrayList, a Hashtable, a double, a string, null, true, or false</returns>
    public static object JsonDecode(string json, ref bool success)
    {
        success = true;
        if (json != null)
        {
            char[] charArray = json.ToCharArray();
            int index = 0;
            object value = ParseValue(charArray, ref index, ref success);
            return value;
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Converts a Hashtable / ArrayList object into a JSON string
    /// </summary>
    /// <param name="json">A Hashtable / ArrayList</param>
    /// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns>
    public static string JsonEncode(object json)
    {
        StringBuilder builder = new StringBuilder(BUILDER_CAPACITY);
        bool success = SerializeValue(json, builder);
        return (success ? builder.ToString() : null);
    }

    /// <summary>
    /// Parses a JSON object ("{...}") into a Hashtable. On failure sets
    /// success to false and returns null.
    /// </summary>
    protected static Hashtable ParseObject(char[] json, ref int index, ref bool success)
    {
        Hashtable table = new Hashtable();
        int token;

        // Skip the opening '{'.
        NextToken(json, ref index);

        while (true)
        {
            token = LookAhead(json, index);
            if (token == JSON.TOKEN_NONE)
            {
                success = false;
                return null;
            }
            else if (token == JSON.TOKEN_COMMA)
            {
                NextToken(json, ref index);
            }
            else if (token == JSON.TOKEN_CURLY_CLOSE)
            {
                NextToken(json, ref index);
                return table;
            }
            else
            {
                // name
                string name = ParseString(json, ref index, ref success);
                if (!success)
                {
                    return null;
                }

                // ':'
                token = NextToken(json, ref index);
                if (token != JSON.TOKEN_COLON)
                {
                    success = false;
                    return null;
                }

                // value
                object value = ParseValue(json, ref index, ref success);
                if (!success)
                {
                    return null;
                }

                table[name] = value;
            }
        }
    }

    /// <summary>
    /// Parses a JSON array ("[...]") into an ArrayList. On failure sets
    /// success to false and returns null.
    /// </summary>
    protected static ArrayList ParseArray(char[] json, ref int index, ref bool success)
    {
        ArrayList array = new ArrayList();

        // Skip the opening '['.
        NextToken(json, ref index);

        while (true)
        {
            int token = LookAhead(json, index);
            if (token == JSON.TOKEN_NONE)
            {
                success = false;
                return null;
            }
            else if (token == JSON.TOKEN_COMMA)
            {
                NextToken(json, ref index);
            }
            else if (token == JSON.TOKEN_SQUARED_CLOSE)
            {
                NextToken(json, ref index);
                return array;
            }
            else
            {
                object value = ParseValue(json, ref index, ref success);
                if (!success)
                {
                    return null;
                }
                array.Add(value);
            }
        }
    }

    /// <summary>
    /// Parses any JSON value, dispatching on the next token.
    /// </summary>
    protected static object ParseValue(char[] json, ref int index, ref bool success)
    {
        switch (LookAhead(json, index))
        {
            case JSON.TOKEN_STRING:
                return ParseString(json, ref index, ref success);
            case JSON.TOKEN_NUMBER:
                return ParseNumber(json, ref index, ref success);
            case JSON.TOKEN_CURLY_OPEN:
                return ParseObject(json, ref index, ref success);
            case JSON.TOKEN_SQUARED_OPEN:
                return ParseArray(json, ref index, ref success);
            case JSON.TOKEN_TRUE:
                NextToken(json, ref index);
                return true;
            case JSON.TOKEN_FALSE:
                NextToken(json, ref index);
                return false;
            case JSON.TOKEN_NULL:
                NextToken(json, ref index);
                return null;
            case JSON.TOKEN_NONE:
                break;
        }

        success = false;
        return null;
    }

    /// <summary>
    /// Parses a quoted JSON string, decoding the standard escape sequences.
    /// </summary>
    protected static string ParseString(char[] json, ref int index, ref bool success)
    {
        StringBuilder s = new StringBuilder(BUILDER_CAPACITY);
        char c;

        EatWhitespace(json, ref index);

        // Skip the opening '"'.
        c = json[index++];

        bool complete = false;
        while (!complete)
        {
            if (index == json.Length)
            {
                break;
            }

            c = json[index++];
            if (c == '"')
            {
                complete = true;
                break;
            }
            else if (c == '\\')
            {
                if (index == json.Length)
                {
                    break;
                }

                c = json[index++];
                if (c == '"')
                {
                    s.Append('"');
                }
                else if (c == '\\')
                {
                    s.Append('\\');
                }
                else if (c == '/')
                {
                    s.Append('/');
                }
                else if (c == 'b')
                {
                    s.Append('\b');
                }
                else if (c == 'f')
                {
                    s.Append('\f');
                }
                else if (c == 'n')
                {
                    s.Append('\n');
                }
                else if (c == 'r')
                {
                    s.Append('\r');
                }
                else if (c == 't')
                {
                    s.Append('\t');
                }
                else if (c == 'u')
                {
                    int remainingLength = json.Length - index;
                    if (remainingLength >= 4)
                    {
                        // Parse the four hex digits into a code point.
                        // (The previous code parsed them as *decimal* and then
                        // appended the number's decimal digits to the output
                        // instead of the character itself.)
                        uint codePoint = Convert.ToUInt32(new string(json, index, 4), 16);

                        // Convert the code point to a unicode char and append it.
                        s.Append((char)codePoint);

                        // Skip the 4 hex digits.
                        index += 4;
                    }
                    else
                    {
                        break;
                    }
                }
            }
            else
            {
                s.Append(c);
            }
        }

        if (!complete)
        {
            success = false;
            return null;
        }

        return s.ToString();
    }

    /// <summary>
    /// Parses a JSON number; all numbers are represented as doubles.
    /// </summary>
    protected static double ParseNumber(char[] json, ref int index, ref bool success)
    {
        EatWhitespace(json, ref index);

        int lastIndex = GetLastIndexOfNumber(json, index);
        int charLength = (lastIndex - index) + 1;

        // Parse with the invariant culture: JSON always uses '.' as the
        // decimal separator, regardless of the host locale.
        double number;
        success = Double.TryParse(new string(json, index, charLength),
            System.Globalization.NumberStyles.Float,
            System.Globalization.CultureInfo.InvariantCulture, out number);

        index = lastIndex + 1;
        return number;
    }

    /// <summary>
    /// Returns the index of the last character belonging to the number that
    /// starts at <paramref name="index"/>.
    /// </summary>
    protected static int GetLastIndexOfNumber(char[] json, int index)
    {
        int lastIndex;
        for (lastIndex = index; lastIndex < json.Length; lastIndex++)
        {
            if ("0123456789+-.eE".IndexOf(json[lastIndex]) == -1)
            {
                break;
            }
        }
        return lastIndex - 1;
    }

    /// <summary>Advances index past any JSON whitespace.</summary>
    protected static void EatWhitespace(char[] json, ref int index)
    {
        for (; index < json.Length; index++)
        {
            if (" \t\n\r".IndexOf(json[index]) == -1)
            {
                break;
            }
        }
    }

    /// <summary>Returns the next token without consuming any input.</summary>
    protected static int LookAhead(char[] json, int index)
    {
        int saveIndex = index;
        return NextToken(json, ref saveIndex);
    }

    /// <summary>
    /// Consumes and classifies the next token. For punctuation and literals
    /// the token is consumed entirely; for strings and numbers only the
    /// leading character is examined (the index is left for the dedicated
    /// parser to consume the token body).
    /// </summary>
    protected static int NextToken(char[] json, ref int index)
    {
        EatWhitespace(json, ref index);

        if (index == json.Length)
        {
            return JSON.TOKEN_NONE;
        }

        char c = json[index];
        index++;
        switch (c)
        {
            case '{':
                return JSON.TOKEN_CURLY_OPEN;
            case '}':
                return JSON.TOKEN_CURLY_CLOSE;
            case '[':
                return JSON.TOKEN_SQUARED_OPEN;
            case ']':
                return JSON.TOKEN_SQUARED_CLOSE;
            case ',':
                return JSON.TOKEN_COMMA;
            case '"':
                return JSON.TOKEN_STRING;
            case '0':
            case '1':
            case '2':
            case '3':
            case '4':
            case '5':
            case '6':
            case '7':
            case '8':
            case '9':
            case '-':
                return JSON.TOKEN_NUMBER;
            case ':':
                return JSON.TOKEN_COLON;
        }
        index--;

        int remainingLength = json.Length - index;

        // false
        if (remainingLength >= 5)
        {
            if (json[index] == 'f' &&
                json[index + 1] == 'a' &&
                json[index + 2] == 'l' &&
                json[index + 3] == 's' &&
                json[index + 4] == 'e')
            {
                index += 5;
                return JSON.TOKEN_FALSE;
            }
        }

        // true
        if (remainingLength >= 4)
        {
            if (json[index] == 't' &&
                json[index + 1] == 'r' &&
                json[index + 2] == 'u' &&
                json[index + 3] == 'e')
            {
                index += 4;
                return JSON.TOKEN_TRUE;
            }
        }

        // null
        if (remainingLength >= 4)
        {
            if (json[index] == 'n' &&
                json[index + 1] == 'u' &&
                json[index + 2] == 'l' &&
                json[index + 3] == 'l')
            {
                index += 4;
                return JSON.TOKEN_NULL;
            }
        }

        return JSON.TOKEN_NONE;
    }

    /// <summary>
    /// Serializes an arbitrary value. Supported: ArrayList/arrays, Hashtable,
    /// string, bool, null and any ValueType convertible to double.
    /// </summary>
    protected static bool SerializeValue(object value, StringBuilder builder)
    {
        bool success = true;

        if (value is ArrayList || value is Array)
        {
            success = SerializeArray((IEnumerable)value, builder);
        }
        else if (value is Hashtable)
        {
            success = SerializeObject((Hashtable)value, builder);
        }
        else if (value is string)
        {
            success = SerializeString((string)value, builder);
        }
        else if ((value is Boolean) && ((Boolean)value == true))
        {
            builder.Append("true");
        }
        else if ((value is Boolean) && ((Boolean)value == false))
        {
            builder.Append("false");
        }
        else if (value is ValueType)
        {
            // Convert numeric value types with the invariant culture so the
            // output is locale independent.
            success = SerializeNumber(Convert.ToDouble(value, System.Globalization.CultureInfo.InvariantCulture), builder);
        }
        else if (value == null)
        {
            builder.Append("null");
        }
        else
        {
            success = false;
        }

        return success;
    }

    /// <summary>
    /// Serializes a Hashtable as a JSON object. Member order follows the
    /// Hashtable's (unspecified) enumeration order.
    /// </summary>
    protected static bool SerializeObject(Hashtable anObject, StringBuilder builder)
    {
        builder.Append("{");

        bool first = true;
        foreach (DictionaryEntry e in anObject)
        {
            string key = e.Key.ToString();
            object value = e.Value;

            if (!first)
                builder.Append(", ");

            SerializeString(key, builder);
            builder.Append(":");
            if (!SerializeValue(value, builder))
                return false;

            first = false;
        }

        builder.Append("}");
        return true;
    }

    /// <summary>Serializes an enumerable as a JSON array.</summary>
    protected static bool SerializeArray(IEnumerable anArray, StringBuilder builder)
    {
        builder.Append("[");

        bool first = true;
        foreach (object value in anArray)
        {
            if (!first)
            {
                builder.Append(", ");
            }

            if (!SerializeValue(value, builder))
            {
                return false;
            }

            first = false;
        }

        builder.Append("]");
        return true;
    }

    /// <summary>
    /// Serializes a string, escaping control/quote/backslash characters and
    /// emitting any character outside printable ASCII as a \uXXXX escape.
    /// </summary>
    protected static bool SerializeString(string aString, StringBuilder builder)
    {
        builder.Append("\"");

        char[] charArray = aString.ToCharArray();
        for (int i = 0; i < charArray.Length; i++)
        {
            char c = charArray[i];
            if (c == '"')
            {
                builder.Append("\\\"");
            }
            else if (c == '\\')
            {
                builder.Append("\\\\");
            }
            else if (c == '\b')
            {
                builder.Append("\\b");
            }
            else if (c == '\f')
            {
                builder.Append("\\f");
            }
            else if (c == '\n')
            {
                builder.Append("\\n");
            }
            else if (c == '\r')
            {
                builder.Append("\\r");
            }
            else if (c == '\t')
            {
                builder.Append("\\t");
            }
            else
            {
                int codepoint = (int)c;
                if ((codepoint >= 32) && (codepoint <= 126))
                {
                    builder.Append(c);
                }
                else
                {
                    // JSON \u escapes take exactly four hex digits. The
                    // previous "{0:X8}" was a *composite* format string passed
                    // to ToString and therefore emitted the braces literally.
                    builder.Append("\\u" + codepoint.ToString("X4", System.Globalization.CultureInfo.InvariantCulture));
                }
            }
        }

        builder.Append("\"");
        return true;
    }

    /// <summary>Appends a number using the invariant culture ('.' separator).</summary>
    protected static bool SerializeNumber(double number, StringBuilder builder)
    {
        builder.Append(number.ToString(System.Globalization.CultureInfo.InvariantCulture));
        return true;
    }
}
}
| |
using System;
using System.Collections.Generic;
using Newtonsoft.Json;
/*
* AvaTax API Client Library
*
* (c) 2004-2019 Avalara, Inc.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @author Genevieve Conty
* @author Greg Hester
* Swagger name: AvaTaxClient
*/
namespace Avalara.AvaTax.RestClient
{
/// <summary>
/// Represents a commitment to file a tax return on a recurring basis.
/// Only used if you subscribe to Avalara Returns.
/// </summary>
public class FilingCalendarModel
{
/// <summary>
/// The unique ID number of this filing calendar.
/// </summary>
public Int64? id { get; set; }
/// <summary>
/// The unique ID number of the company to which this filing calendar belongs.
/// </summary>
public Int32 companyId { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/13/2018, Version: 18.10, Message: Please use `taxFormCode` instead.
/// The legacy return name of the tax form to file.
/// </summary>
public String returnName { get; set; }
/// <summary>
/// Name or ISO 3166 code identifying the country that issued the tax form for this filing calendar.
///
/// This field supports many different country identifiers:
/// * Two character ISO 3166 codes
/// * Three character ISO 3166 codes
/// * Fully spelled out names of the country in ISO supported languages
/// * Common alternative spellings for many countries
///
/// For a full list of all supported codes and names, please see the Definitions API `ListCountries`.
/// </summary>
public String formCountry { get; set; }
/// <summary>
/// Name or ISO 3166 code identifying the region that issued the tax form for this filing calendar.
///
/// This field supports many different region identifiers:
/// * Two and three character ISO 3166 region codes
/// * Fully spelled out names of the region in ISO supported languages
/// * Common alternative spellings for many regions
///
/// For a full list of all supported codes and names, please see the Definitions API `ListRegions`.
/// </summary>
public String formRegion { get; set; }
/// <summary>
/// The Avalara standard tax form code of the tax form for this filing calendar. The first two characters of the tax form code
/// are the ISO 3166 country code of the country that issued this form.
/// </summary>
public String taxFormCode { get; set; }
/// <summary>
/// The start period of a fiscal year for this form/company
/// </summary>
public Int32? fiscalYearStartMonth { get; set; }
/// <summary>
/// If this calendar is for a location-specific tax return, specify the location code here. To file for all locations, leave this value NULL.
/// </summary>
public String locationCode { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/17/2021, Version: 21.9.0, Message: Field will be no longer be available after the 21.9.0 release.
/// If this calendar is for a location-specific tax return, specify the location-specific behavior here.
/// </summary>
public OutletTypeId? outletTypeId { get; set; }
/// <summary>
/// Specify the ISO 4217 currency code for the currency to remit for this tax return. For all tax returns in the United States, specify "USD".
/// </summary>
public String paymentCurrency { get; set; }
/// <summary>
/// The frequency on which this tax form is filed.
/// </summary>
public FilingFrequencyId filingFrequencyId { get; set; }
/// <summary>
/// A 16-bit bitmap containing a 1 for each month when the return should be filed.
/// </summary>
public Int16? months { get; set; }
/// <summary>
/// Tax Registration ID for this Region - in the U.S., this is for your state.
/// </summary>
public String stateRegistrationId { get; set; }
/// <summary>
/// Tax Registration ID for the local jurisdiction, if any.
/// </summary>
public String localRegistrationId { get; set; }
/// <summary>
/// The Employer Identification Number or Taxpayer Identification Number that is to be used when filing this return.
/// </summary>
public String employerIdentificationNumber { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/1/2017, Version: 17.9, Message: Field will be no longer be available after the 17.9 release.
/// The first line of the mailing address that will be used when filling out this tax return.
/// </summary>
public String line1 { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/1/2017, Version: 17.9, Message: Field will be no longer be available after the 17.9 release.
/// The second line of the mailing address that will be used when filling out this tax return.
/// Please note that some tax forms do not support multiple address lines.
/// </summary>
public String line2 { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/1/2017, Version: 17.9, Message: Field will be no longer be available after the 17.9 release.
/// The city name of the mailing address that will be used when filling out this tax return.
/// </summary>
public String city { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/1/2017, Version: 17.9, Message: Field will no longer be available after the 17.9 release.
/// The state, region, or province of the mailing address that will be used when filling out this tax return.
/// </summary>
public String region { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/1/2017, Version: 17.9, Message: Field will no longer be available after the 17.9 release.
/// The postal code or zip code of the mailing address that will be used when filling out this tax return.
/// </summary>
public String postalCode { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/1/2017, Version: 17.9, Message: Field will no longer be available after the 17.9 release.
/// The two character ISO-3166 country code of the mailing address that will be used when filling out this tax return.
/// </summary>
public String country { get; set; }
/// <summary>
/// The first line of the mailing address that will be used when filling out this tax return.
/// </summary>
public String mailingAddressLine1 { get; set; }
/// <summary>
/// The second line of the mailing address that will be used when filling out this tax return.
/// Please note that some tax forms do not support multiple address lines.
/// </summary>
public String mailingAddressLine2 { get; set; }
/// <summary>
/// The city name of the mailing address that will be used when filling out this tax return.
/// </summary>
public String mailingAddressCity { get; set; }
/// <summary>
/// Name or ISO 3166 code identifying the region of the mailing address that will be used when filling out this tax return.
///
/// This field supports many different region identifiers:
/// * Two and three character ISO 3166 region codes
/// * Fully spelled out names of the region in ISO supported languages
/// * Common alternative spellings for many regions
///
/// For a full list of all supported codes and names, please see the Definitions API `ListRegions`.
/// </summary>
public String mailingAddressRegion { get; set; }
/// <summary>
/// The postal code or zip code of the mailing address that will be used when filling out this tax return.
/// </summary>
public String mailingAddressPostalCode { get; set; }
/// <summary>
/// Name or ISO 3166 code identifying the country of the mailing address that will be used when filling out this tax return.
///
/// This field supports many different country identifiers:
/// * Two character ISO 3166 codes
/// * Three character ISO 3166 codes
/// * Fully spelled out names of the country in ISO supported languages
/// * Common alternative spellings for many countries
///
/// For a full list of all supported codes and names, please see the Definitions API `ListCountries`.
/// </summary>
public String mailingAddressCountry { get; set; }
/// <summary>
/// The phone number to be used when filing this return.
/// </summary>
public String phone { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/17/2021, Version: 21.9.0, Message: Field will no longer be available after the 21.9.0 release.
/// Special filing instructions to be used when filing this return.
/// Please note that requesting special filing instructions may incur additional costs.
/// </summary>
public String customerFilingInstructions { get; set; }
/// <summary>
/// The legal entity name to be used when filing this return.
/// </summary>
public String legalEntityName { get; set; }
/// <summary>
/// The earliest date for the tax period when this return should be filed.
/// This date specifies the earliest date for tax transactions that should be reported on this filing calendar.
/// Please note that tax is usually filed one month in arrears: for example, tax for January transactions is typically filed during the month of February.
/// </summary>
public DateTime effectiveDate { get; set; }
/// <summary>
/// The last date for the tax period when this return should be filed.
/// This date specifies the last date for tax transactions that should be reported on this filing calendar.
/// Please note that tax is usually filed one month in arrears: for example, tax for January transactions is typically filed during the month of February.
/// </summary>
public DateTime? endDate { get; set; }
/// <summary>
/// The method to be used when filing this return.
/// </summary>
public FilingTypeId? filingTypeId { get; set; }
/// <summary>
/// If you file electronically, this is the username you use to log in to the tax authority's website.
/// </summary>
public String eFileUsername { get; set; }
/// <summary>
/// If you file electronically, this is the password or pass code you use to log in to the tax authority's website.
/// </summary>
public String eFilePassword { get; set; }
/// <summary>
/// If you are required to prepay a percentage of taxes for future periods, please specify the percentage in whole numbers;
/// for example, the value 90 would indicate 90%.
/// </summary>
public Int32? prepayPercentage { get; set; }
/// <summary>
/// Determines if a prepayment is required for this filing calendar
/// </summary>
public Boolean? prePaymentRequired { get; set; }
/// <summary>
/// If your company is required to make a prepayment that is designated by a fixed amount each period, please specify the amount here.
/// </summary>
public Decimal? fixedPrepaymentAmount { get; set; }
/// <summary>
/// DEPRECATED - Date: , Version: , Message: The 'taxTypes' list field should be used going forward.
/// The type of tax to report on this return.
/// </summary>
public MatchingTaxType taxTypeId { get; set; }
/// <summary>
/// The list of tax types to report on this return.
/// </summary>
public List<String> taxTypes { get; set; }
/// <summary>
/// DEPRECATED - Date: 9/17/2021, Version: 21.9.0, Message: Field will no longer be available after the 21.9.0 release.
/// Internal filing notes.
/// </summary>
public String internalNotes { get; set; }
/// <summary>
/// Custom filing information field for Alabama.
/// </summary>
public String alSignOn { get; set; }
/// <summary>
/// Custom filing information field for Alabama.
/// </summary>
public String alAccessCode { get; set; }
/// <summary>
/// Custom filing information field for Maine.
/// </summary>
public String meBusinessCode { get; set; }
/// <summary>
/// Custom filing information field for Iowa.
/// </summary>
public String iaBen { get; set; }
/// <summary>
/// Custom filing information field for Connecticut.
/// </summary>
public String ctReg { get; set; }
/// <summary>
/// Custom filing information field. Leave blank.
/// </summary>
public String other1Name { get; set; }
/// <summary>
/// Custom filing information field. Leave blank.
/// </summary>
public String other1Value { get; set; }
/// <summary>
/// Custom filing information field. Leave blank.
/// </summary>
public String other2Name { get; set; }
/// <summary>
/// Custom filing information field. Leave blank.
/// </summary>
public String other2Value { get; set; }
/// <summary>
/// Custom filing information field. Leave blank.
/// </summary>
public String other3Name { get; set; }
/// <summary>
/// Custom filing information field. Leave blank.
/// </summary>
public String other3Value { get; set; }
/// <summary>
/// The unique ID of the tax authority of this return.
/// </summary>
public Int32? taxAuthorityId { get; set; }
/// <summary>
/// The name of the tax authority of this return.
/// </summary>
public String taxAuthorityName { get; set; }
/// <summary>
/// The type description of the tax authority of this return.
/// </summary>
public String taxAuthorityType { get; set; }
/// <summary>
/// The date when this record was created.
/// </summary>
public DateTime? createdDate { get; set; }
/// <summary>
/// The User ID of the user who created this record.
/// </summary>
public Int32? createdUserId { get; set; }
/// <summary>
/// The date/time when this record was last modified.
/// </summary>
public DateTime? modifiedDate { get; set; }
/// <summary>
/// The user ID of the user who last modified this record.
/// </summary>
public Int32? modifiedUserId { get; set; }
/// <summary>
/// User name of bulk account.
/// </summary>
public String bulkAccountId { get; set; }
/// <summary>
/// The bulk account site code.
/// </summary>
public String siteCode { get; set; }
/// <summary>
/// The status of the bulk account's validation.
/// </summary>
public BulkAccountValidationStatus? bulkAccountValidationStatus { get; set; }
/// <summary>
/// CompanyReturn settings for a complex filing calendar
/// </summary>
public List<CompanyReturnSettingModel> settings { get; set; }
/// <summary>
/// Convert this object to a JSON string of itself
/// </summary>
/// <returns>A JSON string of this object</returns>
public override string ToString()
{
    // Serialize this model to indented JSON so the output is readable in logs and diagnostics.
    var serializerSettings = new JsonSerializerSettings() { Formatting = Formatting.Indented };
    return JsonConvert.SerializeObject(this, serializerSettings);
}
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Template Source: Templates\CSharp\Requests\EntityCollectionRequest.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type ContactExtensionsCollectionRequest.
/// </summary>
// NOTE(review): generated class (see header at top of this section) — doc-only changes; code is byte-identical.
public partial class ContactExtensionsCollectionRequest : BaseRequest, IContactExtensionsCollectionRequest
{
    /// <summary>
    /// Constructs a new ContactExtensionsCollectionRequest.
    /// </summary>
    /// <param name="requestUrl">The URL for the built request.</param>
    /// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
    /// <param name="options">Query and header option name value pairs for the request.</param>
    public ContactExtensionsCollectionRequest(
        string requestUrl,
        IBaseClient client,
        IEnumerable<Option> options)
        : base(requestUrl, client, options)
    {
    }
    /// <summary>
    /// Adds the specified Extension to the collection via POST.
    /// </summary>
    /// <param name="extension">The Extension to add.</param>
    /// <returns>The created Extension.</returns>
    public System.Threading.Tasks.Task<Extension> AddAsync(Extension extension)
    {
        // Convenience overload: delegates to the cancellable overload with no cancellation support.
        return this.AddAsync(extension, CancellationToken.None);
    }
    /// <summary>
    /// Adds the specified Extension to the collection via POST.
    /// </summary>
    /// <param name="extension">The Extension to add.</param>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
    /// <returns>The created Extension.</returns>
    public System.Threading.Tasks.Task<Extension> AddAsync(Extension extension, CancellationToken cancellationToken)
    {
        this.ContentType = "application/json";
        this.Method = "POST";
        // Stamp the OData type annotation ("#" + lowerCamelCase runtime type name) so the
        // service can deserialize the concrete derived Extension type being posted.
        extension.ODataType = string.Concat("#", StringHelper.ConvertTypeToLowerCamelCase(extension.GetType().FullName));
        return this.SendAsync<Extension>(extension, cancellationToken);
    }
    /// <summary>
    /// Gets the collection page.
    /// </summary>
    /// <returns>The collection page.</returns>
    public System.Threading.Tasks.Task<IContactExtensionsCollectionPage> GetAsync()
    {
        // Convenience overload: delegates to the cancellable overload with no cancellation support.
        return this.GetAsync(CancellationToken.None);
    }
    /// <summary>
    /// Gets the collection page.
    /// </summary>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
    /// <returns>The collection page, or null when the response carries no page data.</returns>
    public async System.Threading.Tasks.Task<IContactExtensionsCollectionPage> GetAsync(CancellationToken cancellationToken)
    {
        this.Method = "GET";
        var response = await this.SendAsync<ContactExtensionsCollectionResponse>(null, cancellationToken).ConfigureAwait(false);
        if (response != null && response.Value != null && response.Value.CurrentPage != null)
        {
            if (response.AdditionalData != null)
            {
                // Wire up server-driven paging: if the payload carries an @odata.nextLink,
                // initialize the page's next-page request from it.
                object nextPageLink;
                response.AdditionalData.TryGetValue("@odata.nextLink", out nextPageLink);
                var nextPageLinkString = nextPageLink as string;
                if (!string.IsNullOrEmpty(nextPageLinkString))
                {
                    response.Value.InitializeNextPageRequest(
                        this.Client,
                        nextPageLinkString);
                }
                // Copy the additional data collection to the page itself so that information is not lost
                response.Value.AdditionalData = response.AdditionalData;
            }
            return response.Value;
        }
        // No usable page in the response (e.g. empty body).
        return null;
    }
    /// <summary>
    /// Adds the specified expand value to the request.
    /// </summary>
    /// <param name="value">The expand value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Expand(string value)
    {
        this.QueryOptions.Add(new QueryOption("$expand", value));
        return this;
    }
    /// <summary>
    /// Adds the specified expand value to the request.
    /// </summary>
    /// <param name="expandExpression">The expression from which to calculate the expand value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Expand(Expression<Func<Extension, object>> expandExpression)
    {
        if (expandExpression == null)
        {
            throw new ArgumentNullException(nameof(expandExpression));
        }
        // Translate the member-access expression into the $expand string; a null result means
        // the expression shape was not supported and 'error' explains why.
        string error;
        string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
        if (value == null)
        {
            throw new ArgumentException(error, nameof(expandExpression));
        }
        else
        {
            this.QueryOptions.Add(new QueryOption("$expand", value));
        }
        return this;
    }
    /// <summary>
    /// Adds the specified select value to the request.
    /// </summary>
    /// <param name="value">The select value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Select(string value)
    {
        this.QueryOptions.Add(new QueryOption("$select", value));
        return this;
    }
    /// <summary>
    /// Adds the specified select value to the request.
    /// </summary>
    /// <param name="selectExpression">The expression from which to calculate the select value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Select(Expression<Func<Extension, object>> selectExpression)
    {
        if (selectExpression == null)
        {
            throw new ArgumentNullException(nameof(selectExpression));
        }
        // Translate the member-access expression into the $select string; a null result means
        // the expression shape was not supported and 'error' explains why.
        string error;
        string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
        if (value == null)
        {
            throw new ArgumentException(error, nameof(selectExpression));
        }
        else
        {
            this.QueryOptions.Add(new QueryOption("$select", value));
        }
        return this;
    }
    /// <summary>
    /// Adds the specified top value to the request.
    /// </summary>
    /// <param name="value">The top value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Top(int value)
    {
        this.QueryOptions.Add(new QueryOption("$top", value.ToString()));
        return this;
    }
    /// <summary>
    /// Adds the specified filter value to the request.
    /// </summary>
    /// <param name="value">The filter value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Filter(string value)
    {
        this.QueryOptions.Add(new QueryOption("$filter", value));
        return this;
    }
    /// <summary>
    /// Adds the specified skip value to the request.
    /// </summary>
    /// <param name="value">The skip value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest Skip(int value)
    {
        this.QueryOptions.Add(new QueryOption("$skip", value.ToString()));
        return this;
    }
    /// <summary>
    /// Adds the specified orderby value to the request.
    /// </summary>
    /// <param name="value">The orderby value.</param>
    /// <returns>The request object to send.</returns>
    public IContactExtensionsCollectionRequest OrderBy(string value)
    {
        this.QueryOptions.Add(new QueryOption("$orderby", value));
        return this;
    }
}
}
| |
using System;
using UnityEngine;
namespace DarkMultiPlayer
{
// Immediate-mode (OnGUI) options window for DarkMultiPlayer: player color, cache size,
// key bindings, mod-control generation, and toolbar settings.
// NOTE(review): IMGUI code is order-dependent, so this pass is doc-only; code is byte-identical.
public class OptionsWindow
{
    // Eagerly-constructed singleton; the constructor registers with the client event loops.
    private static OptionsWindow singleton = new OptionsWindow();
    // Cleared (set false) externally so the next DrawContent reloads player color and
    // cache size from Settings — TODO confirm which caller clears it.
    public bool loadEventHandled = true;
    public bool display;
    private bool isWindowLocked = false;
    // Snapshot of 'display' taken in Update so the flag cannot flip mid-OnGUI pass.
    private bool safeDisplay;
    private bool initialized;
    //GUI Layout
    private Rect windowRect;
    private Rect moveRect;
    private GUILayoutOption[] layoutOptions;
    private GUILayoutOption[] smallOption;
    //Styles
    private GUIStyle windowStyle;
    private GUIStyle buttonStyle;
    //const
    private const float WINDOW_HEIGHT = 400;
    private const float WINDOW_WIDTH = 300;
    //TempColour
    private Color tempColor = new Color(1f, 1f, 1f, 1f);
    private GUIStyle tempColorLabelStyle;
    //Cache size
    private string newCacheSize = "";
    //Keybindings
    private bool settingChat;
    private bool settingScreenshot;
    private string toolbarMode;
    public OptionsWindow()
    {
        // Hook into the client's per-frame update and GUI draw callbacks.
        Client.updateEvent.Add(this.Update);
        Client.drawEvent.Add(this.Draw);
    }
    public static OptionsWindow fetch
    {
        get
        {
            return singleton;
        }
    }
    // Lazily builds GUI styles/rects; must run inside OnGUI (Draw) because GUI.skin
    // is only valid there.
    private void InitGUI()
    {
        //Setup GUI stuff
        windowRect = new Rect(Screen.width / 2f - WINDOW_WIDTH / 2f, Screen.height / 2f - WINDOW_HEIGHT / 2f, WINDOW_WIDTH, WINDOW_HEIGHT);
        moveRect = new Rect(0, 0, 10000, 20);
        windowStyle = new GUIStyle(GUI.skin.window);
        buttonStyle = new GUIStyle(GUI.skin.button);
        layoutOptions = new GUILayoutOption[4];
        layoutOptions[0] = GUILayout.Width(WINDOW_WIDTH);
        layoutOptions[1] = GUILayout.Height(WINDOW_HEIGHT);
        layoutOptions[2] = GUILayout.ExpandWidth(true);
        layoutOptions[3] = GUILayout.ExpandHeight(true);
        smallOption = new GUILayoutOption[2];
        smallOption[0] = GUILayout.Width(100);
        smallOption[1] = GUILayout.ExpandWidth(false);
        // NOTE(review): replaces the field initializer's white with default (0,0,0,0); the real
        // color is loaded from Settings in DrawContent once loadEventHandled is cleared.
        tempColor = new Color();
        tempColorLabelStyle = new GUIStyle(GUI.skin.label);
        UpdateToolbarString();
    }
    // Refreshes the human-readable label shown on the toolbar-mode cycle button.
    private void UpdateToolbarString()
    {
        switch (Settings.fetch.toolbarType)
        {
            case DMPToolbarType.DISABLED:
                toolbarMode = "Disabled";
                break;
            case DMPToolbarType.FORCE_STOCK:
                toolbarMode = "Stock";
                break;
            case DMPToolbarType.BLIZZY_IF_INSTALLED:
                toolbarMode = "Blizzy if installed";
                break;
            case DMPToolbarType.BOTH_IF_INSTALLED:
                toolbarMode = "Both if installed";
                break;
        }
    }
    private void Update()
    {
        // Latch the display flag once per frame so OnGUI sees a stable value.
        safeDisplay = display;
    }
    private void Draw()
    {
        if (!initialized)
        {
            initialized = true;
            InitGUI();
        }
        if (safeDisplay)
        {
            windowRect = DMPGuiUtil.PreventOffscreenWindow(GUILayout.Window(6711 + Client.WINDOW_OFFSET, windowRect, DrawContent, "DarkMultiPlayer - Options", windowStyle, layoutOptions));
        }
        CheckWindowLock();
    }
    private void DrawContent(int windowID)
    {
        // One-shot reload of editable state from Settings after an external change.
        if (!loadEventHandled)
        {
            loadEventHandled = true;
            tempColor = Settings.fetch.playerColor;
            newCacheSize = Settings.fetch.cacheSize.ToString();
        }
        //Player color
        GUILayout.BeginVertical();
        GUI.DragWindow(moveRect);
        GUILayout.BeginHorizontal();
        GUILayout.Label("Player name color: ");
        // Preview label rendered in the currently-edited color.
        GUILayout.Label(Settings.fetch.playerName, tempColorLabelStyle);
        GUILayout.EndHorizontal();
        // Scrollbars with size 0 act as 0..1 sliders for each RGB channel.
        GUILayout.BeginHorizontal();
        GUILayout.Label("R: ");
        tempColor.r = GUILayout.HorizontalScrollbar(tempColor.r, 0, 0, 1);
        GUILayout.EndHorizontal();
        GUILayout.BeginHorizontal();
        GUILayout.Label("G: ");
        tempColor.g = GUILayout.HorizontalScrollbar(tempColor.g, 0, 0, 1);
        GUILayout.EndHorizontal();
        GUILayout.BeginHorizontal();
        GUILayout.Label("B: ");
        tempColor.b = GUILayout.HorizontalScrollbar(tempColor.b, 0, 0, 1);
        GUILayout.EndHorizontal();
        tempColorLabelStyle.active.textColor = tempColor;
        tempColorLabelStyle.normal.textColor = tempColor;
        GUILayout.BeginHorizontal();
        if (GUILayout.Button("Random", buttonStyle))
        {
            tempColor = PlayerColorWorker.GenerateRandomColor();
        }
        if (GUILayout.Button("Set", buttonStyle))
        {
            // Persist the chosen color and, if connected, push it to the server.
            PlayerStatusWindow.fetch.colorEventHandled = false;
            Settings.fetch.playerColor = tempColor;
            Settings.fetch.SaveSettings();
            if (NetworkWorker.fetch.state == DarkMultiPlayerCommon.ClientState.RUNNING)
            {
                PlayerColorWorker.fetch.SendPlayerColorToServer();
            }
        }
        GUILayout.EndHorizontal();
        GUILayout.Space(10);
        //Cache
        GUILayout.Label("Cache size");
        GUILayout.Label("Current size: " + Math.Round((UniverseSyncCache.fetch.currentCacheSize / (float)(1024 * 1024)), 3) + "MB.");
        GUILayout.Label("Max size: " + Settings.fetch.cacheSize + "MB.");
        newCacheSize = GUILayout.TextArea(newCacheSize);
        GUILayout.BeginHorizontal();
        if (GUILayout.Button("Set", buttonStyle))
        {
            // Parse and clamp the entered cache size to 1..1000 MB; revert the text
            // field to the saved value on invalid input.
            int tempCacheSize;
            if (Int32.TryParse(newCacheSize, out tempCacheSize))
            {
                if (tempCacheSize < 1)
                {
                    tempCacheSize = 1;
                    newCacheSize = tempCacheSize.ToString();
                }
                if (tempCacheSize > 1000)
                {
                    tempCacheSize = 1000;
                    newCacheSize = tempCacheSize.ToString();
                }
                Settings.fetch.cacheSize = tempCacheSize;
                Settings.fetch.SaveSettings();
            }
            else
            {
                newCacheSize = Settings.fetch.cacheSize.ToString();
            }
        }
        if (GUILayout.Button("Expire cache"))
        {
            UniverseSyncCache.fetch.ExpireCache();
        }
        if (GUILayout.Button("Delete cache"))
        {
            UniverseSyncCache.fetch.DeleteCache();
        }
        GUILayout.EndHorizontal();
        //Key bindings
        GUILayout.Space(10);
        string chatDescription = "Set chat key (current: " + Settings.fetch.chatKey + ")";
        if (settingChat)
        {
            // While capturing, the next key press becomes the binding; Escape cancels.
            chatDescription = "Setting chat key (click to cancel)...";
            if (Event.current.isKey)
            {
                if (Event.current.keyCode != KeyCode.Escape)
                {
                    Settings.fetch.chatKey = Event.current.keyCode;
                    Settings.fetch.SaveSettings();
                    settingChat = false;
                }
                else
                {
                    settingChat = false;
                }
            }
        }
        if (GUILayout.Button(chatDescription))
        {
            settingChat = !settingChat;
        }
        string screenshotDescription = "Set screenshot key (current: " + Settings.fetch.screenshotKey.ToString() + ")";
        if (settingScreenshot)
        {
            // Same capture-next-key flow as the chat binding above.
            screenshotDescription = "Setting screenshot key (click to cancel)...";
            if (Event.current.isKey)
            {
                if (Event.current.keyCode != KeyCode.Escape)
                {
                    Settings.fetch.screenshotKey = Event.current.keyCode;
                    Settings.fetch.SaveSettings();
                    settingScreenshot = false;
                }
                else
                {
                    settingScreenshot = false;
                }
            }
        }
        if (GUILayout.Button(screenshotDescription))
        {
            settingScreenshot = !settingScreenshot;
        }
        GUILayout.Space(10);
        GUILayout.Label("Generate a server DMPModControl:");
        if (GUILayout.Button("Generate blacklist DMPModControl.txt"))
        {
            ModWorker.fetch.GenerateModControlFile(false);
        }
        if (GUILayout.Button("Generate whitelist DMPModControl.txt"))
        {
            ModWorker.fetch.GenerateModControlFile(true);
        }
        UniverseConverterWindow.fetch.display = GUILayout.Toggle(UniverseConverterWindow.fetch.display, "Generate Universe from saved game", buttonStyle);
        if (GUILayout.Button("Reset disclaimer"))
        {
            Settings.fetch.disclaimerAccepted = 0;
            Settings.fetch.SaveSettings();
        }
        // Toggle-style buttons: only save when the value actually changed.
        bool settingCompression = GUILayout.Toggle(Settings.fetch.compressionEnabled, "Enable compression", buttonStyle);
        if (settingCompression != Settings.fetch.compressionEnabled)
        {
            Settings.fetch.compressionEnabled = settingCompression;
            Settings.fetch.SaveSettings();
        }
        bool settingRevert = GUILayout.Toggle(Settings.fetch.revertEnabled, "Enable revert", buttonStyle);
        if (settingRevert != Settings.fetch.revertEnabled)
        {
            Settings.fetch.revertEnabled = settingRevert;
            Settings.fetch.SaveSettings();
        }
        GUILayout.BeginHorizontal();
        GUILayout.Label("Toolbar:", smallOption);
        if (GUILayout.Button(toolbarMode, buttonStyle))
        {
            // Cycle through the DMPToolbarType values, wrapping back to the first.
            int newSetting = (int)Settings.fetch.toolbarType + 1;
            //Overflow to 0
            if (!Enum.IsDefined(typeof(DMPToolbarType), newSetting))
            {
                newSetting = 0;
            }
            Settings.fetch.toolbarType = (DMPToolbarType)newSetting;
            Settings.fetch.SaveSettings();
            UpdateToolbarString();
            ToolbarSupport.fetch.DetectSettingsChange();
        }
        GUILayout.EndHorizontal();
        GUILayout.FlexibleSpace();
        if (GUILayout.Button("Close", buttonStyle))
        {
            display = false;
        }
        GUILayout.EndVertical();
    }
    // Grabs/releases the KSP input lock so clicks on the window do not leak into the game.
    private void CheckWindowLock()
    {
        if (!Client.fetch.gameRunning)
        {
            RemoveWindowLock();
            return;
        }
        // No lock is taken in flight — presumably flight input locking is handled
        // elsewhere; TODO confirm.
        if (HighLogic.LoadedSceneIsFlight)
        {
            RemoveWindowLock();
            return;
        }
        if (safeDisplay)
        {
            // Lock while the mouse is over the window (GUI y-axis is inverted vs Input).
            Vector2 mousePos = Input.mousePosition;
            mousePos.y = Screen.height - mousePos.y;
            bool shouldLock = windowRect.Contains(mousePos);
            if (shouldLock && !isWindowLocked)
            {
                InputLockManager.SetControlLock(ControlTypes.ALLBUTCAMERAS, "DMP_OptionsLock");
                isWindowLocked = true;
            }
            if (!shouldLock && isWindowLocked)
            {
                RemoveWindowLock();
            }
        }
        if (!safeDisplay && isWindowLocked)
        {
            RemoveWindowLock();
        }
    }
    private void RemoveWindowLock()
    {
        if (isWindowLocked)
        {
            isWindowLocked = false;
            InputLockManager.RemoveControlLock("DMP_OptionsLock");
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel.Composition.Primitives;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Threading;
using Microsoft.Internal;
namespace System.ComponentModel.Composition.Hosting
{
public partial class CompositionContainer : ExportProvider, ICompositionService, IDisposable
{
private CompositionOptions _compositionOptions;
private ImportEngine _importEngine;
private ComposablePartExportProvider _partExportProvider;
private ExportProvider _rootProvider;
private IDisposable _disposableRootProvider;
private CatalogExportProvider _catalogExportProvider;
private ExportProvider _localExportProvider;
private IDisposable _disposableLocalExportProvider;
private ExportProvider _ancestorExportProvider;
private IDisposable _disposableAncestorExportProvider;
private readonly ReadOnlyCollection<ExportProvider> _providers;
private volatile bool _isDisposed = false;
private object _lock = new object();
private static ReadOnlyCollection<ExportProvider> EmptyProviders = new ReadOnlyCollection<ExportProvider>(new ExportProvider[]{});
/// <summary>
/// Initializes a new instance of the <see cref="CompositionContainer"/> class.
/// </summary>
public CompositionContainer()
    : this((ComposablePartCatalog)null)
{
    // Delegates to the catalog-based constructor with no catalog and no ancestor providers.
}
/// <summary>
/// Initializes a new instance of the <see cref="CompositionContainer"/> class
/// with the specified export providers.
/// </summary>
/// <param name="providers">
/// A <see cref="Array"/> of <see cref="ExportProvider"/> objects which provide
/// the <see cref="CompositionContainer"/> access to <see cref="Export"/> objects,
/// or <see langword="null"/> to set <see cref="Providers"/> to an empty
/// <see cref="ReadOnlyCollection{T}"/>.
/// </param>
/// <exception cref="ArgumentException">
/// <paramref name="providers"/> contains an element that is <see langword="null"/>.
/// </exception>
public CompositionContainer(params ExportProvider[] providers) :
    this((ComposablePartCatalog)null, providers)
{
    // Delegates to the catalog-based constructor with no catalog.
}
/// <summary>
/// Initializes a new instance of the <see cref="CompositionContainer"/> class
/// with the specified export providers.
/// </summary>
/// <param name="compositionOptions">
/// <see cref="CompositionOptions"/> enumeration with flags controlling the composition.
/// </param>
/// <param name="providers">
/// A <see cref="Array"/> of <see cref="ExportProvider"/> objects which provide
/// the <see cref="CompositionContainer"/> access to <see cref="Export"/> objects,
/// or <see langword="null"/> to set <see cref="Providers"/> to an empty
/// <see cref="ReadOnlyCollection{T}"/>.
/// </param>
/// <exception cref="ArgumentException">
/// <paramref name="providers"/> contains an element that is <see langword="null"/>.
/// </exception>
public CompositionContainer(CompositionOptions compositionOptions, params ExportProvider[] providers) :
    this((ComposablePartCatalog)null, compositionOptions, providers)
{
    // Delegates to the catalog-based constructor with no catalog, preserving the options.
}
/// <summary>
/// Initializes a new instance of the <see cref="CompositionContainer"/> class
/// with the specified catalog and export providers.
/// </summary>
/// <param name="providers">
/// A <see cref="Array"/> of <see cref="ExportProvider"/> objects which provide
/// the <see cref="CompositionContainer"/> access to <see cref="Export"/> objects,
/// or <see langword="null"/> to set <see cref="Providers"/> to an empty
/// <see cref="ReadOnlyCollection{T}"/>.
/// </param>
/// <exception cref="ArgumentException">
/// <paramref name="providers"/> contains an element that is <see langword="null"/>.
/// </exception>
public CompositionContainer(ComposablePartCatalog catalog, params ExportProvider[] providers):
    this(catalog, false, providers)
{
    // Delegates to the thread-safety overload, defaulting to a non-thread-safe container.
}
/// <summary>
/// Initializes a new instance of the <see cref="CompositionContainer"/> class
/// with the specified catalog and export providers.
/// </summary>
/// <param name="isThreadSafe">
/// <see cref="bool"/> indicates whether container instances are threadsafe.
/// </param>
/// <param name="providers">
/// A <see cref="Array"/> of <see cref="ExportProvider"/> objects which provide
/// the <see cref="CompositionContainer"/> access to <see cref="Export"/> objects,
/// or <see langword="null"/> to set <see cref="Providers"/> to an empty
/// <see cref="ReadOnlyCollection{T}"/>.
/// </param>
/// <exception cref="ArgumentException">
/// <paramref name="providers"/> contains an element that is <see langword="null"/>.
/// </exception>
public CompositionContainer(ComposablePartCatalog catalog, bool isThreadSafe, params ExportProvider[] providers)
    : this(catalog, isThreadSafe ? CompositionOptions.IsThreadSafe : CompositionOptions.Default, providers)
{
    // Maps the legacy bool flag onto the CompositionOptions-based constructor.
}
/// <summary>
/// Initializes a new instance of the <see cref="CompositionContainer"/> class
/// with the specified catalog and export providers.
/// </summary>
/// <param name="compositionOptions">
/// <see cref="CompositionOptions"/> enumeration with flags controlling the composition.
/// </param>
/// <param name="providers">
/// A <see cref="Array"/> of <see cref="ExportProvider"/> objects which provide
/// the <see cref="CompositionContainer"/> access to <see cref="Export"/> objects,
/// or <see langword="null"/> to set <see cref="Providers"/> to an empty
/// <see cref="ReadOnlyCollection{T}"/>.
/// </param>
/// <exception cref="ArgumentException">
/// <paramref name="providers"/> contains an element that is <see langword="null"/>.
/// </exception>
// Primary constructor: wires the part/catalog/ancestor export providers into a single
// root provider. The wiring order below matters (SourceProvider assignments and the
// composition-service export happen before event subscription), so this pass is doc-only.
public CompositionContainer(ComposablePartCatalog catalog, CompositionOptions compositionOptions, params ExportProvider[] providers)
{
    // Guard: reject any flag bits outside the known CompositionOptions set.
    if (compositionOptions > (CompositionOptions.DisableSilentRejection | CompositionOptions.IsThreadSafe | CompositionOptions.ExportCompositionService))
    {
        throw new ArgumentOutOfRangeException("compositionOptions");
    }
    _compositionOptions = compositionOptions;
    // We always create the mutable provider
    _partExportProvider = new ComposablePartExportProvider(compositionOptions);
    _partExportProvider.SourceProvider = this;
    // Create the catalog export provider, only if necessary
    if (catalog != null)
    {
        _catalogExportProvider = new CatalogExportProvider(catalog, compositionOptions);
        _catalogExportProvider.SourceProvider = this;
    }
    // Set the local export provider: aggregate part + catalog providers when both exist,
    // otherwise the part provider alone is the local provider.
    if (_catalogExportProvider != null)
    {
        _localExportProvider = new AggregateExportProvider(_partExportProvider, _catalogExportProvider);
        _disposableLocalExportProvider = _localExportProvider as IDisposable;
    }
    else
    {
        _localExportProvider = _partExportProvider;
    }
    // Set the ancestor export provider, if ancestors are supplied
    if ((providers != null) && (providers.Length > 0))
    {
        // Aggregate ancestors if and only if more than one passed
        if (providers.Length > 1)
        {
            _ancestorExportProvider = new AggregateExportProvider(providers);
            _disposableAncestorExportProvider = _ancestorExportProvider as IDisposable;
        }
        else
        {
            // A single ancestor is used directly; null elements are rejected here
            // (AggregateExportProvider validates the multi-element case).
            if (providers[0] == null)
            {
                throw ExceptionBuilder.CreateContainsNullElement("providers");
            }
            _ancestorExportProvider = providers[0];
        }
    }
    // finally set the root provider
    if (_ancestorExportProvider == null)
    {
        // if no ancestors are passed, the local and the root are the same
        _rootProvider = _localExportProvider;
    }
    else
    {
        // Flatten part provider + optional catalog provider + all ancestors into one
        // aggregate, preserving the query precedence order.
        int exportProviderCount = 1 + ((catalog != null) ? 1 : 0) + ((providers != null) ? providers.Length : 0);
        ExportProvider[] rootProviders = new ExportProvider[exportProviderCount];
        rootProviders[0] = _partExportProvider;
        int customProviderStartIndex = 1;
        if (catalog != null)
        {
            rootProviders[1] = _catalogExportProvider;
            customProviderStartIndex = 2;
        }
        if (providers != null)
        {
            for (int i = 0; i < providers.Length; i++)
            {
                rootProviders[customProviderStartIndex + i] = providers[i];
            }
        }
        _rootProvider = new AggregateExportProvider(rootProviders);
        _disposableRootProvider = _rootProvider as IDisposable;
    }
    //Insert Composition Service
    if(compositionOptions.HasFlag(CompositionOptions.ExportCompositionService))
    {
        this.ComposeExportedValue<ICompositionService>(new CompositionServiceShim(this));
    }
    // Forward change notifications from the root provider through this container.
    _rootProvider.ExportsChanged += OnExportsChangedInternal;
    _rootProvider.ExportsChanging += OnExportsChangingInternal;
    // Defensive copy so later mutation of the caller's array cannot affect Providers.
    _providers = (providers != null) ? new ReadOnlyCollection<ExportProvider>((ExportProvider[])providers.Clone()) : EmptyProviders;
}
/// <summary>
/// Gets the <see cref="CompositionOptions"/> this container was constructed with.
/// </summary>
/// <exception cref="ObjectDisposedException">
/// The <see cref="CompositionContainer"/> has been disposed of.
/// </exception>
internal CompositionOptions CompositionOptions
{
    get
    {
        ThrowIfDisposed();
        return _compositionOptions;
    }
}
/// <summary>
/// Gets the catalog which provides the container access to exports produced
/// from composable parts.
/// </summary>
/// <value>
/// The <see cref="ComposablePartCatalog"/> which provides the
/// <see cref="CompositionContainer"/> access to exports produced from
/// <see cref="ComposablePart"/> objects. The default is <see langword="null"/>.
/// </value>
/// <exception cref="ObjectDisposedException">
/// The <see cref="CompositionContainer"/> has been disposed of.
/// </exception>
public ComposablePartCatalog Catalog
{
    get
    {
        ThrowIfDisposed();
        // The catalog export provider only exists when a catalog was supplied
        // to the constructor, so it may legitimately be null here.
        return (_catalogExportProvider != null) ? _catalogExportProvider.Catalog : null;
    }
}
/// <summary>
/// Gets the <see cref="CatalogExportProvider"/> that backs <see cref="Catalog"/>,
/// or <see langword="null"/> when no catalog was supplied at construction.
/// </summary>
/// <exception cref="ObjectDisposedException">
/// The <see cref="CompositionContainer"/> has been disposed of.
/// </exception>
internal CatalogExportProvider CatalogExportProvider
{
    get
    {
        ThrowIfDisposed();
        return _catalogExportProvider;
    }
}
/// <summary>
/// Gets the export providers which provide the container access to additional exports.
/// </summary>
/// <value>
/// A <see cref="ReadOnlyCollection{T}"/> of <see cref="ExportProvider"/> objects
/// which provide the <see cref="CompositionContainer"/> access to additional
/// <see cref="Export"/> objects. The default is an empty
/// <see cref="ReadOnlyCollection{T}"/>.
/// </value>
/// <exception cref="ObjectDisposedException">
/// The <see cref="CompositionContainer"/> has been disposed of.
/// </exception>
public ReadOnlyCollection<ExportProvider> Providers
{
    get
    {
        ThrowIfDisposed();
        Contract.Ensures(Contract.Result<ReadOnlyCollection<ExportProvider>>() != null);
        // _providers is an immutable snapshot: the constructor clones the
        // caller's array before wrapping it, so this is never null and never
        // observes later mutation of the original array.
        return _providers;
    }
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // Standard dispose pattern: delegate to Dispose(bool) and suppress
    // finalization since all cleanup happens here.
    Dispose(true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources
/// </summary>
/// <param name="disposing"><c>true</c> to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        if (!_isDisposed)
        {
            // Snapshot every disposable member under the lock and null out the
            // fields, then run the actual teardown OUTSIDE the lock so that
            // disposing the providers cannot deadlock against other threads
            // entering this container.
            ExportProvider rootProvider = null;
            IDisposable disposableAncestorExportProvider = null;
            IDisposable disposableLocalExportProvider = null;
            IDisposable disposableRootProvider = null;
            ComposablePartExportProvider partExportProvider = null;
            CatalogExportProvider catalogExportProvider = null;
            ImportEngine importEngine = null;
            lock(_lock)
            {
                // Double-check: another thread may have disposed between the
                // unlocked check above and acquiring the lock.
                if (!_isDisposed)
                {
                    rootProvider = _rootProvider;
                    _rootProvider = null;
                    disposableRootProvider = _disposableRootProvider;
                    _disposableRootProvider = null;
                    disposableLocalExportProvider = _disposableLocalExportProvider ;
                    _disposableLocalExportProvider = null;
                    _localExportProvider = null;
                    disposableAncestorExportProvider = _disposableAncestorExportProvider;
                    _disposableAncestorExportProvider = null;
                    _ancestorExportProvider = null;
                    partExportProvider = _partExportProvider;
                    _partExportProvider = null;
                    catalogExportProvider = _catalogExportProvider;
                    _catalogExportProvider = null;
                    importEngine = _importEngine;
                    _importEngine = null;
                    _isDisposed = true;
                }
            }
            // Unhook our forwarding handlers before disposing the provider chain
            // so no change notifications arrive on a disposed container.
            if (rootProvider != null)
            {
                rootProvider.ExportsChanged -= OnExportsChangedInternal;
                rootProvider.ExportsChanging -= OnExportsChangingInternal;
            }
            // Dispose from the outermost aggregate inward: root, then the
            // ancestor/local aggregates, then the individual providers, and
            // finally the import engine.
            if (disposableRootProvider != null)
            {
                disposableRootProvider.Dispose();
            }
            if (disposableAncestorExportProvider != null)
            {
                disposableAncestorExportProvider.Dispose();
            }
            if (disposableLocalExportProvider != null)
            {
                disposableLocalExportProvider.Dispose();
            }
            if (catalogExportProvider != null)
            {
                catalogExportProvider.Dispose();
            }
            if (partExportProvider != null)
            {
                partExportProvider.Dispose();
            }
            if (importEngine != null)
            {
                importEngine.Dispose();
            }
        }
    }
}
/// <summary>
/// Applies the changes described by the specified <see cref="CompositionBatch"/> to
/// the container by forwarding it to the mutable part export provider.
/// </summary>
/// <param name="batch">The batch of part additions/removals to compose.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="batch"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// The <see cref="CompositionContainer"/> has been disposed of.
/// </exception>
public void Compose(CompositionBatch batch)
{
    Requires.NotNull(batch, nameof(batch));
    ThrowIfDisposed();
    _partExportProvider.Compose(batch);
}
/// <summary>
/// Releases the <see cref="Export"/> from the <see cref="CompositionContainer"/>. The exact
/// behavior depends on the <see cref="ExportProvider"/> that produced the
/// <see cref="Export"/> instance; as a general rule, non-shared exports should be released
/// early, causing them to be detached from the container.
///
/// For example, the <see cref="CatalogExportProvider"/> only releases an <see cref="Export"/>
/// whose <see cref="ComposablePart"/> was constructed under a
/// <see cref="CreationPolicy.NonShared"/> context; releasing walks the export's dependency
/// chain, detaches references from the container and disposes the parts as needed. An
/// <see cref="Export"/> constructed under <see cref="CreationPolicy.Shared"/> is left alone,
/// since other requestors may still be using it; shared exports are only detached when the
/// container itself is disposed.
/// </summary>
/// <param name="export"><see cref="Export"/> that needs to be released.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="export"/> is <see langword="null"/>.
/// </exception>
[SuppressMessage("Microsoft.Performance", "CA1822")]
public void ReleaseExport(Export export)
{
    Requires.NotNull(export, nameof(export));
    // Disposing the export (when it supports it) is what triggers the owning
    // provider's release logic; non-disposable exports are a no-op.
    (export as IDisposable)?.Dispose();
}
/// <summary>
/// Releases the <see cref="Lazy{T}"/> from the <see cref="CompositionContainer"/>. The exact
/// behavior depends on the <see cref="ExportProvider"/> that produced the underlying
/// <see cref="Export"/> instance; as a general rule, non-shared exports should be released
/// early, causing them to be detached from the container.
///
/// For example, the <see cref="CatalogExportProvider"/> only releases a <see cref="Lazy{T}"/>
/// whose <see cref="ComposablePart"/> was constructed under a
/// <see cref="CreationPolicy.NonShared"/> context; releasing walks the export's dependency
/// chain, detaches references from the container and disposes the parts as needed. An export
/// constructed under <see cref="CreationPolicy.Shared"/> is left alone, since other
/// requestors may still be using it; shared exports are only detached when the container
/// itself is disposed.
/// </summary>
/// <param name="export"><see cref="Export"/> that needs to be released.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="export"/> is <see langword="null"/>.
/// </exception>
[SuppressMessage("Microsoft.Performance", "CA1822")]
public void ReleaseExport<T>(Lazy<T> export)
{
    Requires.NotNull(export, nameof(export));
    // Disposing the lazy export (when it supports it) is what triggers the
    // owning provider's release logic; non-disposable exports are a no-op.
    (export as IDisposable)?.Dispose();
}
/// <summary>
/// Releases a set of <see cref="Export"/>s from the <see cref="CompositionContainer"/>
/// by releasing each element in turn. See also <see cref="ReleaseExport"/>.
/// </summary>
/// <param name="exports"><see cref="Export"/>s that need to be released.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="exports"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="ArgumentException">
/// <paramref name="exports"/> contains an element that is <see langword="null"/>.
/// </exception>
public void ReleaseExports(IEnumerable<Export> exports)
{
    Requires.NotNullOrNullElements(exports, "exports");
    // Each export is released individually; per-export semantics are
    // documented on ReleaseExport.
    foreach (var item in exports)
    {
        ReleaseExport(item);
    }
}
/// <summary>
/// Releases a set of lazy <see cref="Export"/>s from the <see cref="CompositionContainer"/>
/// by releasing each element in turn. See also <see cref="ReleaseExport"/>.
/// </summary>
/// <param name="exports"><see cref="Export"/>s that need to be released.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="exports"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="ArgumentException">
/// <paramref name="exports"/> contains an element that is <see langword="null"/>.
/// </exception>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures")]
public void ReleaseExports<T>(IEnumerable<Lazy<T>> exports)
{
    Requires.NotNullOrNullElements(exports, "exports");
    // Each lazy export is released individually; per-export semantics are
    // documented on ReleaseExport.
    foreach (var item in exports)
    {
        ReleaseExport(item);
    }
}
/// <summary>
/// Releases a set of lazy <see cref="Export"/>s with metadata from the
/// <see cref="CompositionContainer"/> by releasing each element in turn.
/// See also <see cref="ReleaseExport"/>.
/// </summary>
/// <param name="exports"><see cref="Export"/>s that need to be released.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="exports"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="ArgumentException">
/// <paramref name="exports"/> contains an element that is <see langword="null"/>.
/// </exception>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures")]
public void ReleaseExports<T, TMetadataView>(IEnumerable<Lazy<T, TMetadataView>> exports)
{
    Requires.NotNullOrNullElements(exports, "exports");
    // Each lazy export is released individually; per-export semantics are
    // documented on ReleaseExport.
    foreach (var item in exports)
    {
        ReleaseExport(item);
    }
}
/// <summary>
/// Sets the imports of the specified composable part exactly once and they will not
/// ever be recomposed.
/// </summary>
/// <param name="part">
/// The <see cref="ComposablePart"/> to set the imports.
/// </param>
/// <exception cref="ArgumentNullException">
/// <paramref name="part"/> is <see langword="null"/>.
/// </exception>
/// <exception cref="CompositionException">
/// An error occurred during composition. <see cref="CompositionException.Errors"/> will
/// contain a collection of errors that occurred.
/// </exception>
/// <exception cref="ObjectDisposedException">
/// The <see cref="ICompositionService"/> has been disposed of.
/// </exception>
public void SatisfyImportsOnce(ComposablePart part)
{
    ThrowIfDisposed();
    // Lazily create the shared ImportEngine with a double-checked-locking
    // pattern: a candidate engine is built outside the lock, published only
    // if no other thread won the race, and the losing candidate is disposed.
    if (_importEngine == null)
    {
        ImportEngine importEngine = new ImportEngine(this, _compositionOptions);
        lock(_lock)
        {
            if (_importEngine == null)
            {
                // Make the engine's construction fully visible to other
                // threads before the field is published.
                Thread.MemoryBarrier();
                _importEngine = importEngine;
                // Ownership transferred to the field; clear the local so the
                // dispose below is skipped.
                importEngine = null;
            }
        }
        // We lost the race: discard the engine we speculatively built.
        if(importEngine != null)
        {
            importEngine.Dispose();
        }
    }
    _importEngine.SatisfyImportsOnce(part);
}
// Forwards the root provider's ExportsChanged notification into this
// container's own OnExportsChanged hook (sender is intentionally ignored).
internal void OnExportsChangedInternal(object sender, ExportsChangeEventArgs e)
{
    OnExportsChanged(e);
}
// Forwards the root provider's ExportsChanging notification into this
// container's own OnExportsChanging hook (sender is intentionally ignored).
internal void OnExportsChangingInternal(object sender, ExportsChangeEventArgs e)
{
    OnExportsChanging(e);
}
/// <summary>
/// Returns all exports that match the conditions of the specified import.
/// </summary>
/// <param name="definition">The <see cref="ImportDefinition"/> that defines the conditions of the
/// <see cref="Export"/> to get.</param>
/// <param name="atomicComposition">The transactional composition scope, if any.</param>
/// <returns></returns>
/// <result>
/// An <see cref="IEnumerable{T}"/> of <see cref="Export"/> objects that match
/// the conditions defined by <see cref="ImportDefinition"/>, if found; otherwise, an
/// empty <see cref="IEnumerable{T}"/>.
/// </result>
/// <remarks>
/// <note type="inheritinfo">
/// The implementers should not treat the cardinality-related mismatches as errors, and are not
/// expected to throw exceptions in those cases.
/// For instance, if the import requests exactly one export and the provider has no matching exports or more than one,
/// it should return an empty <see cref="IEnumerable{T}"/> of <see cref="Export"/>.
/// </note>
/// </remarks>
protected override IEnumerable<Export> GetExportsCore(ImportDefinition definition, AtomicComposition atomicComposition)
{
    ThrowIfDisposed();
    IEnumerable<Export> exports = null;
    object source;
    // An import may carry an ImportSource hint in its metadata; absent that,
    // the full provider chain is consulted.
    if(!definition.Metadata.TryGetValue(CompositionConstants.ImportSourceMetadataName, out source))
    {
        source = ImportSource.Any;
    }
    switch((ImportSource)source)
    {
        case ImportSource.Any:
            // Root provider: parts + catalog + ancestors.
            Assumes.NotNull(_rootProvider);
            _rootProvider.TryGetExports(definition, atomicComposition, out exports);
            break;
        case ImportSource.Local:
            // Local provider only; the source metadata is stripped before
            // forwarding the definition.
            Assumes.NotNull(_localExportProvider);
            _localExportProvider.TryGetExports(definition.RemoveImportSource(), atomicComposition, out exports);
            break;
        case ImportSource.NonLocal:
            // Ancestors only; when none were supplied, exports remains null.
            if(_ancestorExportProvider != null)
            {
                _ancestorExportProvider.TryGetExports(definition.RemoveImportSource(), atomicComposition, out exports);
            }
            break;
    }
    return exports;
}
// Guard used by every public member: throws (via ExceptionBuilder) once the
// container has been disposed.
[DebuggerStepThrough]
private void ThrowIfDisposed()
{
    if (_isDisposed)
    {
        throw ExceptionBuilder.CreateObjectDisposed(this);
    }
}
}
}
| |
using System;
using System.IO;
using System.Text;
using System.Collections.Generic;
using System.Globalization;
using System.Security.Cryptography;
namespace Moscrif.IDE.Iface
{
/// <summary>
/// Central catalogue of filesystem locations used by the IDE: application-relative
/// resource/configuration directories plus lazily created per-user and per-session
/// directories. Several getters create directories on first access as a side effect.
/// </summary>
public class Paths
{
    // Base directory of the running application; all application-relative
    // paths below are built from it.
    private string appPath = System.AppDomain.CurrentDomain.BaseDirectory;

    /// <summary>Application base directory.</summary>
    public string AppPath
    {
        get { return appPath; }
    }

    // NOTE(review): "Confing" looks like a typo for "Config", but the member
    // name is part of the public surface, so it is left as-is.
    /// <summary>Configuration directory ("cfg") under the application path.</summary>
    public string ConfingDir
    {
        get { return System.IO.Path.Combine(appPath, "cfg"); }
    }

    /// <summary>Editor style definitions directory ("cfg/styles").</summary>
    public string StylesDir
    {
        get { return System.IO.Path.Combine(ConfingDir, "styles"); }
    }

    /// <summary>UI theme directory ("cfg/themes").</summary>
    public string ThemesDir
    {
        get { return System.IO.Path.Combine(ConfingDir, "themes"); }
    }

    /// <summary>Localization files directory ("cfg/languages").</summary>
    public string LanguageDir
    {
        get { return System.IO.Path.Combine(ConfingDir, "languages"); }
    }

    /// <summary>Application resources directory.</summary>
    public string ResDir
    {
        get { return System.IO.Path.Combine(appPath, "resources"); }
    }

    /// <summary>Bundled sample projects directory.</summary>
    public string SampleDir
    {
        get { return System.IO.Path.Combine(appPath, "samples"); }
    }

    // Application-relative "temp" folder; distinct from the per-session
    // TempDir below.
    public string Temp
    {
        get { return System.IO.Path.Combine(appPath, "temp"); }
    }

    /// <summary>Path to the bundled default GTK theme rc file.</summary>
    public string DefaultTheme
    {
        get {
            string path = System.IO.Path.Combine(ThemesDir, "Moscrif");
            path = System.IO.Path.Combine(path, "gtk-2.0");
            path = System.IO.Path.Combine(path, "gtkrc");
            return path;
        }
    }

    // Cached workspace path; computed on first access of WorkDir.
    private string workDir;

    /// <summary>
    /// Workspace directory, computed once and cached. Defaults to
    /// "MoscrifWorkspace" under the user's personal folder; on Windows, when
    /// that folder contains a space, a drive-root "MoscrifWorkspace" is used
    /// instead (apparently the toolchain cannot handle paths with spaces --
    /// TODO confirm).
    /// </summary>
    public string WorkDir
    {
        get {
            if (String.IsNullOrEmpty(workDir)){
                string userDir =Environment.GetFolderPath(Environment.SpecialFolder.Personal);
                if(userDir.Contains(" ")){
                    if(MainClass.Platform.IsWindows){
                        string root =System.IO.Path.GetPathRoot(userDir);
                        userDir = root+"MoscrifWorkspace";
                        //userDir ="c:/MoscrifWorkspace/";
                        Tool.Logger.LogDebugInfo("Default Workspace dir contains space. Create alternative in " +userDir);
                        workDir = userDir;
                        return workDir;
                    }
                }
                workDir = System.IO.Path.Combine(userDir,"MoscrifWorkspace");
            }
            return workDir;
        }
    }

    /* private string homeDir;
    public string HomeDir
    {
        get {
            if (String.IsNullOrEmpty(homeDir)){
                //string userDir = Environment.GetEnvironmentVariable("HOME");
                string userDir =Environment.GetFolderPath(Environment.SpecialFolder.Personal);
                homeDir = System.IO.Path.Combine(userDir,"Moscrif");
                if (!Directory.Exists(homeDir))
                    Directory.CreateDirectory(homeDir);
            }
            //Console.WriteLine("workDir 1-> {0}", workDir);
            return homeDir;
        }
    }*/

    // Cached settings path; computed on first access of SettingDir.
    private string settingDir;

    /// <summary>
    /// Per-user settings directory (".Moscrif" under the personal folder).
    /// Created on first access if missing.
    /// </summary>
    public string SettingDir
    {
        get {
            if (String.IsNullOrEmpty(settingDir)){
                string userDir =Environment.GetFolderPath(Environment.SpecialFolder.Personal);
                settingDir = System.IO.Path.Combine(userDir,".Moscrif");
                if (!Directory.Exists(settingDir))
                    Directory.CreateDirectory(settingDir);
            }
            return settingDir;
        }
    }

    // Cached banner-cache path; computed on first access of BannerCache.
    private string bannerCache;

    /// <summary>
    /// Banner image cache (".Banners" under the settings directory).
    /// Created on first access if missing.
    /// </summary>
    public string BannerCache
    {
        get {
            if (String.IsNullOrEmpty(bannerCache)){
                bannerCache = System.IO.Path.Combine(SettingDir,".Banners");
                if (!Directory.Exists(bannerCache))
                    Directory.CreateDirectory(bannerCache);
            }
            return bannerCache;
        }
    }

    // Cached per-session temp path; computed on first access of TempDir.
    private string tempDir;

    /// <summary>
    /// Per-session temporary directory, named "Moscrif-&lt;timestamp&gt;" under the
    /// system temp path (or a drive-root "Temp\Moscrif" on Windows when the
    /// system temp path contains a space). Created on first access; returns
    /// <see langword="null"/> if creation fails, and that failure is cached.
    /// </summary>
    public string TempDir
    {
        get {
            if (String.IsNullOrEmpty(tempDir)){
                // Timestamped session name keeps concurrent IDE instances from
                // sharing a temp directory.
                string session = "Moscrif-"+DateTime.Now.ToString("yyyyMMddHHmmss");
                Tool.Logger.Log("session ->"+session);
                string tempDirectory ="";
                string userDir =System.IO.Path.GetTempPath(); //Environment.GetFolderPath(Environment.SpecialFolder.);
                if(userDir.Contains(" ")){
                    if(MainClass.Platform.IsWindows){
                        string root =System.IO.Path.GetPathRoot(userDir);
                        userDir =root+"Temp";// "c:/Temp/Moscrif/";
                        userDir = System.IO.Path.Combine(userDir,"Moscrif");
                        Tool.Logger.LogDebugInfo("Temp dir contains space. Create alternative temp in " +userDir);
                    }
                }
                tempDirectory = System.IO.Path.Combine(userDir, session);
                try {
                    if (!Directory.Exists(tempDirectory)){
                        Directory.CreateDirectory(tempDirectory);
                        Tool.Logger.LogDebugInfo("Create Temp Dir ->"+tempDirectory);
                    }
                    tempDir =tempDirectory;
                } catch{
                    // Best-effort: directory creation failures are swallowed
                    // and surfaced as a null TempDir.
                    tempDir = null;
                }
            }
            return tempDir;
        }
    }

    // NOTE(review): this builds on the app-relative Temp, while the two
    // properties below build on the per-session TempDir -- confirm the
    // inconsistency is intentional.
    public string TempOutputDir
    {
        get { return System.IO.Path.Combine(Temp, "output"); }
    }

    /// <summary>Precompilation output under the per-session temp directory.</summary>
    public string TempPrecompileDir
    {
        get { return System.IO.Path.Combine(TempDir, "precompile"); }
    }

    /// <summary>Publish output under the per-session temp directory.</summary>
    public string TempPublishDir
    {
        get { return System.IO.Path.Combine(TempDir, "publish"); }
    }

    /// <summary>Project template directory ("cfg/templates").</summary>
    public string TemplateDir {
        get {
            return System.IO.Path.Combine (ConfingDir, "templates");
        }
    }

    /// <summary>File template directory ("cfg/filetemplates").</summary>
    public string FileTemplateDir {
        get {
            return System.IO.Path.Combine (ConfingDir, "filetemplates");
        }
    }

    /// <summary>Display definitions directory ("cfg/displays").</summary>
    public string DisplayDir
    {
        get { return System.IO.Path.Combine(ConfingDir, "displays"); }
    }
}
}
| |
#region Using directives
using System;
using System.Collections;
using System.Text.RegularExpressions;
using System.Globalization;
using System.Threading;
using System.Reflection;
using System.ComponentModel;
using System.ComponentModel.Design;
using System.ComponentModel.Design.Serialization;
#endregion
namespace WinHtmlEditor
{
#region HtmlFontSize enumeration
/// <summary>
/// Enum used to modify the font size.
/// Values follow the HTML 1-7 font size scale; the point sizes in the
/// comments are the conventional mapping (see HtmlFontConversion).
/// Default (0) means no explicit size is applied.
/// </summary>
public enum HtmlFontSize
{
    Default = 0,
    xxSmall = 1, // 8 points
    xSmall = 2, // 10 points
    Small = 3, // 12 points
    Medium = 4, // 14 points
    Large = 5, // 18 points
    xLarge = 6, // 24 points
    xxLarge = 7 // 36 points
} //HtmlFontSize
#endregion
#region HtmlFontProperty struct
/// <summary>
/// Struct used to define a Html Font.
/// Supports Name, Size, Bold, Italic, Underline, Strikeout, Subscript and Superscript.
/// A specialized TypeConverter (<see cref="HtmlFontPropertyConverter"/>) provides designer support.
/// If Name is empty or null the struct is considered null (see <see cref="IsNull"/>).
/// </summary>
[Serializable]
[TypeConverter(typeof(HtmlFontPropertyConverter))]
public struct HtmlFontProperty
{
    // backing fields for the font attributes
    private string _name;
    private HtmlFontSize _size;
    private bool _bold;
    private bool _italic;
    private bool _underline;
    private bool _strikeout;
    private bool _subscript;
    private bool _superscript;

    /// <summary>
    /// Property for the Name of the Font
    /// </summary>
    [Description("The Name of the Font")]
    public string Name
    {
        get { return _name; }
        set { _name = value; }
    } //Name

    /// <summary>
    /// Property for the Size of the Font
    /// </summary>
    [Description("The Size of the Font")]
    public HtmlFontSize Size
    {
        get { return _size; }
        set { _size = value; }
    } //Size

    /// <summary>
    /// Property for the Bold Indication of the Font
    /// </summary>
    [Description("Indicates if the font is Bold")]
    public bool Bold
    {
        get { return _bold; }
        set { _bold = value; }
    } //Bold

    /// <summary>
    /// Property for the Italics Indication of the Font
    /// </summary>
    [Description("Indicates if the font is Italic")]
    public bool Italic
    {
        get { return _italic; }
        set { _italic = value; }
    } //Italic

    /// <summary>
    /// Property for the Underline Indication of the Font
    /// </summary>
    [Description("Indicates if the font is Underline")]
    public bool Underline
    {
        get { return _underline; }
        set { _underline = value; }
    } //Underline

    /// <summary>
    /// Property for the Strikeout Indication of the Font
    /// </summary>
    [Description("Indicates if the font is Strikeout")]
    public bool Strikeout
    {
        get { return _strikeout; }
        set { _strikeout = value; }
    } //Strikeout

    /// <summary>
    /// Property for the Subscript Indication of the Font
    /// </summary>
    [Description("Indicates if the font is Subscript")]
    public bool Subscript
    {
        get { return _subscript; }
        set { _subscript = value; }
    } //Subscript

    /// <summary>
    /// Property for the Superscript Indication of the Font
    /// </summary>
    [Description("Indicates if the font is Superscript")]
    public bool Superscript
    {
        get { return _superscript; }
        set { _superscript = value; }
    } //Superscript

    /// <summary>
    /// Public constructor for name only; all other attributes default.
    /// </summary>
    public HtmlFontProperty(string name)
        : this(name, HtmlFontSize.Default)
    {
    } //HtmlFontProperty

    /// <summary>
    /// Public constructor for name and size only; all flags default to false.
    /// </summary>
    public HtmlFontProperty(string name, HtmlFontSize size)
        : this(name, size, false, false, false)
    {
    } //HtmlFontProperty

    /// <summary>
    /// Public constructor for all standard attributes.
    /// </summary>
    public HtmlFontProperty(string name, HtmlFontSize size, bool bold, bool italic, bool underline)
        : this(name, size, bold, italic, underline, false, false, false)
    {
    } //HtmlFontProperty

    /// <summary>
    /// Public constructor for all attributes; the designated constructor the
    /// others chain to.
    /// </summary>
    public HtmlFontProperty(string name, HtmlFontSize size, bool bold, bool italic, bool underline, bool strikeout, bool subscript, bool superscript)
    {
        _name = name;
        _size = size;
        _bold = bold;
        _italic = italic;
        _underline = underline;
        _strikeout = strikeout;
        _subscript = subscript;
        _superscript = superscript;
    } //HtmlFontProperty

    /// <summary>
    /// Public constructor given a system Font. Subscript/superscript have no
    /// System.Drawing.Font equivalent and default to false.
    /// </summary>
    public HtmlFontProperty(System.Drawing.Font font)
        : this(font.Name, HtmlFontConversion.FontSizeToHtml(font.SizeInPoints),
               font.Bold, font.Italic, font.Underline, font.Strikeout, false, false)
    {
    } //HtmlFontProperty

    /// <summary>
    /// Public method to convert the font into a readable format.
    /// Used by the designer to display the font name.
    /// </summary>
    public override string ToString()
    {
        return string.Format("{0}, {1}", Name, Size);
    } //ToString

    /// <summary>
    /// Compares two Html Fonts for equality.
    /// Equality operators are not defined (design-time issue with override of Equals).
    /// Null fonts (see <see cref="IsNull"/>) never compare equal, even to each other.
    /// </summary>
    public static bool IsEqual(HtmlFontProperty font1, HtmlFontProperty font2)
    {
        // null fonts never compare equal
        if (HtmlFontProperty.IsNull(font1) || HtmlFontProperty.IsNull(font2))
        {
            return false;
        }
        // all attributes must match
        return font1.Name == font2.Name &&
               font1.Size == font2.Size &&
               font1.Bold == font2.Bold &&
               font1.Italic == font2.Italic &&
               font1.Underline == font2.Underline &&
               font1.Strikeout == font2.Strikeout &&
               font1.Subscript == font2.Subscript &&
               font1.Superscript == font2.Superscript;
    } //IsEquals

    /// <summary>
    /// Compares two Html Fonts for inequality; negation of <see cref="IsEqual"/>.
    /// </summary>
    public static bool IsNotEqual(HtmlFontProperty font1, HtmlFontProperty font2)
    {
        return (!HtmlFontProperty.IsEqual(font1, font2));
    } //IsNotEqual

    /// <summary>
    /// Based on a font name being null or whitespace the font is considered null.
    /// The default constructor yields a null font.
    /// </summary>
    public static bool IsNull(HtmlFontProperty font)
    {
        return (font.Name == null || font.Name.Trim() == string.Empty);
    } //IsNull

    /// <summary>
    /// Negation of <see cref="IsNull"/>.
    /// </summary>
    public static bool IsNotNull(HtmlFontProperty font)
    {
        return (!HtmlFontProperty.IsNull(font));
    } //IsNull
} // HtmlFontProperty
#endregion
#region HtmlFontConversion utilities
/// <summary>
/// Utility class to perform font attribute conversions,
/// taking data to and from the expected Html format.
/// </summary>
public class HtmlFontConversion
{
    /// <summary>
    /// Returns the correct CSS size description for a HtmlFontSize.
    /// Returns an empty string for HtmlFontSize.Default so the font size
    /// attribute is blanked out when no explicit size is set.
    /// </summary>
    public static string HtmlFontSizeString(HtmlFontSize fontSize)
    {
        // blank is the default: ensures the size is cleared when not set
        string size = string.Empty;
        switch (fontSize)
        {
            case HtmlFontSize.xxSmall:
                size = "xx-small";
                break;
            case HtmlFontSize.xSmall:
                size = "x-small";
                break;
            case HtmlFontSize.Small:
                size = "small";
                break;
            case HtmlFontSize.Medium:
                size = "medium";
                break;
            case HtmlFontSize.Large:
                size = "large";
                break;
            case HtmlFontSize.xLarge:
                size = "x-large";
                break;
            case HtmlFontSize.xxLarge:
                size = "xx-large";
                break;
            case HtmlFontSize.Default:
                size = string.Empty; //small
                break;
        }
        return size;
    } //HtmlFontSizeString

    /// <summary>
    /// Returns the correct weight description for the bold attribute.
    /// </summary>
    public static string HtmlFontBoldString(bool fontBold)
    {
        return (fontBold?"Bold":"Normal");
    } //HtmlFontBoldString

    /// <summary>
    /// Returns the correct style description for the italic attribute.
    /// </summary>
    public static string HtmlFontItalicString(bool fontItalic)
    {
        return (fontItalic?"Italic":"Normal");
    } //HtmlFontItalicString

    /// <summary>
    /// Determines the HTML font size (1-7) given a point size.
    /// Mapping: 1:8pt, 2:10pt, 3:12pt, 4:14pt, 5:18pt, 6:24pt, 7:36pt.
    /// </summary>
    public static HtmlFontSize FontSizeToHtml(float fontSize)
    {
        int calcFont = 0;
        if (fontSize < 10) calcFont = 1;      // 8pt
        else if (fontSize < 12) calcFont = 2; // 10pt
        else if (fontSize < 14) calcFont = 3; // 12pt
        else if (fontSize < 18) calcFont = 4; // 14pt
        else if (fontSize < 24) calcFont = 5; // 18pt (comment fixed; was mislabeled 24pt)
        else if (fontSize < 36) calcFont = 6; // 24pt (comment fixed; was mislabeled 36pt)
        else calcFont = 7;                    // 36pt
        return (HtmlFontSize)calcFont;
    } //FontSizeToHtml

    /// <summary>
    /// Determines the font point size given the html font size.
    /// </summary>
    public static float FontSizeFromHtml(HtmlFontSize fontSize)
    {
        return HtmlFontConversion.FontSizeFromHtml((int)fontSize);
    } //FontSizeFromHtml

    /// <summary>
    /// Determines the font point size given the html int size.
    /// Mapping: 1:8pt, 2:10pt, 3:12pt, 4:14pt, 5:18pt, 6:24pt, 7:36pt;
    /// any other value yields 12pt.
    /// </summary>
    public static float FontSizeFromHtml(int fontSize)
    {
        float calcFont = 0;
        switch (fontSize)
        {
            case 1:
                calcFont = 8F;
                break;
            case 2:
                calcFont = 10F;
                break;
            case 3:
                calcFont = 12F;
                break;
            case 4:
                calcFont = 14F;
                break;
            case 5:
                calcFont = 18F;
                break;
            case 6:
                calcFont = 24F;
                break;
            case 7:
                calcFont = 36F;
                break;
            default:
                calcFont = 12F;
                break;
        }
        return calcFont;
    } //FontSizeFromHtml

    /// <summary>
    /// Determines the HtmlFontSize from a style attribute.
    /// Currently assumes the value is a fixed point size; relative and
    /// absolute keywords are not handled and fall back to Default.
    /// </summary>
    public static HtmlFontSize StyleSizeToHtml(string sizeDesc)
    {
        float size;
        try
        {
            // strip everything except digits, '|' and '.' (the '|' inside the
            // character class is a literal, preserved for compatibility)
            size = Single.Parse(Regex.Replace(sizeDesc, @"[^\d|\.]", ""));
        }
        catch (Exception)
        {
            // unparsable (or null) input: zero maps to HtmlFontSize.Default
            size = 0;
        }
        return HtmlFontConversion.FontSizeToHtml(size);
    } //StyleSizeToHtml

    /// <summary>
    /// Determines if the style attribute is for Bold
    /// </summary>
    public static bool IsStyleBold(string style)
    {
        return Regex.IsMatch(style, "bold|bolder|700|800|900", RegexOptions.IgnoreCase);
    } //IsStyleBold

    /// <summary>
    /// Determines if the style attribute is for Italic
    /// </summary>
    public static bool IsStyleItalic(string style)
    {
        // BUG FIX: the pattern was "style|oblique", which never matched
        // "italic" and instead matched the literal word "style".
        return Regex.IsMatch(style, "italic|oblique", RegexOptions.IgnoreCase);
    } //IsStyleItalic
} //HtmlFontConversion
#endregion
#region HtmlFontPropertyConverter class
/// <summary>
/// Expandable object converter for the HtmlFontProperty.
/// Allows it to be viewable from the property browser.
/// String format based on "Name, FontSize".
/// </summary>
public class HtmlFontPropertyConverter : ExpandableObjectConverter
{
    // constants used for the property names
    private const string PROP_NAME = "Name";
    private const string PROP_SIZE = "Size";
    private const string PROP_BOLD = "Bold";
    private const string PROP_ITALIC = "Italic";
    private const string PROP_UNDERLINE = "Underline";
    private const string PROP_STRIKEOUT = "Strikeout";
    private const string PROP_SUBSCRIPT = "Subscript";
    private const string PROP_SUPERSCRIPT = "Superscript";
    // regular expression used to parse "Name, Size" strings
    private const string FONT_PARSE_EXPRESSION = @"^(?<name>(\w| )+)((\s*,\s*)?)(?<size>\w*)";
    private const string FONT_PARSE_NAME = @"${name}";
    private const string FONT_PARSE_SIZE = @"${size}";

    /// <summary>
    /// Allows expansion sub property change to have string updated
    /// </summary>
    public override bool GetCreateInstanceSupported(ITypeDescriptorContext context)
    {
        // always return a new instance
        return true;
    } //GetCreateInstanceSupported

    /// <summary>
    /// Creates a new HtmlFontProperty from a series of values
    /// </summary>
    public override object CreateInstance(ITypeDescriptorContext context, IDictionary values)
    {
        // obtain the HtmlFontProperty properties
        string name = (string)values[PROP_NAME];
        HtmlFontSize size = (HtmlFontSize)values[PROP_SIZE];
        bool bold = (bool)values[PROP_BOLD];
        bool italic = (bool)values[PROP_ITALIC];
        bool underline = (bool)values[PROP_UNDERLINE];
        bool strikeout = (bool)values[PROP_STRIKEOUT];
        bool subscript = (bool)values[PROP_SUBSCRIPT];
        bool superscript = (bool)values[PROP_SUPERSCRIPT];
        // return the new HtmlFontProperty
        return new HtmlFontProperty(name, size, bold, italic, underline, strikeout, subscript, superscript);
    } //CreateInstance

    /// <summary>
    /// Indicates if a conversion can take place from a HtmlFontProperty
    /// </summary>
    public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
    {
        if (destinationType == typeof(string) || destinationType == typeof(InstanceDescriptor))
        {
            return true;
        }
        else
        {
            return base.CanConvertTo(context, destinationType);
        }
    } //CanConvertTo

    /// <summary>
    /// Performs the conversion from HtmlFontProperty to a string or an
    /// InstanceDescriptor (only)
    /// </summary>
    public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
    {
        // ensure working with the intended type HtmlFontProperty
        if (value is HtmlFontProperty)
        {
            HtmlFontProperty font = (HtmlFontProperty)value;
            if (destinationType == typeof(string))
            {
                return font.ToString();
            }
            if (destinationType == typeof(InstanceDescriptor))
            {
                // build the argument values and types for the 8-argument constructor
                Object[] properties = new Object[8];
                Type[] types = new Type[8];
                // Name property
                properties[0] = font.Name;
                types[0] = typeof(string);
                // Size property
                properties[1] = font.Size;
                types[1] = typeof(HtmlFontSize);
                // Bold property
                properties[2] = font.Bold;
                types[2] = typeof(bool);
                // Italic property
                properties[3] = font.Italic;
                types[3] = typeof(bool);
                // Underline property
                properties[4] = font.Underline;
                types[4] = typeof(bool);
                // Strikeout property
                properties[5] = font.Strikeout;
                types[5] = typeof(bool);
                // Subscript property
                properties[6] = font.Subscript;
                types[6] = typeof(bool);
                // Superscript property
                properties[7] = font.Superscript;
                types[7] = typeof(bool);
                // create the instance constructor to return
                ConstructorInfo ci = typeof(HtmlFontProperty).GetConstructor(types);
                return new InstanceDescriptor(ci, properties);
            }
        }
        // have something other than InstanceDescriptor or string
        return base.ConvertTo(context, culture, value, destinationType);
    } //ConvertTo

    /// <summary>
    /// Indicates if a conversion can take place from a string
    /// </summary>
    public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
    {
        if (sourceType == typeof(string))
        {
            return true;
        }
        else
        {
            return base.CanConvertFrom(context, sourceType);
        }
    } //CanConvertFrom

    /// <summary>
    /// Performs the conversion from string to a HtmlFontProperty (only)
    /// </summary>
    public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
    {
        if (value is string)
        {
            // define a new (null) font property
            string fontString = (string)value;
            HtmlFontProperty font = new HtmlFontProperty(string.Empty); // stray ';;' removed
            try
            {
                // parse the contents of the given string using a regex
                string fontName = string.Empty;
                string fontSize = string.Empty;
                Regex expression = new Regex(FONT_PARSE_EXPRESSION, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.ExplicitCapture);
                Match match = expression.Match(fontString);
                // see if a match was found
                if (match.Success)
                {
                    // extract the name and size elements
                    fontName = match.Result(FONT_PARSE_NAME);
                    fontSize = match.Result(FONT_PARSE_SIZE);
                    // set the fontname, title-cased per the current culture
                    TextInfo text = Thread.CurrentThread.CurrentCulture.TextInfo;
                    font.Name = text.ToTitleCase(fontName);
                    // determine size from given string using Small if blank
                    if (fontSize == string.Empty) fontSize = "Small";
                    font.Size = (HtmlFontSize)Enum.Parse(typeof(HtmlFontSize), fontSize, true);
                }
            }
            catch (Exception)
            {
                // do nothing but ensure font is a null font
                font.Name = string.Empty;
            }
            if (HtmlFontProperty.IsNull(font))
            {
                // error performing the string conversion so throw exception given possible format
                string error = string.Format(@"Cannot convert '{0}' to Type HtmlFontProperty. Format: 'FontName, HtmlSize', Font Size values: {1}", fontString, string.Join(", ", Enum.GetNames(typeof(HtmlFontSize))));
                throw new ArgumentException(error);
            }
            else
            {
                // return the font
                return font;
            }
        }
        else
        {
            return base.ConvertFrom(context, culture, value);
        }
    } //ConvertFrom
} //HtmlFontPropertyConverter
#endregion
}
| |
using System;
using System.Threading.Tasks;
using UIKit;
using Foundation;
using static zsquared.C_MessageBox;
using EventKit;
using zsquared;
namespace vitavol
{
/// <summary>
/// Login screen: collects email/password, performs the login API call, then
/// routes the user to the appropriate screen based on their role.
/// </summary>
public partial class VC_Login : UIViewController
{
    // shared application state, obtained from the AppDelegate in ViewDidLoad
    C_Global Global;

    public VC_Login (IntPtr handle) : base (handle)
    {
    }

    public override void ViewDidLoad()
    {
        base.ViewDidLoad();

        AppDelegate myAppDelegate = (AppDelegate)UIApplication.SharedApplication.Delegate;
        Global = myAppDelegate.Global;

        // keep track of the length of text in the email box, allow login when email and password are long enough
        TB_Email.AddTarget((sender, e) =>
        {
            B_Login.Enabled = (TB_Email.Text.Length > 6) && (TB_Password.Text.Length > 6);
        }, UIControlEvent.EditingChanged);

        // keep track of the length of text in the password box, allow login when email and password are long enough
        TB_Password.AddTarget((sender, e) =>
        {
            B_Login.Enabled = (TB_Email.Text.Length > 6) && (TB_Password.Text.Length > 6);
        }, UIControlEvent.EditingChanged);

        // if the user asks to register, take them to the registration page
        B_Register.TouchUpInside += (sender, e) =>
        {
            PerformSegue("Segue_LoginToRegister", this);
        };

        B_About.TouchUpInside += (sender, e) =>
        {
            Global.ViewCameFrom = E_ViewCameFrom.Login;
            PerformSegue("Segue_LoginToAbout", this);
        };

        B_Back.TouchUpInside += (sender, e) =>
        {
            PerformSegue("Segue_LoginToMain", this);
        };

        // Login is requested; we can only get this when the email and password are long enough
        B_Login.TouchUpInside += (sender, e) =>
        {
            string email = TB_Email.Text;
            string pw = TB_Password.Text;

            // Disable the UI since this is a long running process
            EnableUI(false);
            AI_Spinner.StartAnimating();

            Task.Run(async () =>
            {
                // do the actual login API call on a background thread
                C_IOResult ior = await Global.PerformLogin(email, pw);

                // all UI work (message boxes, segues, spinner) must run on the main thread
                UIApplication.SharedApplication.InvokeOnMainThread(
                    new Action(async () =>
                {
                    if (!ior.Success)
                    {
                        E_MessageBoxResults mbres = await MessageBox(this,
                            "Error",
                            ior.ErrorMessage,
                            E_MessageBoxButtons.Ok);
                        AI_Spinner.StopAnimating();
                        EnableUI(true);
                        return;
                    }

                    C_VitaUser user = ior.User;
                    // if bad name or pass, we get null; otherwise we get a C_VitaUser
                    if (user == null)
                    {
                        E_MessageBoxResults mbres = await MessageBox(this,
                            "Error",
                            "Login failed. Bad email or password",
                            E_MessageBoxButtons.Ok);
                        AI_Spinner.StopAnimating();
                        EnableUI(true);
                        return;
                    }

                    // do the device registration if a new push token is present
                    string deviceTokenUpdated = NSUserDefaults.StandardUserDefaults.StringForKey(AppDelegate.N_PushDeviceTokenUpdated);
                    if (deviceTokenUpdated == "true")
                    {
                        string deviceToken = NSUserDefaults.StandardUserDefaults.StringForKey(AppDelegate.N_PushDeviceToken);
                        C_IOResult iorx = await Global.RegisterNotificationToken(E_Platform.iOS, deviceToken, user.Token);
                        if (iorx.Success)
                            NSUserDefaults.StandardUserDefaults.SetString("false", AppDelegate.N_PushDeviceTokenUpdated);
                    }

                    AI_Spinner.StopAnimating();
                    EnableUI(true);

                    Global.LoggedInUserId = user.id;
                    // NOTE(review): credentials (including the password) are persisted in
                    // plaintext in NSUserDefaults; consider the iOS Keychain instead.
                    NSUserDefaults.StandardUserDefaults.SetString(TB_Email.Text, "email");
                    NSUserDefaults.StandardUserDefaults.SetString(TB_Password.Text, "password");

                    // route by role: site coordinator, volunteer, new user, or error
                    if (user.HasSiteCoordinator)
                    {
                        if (user.SitesCoordinated.Count == 0)
                        {
                            // a site coordinator with no sites
                            E_MessageBoxResults mbres = await MessageBox(this,
                                "No Sites",
                                "Site Coordinator but no Sites assigned.",
                                E_MessageBoxButtons.Ok);
                            Global.SelectedSiteSlug = null;
                            Global.ViewCameFrom = E_ViewCameFrom.Login;
                            PerformSegue("Segue_LoginToSCSites", this);
                        }
                        else if (user.SitesCoordinated.Count == 1)
                        {
                            // exactly one site: go straight to it
                            C_SiteCoordinated sc = user.SitesCoordinated[0];
                            Global.SelectedSiteSlug = sc.Slug;
                            Global.ViewCameFrom = E_ViewCameFrom.Login;
                            Console.WriteLine("[login]: " + Global.SelectedSiteSlug);
                            PerformSegue("Segue_LoginToSCSite", this);
                        }
                        else // manages more than one site
                        {
                            Global.SelectedSiteSlug = null;
                            Global.ViewCameFrom = E_ViewCameFrom.Login;
                            PerformSegue("Segue_LoginToSCSites", this);
                        }
                    }
                    else if (user.HasVolunteer)
                    {
                        PerformSegue("Segue_LoginToVolunteerOptions", this);
                    }
                    else if (user.HasNewUser)
                    {
                        E_MessageBoxResults mbres = await MessageBox(this,
                            "Not Authorized",
                            "Staff has not yet acted on your registration.",
                            E_MessageBoxButtons.Ok);
                    }
                    else
                    {
                        E_MessageBoxResults mbres = await MessageBox(this,
                            "Error",
                            "Authorization failure. Expecting Volunteer, Site Coordinator, or New User",
                            E_MessageBoxButtons.Ok);
                    }
                }));
            });
        }; // end of B_Login lambda

        // set the defaults from the settings
        TB_Email.Text = NSUserDefaults.StandardUserDefaults.StringForKey("email");
        TB_Password.Text = NSUserDefaults.StandardUserDefaults.StringForKey("password");
        B_Login.Enabled = (TB_Email.Text.Length > 6) && (TB_Password.Text.Length > 6);

        // calendarAccess == haveNotAsked, Approved, Refused
        string haveAskedForCalendar = NSUserDefaults.StandardUserDefaults.StringForKey("calendarAccess");
        if ((haveAskedForCalendar == null) || (haveAskedForCalendar == "haveNotAsked"))
        {
            EKEventStore evstore = myAppDelegate.EventStore;
            evstore.RequestAccess(EKEntityType.Event, EventCompletionHandler);
        }
    }

    // records the user's calendar-access decision so we never re-prompt
    private void EventCompletionHandler(bool granted, NSError e)
    {
        string calendar = granted ? "Approved" : "Refused";
        NSUserDefaults.StandardUserDefaults.SetString(calendar, "calendarAccess");
    }

    // enables/disables the interactive controls; login stays disabled until
    // both fields are long enough even when the UI is re-enabled
    private void EnableUI(bool enable)
    {
        TB_Email.Enabled = enable;
        TB_Password.Enabled = enable;
        B_Login.Enabled = enable && (TB_Email.Text.Length > 6) && (TB_Password.Text.Length > 6);
        B_Register.Enabled = enable;
        B_About.Enabled = enable;
    }

    public override void ViewDidAppear(bool animated)
    {
        // fixed: the base lifecycle call was missing; UIKit requires overrides
        // of ViewDidAppear to call through to base.
        base.ViewDidAppear(animated);

        // set the standard background color
        View.BackgroundColor = C_Common.StandardBackground;
    }
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Linq;
using System.Text;
namespace EduHub.Data.Entities
{
/// <summary>
/// Departments Data Set
/// </summary>
/// <remarks>
/// Generated code: provides lazy lookup indexes, CSV deserialization mapping,
/// delta merging, and SQL Server integration for the PD (Departments) entity.
/// </remarks>
[GeneratedCode("EduHub Data", "0.9")]
public sealed partial class PDDataSet : EduHubDataSet<PD>
{
    /// <inheritdoc />
    public override string Name { get { return "PD"; } }

    /// <inheritdoc />
    public override bool SupportsEntityLastModified { get { return true; } }

    internal PDDataSet(EduHubContext Context)
        : base(Context)
    {
        // Indexes are built lazily on first lookup. Nullable fields use a
        // NullDictionary (grouped, null-key-tolerant); the primary key PDKEY
        // uses a plain unique Dictionary.
        Index_GLBANK = new Lazy<NullDictionary<string, IReadOnlyList<PD>>>(() => this.ToGroupedNullDictionary(i => i.GLBANK));
        Index_GLCODE = new Lazy<NullDictionary<string, IReadOnlyList<PD>>>(() => this.ToGroupedNullDictionary(i => i.GLCODE));
        Index_GLTAX = new Lazy<NullDictionary<string, IReadOnlyList<PD>>>(() => this.ToGroupedNullDictionary(i => i.GLTAX));
        Index_INITIATIVE = new Lazy<NullDictionary<string, IReadOnlyList<PD>>>(() => this.ToGroupedNullDictionary(i => i.INITIATIVE));
        Index_PDKEY = new Lazy<Dictionary<string, PD>>(() => this.ToDictionary(i => i.PDKEY));
        Index_SUBPROGRAM = new Lazy<NullDictionary<string, IReadOnlyList<PD>>>(() => this.ToGroupedNullDictionary(i => i.SUBPROGRAM));
    }

    /// <summary>
    /// Matches CSV file headers to actions, used to deserialize <see cref="PD" />
    /// </summary>
    /// <param name="Headers">The CSV column headers</param>
    /// <returns>An array of actions which deserialize <see cref="PD" /> fields for each CSV column header</returns>
    internal override Action<PD, string>[] BuildMapper(IReadOnlyList<string> Headers)
    {
        var mapper = new Action<PD, string>[Headers.Count];

        for (var i = 0; i < Headers.Count; i++) {
            switch (Headers[i]) {
                case "PDKEY":
                    mapper[i] = (e, v) => e.PDKEY = v;
                    break;
                case "DESCRIPTION":
                    mapper[i] = (e, v) => e.DESCRIPTION = v;
                    break;
                case "GLCODE":
                    mapper[i] = (e, v) => e.GLCODE = v;
                    break;
                case "GLBANK":
                    mapper[i] = (e, v) => e.GLBANK = v;
                    break;
                case "GLTAX":
                    mapper[i] = (e, v) => e.GLTAX = v;
                    break;
                case "SUBPROGRAM":
                    mapper[i] = (e, v) => e.SUBPROGRAM = v;
                    break;
                case "GLPROGRAM":
                    mapper[i] = (e, v) => e.GLPROGRAM = v;
                    break;
                case "INITIATIVE":
                    mapper[i] = (e, v) => e.INITIATIVE = v;
                    break;
                case "LW_DATE":
                    // dates are serialized in the EduHub export format, e.g. "1/02/2003 4:05:06 PM"
                    mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                    break;
                case "LW_TIME":
                    mapper[i] = (e, v) => e.LW_TIME = v == null ? (short?)null : short.Parse(v);
                    break;
                case "LW_USER":
                    mapper[i] = (e, v) => e.LW_USER = v;
                    break;
                default:
                    // unrecognized columns are ignored
                    mapper[i] = MapperNoOp;
                    break;
            }
        }

        return mapper;
    }

    /// <summary>
    /// Merges <see cref="PD" /> delta entities
    /// </summary>
    /// <param name="Entities">Iterator for base <see cref="PD" /> entities</param>
    /// <param name="DeltaEntities">List of delta <see cref="PD" /> entities</param>
    /// <returns>A merged <see cref="IEnumerable{PD}"/> of entities</returns>
    /// <remarks>
    /// Sorted-merge of two streams ordered by PDKEY: delta entities replace base
    /// entities with the same key and are inserted in clustered-key order.
    /// </remarks>
    internal override IEnumerable<PD> ApplyDeltaEntities(IEnumerable<PD> Entities, List<PD> DeltaEntities)
    {
        // keys present in the delta; base entities with these keys are superseded
        HashSet<string> Index_PDKEY = new HashSet<string>(DeltaEntities.Select(i => i.PDKEY));

        using (var deltaIterator = DeltaEntities.GetEnumerator())
        {
            using (var entityIterator = Entities.GetEnumerator())
            {
                while (deltaIterator.MoveNext())
                {
                    var deltaClusteredKey = deltaIterator.Current.PDKEY;
                    bool yieldEntity = false;

                    // advance the base stream up to (and one past) the delta's insertion point
                    while (entityIterator.MoveNext())
                    {
                        var entity = entityIterator.Current;

                        // Remove returns true when the base entity's key is overwritten by a delta
                        bool overwritten = Index_PDKEY.Remove(entity.PDKEY);
                        if (entity.PDKEY.CompareTo(deltaClusteredKey) <= 0)
                        {
                            if (!overwritten)
                            {
                                yield return entity;
                            }
                        }
                        else
                        {
                            // passed the insertion point; remember whether the current
                            // (already-consumed) base entity still needs to be yielded
                            yieldEntity = !overwritten;
                            break;
                        }
                    }

                    yield return deltaIterator.Current;
                    if (yieldEntity)
                    {
                        yield return entityIterator.Current;
                    }
                }

                // emit any base entities remaining after the last delta entity
                while (entityIterator.MoveNext())
                {
                    yield return entityIterator.Current;
                }
            }
        }
    }

    #region Index Fields

    private Lazy<NullDictionary<string, IReadOnlyList<PD>>> Index_GLBANK;
    private Lazy<NullDictionary<string, IReadOnlyList<PD>>> Index_GLCODE;
    private Lazy<NullDictionary<string, IReadOnlyList<PD>>> Index_GLTAX;
    private Lazy<NullDictionary<string, IReadOnlyList<PD>>> Index_INITIATIVE;
    private Lazy<Dictionary<string, PD>> Index_PDKEY;
    private Lazy<NullDictionary<string, IReadOnlyList<PD>>> Index_SUBPROGRAM;

    #endregion

    #region Index Methods

    /// <summary>
    /// Find PD by GLBANK field
    /// </summary>
    /// <param name="GLBANK">GLBANK value used to find PD</param>
    /// <returns>List of related PD entities</returns>
    /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
    public IReadOnlyList<PD> FindByGLBANK(string GLBANK)
    {
        return Index_GLBANK.Value[GLBANK];
    }

    /// <summary>
    /// Attempt to find PD by GLBANK field
    /// </summary>
    /// <param name="GLBANK">GLBANK value used to find PD</param>
    /// <param name="Value">List of related PD entities</param>
    /// <returns>True if the list of related PD entities is found</returns>
    public bool TryFindByGLBANK(string GLBANK, out IReadOnlyList<PD> Value)
    {
        return Index_GLBANK.Value.TryGetValue(GLBANK, out Value);
    }

    /// <summary>
    /// Attempt to find PD by GLBANK field
    /// </summary>
    /// <param name="GLBANK">GLBANK value used to find PD</param>
    /// <returns>List of related PD entities, or null if not found</returns>
    public IReadOnlyList<PD> TryFindByGLBANK(string GLBANK)
    {
        IReadOnlyList<PD> value;
        if (Index_GLBANK.Value.TryGetValue(GLBANK, out value))
        {
            return value;
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Find PD by GLCODE field
    /// </summary>
    /// <param name="GLCODE">GLCODE value used to find PD</param>
    /// <returns>List of related PD entities</returns>
    /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
    public IReadOnlyList<PD> FindByGLCODE(string GLCODE)
    {
        return Index_GLCODE.Value[GLCODE];
    }

    /// <summary>
    /// Attempt to find PD by GLCODE field
    /// </summary>
    /// <param name="GLCODE">GLCODE value used to find PD</param>
    /// <param name="Value">List of related PD entities</param>
    /// <returns>True if the list of related PD entities is found</returns>
    public bool TryFindByGLCODE(string GLCODE, out IReadOnlyList<PD> Value)
    {
        return Index_GLCODE.Value.TryGetValue(GLCODE, out Value);
    }

    /// <summary>
    /// Attempt to find PD by GLCODE field
    /// </summary>
    /// <param name="GLCODE">GLCODE value used to find PD</param>
    /// <returns>List of related PD entities, or null if not found</returns>
    public IReadOnlyList<PD> TryFindByGLCODE(string GLCODE)
    {
        IReadOnlyList<PD> value;
        if (Index_GLCODE.Value.TryGetValue(GLCODE, out value))
        {
            return value;
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Find PD by GLTAX field
    /// </summary>
    /// <param name="GLTAX">GLTAX value used to find PD</param>
    /// <returns>List of related PD entities</returns>
    /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
    public IReadOnlyList<PD> FindByGLTAX(string GLTAX)
    {
        return Index_GLTAX.Value[GLTAX];
    }

    /// <summary>
    /// Attempt to find PD by GLTAX field
    /// </summary>
    /// <param name="GLTAX">GLTAX value used to find PD</param>
    /// <param name="Value">List of related PD entities</param>
    /// <returns>True if the list of related PD entities is found</returns>
    public bool TryFindByGLTAX(string GLTAX, out IReadOnlyList<PD> Value)
    {
        return Index_GLTAX.Value.TryGetValue(GLTAX, out Value);
    }

    /// <summary>
    /// Attempt to find PD by GLTAX field
    /// </summary>
    /// <param name="GLTAX">GLTAX value used to find PD</param>
    /// <returns>List of related PD entities, or null if not found</returns>
    public IReadOnlyList<PD> TryFindByGLTAX(string GLTAX)
    {
        IReadOnlyList<PD> value;
        if (Index_GLTAX.Value.TryGetValue(GLTAX, out value))
        {
            return value;
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Find PD by INITIATIVE field
    /// </summary>
    /// <param name="INITIATIVE">INITIATIVE value used to find PD</param>
    /// <returns>List of related PD entities</returns>
    /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
    public IReadOnlyList<PD> FindByINITIATIVE(string INITIATIVE)
    {
        return Index_INITIATIVE.Value[INITIATIVE];
    }

    /// <summary>
    /// Attempt to find PD by INITIATIVE field
    /// </summary>
    /// <param name="INITIATIVE">INITIATIVE value used to find PD</param>
    /// <param name="Value">List of related PD entities</param>
    /// <returns>True if the list of related PD entities is found</returns>
    public bool TryFindByINITIATIVE(string INITIATIVE, out IReadOnlyList<PD> Value)
    {
        return Index_INITIATIVE.Value.TryGetValue(INITIATIVE, out Value);
    }

    /// <summary>
    /// Attempt to find PD by INITIATIVE field
    /// </summary>
    /// <param name="INITIATIVE">INITIATIVE value used to find PD</param>
    /// <returns>List of related PD entities, or null if not found</returns>
    public IReadOnlyList<PD> TryFindByINITIATIVE(string INITIATIVE)
    {
        IReadOnlyList<PD> value;
        if (Index_INITIATIVE.Value.TryGetValue(INITIATIVE, out value))
        {
            return value;
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Find PD by PDKEY field
    /// </summary>
    /// <param name="PDKEY">PDKEY value used to find PD</param>
    /// <returns>Related PD entity</returns>
    /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
    public PD FindByPDKEY(string PDKEY)
    {
        return Index_PDKEY.Value[PDKEY];
    }

    /// <summary>
    /// Attempt to find PD by PDKEY field
    /// </summary>
    /// <param name="PDKEY">PDKEY value used to find PD</param>
    /// <param name="Value">Related PD entity</param>
    /// <returns>True if the related PD entity is found</returns>
    public bool TryFindByPDKEY(string PDKEY, out PD Value)
    {
        return Index_PDKEY.Value.TryGetValue(PDKEY, out Value);
    }

    /// <summary>
    /// Attempt to find PD by PDKEY field
    /// </summary>
    /// <param name="PDKEY">PDKEY value used to find PD</param>
    /// <returns>Related PD entity, or null if not found</returns>
    public PD TryFindByPDKEY(string PDKEY)
    {
        PD value;
        if (Index_PDKEY.Value.TryGetValue(PDKEY, out value))
        {
            return value;
        }
        else
        {
            return null;
        }
    }

    /// <summary>
    /// Find PD by SUBPROGRAM field
    /// </summary>
    /// <param name="SUBPROGRAM">SUBPROGRAM value used to find PD</param>
    /// <returns>List of related PD entities</returns>
    /// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
    public IReadOnlyList<PD> FindBySUBPROGRAM(string SUBPROGRAM)
    {
        return Index_SUBPROGRAM.Value[SUBPROGRAM];
    }

    /// <summary>
    /// Attempt to find PD by SUBPROGRAM field
    /// </summary>
    /// <param name="SUBPROGRAM">SUBPROGRAM value used to find PD</param>
    /// <param name="Value">List of related PD entities</param>
    /// <returns>True if the list of related PD entities is found</returns>
    public bool TryFindBySUBPROGRAM(string SUBPROGRAM, out IReadOnlyList<PD> Value)
    {
        return Index_SUBPROGRAM.Value.TryGetValue(SUBPROGRAM, out Value);
    }

    /// <summary>
    /// Attempt to find PD by SUBPROGRAM field
    /// </summary>
    /// <param name="SUBPROGRAM">SUBPROGRAM value used to find PD</param>
    /// <returns>List of related PD entities, or null if not found</returns>
    public IReadOnlyList<PD> TryFindBySUBPROGRAM(string SUBPROGRAM)
    {
        IReadOnlyList<PD> value;
        if (Index_SUBPROGRAM.Value.TryGetValue(SUBPROGRAM, out value))
        {
            return value;
        }
        else
        {
            return null;
        }
    }

    #endregion

    #region SQL Integration

    /// <summary>
    /// Returns a <see cref="SqlCommand"/> which checks for the existence of a PD table, and if not found, creates the table and associated indexes.
    /// </summary>
    /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
    public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection)
    {
        return new SqlCommand(
            connection: SqlConnection,
            cmdText:
@"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[PD]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1)
BEGIN
    CREATE TABLE [dbo].[PD](
        [PDKEY] varchar(10) NOT NULL,
        [DESCRIPTION] varchar(30) NULL,
        [GLCODE] varchar(10) NULL,
        [GLBANK] varchar(10) NULL,
        [GLTAX] varchar(10) NULL,
        [SUBPROGRAM] varchar(4) NULL,
        [GLPROGRAM] varchar(3) NULL,
        [INITIATIVE] varchar(3) NULL,
        [LW_DATE] datetime NULL,
        [LW_TIME] smallint NULL,
        [LW_USER] varchar(128) NULL,
        CONSTRAINT [PD_Index_PDKEY] PRIMARY KEY CLUSTERED (
            [PDKEY] ASC
        )
    );
    CREATE NONCLUSTERED INDEX [PD_Index_GLBANK] ON [dbo].[PD]
    (
            [GLBANK] ASC
    );
    CREATE NONCLUSTERED INDEX [PD_Index_GLCODE] ON [dbo].[PD]
    (
            [GLCODE] ASC
    );
    CREATE NONCLUSTERED INDEX [PD_Index_GLTAX] ON [dbo].[PD]
    (
            [GLTAX] ASC
    );
    CREATE NONCLUSTERED INDEX [PD_Index_INITIATIVE] ON [dbo].[PD]
    (
            [INITIATIVE] ASC
    );
    CREATE NONCLUSTERED INDEX [PD_Index_SUBPROGRAM] ON [dbo].[PD]
    (
            [SUBPROGRAM] ASC
    );
END");
    }

    /// <summary>
    /// Returns a <see cref="SqlCommand"/> which disables all non-clustered table indexes.
    /// Typically called before <see cref="SqlBulkCopy"/> to improve performance.
    /// <see cref="GetSqlRebuildIndexesCommand(SqlConnection)"/> should be called to rebuild and enable indexes after performance sensitive work is completed.
    /// </summary>
    /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
    /// <returns>A <see cref="SqlCommand"/> which (when executed) will disable all non-clustered table indexes</returns>
    public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection)
    {
        return new SqlCommand(
            connection: SqlConnection,
            cmdText:
@"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_GLBANK')
    ALTER INDEX [PD_Index_GLBANK] ON [dbo].[PD] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_GLCODE')
    ALTER INDEX [PD_Index_GLCODE] ON [dbo].[PD] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_GLTAX')
    ALTER INDEX [PD_Index_GLTAX] ON [dbo].[PD] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_INITIATIVE')
    ALTER INDEX [PD_Index_INITIATIVE] ON [dbo].[PD] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_SUBPROGRAM')
    ALTER INDEX [PD_Index_SUBPROGRAM] ON [dbo].[PD] DISABLE;
");
    }

    /// <summary>
    /// Returns a <see cref="SqlCommand"/> which rebuilds and enables all non-clustered table indexes.
    /// </summary>
    /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
    /// <returns>A <see cref="SqlCommand"/> which (when executed) will rebuild and enable all non-clustered table indexes</returns>
    public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection)
    {
        return new SqlCommand(
            connection: SqlConnection,
            cmdText:
@"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_GLBANK')
    ALTER INDEX [PD_Index_GLBANK] ON [dbo].[PD] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_GLCODE')
    ALTER INDEX [PD_Index_GLCODE] ON [dbo].[PD] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_GLTAX')
    ALTER INDEX [PD_Index_GLTAX] ON [dbo].[PD] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_INITIATIVE')
    ALTER INDEX [PD_Index_INITIATIVE] ON [dbo].[PD] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PD]') AND name = N'PD_Index_SUBPROGRAM')
    ALTER INDEX [PD_Index_SUBPROGRAM] ON [dbo].[PD] REBUILD PARTITION = ALL;
");
    }

    /// <summary>
    /// Returns a <see cref="SqlCommand"/> which deletes the <see cref="PD"/> entities passed
    /// </summary>
    /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
    /// <param name="Entities">The <see cref="PD"/> entities to be deleted</param>
    public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<PD> Entities)
    {
        SqlCommand command = new SqlCommand();
        int parameterIndex = 0;
        StringBuilder builder = new StringBuilder();

        // collect the primary keys of the entities to delete
        List<string> Index_PDKEY = new List<string>();

        foreach (var entity in Entities)
        {
            Index_PDKEY.Add(entity.PDKEY);
        }

        // build a parameterized IN clause (one @pN parameter per key; never string-concatenated values)
        builder.AppendLine("DELETE [dbo].[PD] WHERE");

        // Index_PDKEY
        builder.Append("[PDKEY] IN (");
        for (int index = 0; index < Index_PDKEY.Count; index++)
        {
            if (index != 0)
                builder.Append(", ");

            // PDKEY
            var parameterPDKEY = $"@p{parameterIndex++}";
            builder.Append(parameterPDKEY);
            command.Parameters.Add(parameterPDKEY, SqlDbType.VarChar, 10).Value = Index_PDKEY[index];
        }
        builder.Append(");");

        command.Connection = SqlConnection;
        command.CommandText = builder.ToString();

        return command;
    }

    /// <summary>
    /// Provides a <see cref="IDataReader"/> for the PD data set
    /// </summary>
    /// <returns>A <see cref="IDataReader"/> for the PD data set</returns>
    public override EduHubDataSetDataReader<PD> GetDataSetDataReader()
    {
        return new PDDataReader(Load());
    }

    /// <summary>
    /// Provides a <see cref="IDataReader"/> for the PD data set
    /// </summary>
    /// <returns>A <see cref="IDataReader"/> for the PD data set</returns>
    public override EduHubDataSetDataReader<PD> GetDataSetDataReader(List<PD> Entities)
    {
        return new PDDataReader(new EduHubDataSetLoadedReader<PD>(this, Entities));
    }

    // Modest implementation to primarily support SqlBulkCopy
    private class PDDataReader : EduHubDataSetDataReader<PD>
    {
        public PDDataReader(IEduHubDataSetReader<PD> Reader)
            : base (Reader)
        {
        }

        // 11 columns, ordinals 0-10, matching the [dbo].[PD] table definition
        public override int FieldCount { get { return 11; } }

        public override object GetValue(int i)
        {
            switch (i)
            {
                case 0: // PDKEY
                    return Current.PDKEY;
                case 1: // DESCRIPTION
                    return Current.DESCRIPTION;
                case 2: // GLCODE
                    return Current.GLCODE;
                case 3: // GLBANK
                    return Current.GLBANK;
                case 4: // GLTAX
                    return Current.GLTAX;
                case 5: // SUBPROGRAM
                    return Current.SUBPROGRAM;
                case 6: // GLPROGRAM
                    return Current.GLPROGRAM;
                case 7: // INITIATIVE
                    return Current.INITIATIVE;
                case 8: // LW_DATE
                    return Current.LW_DATE;
                case 9: // LW_TIME
                    return Current.LW_TIME;
                case 10: // LW_USER
                    return Current.LW_USER;
                default:
                    throw new ArgumentOutOfRangeException(nameof(i));
            }
        }

        public override bool IsDBNull(int i)
        {
            // ordinal 0 (PDKEY) is the non-nullable primary key, handled by the default branch
            switch (i)
            {
                case 1: // DESCRIPTION
                    return Current.DESCRIPTION == null;
                case 2: // GLCODE
                    return Current.GLCODE == null;
                case 3: // GLBANK
                    return Current.GLBANK == null;
                case 4: // GLTAX
                    return Current.GLTAX == null;
                case 5: // SUBPROGRAM
                    return Current.SUBPROGRAM == null;
                case 6: // GLPROGRAM
                    return Current.GLPROGRAM == null;
                case 7: // INITIATIVE
                    return Current.INITIATIVE == null;
                case 8: // LW_DATE
                    return Current.LW_DATE == null;
                case 9: // LW_TIME
                    return Current.LW_TIME == null;
                case 10: // LW_USER
                    return Current.LW_USER == null;
                default:
                    return false;
            }
        }

        public override string GetName(int ordinal)
        {
            switch (ordinal)
            {
                case 0: // PDKEY
                    return "PDKEY";
                case 1: // DESCRIPTION
                    return "DESCRIPTION";
                case 2: // GLCODE
                    return "GLCODE";
                case 3: // GLBANK
                    return "GLBANK";
                case 4: // GLTAX
                    return "GLTAX";
                case 5: // SUBPROGRAM
                    return "SUBPROGRAM";
                case 6: // GLPROGRAM
                    return "GLPROGRAM";
                case 7: // INITIATIVE
                    return "INITIATIVE";
                case 8: // LW_DATE
                    return "LW_DATE";
                case 9: // LW_TIME
                    return "LW_TIME";
                case 10: // LW_USER
                    return "LW_USER";
                default:
                    throw new ArgumentOutOfRangeException(nameof(ordinal));
            }
        }

        public override int GetOrdinal(string name)
        {
            switch (name)
            {
                case "PDKEY":
                    return 0;
                case "DESCRIPTION":
                    return 1;
                case "GLCODE":
                    return 2;
                case "GLBANK":
                    return 3;
                case "GLTAX":
                    return 4;
                case "SUBPROGRAM":
                    return 5;
                case "GLPROGRAM":
                    return 6;
                case "INITIATIVE":
                    return 7;
                case "LW_DATE":
                    return 8;
                case "LW_TIME":
                    return 9;
                case "LW_USER":
                    return 10;
                default:
                    throw new ArgumentOutOfRangeException(nameof(name));
            }
        }
    }

    #endregion

}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Globalization;
using System.Collections;
/// <summary>
/// CopyTo
/// </summary>
public class ArrayCopyTo
{
const int c_MaxValue = 10;
const int c_MinValue = 0;
public static int Main()
{
    // run the full suite and translate the outcome into the harness exit code
    // (100 = pass, 0 = fail)
    ArrayCopyTo testCase = new ArrayCopyTo();

    TestLibrary.TestFramework.BeginTestCase("ArrayCopyTo");
    bool passed = testCase.RunTests();
    TestLibrary.TestFramework.EndTestCase();
    TestLibrary.TestFramework.LogInformation(passed ? "PASS" : "FAIL");

    return passed ? 100 : 0;
}
public bool RunTests()
{
    // non-short-circuiting &= guarantees every scenario executes even after
    // an earlier failure, matching the original "X() && retVal" accumulation
    bool allPassed = true;

    TestLibrary.TestFramework.LogInformation("[Positive]");
    allPassed &= PosTest1();
    allPassed &= PosTest2();
    allPassed &= PosTest3();
    allPassed &= PosTest4();

    TestLibrary.TestFramework.LogInformation("[Negative]");
    allPassed &= NegTest1();
    allPassed &= NegTest2();
    allPassed &= NegTest3();
    allPassed &= NegTest4();
    allPassed &= NegTest5();
    allPassed &= NegTest6();
    allPassed &= NegTest7();
    allPassed &= NegTest8();

    return allPassed;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest1()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest1:Copies all the elements of the current one-dimensional Array to the specified one-dimensional Array starting at the specified destination Array index,the two array have the same value type.");
    try
    {
        // source holds 0..9; destination is twice as long, prefilled with 10..19
        Array source = Array.CreateInstance(typeof(Int32), c_MaxValue);
        Array destination = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            source.SetValue(i, i);
            destination.SetValue(i + c_MaxValue, i);
        }

        source.CopyTo(destination, c_MaxValue);

        // every copied element must land at offset c_MaxValue in the destination
        int offset = c_MaxValue;
        foreach (object element in source)
        {
            if (!element.Equals(destination.GetValue(offset)))
            {
                TestLibrary.TestFramework.LogError("001", "Copy error");
                passed = false;
                break;
            }
            offset++;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest2()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest2:Copies all the elements of the current one-dimensional Array to the specified one-dimensional Array starting at the specified destination Array index,the two array have the same reference type.");
    try
    {
        // both arrays hold random strings; destination is twice as long
        Array source = Array.CreateInstance(typeof(string), c_MaxValue);
        Array destination = Array.CreateInstance(typeof(string), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            source.SetValue(TestLibrary.Generator.GetString(-55, true, c_MinValue, c_MaxValue), i);
            destination.SetValue(TestLibrary.Generator.GetString(-55, true, c_MinValue, c_MaxValue), i);
        }

        source.CopyTo(destination, c_MaxValue);

        // verify each copied reference landed at offset c_MaxValue
        int offset = c_MaxValue;
        foreach (object element in source)
        {
            if (!element.Equals(destination.GetValue(offset)))
            {
                TestLibrary.TestFramework.LogError("003", "Copy error");
                passed = false;
                break;
            }
            offset++;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest3()
{
    bool passed = true;

    TestLibrary.TestFramework.BeginScenario("PosTest3:Copies all the elements of the current one-dimensional Array to the specified one-dimensional Array starting at the specified destination Array index,the two array can upcast.");
    try
    {
        // source holds derived instances, destination is typed as the base class
        Array source = Array.CreateInstance(typeof(TestDeriveClass), c_MaxValue);
        Array destination = Array.CreateInstance(typeof(TestBaseClass), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            source.SetValue(new TestDeriveClass(i), i);
            destination.SetValue(new TestDeriveClass(i + c_MaxValue), i);
        }

        source.CopyTo(destination, c_MaxValue);

        // verify each copied element landed at offset c_MaxValue
        int offset = c_MaxValue;
        foreach (object element in source)
        {
            if (!element.Equals(destination.GetValue(offset)))
            {
                TestLibrary.TestFramework.LogError("005", "Copy error");
                passed = false;
                break;
            }
            offset++;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
        passed = false;
    }

    return passed;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool PosTest4()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("PosTest4:Copies all the elements of the current one-dimensional Array to the specified one-dimensional Array starting at the specified destination Array index,the two array can boxing.");
    try
    {
        // Int32 source copied into an Object destination: each element is boxed.
        Array sourceArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        Array targetArray = Array.CreateInstance(typeof(Object), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            sourceArray.SetValue(i, i);
            targetArray.SetValue(i + c_MaxValue, i);
        }
        sourceArray.CopyTo(targetArray, c_MaxValue);
        // Boxed Int32.Equals compares by value, so this checks the copied values.
        int targetIndex = c_MaxValue;
        foreach (object element in sourceArray)
        {
            if (!element.Equals(targetArray.GetValue(targetIndex)))
            {
                TestLibrary.TestFramework.LogError("007", "Copy error");
                retVal = false;
                break;
            }
            targetIndex++;
        }
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest1()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest1:myTargetArray is a null reference.");
    try
    {
        Array sourceArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        Array targetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            sourceArray.SetValue(i, i);
            targetArray.SetValue(i + c_MaxValue, i);
        }
        // Drop the destination; CopyTo must reject a null target array.
        targetArray = null;
        sourceArray.CopyTo(targetArray, c_MaxValue);
        TestLibrary.TestFramework.LogError("009", "Copy error");
        retVal = false;
    }
    catch (ArgumentNullException)
    {
        // Expected: null destination array.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("010", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest2()
{
    bool retVal = true;
    // BUGFIX: the scenario message previously said "NegTest6" — a copy-paste
    // error that misattributed failures in the test log. This is NegTest2.
    TestLibrary.TestFramework.BeginScenario("NegTest2:index is less than the lower bound of array.");
    try
    {
        Array myOriginalArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        Array myTargetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            myOriginalArray.SetValue(i, i);
            myTargetArray.SetValue(i + c_MaxValue, i);
        }
        // A negative destination index must be rejected before any copying.
        myOriginalArray.CopyTo(myTargetArray, c_MinValue - 1);
        TestLibrary.TestFramework.LogError("011", "Copy error");
        retVal = false;
    }
    catch (ArgumentOutOfRangeException)
    {
        // Expected: index below the destination's lower bound.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("012", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest3()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest3:The source Array is multidimensional.");
    try
    {
        int[] lengths = { c_MaxValue, c_MaxValue };
        Array sourceArray = Array.CreateInstance(typeof(Int32), lengths);
        Array targetArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 3);
        // The rank check on the multidimensional source fires before the
        // (also invalid) negative index is examined.
        sourceArray.CopyTo(targetArray, c_MinValue - 1);
        TestLibrary.TestFramework.LogError("013", "Copy error");
        retVal = false;
    }
    catch (RankException)
    {
        // Expected: source array is multidimensional.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("014", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest4()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest4:The type of the source Array cannot be cast automatically to the type of the destination array.");
    try
    {
        // Int32 elements have no automatic conversion to string.
        Array sourceArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        Array targetArray = Array.CreateInstance(typeof(string), c_MaxValue * 2);
        sourceArray.CopyTo(targetArray, c_MaxValue);
        TestLibrary.TestFramework.LogError("015", "Copy error");
        retVal = false;
    }
    catch (ArrayTypeMismatchException)
    {
        // Expected: incompatible element types.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("016", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest5()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest5:array is multidimensional.");
    try
    {
        Array sourceArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        int[] lengths = { c_MaxValue, c_MaxValue };
        // Here the DESTINATION is multidimensional (NegTest3 covers the source).
        Array targetArray = Array.CreateInstance(typeof(Int32), lengths);
        for (int i = 0; i < c_MaxValue; i++)
        {
            sourceArray.SetValue(i, i);
        }
        sourceArray.CopyTo(targetArray, c_MaxValue);
        TestLibrary.TestFramework.LogError("017", "Copy error");
        retVal = false;
    }
    catch (ArgumentException)
    {
        // Expected: destination array is multidimensional.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("018", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest6()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest6:index is equal to or greater than the length of array.");
    try
    {
        Array sourceArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        Array targetArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        for (int i = 0; i < c_MaxValue; i++)
        {
            sourceArray.SetValue(i, i);
        }
        // Starting at index c_MaxValue leaves zero slots in a c_MaxValue-long target.
        sourceArray.CopyTo(targetArray, c_MaxValue);
        TestLibrary.TestFramework.LogError("019", "Copy error");
        retVal = false;
    }
    catch (ArgumentException)
    {
        // Expected: index leaves no room in the destination.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("020", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest7()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest7:The number of elements in the source Array is greater than the available space from index to the end of the destination array.");
    try
    {
        // Source is twice as long as the destination, so even index 0 cannot fit it.
        Array sourceArray = Array.CreateInstance(typeof(Int32), c_MaxValue * 2);
        Array targetArray = Array.CreateInstance(typeof(Int32), c_MaxValue);
        for (int i = 0; i < c_MaxValue; i++)
        {
            sourceArray.SetValue(i, i);
        }
        sourceArray.CopyTo(targetArray, c_MinValue);
        TestLibrary.TestFramework.LogError("021", "Copy error");
        retVal = false;
    }
    catch (ArgumentException)
    {
        // Expected: destination too small for the source.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("022", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
// Returns true if the expected result is right
// Returns false if the expected result is wrong
public bool NegTest8()
{
    bool retVal = true;
    TestLibrary.TestFramework.BeginScenario("NegTest8:At least one element in sourceArray cannot be cast to the type of destinationArray.");
    try
    {
        Array sourceArray = Array.CreateInstance(typeof(ITestInterface), c_MaxValue);
        Array targetArray = Array.CreateInstance(typeof(TestBaseClass), c_MaxValue * 2);
        for (int i = 0; i < c_MaxValue; i++)
        {
            sourceArray.SetValue(new TestDeriveClass(i), i);
            targetArray.SetValue(new TestDeriveClass(i + c_MaxValue), i);
        }
        // Plant one element that implements ITestInterface but is NOT a
        // TestBaseClass, so the per-element cast during CopyTo must fail.
        sourceArray.SetValue(new TestDeriveClass1(100), c_MaxValue - 1);
        sourceArray.CopyTo(targetArray, c_MinValue);
        TestLibrary.TestFramework.LogError("023", "Copy error");
        retVal = false;
    }
    catch (InvalidCastException)
    {
        // Expected: one source element is not convertible to TestBaseClass.
    }
    catch (Exception e)
    {
        TestLibrary.TestFramework.LogError("024", "Unexpected exception: " + e);
        retVal = false;
    }
    return retVal;
}
}
//create ITestInterface for Negtest8.
// Implemented both by TestDeriveClass (which derives from TestBaseClass) and by
// TestDeriveClass1 (which does not), so an ITestInterface[] can hold elements
// that are not all convertible to TestBaseClass.
interface ITestInterface
{
    // Returns the implementing type's display name.
    string GetName();
}
//create TestBaseClass for provding test method and test target.
// Abstract base used as the destination element type in the upcast scenarios.
public abstract class TestBaseClass
{
    // The value holder
    protected int id;
    public TestBaseClass(int Id)
    {
        id = Id;
    }
    // Unused here; present only to give the type some state.
    protected int m_value;
    // Forces derived classes to expose their identifying value.
    protected abstract int GetValue();
}
//create TestDeriveClass for provding test method and test source.
// Derived type used as the source element type; also implements ITestInterface
// so it can share an array with TestDeriveClass1 in NegTest8.
public class TestDeriveClass : TestBaseClass, ITestInterface
{
    int deriveId;
    public TestDeriveClass(int Id)
        : base(Id)
    {
        deriveId = Id;
    }
    protected override int GetValue()
    {
        return deriveId;
    }
    #region ITestInterface Members
    public string GetName()
    {
        return "TestDeriveClass";
    }
    #endregion
}
//create TestDeriveClass for provding test method and test source.
// Implements ITestInterface WITHOUT deriving from TestBaseClass: used by
// NegTest8 as the element that cannot be cast to the destination type.
public class TestDeriveClass1 : ITestInterface
{
    int deriveId;
    public TestDeriveClass1(int Id)
    {
        deriveId = Id;
    }
    protected int GetValue()
    {
        return deriveId;
    }
    #region ITestInterface Members
    public string GetName()
    {
        return "TestDeriveClass1";
    }
    #endregion
}
| |
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
namespace System.ServiceModel.Channels
{
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IdentityModel.Claims;
using System.IdentityModel.Policy;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.Runtime;
using System.Runtime.CompilerServices;
using System.Runtime.Serialization;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.ServiceModel.Diagnostics;
using System.ServiceModel.Security;
using System.ServiceModel.Security.Tokens;
using System.Text;
using System.Xml;
// Helper routines for password-based peer-channel authentication. An
// authenticator is HMACSHA256 keyed with the password over
// (SHA256(passwordBytes) || message), where message is the peer's RSA
// public key blob.
class PeerSecurityHelpers
{
    // Computes the password authenticator for a certificate's RSA public key.
    public static byte[] ComputeHash(X509Certificate2 cert, string pwd)
    {
        RSACryptoServiceProvider keyProv = cert.PublicKey.Key as RSACryptoServiceProvider;
        Fx.Assert(keyProv != null, "Remote Peer's credentials are invalid!");
        byte[] key = keyProv.ExportCspBlob(false);
        return ComputeHash(key, pwd);
    }
    // Computes the password authenticator for the RSA key carried in a claim.
    // Throws via the exception utility when the claim does not hold an RSA CSP.
    public static byte[] ComputeHash(Claim claim, string pwd)
    {
        RSACryptoServiceProvider provider = claim.Resource as RSACryptoServiceProvider;
        if (provider == null)
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("claim");
        using (provider)
        {
            byte[] keyBlob = provider.ExportCspBlob(false);
            if (keyBlob == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("key");
            return ComputeHash(keyBlob, pwd);
        }
    }
    // Core hash: HMACSHA256(pwdBytes, SHA256(pwdBytes) || message).
    // Every intermediate buffer containing password material is zeroed in the
    // finally block before returning.
    public static byte[] ComputeHash(byte[] message, string pwd)
    {
        byte[] returnValue = null;
        RuntimeHelpers.PrepareConstrainedRegions();
        byte[] pwdBytes = null;
        byte[] pwdHash = null;
        byte[] tempBuffer = null;
        try
        {
            pwdBytes = UnicodeEncoding.Unicode.GetBytes(pwd.Trim());
            using (HMACSHA256 algo = new HMACSHA256(pwdBytes))
            {
                using (SHA256Managed sha = new SHA256Managed())
                {
                    pwdHash = sha.ComputeHash(pwdBytes);
                    tempBuffer = DiagnosticUtility.Utility.AllocateByteArray(checked(message.Length + pwdHash.Length));
                    Array.Copy(pwdHash, tempBuffer, pwdHash.Length);
                    Array.Copy(message, 0, tempBuffer, pwdHash.Length, message.Length);
                    returnValue = algo.ComputeHash(tempBuffer);
                }
            }
        }
        finally
        {
            ArrayClear(pwdBytes);
            ArrayClear(pwdHash);
            ArrayClear(tempBuffer);
        }
        return returnValue;
    }
    // Zeroes a buffer that may contain password-derived material.
    static void ArrayClear(byte[] buffer)
    {
        if (buffer != null)
            Array.Clear(buffer, 0, buffer.Length);
    }
    // Returns true only when the supplied authenticator matches, byte for byte,
    // the hash computed from the claim's key and the password.
    public static bool Authenticate(Claim claim, string password, byte[] authenticator)
    {
        if (authenticator == null)
            return false;
        bool returnValue = false;
        byte[] hash = null;
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            hash = ComputeHash(claim, password);
            if (hash.Length == authenticator.Length)
            {
                // BUGFIX: the previous code set returnValue = true unconditionally
                // AFTER the comparison loop, which accepted ANY authenticator of
                // the right length. Assume a match and clear the flag on the first
                // differing byte instead.
                returnValue = true;
                for (int i = 0; i < hash.Length; i++)
                {
                    if (hash[i] != authenticator[i])
                    {
                        returnValue = false;
                        break;
                    }
                }
            }
        }
        finally
        {
            ArrayClear(hash);
        }
        return returnValue;
    }
    // Validates the PeerHashToken carried in an incoming RST message.
    public static bool AuthenticateRequest(Claim claim, string password, Message message)
    {
        PeerHashToken request = PeerRequestSecurityToken.CreateHashTokenFrom(message);
        return request.Validate(claim, password);
    }
    // Validates the PeerHashToken carried in an incoming RSTR message.
    public static bool AuthenticateResponse(Claim claim, string password, Message message)
    {
        PeerHashToken request = PeerRequestSecurityTokenResponse.CreateHashTokenFrom(message);
        return request.Validate(claim, password);
    }
}
// Factory and matcher for the anonymous "peer" identity claim used when no
// stronger endpoint identity is available.
internal class PeerIdentityClaim
{
    const string resourceValue = "peer";
    const string resourceRight = "peer";
    public const string PeerClaimType = PeerStrings.Namespace + "/peer";
    // Builds the canonical peer identity claim.
    static internal Claim Claim()
    {
        return new Claim(PeerClaimType, resourceValue, resourceRight);
    }
    // True when the endpoint identity's claim is the peer claim type
    // (ordinal comparison, same as the == string operator).
    static internal bool IsMatch(EndpointIdentity identity)
    {
        return String.Equals(identity.IdentityClaim.ClaimType, PeerClaimType, StringComparison.Ordinal);
    }
}
// No-op security protocol used when peer security is disabled: it neither
// secures outgoing messages nor verifies incoming ones, but marks any Security
// header as understood so header processing does not fault the message.
class PeerDoNothingSecurityProtocol : SecurityProtocol
{
    public PeerDoNothingSecurityProtocol(SecurityProtocolFactory factory) : base(factory, null, null) { }
    // Intentionally empty: outgoing messages are sent unmodified.
    public override void SecureOutgoingMessage(ref Message message, TimeSpan timeout)
    {
    }
    // Marks an existing wsse:Security header as understood; any malformed-header
    // exception is traced and swallowed (best-effort, the message still flows).
    public override void VerifyIncomingMessage(ref Message request, TimeSpan timeout)
    {
        try
        {
            int i = request.Headers.FindHeader(SecurityJan2004Strings.Security, SecurityJan2004Strings.Namespace);
            if (i >= 0)
            {
                request.Headers.AddUnderstood(i);
            }
        }
        catch (MessageHeaderException e)
        {
            DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
        }
        catch (XmlException e)
        {
            DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
        }
        catch (SerializationException e)
        {
            DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
        }
    }
    // No resources to tear down in any of the lifecycle transitions.
    public override void OnAbort()
    {
    }
    public override void OnClose(TimeSpan timeout)
    {
    }
    public override void OnOpen(TimeSpan timeout)
    {
    }
}
// Factory producing PeerDoNothingSecurityProtocol instances; all lifecycle
// overrides are deliberately empty because the protocol holds no state.
class PeerDoNothingSecurityProtocolFactory : SecurityProtocolFactory
{
    protected override SecurityProtocol OnCreateSecurityProtocol(EndpointAddress target, Uri via, object listenerSecurityState, TimeSpan timeout)
    {
        return new PeerDoNothingSecurityProtocol(this);
    }
    public override void OnAbort()
    {
    }
    public override void OnOpen(TimeSpan timeout)
    {
    }
    public override void OnClose(TimeSpan timeout)
    {
    }
}
// Identity verifier for peer channels: access checks always succeed, and
// endpoints without an explicit identity are given the anonymous peer identity.
class PeerIdentityVerifier : IdentityVerifier
{
    public PeerIdentityVerifier() : base() { }
    // Peer channels perform authentication elsewhere (via the hash-token
    // handshake), so the identity access check is unconditionally satisfied.
    public override bool CheckAccess(EndpointIdentity identity, AuthorizationContext authContext)
    {
        return true;
    }
    // Always yields an identity: the reference's own identity when present,
    // otherwise a fresh PeerEndpointIdentity.
    public override bool TryGetIdentity(EndpointAddress reference, out EndpointIdentity identity)
    {
        if (reference == null)
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reference");
        identity = reference.Identity ?? new PeerEndpointIdentity();
        return true;
    }
}
// Endpoint identity whose claim is the anonymous "peer" claim; used as the
// default identity for peer endpoints that declare none.
class PeerEndpointIdentity : EndpointIdentity
{
    public PeerEndpointIdentity()
        : base()
    {
        base.Initialize(PeerIdentityClaim.Claim());
    }
}
// X509 token provider that runs the configured certificate validator (when one
// is supplied) over each token the base provider hands out.
class PeerX509TokenProvider : X509SecurityTokenProvider
{
    X509CertificateValidator validator;
    public PeerX509TokenProvider(X509CertificateValidator validator, X509Certificate2 credential)
        : base(credential)
    {
        this.validator = validator;
    }
    // Fetches the token from the base provider and validates its certificate;
    // a null validator means validation is skipped entirely.
    protected override SecurityToken GetTokenCore(TimeSpan timeout)
    {
        X509SecurityToken certToken = (X509SecurityToken)base.GetTokenCore(timeout);
        if (this.validator == null)
            return certToken;
        this.validator.Validate(certToken.Certificate);
        return certToken;
    }
}
// Credentials manager for certificate-based peer security: exposes a token
// manager that only hands out X509 signature token providers wrapping the
// node's own certificate plus the configured validator.
class PeerCertificateClientCredentials : SecurityCredentialsManager
{
    X509Certificate2 selfCertificate;
    X509CertificateValidator certificateValidator;
    public PeerCertificateClientCredentials(X509Certificate2 selfCertificate, X509CertificateValidator validator)
    {
        this.selfCertificate = selfCertificate;
        this.certificateValidator = validator;
    }
    public override SecurityTokenManager CreateSecurityTokenManager()
    {
        return new PeerCertificateClientCredentialsSecurityTokenManager(this);
    }
    // Token manager bound to the owning credentials instance; supports token
    // serialization and X509 signature token provision only — authenticator
    // creation is explicitly unsupported.
    class PeerCertificateClientCredentialsSecurityTokenManager : SecurityTokenManager
    {
        PeerCertificateClientCredentials creds;
        public PeerCertificateClientCredentialsSecurityTokenManager(PeerCertificateClientCredentials creds)
        {
            this.creds = creds;
        }
        public override SecurityTokenSerializer CreateSecurityTokenSerializer(SecurityTokenVersion version)
        {
            MessageSecurityTokenVersion messageVersion = (MessageSecurityTokenVersion)version;
            return new WSSecurityTokenSerializer(messageVersion.SecurityVersion, messageVersion.TrustVersion, messageVersion.SecureConversationVersion, messageVersion.EmitBspRequiredAttributes, null, null, null);
        }
        // Token authentication is not performed through this manager.
        public override SecurityTokenAuthenticator CreateSecurityTokenAuthenticator(SecurityTokenRequirement tokenRequirement, out SecurityTokenResolver outOfBandTokenResolver)
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException());
        }
        // Only X509 signature tokens are supported; anything else is rejected.
        public override SecurityTokenProvider CreateSecurityTokenProvider(SecurityTokenRequirement requirement)
        {
            if (requirement == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("requirement");
            }
            if (requirement.TokenType == SecurityTokenTypes.X509Certificate && requirement.KeyUsage == SecurityKeyUsage.Signature)
            {
                return new PeerX509TokenProvider(this.creds.certificateValidator, this.creds.selfCertificate);
            }
            else
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException());
            }
        }
    }
}
// Security token carrying a password-derived authenticator (see
// PeerSecurityHelpers.ComputeHash) that peers exchange during the
// password-authentication handshake. A token without an authenticator is the
// shared "invalid" token.
internal class PeerHashToken : SecurityToken
{
    string id = SecurityUniqueId.Create().Value;
    Uri status;
    bool isValid;
    ReadOnlyCollection<SecurityKey> keys;
    internal const string TokenTypeString = PeerStrings.Namespace + "/peerhashtoken";
    internal const string RequestTypeString = "http://schemas.xmlsoap.org/ws/2005/02/trust/Validate";
    internal const string Action = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/Validate";
    public const string PeerNamespace = PeerStrings.Namespace;
    public const string PeerTokenElementName = "PeerHashToken";
    public const string PeerAuthenticatorElementName = "Authenticator";
    public const string PeerPrefix = "peer";
    static PeerHashToken invalid = new PeerHashToken();
    byte[] authenticator;
    DateTime effectiveTime = DateTime.UtcNow;
    DateTime expirationTime = DateTime.UtcNow.AddHours(10);
    // Builds the shared invalid token (no authenticator).
    PeerHashToken()
    {
        CheckValidity();
    }
    // Wraps a raw authenticator; a null value yields an invalid token.
    public PeerHashToken(byte[] authenticator)
    {
        this.authenticator = authenticator;
        CheckValidity();
    }
    // Derives the authenticator from a certificate's public key and password.
    public PeerHashToken(X509Certificate2 certificate, string password)
    {
        this.authenticator = PeerSecurityHelpers.ComputeHash(certificate, password);
        CheckValidity();
    }
    // Derives the authenticator from a claim's RSA key and password.
    public PeerHashToken(Claim claim, string password)
    {
        this.authenticator = PeerSecurityHelpers.ComputeHash(claim, password);
        CheckValidity();
    }
    public override string Id
    {
        get { return this.id; }
    }
    public override DateTime ValidFrom
    {
        get { return this.effectiveTime; }
    }
    public override DateTime ValidTo
    {
        get { return this.expirationTime; }
    }
    // Shared sentinel returned by parsers when no valid token was found.
    public static PeerHashToken Invalid
    {
        get
        {
            return invalid;
        }
    }
    // A hash token carries no keys; the collection is created lazily and empty.
    public override ReadOnlyCollection<SecurityKey> SecurityKeys
    {
        get
        {
            if (null == this.keys)
            {
                this.keys = new ReadOnlyCollection<SecurityKey>(new List<SecurityKey>());
            }
            return this.keys;
        }
    }
    public Uri Status
    {
        get
        {
            return this.status;
        }
    }
    public bool IsValid
    {
        get
        {
            return this.isValid;
        }
    }
    // Recomputes the expected authenticator from the claim and password and
    // checks it against this token's authenticator.
    public bool Validate(Claim claim, string password)
    {
        if (!(this.authenticator != null))
        {
            throw Fx.AssertAndThrow("Incorrect initialization");
        }
        bool result = PeerSecurityHelpers.Authenticate(claim, password, this.authenticator);
        return result;
    }
    // A token is valid iff it has an authenticator; status mirrors validity.
    void CheckValidity()
    {
        isValid = this.authenticator != null;
        status = new Uri(isValid ? PeerRequestSecurityTokenResponse.ValidString : PeerRequestSecurityTokenResponse.InvalidString);
    }
    // Serializes as <peer:PeerHashToken><peer:Authenticator>base64</...></...>.
    public void Write(XmlWriter writer)
    {
        writer.WriteStartElement(PeerPrefix, PeerTokenElementName, PeerNamespace);
        writer.WriteStartElement(PeerPrefix, PeerAuthenticatorElementName, PeerNamespace);
        writer.WriteString(Convert.ToBase64String(this.authenticator));
        writer.WriteEndElement();
        writer.WriteEndElement();
    }
    // Parses a PeerHashToken from the children of the given element; malformed
    // or missing content yields an invalid token (auth stays null).
    internal static PeerHashToken CreateFrom(XmlElement child)
    {
        byte[] auth = null;
        foreach (XmlNode node in child.ChildNodes)
        {
            // BUGFIX: use a soft cast — the old direct (XmlElement) cast threw
            // InvalidCastException for text/comment/whitespace children, making
            // the null check below unreachable.
            XmlElement element = node as XmlElement;
            if (element == null || !PeerRequestSecurityToken.CompareWithNS(element.LocalName, element.NamespaceURI, PeerTokenElementName, PeerNamespace))
                continue;
            if (element.ChildNodes.Count != 1)
                break;
            XmlElement authElement = element.ChildNodes[0] as XmlElement;
            if (authElement == null || !PeerRequestSecurityToken.CompareWithNS(authElement.LocalName, authElement.NamespaceURI, PeerAuthenticatorElementName, PeerNamespace))
                break;
            try
            {
                auth = Convert.FromBase64String(XmlHelper.ReadTextElementAsTrimmedString(authElement));
                break;
            }
            catch (ArgumentNullException e)
            {
                DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
            }
            catch (FormatException e)
            {
                DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
            }
        }
        return new PeerHashToken(auth);
    }
    // Value equality: two tokens are equal when their authenticators match
    // byte for byte.
    public override bool Equals(object token)
    {
        PeerHashToken that = token as PeerHashToken;
        if (that == null)
            return false;
        if (Object.ReferenceEquals(that, this))
            return true;
        if (this.authenticator != null && that.authenticator != null && this.authenticator.Length == that.authenticator.Length)
        {
            for (int i = 0; i < this.authenticator.Length; i++)
            {
                if (this.authenticator[i] != that.authenticator[i])
                    return false;
            }
            return true;
        }
        return false;
    }
    // BUGFIX: hash the authenticator's CONTENTS. The previous implementation
    // used byte[].GetHashCode() (reference identity), so tokens that compared
    // equal via Equals could produce different hash codes, violating the
    // Equals/GetHashCode contract.
    public override int GetHashCode()
    {
        if (!isValid)
            return 0;
        int hash = 17;
        for (int i = 0; i < this.authenticator.Length; i++)
        {
            hash = unchecked(hash * 31 + this.authenticator[i]);
        }
        return hash;
    }
}
// Token serializer that suppresses key-identifier-clause creation from token
// XML: peer security resolves tokens itself, so the clause is never needed.
class PeerSecurityTokenSerializer : WSSecurityTokenSerializer
{
    // Returning null tells the caller no key identifier clause applies.
    public override SecurityKeyIdentifierClause CreateKeyIdentifierClauseFromTokenXml(XmlElement element, SecurityTokenReferenceStyle tokenReferenceStyle)
    {
        return null;
    }
}
// WS-Trust RequestSecurityToken specialization that carries a PeerHashToken
// inside a RequestedSecurityToken element.
internal class PeerRequestSecurityToken : RequestSecurityToken
{
    PeerHashToken token;
    public const string TrustNamespace = TrustFeb2005Strings.Namespace;
    public const string PeerNamespace = PeerStrings.Namespace;
    public const string RequestElementName = "RequestSecurityToken";
    public const string RequestedSecurityTokenElementName = "RequestedSecurityToken";
    public const string PeerHashTokenElementName = "PeerHashToken";
    public PeerRequestSecurityToken(PeerHashToken token)
        : base()
    {
        this.token = token;
        this.TokenType = PeerHashToken.TokenTypeString;
        this.RequestType = PeerHashToken.RequestTypeString;
    }
    public PeerHashToken Token
    {
        get
        {
            return this.token;
        }
    }
    // Extracts the PeerHashToken embedded in an RST message body; returns
    // PeerHashToken.Invalid when none is found.
    public static PeerHashToken CreateHashTokenFrom(Message message)
    {
        PeerHashToken token = PeerHashToken.Invalid;
        XmlReader reader = message.GetReaderAtBodyContents();
        RequestSecurityToken rst = RequestSecurityToken.CreateFrom(reader);
        XmlElement rstXml = rst.RequestSecurityTokenXml;
        if (rstXml != null)
        {
            //find the wrapper element
            foreach (XmlNode node in rst.RequestSecurityTokenXml.ChildNodes)
            {
                // BUGFIX: use a soft cast — the old direct (XmlElement) cast
                // threw InvalidCastException for text/comment children, so the
                // null check that follows was dead code.
                XmlElement element = node as XmlElement;
                if (element == null || !PeerRequestSecurityToken.CompareWithNS(element.LocalName, element.NamespaceURI, PeerRequestSecurityToken.RequestedSecurityTokenElementName, TrustFeb2005Strings.Namespace))
                    continue;
                token = PeerHashToken.CreateFrom(element);
            }
        }
        return token;
    }
    // Builds an RST around a token freshly derived from the credential/password.
    public PeerRequestSecurityToken CreateFrom(X509Certificate2 credential, string password)
    {
        PeerHashToken token = new PeerHashToken(credential, password);
        return new PeerRequestSecurityToken(token);
    }
    // Writes the RequestedSecurityToken wrapper and the embedded hash token.
    internal protected override void OnWriteCustomElements(XmlWriter writer)
    {
        if (!(token != null && token.IsValid))
        {
            throw Fx.AssertAndThrow("Could not construct a valid RST without token!");
        }
        string wstprefix = writer.LookupPrefix(TrustNamespace);
        writer.WriteStartElement(wstprefix, TrustFeb2005Strings.RequestedSecurityToken, TrustFeb2005Strings.Namespace);
        token.Write(writer);
        writer.WriteEndElement();
    }
    internal protected override void OnMakeReadOnly() { }
    // Element-name match: local names compared ordinally, namespaces
    // case-insensitively.
    internal static bool CompareWithNS(string first, string firstNS, string second, string secondNS)
    {
        return ((String.Compare(first, second, StringComparison.Ordinal) == 0)
            && (String.Compare(firstNS, secondNS, StringComparison.OrdinalIgnoreCase) == 0));
    }
}
// WS-Trust RequestSecurityTokenResponse specialization that reports a
// valid/invalid Status code and, when valid, carries the responder's
// PeerHashToken.
class PeerRequestSecurityTokenResponse : RequestSecurityTokenResponse
{
    public const string Action = "http://schemas.xmlsoap.org/ws/2005/02/trust/RSTR/Validate";
    public const string ValidString = "http://schemas.xmlsoap.org/ws/2005/02/trust/status/valid";
    public const string InvalidString = "http://schemas.xmlsoap.org/ws/2005/02/trust/status/invalid";
    public const string StatusString = "Status";
    public const string CodeString = "Code";
    PeerHashToken token;
    bool isValid = false;
    public PeerRequestSecurityTokenResponse()
        : this(null)
    {
    }
    public PeerRequestSecurityTokenResponse(PeerHashToken token)
    {
        this.token = token;
        this.isValid = (token != null && token.IsValid);
    }
    // Only accessible while the response is valid.
    public PeerHashToken Token
    {
        get
        {
            if (!(this.isValid))
            {
                throw Fx.AssertAndThrow("should not be called when the token is invalid!");
            }
            return this.token;
        }
    }
    public bool IsValid
    {
        get
        {
            return this.isValid;
        }
    }
    // Extracts the PeerHashToken embedded in an RSTR message body; returns
    // PeerHashToken.Invalid when the token type or status does not check out.
    public static PeerHashToken CreateHashTokenFrom(Message message)
    {
        PeerHashToken token = PeerHashToken.Invalid;
        RequestSecurityTokenResponse response = RequestSecurityTokenResponse.CreateFrom(message.GetReaderAtBodyContents(), MessageSecurityVersion.Default, new PeerSecurityTokenSerializer());
        if (String.Compare(response.TokenType, PeerHashToken.TokenTypeString, StringComparison.OrdinalIgnoreCase) != 0)
        {
            return token;
        }
        XmlElement responseXml = response.RequestSecurityTokenResponseXml;
        if (responseXml != null)
        {
            // BUGFIX: iterate as XmlNode and filter — the previous
            // foreach (XmlElement child in ...) performed an implicit direct
            // cast that threw InvalidCastException on text/comment children.
            foreach (XmlNode node in responseXml.ChildNodes)
            {
                XmlElement child = node as XmlElement;
                if (child == null)
                    continue;
                if (PeerRequestSecurityToken.CompareWithNS(child.LocalName, child.NamespaceURI, StatusString, TrustFeb2005Strings.Namespace))
                {
                    if (child.ChildNodes.Count == 1)
                    {
                        XmlElement desc = (child.ChildNodes[0] as XmlElement);
                        // BUGFIX: guard against a non-element child before
                        // dereferencing desc.
                        if (desc != null && PeerRequestSecurityToken.CompareWithNS(desc.LocalName, desc.NamespaceURI, CodeString, TrustFeb2005Strings.Namespace))
                        {
                            string code = XmlHelper.ReadTextElementAsTrimmedString(desc);
                            if (String.Compare(code, ValidString, StringComparison.OrdinalIgnoreCase) != 0)
                                break;
                        }
                    }
                }
                else if (PeerRequestSecurityToken.CompareWithNS(child.LocalName, child.NamespaceURI, TrustFeb2005Strings.RequestedSecurityToken, TrustFeb2005Strings.Namespace))
                {
                    token = PeerHashToken.CreateFrom(child);
                    break;
                }
            }
        }
        return token;
    }
    // Builds an RSTR around a token freshly derived from the credential/password.
    public static RequestSecurityTokenResponse CreateFrom(X509Certificate2 credential, string password)
    {
        PeerHashToken token = new PeerHashToken(credential, password);
        return new PeerRequestSecurityTokenResponse(token);
    }
    // Writes TokenType, a Status/Code element, and — only for a valid
    // response — the RequestedSecurityToken wrapper with the hash token.
    internal protected override void OnWriteCustomElements(XmlWriter writer)
    {
        string wstprefix = writer.LookupPrefix(TrustFeb2005Strings.Namespace);
        writer.WriteStartElement(wstprefix, TrustFeb2005Strings.TokenType, TrustFeb2005Strings.Namespace);
        writer.WriteString(PeerHashToken.TokenTypeString);
        writer.WriteEndElement();
        writer.WriteStartElement(wstprefix, StatusString, TrustFeb2005Strings.Namespace);
        writer.WriteStartElement(wstprefix, CodeString, TrustFeb2005Strings.Namespace);
        if (!this.IsValid)
            writer.WriteString(InvalidString);
        else
            writer.WriteString(ValidString);
        writer.WriteEndElement();
        writer.WriteEndElement();
        if (this.IsValid)
        {
            writer.WriteStartElement(wstprefix, PeerRequestSecurityToken.RequestedSecurityTokenElementName, TrustFeb2005Strings.Namespace);
            this.token.Write(writer);
            writer.WriteEndElement();
        }
    }
}
class PeerChannelAuthenticatorExtension : IExtension<IPeerNeighbor>
{
IPeerNeighbor host;
PeerSecurityManager securityManager;
PeerAuthState state;
EventArgs originalArgs;
EventHandler onSucceeded;
IOThreadTimer timer = null;
object thisLock = new object();
static TimeSpan Timeout = new TimeSpan(0, 2, 0);
string meshId;
enum PeerAuthState
{
Created,
Authenticated,
Failed
}
public PeerChannelAuthenticatorExtension(PeerSecurityManager securityManager, EventHandler onSucceeded, EventArgs args, string meshId)
{
this.securityManager = securityManager;
this.state = PeerAuthState.Created;
this.originalArgs = args;
this.onSucceeded = onSucceeded;
this.meshId = meshId;
}
object ThisLock
{
get
{
return this.thisLock;
}
}
public void Attach(IPeerNeighbor host)
{
Fx.AssertAndThrow(this.securityManager.AuthenticationMode == PeerAuthenticationMode.Password, "Invalid AuthenticationMode!");
Fx.AssertAndThrow(host != null, "unrecognized host!");
this.host = host;
this.timer = new IOThreadTimer(new Action<object>(OnTimeout), null, true);
this.timer.Set(Timeout);
}
static public void OnNeighborClosed(IPeerNeighbor neighbor)
{
Fx.Assert(neighbor != null, "Neighbor must have a value");
PeerChannelAuthenticatorExtension ext = neighbor.Extensions.Find<PeerChannelAuthenticatorExtension>();
if (ext != null) neighbor.Extensions.Remove(ext);
}
public void Detach(IPeerNeighbor host)
{
Fx.Assert(host != null, "unrecognized host!");
if (host.State < PeerNeighborState.Authenticated)
{
OnFailed(host);
}
this.host = null;
this.timer.Cancel();
}
void OnTimeout(object state)
{
IPeerNeighbor neighbor = host;
if (neighbor == null)
return;
if (neighbor.State < PeerNeighborState.Authenticated)
{
OnFailed(neighbor);
}
}
public void InitiateHandShake()
{
IPeerNeighbor neighbor = host;
Message reply = null;
Fx.Assert(host != null, "Cannot initiate security handshake without a host!");
//send the RST message.
using (OperationContextScope scope = new OperationContextScope(new OperationContext((ServiceHostBase)null)))
{
PeerHashToken token = this.securityManager.GetSelfToken();
Message request = Message.CreateMessage(MessageVersion.Soap12WSAddressing10, TrustFeb2005Strings.RequestSecurityToken, new PeerRequestSecurityToken(token));
bool fatal = false;
try
{
reply = neighbor.RequestSecurityToken(request);
if (!(reply != null))
{
throw Fx.AssertAndThrow("SecurityHandshake return empty message!");
}
ProcessRstr(neighbor, reply, PeerSecurityManager.FindClaim(ServiceSecurityContext.Current));
}
catch (Exception e)
{
if (Fx.IsFatal(e))
{
fatal = true;
throw;
}
DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
this.state = PeerAuthState.Failed;
if (DiagnosticUtility.ShouldTraceError)
{
ServiceSecurityContext context = ServiceSecurityContext.Current;
ClaimSet claimSet = null;
if (context != null && context.AuthorizationContext != null && context.AuthorizationContext.ClaimSets != null && context.AuthorizationContext.ClaimSets.Count > 0)
claimSet = context.AuthorizationContext.ClaimSets[0];
PeerAuthenticationFailureTraceRecord record = new PeerAuthenticationFailureTraceRecord(
meshId,
neighbor.ListenAddress.EndpointAddress.ToString(),
claimSet,
e);
TraceUtility.TraceEvent(TraceEventType.Error,
TraceCode.PeerNodeAuthenticationFailure, SR.GetString(SR.TraceCodePeerNodeAuthenticationFailure),
record, this, null);
}
neighbor.Abort(PeerCloseReason.AuthenticationFailure, PeerCloseInitiator.LocalNode);
}
finally
{
if (!fatal)
request.Close();
}
}
}
public Message ProcessRst(Message message, Claim claim)
{
IPeerNeighbor neighbor = host;
PeerRequestSecurityTokenResponse response = null;
Message reply = null;
lock (ThisLock)
{
if (this.state != PeerAuthState.Created || neighbor == null || neighbor.IsInitiator || neighbor.State != PeerNeighborState.Opened)
{
OnFailed(neighbor);
return null;
}
}
try
{
PeerHashToken receivedToken = PeerRequestSecurityToken.CreateHashTokenFrom(message);
PeerHashToken expectedToken = securityManager.GetExpectedTokenForClaim(claim);
if (!expectedToken.Equals(receivedToken))
{
OnFailed(neighbor);
}
else
{
this.state = PeerAuthState.Authenticated;
PeerHashToken selfToken = securityManager.GetSelfToken();
response = new PeerRequestSecurityTokenResponse(selfToken);
reply = Message.CreateMessage(MessageVersion.Soap12WSAddressing10, TrustFeb2005Strings.RequestSecurityTokenResponse, response);
OnAuthenticated();
}
}
catch (Exception e)
{
if (Fx.IsFatal(e)) throw;
DiagnosticUtility.TraceHandledException(e, TraceEventType.Information);
OnFailed(neighbor);
}
return reply;
}
public void ProcessRstr(IPeerNeighbor neighbor, Message message, Claim claim)
{
PeerHashToken receivedToken = PeerRequestSecurityTokenResponse.CreateHashTokenFrom(message);
if (!receivedToken.IsValid)
{
OnFailed(neighbor);
}
else
{
PeerHashToken expectedToken = securityManager.GetExpectedTokenForClaim(claim);
if (!expectedToken.Equals(receivedToken))
OnFailed(neighbor);
else
OnAuthenticated();
}
}
// Marks the handshake as succeeded: stops the timeout timer, promotes the
// neighbor to the Authenticated state and notifies the success callback.
public void OnAuthenticated()
{
    IPeerNeighbor neighbor;
    lock (ThisLock)
    {
        this.timer.Cancel();
        this.state = PeerAuthState.Authenticated;
        neighbor = this.host;
    }
    // The neighbor may already have been dropped by a concurrent failure.
    if (neighbor == null)
        return;
    neighbor.TrySetState(PeerNeighborState.Authenticated);
    onSucceeded(neighbor, originalArgs);
}
// Marks the handshake as failed: cancels the timeout timer, drops the
// neighbor reference, traces the failure (when error tracing is enabled) and
// aborts the neighbor connection.
//
// FIX: the claim-set lookup is now guarded the same way the RST failure path
// above guards it (AuthorizationContext / ClaimSets may legitimately be null
// or empty), avoiding a NullReferenceException while tracing. Additionally,
// a generic trace record is now emitted when an OperationContext exists but
// carries no security context — previously that case traced nothing.
void OnFailed(IPeerNeighbor neighbor)
{
    lock (ThisLock)
    {
        this.state = PeerAuthState.Failed;
        this.timer.Cancel();
        this.host = null;
    }
    if (DiagnosticUtility.ShouldTraceError)
    {
        String remoteUri = "";
        PeerNodeAddress remoteAddress = neighbor.ListenAddress;
        if (remoteAddress != null)
        {
            remoteUri = remoteAddress.EndpointAddress.ToString();
        }
        ClaimSet claimSet = null;
        OperationContext opContext = OperationContext.Current;
        if (opContext != null)
        {
            // Prefer the address the peer actually connected through.
            remoteUri = opContext.IncomingMessageProperties.Via.ToString();
            ServiceSecurityContext secContext = opContext.ServiceSecurityContext;
            if (secContext != null
                && secContext.AuthorizationContext != null
                && secContext.AuthorizationContext.ClaimSets != null
                && secContext.AuthorizationContext.ClaimSets.Count > 0)
            {
                claimSet = secContext.AuthorizationContext.ClaimSets[0];
            }
        }
        if (claimSet != null)
        {
            PeerAuthenticationFailureTraceRecord record = new PeerAuthenticationFailureTraceRecord(
                meshId, remoteUri, claimSet, null);
            TraceUtility.TraceEvent(TraceEventType.Error,
                TraceCode.PeerNodeAuthenticationFailure,
                SR.GetString(SR.TraceCodePeerNodeAuthenticationFailure),
                record, this, null);
        }
        else
        {
            PeerAuthenticationFailureTraceRecord record = new PeerAuthenticationFailureTraceRecord(meshId, remoteUri);
            TraceUtility.TraceEvent(TraceEventType.Error,
                TraceCode.PeerNodeAuthenticationTimeout,
                SR.GetString(SR.TraceCodePeerNodeAuthenticationTimeout),
                record, this, null);
        }
    }
    neighbor.Abort(PeerCloseReason.AuthenticationFailure, PeerCloseInitiator.LocalNode);
}
}
}
namespace System.ServiceModel.Channels
{
    // Authentication scheme used between peer nodes in a mesh.
    internal enum PeerAuthenticationMode
    {
        // Neighbors are not authenticated.
        None = 0,
        // Neighbors authenticate with a shared password.
        Password = 1,
        // Neighbors authenticate with X.509 certificates on both sides.
        MutualCertificate = 2
    }
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using System.Xml;
using System.Data;
using System.Collections.Specialized;
using System.Collections.Generic;
using System.Text;
namespace WebsitePanel.EnterpriseServer
{
public class SystemController
{
// Static-only controller; the private constructor prevents instantiation.
private SystemController()
{
}
// Returns the named settings group for an active administrator, or null when
// the caller fails the account check. Password properties are decrypted only
// for non-demo accounts.
public static SystemSettings GetSystemSettings(string settingsName)
{
    // Only active administrators may read system settings.
    if (SecurityContext.CheckAccount(DemandAccount.IsAdmin | DemandAccount.IsActive) < 0)
        return null;
    // Demo accounts receive the settings with passwords left encrypted.
    bool demo = SecurityContext.CheckAccount(DemandAccount.NotDemo) < 0;
    return GetSystemSettingsInternal(settingsName, !demo);
}
// Returns the named settings group for any active account, or null when the
// caller fails the account check. Password properties are decrypted only when
// requested AND the caller is not a demo account.
//
// FIX: the demo check was inverted ("decrypt && isDemoAccount"), which
// decrypted passwords only for demo accounts — the opposite of
// GetSystemSettings above, which passes "!isDemoAccount".
public static SystemSettings GetSystemSettingsActive(string settingsName, bool decrypt)
{
    // check account
    int accountCheck = SecurityContext.CheckAccount(DemandAccount.IsActive);
    if (accountCheck < 0)
        return null;
    bool isDemoAccount = (SecurityContext.CheckAccount(DemandAccount.NotDemo) < 0);
    return GetSystemSettingsInternal(settingsName, decrypt && !isDemoAccount);
}
// Loads the named settings group from the data provider. Properties whose
// name contains "password" are decrypted when decryptPassword is set.
internal static SystemSettings GetSystemSettingsInternal(string settingsName, bool decryptPassword)
{
    // create settings object
    SystemSettings settings = new SystemSettings();
    IDataReader reader = null;
    try
    {
        reader = DataProvider.GetSystemSettings(settingsName);
        while (reader.Read())
        {
            // NOTE(review): assumes neither column is DBNull — confirm the
            // stored procedure guarantees non-null values.
            string name = (string)reader["PropertyName"];
            string val = (string)reader["PropertyValue"];
            // Culture-safe, allocation-free substring check
            // (was name.ToLower().IndexOf("password"), which allocates and is
            // culture-sensitive — e.g. the Turkish-I problem).
            if (decryptPassword && name.IndexOf("password", StringComparison.OrdinalIgnoreCase) >= 0)
                val = CryptoUtils.Decrypt(val);
            settings[name] = val;
        }
    }
    finally
    {
        // Always release the reader, even when decryption throws.
        if (reader != null && !reader.IsClosed)
            reader.Close();
    }
    return settings;
}
// Persists the settings group as an XML properties document. Properties whose
// name contains "password" are stored encrypted. Returns 0 on success or a
// negative account-check error code.
public static int SetSystemSettings(string settingsName, SystemSettings settings)
{
    // check account
    int accountCheck = SecurityContext.CheckAccount(DemandAccount.NotDemo | DemandAccount.IsAdmin
        | DemandAccount.IsActive);
    if (accountCheck < 0) return accountCheck;
    XmlDocument xmldoc = new XmlDocument();
    XmlElement root = xmldoc.CreateElement("properties");
    foreach (string[] pair in settings.SettingsArray)
    {
        string name = pair[0];
        string val = pair[1];
        // Culture-safe check, matching GetSystemSettingsInternal
        // (was name.ToLower().IndexOf("password")).
        if (name.IndexOf("password", StringComparison.OrdinalIgnoreCase) >= 0)
            val = CryptoUtils.Encrypt(val);
        XmlElement property = xmldoc.CreateElement("property");
        property.SetAttribute("name", name);
        property.SetAttribute("value", val);
        root.AppendChild(property);
    }
    DataProvider.SetSystemSettings(settingsName, root.OuterXml);
    return 0;
}
// Returns true when the one-time Setup Control Panel Accounts (SCPA) mode is
// enabled in the setup settings.
//
// FIX: GetSystemSettings returns null when the caller fails its account
// check; the previous indexer access then threw a NullReferenceException.
// A missing settings group is treated the same as "flag not set".
public static bool GetSystemSetupMode()
{
    var scpaSystemSettings = GetSystemSettings(SystemSettings.SETUP_SETTINGS);
    // Flag either not found or empty
    return scpaSystemSettings != null
        && !String.IsNullOrEmpty(scpaSystemSettings["EnabledSCPA"]);
}
// One-time SCPA procedure: resets the passwords of the built-in
// "serveradmin" and "admin" accounts, then disables SCPA mode. Returns 0 on
// success, or a negative business error code.
//
// SECURITY FIX: the raw passwords were previously written to the task log
// via TaskManager.WriteParameter — plaintext credentials must never be
// persisted in audit logs. Only non-sensitive parameters are logged now.
public static int SetupControlPanelAccounts(string passwordA, string passwordB, string ip)
{
    try
    {
        TaskManager.StartTask("SYSTEM", "COMPLETE_SCPA");
        // Do NOT log passwordA / passwordB here.
        TaskManager.WriteParameter("IP Address", ip);
        //
        var enabledScpaMode = GetSystemSetupMode();
        // SCPA must have been explicitly enabled beforehand.
        if (enabledScpaMode == false)
        {
            TaskManager.WriteWarning("Attempt to execute SCPA procedure for an unknown reason");
            //
            return BusinessErrorCodes.FAILED_EXECUTE_SERVICE_OPERATION;
        }
        // Entering the security context into Supervisor mode
        SecurityContext.SetThreadSupervisorPrincipal();
        //
        var accountA = UserController.GetUserInternally("serveradmin");
        var accountB = UserController.GetUserInternally("admin");
        //
        var resultCodeA = UserController.ChangeUserPassword(accountA.UserId, passwordA);
        //
        if (resultCodeA < 0)
        {
            TaskManager.WriteParameter("Result Code A", resultCodeA);
            //
            return resultCodeA;
        }
        //
        var resultCodeB = UserController.ChangeUserPassword(accountB.UserId, passwordB);
        //
        if (resultCodeB < 0)
        {
            TaskManager.WriteParameter("Result Code B", resultCodeB);
            //
            return resultCodeB;
        }
        // Disable SCPA mode
        SetSystemSettings(SystemSettings.SETUP_SETTINGS, SystemSettings.Empty);
        // Operation has succeeded
        return 0;
    }
    catch (Exception ex)
    {
        TaskManager.WriteError(ex);
        //
        return BusinessErrorCodes.FAILED_EXECUTE_SERVICE_OPERATION;
    }
    finally
    {
        TaskManager.CompleteTask();
    }
}
// Twilio SMS integration is considered enabled only when the account SID,
// auth token and sender phone number are all configured.
public static bool CheckIsTwilioEnabled()
{
    var settings = SystemController.GetSystemSettingsActive(SystemSettings.TWILIO_SETTINGS, false);
    if (settings == null)
        return false;
    string[] requiredKeys =
    {
        SystemSettings.TWILIO_ACCOUNTSID_KEY,
        SystemSettings.TWILIO_AUTHTOKEN_KEY,
        SystemSettings.TWILIO_PHONEFROM_KEY,
    };
    foreach (string key in requiredKeys)
    {
        if (string.IsNullOrEmpty(settings.GetValueOrDefault(key, string.Empty)))
            return false;
    }
    return true;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
namespace System.Numerics.Tests
{
public class implicitExplicitCastOperatorsTest
{
// Implicitly casting an Int16 to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyInt16ImplicitCastToComplex(Int16 value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("Int16ImplicitCast ({0})", value));
    if (value != Int16.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + Int16ImplicitCast ({0})", value));
    }
    if (value != Int16.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - Int16ImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_Int16ImplicitCastToComplex()
{
    // Lower bound, random negatives, small fixed values, random positives, upper bound.
    VerifyInt16ImplicitCastToComplex(Int16.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyInt16ImplicitCastToComplex(Support.GetRandomInt16Value(true));
    }
    VerifyInt16ImplicitCastToComplex(-1);
    VerifyInt16ImplicitCastToComplex(0);
    VerifyInt16ImplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyInt16ImplicitCastToComplex(Support.GetRandomInt16Value(false));
    }
    VerifyInt16ImplicitCastToComplex(Int16.MaxValue);
}
// Implicitly casting an Int32 to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyInt32ImplicitCastToComplex(Int32 value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("Int32ImplicitCast ({0})", value));
    if (value != Int32.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + Int32ImplicitCast ({0})", value));
    }
    if (value != Int32.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - Int32ImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_Int32ImplicitCastToComplex()
{
    // Lower bound, random negatives, small fixed values, random positives, upper bound.
    VerifyInt32ImplicitCastToComplex(Int32.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyInt32ImplicitCastToComplex(Support.GetRandomInt32Value(true));
    }
    VerifyInt32ImplicitCastToComplex(-1);
    VerifyInt32ImplicitCastToComplex(0);
    VerifyInt32ImplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyInt32ImplicitCastToComplex(Support.GetRandomInt32Value(false));
    }
    VerifyInt32ImplicitCastToComplex(Int32.MaxValue);
}
// Implicitly casting an Int64 to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyInt64ImplicitCastToComplex(Int64 value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("Int64ImplicitCast ({0})", value));
    if (value != Int64.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + Int64ImplicitCast ({0})", value));
    }
    if (value != Int64.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - Int64ImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_Int64ImplicitCastToComplex()
{
    // Lower bound, random negatives, small fixed values, random positives, upper bound.
    VerifyInt64ImplicitCastToComplex(Int64.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyInt64ImplicitCastToComplex(Support.GetRandomInt64Value(true));
    }
    VerifyInt64ImplicitCastToComplex(-1);
    VerifyInt64ImplicitCastToComplex(0);
    VerifyInt64ImplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyInt64ImplicitCastToComplex(Support.GetRandomInt64Value(false));
    }
    VerifyInt64ImplicitCastToComplex(Int64.MaxValue);
}
// Implicitly casting a UInt16 to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyUInt16ImplicitCastToComplex(UInt16 value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("UInt16ImplicitCast ({0})", value));
    if (value != UInt16.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + UInt16ImplicitCast ({0})", value));
    }
    if (value != UInt16.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - UInt16ImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_UInt16ImplicitCastToComplex()
{
    // Bounds and small fixed values; random samples only in CLS-compliant builds.
    VerifyUInt16ImplicitCastToComplex(UInt16.MinValue);
    VerifyUInt16ImplicitCastToComplex(0);
    VerifyUInt16ImplicitCastToComplex(1);
#if CLS_Compliant
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyUInt16ImplicitCastToComplex(Support.GetRandomUInt16Value());
    }
#endif
    VerifyUInt16ImplicitCastToComplex(UInt16.MaxValue);
}
// Implicitly casting a UInt32 to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyUInt32ImplicitCastToComplex(UInt32 value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("UInt32ImplicitCast ({0})", value));
    if (value != UInt32.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + UInt32ImplicitCast ({0})", value));
    }
    if (value != UInt32.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - UInt32ImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_UInt32ImplicitCastToComplex()
{
    // Bounds and small fixed values; random samples only in CLS-compliant builds.
    VerifyUInt32ImplicitCastToComplex(UInt32.MinValue);
    VerifyUInt32ImplicitCastToComplex(0);
    VerifyUInt32ImplicitCastToComplex(1);
#if CLS_Compliant
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyUInt32ImplicitCastToComplex(Support.GetRandomUInt32Value());
    }
#endif
    VerifyUInt32ImplicitCastToComplex(UInt32.MaxValue);
}
// Implicitly casting a UInt64 to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyUInt64ImplicitCastToComplex(UInt64 value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("UInt64ImplicitCast ({0})", value));
    if (value != UInt64.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + UInt64ImplicitCast ({0})", value));
    }
    if (value != UInt64.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - UInt64ImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_UInt64ImplicitCastToComplex()
{
    // Bounds and small fixed values; random samples only in CLS-compliant builds.
    VerifyUInt64ImplicitCastToComplex(UInt64.MinValue);
    VerifyUInt64ImplicitCastToComplex(0);
    VerifyUInt64ImplicitCastToComplex(1);
#if CLS_Compliant
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyUInt64ImplicitCastToComplex(Support.GetRandomUInt64Value());
    }
#endif
    VerifyUInt64ImplicitCastToComplex(UInt64.MaxValue);
}
// Implicitly casting an SByte to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifySByteImplicitCastToComplex(SByte value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("SByteImplicitCast ({0})", value));
    if (value != SByte.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + SByteImplicitCast ({0})", value));
    }
    if (value != SByte.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - SByteImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_SByteImplicitCastToComplex()
{
    // Bounds and small fixed values; random samples only in CLS-compliant builds.
    VerifySByteImplicitCastToComplex(SByte.MinValue);
#if CLS_Compliant
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifySByteImplicitCastToComplex(Support.GetRandomSByteValue(false));
    }
#endif
    VerifySByteImplicitCastToComplex(0);
    VerifySByteImplicitCastToComplex(1);
#if CLS_Compliant
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifySByteImplicitCastToComplex(Support.GetRandomSByteValue(true));
    }
#endif
    VerifySByteImplicitCastToComplex(SByte.MaxValue);
}
// Implicitly casting a Byte to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyByteImplicitCastToComplex(Byte value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("ByteImplicitCast ({0})", value));
    if (value != Byte.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + ByteImplicitCast ({0})", value));
    }
    if (value != Byte.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - ByteImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_ByteImplicitCastToComplex()
{
    // Bounds, small fixed values and random samples.
    VerifyByteImplicitCastToComplex(Byte.MinValue);
    VerifyByteImplicitCastToComplex(0);
    VerifyByteImplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyByteImplicitCastToComplex(Support.GetRandomByteValue());
    }
    VerifyByteImplicitCastToComplex(Byte.MaxValue);
}
// Implicitly casting a Single to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes).
private static void VerifySingleImplicitCastToComplex(Single value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("SingleImplicitCast ({0})", value));
    if (value != Single.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + SingleImplicitCast ({0})", value));
    }
    if (value != Single.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - SingleImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_SingleImplicitCastToComplex()
{
    // Lower bound, random samples, small fixed values, random samples, upper bound.
    VerifySingleImplicitCastToComplex(Single.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifySingleImplicitCastToComplex(Support.GetRandomSingleValue(false));
    }
    VerifySingleImplicitCastToComplex(0);
    VerifySingleImplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifySingleImplicitCastToComplex(Support.GetRandomSingleValue(true));
    }
    VerifySingleImplicitCastToComplex(Single.MaxValue);
}
// Implicitly casting a double to Complex must yield (value, 0); the result
// must also survive +1 / -1 (skipped at the extremes).
private static void VerifyDoubleImplicitCastToComplex(double value)
{
    Complex cast = value;
    Support.VerifyRealImaginaryProperties(cast, value, 0.0,
        string.Format("DoubleImplicitCast ({0})", value));
    if (value != double.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, value + 1, 0.0,
            string.Format("PLuS + DoubleImplicitCast ({0})", value));
    }
    if (value != double.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, value - 1, 0.0,
            string.Format("Minus - DoubleImplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_DoubleImplicitCastToComplex()
{
    // Lower bound, random samples, small fixed values, random samples, upper bound.
    VerifyDoubleImplicitCastToComplex(double.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyDoubleImplicitCastToComplex(Support.GetRandomDoubleValue(false));
    }
    VerifyDoubleImplicitCastToComplex(0);
    VerifyDoubleImplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyDoubleImplicitCastToComplex(Support.GetRandomDoubleValue(true));
    }
    VerifyDoubleImplicitCastToComplex(double.MaxValue);
}
// Explicitly casting a BigInteger to Complex must yield ((double)value, 0);
// the result must also survive +1 / -1 (skipped at the double extremes).
private static void VerifyBigIntegerExplicitCastToComplex(BigInteger value)
{
    Complex cast = (Complex)value;
    Support.VerifyRealImaginaryProperties(cast, (Double)value, 0.0,
        string.Format("BigIntegerExplicitCast ({0})", value));
    if (value != (BigInteger)double.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, (Double)(value + 1), 0.0,
            string.Format("PLuS + BigIntegerExplicitCast ({0})", value));
    }
    if (value != (BigInteger)double.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, (Double)(value - 1), 0.0,
            string.Format("Minus - BigIntegerExplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_BigIntegerExplicitCastToComplex()
{
    // Double-range bounds, random samples and small fixed values.
    VerifyBigIntegerExplicitCastToComplex((BigInteger)double.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyBigIntegerExplicitCastToComplex(Support.GetRandomBigIntegerValue(false));
    }
    VerifyBigIntegerExplicitCastToComplex(0);
    VerifyBigIntegerExplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyBigIntegerExplicitCastToComplex(Support.GetRandomBigIntegerValue(true));
    }
    VerifyBigIntegerExplicitCastToComplex((BigInteger)double.MaxValue);
}
// Explicitly casting a Decimal to Complex must yield ((double)value, 0); the
// result must also survive +1 / -1 (skipped at the extremes to avoid overflow).
private static void VerifyDecimalExplicitCastToComplex(Decimal value)
{
    Complex cast = (Complex)value;
    Support.VerifyRealImaginaryProperties(cast, (Double)value, 0.0,
        string.Format("DecimalExplicitCast ({0})", value));
    if (value != Decimal.MaxValue)
    {
        Support.VerifyRealImaginaryProperties(cast + 1, (Double)(value + 1), 0.0,
            string.Format("PLuS + DecimalExplicitCast ({0})", value));
    }
    if (value != Decimal.MinValue)
    {
        Support.VerifyRealImaginaryProperties(cast - 1, (Double)(value - 1), 0.0,
            string.Format("Minus - DecimalExplicitCast + 1 ({0})", value));
    }
}
[Fact]
public static void RunTests_DecimalExplicitCastToComplex()
{
    // Bounds, random samples and small fixed values.
    VerifyDecimalExplicitCastToComplex(Decimal.MinValue);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyDecimalExplicitCastToComplex(Support.GetRandomDecimalValue(false));
    }
    VerifyDecimalExplicitCastToComplex(0);
    VerifyDecimalExplicitCastToComplex(1);
    for (int iteration = 0; iteration < 3; ++iteration)
    {
        VerifyDecimalExplicitCastToComplex(Support.GetRandomDecimalValue(true));
    }
    VerifyDecimalExplicitCastToComplex(Decimal.MaxValue);
}
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections;
namespace log4net.Appender
{
/// <summary>
/// A strongly-typed collection of <see cref="IAppender"/> objects.
/// </summary>
/// <author>Nicko Cadell</author>
public class AppenderCollection : ICollection, IList, IEnumerable
#if !NETSTANDARD1_3
, ICloneable
#endif
{
#region Interfaces
/// <summary>
/// Supports type-safe iteration over a <see cref="AppenderCollection"/>.
/// </summary>
/// <remarks>
/// Mirrors <see cref="System.Collections.IEnumerator"/> but with a strongly
/// typed <see cref="Current"/> property, avoiding casts at call sites.
/// </remarks>
/// <exclude/>
public interface IAppenderCollectionEnumerator
{
    /// <summary>
    /// Gets the current element in the collection.
    /// </summary>
    IAppender Current { get; }
    /// <summary>
    /// Advances the enumerator to the next element in the collection.
    /// </summary>
    /// <returns>
    /// <c>true</c> if the enumerator was successfully advanced to the next element;
    /// <c>false</c> if the enumerator has passed the end of the collection.
    /// </returns>
    /// <exception cref="InvalidOperationException">
    /// The collection was modified after the enumerator was created.
    /// </exception>
    bool MoveNext();
    /// <summary>
    /// Sets the enumerator to its initial position, before the first element in the collection.
    /// </summary>
    void Reset();
}
#endregion
private const int DEFAULT_CAPACITY = 16;
#region Implementation (data)
private IAppender[] m_array;
private int m_count = 0;
private int m_version = 0;
#endregion
#region Static Wrappers
/// <summary>
/// Creates a read-only wrapper for a <c>AppenderCollection</c> instance.
/// </summary>
/// <param name="list">list to create a readonly wrapper arround</param>
/// <returns>
/// An <c>AppenderCollection</c> wrapper that is read-only.
/// </returns>
// Wraps the supplied collection in a read-only view; all mutating members of
// the wrapper throw NotSupportedException.
public static AppenderCollection ReadOnly(AppenderCollection list)
{
    if (list == null)
    {
        throw new ArgumentNullException("list");
    }
    return new ReadOnlyAppenderCollection(list);
}
#endregion
#region Static Fields
/// <summary>
/// An empty readonly static AppenderCollection
/// </summary>
/// <remarks>
/// Shared sentinel; safe to hand out because the wrapper rejects mutation.
/// </remarks>
public static readonly AppenderCollection EmptyCollection = ReadOnly(new AppenderCollection(0));
#endregion
#region Constructors
/// <summary>
/// Initializes a new instance of the <c>AppenderCollection</c> class
/// that is empty and has the default initial capacity.
/// </summary>
public AppenderCollection()
{
    m_array = new IAppender[DEFAULT_CAPACITY];
}
/// <summary>
/// Initializes a new instance of the <c>AppenderCollection</c> class
/// that has the specified initial capacity.
/// </summary>
/// <param name="capacity">
/// The number of elements that the new <c>AppenderCollection</c> is initially capable of storing.
/// </param>
public AppenderCollection(int capacity)
{
    m_array = new IAppender[capacity];
}
/// <summary>
/// Initializes a new instance of the <c>AppenderCollection</c> class
/// that contains elements copied from the specified <c>AppenderCollection</c>.
/// </summary>
/// <param name="c">The <c>AppenderCollection</c> whose elements are copied to the new collection.</param>
public AppenderCollection(AppenderCollection c)
{
    // Pre-size exactly, then reuse AddRange for the copy.
    m_array = new IAppender[c.Count];
    AddRange(c);
}
/// <summary>
/// Initializes a new instance of the <c>AppenderCollection</c> class
/// that contains elements copied from the specified <see cref="IAppender"/> array.
/// </summary>
/// <param name="a">The <see cref="IAppender"/> array whose elements are copied to the new list.</param>
public AppenderCollection(IAppender[] a)
{
    m_array = new IAppender[a.Length];
    AddRange(a);
}
/// <summary>
/// Initializes a new instance of the <c>AppenderCollection</c> class
/// that contains elements copied from the specified <see cref="IAppender"/> collection.
/// </summary>
/// <param name="col">The <see cref="IAppender"/> collection whose elements are copied to the new list.</param>
public AppenderCollection(ICollection col)
{
    m_array = new IAppender[col.Count];
    AddRange(col);
}
/// <summary>
/// Type visible only to our subclasses
/// Used to access protected constructor
/// </summary>
/// <exclude/>
internal protected enum Tag
{
    /// <summary>
    /// A value
    /// </summary>
    Default
}
/// <summary>
/// Allow subclasses to avoid our default constructors
/// </summary>
/// <remarks>
/// Leaves <c>m_array</c> null; subclasses (e.g. wrappers) are expected to
/// override every member that touches the backing store.
/// </remarks>
/// <param name="tag"></param>
/// <exclude/>
internal protected AppenderCollection(Tag tag)
{
    m_array = null;
}
#endregion
#region Operations (type-safe ICollection)
/// <summary>
/// Gets the number of elements actually contained in the <c>AppenderCollection</c>.
/// </summary>
/// <remarks>
/// This is the logical element count, not the capacity of the backing array.
/// </remarks>
public virtual int Count
{
    get { return m_count; }
}
/// <summary>
/// Copies the entire <c>AppenderCollection</c> to a one-dimensional
/// <see cref="IAppender"/> array.
/// </summary>
/// <param name="array">The one-dimensional <see cref="IAppender"/> array to copy to.</param>
// Copies the entire collection into the target array starting at index 0.
public virtual void CopyTo(IAppender[] array)
{
    CopyTo(array, 0);
}
/// <summary>
/// Copies the entire <c>AppenderCollection</c> to a one-dimensional
/// <see cref="IAppender"/> array, starting at the specified index of the target array.
/// </summary>
/// <param name="array">The one-dimensional <see cref="IAppender"/> array to copy to.</param>
/// <param name="start">The zero-based index in <paramref name="array"/> at which copying begins.</param>
// Copies the entire collection into the target array beginning at the given
// offset; throws ArgumentException when the destination range is too small.
public virtual void CopyTo(IAppender[] array, int start)
{
    // Ensure the target range [start, start + Count) fits inside the array.
    if (array.GetUpperBound(0) + 1 - start < m_count)
    {
        throw new System.ArgumentException("Destination array was not long enough.");
    }
    Array.Copy(m_array, 0, array, start, m_count);
}
/// <summary>
/// Gets a value indicating whether access to the collection is synchronized (thread-safe).
/// </summary>
/// <returns>false, because the backing type is an array, which is never thread-safe.</returns>
public virtual bool IsSynchronized
{
    get { return false; }
}
/// <summary>
/// Gets an object that can be used to synchronize access to the collection.
/// </summary>
/// <remarks>
/// NOTE(review): this returns the backing array itself, which is replaced by
/// <c>Clear</c> and by capacity growth; callers that lock on SyncRoot across
/// mutations may therefore end up locking different objects — confirm this is
/// the intended contract.
/// </remarks>
public virtual object SyncRoot
{
    get { return m_array; }
}
#endregion
#region Operations (type-safe IList)
/// <summary>
/// Gets or sets the <see cref="IAppender"/> at the specified index.
/// </summary>
/// <param name="index">The zero-based index of the element to get or set.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// <para><paramref name="index"/> is less than zero</para>
/// <para>-or-</para>
/// <para><paramref name="index"/> is equal to or greater than <see cref="AppenderCollection.Count"/>.</para>
/// </exception>
public virtual IAppender this[int index]
{
    get
    {
        ValidateIndex(index); // throws ArgumentOutOfRangeException on bad index
        return m_array[index];
    }
    set
    {
        ValidateIndex(index); // throws ArgumentOutOfRangeException on bad index
        ++m_version;          // invalidates any live enumerators
        m_array[index] = value;
    }
}
/// <summary>
/// Adds a <see cref="IAppender"/> to the end of the <c>AppenderCollection</c>.
/// </summary>
/// <param name="item">The <see cref="IAppender"/> to be added to the end of the <c>AppenderCollection</c>.</param>
/// <returns>The index at which the value has been added.</returns>
// Appends an appender at the tail, growing the backing array on demand, and
// returns the index it was stored at.
public virtual int Add(IAppender item)
{
    if (m_array.Length == m_count)
    {
        EnsureCapacity(m_count + 1);
    }
    int index = m_count;
    m_array[index] = item;
    ++m_version;           // invalidates any live enumerators
    m_count = index + 1;
    return index;
}
/// <summary>
/// Removes all elements from the <c>AppenderCollection</c>.
/// </summary>
// Empties the collection and shrinks the backing array back to the default
// capacity.
public virtual void Clear()
{
    m_version++;   // invalidates any live enumerators
    m_count = 0;
    m_array = new IAppender[DEFAULT_CAPACITY];
}
/// <summary>
/// Creates a shallow copy of the <see cref="AppenderCollection"/>.
/// </summary>
/// <returns>A new <see cref="AppenderCollection"/> with a shallow copy of the collection data.</returns>
// Produces a shallow copy: the elements themselves are shared, the backing
// array is not.
public virtual object Clone()
{
    AppenderCollection copy = new AppenderCollection(m_count);
    copy.m_count = m_count;
    copy.m_version = m_version;
    Array.Copy(m_array, 0, copy.m_array, 0, m_count);
    return copy;
}
/// <summary>
/// Determines whether a given <see cref="IAppender"/> is in the <c>AppenderCollection</c>.
/// </summary>
/// <param name="item">The <see cref="IAppender"/> to check for.</param>
/// <returns><c>true</c> if <paramref name="item"/> is found in the <c>AppenderCollection</c>; otherwise, <c>false</c>.</returns>
// True when the item occurs in the collection (linear scan via Equals,
// identical to the IndexOf search).
public virtual bool Contains(IAppender item)
{
    return IndexOf(item) >= 0;
}
/// <summary>
/// Returns the zero-based index of the first occurrence of a <see cref="IAppender"/>
/// in the <c>AppenderCollection</c>.
/// </summary>
/// <param name="item">The <see cref="IAppender"/> to locate in the <c>AppenderCollection</c>.</param>
/// <returns>
/// The zero-based index of the first occurrence of <paramref name="item"/>
/// in the entire <c>AppenderCollection</c>, if found; otherwise, -1.
/// </returns>
// Linear search for the first element equal to item; returns its index or -1
// when not present.
public virtual int IndexOf(IAppender item)
{
    for (int index = 0; index < m_count; index++)
    {
        if (m_array[index].Equals(item))
        {
            return index;
        }
    }
    return -1;
}
/// <summary>
/// Inserts an element into the <c>AppenderCollection</c> at the specified index.
/// </summary>
/// <param name="index">The zero-based index at which <paramref name="item"/> should be inserted.</param>
/// <param name="item">The <see cref="IAppender"/> to insert.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// <para><paramref name="index"/> is less than zero</para>
/// <para>-or-</para>
/// <para><paramref name="index"/> is equal to or greater than <see cref="AppenderCollection.Count"/>.</para>
/// </exception>
// Inserts an appender at the given position, shifting the tail one slot to
// the right; index == Count appends.
public virtual void Insert(int index, IAppender item)
{
    ValidateIndex(index, true); // throws; "true" allows index == Count
    if (m_array.Length == m_count)
    {
        EnsureCapacity(m_count + 1);
    }
    if (index < m_count)
    {
        // Open the slot by shifting [index, Count) right by one.
        Array.Copy(m_array, index, m_array, index + 1, m_count - index);
    }
    m_array[index] = item;
    ++m_count;
    ++m_version;   // invalidates any live enumerators
}
/// <summary>
/// Removes the first occurrence of a specific <see cref="IAppender"/> from the <c>AppenderCollection</c>.
/// </summary>
/// <param name="item">The <see cref="IAppender"/> to remove from the <c>AppenderCollection</c>.</param>
/// <exception cref="ArgumentException">
/// The specified <see cref="IAppender"/> was not found in the <c>AppenderCollection</c>.
/// </exception>
public virtual void Remove(IAppender item)
{
    int i = IndexOf(item);
    if (i < 0)
    {
        throw new System.ArgumentException("Cannot remove the specified item because it was not found in the specified Collection.");
    }
    // RemoveAt performs the element shift and increments m_version itself,
    // so the extra ++m_version the original did here was redundant (it
    // bumped the version twice per removal).
    RemoveAt(i);
}
/// <summary>
/// Removes the element at the specified index of the <c>AppenderCollection</c>.
/// </summary>
/// <param name="index">The zero-based index of the element to remove.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// <para><paramref name="index"/> is less than zero</para>
/// <para>-or-</para>
/// <para><paramref name="index"/> is equal to or greater than <see cref="AppenderCollection.Count"/>.</para>
/// </exception>
public virtual void RemoveAt(int index)
{
    ValidateIndex(index); // throws for an out-of-range index
    m_count--;
    if (index < m_count)
    {
        // Shift the tail of the array down over the removed slot.
        Array.Copy(m_array, index + 1, m_array, index, m_count - index);
    }
    // Clear the now-unused trailing slot so the collection does not keep the
    // removed appender alive. IAppender is an interface (a reference type),
    // so a direct null assignment is valid; the original's temporary
    // single-element array copy was a leftover from a generic template and
    // allocated on every call for no benefit.
    m_array[m_count] = null;
    m_version++; // invalidates any outstanding enumerators
}
/// <summary>
/// Gets a value indicating whether the collection has a fixed size.
/// </summary>
/// <value>true if the collection has a fixed size; otherwise, false. The default is false</value>
/// <remarks>The read-only wrapper subclass overrides this to return true.</remarks>
public virtual bool IsFixedSize
{
    get { return false; }
}
/// <summary>
/// Gets a value indicating whether the IList is read-only.
/// </summary>
/// <value>true if the collection is read-only; otherwise, false. The default is false</value>
/// <remarks>The read-only wrapper subclass overrides this to return true.</remarks>
public virtual bool IsReadOnly
{
    get { return false; }
}
#endregion
#region Operations (type-safe IEnumerable)
/// <summary>
/// Returns an enumerator that can iterate through the <c>AppenderCollection</c>.
/// </summary>
/// <returns>An <see cref="Enumerator"/> for the entire <c>AppenderCollection</c>.</returns>
/// <remarks>
/// The enumerator snapshots the collection's version; modifying the
/// collection afterwards causes its <c>MoveNext</c> to throw.
/// </remarks>
public virtual IAppenderCollectionEnumerator GetEnumerator()
{
    return new Enumerator(this);
}
#endregion
#region Public helpers (just to mimic some nice features of ArrayList)
/// <summary>
/// Gets or sets the number of elements the <c>AppenderCollection</c> can contain.
/// </summary>
/// <remarks>
/// The setter never shrinks below the current element count; setting the
/// capacity to zero resets the backing array to the default capacity.
/// </remarks>
public virtual int Capacity
{
    get { return m_array.Length; }
    set
    {
        // Clamp: the capacity can never drop below the live element count.
        int requested = (value < m_count) ? m_count : value;
        if (requested == m_array.Length)
        {
            return; // already exactly that size - nothing to reallocate
        }
        if (requested > 0)
        {
            IAppender[] resized = new IAppender[requested];
            Array.Copy(m_array, 0, resized, 0, m_count);
            m_array = resized;
        }
        else
        {
            m_array = new IAppender[DEFAULT_CAPACITY];
        }
    }
}
/// <summary>
/// Adds the elements of another <c>AppenderCollection</c> to the current <c>AppenderCollection</c>.
/// </summary>
/// <param name="x">The <c>AppenderCollection</c> whose elements should be added to the end of the current <c>AppenderCollection</c>.</param>
/// <returns>The new <see cref="AppenderCollection.Count"/> of the <c>AppenderCollection</c>.</returns>
public virtual int AddRange(AppenderCollection x)
{
    int incoming = x.Count;
    if (m_count + incoming >= m_array.Length)
    {
        EnsureCapacity(m_count + incoming);
    }
    // Bulk-copy directly from the other collection's backing array.
    Array.Copy(x.m_array, 0, m_array, m_count, incoming);
    m_count += incoming;
    m_version++;
    return m_count;
}
/// <summary>
/// Adds the elements of a <see cref="IAppender"/> array to the current <c>AppenderCollection</c>.
/// </summary>
/// <param name="x">The <see cref="IAppender"/> array whose elements should be added to the end of the <c>AppenderCollection</c>.</param>
/// <returns>The new <see cref="AppenderCollection.Count"/> of the <c>AppenderCollection</c>.</returns>
public virtual int AddRange(IAppender[] x)
{
    int incoming = x.Length;
    if (m_count + incoming >= m_array.Length)
    {
        EnsureCapacity(m_count + incoming);
    }
    // Append the whole source array after the current elements.
    Array.Copy(x, 0, m_array, m_count, incoming);
    m_count += incoming;
    m_version++;
    return m_count;
}
/// <summary>
/// Adds the elements of a <see cref="IAppender"/> collection to the current <c>AppenderCollection</c>.
/// </summary>
/// <param name="col">The <see cref="IAppender"/> collection whose elements should be added to the end of the <c>AppenderCollection</c>.</param>
/// <returns>The new <see cref="AppenderCollection.Count"/> of the <c>AppenderCollection</c>.</returns>
public virtual int AddRange(ICollection col)
{
    if (m_count + col.Count >= m_array.Length)
    {
        EnsureCapacity(m_count + col.Count);
    }
    foreach(object item in col)
    {
        // Add() updates m_count and m_version per element. The cast throws
        // InvalidCastException if an element is not an IAppender.
        Add((IAppender)item);
    }
    return m_count;
}
/// <summary>
/// Sets the capacity to the actual number of elements.
/// </summary>
/// <remarks>
/// Delegates to the <see cref="Capacity"/> setter, which only reallocates
/// when the capacity actually changes.
/// </remarks>
public virtual void TrimToSize()
{
    this.Capacity = m_count;
}
/// <summary>
/// Return the collection elements as an array
/// </summary>
/// <returns>the array</returns>
public virtual IAppender[] ToArray()
{
    // Copy the populated prefix of the backing array into a right-sized
    // snapshot; an empty collection yields a zero-length array.
    IAppender[] snapshot = new IAppender[m_count];
    if (m_count > 0)
    {
        Array.Copy(m_array, 0, snapshot, 0, m_count);
    }
    return snapshot;
}
#endregion
#region Implementation (helpers)
/// <summary>
/// Validates that <paramref name="i"/> is a valid index of an existing element.
/// </summary>
/// <param name="i">The index to validate.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// <para><paramref name="i"/> is less than zero</para>
/// <para>-or-</para>
/// <para><paramref name="i"/> is equal to or greater than <see cref="AppenderCollection.Count"/>.</para>
/// </exception>
private void ValidateIndex(int i)
{
    ValidateIndex(i, false);
}
/// <summary>
/// Validates that <paramref name="i"/> is a valid index; when
/// <paramref name="allowEqualEnd"/> is <c>true</c> the index may also equal
/// <see cref="AppenderCollection.Count"/> (the append/insert-at-end position).
/// </summary>
/// <param name="i">The index to validate.</param>
/// <param name="allowEqualEnd">Whether an index equal to the element count is acceptable.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// <para><paramref name="i"/> is less than zero</para>
/// <para>-or-</para>
/// <para><paramref name="i"/> is equal to or greater than <see cref="AppenderCollection.Count"/>.</para>
/// </exception>
private void ValidateIndex(int i, bool allowEqualEnd)
{
    int max = (allowEqualEnd) ? (m_count) : (m_count-1);
    if (i < 0 || i > max)
    {
        throw log4net.Util.SystemInfo.CreateArgumentOutOfRangeException("i", (object)i, "Index was out of range. Must be non-negative and less than the size of the collection. [" + (object)i + "] Specified argument was out of the range of valid values.");
    }
}
private void EnsureCapacity(int min)
{
    // Grow geometrically: double the current capacity (or start at
    // DEFAULT_CAPACITY when empty), but never allocate less than the
    // requested minimum.
    int target = (m_array.Length == 0) ? DEFAULT_CAPACITY : m_array.Length * 2;
    if (target < min)
    {
        target = min;
    }
    this.Capacity = target;
}
#endregion
#region Implementation (ICollection)
// Explicit ICollection.CopyTo: copies only the populated portion of the
// backing array into the destination, starting at 'start'.
void ICollection.CopyTo(Array array, int start)
{
    if (m_count > 0)
    {
        Array.Copy(m_array, 0, array, start, m_count);
    }
}
#endregion
#region Implementation (IList)
// Explicit IList implementation: each member delegates to the type-safe
// member of the same name, casting object arguments to IAppender (an
// element of the wrong type causes an InvalidCastException).
object IList.this[int i]
{
    get { return (object)this[i]; }
    set { this[i] = (IAppender)value; }
}
int IList.Add(object x)
{
    return this.Add((IAppender)x);
}
bool IList.Contains(object x)
{
    return this.Contains((IAppender)x);
}
int IList.IndexOf(object x)
{
    return this.IndexOf((IAppender)x);
}
void IList.Insert(int pos, object x)
{
    this.Insert(pos, (IAppender)x);
}
void IList.Remove(object x)
{
    this.Remove((IAppender)x);
}
void IList.RemoveAt(int pos)
{
    this.RemoveAt(pos);
}
#endregion
#region Implementation (IEnumerable)
// Explicit IEnumerable implementation: defers to the type-safe
// GetEnumerator; the nested Enumerator also implements IEnumerator.
IEnumerator IEnumerable.GetEnumerator()
{
    return (IEnumerator)(this.GetEnumerator());
}
#endregion
#region Nested enumerator class
/// <summary>
/// Supports simple iteration over a <see cref="AppenderCollection"/>.
/// </summary>
/// <exclude/>
private sealed class Enumerator : IEnumerator, IAppenderCollectionEnumerator
{
    #region Implementation (data)
    private readonly AppenderCollection m_collection;
    private int m_index;
    private int m_version;
    #endregion
    #region Construction
    /// <summary>
    /// Initializes a new instance of the <c>Enumerator</c> class.
    /// </summary>
    /// <param name="collection">The collection to iterate over.</param>
    internal Enumerator(AppenderCollection collection)
    {
        m_collection = collection;
        m_index = -1;
        // Snapshot the collection's version so MoveNext can detect
        // modifications made after this enumerator was created.
        m_version = collection.m_version;
    }
    #endregion
    #region Operations (type-safe IEnumerator)
    /// <summary>
    /// Gets the current element in the collection.
    /// </summary>
    public IAppender Current
    {
        get { return m_collection[m_index]; }
    }
    /// <summary>
    /// Advances the enumerator to the next element in the collection.
    /// </summary>
    /// <returns>
    /// <c>true</c> if the enumerator was successfully advanced to the next element;
    /// <c>false</c> if the enumerator has passed the end of the collection.
    /// </returns>
    /// <exception cref="InvalidOperationException">
    /// The collection was modified after the enumerator was created.
    /// </exception>
    public bool MoveNext()
    {
        // Fail fast when the underlying collection changed under us.
        if (m_version != m_collection.m_version)
        {
            throw new System.InvalidOperationException("Collection was modified; enumeration operation may not execute.");
        }
        m_index++;
        return m_index < m_collection.Count;
    }
    /// <summary>
    /// Sets the enumerator to its initial position, before the first element in the collection.
    /// </summary>
    public void Reset()
    {
        m_index = -1;
    }
    #endregion
    #region Implementation (IEnumerator)
    object IEnumerator.Current
    {
        get { return this.Current; }
    }
    #endregion
}
#endregion
#region Nested Read Only Wrapper class
/// <exclude/>
private sealed class ReadOnlyAppenderCollection : AppenderCollection, ICollection
{
    // Shared message thrown by every mutating member of this wrapper.
    private const string c_readOnlyMessage = "This is a Read Only Collection and can not be modified";
    #region Implementation (data)
    private readonly AppenderCollection m_collection;
    #endregion
    #region Construction
    internal ReadOnlyAppenderCollection(AppenderCollection list) : base(Tag.Default)
    {
        m_collection = list;
    }
    #endregion
    #region Type-safe ICollection
    public override void CopyTo(IAppender[] array)
    {
        m_collection.CopyTo(array);
    }
    public override void CopyTo(IAppender[] array, int start)
    {
        m_collection.CopyTo(array, start);
    }
    void ICollection.CopyTo(Array array, int start)
    {
        ((ICollection)m_collection).CopyTo(array, start);
    }
    public override int Count
    {
        get { return m_collection.Count; }
    }
    public override bool IsSynchronized
    {
        get { return m_collection.IsSynchronized; }
    }
    public override object SyncRoot
    {
        get { return m_collection.SyncRoot; }
    }
    #endregion
    #region Type-safe IList
    // Reads delegate to the wrapped collection; every mutation throws.
    public override IAppender this[int i]
    {
        get { return m_collection[i]; }
        set { throw new NotSupportedException(c_readOnlyMessage); }
    }
    public override int Add(IAppender x)
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override void Clear()
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override bool Contains(IAppender x)
    {
        return m_collection.Contains(x);
    }
    public override int IndexOf(IAppender x)
    {
        return m_collection.IndexOf(x);
    }
    public override void Insert(int pos, IAppender x)
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override void Remove(IAppender x)
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override void RemoveAt(int pos)
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override bool IsFixedSize
    {
        get { return true; }
    }
    public override bool IsReadOnly
    {
        get { return true; }
    }
    #endregion
    #region Type-safe IEnumerable
    public override IAppenderCollectionEnumerator GetEnumerator()
    {
        return m_collection.GetEnumerator();
    }
    #endregion
    #region Public Helpers
    // (just to mimic some nice features of ArrayList)
    public override int Capacity
    {
        get { return m_collection.Capacity; }
        set { throw new NotSupportedException(c_readOnlyMessage); }
    }
    public override int AddRange(AppenderCollection x)
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override int AddRange(IAppender[] x)
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    public override IAppender[] ToArray()
    {
        return m_collection.ToArray();
    }
    public override void TrimToSize()
    {
        throw new NotSupportedException(c_readOnlyMessage);
    }
    #endregion
}
#endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.IO;
using System.Text;
using System.Collections.Generic;
using System.Globalization;
using Microsoft.Build.Framework;
using Microsoft.Build.Utilities;
using Microsoft.Build.Tasks.Hosting;
using Microsoft.CodeAnalysis.CommandLine;
using System.Diagnostics;
namespace Microsoft.CodeAnalysis.BuildTasks
{
/// <summary>
/// This class defines the "Vbc" XMake task, which enables building assemblies from VB
/// source files by invoking the VB compiler. This is the new Roslyn XMake task,
/// meaning that the code is compiled by using the Roslyn compiler server, rather
/// than vbc.exe. The two should be functionally identical, but the compiler server
/// should be significantly faster with larger projects and have a smaller memory
/// footprint.
/// </summary>
public class Vbc : ManagedCompiler
{
// Backing field for UseHostCompilerIfAvailable (not kept in _store).
private bool _useHostCompilerIfAvailable;
// The following fields are used, set and re-set in LogEventsFromTextOutput()
/// <summary>
/// This stores the original lines and error priority together in the order in which they were received.
/// </summary>
private readonly Queue<VBError> _vbErrorLines = new Queue<VBError>();
// Used when parsing vbc output to determine the column number of an error
private bool _isDoneOutputtingErrorMessage;
private int _numberOfLinesInErrorMessage;
#region Properties
// Please keep these alphabetized. These are the parameters specific to Vbc. The
// ones shared between Vbc and Csc are defined in ManagedCompiler.cs, which is
// the base class.
// Simple pass-through properties: each stores/retrieves its value in the
// inherited _store bag keyed by the property's own name; boolean properties
// use GetOrDefault to supply their default value.
public string BaseAddress
{
    set { _store[nameof(BaseAddress)] = value; }
    get { return (string)_store[nameof(BaseAddress)]; }
}
public string DisabledWarnings
{
    set { _store[nameof(DisabledWarnings)] = value; }
    get { return (string)_store[nameof(DisabledWarnings)]; }
}
public string DocumentationFile
{
    set { _store[nameof(DocumentationFile)] = value; }
    get { return (string)_store[nameof(DocumentationFile)]; }
}
public string ErrorReport
{
    set { _store[nameof(ErrorReport)] = value; }
    get { return (string)_store[nameof(ErrorReport)]; }
}
public bool GenerateDocumentation
{
    set { _store[nameof(GenerateDocumentation)] = value; }
    get { return _store.GetOrDefault(nameof(GenerateDocumentation), false); }
}
public ITaskItem[] Imports
{
    set { _store[nameof(Imports)] = value; }
    get { return (ITaskItem[])_store[nameof(Imports)]; }
}
public string LangVersion
{
    set { _store[nameof(LangVersion)] = value; }
    get { return (string)_store[nameof(LangVersion)]; }
}
public string ModuleAssemblyName
{
    set { _store[nameof(ModuleAssemblyName)] = value; }
    get { return (string)_store[nameof(ModuleAssemblyName)]; }
}
public bool NoStandardLib
{
    set { _store[nameof(NoStandardLib)] = value; }
    get { return _store.GetOrDefault(nameof(NoStandardLib), false); }
}
// This is not a documented switch. It prevents the automatic reference to Microsoft.VisualBasic.dll.
// The VB team believes the only scenario for this is when you are building that assembly itself.
// We have to support the switch here so that we can build the SDE and VB trees, which need to build this assembly.
// Although undocumented, it cannot be wrapped with #if BUILDING_DF_LKG because this would prevent dogfood builds
// within VS, which must use non-LKG msbuild bits.
public bool NoVBRuntimeReference
{
    set { _store[nameof(NoVBRuntimeReference)] = value; }
    get { return _store.GetOrDefault(nameof(NoVBRuntimeReference), false); }
}
public bool NoWarnings
{
    set { _store[nameof(NoWarnings)] = value; }
    get { return _store.GetOrDefault(nameof(NoWarnings), false); }
}
public string OptionCompare
{
    set { _store[nameof(OptionCompare)] = value; }
    get { return (string)_store[nameof(OptionCompare)]; }
}
public bool OptionExplicit
{
    set { _store[nameof(OptionExplicit)] = value; }
    get { return _store.GetOrDefault(nameof(OptionExplicit), true); }
}
public bool OptionStrict
{
    set { _store[nameof(OptionStrict)] = value; }
    get { return _store.GetOrDefault(nameof(OptionStrict), false); }
}
public bool OptionInfer
{
    set { _store[nameof(OptionInfer)] = value; }
    get { return _store.GetOrDefault(nameof(OptionInfer), false); }
}
// Currently only /optionstrict:custom
public string OptionStrictType
{
    set { _store[nameof(OptionStrictType)] = value; }
    get { return (string)_store[nameof(OptionStrictType)]; }
}
public bool RemoveIntegerChecks
{
    set { _store[nameof(RemoveIntegerChecks)] = value; }
    get { return _store.GetOrDefault(nameof(RemoveIntegerChecks), false); }
}
public string RootNamespace
{
    set { _store[nameof(RootNamespace)] = value; }
    get { return (string)_store[nameof(RootNamespace)]; }
}
public string SdkPath
{
    set { _store[nameof(SdkPath)] = value; }
    get { return (string)_store[nameof(SdkPath)]; }
}
/// <summary>
/// Name of the language passed to "/preferreduilang" compiler option.
/// </summary>
/// <remarks>
/// If set to null, "/preferreduilang" option is omitted, and vbc.exe uses its default setting.
/// Otherwise, the value is passed to "/preferreduilang" as is.
/// </remarks>
public string PreferredUILang
{
    set { _store[nameof(PreferredUILang)] = value; }
    get { return (string)_store[nameof(PreferredUILang)]; }
}
public string VsSessionGuid
{
    set { _store[nameof(VsSessionGuid)] = value; }
    get { return (string)_store[nameof(VsSessionGuid)]; }
}
public bool TargetCompactFramework
{
    set { _store[nameof(TargetCompactFramework)] = value; }
    get { return _store.GetOrDefault(nameof(TargetCompactFramework), false); }
}
public bool UseHostCompilerIfAvailable
{
    set { _useHostCompilerIfAvailable = value; }
    get { return _useHostCompilerIfAvailable; }
}
public string VBRuntimePath
{
    set { _store[nameof(VBRuntimePath)] = value; }
    get { return (string)_store[nameof(VBRuntimePath)]; }
}
public string Verbosity
{
    set { _store[nameof(Verbosity)] = value; }
    get { return (string)_store[nameof(Verbosity)]; }
}
public string WarningsAsErrors
{
    set { _store[nameof(WarningsAsErrors)] = value; }
    get { return (string)_store[nameof(WarningsAsErrors)]; }
}
public string WarningsNotAsErrors
{
    set { _store[nameof(WarningsNotAsErrors)] = value; }
    get { return (string)_store[nameof(WarningsNotAsErrors)]; }
}
public string VBRuntime
{
    set { _store[nameof(VBRuntime)] = value; }
    get { return (string)_store[nameof(VBRuntime)]; }
}
public string PdbFile
{
    set { _store[nameof(PdbFile)] = value; }
    get { return (string)_store[nameof(PdbFile)]; }
}
#endregion
#region Tool Members
// Identifies this task to the compiler server as a VB compilation.
internal override RequestLanguage Language => RequestLanguage.VisualBasicCompile;
// Line separator used to split compiler output in LogMessages.
private static readonly string[] s_separator = { "\r\n" };
internal override void LogMessages(string output, MessageImportance messageImportance)
{
    // vbc emits multi-line error records (four lines, the second of which is
    // blank). Forward every line - blanks included - so the parser in
    // LogEventsFromTextOutput can reassemble them; this also keeps the
    // parser working against output from older compilers.
    foreach (string line in output.Split(s_separator, StringSplitOptions.None))
    {
        LogEventsFromTextOutput(line, messageImportance);
    }
}
/// <summary>
/// Return the name of the tool to execute.
/// </summary>
protected override string ToolName
{
    get { return "vbc.exe"; }
}
/// <summary>
/// Override Execute so that we can move the PDB file if we need to,
/// after the compiler is done.
/// </summary>
public override bool Execute()
{
    if (!base.Execute())
    {
        return false;
    }
    // Only relocate the PDB when the compiler actually ran.
    if (!SkipCompilerExecution)
    {
        MovePdbFileIfNecessary(OutputAssembly.ItemSpec);
    }
    return !Log.HasLoggedErrors;
}
/// <summary>
/// Move the PDB file if the PDB file that was generated by the compiler
/// is not at the specified path, or if it is newer than the one there.
/// VBC does not have a switch to specify the PDB path, so we are essentially implementing that for it here.
/// We need make this possible to avoid colliding with the PDB generated by WinMDExp.
///
/// If at some future point VBC.exe offers a /pdbfile switch, this function can be removed.
/// </summary>
/// <param name="outputAssembly">Path of the assembly the compiler produced; its PDB is expected beside it.</param>
internal void MovePdbFileIfNecessary(string outputAssembly)
{
    // Nothing to do when no explicit PDB path was requested or no assembly path is known.
    if (String.IsNullOrEmpty(PdbFile) || String.IsNullOrEmpty(outputAssembly))
    {
        return;
    }
    try
    {
        string actualPdb = Path.ChangeExtension(outputAssembly, ".pdb"); // This is the pdb that the compiler generated
        FileInfo actualPdbInfo = new FileInfo(actualPdb);
        string desiredLocation = PdbFile;
        if (!desiredLocation.EndsWith(".pdb", StringComparison.OrdinalIgnoreCase))
        {
            desiredLocation += ".pdb";
        }
        FileInfo desiredPdbInfo = new FileInfo(desiredLocation);
        // If the compiler generated a pdb..
        if (actualPdbInfo.Exists)
        {
            // .. and the desired one does not exist or is older than the generated one...
            // (the original re-tested desiredPdbInfo.Exists inside the second operand
            // of the ||, which was redundant)
            if (!desiredPdbInfo.Exists || actualPdbInfo.LastWriteTime > desiredPdbInfo.LastWriteTime)
            {
                // Delete the existing one if it's already there, as Move would otherwise fail
                if (desiredPdbInfo.Exists)
                {
                    Utilities.DeleteNoThrow(desiredPdbInfo.FullName);
                }
                // Move the file to where we actually wanted VBC to put it
                File.Move(actualPdbInfo.FullName, desiredLocation);
            }
        }
    }
    catch (Exception e) when (Utilities.IsIoRelatedException(e))
    {
        Log.LogErrorWithCodeFromResources("VBC_RenamePDB", PdbFile, e.Message);
    }
}
/// <summary>
/// Generate the path to the tool
/// </summary>
protected override string GenerateFullPathToTool()
{
    // Prefer the copy of vbc.exe that ships with the current Build Tools.
    string pathToTool = ToolLocationHelper.GetPathToBuildToolsFile(ToolName, ToolLocationHelper.CurrentToolsVersion);
    if (pathToTool == null)
    {
        // Fall back to the latest installed .NET Framework, logging an
        // error when the tool cannot be found there either.
        pathToTool = ToolLocationHelper.GetPathToDotNetFrameworkFile(ToolName, TargetDotNetFrameworkVersion.VersionLatest);
        if (pathToTool == null)
        {
            Log.LogErrorWithCodeFromResources("General_FrameworksFileNotFound", ToolName, ToolLocationHelper.GetDotNetFrameworkVersionFolderPrefix(TargetDotNetFrameworkVersion.VersionLatest));
        }
    }
    return pathToTool;
}
/// <summary>
/// vbc.exe only takes the BaseAddress in hexadecimal format. But we allow the caller
/// of the task to pass in the BaseAddress in either decimal or hexadecimal format.
/// Examples of supported hex formats include "0x10000000" or "&H10000000".
/// </summary>
internal string GetBaseAddressInHex()
{
    string originalBaseAddress = this.BaseAddress;
    if (originalBaseAddress == null)
    {
        return null;
    }
    // A "0x" or "&H" prefix means the value is already hexadecimal; vbc
    // wants just the digits, so strip the prefix.
    if (originalBaseAddress.Length > 2)
    {
        string twoLetterPrefix = originalBaseAddress.Substring(0, 2);
        if (String.Equals(twoLetterPrefix, "0x", StringComparison.OrdinalIgnoreCase) ||
            String.Equals(twoLetterPrefix, "&h", StringComparison.OrdinalIgnoreCase))
        {
            return originalBaseAddress.Substring(2);
        }
    }
    // Otherwise treat the value as decimal and convert it to hex.
    try
    {
        uint baseAddressDecimal = UInt32.Parse(originalBaseAddress, CultureInfo.InvariantCulture);
        return baseAddressDecimal.ToString("X", CultureInfo.InvariantCulture);
    }
    catch (FormatException e)
    {
        throw Utilities.GetLocalizedArgumentException(e,
            ErrorString.Vbc_ParameterHasInvalidValue, "BaseAddress", originalBaseAddress);
    }
}
/// <summary>
/// Looks at all the parameters that have been set, and builds up the string
/// containing all the command-line switches.
/// </summary>
/// <remarks>
/// The order in which switches are appended is significant: for several vbc
/// switches the last occurrence on the command line wins, and the response
/// file must come last. Do not reorder without checking the comments below.
/// </remarks>
protected internal override void AddResponseFileCommands(CommandLineBuilderExtension commandLine)
{
    commandLine.AppendSwitchIfNotNull("/baseaddress:", this.GetBaseAddressInHex());
    commandLine.AppendSwitchIfNotNull("/libpath:", this.AdditionalLibPaths, ",");
    commandLine.AppendSwitchIfNotNull("/imports:", this.Imports, ",");
    // Make sure this /doc+ switch comes *before* the /doc:<file> switch (which is handled in the
    // ManagedCompiler.cs base class). /doc+ is really just an alias for /doc:<assemblyname>.xml,
    // and the last /doc switch on the command-line wins. If the user provided a specific doc filename,
    // we want that one to win.
    commandLine.AppendPlusOrMinusSwitch("/doc", this._store, "GenerateDocumentation");
    commandLine.AppendSwitchIfNotNull("/optioncompare:", this.OptionCompare);
    commandLine.AppendPlusOrMinusSwitch("/optionexplicit", this._store, "OptionExplicit");
    // Make sure this /optionstrict+ switch appears *before* the /optionstrict:xxxx switch below
    /* twhitney: In Orcas a change was made for devdiv bug 16889 that set Option Strict-, whenever this.DisabledWarnings was
     * empty.  That was clearly the wrong thing to do and we found it when we had a project with all the warning configuration
     * entries set to WARNING.  Because this.DisabledWarnings was empty in that case we would end up sending /OptionStrict-
     * effectively silencing all the warnings that had been selected.
     *
     * Now what we do is:
     *  If option strict+ is specified, that trumps everything and we just set option strict+
     *  Otherwise, just set option strict:custom.
     *  You may wonder why we don't try to set Option Strict-  The reason is that Option Strict- just implies a certain
     *  set of warnings that should be disabled (there's ten of them today)  You get the same effect by sending
     *  option strict:custom on along with the correct list of disabled warnings.
     *  Rather than make this code know the current set of disabled warnings that comprise Option strict-, we just send
     *  option strict:custom on with the understanding that we'll get the same behavior as option strict- since we are passing
     *  the /nowarn line on that contains all the warnings OptionStrict- would disable anyway. The IDE knows what they are
     *  and puts them in the project file so we are good.  And by not making this code aware of which warnings comprise
     *  Option Strict-, we have one less place we have to keep up to date in terms of what comprises option strict-
     */
    // Decide whether we are Option Strict+ or Option Strict:custom
    object optionStrictSetting = this._store["OptionStrict"];
    bool optionStrict = optionStrictSetting != null ? (bool)optionStrictSetting : false;
    if (optionStrict)
    {
        commandLine.AppendSwitch("/optionstrict+");
    }
    else // OptionStrict+ wasn't specified so use :custom.
    {
        commandLine.AppendSwitch("/optionstrict:custom");
    }
    commandLine.AppendSwitchIfNotNull("/optionstrict:", this.OptionStrictType);
    commandLine.AppendWhenTrue("/nowarn", this._store, "NoWarnings");
    commandLine.AppendSwitchWithSplitting("/nowarn:", this.DisabledWarnings, ",", ';', ',');
    commandLine.AppendPlusOrMinusSwitch("/optioninfer", this._store, "OptionInfer");
    commandLine.AppendWhenTrue("/nostdlib", this._store, "NoStandardLib");
    commandLine.AppendWhenTrue("/novbruntimeref", this._store, "NoVBRuntimeReference");
    commandLine.AppendSwitchIfNotNull("/errorreport:", this.ErrorReport);
    commandLine.AppendSwitchIfNotNull("/platform:", this.PlatformWith32BitPreference);
    commandLine.AppendPlusOrMinusSwitch("/removeintchecks", this._store, "RemoveIntegerChecks");
    commandLine.AppendSwitchIfNotNull("/rootnamespace:", this.RootNamespace);
    commandLine.AppendSwitchIfNotNull("/sdkpath:", this.SdkPath);
    commandLine.AppendSwitchIfNotNull("/langversion:", this.LangVersion);
    commandLine.AppendSwitchIfNotNull("/moduleassemblyname:", this.ModuleAssemblyName);
    commandLine.AppendWhenTrue("/netcf", this._store, "TargetCompactFramework");
    commandLine.AppendSwitchIfNotNull("/preferreduilang:", this.PreferredUILang);
    commandLine.AppendPlusOrMinusSwitch("/highentropyva", this._store, "HighEntropyVA");
    // VBRuntime: a path identical to VBRuntimePath is passed through directly;
    // otherwise the keywords EMBED/NONE/DEFAULT map to /vbruntime* , /vbruntime- ,
    // /vbruntime+ respectively, and anything else is treated as a path.
    if (0 == String.Compare(this.VBRuntimePath, this.VBRuntime, StringComparison.OrdinalIgnoreCase))
    {
        commandLine.AppendSwitchIfNotNull("/vbruntime:", this.VBRuntimePath);
    }
    else if (this.VBRuntime != null)
    {
        string vbRuntimeSwitch = this.VBRuntime;
        if (0 == String.Compare(vbRuntimeSwitch, "EMBED", StringComparison.OrdinalIgnoreCase))
        {
            commandLine.AppendSwitch("/vbruntime*");
        }
        else if (0 == String.Compare(vbRuntimeSwitch, "NONE", StringComparison.OrdinalIgnoreCase))
        {
            commandLine.AppendSwitch("/vbruntime-");
        }
        else if (0 == String.Compare(vbRuntimeSwitch, "DEFAULT", StringComparison.OrdinalIgnoreCase))
        {
            commandLine.AppendSwitch("/vbruntime+");
        }
        else
        {
            commandLine.AppendSwitchIfNotNull("/vbruntime:", vbRuntimeSwitch);
        }
    }
    // Verbosity: only "quiet" and "verbose" become switches; "normal" is vbc's default.
    if (
        (this.Verbosity != null) &&
        (
            (0 == String.Compare(this.Verbosity, "quiet", StringComparison.OrdinalIgnoreCase)) ||
            (0 == String.Compare(this.Verbosity, "verbose", StringComparison.OrdinalIgnoreCase))
        )
    )
    {
        commandLine.AppendSwitchIfNotNull("/", this.Verbosity);
    }
    commandLine.AppendSwitchIfNotNull("/doc:", this.DocumentationFile);
    commandLine.AppendSwitchUnquotedIfNotNull("/define:", Vbc.GetDefineConstantsSwitch(this.DefineConstants));
    AddReferencesToCommandLine(commandLine);
    commandLine.AppendSwitchIfNotNull("/win32resource:", this.Win32Resource);
    // Special case for "Sub Main" (See VSWhidbey 381254)
    if (0 != String.Compare("Sub Main", this.MainEntryPoint, StringComparison.OrdinalIgnoreCase))
    {
        commandLine.AppendSwitchIfNotNull("/main:", this.MainEntryPoint);
    }
    base.AddResponseFileCommands(commandLine);
    // This should come after the "TreatWarningsAsErrors" flag is processed (in managedcompiler.cs).
    // Because if TreatWarningsAsErrors=false, then we'll have a /warnaserror- on the command-line,
    // and then any specific warnings that should be treated as errors should be specified with
    // /warnaserror+:<list> after the /warnaserror- switch.  The order of the switches on the command-line
    // does matter.
    //
    // Note that
    //      /warnaserror+
    // is just shorthand for:
    //      /warnaserror+:<all possible warnings>
    //
    // Similarly,
    //      /warnaserror-
    // is just shorthand for:
    //      /warnaserror-:<all possible warnings>
    commandLine.AppendSwitchWithSplitting("/warnaserror+:", this.WarningsAsErrors, ",", ';', ',');
    commandLine.AppendSwitchWithSplitting("/warnaserror-:", this.WarningsNotAsErrors, ",", ';', ',');
    // If not design time build and the globalSessionGuid property was set then add a -globalsessionguid:<guid>
    bool designTime = false;
    if (this.HostObject != null)
    {
        var vbHost = this.HostObject as IVbcHostObject;
        designTime = vbHost.IsDesignTime();
    }
    if (!designTime)
    {
        if (!string.IsNullOrWhiteSpace(this.VsSessionGuid))
        {
            commandLine.AppendSwitchIfNotNull("/sqmsessionguid:", this.VsSessionGuid);
        }
    }
    // It's a good idea for the response file to be the very last switch passed, just
    // from a predictability perspective.  It also solves the problem that a dogfooder
    // ran into, which is described in an email thread attached to bug VSWhidbey 146883.
    // See also bugs 177762 and 118307 for additional bugs related to response file position.
    if (this.ResponseFiles != null)
    {
        foreach (ITaskItem response in this.ResponseFiles)
        {
            commandLine.AppendSwitchIfNotNull("@", response.ItemSpec);
        }
    }
}
// Emits /link: for references marked EmbedInteropTypes and /reference: for
// all others. Does nothing when no references were supplied.
private void AddReferencesToCommandLine(CommandLineBuilderExtension commandLine)
{
    if (this.References == null || this.References.Length == 0)
    {
        return;
    }
    // Partition the references by their EmbedInteropTypes metadata.
    var references = new List<ITaskItem>(this.References.Length);
    var links = new List<ITaskItem>(this.References.Length);
    foreach (ITaskItem reference in this.References)
    {
        if (Utilities.TryConvertItemMetadataToBool(reference, "EmbedInteropTypes"))
        {
            links.Add(reference);
        }
        else
        {
            references.Add(reference);
        }
    }
    if (links.Count > 0)
    {
        commandLine.AppendSwitchIfNotNull("/link:", links.ToArray(), ",");
    }
    if (references.Count > 0)
    {
        commandLine.AppendSwitchIfNotNull("/reference:", references.ToArray(), ",");
    }
}
/// <summary>
/// Validate parameters, log errors and warnings and return true if
/// Execute should proceed.
/// </summary>
protected override bool ValidateParameters()
{
    if (!base.ValidateParameters())
    {
        return false;
    }
    // "Verbosity", when supplied, must be one of the values vbc.exe accepts.
    if (this.Verbosity != null)
    {
        bool recognized =
            String.Equals(Verbosity, "normal", StringComparison.OrdinalIgnoreCase) ||
            String.Equals(Verbosity, "quiet", StringComparison.OrdinalIgnoreCase) ||
            String.Equals(Verbosity, "verbose", StringComparison.OrdinalIgnoreCase);
        if (!recognized)
        {
            Log.LogErrorWithCodeFromResources("Vbc_EnumParameterHasInvalidValue", "Verbosity", this.Verbosity, "Quiet, Normal, Verbose");
            return false;
        }
    }
    return true;
}
/// <summary>
/// This method intercepts the lines to be logged coming from STDOUT from VBC.
/// Once we see a standard vb warning or error, then we capture it and grab the next 3
/// lines so we can transform the string form the form of FileName.vb(line) to FileName.vb(line,column)
/// which will allow us to report the line and column to the IDE, and thus filter the error
/// in the duplicate case for multi-targeting, or just squiggle the appropriate token
/// instead of the entire line.
/// </summary>
/// <param name="singleLine">A single line from the STDOUT of the vbc compiler</param>
/// <param name="messageImportance">High,Low,Normal</param>
protected override void LogEventsFromTextOutput(string singleLine, MessageImportance messageImportance)
{
    // We can return immediately if this was not called by the out of proc compiler
    if (!this.UsedCommandLineTool)
    {
        base.LogEventsFromTextOutput(singleLine, messageImportance);
        return;
    }
    // We can also return immediately if the current string is not a warning or error
    // and we have not seen a warning or error yet. 'Error' and 'Warning' are not localized.
    if (_vbErrorLines.Count == 0 &&
        singleLine.IndexOf("warning", StringComparison.OrdinalIgnoreCase) == -1 &&
        singleLine.IndexOf("error", StringComparison.OrdinalIgnoreCase) == -1)
    {
        base.LogEventsFromTextOutput(singleLine, messageImportance);
        return;
    }
    // Otherwise we are inside (or starting) a multi-line vbc error record;
    // hand the line to the stateful parser.
    ParseVBErrorOrWarning(singleLine, messageImportance);
}
/// <summary>
/// Given a string, parses it to find out whether it's an error or warning and, if so,
/// make sure it's validated properly.
/// </summary>
/// <comments>
/// INTERNAL FOR UNITTESTING ONLY
/// </comments>
/// <param name="singleLine">The line to parse</param>
/// <param name="messageImportance">The MessageImportance to use when reporting the error.</param>
internal void ParseVBErrorOrWarning(string singleLine, MessageImportance messageImportance)
{
    // If the queue is empty then we haven't seen the first line of an error yet.
    if (_vbErrorLines.Count > 0)
    {
        // vbc separates the error message from the source text with an empty line, so
        // we can check for an empty line to see if vbc finished outputting the error message
        if (!_isDoneOutputtingErrorMessage && singleLine.Length == 0)
        {
            _isDoneOutputtingErrorMessage = true;
            _numberOfLinesInErrorMessage = _vbErrorLines.Count;
        }

        _vbErrorLines.Enqueue(new VBError(singleLine, messageImportance));

        // We are looking for the line that indicates the column (contains the '~'),
        // which vbc outputs 3 lines below the error message:
        //
        // <error message>
        // <blank line>
        // <line with the source text>
        // <line with the '~'>
        if (_isDoneOutputtingErrorMessage &&
            _vbErrorLines.Count == _numberOfLinesInErrorMessage + 3)
        {
            // Once we have the 4th line (error line + 3), then parse it for the first ~
            // which will correspond to the column of the token with the error because
            // VBC respects the user's indentation settings in the file it is compiling
            // and only outputs SPACE chars to STDOUT.
            VBError originalVBError = _vbErrorLines.Dequeue();
            string originalVBErrorString = originalVBError.Message;

            // FIX: test the raw IndexOf results. The previous code computed
            // 'column = singleLine.IndexOf('~') + 1' and then checked 'column < 0',
            // which can never be true (IndexOf returns -1 at worst, making column 0),
            // so the "no '~' found" abort path was unreachable and a bogus ",0"
            // column was spliced into the message instead.
            int tildeIndex = singleLine.IndexOf('~');
            int endParenthesisLocation = originalVBErrorString.IndexOf(')');

            // If for some reason the line does not contain any ~ (or the original
            // message has no closing parenthesis) then something went wrong, so
            // abort and output the original strings unchanged.
            if (tildeIndex < 0 || endParenthesisLocation < 0)
            {
                // we need to output all of the original lines we ate.
                Log.LogMessageFromText(originalVBErrorString, originalVBError.MessageImportance);
                foreach (VBError vberror in _vbErrorLines)
                {
                    base.LogEventsFromTextOutput(vberror.Message, vberror.MessageImportance);
                }
                _vbErrorLines.Clear();
                return;
            }

            // The +1 translates the index into user columns, which are 1-based.
            int column = tildeIndex + 1;
            string newLine = originalVBErrorString.Substring(0, endParenthesisLocation) + "," + column + originalVBErrorString.Substring(endParenthesisLocation);

            // Output all of the lines of the error, but with the modified first line as well.
            Log.LogMessageFromText(newLine, originalVBError.MessageImportance);
            foreach (VBError vberror in _vbErrorLines)
            {
                base.LogEventsFromTextOutput(vberror.Message, vberror.MessageImportance);
            }
            _vbErrorLines.Clear();
        }
    }
    else
    {
        CanonicalError.Parts parts = CanonicalError.Parse(singleLine);
        if (parts == null)
        {
            // Not a recognizable diagnostic line at all; forward as-is.
            base.LogEventsFromTextOutput(singleLine, messageImportance);
        }
        else if ((parts.category == CanonicalError.Parts.Category.Error ||
             parts.category == CanonicalError.Parts.Category.Warning) &&
             parts.column == CanonicalError.Parts.numberNotSpecified)
        {
            if (parts.line != CanonicalError.Parts.numberNotSpecified)
            {
                // If we got here, then this is a standard VBC error or warning:
                // start accumulating lines so we can compute the column later.
                _vbErrorLines.Enqueue(new VBError(singleLine, messageImportance));
                _isDoneOutputtingErrorMessage = false;
                _numberOfLinesInErrorMessage = 0;
            }
            else
            {
                // Project-level errors don't have line numbers -- just output now.
                base.LogEventsFromTextOutput(singleLine, messageImportance);
            }
        }
    }
}
#endregion
/// <summary>
/// Many VisualStudio VB projects have values for the DefineConstants property that
/// contain quotes and spaces. Normally we don't allow parameters passed into the
/// task to contain quotes, because if we weren't careful, we might accidentally
/// allow a parameter injection attack. But for "DefineConstants", we have to allow
/// it.
/// So this method prepares the string to be passed in on the /define: command-line
/// switch: the entire value is wrapped in double-quotes and any embedded quotes
/// are escaped. Returns null when there is nothing to pass.
/// </summary>
internal static string GetDefineConstantsSwitch
(
    string originalDefineConstants
)
{
    if (string.IsNullOrEmpty(originalDefineConstants))
    {
        return null;
    }

    // Escape in two passes: first protect an existing backslash-quote pair,
    // then escape every remaining quote. The order matters.
    var escaped = new StringBuilder(originalDefineConstants)
        .Replace("\\\"", "\\\\\"")
        .Replace("\"", "\\\"");

    // Surround the whole thing with a pair of double-quotes; it is now ready
    // to be passed to the /define: switch.
    return "\"" + escaped.ToString() + "\"";
}
/// <summary>
/// This method will initialize the host compiler object with all the switches,
/// parameters, resources, references, sources, etc.
///
/// It returns true if everything went according to plan. It returns false if the
/// host compiler had a problem with one of the parameters that was passed in.
///
/// This method also sets the "this.HostCompilerSupportsAllParameters" property
/// accordingly.
///
/// Example:
///     If we attempted to pass in Platform="foobar", then this method would
///     set HostCompilerSupportsAllParameters=true, but it would throw an
///     exception because the host compiler fully supports
///     the Platform parameter, but "foobar" is an illegal value.
///
/// Example:
///     If we attempted to pass in NoConfig=false, then this method would set
///     HostCompilerSupportsAllParameters=false, because while this is a legal
///     thing for csc.exe, the IDE compiler cannot support it. In this situation
///     the return value will also be false.
/// </summary>
/// <owner>RGoel</owner>
private bool InitializeHostCompiler(IVbcHostObject vbcHostObject)
{
    this.HostCompilerSupportsAllParameters = this.UseHostCompilerIfAvailable;

    // 'param' tracks the name of the parameter currently being set, so the
    // catch block below can report which one the host object choked on.
    string param = "Unknown";

    try
    {
        param = nameof(vbcHostObject.BeginInitialization);
        vbcHostObject.BeginInitialization();

        CheckHostObjectSupport(param = nameof(AdditionalLibPaths), vbcHostObject.SetAdditionalLibPaths(AdditionalLibPaths));
        CheckHostObjectSupport(param = nameof(AddModules), vbcHostObject.SetAddModules(AddModules));

        // For host objects which support them, set the analyzers, ruleset and additional files.
        IAnalyzerHostObject analyzerHostObject = vbcHostObject as IAnalyzerHostObject;
        if (analyzerHostObject != null)
        {
            CheckHostObjectSupport(param = nameof(Analyzers), analyzerHostObject.SetAnalyzers(Analyzers));
            CheckHostObjectSupport(param = nameof(CodeAnalysisRuleSet), analyzerHostObject.SetRuleSet(CodeAnalysisRuleSet));
            CheckHostObjectSupport(param = nameof(AdditionalFiles), analyzerHostObject.SetAdditionalFiles(AdditionalFiles));
        }

        CheckHostObjectSupport(param = nameof(BaseAddress), vbcHostObject.SetBaseAddress(TargetType, GetBaseAddressInHex()));
        CheckHostObjectSupport(param = nameof(CodePage), vbcHostObject.SetCodePage(CodePage));
        CheckHostObjectSupport(param = nameof(DebugType), vbcHostObject.SetDebugType(EmitDebugInformation, DebugType));
        CheckHostObjectSupport(param = nameof(DefineConstants), vbcHostObject.SetDefineConstants(DefineConstants));
        CheckHostObjectSupport(param = nameof(DelaySign), vbcHostObject.SetDelaySign(DelaySign));
        CheckHostObjectSupport(param = nameof(DocumentationFile), vbcHostObject.SetDocumentationFile(DocumentationFile));
        CheckHostObjectSupport(param = nameof(FileAlignment), vbcHostObject.SetFileAlignment(FileAlignment));
        CheckHostObjectSupport(param = nameof(GenerateDocumentation), vbcHostObject.SetGenerateDocumentation(GenerateDocumentation));
        CheckHostObjectSupport(param = nameof(Imports), vbcHostObject.SetImports(Imports));
        CheckHostObjectSupport(param = nameof(KeyContainer), vbcHostObject.SetKeyContainer(KeyContainer));
        CheckHostObjectSupport(param = nameof(KeyFile), vbcHostObject.SetKeyFile(KeyFile));
        CheckHostObjectSupport(param = nameof(LinkResources), vbcHostObject.SetLinkResources(LinkResources));
        CheckHostObjectSupport(param = nameof(MainEntryPoint), vbcHostObject.SetMainEntryPoint(MainEntryPoint));
        CheckHostObjectSupport(param = nameof(NoConfig), vbcHostObject.SetNoConfig(NoConfig));
        CheckHostObjectSupport(param = nameof(NoStandardLib), vbcHostObject.SetNoStandardLib(NoStandardLib));
        CheckHostObjectSupport(param = nameof(NoWarnings), vbcHostObject.SetNoWarnings(NoWarnings));
        CheckHostObjectSupport(param = nameof(Optimize), vbcHostObject.SetOptimize(Optimize));
        CheckHostObjectSupport(param = nameof(OptionCompare), vbcHostObject.SetOptionCompare(OptionCompare));
        CheckHostObjectSupport(param = nameof(OptionExplicit), vbcHostObject.SetOptionExplicit(OptionExplicit));
        CheckHostObjectSupport(param = nameof(OptionStrict), vbcHostObject.SetOptionStrict(OptionStrict));
        CheckHostObjectSupport(param = nameof(OptionStrictType), vbcHostObject.SetOptionStrictType(OptionStrictType));
        CheckHostObjectSupport(param = nameof(OutputAssembly), vbcHostObject.SetOutputAssembly(OutputAssembly?.ItemSpec));

        // For host objects which support them, set platform with 32BitPreference, HighEntropyVA, and SubsystemVersion
        IVbcHostObject5 vbcHostObject5 = vbcHostObject as IVbcHostObject5;
        if (vbcHostObject5 != null)
        {
            CheckHostObjectSupport(param = nameof(PlatformWith32BitPreference), vbcHostObject5.SetPlatformWith32BitPreference(PlatformWith32BitPreference));
            CheckHostObjectSupport(param = nameof(HighEntropyVA), vbcHostObject5.SetHighEntropyVA(HighEntropyVA));
            CheckHostObjectSupport(param = nameof(SubsystemVersion), vbcHostObject5.SetSubsystemVersion(SubsystemVersion));
        }
        else
        {
            CheckHostObjectSupport(param = nameof(Platform), vbcHostObject.SetPlatform(Platform));
        }

        IVbcHostObject6 vbcHostObject6 = vbcHostObject as IVbcHostObject6;
        if (vbcHostObject6 != null)
        {
            CheckHostObjectSupport(param = nameof(ErrorLog), vbcHostObject6.SetErrorLog(ErrorLog));
            CheckHostObjectSupport(param = nameof(ReportAnalyzer), vbcHostObject6.SetReportAnalyzer(ReportAnalyzer));
        }

        CheckHostObjectSupport(param = nameof(References), vbcHostObject.SetReferences(References));
        CheckHostObjectSupport(param = nameof(RemoveIntegerChecks), vbcHostObject.SetRemoveIntegerChecks(RemoveIntegerChecks));
        CheckHostObjectSupport(param = nameof(Resources), vbcHostObject.SetResources(Resources));
        CheckHostObjectSupport(param = nameof(ResponseFiles), vbcHostObject.SetResponseFiles(ResponseFiles));
        CheckHostObjectSupport(param = nameof(RootNamespace), vbcHostObject.SetRootNamespace(RootNamespace));
        CheckHostObjectSupport(param = nameof(SdkPath), vbcHostObject.SetSdkPath(SdkPath));
        CheckHostObjectSupport(param = nameof(Sources), vbcHostObject.SetSources(Sources));
        CheckHostObjectSupport(param = nameof(TargetCompactFramework), vbcHostObject.SetTargetCompactFramework(TargetCompactFramework));
        CheckHostObjectSupport(param = nameof(TargetType), vbcHostObject.SetTargetType(TargetType));
        CheckHostObjectSupport(param = nameof(TreatWarningsAsErrors), vbcHostObject.SetTreatWarningsAsErrors(TreatWarningsAsErrors));
        CheckHostObjectSupport(param = nameof(WarningsAsErrors), vbcHostObject.SetWarningsAsErrors(WarningsAsErrors));
        CheckHostObjectSupport(param = nameof(WarningsNotAsErrors), vbcHostObject.SetWarningsNotAsErrors(WarningsNotAsErrors));
        // DisabledWarnings needs to come after WarningsAsErrors and WarningsNotAsErrors, because
        // of the way the host object works, and the fact that DisabledWarnings trump Warnings[Not]AsErrors.
        CheckHostObjectSupport(param = nameof(DisabledWarnings), vbcHostObject.SetDisabledWarnings(DisabledWarnings));
        CheckHostObjectSupport(param = nameof(Win32Icon), vbcHostObject.SetWin32Icon(Win32Icon));
        CheckHostObjectSupport(param = nameof(Win32Resource), vbcHostObject.SetWin32Resource(Win32Resource));

        // In order to maintain compatibility with previous host compilers, we must
        // light-up for IVbcHostObject2
        if (vbcHostObject is IVbcHostObject2)
        {
            IVbcHostObject2 vbcHostObject2 = (IVbcHostObject2)vbcHostObject;
            CheckHostObjectSupport(param = nameof(ModuleAssemblyName), vbcHostObject2.SetModuleAssemblyName(ModuleAssemblyName));
            // FIX: SetOptionInfer used to be invoked twice with the same value in
            // this branch; the redundant second call (and its duplicate support
            // check, which logged twice) has been removed.
            CheckHostObjectSupport(param = nameof(OptionInfer), vbcHostObject2.SetOptionInfer(OptionInfer));
            CheckHostObjectSupport(param = nameof(Win32Manifest), vbcHostObject2.SetWin32Manifest(GetWin32ManifestSwitch(NoWin32Manifest, Win32Manifest)));
        }
        else
        {
            // If we have been given a property that the host compiler doesn't support
            // then we need to state that we are falling back to the command line compiler
            if (!String.IsNullOrEmpty(ModuleAssemblyName))
            {
                CheckHostObjectSupport(param = nameof(ModuleAssemblyName), resultFromHostObjectSetOperation: false);
            }

            if (_store.ContainsKey(nameof(OptionInfer)))
            {
                CheckHostObjectSupport(param = nameof(OptionInfer), resultFromHostObjectSetOperation: false);
            }

            if (!String.IsNullOrEmpty(Win32Manifest))
            {
                CheckHostObjectSupport(param = nameof(Win32Manifest), resultFromHostObjectSetOperation: false);
            }
        }

        // Check for support of the LangVersion property
        if (vbcHostObject is IVbcHostObject3)
        {
            IVbcHostObject3 vbcHostObject3 = (IVbcHostObject3)vbcHostObject;
            CheckHostObjectSupport(param = nameof(LangVersion), vbcHostObject3.SetLanguageVersion(LangVersion));
        }
        else if (!String.IsNullOrEmpty(LangVersion) && !UsedCommandLineTool)
        {
            CheckHostObjectSupport(param = nameof(LangVersion), resultFromHostObjectSetOperation: false);
        }

        if (vbcHostObject is IVbcHostObject4)
        {
            IVbcHostObject4 vbcHostObject4 = (IVbcHostObject4)vbcHostObject;
            CheckHostObjectSupport(param = nameof(VBRuntime), vbcHostObject4.SetVBRuntime(VBRuntime));
        }

        // Support for NoVBRuntimeReference was added to this task after IVbcHostObject was frozen. That doesn't matter much because the host
        // compiler doesn't support it, and almost nobody uses it anyway. But if someone has set it, we need to hard code falling back to
        // the command line compiler here.
        if (NoVBRuntimeReference)
        {
            CheckHostObjectSupport(param = nameof(NoVBRuntimeReference), resultFromHostObjectSetOperation: false);
        }

        InitializeHostObjectSupportForNewSwitches(vbcHostObject, ref param);

        // In general, we don't support preferreduilang with the in-proc compiler. It will always use the same locale as the
        // host process, so in general, we have to fall back to the command line compiler if this option is specified.
        // However, we explicitly allow two values (mostly for parity with C#):
        // Null is supported because it means that option should be omitted, and compiler default used - obviously always valid.
        // Explicitly specified name of current locale is also supported, since it is effectively a no-op.
        if (!String.IsNullOrEmpty(PreferredUILang) && !String.Equals(PreferredUILang, System.Globalization.CultureInfo.CurrentUICulture.Name, StringComparison.OrdinalIgnoreCase))
        {
            CheckHostObjectSupport(param = nameof(PreferredUILang), resultFromHostObjectSetOperation: false);
        }
    }
    catch (Exception e)
    {
        Log.LogErrorWithCodeFromResources("General_CouldNotSetHostObjectParameter", param, e.Message);
        return false;
    }
    finally
    {
        // In the case of the VB host compiler, the EndInitialization method will
        // throw (due to FAILED HRESULT) if there was a bad value for one of the
        // parameters.
        vbcHostObject.EndInitialization();
    }

    return true;
}
/// <summary>
/// This method will get called during Execute() if a host object has been passed into the Vbc
/// task. Returns one of the following values to indicate what the next action should be:
///     UseHostObjectToExecute          Host compiler exists and was initialized.
///     UseAlternateToolToExecute       Host compiler doesn't exist or was not appropriate.
///     NoActionReturnSuccess           Host compiler was already up-to-date, and we're done.
///     NoActionReturnFailure           Bad parameters were passed into the task.
/// </summary>
/// <owner>RGoel</owner>
protected override HostObjectInitializationStatus InitializeHostObject()
{
    if (this.HostObject == null)
    {
        // No host object was provided at all; use the command-line compiler.
        UsedCommandLineTool = true;
        return HostObjectInitializationStatus.UseAlternateToolToExecute;
    }

    // The host object arrives as a generic "Object" (ITask can't carry
    // Vbc-specific types), so cast it to the type the Vbc task expects.
    // NOTE: For compat reasons this must remain IVbcHostObject; we can
    // dynamically test for smarter interfaces later.
    using (var hostObject = new RCWForCurrentContext<IVbcHostObject>(this.HostObject as IVbcHostObject))
    {
        IVbcHostObject vbcHostObject = hostObject.RCW;

        if (vbcHostObject == null)
        {
            // The host passed in an invalid host object; error out and fall
            // through to the command-line path below.
            Log.LogErrorWithCodeFromResources("General_IncorrectHostObject", "Vbc", "IVbcHostObject");
        }
        else
        {
            bool hostObjectSuccessfullyInitialized = InitializeHostCompiler(vbcHostObject);

            // If we're currently only in design-time (as opposed to build-time),
            // then we're done: the host compiler is initialized as best we can,
            // and we certainly don't want to actually run the final compile.
            if (vbcHostObject.IsDesignTime())
            {
                return hostObjectSuccessfullyInitialized
                    ? HostObjectInitializationStatus.NoActionReturnSuccess
                    : HostObjectInitializationStatus.NoActionReturnFailure;
            }

            // Roslyn doesn't support using the host object for compilation.
            // Since we're about to shell out to the command-line compiler, fail
            // fast with a consistent error if any references are missing from
            // disk: the command-line compiler would fail anyway, and this keeps
            // Vbc / Csc behavior aligned. (See bug 275726 — localized builds of
            // VBC.EXE don't localize the word "error", so output can't be scanned
            // for it reliably.)
            if (!CheckAllReferencesExistOnDisk())
            {
                return HostObjectInitializationStatus.NoActionReturnFailure;
            }

            // The host compiler doesn't support some of the switches/parameters
            // being passed to it, so resort to the command-line compiler.
            UsedCommandLineTool = true;
            return HostObjectInitializationStatus.UseAlternateToolToExecute;
        }
    }

    // No appropriate host object was found.
    UsedCommandLineTool = true;
    return HostObjectInitializationStatus.UseAlternateToolToExecute;
}
/// <summary>
/// This method will get called during Execute() if a host object has been passed into the Vbc
/// task. Returns true if an appropriate host object was found, it was called to do the compile,
/// and the compile succeeded. Otherwise, we return false.
/// </summary>
/// <owner>RGoel</owner>
protected override bool CallHostObjectToExecute()
{
    Debug.Assert(this.HostObject != null, "We should not be here if the host object has not been set.");

    IVbcHostObject vbcHostObject = this.HostObject as IVbcHostObject;
    Debug.Assert(vbcHostObject != null, "Wrong kind of host object passed in!");

    IVbcHostObject5 vbcHostObject5 = vbcHostObject as IVbcHostObject5;
    Debug.Assert(vbcHostObject5 != null, "Wrong kind of host object passed in!");

    if (vbcHostObject5 == null)
    {
        // If for some reason we can't get to IVbcHostObject5 we just fall back
        // to the old Compile method. This method unfortunately allows for
        // reentrancy on the UI thread.
        return vbcHostObject.Compile();
    }

    // IVbcHostObjectFreeThreaded::Compile is the preferred way to compile the
    // host object because while it is still synchronous it does its waiting on
    // our BG thread (as opposed to the UI thread for IVbcHostObject::Compile).
    return vbcHostObject5.GetFreeThreadedHostObject().Compile();
}
/// <summary>
/// Immutable (message, importance) pair queued up in _vbErrorLines while a
/// multi-line vbc diagnostic is being reassembled.
/// </summary>
private class VBError
{
    public string Message { get; }

    public MessageImportance MessageImportance { get; }

    public VBError(string text, MessageImportance importance)
    {
        Message = text;
        MessageImportance = importance;
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using OrchardCore.ContentManagement.Records;
using OrchardCore.ContentManagement.Routing;
using OrchardCore.Documents;
using OrchardCore.Environment.Shell.Scope;
using YesSql;
namespace OrchardCore.Autoroute.Services
{
public class AutorouteEntries : IAutorouteEntries
{
    private readonly IVolatileDocumentManager<AutorouteStateDocument> _autorouteStateManager;

    // Lock-free read model: both dictionaries are immutable snapshots that are
    // replaced wholesale under _semaphore and read without any locking.
    //
    // NOTE(review): the field names read swapped relative to their contents —
    // '_paths' is keyed by content item id (see AddEntries) and '_contentItemIds'
    // is keyed by path. Kept as-is; renaming would be a behavior-neutral but
    // wide-reaching change.
    private ImmutableDictionary<string, AutorouteEntry> _paths = ImmutableDictionary<string, AutorouteEntry>.Empty;
    private ImmutableDictionary<string, AutorouteEntry> _contentItemIds = ImmutableDictionary<string, AutorouteEntry>.Empty;

    // Serializes initialization and refresh; lookups never wait on it.
    private readonly SemaphoreSlim _semaphore = new SemaphoreSlim(1);

    // Highest AutoroutePartIndex.Id already folded into the entries; refreshes
    // only query rows with a greater Id.
    private int _lastIndexId;

    // Identifier of the AutorouteStateDocument the in-memory entries reflect.
    private string _stateIdentifier;
    private bool _initialized;

    public AutorouteEntries(IVolatileDocumentManager<AutorouteStateDocument> autorouteStateManager)
    {
        _autorouteStateManager = autorouteStateManager;

        // Path lookups are case-insensitive.
        _contentItemIds = _contentItemIds.WithComparers(StringComparer.OrdinalIgnoreCase);
    }

    // Looks up an entry by URL path; trailing slashes are ignored.
    public async Task<(bool, AutorouteEntry)> TryGetEntryByPathAsync(string path)
    {
        await EnsureInitializedAsync();

        if (_contentItemIds.TryGetValue(path.TrimEnd('/'), out var entry))
        {
            return (true, entry);
        }

        // 'entry' is default here (TryGetValue failed).
        return (false, entry);
    }

    // Looks up an entry by content item id (container or contained item).
    public async Task<(bool, AutorouteEntry)> TryGetEntryByContentItemIdAsync(string contentItemId)
    {
        await EnsureInitializedAsync();

        if (_paths.TryGetValue(contentItemId, out var entry))
        {
            return (true, entry);
        }

        return (false, entry);
    }

    public async Task UpdateEntriesAsync()
    {
        await EnsureInitializedAsync();

        // Update the cache with a new state and then refresh entries as it would be done on a next request.
        await _autorouteStateManager.UpdateAsync(new AutorouteStateDocument(), afterUpdateAsync: RefreshEntriesAsync);
    }

    // Lazily initializes on first use; afterwards, refreshes only when the shared
    // state document's identifier has changed since our last refresh.
    private async Task EnsureInitializedAsync()
    {
        if (!_initialized)
        {
            await InitializeEntriesAsync();
        }
        else
        {
            var state = await _autorouteStateManager.GetOrCreateImmutableAsync();
            if (_stateIdentifier != state.Identifier)
            {
                await RefreshEntriesAsync(state);
            }
        }
    }

    // Merges 'entries' into the snapshots. Callers hold _semaphore (called from
    // RefreshEntriesAsync / InitializeEntriesAsync).
    protected void AddEntries(IEnumerable<AutorouteEntry> entries)
    {
        // Evict all entries related to a container item from autoroute entries.
        // This is necessary to account for deletions, disabling of an item, or disabling routing of contained items.
        ILookup<string, AutorouteEntry> entriesByContainer = null;
        foreach (var entry in entries.Where(x => String.IsNullOrEmpty(x.ContainedContentItemId)))
        {
            // Built lazily, only if at least one container-level entry is present.
            entriesByContainer ??= _paths.Values
                .Where(x => !String.IsNullOrEmpty(x.ContainedContentItemId))
                .ToLookup(x => x.ContentItemId);

            if (!entriesByContainer.Contains(entry.ContentItemId))
            {
                continue;
            }

            var entriesToRemove = entriesByContainer[entry.ContentItemId];

            _paths = _paths.RemoveRange(entriesToRemove.Select(x => x.ContainedContentItemId));
            _contentItemIds = _contentItemIds.RemoveRange(entriesToRemove.Select(x => x.Path));
        }

        foreach (var entry in entries)
        {
            // A container entry replacing a previous one: drop the old path mapping.
            if (_paths.TryGetValue(entry.ContentItemId, out var previousContainerEntry) &&
                String.IsNullOrEmpty(entry.ContainedContentItemId))
            {
                _contentItemIds = _contentItemIds.Remove(previousContainerEntry.Path);
            }

            // A contained-item entry replacing a previous one: drop its old path mapping.
            if (!String.IsNullOrEmpty(entry.ContainedContentItemId) &&
                _paths.TryGetValue(entry.ContainedContentItemId, out var previousContainedEntry))
            {
                _contentItemIds = _contentItemIds.Remove(previousContainedEntry.Path);
            }

            _contentItemIds = _contentItemIds.SetItem(entry.Path, entry);

            // Contained items are keyed by their own id, containers by theirs.
            if (!String.IsNullOrEmpty(entry.ContainedContentItemId))
            {
                _paths = _paths.SetItem(entry.ContainedContentItemId, entry);
            }
            else
            {
                _paths = _paths.SetItem(entry.ContentItemId, entry);
            }
        }
    }

    // Removes 'entries' (and any contained-item entries under the same container)
    // from the snapshots. Callers hold _semaphore.
    protected void RemoveEntries(IEnumerable<AutorouteEntry> entries)
    {
        foreach (var entry in entries)
        {
            // Evict all entries related to a container item from autoroute entries.
            // (Deferred query over the pre-removal snapshot; immutable dictionaries
            // make the double enumeration below safe.)
            var entriesToRemove = _paths.Values.Where(x => x.ContentItemId == entry.ContentItemId &&
                !String.IsNullOrEmpty(x.ContainedContentItemId));

            _paths = _paths.RemoveRange(entriesToRemove.Select(x => x.ContainedContentItemId));
            _contentItemIds = _contentItemIds.RemoveRange(entriesToRemove.Select(x => x.Path));

            _paths = _paths.Remove(entry.ContentItemId);
            _contentItemIds = _contentItemIds.Remove(entry.Path);
        }
    }

    // Incrementally applies index rows newer than _lastIndexId. Uses the
    // double-checked pattern: the identifier is re-tested under the semaphore.
    private async Task RefreshEntriesAsync(AutorouteStateDocument state)
    {
        if (_stateIdentifier == state.Identifier)
        {
            return;
        }

        await _semaphore.WaitAsync();
        try
        {
            if (_stateIdentifier != state.Identifier)
            {
                var indexes = await Session
                    .QueryIndex<AutoroutePartIndex>(i => i.Id > _lastIndexId)
                    .OrderBy(i => i.Id)
                    .ListAsync();

                // A draft is indexed to check for conflicts, and to remove an entry, but only if an item is unpublished,
                // so only if the entry 'DocumentId' matches, this because when a draft is saved more than once, the index
                // is not updated for the published version that may be already scanned, so the entry may not be re-added.
                var entriesToRemove = indexes
                    .Where(i => !i.Published || i.Path == null)
                    .SelectMany(i => _paths.Values.Where(e =>
                        // The item was removed.
                        ((!i.Published && !i.Latest) ||
                        // The part was disabled or removed.
                        (i.Path == null && i.Published) ||
                        // The item was unpublished.
                        (!i.Published && e.DocumentId == i.DocumentId)) &&
                        (e.ContentItemId == i.ContentItemId ||
                        e.ContainedContentItemId == i.ContentItemId)));

                var entriesToAdd = indexes
                    .Where(i => i.Published && i.Path != null)
                    .Select(i => new AutorouteEntry(i.ContentItemId, i.Path, i.ContainedContentItemId, i.JsonPath)
                    {
                        DocumentId = i.DocumentId
                    });

                RemoveEntries(entriesToRemove);
                AddEntries(entriesToAdd);

                _lastIndexId = indexes.LastOrDefault()?.Id ?? 0;
                _stateIdentifier = state.Identifier;
            }
        }
        finally
        {
            _semaphore.Release();
        }
    }

    // One-time full load of all published routes; double-checked under _semaphore.
    protected virtual async Task InitializeEntriesAsync()
    {
        if (_initialized)
        {
            return;
        }

        await _semaphore.WaitAsync();
        try
        {
            if (!_initialized)
            {
                var state = await _autorouteStateManager.GetOrCreateImmutableAsync();

                var indexes = await Session
                    .QueryIndex<AutoroutePartIndex>(i => i.Published && i.Path != null)
                    .OrderBy(i => i.Id)
                    .ListAsync();

                var entries = indexes.Select(i => new AutorouteEntry(i.ContentItemId, i.Path, i.ContainedContentItemId, i.JsonPath)
                {
                    DocumentId = i.DocumentId
                });

                AddEntries(entries);

                _lastIndexId = indexes.LastOrDefault()?.Id ?? 0;
                _stateIdentifier = state.Identifier;

                // Set last: readers treat initialization as complete once true.
                _initialized = true;
            }
        }
        finally
        {
            _semaphore.Release();
        }
    }

    // Resolved per call from the current shell scope (scoped YesSql session).
    private static ISession Session => ShellScope.Services.GetRequiredService<ISession>();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/metric.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Cloud.Monitoring.V3 {
/// <summary>Holder for reflection information generated from google/monitoring/v3/metric.proto</summary>
public static partial class MetricReflection {

  #region Descriptor
  /// <summary>File descriptor for google/monitoring/v3/metric.proto</summary>
  public static pbr::FileDescriptor Descriptor {
    get { return descriptor; }
  }
  private static pbr::FileDescriptor descriptor;

  static MetricReflection() {
    // Generated by protoc: the base64 payload is the serialized descriptor for
    // metric.proto. Do not edit the string by hand.
    byte[] descriptorData = global::System.Convert.FromBase64String(
        string.Concat(
          "CiFnb29nbGUvbW9uaXRvcmluZy92My9tZXRyaWMucHJvdG8SFGdvb2dsZS5t",
          "b25pdG9yaW5nLnYzGhdnb29nbGUvYXBpL21ldHJpYy5wcm90bxojZ29vZ2xl",
          "L2FwaS9tb25pdG9yZWRfcmVzb3VyY2UucHJvdG8aIWdvb2dsZS9tb25pdG9y",
          "aW5nL3YzL2NvbW1vbi5wcm90byJuCgVQb2ludBI0CghpbnRlcnZhbBgBIAEo",
          "CzIiLmdvb2dsZS5tb25pdG9yaW5nLnYzLlRpbWVJbnRlcnZhbBIvCgV2YWx1",
          "ZRgCIAEoCzIgLmdvb2dsZS5tb25pdG9yaW5nLnYzLlR5cGVkVmFsdWUiiAIK",
          "ClRpbWVTZXJpZXMSIgoGbWV0cmljGAEgASgLMhIuZ29vZ2xlLmFwaS5NZXRy",
          "aWMSLwoIcmVzb3VyY2UYAiABKAsyHS5nb29nbGUuYXBpLk1vbml0b3JlZFJl",
          "c291cmNlEjwKC21ldHJpY19raW5kGAMgASgOMicuZ29vZ2xlLmFwaS5NZXRy",
          "aWNEZXNjcmlwdG9yLk1ldHJpY0tpbmQSOgoKdmFsdWVfdHlwZRgEIAEoDjIm",
          "Lmdvb2dsZS5hcGkuTWV0cmljRGVzY3JpcHRvci5WYWx1ZVR5cGUSKwoGcG9p",
          "bnRzGAUgAygLMhsuZ29vZ2xlLm1vbml0b3JpbmcudjMuUG9pbnRChgEKGGNv",
          "bS5nb29nbGUubW9uaXRvcmluZy52M0ILTWV0cmljUHJvdG9QAVo+Z29vZ2xl",
          "LmdvbGFuZy5vcmcvZ2VucHJvdG8vZ29vZ2xlYXBpcy9tb25pdG9yaW5nL3Yz",
          "O21vbml0b3JpbmeqAhpHb29nbGUuQ2xvdWQuTW9uaXRvcmluZy5WM2IGcHJv",
          "dG8z"));
    // Registers the descriptor together with its dependencies and maps the two
    // message types (Point, TimeSeries) onto the generated CLR types.
    descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
        new pbr::FileDescriptor[] { global::Google.Api.MetricReflection.Descriptor, global::Google.Api.MonitoredResourceReflection.Descriptor, global::Google.Cloud.Monitoring.V3.CommonReflection.Descriptor, },
        new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
          new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Monitoring.V3.Point), global::Google.Cloud.Monitoring.V3.Point.Parser, new[]{ "Interval", "Value" }, null, null, null),
          new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Monitoring.V3.TimeSeries), global::Google.Cloud.Monitoring.V3.TimeSeries.Parser, new[]{ "Metric", "Resource", "MetricKind", "ValueType", "Points" }, null, null, null)
        }));
  }
  #endregion

}
#region Messages
/// <summary>
/// A single data point in a time series.
/// </summary>
public sealed partial class Point : pb::IMessage<Point> {
private static readonly pb::MessageParser<Point> _parser = new pb::MessageParser<Point>(() => new Point());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Point> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Monitoring.V3.MetricReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Point() {
  // Hook for user code in a partial class; no-op unless implemented elsewhere.
  OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
// Copy constructor: deep-copies the message-typed fields (via Clone) so the
// copy shares no mutable state with 'other'; null fields stay null.
public Point(Point other) : this() {
  Interval = other.interval_ != null ? other.Interval.Clone() : null;
  Value = other.value_ != null ? other.Value.Clone() : null;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
// Deep clone via the copy constructor above.
public Point Clone() {
  return new Point(this);
}
/// <summary>Field number for the "interval" field.</summary>
public const int IntervalFieldNumber = 1;
private global::Google.Cloud.Monitoring.V3.TimeInterval interval_;
/// <summary>
/// The time interval to which the data point applies. For GAUGE metrics, only
/// the end time of the interval is used. For DELTA metrics, the start and end
/// time should specify a non-zero interval, with subsequent points specifying
/// contiguous and non-overlapping intervals. For CUMULATIVE metrics, the
/// start and end time should specify a non-zero interval, with subsequent
/// points specifying the same start time and increasing end times, until an
/// event resets the cumulative value to zero and sets a new start time for the
/// following points.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Monitoring.V3.TimeInterval Interval {
get { return interval_; }
set {
interval_ = value;
}
}
/// <summary>Field number for the "value" field.</summary>
public const int ValueFieldNumber = 2;
private global::Google.Cloud.Monitoring.V3.TypedValue value_;
/// <summary>
/// The value of the data point.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Monitoring.V3.TypedValue Value {
get { return value_; }
set {
value_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as Point);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Point other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(Interval, other.Interval)) return false;
if (!object.Equals(Value, other.Value)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (interval_ != null) hash ^= Interval.GetHashCode();
if (value_ != null) hash ^= Value.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (interval_ != null) {
output.WriteRawTag(10);
output.WriteMessage(Interval);
}
if (value_ != null) {
output.WriteRawTag(18);
output.WriteMessage(Value);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (interval_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Interval);
}
if (value_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Value);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Point other) {
if (other == null) {
return;
}
if (other.interval_ != null) {
if (interval_ == null) {
interval_ = new global::Google.Cloud.Monitoring.V3.TimeInterval();
}
Interval.MergeFrom(other.Interval);
}
if (other.value_ != null) {
if (value_ == null) {
value_ = new global::Google.Cloud.Monitoring.V3.TypedValue();
}
Value.MergeFrom(other.Value);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
if (interval_ == null) {
interval_ = new global::Google.Cloud.Monitoring.V3.TimeInterval();
}
input.ReadMessage(interval_);
break;
}
case 18: {
if (value_ == null) {
value_ = new global::Google.Cloud.Monitoring.V3.TypedValue();
}
input.ReadMessage(value_);
break;
}
}
}
}
}
  /// <summary>
  /// A collection of data points that describes the time-varying values
  /// of a metric. A time series is identified by a combination of a
  /// fully-specified monitored resource and a fully-specified metric.
  /// This type is used for both listing and creating time series.
  /// </summary>
  public sealed partial class TimeSeries : pb::IMessage<TimeSeries> {
    // Singleton parser; all wire-format deserialization of TimeSeries goes through this factory.
    private static readonly pb::MessageParser<TimeSeries> _parser = new pb::MessageParser<TimeSeries>(() => new TimeSeries());
    /// <summary>Gets the message parser for <see cref="TimeSeries"/>.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<TimeSeries> Parser { get { return _parser; } }
    /// <summary>Gets the reflection descriptor for this message (message type index 1 in the file descriptor).</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Monitoring.V3.MetricReflection.Descriptor.MessageTypes[1]; }
    }
    // Explicit interface implementation simply forwards to the static Descriptor.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    /// <summary>Creates an empty <see cref="TimeSeries"/> and invokes the <see cref="OnConstruction"/> partial hook.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public TimeSeries() {
      OnConstruction();
    }
    // Partial hook so hand-written code in another part of this partial class can
    // participate in construction without editing generated code.
    partial void OnConstruction();
    /// <summary>Copy constructor: message fields are deep-cloned and the points collection is copied element-wise.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public TimeSeries(TimeSeries other) : this() {
      Metric = other.metric_ != null ? other.Metric.Clone() : null;
      Resource = other.resource_ != null ? other.Resource.Clone() : null;
      metricKind_ = other.metricKind_;
      valueType_ = other.valueType_;
      points_ = other.points_.Clone();
    }
    /// <summary>Returns a deep copy of this message.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public TimeSeries Clone() {
      return new TimeSeries(this);
    }
    /// <summary>Field number for the "metric" field.</summary>
    public const int MetricFieldNumber = 1;
    private global::Google.Api.Metric metric_;
    /// <summary>
    /// The associated metric. A fully-specified metric used to identify the time
    /// series.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Api.Metric Metric {
      get { return metric_; }
      set {
        metric_ = value;
      }
    }
    /// <summary>Field number for the "resource" field.</summary>
    public const int ResourceFieldNumber = 2;
    private global::Google.Api.MonitoredResource resource_;
    /// <summary>
    /// The associated resource. A fully-specified monitored resource used to
    /// identify the time series.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Api.MonitoredResource Resource {
      get { return resource_; }
      set {
        resource_ = value;
      }
    }
    /// <summary>Field number for the "metric_kind" field.</summary>
    public const int MetricKindFieldNumber = 3;
    // Enum default 0 is the proto3 "unspecified" value; a 0 value is never serialized.
    private global::Google.Api.MetricDescriptor.Types.MetricKind metricKind_ = 0;
    /// <summary>
    /// The metric kind of the time series. When listing time series, this metric
    /// kind might be different from the metric kind of the associated metric if
    /// this time series is an alignment or reduction of other time series.
    ///
    /// When creating a time series, this field is optional. If present, it must be
    /// the same as the metric kind of the associated metric. If the associated
    /// metric's descriptor must be auto-created, then this field specifies the
    /// metric kind of the new descriptor and must be either `GAUGE` (the default)
    /// or `CUMULATIVE`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Api.MetricDescriptor.Types.MetricKind MetricKind {
      get { return metricKind_; }
      set {
        metricKind_ = value;
      }
    }
    /// <summary>Field number for the "value_type" field.</summary>
    public const int ValueTypeFieldNumber = 4;
    // Enum default 0 is the proto3 "unspecified" value; a 0 value is never serialized.
    private global::Google.Api.MetricDescriptor.Types.ValueType valueType_ = 0;
    /// <summary>
    /// The value type of the time series. When listing time series, this value
    /// type might be different from the value type of the associated metric if
    /// this time series is an alignment or reduction of other time series.
    ///
    /// When creating a time series, this field is optional. If present, it must be
    /// the same as the type of the data in the `points` field.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Api.MetricDescriptor.Types.ValueType ValueType {
      get { return valueType_; }
      set {
        valueType_ = value;
      }
    }
    /// <summary>Field number for the "points" field.</summary>
    public const int PointsFieldNumber = 5;
    // Codec carries the per-element tag 42 = field 5, wire type 2 (length-delimited).
    private static readonly pb::FieldCodec<global::Google.Cloud.Monitoring.V3.Point> _repeated_points_codec
        = pb::FieldCodec.ForMessage(42, global::Google.Cloud.Monitoring.V3.Point.Parser);
    private readonly pbc::RepeatedField<global::Google.Cloud.Monitoring.V3.Point> points_ = new pbc::RepeatedField<global::Google.Cloud.Monitoring.V3.Point>();
    /// <summary>
    /// The data points of this time series. When listing time series, the order of
    /// the points is specified by the list method.
    ///
    /// When creating a time series, this field must contain exactly one point and
    /// the point's type must be the same as the value type of the associated
    /// metric. If the associated metric's descriptor must be auto-created, then
    /// the value type of the descriptor is determined by the point's type, which
    /// must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<global::Google.Cloud.Monitoring.V3.Point> Points {
      get { return points_; }
    }
    /// <summary>Field-wise equality; see <see cref="Equals(TimeSeries)"/>.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as TimeSeries);
    }
    /// <summary>Two series are equal when every field, including the ordered points collection, compares equal.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(TimeSeries other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (!object.Equals(Metric, other.Metric)) return false;
      if (!object.Equals(Resource, other.Resource)) return false;
      if (MetricKind != other.MetricKind) return false;
      if (ValueType != other.ValueType) return false;
      if(!points_.Equals(other.points_)) return false;
      return true;
    }
    /// <summary>Hash combines set fields via XOR; default-valued fields contribute nothing.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (metric_ != null) hash ^= Metric.GetHashCode();
      if (resource_ != null) hash ^= Resource.GetHashCode();
      if (MetricKind != 0) hash ^= MetricKind.GetHashCode();
      if (ValueType != 0) hash ^= ValueType.GetHashCode();
      hash ^= points_.GetHashCode();
      return hash;
    }
    /// <summary>Diagnostic (JSON-style) rendering of the message.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    /// <summary>Serializes non-default fields to the wire format; default-valued fields are omitted entirely.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (metric_ != null) {
        // Tag 10 = field 1, wire type 2 (length-delimited).
        output.WriteRawTag(10);
        output.WriteMessage(Metric);
      }
      if (resource_ != null) {
        // Tag 18 = field 2, wire type 2 (length-delimited).
        output.WriteRawTag(18);
        output.WriteMessage(Resource);
      }
      if (MetricKind != 0) {
        // Tag 24 = field 3, wire type 0 (varint enum).
        output.WriteRawTag(24);
        output.WriteEnum((int) MetricKind);
      }
      if (ValueType != 0) {
        // Tag 32 = field 4, wire type 0 (varint enum).
        output.WriteRawTag(32);
        output.WriteEnum((int) ValueType);
      }
      // Repeated field: the codec writes tag 42 before each element.
      points_.WriteTo(output, _repeated_points_codec);
    }
    /// <summary>Computes the serialized size in bytes: each non-default field costs one tag byte plus its payload.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (metric_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Metric);
      }
      if (resource_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Resource);
      }
      if (MetricKind != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) MetricKind);
      }
      if (ValueType != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) ValueType);
      }
      size += points_.CalculateSize(_repeated_points_codec);
      return size;
    }
    /// <summary>Merges another series into this one; message fields merge recursively, scalars overwrite when non-default, and points are appended.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(TimeSeries other) {
      if (other == null) {
        return;
      }
      if (other.metric_ != null) {
        if (metric_ == null) {
          metric_ = new global::Google.Api.Metric();
        }
        Metric.MergeFrom(other.Metric);
      }
      if (other.resource_ != null) {
        if (resource_ == null) {
          resource_ = new global::Google.Api.MonitoredResource();
        }
        Resource.MergeFrom(other.Resource);
      }
      if (other.MetricKind != 0) {
        MetricKind = other.MetricKind;
      }
      if (other.ValueType != 0) {
        ValueType = other.ValueType;
      }
      points_.Add(other.points_);
    }
    /// <summary>Reads fields from the wire format, merging into this message; unrecognised fields are skipped.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            // Unknown field: skip rather than fail, for forward compatibility.
            input.SkipLastField();
            break;
          case 10: {
            if (metric_ == null) {
              metric_ = new global::Google.Api.Metric();
            }
            input.ReadMessage(metric_);
            break;
          }
          case 18: {
            if (resource_ == null) {
              resource_ = new global::Google.Api.MonitoredResource();
            }
            input.ReadMessage(resource_);
            break;
          }
          case 24: {
            metricKind_ = (global::Google.Api.MetricDescriptor.Types.MetricKind) input.ReadEnum();
            break;
          }
          case 32: {
            valueType_ = (global::Google.Api.MetricDescriptor.Types.ValueType) input.ReadEnum();
            break;
          }
          case 42: {
            points_.AddEntriesFrom(input, _repeated_points_codec);
            break;
          }
        }
      }
    }
  }
#endregion
}
#endregion Designer generated code
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Linq;
using System.Text;
namespace EduHub.Data.Entities
{
    /// <summary>
    /// Note Categories Data Set
    /// </summary>
    [GeneratedCode("EduHub Data", "0.9")]
    public sealed partial class KPNDataSet : EduHubDataSet<KPN>
    {
        /// <inheritdoc />
        public override string Name { get { return "KPN"; } }
        /// <inheritdoc />
        public override bool SupportsEntityLastModified { get { return true; } }
        // The KPNKEY index is built lazily on first use so loading the data set
        // does not pay the dictionary-build cost unless an index lookup occurs.
        internal KPNDataSet(EduHubContext Context)
            : base(Context)
        {
            Index_KPNKEY = new Lazy<Dictionary<string, KPN>>(() => this.ToDictionary(i => i.KPNKEY));
        }
        /// <summary>
        /// Matches CSV file headers to actions, used to deserialize <see cref="KPN" />
        /// </summary>
        /// <param name="Headers">The CSV column headers</param>
        /// <returns>An array of actions which deserialize <see cref="KPN" /> fields for each CSV column header</returns>
        internal override Action<KPN, string>[] BuildMapper(IReadOnlyList<string> Headers)
        {
            var mapper = new Action<KPN, string>[Headers.Count];
            for (var i = 0; i < Headers.Count; i++) {
                switch (Headers[i]) {
                    case "KPNKEY":
                        mapper[i] = (e, v) => e.KPNKEY = v;
                        break;
                    case "DESCRIPTION":
                        mapper[i] = (e, v) => e.DESCRIPTION = v;
                        break;
                    case "LW_DATE":
                        // Parsed with an exact, culture-invariant format so the host
                        // machine's locale cannot change how export dates are read.
                        mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                        break;
                    case "LW_TIME":
                        mapper[i] = (e, v) => e.LW_TIME = v == null ? (short?)null : short.Parse(v);
                        break;
                    case "LW_USER":
                        mapper[i] = (e, v) => e.LW_USER = v;
                        break;
                    default:
                        // Unrecognised columns are silently ignored rather than treated as an error.
                        mapper[i] = MapperNoOp;
                        break;
                }
            }
            return mapper;
        }
        /// <summary>
        /// Merges <see cref="KPN" /> delta entities
        /// </summary>
        /// <param name="Entities">Iterator for base <see cref="KPN" /> entities</param>
        /// <param name="DeltaEntities">List of delta <see cref="KPN" /> entities</param>
        /// <returns>A merged <see cref="IEnumerable{KPN}"/> of entities</returns>
        /// <remarks>
        /// Streaming merge: both sequences are assumed ordered by the clustered key
        /// (KPNKEY). Delta entities are interleaved in key order, and any base entity
        /// whose key also appears in the delta set is dropped in favour of the delta.
        /// </remarks>
        internal override IEnumerable<KPN> ApplyDeltaEntities(IEnumerable<KPN> Entities, List<KPN> DeltaEntities)
        {
            HashSet<string> Index_KPNKEY = new HashSet<string>(DeltaEntities.Select(i => i.KPNKEY));
            using (var deltaIterator = DeltaEntities.GetEnumerator())
            {
                using (var entityIterator = Entities.GetEnumerator())
                {
                    while (deltaIterator.MoveNext())
                    {
                        var deltaClusteredKey = deltaIterator.Current.KPNKEY;
                        bool yieldEntity = false;
                        while (entityIterator.MoveNext())
                        {
                            var entity = entityIterator.Current;
                            // Remove() doubles as a membership test: true when this base
                            // entity is superseded by a delta entity with the same key.
                            bool overwritten = Index_KPNKEY.Remove(entity.KPNKEY);
                            if (entity.KPNKEY.CompareTo(deltaClusteredKey) <= 0)
                            {
                                if (!overwritten)
                                {
                                    yield return entity;
                                }
                            }
                            else
                            {
                                // Passed the insertion point for the current delta; remember
                                // whether this base entity must still be emitted after it.
                                yieldEntity = !overwritten;
                                break;
                            }
                        }
                        yield return deltaIterator.Current;
                        if (yieldEntity)
                        {
                            yield return entityIterator.Current;
                        }
                    }
                    // Emit any base entities remaining after the last delta.
                    while (entityIterator.MoveNext())
                    {
                        yield return entityIterator.Current;
                    }
                }
            }
        }
        #region Index Fields
        // Lazily-built unique index on the clustered key (KPNKEY -> entity).
        private Lazy<Dictionary<string, KPN>> Index_KPNKEY;
        #endregion
        #region Index Methods
        /// <summary>
        /// Find KPN by KPNKEY field
        /// </summary>
        /// <param name="KPNKEY">KPNKEY value used to find KPN</param>
        /// <returns>Related KPN entity</returns>
        /// <exception cref="KeyNotFoundException">No match was found</exception>
        public KPN FindByKPNKEY(string KPNKEY)
        {
            return Index_KPNKEY.Value[KPNKEY];
        }
        /// <summary>
        /// Attempt to find KPN by KPNKEY field
        /// </summary>
        /// <param name="KPNKEY">KPNKEY value used to find KPN</param>
        /// <param name="Value">Related KPN entity</param>
        /// <returns>True if the related KPN entity is found</returns>
        public bool TryFindByKPNKEY(string KPNKEY, out KPN Value)
        {
            return Index_KPNKEY.Value.TryGetValue(KPNKEY, out Value);
        }
        /// <summary>
        /// Attempt to find KPN by KPNKEY field
        /// </summary>
        /// <param name="KPNKEY">KPNKEY value used to find KPN</param>
        /// <returns>Related KPN entity, or null if not found</returns>
        public KPN TryFindByKPNKEY(string KPNKEY)
        {
            KPN value;
            if (Index_KPNKEY.Value.TryGetValue(KPNKEY, out value))
            {
                return value;
            }
            else
            {
                return null;
            }
        }
        #endregion
        #region SQL Integration
        /// <summary>
        /// Returns a <see cref="SqlCommand"/> which checks for the existence of a KPN table, and if not found, creates the table and associated indexes.
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection)
        {
            return new SqlCommand(
                connection: SqlConnection,
                cmdText:
@"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[KPN]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1)
BEGIN
    CREATE TABLE [dbo].[KPN](
        [KPNKEY] varchar(6) NOT NULL,
        [DESCRIPTION] varchar(40) NULL,
        [LW_DATE] datetime NULL,
        [LW_TIME] smallint NULL,
        [LW_USER] varchar(128) NULL,
        CONSTRAINT [KPN_Index_KPNKEY] PRIMARY KEY CLUSTERED (
            [KPNKEY] ASC
        )
    );
END");
        }
        /// <summary>
        /// Returns null as <see cref="KPNDataSet"/> has no non-clustered indexes.
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        /// <returns>null</returns>
        public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection)
        {
            return null;
        }
        /// <summary>
        /// Returns null as <see cref="KPNDataSet"/> has no non-clustered indexes.
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        /// <returns>null</returns>
        public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection)
        {
            return null;
        }
        /// <summary>
        /// Returns a <see cref="SqlCommand"/> which deletes the <see cref="KPN"/> entities passed
        /// </summary>
        /// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
        /// <param name="Entities">The <see cref="KPN"/> entities to be deleted</param>
        public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<KPN> Entities)
        {
            SqlCommand command = new SqlCommand();
            int parameterIndex = 0;
            StringBuilder builder = new StringBuilder();
            List<string> Index_KPNKEY = new List<string>();
            foreach (var entity in Entities)
            {
                Index_KPNKEY.Add(entity.KPNKEY);
            }
            builder.AppendLine("DELETE [dbo].[KPN] WHERE");
            // Index_KPNKEY
            // Keys are passed as SqlParameters (one per entity) rather than
            // concatenated into the SQL text, avoiding injection and plan churn.
            builder.Append("[KPNKEY] IN (");
            for (int index = 0; index < Index_KPNKEY.Count; index++)
            {
                if (index != 0)
                    builder.Append(", ");
                // KPNKEY
                var parameterKPNKEY = $"@p{parameterIndex++}";
                builder.Append(parameterKPNKEY);
                command.Parameters.Add(parameterKPNKEY, SqlDbType.VarChar, 6).Value = Index_KPNKEY[index];
            }
            builder.Append(");");
            command.Connection = SqlConnection;
            command.CommandText = builder.ToString();
            return command;
        }
        /// <summary>
        /// Provides a <see cref="IDataReader"/> for the KPN data set
        /// </summary>
        /// <returns>A <see cref="IDataReader"/> for the KPN data set</returns>
        public override EduHubDataSetDataReader<KPN> GetDataSetDataReader()
        {
            return new KPNDataReader(Load());
        }
        /// <summary>
        /// Provides a <see cref="IDataReader"/> for the KPN data set
        /// </summary>
        /// <returns>A <see cref="IDataReader"/> for the KPN data set</returns>
        public override EduHubDataSetDataReader<KPN> GetDataSetDataReader(List<KPN> Entities)
        {
            return new KPNDataReader(new EduHubDataSetLoadedReader<KPN>(this, Entities));
        }
        // Modest implementation to primarily support SqlBulkCopy.
        // Ordinals 0..4 correspond to the KPN columns in table-definition order:
        // KPNKEY, DESCRIPTION, LW_DATE, LW_TIME, LW_USER.
        private class KPNDataReader : EduHubDataSetDataReader<KPN>
        {
            public KPNDataReader(IEduHubDataSetReader<KPN> Reader)
                : base (Reader)
            {
            }
            // Fixed column count matching the KPN table definition.
            public override int FieldCount { get { return 5; } }
            // Returns the current entity's value for the given ordinal.
            public override object GetValue(int i)
            {
                switch (i)
                {
                    case 0: // KPNKEY
                        return Current.KPNKEY;
                    case 1: // DESCRIPTION
                        return Current.DESCRIPTION;
                    case 2: // LW_DATE
                        return Current.LW_DATE;
                    case 3: // LW_TIME
                        return Current.LW_TIME;
                    case 4: // LW_USER
                        return Current.LW_USER;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(i));
                }
            }
            // KPNKEY (ordinal 0) is the non-nullable primary key, so it is never DBNull.
            public override bool IsDBNull(int i)
            {
                switch (i)
                {
                    case 1: // DESCRIPTION
                        return Current.DESCRIPTION == null;
                    case 2: // LW_DATE
                        return Current.LW_DATE == null;
                    case 3: // LW_TIME
                        return Current.LW_TIME == null;
                    case 4: // LW_USER
                        return Current.LW_USER == null;
                    default:
                        return false;
                }
            }
            // Maps an ordinal back to its column name.
            public override string GetName(int ordinal)
            {
                switch (ordinal)
                {
                    case 0: // KPNKEY
                        return "KPNKEY";
                    case 1: // DESCRIPTION
                        return "DESCRIPTION";
                    case 2: // LW_DATE
                        return "LW_DATE";
                    case 3: // LW_TIME
                        return "LW_TIME";
                    case 4: // LW_USER
                        return "LW_USER";
                    default:
                        throw new ArgumentOutOfRangeException(nameof(ordinal));
                }
            }
            // Maps a column name (case-sensitive) back to its ordinal.
            public override int GetOrdinal(string name)
            {
                switch (name)
                {
                    case "KPNKEY":
                        return 0;
                    case "DESCRIPTION":
                        return 1;
                    case "LW_DATE":
                        return 2;
                    case "LW_TIME":
                        return 3;
                    case "LW_USER":
                        return 4;
                    default:
                        throw new ArgumentOutOfRangeException(nameof(name));
                }
            }
        }
        #endregion
    }
}
| |
/*
* Infoplus API
*
* Infoplus API.
*
* OpenAPI spec version: v1.0
* Contact: api@infopluscommerce.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace Infoplus.Model
{
/// <summary>
/// Location
/// </summary>
[DataContract]
public partial class Location : IEquatable<Location>
{
/// <summary>
/// Initializes a new instance of the <see cref="Location" /> class.
/// </summary>
[JsonConstructorAttribute]
protected Location() { }
/// <summary>
/// Initializes a new instance of the <see cref="Location" /> class.
/// </summary>
/// <param name="WarehouseId">WarehouseId (required).</param>
/// <param name="BuildingId">BuildingId.</param>
/// <param name="ZoneId">ZoneId.</param>
/// <param name="AisleId">AisleId.</param>
/// <param name="BillingTypeId">BillingTypeId (required).</param>
/// <param name="BehaviorType">BehaviorType (required).</param>
/// <param name="FootprintId">FootprintId (required).</param>
/// <param name="AddressSchemeId">AddressSchemeId.</param>
/// <param name="Address">Address.</param>
/// <param name="Level">Level.</param>
/// <param name="Bay">Bay.</param>
/// <param name="Number">Number.</param>
/// <param name="Online">Online (required) (default to false).</param>
/// <param name="PriorityCode">PriorityCode.</param>
/// <param name="AllowItemMixing">AllowItemMixing (required) (default to false).</param>
public Location(int? WarehouseId = null, int? BuildingId = null, int? ZoneId = null, int? AisleId = null, int? BillingTypeId = null, string BehaviorType = null, int? FootprintId = null, int? AddressSchemeId = null, string Address = null, int? Level = null, int? Bay = null, int? Number = null, bool? Online = null, int? PriorityCode = null, bool? AllowItemMixing = null)
{
// to ensure "WarehouseId" is required (not null)
if (WarehouseId == null)
{
throw new InvalidDataException("WarehouseId is a required property for Location and cannot be null");
}
else
{
this.WarehouseId = WarehouseId;
}
// to ensure "BillingTypeId" is required (not null)
if (BillingTypeId == null)
{
throw new InvalidDataException("BillingTypeId is a required property for Location and cannot be null");
}
else
{
this.BillingTypeId = BillingTypeId;
}
// to ensure "BehaviorType" is required (not null)
if (BehaviorType == null)
{
throw new InvalidDataException("BehaviorType is a required property for Location and cannot be null");
}
else
{
this.BehaviorType = BehaviorType;
}
// to ensure "FootprintId" is required (not null)
if (FootprintId == null)
{
throw new InvalidDataException("FootprintId is a required property for Location and cannot be null");
}
else
{
this.FootprintId = FootprintId;
}
// to ensure "Online" is required (not null)
if (Online == null)
{
throw new InvalidDataException("Online is a required property for Location and cannot be null");
}
else
{
this.Online = Online;
}
// to ensure "AllowItemMixing" is required (not null)
if (AllowItemMixing == null)
{
throw new InvalidDataException("AllowItemMixing is a required property for Location and cannot be null");
}
else
{
this.AllowItemMixing = AllowItemMixing;
}
this.BuildingId = BuildingId;
this.ZoneId = ZoneId;
this.AisleId = AisleId;
this.AddressSchemeId = AddressSchemeId;
this.Address = Address;
this.Level = Level;
this.Bay = Bay;
this.Number = Number;
this.PriorityCode = PriorityCode;
}
/// <summary>
/// Gets or Sets Id
/// </summary>
[DataMember(Name="id", EmitDefaultValue=false)]
public int? Id { get; private set; }
/// <summary>
/// Gets or Sets WarehouseId
/// </summary>
[DataMember(Name="warehouseId", EmitDefaultValue=false)]
public int? WarehouseId { get; set; }
/// <summary>
/// Gets or Sets BuildingId
/// </summary>
[DataMember(Name="buildingId", EmitDefaultValue=false)]
public int? BuildingId { get; set; }
/// <summary>
/// Gets or Sets ZoneId
/// </summary>
[DataMember(Name="zoneId", EmitDefaultValue=false)]
public int? ZoneId { get; set; }
/// <summary>
/// Gets or Sets AisleId
/// </summary>
[DataMember(Name="aisleId", EmitDefaultValue=false)]
public int? AisleId { get; set; }
/// <summary>
/// Gets or Sets BillingTypeId
/// </summary>
[DataMember(Name="billingTypeId", EmitDefaultValue=false)]
public int? BillingTypeId { get; set; }
/// <summary>
/// Gets or Sets BehaviorType
/// </summary>
[DataMember(Name="behaviorType", EmitDefaultValue=false)]
public string BehaviorType { get; set; }
/// <summary>
/// Gets or Sets FootprintId
/// </summary>
[DataMember(Name="footprintId", EmitDefaultValue=false)]
public int? FootprintId { get; set; }
/// <summary>
/// Gets or Sets AddressSchemeId
/// </summary>
[DataMember(Name="addressSchemeId", EmitDefaultValue=false)]
public int? AddressSchemeId { get; set; }
/// <summary>
/// Gets or Sets Origin
/// </summary>
[DataMember(Name="origin", EmitDefaultValue=false)]
public int? Origin { get; private set; }
/// <summary>
/// Gets or Sets Address
/// </summary>
[DataMember(Name="address", EmitDefaultValue=false)]
public string Address { get; set; }
/// <summary>
/// Gets or Sets Level
/// </summary>
[DataMember(Name="level", EmitDefaultValue=false)]
public int? Level { get; set; }
/// <summary>
/// Gets or Sets Bay
/// </summary>
[DataMember(Name="bay", EmitDefaultValue=false)]
public int? Bay { get; set; }
/// <summary>
/// Gets or Sets Number
/// </summary>
[DataMember(Name="number", EmitDefaultValue=false)]
public int? Number { get; set; }
/// <summary>
/// Gets or Sets Online
/// </summary>
[DataMember(Name="online", EmitDefaultValue=false)]
public bool? Online { get; set; }
/// <summary>
/// Gets or Sets PriorityCode
/// </summary>
[DataMember(Name="priorityCode", EmitDefaultValue=false)]
public int? PriorityCode { get; set; }
/// <summary>
/// Gets or Sets AllowItemMixing
/// </summary>
[DataMember(Name="allowItemMixing", EmitDefaultValue=false)]
public bool? AllowItemMixing { get; set; }
/// <summary>
/// Gets or Sets CreateDate
/// </summary>
[DataMember(Name="createDate", EmitDefaultValue=false)]
public DateTime? CreateDate { get; private set; }
/// <summary>
/// Gets or Sets ModifyDate
/// </summary>
[DataMember(Name="modifyDate", EmitDefaultValue=false)]
public DateTime? ModifyDate { get; private set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class Location {\n");
sb.Append(" Id: ").Append(Id).Append("\n");
sb.Append(" WarehouseId: ").Append(WarehouseId).Append("\n");
sb.Append(" BuildingId: ").Append(BuildingId).Append("\n");
sb.Append(" ZoneId: ").Append(ZoneId).Append("\n");
sb.Append(" AisleId: ").Append(AisleId).Append("\n");
sb.Append(" BillingTypeId: ").Append(BillingTypeId).Append("\n");
sb.Append(" BehaviorType: ").Append(BehaviorType).Append("\n");
sb.Append(" FootprintId: ").Append(FootprintId).Append("\n");
sb.Append(" AddressSchemeId: ").Append(AddressSchemeId).Append("\n");
sb.Append(" Origin: ").Append(Origin).Append("\n");
sb.Append(" Address: ").Append(Address).Append("\n");
sb.Append(" Level: ").Append(Level).Append("\n");
sb.Append(" Bay: ").Append(Bay).Append("\n");
sb.Append(" Number: ").Append(Number).Append("\n");
sb.Append(" Online: ").Append(Online).Append("\n");
sb.Append(" PriorityCode: ").Append(PriorityCode).Append("\n");
sb.Append(" AllowItemMixing: ").Append(AllowItemMixing).Append("\n");
sb.Append(" CreateDate: ").Append(CreateDate).Append("\n");
sb.Append(" ModifyDate: ").Append(ModifyDate).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
// credit: http://stackoverflow.com/a/10454552/677735
return this.Equals(obj as Location);
}
/// <summary>
/// Returns true if Location instances are equal
/// </summary>
/// <param name="other">Instance of Location to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(Location other)
{
// credit: http://stackoverflow.com/a/10454552/677735
if (other == null)
return false;
return
(
this.Id == other.Id ||
this.Id != null &&
this.Id.Equals(other.Id)
) &&
(
this.WarehouseId == other.WarehouseId ||
this.WarehouseId != null &&
this.WarehouseId.Equals(other.WarehouseId)
) &&
(
this.BuildingId == other.BuildingId ||
this.BuildingId != null &&
this.BuildingId.Equals(other.BuildingId)
) &&
(
this.ZoneId == other.ZoneId ||
this.ZoneId != null &&
this.ZoneId.Equals(other.ZoneId)
) &&
(
this.AisleId == other.AisleId ||
this.AisleId != null &&
this.AisleId.Equals(other.AisleId)
) &&
(
this.BillingTypeId == other.BillingTypeId ||
this.BillingTypeId != null &&
this.BillingTypeId.Equals(other.BillingTypeId)
) &&
(
this.BehaviorType == other.BehaviorType ||
this.BehaviorType != null &&
this.BehaviorType.Equals(other.BehaviorType)
) &&
(
this.FootprintId == other.FootprintId ||
this.FootprintId != null &&
this.FootprintId.Equals(other.FootprintId)
) &&
(
this.AddressSchemeId == other.AddressSchemeId ||
this.AddressSchemeId != null &&
this.AddressSchemeId.Equals(other.AddressSchemeId)
) &&
(
this.Origin == other.Origin ||
this.Origin != null &&
this.Origin.Equals(other.Origin)
) &&
(
this.Address == other.Address ||
this.Address != null &&
this.Address.Equals(other.Address)
) &&
(
this.Level == other.Level ||
this.Level != null &&
this.Level.Equals(other.Level)
) &&
(
this.Bay == other.Bay ||
this.Bay != null &&
this.Bay.Equals(other.Bay)
) &&
(
this.Number == other.Number ||
this.Number != null &&
this.Number.Equals(other.Number)
) &&
(
this.Online == other.Online ||
this.Online != null &&
this.Online.Equals(other.Online)
) &&
(
this.PriorityCode == other.PriorityCode ||
this.PriorityCode != null &&
this.PriorityCode.Equals(other.PriorityCode)
) &&
(
this.AllowItemMixing == other.AllowItemMixing ||
this.AllowItemMixing != null &&
this.AllowItemMixing.Equals(other.AllowItemMixing)
) &&
(
this.CreateDate == other.CreateDate ||
this.CreateDate != null &&
this.CreateDate.Equals(other.CreateDate)
) &&
(
this.ModifyDate == other.ModifyDate ||
this.ModifyDate != null &&
this.ModifyDate.Equals(other.ModifyDate)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
    // credit: http://stackoverflow.com/a/263416/677735
    unchecked // Overflow is fine, just wrap
    {
        int hash = 41;
        // Fold each non-null property into the hash in a fixed order. This is
        // equivalent to the hand-unrolled "hash = hash * 59 + x.GetHashCode()"
        // sequence: boxing a value does not change its GetHashCode result, and
        // the array preserves the original property order.
        object[] components =
        {
            this.Id, this.WarehouseId, this.BuildingId, this.ZoneId,
            this.AisleId, this.BillingTypeId, this.BehaviorType,
            this.FootprintId, this.AddressSchemeId, this.Origin,
            this.Address, this.Level, this.Bay, this.Number,
            this.Online, this.PriorityCode, this.AllowItemMixing,
            this.CreateDate, this.ModifyDate
        };
        foreach (object component in components)
        {
            if (component != null)
                hash = hash * 59 + component.GetHashCode();
        }
        return hash;
    }
}
}
}
| |
// Copyright (c) Lex Li. All rights reserved.
//
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace JexusManager.Features.FastCgi
{
using System.ComponentModel;
using Microsoft.Web.Administration;
/// <summary>
/// Wraps a FastCGI application <see cref="ConfigurationElement"/> so it can be
/// edited through a property grid and written back via <see cref="Apply"/>.
/// </summary>
internal class FastCgiItem : IItem<FastCgiItem>
{
    public FastCgiItem(ConfigurationElement element)
    {
        Element = element;
        // BUGFIX: the inherited flag was previously misspelled "Inhertied",
        // which leaked into the UI's entry-type column.
        Flag = element == null || element.IsLocallyStored ? "Local" : "Inherited";
        EnvironmentVariables = new EnvironmentVariablesCollection();
        AdvancedSettings = new AdvancedSettings();
        if (element == null)
        {
            // New (unsaved) item: seed UI defaults matching the IIS schema.
            // NOTE(review): ErrorMode and SignalBeforeTerminateSeconds are not
            // initialized here, so they fall back to 0 — confirm that matches
            // the intended schema defaults (the [DefaultValue] attribute on
            // SignalBeforeTerminateSeconds says 300).
            Path = Arguments = MonitorChangesTo = string.Empty;
            MaxInstances = 4U;
            InstanceMaxRequests = 200U;
            ActivityTimeout = 30U;
            IdleTimeout = 300U;
            QueueLength = 1000U;
            RapidFailsPerMinute = 10U;
            RequestTimeout = 90U;
            return;
        }
        Reset();
    }

    /// <summary>
    /// Reloads every property from the underlying configuration element,
    /// discarding unsaved edits.
    /// </summary>
    public void Reset()
    {
        Path = (string)Element["fullPath"];
        Arguments = (string)Element["arguments"];
        MonitorChangesTo = (string)Element["monitorChangesTo"];
        ErrorMode = (ErrorMode)Element["stderrMode"];
        MaxInstances = (uint)Element["maxInstances"];
        IdleTimeout = (uint)Element["idleTimeout"];
        ActivityTimeout = (uint)Element["activityTimeout"];
        RequestTimeout = (uint)Element["requestTimeout"];
        InstanceMaxRequests = (uint)Element["instanceMaxRequests"];
        SignalBeforeTerminateSeconds = (uint)Element["signalBeforeTerminateSeconds"];
        AdvancedSettings.Protocol = (Protocol)Element["protocol"];
        QueueLength = (uint)Element["queueLength"];
        AdvancedSettings.FlushNamedPipe = (bool)Element["flushNamedPipe"];
        RapidFailsPerMinute = (uint)Element["rapidFailsPerMinute"];
        foreach (ConfigurationElement child in Element.GetCollection("environmentVariables"))
        {
            EnvironmentVariables.Add(
                new EnvironmentVariables { Name = (string)child["name"], Value = (string)child["value"] });
        }
    }

    [Browsable(false)]
    public string Arguments { get; set; }

    [Browsable(false)]
    public string Path { get; set; }

    [Browsable(false)]
    public ConfigurationElement Element { get; set; }

    // "Local" when the element is defined at this configuration level,
    // "Inherited" when it comes from a parent level.
    [Browsable(false)]
    public string Flag { get; set; }

    public bool Equals(FastCgiItem other)
    {
        // all properties
        return Match(other);
    }

    /// <summary>
    /// Writes every property back into the configuration element, replacing
    /// the environment-variables collection wholesale.
    /// </summary>
    public void Apply()
    {
        Element["fullPath"] = Path;
        Element["arguments"] = Arguments;
        Element["monitorChangesTo"] = MonitorChangesTo;
        Element["stderrMode"] = ErrorMode;
        Element["maxInstances"] = MaxInstances;
        Element["idleTimeout"] = IdleTimeout;
        Element["activityTimeout"] = ActivityTimeout;
        Element["requestTimeout"] = RequestTimeout;
        Element["instanceMaxRequests"] = InstanceMaxRequests;
        Element["signalBeforeTerminateSeconds"] = SignalBeforeTerminateSeconds;
        Element["protocol"] = AdvancedSettings.Protocol;
        Element["queueLength"] = QueueLength;
        Element["flushNamedPipe"] = AdvancedSettings.FlushNamedPipe;
        Element["rapidFailsPerMinute"] = RapidFailsPerMinute;
        var collection = Element.GetCollection("environmentVariables");
        collection.Clear();
        foreach (EnvironmentVariables item in EnvironmentVariables)
        {
            var newElement = collection.CreateElement();
            newElement["name"] = item.Name;
            newElement["value"] = item.Value;
            collection.Add(newElement);
        }
    }

    public bool Match(FastCgiItem other)
    {
        // match combined keys.
        return other != null && other.Arguments == Arguments && other.Path == Path;
    }

    [Browsable(true)]
    [Category("General")]
    [Description("Specifies optional environment variables that will be set in the FastCGI executable.")]
    [DisplayName("Environment Variables")]
    [Editor(typeof(EnvironmentVariablesCollectionEditor),
        typeof(System.Drawing.Design.UITypeEditor))]
    public EnvironmentVariablesCollection EnvironmentVariables { get; set; }

    [Browsable(true)]
    [Category("General")]
    [Description("Specifies the number of requests a FastCGI process for this application is allowed to handle.")]
    [DisplayName("Instance MaxRequests")]
    [DefaultValue(200U)]
    public uint InstanceMaxRequests { get; set; }

    [Browsable(true)]
    [Category("General")]
    [Description("Specifies the maximum number of FastCGI process that are allowed in the application's process pool.")]
    [DisplayName("Max Instances")]
    [DefaultValue(4U)]
    public uint MaxInstances { get; set; }

    [Browsable(true)]
    [Category("General")]
    [Description("Specifies path to a file changes to which will trigger recycle of FastCGI processes.")]
    [DisplayName("Monitor changes to file")]
    [DefaultValue("")]
    [Editor(typeof(System.Windows.Forms.Design.FileNameEditor), typeof(System.Drawing.Design.UITypeEditor))]
    public string MonitorChangesTo { get; set; }

    [Browsable(true)]
    [Category("General")]
    [Description("Specifies how FastCGI module should behave when FastCGI process sends text on standard error stream.")]
    [DisplayName("Standard error mode")]
    [TypeConverter(typeof(DescriptionConverter))]
    public ErrorMode ErrorMode { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [Description("Specifies the time, in seconds, that a FastCGI process for this application is allowed to run without communicating with IIS.")]
    [DisplayName("Activity Timeout")]
    [DefaultValue(30U)]
    public uint ActivityTimeout { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [DisplayName("Advanced Settings")]
    public AdvancedSettings AdvancedSettings { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [Description("Specifies the time, in seconds, that a FastCGI process for this application is allowed to remain idle.")]
    [DisplayName("Idle Timeout")]
    [DefaultValue(300U)]
    public uint IdleTimeout { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [Description("Maximum number of requests that are permitted into FastCGI handler queue.")]
    [DisplayName("Queue Length")]
    [DefaultValue(1000U)]
    public uint QueueLength { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [Description("Specifies the number of FastCGI process failures allowed in a single minute before the FastCGI handler takes it off line.")]
    [DisplayName("Rapid Fails PerMinute")]
    [DefaultValue(10U)]
    public uint RapidFailsPerMinute { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [Description("Specifies the maximum allowed time, in seconds, for request processing.")]
    [DisplayName("Request Timeout")]
    [DefaultValue(90U)]
    public uint RequestTimeout { get; set; }

    [Browsable(true)]
    [Category("Process Model")]
    [Description("Specifies the amount of time, in seconds, that IIS will wait after IIS signals a FastCGI application that it needs to shut down.")]
    [DisplayName("Signal Before Terminate")]
    [DefaultValue(300U)]
    public uint SignalBeforeTerminateSeconds { get; set; }
}
/// <summary>
/// Nested "advanced" FastCGI options shown as an expandable node in the
/// property grid.
/// </summary>
[TypeConverter(typeof(ExpandableObjectConverter))]
internal class AdvancedSettings
{
    [Browsable(true)]
    [Description("Specifies protocol to be used to communicate with FastCGI process.")]
    [DisplayName("Protocol")]
    [TypeConverter(typeof(DescriptionConverter))]
    public Protocol Protocol { get; set; }

    [Browsable(true)]
    [Description("Specifies whether or not the named pipe between FastCGI processes for this application is flushed before shutting down the application.")]
    [DisplayName("FlushNamedpipe")]
    public bool FlushNamedPipe { get; set; }

    // Blank so the property grid shows no summary text next to the node.
    public override string ToString() => string.Empty;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Xml.Serialization
{
using System.Configuration;
using System.Reflection;
using System.Reflection.Emit;
using System.Collections;
using System.IO;
using System;
using System.Text;
using System.Xml;
using System.Threading;
using System.Security;
using System.Xml.Serialization.Configuration;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.Versioning;
using System.Diagnostics.CodeAnalysis;
using System.Collections.Generic;
using System.Xml.Extensions;
using System.Linq;
using System.Xml.Serialization;
/// <summary>
/// Holds a generated (or pre-generated) XmlSerializer assembly together with
/// the reflected read/write methods for a set of <see cref="XmlMapping"/>s,
/// and dispatches serialization/deserialization calls into it.
/// </summary>
internal class TempAssembly
{
    // Namespace that all generated serializer types are emitted into.
    internal const string GeneratedAssemblyNamespace = "Microsoft.Xml.Serialization.GeneratedAssembly";
    private Assembly _assembly = null;
    private XmlSerializerImplementation _contract = null;
    // Lazily populated name lookups from Contract.WriteMethods / ReadMethods.
    private IDictionary _writerMethods;
    private IDictionary _readerMethods;
    // Per-mapping-key method/metadata cache; see InitAssemblyMethods.
    private TempMethodDictionary _methods;
    private Hashtable _assemblies = new Hashtable();

    // Cached reflection info for one XmlMapping: the resolved reader/writer
    // MethodInfos plus the mapping's element name/namespace and SOAP flag.
    internal class TempMethod
    {
        internal MethodInfo writeMethod;
        internal MethodInfo readMethod;
        internal string name;
        internal string ns;
        internal bool isSoap;
        internal string methodKey;
    }

    private TempAssembly()
    {
    }

    // Wraps an already-loaded (pre-generated) serializer assembly.
    internal TempAssembly(XmlMapping[] xmlMappings, Assembly assembly, XmlSerializerImplementation contract)
    {
        _assembly = assembly;
        InitAssemblyMethods(xmlMappings);
        _contract = contract;
    }

    // Generates a serializer assembly on the fly with Reflection.Emit.
    // SOAP mappings (and the legacy path) would require CSharp compilation,
    // which is not supported on this platform.
    internal TempAssembly(XmlMapping[] xmlMappings, Type[] types, string defaultNamespace, string location)
    {
#if !FEATURE_SERIALIZATION_UAPAOT
        bool containsSoapMapping = false;
        for (int i = 0; i < xmlMappings.Length; i++)
        {
            xmlMappings[i].CheckShallow();
            if (xmlMappings[i].IsSoap)
            {
                containsSoapMapping = true;
            }
        }
        // We will make best effort to use RefEmit for assembly generation
        bool fallbackToCSharpAssemblyGeneration = false;
        if (!containsSoapMapping && !TempAssembly.UseLegacySerializerGeneration)
        {
            try
            {
                _assembly = GenerateRefEmitAssembly(xmlMappings, types, defaultNamespace);
            }
            // Only catch and handle known failures with RefEmit
            catch (CodeGeneratorConversionException)
            {
                fallbackToCSharpAssemblyGeneration = true;
            }
            // Add other known exceptions here...
            //
        }
        else
        {
            fallbackToCSharpAssemblyGeneration = true;
        }
        if (fallbackToCSharpAssemblyGeneration)
        {
            throw new PlatformNotSupportedException("Compiling JScript/CSharp scripts is not supported");
        }
#endif
#if DEBUG
        // use exception in the place of Debug.Assert to avoid throwing asserts from a server process such as aspnet_ewp.exe
        if (_assembly == null)
            throw new InvalidOperationException(SR.Format(SR.XmlInternalErrorDetails, "Failed to generate XmlSerializer assembly, but did not throw"));
#endif
        InitAssemblyMethods(xmlMappings);
    }

    // Always false here: the CSharp (CodeDom) generation path is never used.
    internal static bool UseLegacySerializerGeneration
    {
        get
        {
            return false;
        }
    }

    // Lazily instantiates the generated "XmlSerializerContract" type from the
    // serializer assembly.
    internal XmlSerializerImplementation Contract
    {
        get
        {
            if (_contract == null)
            {
                _contract = (XmlSerializerImplementation)Activator.CreateInstance(GetTypeFromAssembly(_assembly, "XmlSerializerContract"));
            }
            return _contract;
        }
    }

    // Builds the per-mapping-key TempMethod table. The actual MethodInfos are
    // resolved lazily in InvokeReader/InvokeWriter.
    internal void InitAssemblyMethods(XmlMapping[] xmlMappings)
    {
        _methods = new TempMethodDictionary();
        for (int i = 0; i < xmlMappings.Length; i++)
        {
            TempMethod method = new TempMethod();
            method.isSoap = xmlMappings[i].IsSoap;
            method.methodKey = xmlMappings[i].Key;
            XmlTypeMapping xmlTypeMapping = xmlMappings[i] as XmlTypeMapping;
            if (xmlTypeMapping != null)
            {
                method.name = xmlTypeMapping.ElementName;
                method.ns = xmlTypeMapping.Namespace;
            }
            _methods.Add(xmlMappings[i].Key, method);
        }
    }

    /// <devdoc>
    ///    <para>
    ///    Attempts to load pre-generated serialization assembly.
    ///    First check for the [XmlSerializerAssembly] attribute
    ///    </para>
    /// </devdoc>
    // SxS: This method does not take any resource name and does not expose any resources to the caller.
    // It's OK to suppress the SxS warning.
    internal static Assembly LoadGeneratedAssembly(Type type, string defaultNamespace, out XmlSerializerImplementation contract)
    {
        Assembly serializer = null;
        contract = null;
        string serializerName = null;
        // check to see if we loading explicit pre-generated assembly
        object[] attrs = type.GetCustomAttributes(typeof(System.Xml.Serialization.XmlSerializerAssemblyAttribute), false);
        if (attrs.Length == 0)
        {
            // Guess serializer name: if parent assembly signed use strong name
            AssemblyName name = type.Assembly.GetName();
            serializerName = Compiler.GetTempAssemblyName(name, defaultNamespace);
            // use strong name
            name.Name = serializerName;
            name.CodeBase = null;
            name.CultureInfo = CultureInfo.InvariantCulture;
            string serializerPath = null;
            try
            {
                // Probe next to the parent assembly first, then next to the
                // entry assembly.
                // NOTE(review): Assembly.GetEntryAssembly() can return null
                // (e.g. when hosted from native code) — confirm that case
                // cannot reach this path, otherwise this dereference throws.
                if (!string.IsNullOrEmpty(type.Assembly.Location))
                {
                    serializerPath = Path.Combine(Path.GetDirectoryName(type.Assembly.Location), serializerName + ".dll");
                }
                if ((string.IsNullOrEmpty(serializerPath) || !File.Exists(serializerPath)) && !string.IsNullOrEmpty(Assembly.GetEntryAssembly().Location))
                {
                    serializerPath = Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), serializerName + ".dll");
                }
                if (!string.IsNullOrEmpty(serializerPath))
                {
                    serializer = Assembly.LoadFile(serializerPath);
                }
            }
            catch (Exception e)
            {
                if (e is ThreadAbortException || e is StackOverflowException || e is OutOfMemoryException)
                {
                    throw;
                }
                byte[] token = name.GetPublicKeyToken();
                if (token != null && token.Length > 0)
                {
                    // the parent assembly was signed, so do not try to LoadWithPartialName
                    return null;
                }
            }
            if (serializer == null)
            {
                if (XmlSerializer.Mode == SerializationMode.PreGenOnly)
                {
                    throw new Exception(SR.Format(SR.FailLoadAssemblyUnderPregenMode, serializerName));
                }
                return null;
            }
#if !FEATURE_SERIALIZATION_UAPAOT
            // Reject a stale serializer generated against a different build of
            // the parent assembly or a different default namespace.
            if (!IsSerializerVersionMatch(serializer, type, defaultNamespace))
            {
                XmlSerializationEventSource.Log.XmlSerializerExpired(serializerName, type.FullName);
                return null;
            }
#endif
        }
        else
        {
            System.Xml.Serialization.XmlSerializerAssemblyAttribute assemblyAttribute = (System.Xml.Serialization.XmlSerializerAssemblyAttribute)attrs[0];
            if (assemblyAttribute.AssemblyName != null && assemblyAttribute.CodeBase != null)
                throw new InvalidOperationException(SR.Format(SR.XmlPregenInvalidXmlSerializerAssemblyAttribute, "AssemblyName", "CodeBase"));
            // found XmlSerializerAssemblyAttribute attribute, it should have all needed information to load the pre-generated serializer
            if (assemblyAttribute.AssemblyName != null)
            {
                serializerName = assemblyAttribute.AssemblyName;
#pragma warning disable 618
                serializer = Assembly.LoadWithPartialName(serializerName);
#pragma warning restore 618
            }
            else if (assemblyAttribute.CodeBase != null && assemblyAttribute.CodeBase.Length > 0)
            {
                serializerName = assemblyAttribute.CodeBase;
                serializer = Assembly.LoadFrom(serializerName);
            }
            else
            {
                serializerName = type.Assembly.FullName;
                serializer = type.Assembly;
            }
            if (serializer == null)
            {
                throw new FileNotFoundException(null, serializerName);
            }
        }
        Type contractType = GetTypeFromAssembly(serializer, "XmlSerializerContract");
        contract = (XmlSerializerImplementation)Activator.CreateInstance(contractType);
        if (contract.CanSerialize(type))
            return serializer;
        return null;
    }

#if !FEATURE_SERIALIZATION_UAPAOT
    // True when the candidate serializer assembly carries an
    // XmlSerializerVersionAttribute whose ParentAssemblyId and Namespace match
    // the parent assembly/namespace it would be used for.
    private static bool IsSerializerVersionMatch(Assembly serializer, Type type, string defaultNamespace)
    {
        if (serializer == null)
            return false;
        object[] attrs = serializer.GetCustomAttributes(typeof(XmlSerializerVersionAttribute), false);
        if (attrs.Length != 1)
            return false;
        XmlSerializerVersionAttribute assemblyInfo = (XmlSerializerVersionAttribute)attrs[0];
        if (assemblyInfo.ParentAssemblyId == GenerateAssemblyId(type) && assemblyInfo.Namespace == defaultNamespace)
            return true;
        return false;
    }

    // Builds a stable identifier for the parent assembly: the sorted module
    // version IDs joined with "," (note: the string ends with a trailing
    // comma, matching what IsSerializerVersionMatch compares against).
    private static string GenerateAssemblyId(Type type)
    {
        Module[] modules = type.Assembly.GetModules();
        var list = new ArrayList();
        for (int i = 0; i < modules.Length; i++)
        {
            list.Add(modules[i].ModuleVersionId.ToString());
        }
        list.Sort();
        var sb = new StringBuilder();
        for (int i = 0; i < list.Count; i++)
        {
            sb.Append(list[i].ToString());
            sb.Append(",");
        }
        return sb.ToString();
    }

    // Emits the C# source of a serializer assembly for the given mappings into
    // <paramref name="stream"/> (UTF-8 with BOM). Also fills
    // <paramref name="assemblies"/> with the non-GAC assemblies referenced by
    // the mapped types. Returns true on success.
    internal static bool GenerateSerializerToStream(XmlMapping[] xmlMappings, Type[] types, string defaultNamespace, Assembly assembly, Hashtable assemblies, Stream stream)
    {
        var compiler = new Compiler();
        try
        {
            var scopeTable = new Hashtable();
            // De-duplicate mappings by their TypeScope.
            foreach (XmlMapping mapping in xmlMappings)
                scopeTable[mapping.Scope] = mapping;
            var scopes = new TypeScope[scopeTable.Keys.Count];
            scopeTable.Keys.CopyTo(scopes, 0);
            assemblies.Clear();
            var importedTypes = new Hashtable();
            foreach (TypeScope scope in scopes)
            {
                foreach (Type t in scope.Types)
                {
                    compiler.AddImport(t, importedTypes);
                    Assembly a = t.Assembly;
                    string name = a.FullName;
                    if (assemblies[name] != null)
                    {
                        continue;
                    }
                    if (!a.GlobalAssemblyCache)
                    {
                        assemblies[name] = a;
                    }
                }
            }
            for (int i = 0; i < types.Length; i++)
            {
                compiler.AddImport(types[i], importedTypes);
            }
            compiler.AddImport(typeof(object).Assembly);
            compiler.AddImport(typeof(System.Xml.Serialization.XmlSerializer).Assembly);
            var writer = new IndentedWriter(compiler.Source, false);
            writer.WriteLine("[assembly:System.Security.AllowPartiallyTrustedCallers()]");
            writer.WriteLine("[assembly:System.Security.SecurityTransparent()]");
            writer.WriteLine("[assembly:System.Security.SecurityRules(System.Security.SecurityRuleSet.Level1)]");
            if (assembly != null && types.Length > 0)
            {
                // Stamp the generated source with a version attribute so a
                // future load can validate it (see IsSerializerVersionMatch).
                for (int i = 0; i < types.Length; i++)
                {
                    Type type = types[i];
                    if (type == null)
                    {
                        continue;
                    }
                    if (DynamicAssemblies.IsTypeDynamic(type))
                    {
                        throw new InvalidOperationException(SR.Format(SR.XmlPregenTypeDynamic, types[i].FullName));
                    }
                }
                writer.Write("[assembly:");
                writer.Write(typeof(XmlSerializerVersionAttribute).FullName);
                writer.Write("(");
                writer.Write("ParentAssemblyId=");
                ReflectionAwareCodeGen.WriteQuotedCSharpString(writer, GenerateAssemblyId(types[0]));
                writer.Write(", Version=");
                ReflectionAwareCodeGen.WriteQuotedCSharpString(writer, ThisAssembly.Version);
                if (defaultNamespace != null)
                {
                    writer.Write(", Namespace=");
                    ReflectionAwareCodeGen.WriteQuotedCSharpString(writer, defaultNamespace);
                }
                writer.WriteLine(")]");
            }
            var classes = new CodeIdentifiers();
            classes.AddUnique("XmlSerializationWriter", "XmlSerializationWriter");
            classes.AddUnique("XmlSerializationReader", "XmlSerializationReader");
            // Suffix generated class names with the (single) root type's name.
            string suffix = null;
            if (types != null && types.Length == 1 && types[0] != null)
            {
                suffix = CodeIdentifier.MakeValid(types[0].Name);
                if (types[0].IsArray)
                {
                    suffix += "Array";
                }
            }
            writer.WriteLine("namespace " + GeneratedAssemblyNamespace + " {");
            writer.Indent++;
            writer.WriteLine();
            // Generate the writer class plus one element-write method per mapping.
            string writerClass = "XmlSerializationWriter" + suffix;
            writerClass = classes.AddUnique(writerClass, writerClass);
            var writerCodeGen = new XmlSerializationWriterCodeGen(writer, scopes, "public", writerClass);
            writerCodeGen.GenerateBegin();
            string[] writeMethodNames = new string[xmlMappings.Length];
            for (int i = 0; i < xmlMappings.Length; i++)
            {
                writeMethodNames[i] = writerCodeGen.GenerateElement(xmlMappings[i]);
            }
            writerCodeGen.GenerateEnd();
            writer.WriteLine();
            // Generate the reader class plus one element-read method per mapping.
            string readerClass = "XmlSerializationReader" + suffix;
            readerClass = classes.AddUnique(readerClass, readerClass);
            var readerCodeGen = new XmlSerializationReaderCodeGen(writer, scopes, "public", readerClass);
            readerCodeGen.GenerateBegin();
            string[] readMethodNames = new string[xmlMappings.Length];
            for (int i = 0; i < xmlMappings.Length; i++)
            {
                readMethodNames[i] = readerCodeGen.GenerateElement(xmlMappings[i]);
            }
            readerCodeGen.GenerateEnd(readMethodNames, xmlMappings, types);
            // One typed serializer per distinct mapping key, plus the contract.
            string baseSerializer = readerCodeGen.GenerateBaseSerializer("XmlSerializer1", readerClass, writerClass, classes);
            var serializers = new Hashtable();
            for (int i = 0; i < xmlMappings.Length; i++)
            {
                if (serializers[xmlMappings[i].Key] == null)
                {
                    serializers[xmlMappings[i].Key] = readerCodeGen.GenerateTypedSerializer(readMethodNames[i], writeMethodNames[i], xmlMappings[i], classes, baseSerializer, readerClass, writerClass);
                }
            }
            readerCodeGen.GenerateSerializerContract("XmlSerializerContract", xmlMappings, types, readerClass, readMethodNames, writerClass, writeMethodNames, serializers);
            writer.Indent--;
            writer.WriteLine("}");
            string codecontent = compiler.Source.ToString();
            byte[] info = new UTF8Encoding(true).GetBytes(codecontent);
            stream.Write(info, 0, info.Length);
            stream.Flush();
            return true;
        }
        finally
        {
            compiler.Close();
        }
    }

    // Reflection.Emit counterpart of GenerateSerializerToStream: emits the
    // reader/writer/serializer/contract types directly into a dynamic assembly
    // and returns it.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2106:SecureAsserts", Justification = "It is safe because the serialization assembly is generated by the framework code, not by the user.")]
    internal static Assembly GenerateRefEmitAssembly(XmlMapping[] xmlMappings, Type[] types, string defaultNamespace)
    {
        var scopeTable = new Dictionary<TypeScope, XmlMapping>();
        foreach (XmlMapping mapping in xmlMappings)
            scopeTable[mapping.Scope] = mapping;
        TypeScope[] scopes = new TypeScope[scopeTable.Keys.Count];
        scopeTable.Keys.CopyTo(scopes, 0);
        string assemblyName = "Microsoft.GeneratedCode";
        AssemblyBuilder assemblyBuilder = CodeGenerator.CreateAssemblyBuilder(assemblyName);
        // Add AssemblyVersion attribute to match parent assembly version
        if (types != null && types.Length > 0 && types[0] != null)
        {
            ConstructorInfo AssemblyVersionAttribute_ctor = typeof(AssemblyVersionAttribute).GetConstructor(
                new Type[] { typeof(string) }
                );
            string assemblyVersion = types[0].Assembly.GetName().Version.ToString();
            assemblyBuilder.SetCustomAttribute(new CustomAttributeBuilder(AssemblyVersionAttribute_ctor, new object[] { assemblyVersion }));
        }
        CodeIdentifiers classes = new CodeIdentifiers();
        classes.AddUnique("XmlSerializationWriter", "XmlSerializationWriter");
        classes.AddUnique("XmlSerializationReader", "XmlSerializationReader");
        // Suffix generated class names with the (single) root type's name.
        string suffix = null;
        if (types != null && types.Length == 1 && types[0] != null)
        {
            suffix = CodeIdentifier.MakeValid(types[0].Name);
            if (types[0].IsArray)
            {
                suffix += "Array";
            }
        }
        ModuleBuilder moduleBuilder = CodeGenerator.CreateModuleBuilder(assemblyBuilder, assemblyName);
        // Emit the writer type first; the reader generation needs it.
        string writerClass = "XmlSerializationWriter" + suffix;
        writerClass = classes.AddUnique(writerClass, writerClass);
        XmlSerializationWriterILGen writerCodeGen = new XmlSerializationWriterILGen(scopes, "public", writerClass);
        writerCodeGen.ModuleBuilder = moduleBuilder;
        writerCodeGen.GenerateBegin();
        string[] writeMethodNames = new string[xmlMappings.Length];
        for (int i = 0; i < xmlMappings.Length; i++)
        {
            writeMethodNames[i] = writerCodeGen.GenerateElement(xmlMappings[i]);
        }
        Type writerType = writerCodeGen.GenerateEnd();
        string readerClass = "XmlSerializationReader" + suffix;
        readerClass = classes.AddUnique(readerClass, readerClass);
        XmlSerializationReaderILGen readerCodeGen = new XmlSerializationReaderILGen(scopes, "public", readerClass);
        readerCodeGen.ModuleBuilder = moduleBuilder;
        readerCodeGen.CreatedTypes.Add(writerType.Name, writerType);
        readerCodeGen.GenerateBegin();
        string[] readMethodNames = new string[xmlMappings.Length];
        for (int i = 0; i < xmlMappings.Length; i++)
        {
            readMethodNames[i] = readerCodeGen.GenerateElement(xmlMappings[i]);
        }
        readerCodeGen.GenerateEnd(readMethodNames, xmlMappings, types);
        // One typed serializer per distinct mapping key, plus the contract.
        string baseSerializer = readerCodeGen.GenerateBaseSerializer("XmlSerializer1", readerClass, writerClass, classes);
        var serializers = new Dictionary<string, string>();
        for (int i = 0; i < xmlMappings.Length; i++)
        {
            if (!serializers.ContainsKey(xmlMappings[i].Key))
            {
                serializers[xmlMappings[i].Key] = readerCodeGen.GenerateTypedSerializer(readMethodNames[i], writeMethodNames[i], xmlMappings[i], classes, baseSerializer, readerClass, writerClass);
            }
        }
        readerCodeGen.GenerateSerializerContract("XmlSerializerContract", xmlMappings, types, readerClass, readMethodNames, writerClass, writeMethodNames, serializers);
        return writerType.Assembly;
    }
#endif

    // Resolves a public method by name on the generated reader/writer type;
    // throws MissingMethodException (never returns null) when absent.
    private static MethodInfo GetMethodFromType(Type type, string methodName)
    {
        MethodInfo method = type.GetMethod(methodName);
        if (method != null)
            return method;
        // Not support pregen. Workaround SecurityCritical required for assembly.CodeBase api.
        MissingMethodException missingMethod = new MissingMethodException(type.FullName + "::" + methodName);
        throw missingMethod;
    }

    // Looks up a generated type (e.g. "XmlSerializerContract") inside
    // GeneratedAssemblyNamespace; throws if the assembly does not contain it.
    internal static Type GetTypeFromAssembly(Assembly assembly, string typeName)
    {
        typeName = GeneratedAssemblyNamespace + "." + typeName;
        Type type = assembly.GetType(typeName);
        if (type == null)
            throw new InvalidOperationException(SR.Format(SR.XmlMissingType, typeName, assembly.FullName));
        return type;
    }

    // True when the reader is positioned on the element this mapping expects
    // (or the mapping accepts any element).
    internal bool CanRead(XmlMapping mapping, XmlReader xmlReader)
    {
        if (mapping == null)
            return false;
        if (mapping.Accessor.Any)
        {
            return true;
        }
        TempMethod method = _methods[mapping.Key];
        return xmlReader.IsStartElement(method.name, method.ns);
    }

    // Validates the caller-supplied encodingStyle against the mapping's SOAP
    // flag, and defaults SOAP mappings to Soap.Encoding when none was given.
    private string ValidateEncodingStyle(string encodingStyle, string methodKey)
    {
        if (encodingStyle != null && encodingStyle.Length > 0)
        {
            if (_methods[methodKey].isSoap)
            {
                if (encodingStyle != Soap.Encoding && encodingStyle != Soap12.Encoding)
                {
                    throw new InvalidOperationException(SR.Format(SR.XmlInvalidEncoding3, encodingStyle, Soap.Encoding, Soap12.Encoding));
                }
            }
            else
            {
                throw new InvalidOperationException(SR.Format(SR.XmlInvalidEncodingNotEncoded1, encodingStyle));
            }
        }
        else
        {
            if (_methods[methodKey].isSoap)
            {
                encodingStyle = Soap.Encoding;
            }
        }
        return encodingStyle;
    }

    // Deserializes one object: resolves (and caches) the generated read method
    // for the mapping, then invokes it on a fresh reader from the contract.
    internal object InvokeReader(XmlMapping mapping, XmlReader xmlReader, XmlDeserializationEvents events, string encodingStyle)
    {
        XmlSerializationReader reader = null;
        try
        {
            encodingStyle = ValidateEncodingStyle(encodingStyle, mapping.Key);
            reader = Contract.Reader;
            reader.Init(xmlReader, events, encodingStyle, this);
            if (_methods[mapping.Key].readMethod == null)
            {
                if (_readerMethods == null)
                {
                    _readerMethods = Contract.ReadMethods;
                }
                string methodName = (string)_readerMethods[mapping.Key];
                if (methodName == null)
                {
                    throw new InvalidOperationException(SR.Format(SR.XmlNotSerializable, mapping.Accessor.Name));
                }
                _methods[mapping.Key].readMethod = GetMethodFromType(reader.GetType(), methodName);
            }
            return _methods[mapping.Key].readMethod.Invoke(reader, Array.Empty<object>());
        }
        catch (SecurityException e)
        {
            throw new InvalidOperationException(SR.XmlNoPartialTrust, e);
        }
        finally
        {
            if (reader != null)
                reader.Dispose();
        }
    }

    // Serializes one object: resolves (and caches) the generated write method
    // for the mapping, then invokes it on a fresh writer from the contract.
    internal void InvokeWriter(XmlMapping mapping, XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces, string encodingStyle, string id)
    {
        XmlSerializationWriter writer = null;
        try
        {
            encodingStyle = ValidateEncodingStyle(encodingStyle, mapping.Key);
            writer = Contract.Writer;
            writer.Init(xmlWriter, namespaces, encodingStyle, id, this);
            if (_methods[mapping.Key].writeMethod == null)
            {
                if (_writerMethods == null)
                {
                    _writerMethods = Contract.WriteMethods;
                }
                string methodName = (string)_writerMethods[mapping.Key];
                if (methodName == null)
                {
                    throw new InvalidOperationException(SR.Format(SR.XmlNotSerializable, mapping.Accessor.Name));
                }
                _methods[mapping.Key].writeMethod = GetMethodFromType(writer.GetType(), methodName);
            }
            _methods[mapping.Key].writeMethod.Invoke(writer, new object[] { o });
        }
        catch (SecurityException e)
        {
            throw new InvalidOperationException(SR.XmlNoPartialTrust, e);
        }
        finally
        {
            if (writer != null)
                writer.Dispose();
        }
    }

    // Maps an XmlMapping.Key to its cached TempMethod entry.
    internal sealed class TempMethodDictionary : Dictionary<string, TempMethod>
    {
    }
}
/// <summary>
/// Cache key for <see cref="TempAssemblyCache"/>: a (namespace, type) pair.
/// The type component is compared by reference, the namespace by value.
/// </summary>
internal class TempAssemblyCacheKey
{
    private string _ns;
    private object _type;

    internal TempAssemblyCacheKey(string ns, object type)
    {
        _ns = ns;
        _type = type;
    }

    public override bool Equals(object o)
    {
        var other = o as TempAssemblyCacheKey;
        return other != null && other._type == _type && other._ns == _ns;
    }

    public override int GetHashCode()
    {
        // XOR of the component hashes; null components contribute 0.
        int nsHash = _ns == null ? 0 : _ns.GetHashCode();
        int typeHash = _type == null ? 0 : _type.GetHashCode();
        return nsHash ^ typeHash;
    }
}
/// <summary>
/// Copy-on-write cache of generated serializer assemblies, keyed by
/// (default namespace, type). Reads are lock-free; writers clone the
/// dictionary under a lock and swap it in.
/// </summary>
internal class TempAssemblyCache
{
    private Dictionary<TempAssemblyCacheKey, TempAssembly> _cache = new Dictionary<TempAssemblyCacheKey, TempAssembly>();

    /// <summary>Returns the cached assembly for the key, or null when absent.</summary>
    internal TempAssembly this[string ns, object o]
    {
        get
        {
            TempAssembly found;
            return _cache.TryGetValue(new TempAssemblyCacheKey(ns, o), out found) ? found : null;
        }
    }

    /// <summary>Adds or replaces the cache entry; no-op when already current.</summary>
    internal void Add(string ns, object o, TempAssembly assembly)
    {
        var key = new TempAssemblyCacheKey(ns, o);
        lock (this)
        {
            TempAssembly existing;
            if (_cache.TryGetValue(key, out existing) && existing == assembly)
                return;
            // Clone-then-swap keeps concurrent lock-free readers safe.
            var updated = new Dictionary<TempAssemblyCacheKey, TempAssembly>(_cache);
            updated[key] = assembly;
            _cache = updated;
        }
    }
}
// Version constants stamped into generated serializer assemblies
// (see the XmlSerializerVersionAttribute emitted by GenerateSerializerToStream).
internal static class ThisAssembly
{
    internal const string Version = "1.0.0.0";
    internal const string InformationalVersion = "1.0.0.0";
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Xml;
using System.Linq;
using System.Net.Http;
using Microsoft.Build.Framework;
using ThreadingTask = System.Threading.Tasks.Task;
namespace Microsoft.DotNet.Build.CloudTestTasks
{
public class UploadToAzure : AzureConnectionStringBuildTask, ICancelableTask
{
// Cancellation plumbing shared by Execute/ExecuteAsync and Cancel().
// NOTE(review): these are static, so Cancel() on one task instance cancels
// every concurrently running UploadToAzure task in the process — confirm
// this is intended.
private static readonly CancellationTokenSource TokenSource = new CancellationTokenSource();
private static readonly CancellationToken CancellationToken = TokenSource.Token;

/// <summary>
/// The name of the container to access. The specified name must be in the correct format, see the
/// following page for more info. https://msdn.microsoft.com/en-us/library/azure/dd135715.aspx
/// </summary>
[Required]
public string ContainerName { get; set; }

/// <summary>
/// An item group of files to upload. Each item must have metadata RelativeBlobPath
/// that specifies the path relative to ContainerName where the item will be uploaded.
/// </summary>
[Required]
public ITaskItem[] Items { get; set; }

/// <summary>
/// Indicates if the destination blob should be overwritten if it already exists. The default is false.
/// </summary>
public bool Overwrite { get; set; } = false;

/// <summary>
/// Enables idempotency when Overwrite is false.
///
/// false: (default) Attempting to upload an item that already exists fails.
///
/// true: When an item already exists, download the existing blob to check if it's
/// byte-for-byte identical to the one being uploaded. If so, pass. If not, fail.
/// </summary>
public bool PassIfExistingItemIdentical { get; set; }

/// <summary>
/// Specifies the maximum number of clients to concurrently upload blobs to azure
/// </summary>
public int MaxClients { get; set; } = 8;

/// <summary>
/// Per-blob upload timeout, in minutes.
/// </summary>
public int UploadTimeoutInMinutes { get; set; } = 5;
/// <summary>Signals cancellation to all in-flight uploads.</summary>
public void Cancel() => TokenSource.Cancel();
/// <summary>Synchronous MSBuild entry point; bridges to the async implementation.</summary>
public override bool Execute() => ExecuteAsync(CancellationToken).GetAwaiter().GetResult();
/// <summary>
/// Uploads every item in <see cref="Items"/> to the configured Azure storage
/// container, throttled to at most <see cref="MaxClients"/> concurrent uploads.
/// </summary>
/// <param name="ct">Token used to abort the upload batch.</param>
/// <returns>true when no errors were logged; otherwise false.</returns>
public async Task<bool> ExecuteAsync(CancellationToken ct)
{
    ParseConnectionString();

    // ParseConnectionString logs an error when the connection string AND
    // AccountKey/AccountName are both provided; stop in that case.
    if (Log.HasLoggedErrors)
    {
        return false;
    }

    Log.LogMessage(
        MessageImportance.Normal,
        "Begin uploading blobs to Azure account {0} in container {1}.",
        AccountName,
        ContainerName);

    if (Items.Length == 0)
    {
        Log.LogError("No items were provided for upload.");
        return false;
    }

    // Enumerate the blobs already present so each upload can decide how to
    // treat an existing destination (overwrite / identical-check / fail).
    string listBlobsUrl = $"{AzureHelper.GetContainerRestUrl(AccountName, ContainerName)}?restype=container&comp=list";
    var existingBlobs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    try
    {
        using (HttpClient client = new HttpClient())
        {
            var listRequest = AzureHelper.RequestMessage("GET", listBlobsUrl, AccountName, AccountKey);
            Log.LogMessage(MessageImportance.Low, "Sending request to check whether Container blobs exist");
            using (HttpResponseMessage response = await AzureHelper.RequestWithRetry(Log, client, listRequest))
            {
                var listing = new XmlDocument();
                listing.LoadXml(await response.Content.ReadAsStringAsync());
                foreach (XmlNode blobNode in listing.DocumentElement.GetElementsByTagName("Blob"))
                {
                    existingBlobs.Add(blobNode["Name"].InnerText);
                }
                Log.LogMessage(MessageImportance.Low, "Received response to check whether Container blobs exist");
            }
        }

        // Throttle concurrent uploads with a semaphore sized to MaxClients.
        using (var throttle = new SemaphoreSlim(this.MaxClients, this.MaxClients))
        {
            await ThreadingTask.WhenAll(Items.Select(item => UploadAsync(ct, item, existingBlobs, throttle)));
        }

        Log.LogMessage(MessageImportance.Normal, "Upload to Azure is complete, a total of {0} items were uploaded.", Items.Length);
    }
    catch (Exception e)
    {
        Log.LogErrorFromException(e, true);
    }
    return !Log.HasLoggedErrors;
}
/// <summary>
/// Uploads a single item to the container. Honors <see cref="Overwrite"/> and
/// <see cref="PassIfExistingItemIdentical"/>; throws (surfaced by the caller's catch)
/// when metadata is missing, the file does not exist, or the blob already exists.
/// </summary>
/// <param name="ct">Cancellation token checked before starting the upload.</param>
/// <param name="item">The task item; must carry 'RelativeBlobPath' metadata.</param>
/// <param name="blobsPresent">Names of blobs already in the container (pre-fetched).</param>
/// <param name="clientThrottle">Semaphore limiting concurrent uploads.</param>
private async ThreadingTask UploadAsync(CancellationToken ct, ITaskItem item, HashSet<string> blobsPresent, SemaphoreSlim clientThrottle)
{
if (ct.IsCancellationRequested)
{
Log.LogError("Task UploadToAzure cancelled");
ct.ThrowIfCancellationRequested();
}
string relativeBlobPath = item.GetMetadata("RelativeBlobPath");
if (string.IsNullOrEmpty(relativeBlobPath))
throw new Exception(string.Format("Metadata 'RelativeBlobPath' is missing for item '{0}'.", item.ItemSpec));
if (!File.Exists(item.ItemSpec))
throw new Exception(string.Format("The file '{0}' does not exist.", item.ItemSpec));
UploadClient uploadClient = new UploadClient(Log);
// When overwriting is disallowed and the blob is already there, optionally accept a
// byte-for-byte identical upload as success; otherwise fail.
if (!Overwrite && blobsPresent.Contains(relativeBlobPath))
{
if (PassIfExistingItemIdentical &&
await ItemEqualsExistingBlobAsync(item, relativeBlobPath, uploadClient, clientThrottle))
{
return;
}
throw new Exception(string.Format("The blob '{0}' already exists.", relativeBlobPath));
}
string contentType = item.GetMetadata("ContentType");
// Acquire a throttle slot for the duration of the network upload only.
await clientThrottle.WaitAsync();
try
{
Log.LogMessage("Uploading {0} to {1}.", item.ItemSpec, ContainerName);
await
uploadClient.UploadBlockBlobAsync(
ct,
AccountName,
AccountKey,
ContainerName,
item.ItemSpec,
relativeBlobPath,
contentType,
UploadTimeoutInMinutes);
}
finally
{
clientThrottle.Release();
}
}
/// <summary>
/// Compares a local file against the blob already stored at
/// <paramref name="relativeBlobPath"/>, under the same client throttle used for uploads.
/// </summary>
/// <returns>True when the existing blob is byte-for-byte identical to the local file.</returns>
private async Task<bool> ItemEqualsExistingBlobAsync(
ITaskItem item,
string relativeBlobPath,
UploadClient client,
SemaphoreSlim clientThrottle)
{
await clientThrottle.WaitAsync();
try
{
return await client.FileEqualsExistingBlobAsync(
AccountName,
AccountKey,
ContainerName,
item.ItemSpec,
relativeBlobPath,
UploadTimeoutInMinutes);
}
finally
{
clientThrottle.Release();
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Xml;
using System.Collections;
using System.Text.RegularExpressions;
using Microsoft.Build.BackEnd;
using Microsoft.Build.Collections;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Execution;
using Microsoft.Build.Framework;
using System.Collections.Generic;
using Microsoft.Build.Shared.FileSystem;
using InvalidProjectFileException = Microsoft.Build.Exceptions.InvalidProjectFileException;
using ProjectItemInstanceFactory = Microsoft.Build.Execution.ProjectItemInstance.TaskItem.ProjectItemInstanceFactory;
using Xunit;
namespace Microsoft.Build.UnitTests.BackEnd
{
public class BatchingEngine_Tests
{
/// <summary>
/// End-to-end check of bucket creation: batching on "File" (5 items) plus a metadata
/// reference yields one bucket per File item, while non-batched data ("Doc" items,
/// properties) is identical in every bucket. Also exercises expansion behavior
/// (transforms, separators, illegal concatenation) against the first bucket's expander.
/// </summary>
[Fact]
public void GetBuckets()
{
ProjectInstance project = ProjectHelpers.CreateEmptyProjectInstance();
List<string> parameters = new List<string>();
parameters.Add("@(File);$(unittests)");
parameters.Add("$(obj)\\%(Filename).ext");
parameters.Add("@(File->'%(extension)')"); // attributes in transforms don't affect batching
ItemDictionary<ProjectItemInstance> itemsByType = new ItemDictionary<ProjectItemInstance>();
IList<ProjectItemInstance> items = new List<ProjectItemInstance>();
items.Add(new ProjectItemInstance(project, "File", "a.foo", project.FullPath));
items.Add(new ProjectItemInstance(project, "File", "b.foo", project.FullPath));
items.Add(new ProjectItemInstance(project, "File", "c.foo", project.FullPath));
items.Add(new ProjectItemInstance(project, "File", "d.foo", project.FullPath));
items.Add(new ProjectItemInstance(project, "File", "e.foo", project.FullPath));
itemsByType.ImportItems(items);
items = new List<ProjectItemInstance>();
items.Add(new ProjectItemInstance(project, "Doc", "a.doc", project.FullPath));
items.Add(new ProjectItemInstance(project, "Doc", "b.doc", project.FullPath));
items.Add(new ProjectItemInstance(project, "Doc", "c.doc", project.FullPath));
items.Add(new ProjectItemInstance(project, "Doc", "d.doc", project.FullPath));
items.Add(new ProjectItemInstance(project, "Doc", "e.doc", project.FullPath));
itemsByType.ImportItems(items);
PropertyDictionary<ProjectPropertyInstance> properties = new PropertyDictionary<ProjectPropertyInstance>();
properties.Set(ProjectPropertyInstance.Create("UnitTests", "unittests.foo"));
properties.Set(ProjectPropertyInstance.Create("OBJ", "obj"));
List<ItemBucket> buckets = BatchingEngine.PrepareBatchingBuckets(parameters, CreateLookup(itemsByType, properties), MockElementLocation.Instance);
// One bucket per "File" item (batching on %(Filename)/%(extension)).
Assert.Equal(5, buckets.Count);
foreach (ItemBucket bucket in buckets)
{
// non-batching data -- same for all buckets
XmlAttribute tempXmlAttribute = (new XmlDocument()).CreateAttribute("attrib");
tempXmlAttribute.Value = "'$(Obj)'=='obj'";
Assert.True(ConditionEvaluator.EvaluateCondition(tempXmlAttribute.Value, ParserOptions.AllowAll, bucket.Expander, ExpanderOptions.ExpandAll, Directory.GetCurrentDirectory(), MockElementLocation.Instance, null, new BuildEventContext(1, 2, 3, 4), FileSystems.Default));
Assert.Equal("a.doc;b.doc;c.doc;d.doc;e.doc", bucket.Expander.ExpandIntoStringAndUnescape("@(doc)", ExpanderOptions.ExpandItems, MockElementLocation.Instance));
Assert.Equal("unittests.foo", bucket.Expander.ExpandIntoStringAndUnescape("$(bogus)$(UNITTESTS)", ExpanderOptions.ExpandPropertiesAndMetadata, MockElementLocation.Instance));
}
// Batched data -- the first bucket sees only the first File item.
Assert.Equal("a.foo", buckets[0].Expander.ExpandIntoStringAndUnescape("@(File)", ExpanderOptions.ExpandItems, MockElementLocation.Instance));
Assert.Equal(".foo", buckets[0].Expander.ExpandIntoStringAndUnescape("@(File->'%(Extension)')", ExpanderOptions.ExpandItems, MockElementLocation.Instance));
Assert.Equal("obj\\a.ext", buckets[0].Expander.ExpandIntoStringAndUnescape("$(obj)\\%(Filename).ext", ExpanderOptions.ExpandPropertiesAndMetadata, MockElementLocation.Instance));
// we weren't batching on this attribute, so it has no value
Assert.Equal(String.Empty, buckets[0].Expander.ExpandIntoStringAndUnescape("%(Extension)", ExpanderOptions.ExpandAll, MockElementLocation.Instance));
ProjectItemInstanceFactory factory = new ProjectItemInstanceFactory(project, "i");
items = buckets[0].Expander.ExpandIntoItemsLeaveEscaped("@(file)", factory, ExpanderOptions.ExpandItems, MockElementLocation.Instance);
Assert.NotNull(items);
Assert.Single(items);
int invalidProjectFileExceptions = 0;
try
{
// This should throw because we don't allow item lists to be concatenated
// with other strings.
bool throwAway;
items = buckets[0].Expander.ExpandSingleItemVectorExpressionIntoItems("@(file)$(unitests)", factory, ExpanderOptions.ExpandItems, false /* no nulls */, out throwAway, MockElementLocation.Instance);
}
catch (InvalidProjectFileException ex)
{
// check we don't lose error codes from IPFE's during build
Assert.Equal("MSB4012", ex.ErrorCode);
invalidProjectFileExceptions++;
}
// We do allow separators in item vectors, this results in an item group with a single flattened item
items = buckets[0].Expander.ExpandIntoItemsLeaveEscaped("@(file, ',')", factory, ExpanderOptions.ExpandItems, MockElementLocation.Instance);
Assert.NotNull(items);
Assert.Single(items);
Assert.Equal("a.foo", items[0].EvaluatedInclude);
Assert.Equal(1, invalidProjectFileExceptions);
}
/// <summary>
/// Tests the real simple case of using an unqualified metadata reference %(Culture),
/// where there are only two items and both of them have a value for Culture, but they
/// have different values.
/// </summary>
[Fact]
public void ValidUnqualifiedMetadataReference()
{
// Two "File" items, each carrying a (different) Culture value; batching on
// %(Culture) must yield one bucket per distinct value.
ProjectInstance project = ProjectHelpers.CreateEmptyProjectInstance();

List<string> parameters = new List<string>();
parameters.Add("@(File)");
parameters.Add("%(Culture)");

ProjectItemInstance french = new ProjectItemInstance(project, "File", "a.foo", project.FullPath);
french.SetMetadata("Culture", "fr-fr");
ProjectItemInstance english = new ProjectItemInstance(project, "File", "b.foo", project.FullPath);
english.SetMetadata("Culture", "en-en");

List<ProjectItemInstance> allItems = new List<ProjectItemInstance>();
allItems.Add(french);
allItems.Add(english);

ItemDictionary<ProjectItemInstance> itemsByType = new ItemDictionary<ProjectItemInstance>();
itemsByType.ImportItems(allItems);

PropertyDictionary<ProjectPropertyInstance> properties = new PropertyDictionary<ProjectPropertyInstance>();
List<ItemBucket> buckets = BatchingEngine.PrepareBatchingBuckets(parameters, CreateLookup(itemsByType, properties), null);

// One bucket for "fr-fr", one for "en-en".
Assert.Equal(2, buckets.Count);
}
/// <summary>
/// Tests the case where an unqualified metadata reference is used illegally.
/// It's illegal because not all of the items consumed contain a value for
/// that metadata.
/// </summary>
[Fact]
public void InvalidUnqualifiedMetadataReference()
{
// Batching on %(Culture) when only one of the two consumed items defines
// Culture is an error and must raise InvalidProjectFileException.
Assert.Throws<InvalidProjectFileException>(() =>
{
ProjectInstance project = ProjectHelpers.CreateEmptyProjectInstance();

List<string> parameters = new List<string>();
parameters.Add("@(File)");
parameters.Add("%(Culture)");

ProjectItemInstance withCulture = new ProjectItemInstance(project, "File", "a.foo", project.FullPath);
withCulture.SetMetadata("Culture", "fr-fr");
ProjectItemInstance withoutCulture = new ProjectItemInstance(project, "File", "b.foo", project.FullPath);

List<ProjectItemInstance> allItems = new List<ProjectItemInstance>();
allItems.Add(withCulture);
allItems.Add(withoutCulture);

ItemDictionary<ProjectItemInstance> itemsByType = new ItemDictionary<ProjectItemInstance>();
itemsByType.ImportItems(allItems);

PropertyDictionary<ProjectPropertyInstance> properties = new PropertyDictionary<ProjectPropertyInstance>();

// This is expected to throw because not all items contain a value for metadata "Culture".
// Only a.foo has a Culture metadata. b.foo does not.
BatchingEngine.PrepareBatchingBuckets(parameters, CreateLookup(itemsByType, properties), MockElementLocation.Instance);
});
}
/// <summary>
/// Tests the case where an unqualified metadata reference is used illegally.
/// It's illegal because not all of the items consumed contain a value for
/// that metadata.
/// </summary>
[Fact]
public void NoItemsConsumed()
{
// %(Culture) appears but no item list is referenced at all, so the metadata
// reference cannot be resolved to any item type — this must throw.
Assert.Throws<InvalidProjectFileException>(() =>
{
List<string> parameters = new List<string>();
parameters.Add("$(File)");
parameters.Add("%(Culture)");

ItemDictionary<ProjectItemInstance> itemsByType = new ItemDictionary<ProjectItemInstance>();
PropertyDictionary<ProjectPropertyInstance> properties = new PropertyDictionary<ProjectPropertyInstance>();

// This is expected to throw because we have no idea what item list %(Culture) refers to.
BatchingEngine.PrepareBatchingBuckets(parameters, CreateLookup(itemsByType, properties), MockElementLocation.Instance);
});
}
/// <summary>
/// Missing unittest found by mutation testing.
/// REASON TEST WASN'T ORIGINALLY PRESENT: Missed test.
///
/// This test ensures that two items with duplicate attributes end up in exactly one batching
/// bucket.
/// </summary>
[Fact]
public void Regress_Mutation_DuplicateBatchingBucketsAreFoldedTogether()
{
// Both items have the same (absent) %(File.Culture) value, so the two
// candidate buckets are duplicates and must be folded into a single bucket.
ProjectInstance project = ProjectHelpers.CreateEmptyProjectInstance();

List<string> parameters = new List<string>();
parameters.Add("%(File.Culture)");

List<ProjectItemInstance> allItems = new List<ProjectItemInstance>();
allItems.Add(new ProjectItemInstance(project, "File", "a.foo", project.FullPath));
// Need at least two items for this test case to ensure multiple buckets might be possible
allItems.Add(new ProjectItemInstance(project, "File", "b.foo", project.FullPath));

ItemDictionary<ProjectItemInstance> itemsByType = new ItemDictionary<ProjectItemInstance>();
itemsByType.ImportItems(allItems);

PropertyDictionary<ProjectPropertyInstance> properties = new PropertyDictionary<ProjectPropertyInstance>();
List<ItemBucket> buckets = BatchingEngine.PrepareBatchingBuckets(parameters, CreateLookup(itemsByType, properties), null);

// If duplicate buckets have been folded correctly, then there will be exactly one bucket here
// containing both a.foo and b.foo.
Assert.Single(buckets);
}
/// <summary>
/// Simplest batching case: CreateItem batched on %(AToB.Identity) runs once per
/// item and the outputs accumulate into a single item list in declaration order.
/// </summary>
[Fact]
public void Simple()
{
string content = @"
<Project ToolsVersion=""msbuilddefaulttoolsversion"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<ItemGroup>
<AToB Include=""a;b""/>
</ItemGroup>
<Target Name=""Build"">
<CreateItem Include=""%(AToB.Identity)"">
<Output ItemName=""AToBBatched"" TaskParameter=""Include""/>
</CreateItem>
<Message Text=""[AToBBatched: @(AToBBatched)]""/>
</Target>
</Project>
";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
log.AssertLogContains("[AToBBatched: a;b]");
}
/// <summary>
/// When removing an item in a target which is batched and called by call target there was an exception thrown
/// due to us adding the same item instance to the remove item lists when merging the lookups between the two batches.
/// The fix was to not add the item to the remove list if it already exists.
/// </summary>
/// <summary>
/// Regression test: removing an item inside a batched target invoked via CallTarget
/// must not fail when lookups from the two batches are merged (the same item instance
/// must not be added to the remove list twice).
/// </summary>
[Fact]
public void Regress72803()
{
string content = @"
<Project xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"" DefaultTargets=""ReleaseBuild"">
<ItemGroup>
<Environments Include=""dev"" />
<Environments Include=""prod"" />
<ItemsToZip Include=""1"" />
</ItemGroup>
<Target Name=""ReleaseBuild"">
<CallTarget Targets=""MakeAppPackage;MakeDbPackage""/>
</Target>
<Target Name=""MakeAppPackage"" Outputs=""%(Environments.Identity)"">
<ItemGroup>
<ItemsToZip Include=""%(Environments.Identity).msi"" />
</ItemGroup>
</Target>
<Target Name=""MakeDbPackage"" Outputs=""%(Environments.Identity)"">
<Message Text=""Item Before:%(Environments.Identity) @(ItemsToZip)"" />
<ItemGroup>
<ItemsToZip Remove=""@(ItemsToZip)"" />
</ItemGroup>
<Message Text=""Item After:%(Environments.Identity) @(ItemsToZip)"" Condition=""'@(ItemsToZip)' != ''"" />
</Target>
</Project>
";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
// The item exists before removal in each batch, and is gone afterwards.
log.AssertLogContains("Item Before:dev 1");
log.AssertLogContains("Item Before:prod 1");
log.AssertLogDoesntContain("Item After:dev 1");
log.AssertLogDoesntContain("Item After:prod 1");
}
/// <summary>
/// Regress a bug where batching over an item list seemed to have
/// items for that list even in buckets where there should be none, because
/// it was batching over metadata that only other list/s had.
/// </summary>
/// <summary>
/// A bucket created for metadata that only item list "i" has (%(i.foo)) must see an
/// empty "j" list — "a.obj" must not be produced from the j transform in that bucket.
/// </summary>
[Fact]
public void BucketsWithEmptyListForBatchedItemList()
{
string content = @"
<Project ToolsVersion=""msbuilddefaulttoolsversion"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<ItemGroup>
<i Include=""b""/>
<j Include=""a"">
<k>x</k>
</j>
</ItemGroup>
<Target Name=""t"">
<ItemGroup>
<Obj Condition=""'%(j.k)'==''"" Include=""@(j->'%(Filename).obj');%(i.foo)""/>
</ItemGroup>
<Message Text=""@(Obj)"" />
</Target>
</Project>
";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
log.AssertLogDoesntContain("a.obj");
}
/// <summary>
/// Bug for Targets instead of Tasks.
/// </summary>
/// <summary>
/// Same empty-list guarantee as above, but for target-level batching: when batching a
/// target over two item types, each bucket sees only its own item list populated.
/// </summary>
[Fact]
public void BucketsWithEmptyListForTargetBatchedItemList()
{
string content = @"
<Project ToolsVersion=""msbuilddefaulttoolsversion"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<ItemGroup>
<a Include=""a1""/>
<b Include=""b1""/>
</ItemGroup>
<Target Name=""t"" Outputs=""%(a.Identity)%(b.identity)"">
<Message Text=""[a=@(a) b=@(b)]"" />
</Target>
</Project>
";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
log.AssertLogContains("[a=a1 b=]");
log.AssertLogContains("[a= b=b1]");
}
/// <summary>
/// A batching target that has no outputs should still run.
/// This is how we shipped before, although Jay pointed out it's odd.
/// </summary>
/// <summary>
/// A batching target whose Outputs expand to nothing (File is empty) still runs its
/// single batch — the log is expected to contain the "[]" message from that batch.
/// </summary>
[Fact]
public void BatchOnEmptyOutput()
{
// NOTE(review): the embedded project comment says the target "should not run",
// which contradicts the summary above and the AssertLogContains("[]") below —
// the assertion indicates the batch does run. Confirm which wording is correct.
string content = @"
<Project ToolsVersion=""msbuilddefaulttoolsversion"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<ItemGroup>
<File Include=""$(foo)"" />
</ItemGroup>
<!-- Should not run as the single batch has no outputs -->
<Target Name=""b"" Outputs=""%(File.Identity)""><Message Text=""[@(File)]"" /></Target>
<Target Name=""a"" DependsOnTargets=""b"">
<Message Text=""[a]"" />
</Target>
</Project>
";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
log.AssertLogContains("[]");
}
/// <summary>
/// Every batch should get its own new task object.
/// We verify this by using the Warning class. If the same object is being reused,
/// the second warning would have the code from the first use of the task.
/// </summary>
/// <summary>
/// Each batch must instantiate a fresh task object: the Warning task's Code set by the
/// first batch ("high") must not leak into the second batch (whose Code is null).
/// </summary>
[Fact]
public void EachBatchGetsASeparateTaskObject()
{
string content = @"
<Project ToolsVersion=""msbuilddefaulttoolsversion"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<ItemGroup>
<i Include=""i1"">
<Code>high</Code>
</i>
<i Include=""i2""/>
</ItemGroup>
<Target Name=""t"">
<Warning Text=""@(i)"" Code=""%(i.Code)""/>
</Target>
</Project>";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
Assert.Equal("high", log.Warnings[0].Code);
Assert.Null(log.Warnings[1].Code);
}
/// <summary>
/// It is important that the batching engine invokes the different batches in the same
/// order as the items are declared in the project, especially when batching is simply
/// being used as a "for loop".
/// </summary>
/// <summary>
/// Batches must be invoked in the same order the items were declared — verified with
/// one forward-ordered and one reverse-ordered item list.
/// </summary>
[Fact]
public void BatcherPreservesItemOrderWithinASingleItemList()
{
string content = @"
<Project ToolsVersion=""msbuilddefaulttoolsversion"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<ItemGroup>
<AToZ Include=""a;b;c;d;e;f;g;h;i;j;k;l;m;n;o;p;q;r;s;t;u;v;w;x;y;z""/>
<ZToA Include=""z;y;x;w;v;u;t;s;r;q;p;o;n;m;l;k;j;i;h;g;f;e;d;c;b;a""/>
</ItemGroup>
<Target Name=""Build"">
<CreateItem Include=""%(AToZ.Identity)"">
<Output ItemName=""AToZBatched"" TaskParameter=""Include""/>
</CreateItem>
<CreateItem Include=""%(ZToA.Identity)"">
<Output ItemName=""ZToABatched"" TaskParameter=""Include""/>
</CreateItem>
<Message Text=""AToZBatched: @(AToZBatched)""/>
<Message Text=""ZToABatched: @(ZToABatched)""/>
</Target>
</Project>
";
MockLogger log = Helpers.BuildProjectWithNewOMExpectSuccess(content);
log.AssertLogContains("AToZBatched: a;b;c;d;e;f;g;h;i;j;k;l;m;n;o;p;q;r;s;t;u;v;w;x;y;z");
log.AssertLogContains("ZToABatched: z;y;x;w;v;u;t;s;r;q;p;o;n;m;l;k;j;i;h;g;f;e;d;c;b;a");
}
/// <summary>
/// Undefined and empty metadata values should not be distinguished when bucketing.
/// This is the same as previously shipped.
/// </summary>
/// <summary>
/// Undefined metadata (i1 has no m) and empty metadata (i2's m is "") must bucket
/// together, separate from items with a non-empty value (i3's m is "m1").
/// </summary>
[Fact]
public void UndefinedAndEmptyMetadataValues()
{
string content = @"
<Project ToolsVersion='msbuilddefaulttoolsversion' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<ItemGroup>
<i Include='i1'/>
<i Include='i2'>
<m></m>
</i>
<i Include='i3'>
<m>m1</m>
</i>
</ItemGroup>
<Target Name='Build'>
<Message Text='[@(i) %(i.m)]'/>
</Target>
</Project>
";
Project project = new Project(XmlReader.Create(new StringReader(ObjectModelHelpers.CleanupFileContents(content))));
MockLogger logger = new MockLogger();
project.Build(logger);
// i1 and i2 share a bucket (empty m); i3 is alone (m1).
logger.AssertLogContains("[i1;i2 ]", "[i3 m1]");
}
/// <summary>
/// Convenience helper building a Lookup over the given item and property tables.
/// </summary>
private static Lookup CreateLookup(ItemDictionary<ProjectItemInstance> itemsByType, PropertyDictionary<ProjectPropertyInstance> properties)
{
return new Lookup(itemsByType, properties);
}
}
}
| |
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using EnvDTE;
using System.Runtime.InteropServices;
using Gallio.Common.Platform;
using Gallio.Common.Reflection;
using Gallio.Navigator.Native;
using Gallio.Runtime.Logging;
using Gallio.UI.ErrorReporting;
using Gallio.VisualStudio.Interop;
namespace Gallio.Navigator
{
/// <summary>
/// Gallio navigator engine.
/// </summary>
public class GallioNavigatorEngine : IGallioNavigator
{
private readonly bool possiblyRunningInIE;
private readonly IVisualStudioManager visualStudioManager;
/// <summary>
/// Creates a navigator.
/// </summary>
/// <param name="visualStudioManager">The visual studio manager.</param>
/// <param name="possiblyRunningInIE">True if the navigator code may be running in IE.</param>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="visualStudioManager"/> is null.</exception>
public GallioNavigatorEngine(bool possiblyRunningInIE, IVisualStudioManager visualStudioManager)
{
// Fail fast: every navigation path depends on the manager.
if (visualStudioManager == null)
throw new ArgumentNullException("visualStudioManager");
this.possiblyRunningInIE = possiblyRunningInIE;
this.visualStudioManager = visualStudioManager;
}
/// <summary>
/// Creates a navigator using the default <see cref="VisualStudioManager.Instance"/>.
/// </summary>
/// <param name="possiblyRunningInIE">True if the navigator code may be running in IE.</param>
public GallioNavigatorEngine(bool possiblyRunningInIE)
: this(possiblyRunningInIE, VisualStudioManager.Instance)
{
}
/// <inheritdoc />
public bool NavigateTo(string path, int lineNumber, int columnNumber)
{
// Argument validation: rooted path, non-negative line/column (0 means "don't move").
if (path == null)
throw new ArgumentNullException("path");
if (path.Length == 0)
throw new ArgumentException("Path must not be empty.", "path");
if (!Path.IsPathRooted(path))
throw new ArgumentException("Path must be rooted.", "path");
if (lineNumber < 0)
throw new ArgumentOutOfRangeException("lineNumber");
if (columnNumber < 0)
throw new ArgumentOutOfRangeException("columnNumber");
try
{
// Low-integrity processes (e.g. IE protected mode) cannot drive VS via COM
// directly, so delegate to an out-of-process helper instead.
if (IsElevationNeededToAccessVisualStudio())
return NavigateToFileUsingHelperProcess(path, lineNumber, columnNumber);
return NavigateToFileInVisualStudio(path, lineNumber, columnNumber);
}
catch (Exception ex)
{
// Any failure is surfaced to the user as a friendly dialog; returns false.
ErrorDialog.Show(null, "Gallio Navigator", String.Format(
"Gallio could not navigate to: {0} ({1},{2}) because the file was not found or Visual Studio could not be controlled.\nPlease try again after launching Visual Studio manually and opening the appropriate solution.",
path, lineNumber, columnNumber), ex.ToString());
return false;
}
}
/// <summary>
/// Returns true when the process runs at too low an integrity level (below Medium,
/// e.g. IE protected mode) to control Visual Studio via COM directly.
/// </summary>
private bool IsElevationNeededToAccessVisualStudio()
{
    if (!possiblyRunningInIE)
        return false;

    ProcessIntegrityLevel level = ProcessSupport.ProcessIntegrityLevel;
    if (level == ProcessIntegrityLevel.Unknown)
        return false;

    return level < ProcessIntegrityLevel.Medium;
}
/// <summary>
/// Performs the navigation by launching this assembly's executable as a separate
/// (normal-integrity) process, passing the navigation request as a gallio: URI argument.
/// </summary>
private bool NavigateToFileUsingHelperProcess(string path, int lineNumber, int columnNumber)
{
var command = GallioNavigatorCommand.CreateNavigateToCommand(path, lineNumber, columnNumber);
string navigatorExePath = AssemblyUtils.GetFriendlyAssemblyLocation(GetType().Assembly);
string navigatorArgs = command.ToUri();
System.Diagnostics.Process.Start(navigatorExePath, navigatorArgs);
return true;
}
/// <summary>
/// Opens the file in Visual Studio (launching VS if needed), moves the caret to the
/// requested line/column (when lineNumber != 0) and brings the window to the front.
/// </summary>
/// <returns>False if Visual Studio could not be obtained; true otherwise.</returns>
private bool NavigateToFileInVisualStudio(string path, int lineNumber, int columnNumber)
{
    path = Path.GetFullPath(path);

    var logger = NullLogger.Instance;
    IVisualStudio visualStudio = visualStudioManager.GetVisualStudio(VisualStudioVersion.Any, true, logger);
    if (visualStudio == null)
        return false;

    visualStudio.Call(dte =>
    {
        Window window = OpenFile(dte, path);
        if (window == null)
            window = FindFileInSolution(dte, path);

        // Bug fix: FindFileInSolution returns null (via FindFileInSearchPaths) when the
        // file is not found under any project directory; previously execution fell
        // through to window.Selection and crashed with a NullReferenceException.
        // Throwing here instead surfaces the friendly error dialog in NavigateTo.
        if (window == null)
            throw new ApplicationException("Could not find the file in the solution or any of its projects.");

        TextSelection selection = window.Selection as TextSelection;
        if (lineNumber != 0)
        {
            if (selection != null)
                selection.MoveToLineAndOffset(lineNumber, Math.Max(1, columnNumber), false);
        }

        window.Activate();
        window.Visible = true;
    });

    visualStudio.BringToFront();
    return true;
}
/// <summary>
/// Opens the file in the code view and returns its window, or null when the file does
/// not exist (checked up front, and also tolerating the DTE's STG_E_FILENOTFOUND).
/// </summary>
private static Window OpenFile(DTE dte, string path)
{
try
{
if (!File.Exists(path))
return null;
return dte.OpenFile(Constants.vsViewKindCode, path);
}
catch (COMException ex)
{
// Only swallow "file not found"; any other COM failure propagates.
if (ex.ErrorCode != NativeConstants.STG_E_FILENOTFOUND)
throw;
return null;
}
}
/// <summary>
/// Attempts to locate the file relative to the open solution: collects the solution
/// directory and every project directory as search roots, then probes them with
/// progressively shorter suffixes of the path.
/// </summary>
/// <returns>The opened window, or null when no candidate matched.</returns>
/// <exception cref="ApplicationException">Thrown when no solution is open.</exception>
private static Window FindFileInSolution(DTE dte, string path)
{
Solution solution = dte.Solution;
if (! solution.IsOpen)
throw new ApplicationException("File not found and no solution is open to be searched.");
List<string> searchPaths = new List<string>();
searchPaths.Add(Path.GetDirectoryName(solution.FileName));
foreach (Project project in FindAllProjects(solution))
{
try
{
string projectFile = project.FileName;
if (! string.IsNullOrEmpty(projectFile))
searchPaths.Add(Path.GetDirectoryName(projectFile));
}
catch (COMException)
{
// Best effort: some project kinds throw when queried for FileName; skip them.
}
}
return FindFileInSearchPaths(dte, searchPaths, path);
}
/// <summary>
/// Collects all real projects in the solution, recursing into solution folders
/// (solution-items folders themselves are excluded from the result).
/// </summary>
private static IEnumerable<Project> FindAllProjects(Solution solution)
{
var projects = new List<Project>();
foreach (Project project in solution.Projects)
{
if (project.Kind != Constants.vsProjectItemKindSolutionItems)
projects.Add(project);
FindAllProjects(projects, project.ProjectItems);
}
return projects;
}
/// <summary>
/// Recursive helper: walks project items looking for nested sub-projects (as found
/// inside solution folders) and adds them to <paramref name="projects"/>.
/// </summary>
private static void FindAllProjects(IList<Project> projects, ProjectItems parent)
{
foreach (ProjectItem projectItem in parent)
{
Project project = projectItem.SubProject;
if (project != null)
{
if (project.Kind != EnvDTE.Constants.vsProjectItemKindSolutionItems)
projects.Add(project);
FindAllProjects(projects, project.ProjectItems);
}
}
}
/// <summary>
/// Repeatedly strips the leading segment from <paramref name="path"/> and probes each
/// search root with the remaining relative path, until a file opens or the path is
/// exhausted.
/// </summary>
/// <returns>The opened window, or null when no combination matched.</returns>
private static Window FindFileInSearchPaths(DTE dte, IEnumerable<string> searchPaths, string path)
{
    while (true)
    {
        path = RemoveLeadingSegment(path);
        if (path.Length == 0)
            return null;

        foreach (string searchPath in searchPaths)
        {
            Window window = OpenFile(dte, Path.Combine(searchPath, path));
            if (window != null)
                return window;
        }
    }
}
/// <summary>
/// Drops everything up to and including the first directory separator (primary or
/// alternate) and returns the remainder; returns "" when no separator is present.
/// </summary>
private static string RemoveLeadingSegment(string path)
{
    char[] separators = { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar };
    int firstSeparator = path.IndexOfAny(separators);
    return firstSeparator < 0 ? "" : path.Substring(firstSeparator + 1);
}
}
}
| |
/*
Copyright (c) 2004-2006 Tomas Matousek, Ladislav Prosek, Vaclav Novak, and Martin Maly.
The use and distribution terms for this software are contained in the file named License.txt,
which can be found in the root of the Phalanger distribution. By using this software
in any fashion, you are agreeing to be bound by the terms of this license.
You must not remove this notice from this software.
*/
using System;
using System.IO;
using System.Diagnostics;
using System.Reflection.Emit;
using PHP.Core.AST;
using PHP.Core.Emit;
using PHP.Core.Parsers;
namespace PHP.Core.Compiler.AST
{
partial class NodeCompilers
{
#region ItemUse
[NodeCompiler(typeof(ItemUse))]
sealed class ItemUseCompiler : CompoundVarUseCompiler<ItemUse>
{
/// <summary>
/// Set when the index is emitted.
/// </summary>
private PhpTypeCode indexTypeCode = PhpTypeCode.Unknown;
/// <summary>
/// Analyzes an array item use ($a[x]). Rejects the key-less operator ($a[]) in read
/// contexts, derives the access type to propagate to the array sub-expression, and
/// analyzes (and literalizes) the index expression.
/// </summary>
public override Evaluation Analyze(ItemUse node, Analyzer analyzer, ExInfoFromParent info)
{
access = info.Access;
// checks for write context of key-less array operator ($a =& $x[] is ok):
if (node.Index == null
&& (access == AccessType.Read
|| access == AccessType.ReadAndWrite
|| access == AccessType.ReadAndWriteAndReadRef
|| access == AccessType.ReadAndWriteAndReadUnknown))
{
analyzer.ErrorSink.Add(Errors.EmptyIndexInReadContext, analyzer.SourceUnit, node.Span);
return new Evaluation(node);
}
base.Analyze(node, analyzer, info);
// Map this node's access type onto the access type the array expression must be
// analyzed with (e.g. writing $a[x] means writing into $a as well).
ExInfoFromParent sinfo = new ExInfoFromParent(node);
switch (info.Access)
{
case AccessType.Write:
case AccessType.WriteRef:
case AccessType.ReadRef: sinfo.Access = AccessType.Write; break;
case AccessType.ReadAndWriteAndReadRef:
case AccessType.WriteAndReadRef:
case AccessType.ReadAndWrite: sinfo.Access = AccessType.ReadAndWrite; break;
case AccessType.WriteAndReadUnknown:
case AccessType.ReadAndWriteAndReadUnknown: sinfo.Access = info.Access; break;
case AccessType.ReadUnknown: sinfo.Access = AccessType.ReadUnknown; break;
default: sinfo.Access = AccessType.Read; break;
}
((ItemUse)node).Array.Analyze(analyzer, sinfo);
// Index may be a constant expression; Literalize folds it when possible.
if (node.Index != null)
node.Index = node.Index.Analyze(analyzer, ExInfoFromParent.DefaultExInfo).Literalize();
return new Evaluation(node);
}
/// <summary>
/// Emits IL for the item use, dispatching on the selected access type (read, write,
/// read-by-reference, read-unknown, write-by-reference, or discarded result).
/// </summary>
/// <returns>The type code of the value left on the evaluation stack.</returns>
public override PhpTypeCode Emit(ItemUse node, CodeGenerator codeGenerator)
{
Statistics.AST.AddNode("ItemUse");
PhpTypeCode result = PhpTypeCode.Invalid;
switch (codeGenerator.SelectAccess(access))
{
case AccessType.None:
// Value is not used: emit the read, then discard it.
result = EmitNodeRead((ItemUse)node, codeGenerator, Operators.GetItemKinds.Get);
codeGenerator.IL.Emit(OpCodes.Pop);
break;
case AccessType.Read:
result = EmitNodeRead((ItemUse)node, codeGenerator, Operators.GetItemKinds.Get);
break;
case AccessType.Write:
// prepares for write:
result = EmitNodeWrite((ItemUse)node, codeGenerator);
break;
case AccessType.ReadRef:
// if the selector is set to the ReadRef, the chain is emitted as if it was written
// (chained nodes are marked as ReadAndWrite):
if (codeGenerator.AccessSelector == AccessType.ReadRef)
codeGenerator.AccessSelector = AccessType.Write;
result = EmitNodeReadRef((ItemUse)node, codeGenerator);
break;
case AccessType.ReadUnknown:
result = EmitNodeReadUnknown((ItemUse)node, codeGenerator);
break;
case AccessType.WriteRef:
// prepares for write:
result = EmitNodeWriteRef((ItemUse)node, codeGenerator);
break;
default:
Debug.Fail(null);
break;
}
return result;
}
/// <summary>
/// Completes an assignment into the item use after the value to store has been emitted
/// (preparation was done in <see cref="Emit"/>). Emits either a chained array-item set
/// (when the array is itself a chain/field/item) or a direct set on a simple variable,
/// followed by a store-back of the variable.
/// </summary>
/// <returns>The type code of the value left on the stack (Void for write contexts).</returns>
internal override PhpTypeCode EmitAssign(ItemUse node, CodeGenerator codeGenerator)
{
var chain = codeGenerator.ChainBuilder;
PhpTypeCode result;
switch (access)
{
case AccessType.WriteAndReadRef:
case AccessType.WriteAndReadUnknown:
case AccessType.ReadAndWrite:
case AccessType.ReadAndWriteAndReadRef:
case AccessType.ReadAndWriteAndReadUnknown:
case AccessType.Write:
case AccessType.WriteRef:
{
bool reference = access == AccessType.WriteRef;
// Note that some work was done in Emit() !
// In cases 3, 4, 5 EmitAssign is not called
if (node.IsMemberOf != null ||
(node.IsMemberOf == null && (node.Array is DirectStFldUse || node.Array is IndirectStFldUse || node.Array is ItemUse)))
{
// 2, 6, 7
chain.EmitSetArrayItem(indexTypeCode, node.Index, reference);
chain.End();
}
else
{
// Note: The value which should be stored is already loaded on the evaluation stack.
// Push the destination array and index and call the operator
// 1: a_[x]_
Debug.Assert(node.Array is SimpleVarUse);
chain.IsArrayItem = true;
chain.IsLastMember = true;
indexTypeCode = codeGenerator.EmitArrayKey(chain, node.Index);
node.Array.Emit(codeGenerator);
chain.EmitSetItem(indexTypeCode, node.Index, reference);
// Store the changed variable into table of variables (do nothing in optimalized functions)
SimpleVarUseHelper.EmitLoadAddress_StoreBack((SimpleVarUse)node.Array, codeGenerator);
}
result = PhpTypeCode.Void;
break;
}
case AccessType.None:
// do nothing
result = PhpTypeCode.Void;
break;
case AccessType.Read:
// do nothing
result = PhpTypeCode.Object;
break;
case AccessType.ReadRef:
// Do nothing
result = PhpTypeCode.PhpReference;
break;
default:
Debug.Fail(null);
result = PhpTypeCode.Invalid;
break;
}
return result;
}
/// <summary>
/// Emits code for unset(x[y]). Three shapes are handled: the array is reached through
/// a member chain (...->a[y]), the array is itself a field/item chain (a[..][y]), or
/// the array is a simple variable (a[y]). Cases 3-5 are asserted unreachable here.
/// </summary>
internal override void EmitUnset(ItemUse node, CodeGenerator codeGenerator)
{
var chain = codeGenerator.ChainBuilder;
var itemuse = (ItemUse)node;
// Template: "unset(x[y])" Operators.UnsetItem(object obj,object index)
// Case 3: a_[x]_[x] never reached
Debug.Assert(chain.IsArrayItem == false);
// Case 4,5 never reached
// 4: a[x]->...
// 5: ...->a[]->...
Debug.Assert(chain.IsMember == false);
chain.QuietRead = true;
// 1, 2, 6, 7
if (node.IsMemberOf != null)
{
// 6 , 7: ...->a[]_[]_ , ...->a_[]_
chain.Create();
chain.Begin();
chain.Lengthen(); // for hop over ->
node.IsMemberOf.Emit(codeGenerator);
chain.IsArrayItem = true;
chain.IsLastMember = false;
chain.EmitUnsetItem(itemuse.Array, itemuse.Index);
chain.IsArrayItem = false;
chain.End();
return;
}
// 1, 2
if (itemuse.Array is ItemUse || itemuse.Array is DirectStFldUse || itemuse.Array is IndirectStFldUse /* ??? */)
{
// 2: a[]_[]_
chain.Create();
chain.Begin();
chain.IsArrayItem = true;
chain.IsLastMember = true;
chain.EmitUnsetItem(itemuse.Array, itemuse.Index);
chain.IsArrayItem = false;
chain.End();
return;
}
// 1: a_[x]_
chain.IsArrayItem = true;
chain.IsLastMember = true;
chain.EmitUnsetItem(itemuse.Array, itemuse.Index);
chain.IsArrayItem = false;
}
/// <summary>
/// Emits IL for <c>isset(x[y])</c> / <c>empty(x[y])</c>: reads the item quietly
/// (no runtime notices) via <c>Operators.GetItem</c> so the caller can test the
/// resulting object against null.
/// </summary>
/// <param name="node">Item-use node being tested.</param>
/// <param name="codeGenerator">Code generator holding the IL emitter and chain builder.</param>
/// <param name="empty">True when compiling <c>empty()</c>, false for <c>isset()</c>.</param>
/// <returns>Always <c>PhpTypeCode.Object</c>.</returns>
internal override PhpTypeCode EmitIsset(ItemUse node, CodeGenerator codeGenerator, bool empty)
{
    // Template:
    //   "isset(x[y])"  =>  Operators.GetItem(x, y) != null

    // Suppress notices while evaluating the chain.
    codeGenerator.ChainBuilder.QuietRead = true;

    // Pick the getter flavor matching the construct being compiled.
    Operators.GetItemKinds getterKind;
    if (empty)
        getterKind = Operators.GetItemKinds.Empty;
    else
        getterKind = Operators.GetItemKinds.Isset;

    // GetItem(x, y) leaves an object on the evaluation stack.
    this.EmitNodeRead(node, codeGenerator, getterKind);

    return PhpTypeCode.Object;
}
/// <summary>
/// Emits code to load variable onto the evaluation stack. Supports operators chaining.
/// Dispatches on the chain-builder state to one of the seven chaining cases
/// (numbered in the comments below): subchain item (3), field-chain member (4, 5),
/// last member of a field chain (6, 7), nested array (2) or a simple variable (1).
/// </summary>
/// <param name="node">Instance.</param>
/// <param name="codeGenerator">A generator.</param>
/// <param name="itemGetterKind">Whether to load for "get", "isset", or "empty".</param>
/// <returns>Type code of the value left on the evaluation stack.</returns>
private PhpTypeCode EmitNodeRead(ItemUse/*!*/node, CodeGenerator/*!*/ codeGenerator, Operators.GetItemKinds itemGetterKind)
{
    var chain = codeGenerator.ChainBuilder;
    // NOTE(review): `itemuse` is a redundant self-cast of `node`; kept for
    // byte-compatibility with the original code.
    var itemuse = (ItemUse)node;

    PhpTypeCode result;

    if (chain.IsArrayItem)
    {
        // we are in the itemuse.Array subchain //
        // 3: a_[x]_[x]
        chain.Lengthen(); // for []
        result = chain.EmitGetItem(itemuse.Array, itemuse.Index, itemGetterKind);
        return result;
    }

    // 1,2,4,5,6,7
    if (chain.IsMember)
    {
        // we are in the field chain //
        // 4, 5
        if (node.IsMemberOf != null)
        {
            // we are in the middle of the field chain //
            // 5: ...->a[]->...

            // Lengthen chain for isMemberOf
            chain.Lengthen(); // for hop over ->
            node.IsMemberOf.Emit(codeGenerator);

            // Lengthen chain for own []
            chain.Lengthen();
            chain.IsArrayItem = true;
            chain.IsLastMember = false;
            result = chain.EmitGetItem(itemuse.Array, itemuse.Index, itemGetterKind);
            chain.IsArrayItem = false;
            return result;
        }
        else
        {
            // we are at the beginning of the field chain //
            // 4: a[x]->...

            // Lengthen chain for itself
            chain.Lengthen(); // for own []
            chain.IsArrayItem = true;
            chain.IsLastMember = true;
            result = chain.EmitGetItem(itemuse.Array, itemuse.Index, itemGetterKind);
            chain.IsArrayItem = false;
            return result;
        }
    }

    // 1, 2, 6, 7
    if (node.IsMemberOf != null)
    {
        // last node of the field chain //
        // 6 , 7: ...->a[]_[]_ , ...->a_[]_

        // Preserve the quiet-read flag across Create()/Begin(), which reset it.
        bool quiet_read = chain.QuietRead;
        chain.Create();
        chain.Begin();
        chain.QuietRead = quiet_read;
        chain.Lengthen(); // for hop over ->
        node.IsMemberOf.Emit(codeGenerator);

        // let's emit the itemuse.Array subchain followed by the GetItem:
        chain.IsArrayItem = true;
        chain.IsLastMember = false;
        result = chain.EmitGetItem(itemuse.Array, itemuse.Index, itemGetterKind);
        chain.IsArrayItem = false;
        chain.End();
        return result;
    }

    // 1, 2
    if (itemuse.Array is ItemUse || itemuse.Array is DirectStFldUse || itemuse.Array is IndirectStFldUse)
    {
        // we are at the beginning of the field chain //
        // 2: a[]_[]_

        // Preserve the quiet-read flag across Create()/Begin().
        bool quiet_read = chain.QuietRead;
        chain.Create();
        chain.Begin();
        chain.QuietRead = quiet_read;
        chain.IsArrayItem = true;
        chain.IsLastMember = true;
        result = chain.EmitGetItem(itemuse.Array, itemuse.Index, itemGetterKind);
        chain.IsArrayItem = false;
        chain.End();
        return result;
    }

    // no chains //
    // 1: a_[x]_
    chain.IsArrayItem = true;
    chain.IsLastMember = true;
    result = chain.EmitGetItem(itemuse.Array, itemuse.Index, itemGetterKind);
    chain.IsArrayItem = false;
    return result;
}
/// <summary>
/// Emits code to load a reference to a variable onto an evaluation stack. Supports operators chaining.
/// Only the non-chained case (1), the nested-array case (2) and the last-member
/// cases (6, 7) can occur here; the others are asserted unreachable.
/// </summary>
/// <param name="node">Instance.</param>
/// <param name="codeGenerator">Code generator holding the IL emitter and chain builder.</param>
/// <returns><c>PhpTypeCode.PhpReference</c> (or whatever <c>EmitGetItemRef</c> reports in case 1).</returns>
private PhpTypeCode EmitNodeReadRef(ItemUse/*!*/node, CodeGenerator codeGenerator)
{
    ChainBuilder chain = codeGenerator.ChainBuilder;

    // Improvement: removed an unused `LocalBuilder local = codeGenerator.IL.DeclareLocal(typeof(object));`
    // declaration — it was never referenced and only allocated a dead local slot
    // in every generated method.

    // Case 3: a_[x]_[x] never reached
    Debug.Assert(chain.IsArrayItem == false, "ReadRef access shouldn't be set to node.Array subchain nodes");

    // Case 4,5 never reached
    // 4: a[x]->...
    // 5: ...->a[]->...
    Debug.Assert(chain.IsMember == false);

    // 1, 2, 6, 7
    if (node.IsMemberOf != null)
    {
        // last node of the field chain //
        // 6 , 7: ...->a[]_[]_ , ...->a_[]_
        chain.Create();
        chain.Begin();
        if (node.IsMemberOf is FunctionCall)
            chain.LoadAddressOfFunctionReturnValue = true;
        chain.SetObjectForLazyEmit(node);

        // let's emit the node.Array subchain followed by the GetArrayItemRef:
        chain.IsArrayItem = true;
        chain.IsLastMember = false;
        chain.Lengthen(); // for own []
        chain.EmitGetArrayItemRef(node.Array, node.Index);
        chain.IsArrayItem = false;
        chain.EndRef();
        return PhpTypeCode.PhpReference;
    }

    // 1, 2
    if (node.Array is ItemUse || node.Array is DirectStFldUse || node.Array is IndirectStFldUse)
    {
        // we are at the beginning of the field chain //
        // 2: a[]_[]_
        chain.Create();
        chain.Begin();
        chain.IsArrayItem = true;
        chain.IsLastMember = true;
        chain.Lengthen();
        chain.EmitGetArrayItemRef(node.Array, node.Index);
        chain.IsArrayItem = false;
        chain.EndRef();
        return PhpTypeCode.PhpReference;
    }

    // no chains //
    // 1: a_[x]_
    return chain.EmitGetItemRef((SimpleVarUse)node.Array, node.Index);
}
/// <summary>
/// Emits code to load <see cref="PhpRuntimeChain"/> onto an evaluation stack. Supports operators chaining.
/// Instead of reading a concrete value, each step appends an item accessor to a
/// runtime chain object which is evaluated later at run time.
/// </summary>
/// <param name="node">Instance.</param>
/// <param name="codeGenerator">Code generator holding the IL emitter and chain builder.</param>
/// <returns>Always <c>PhpTypeCode.PhpRuntimeChain</c>.</returns>
private PhpTypeCode EmitNodeReadUnknown(ItemUse/*!*/node, CodeGenerator codeGenerator)
{
    ChainBuilder chain = codeGenerator.ChainBuilder;
    PhpTypeCode result = PhpTypeCode.PhpRuntimeChain;

    if (chain.IsArrayItem)
    {
        // 3: a_[x]_[x]
        chain.Lengthen(); // for []
        chain.EmitRTChainAddItem(node);
        return result;
    }

    // 1,2,4,5,6,7
    if (chain.IsMember)
    {
        // 4, 5
        if (node.IsMemberOf != null)
        {
            // 5: ...->a[]->...

            // Lengthen chain for isMemberOf
            chain.Lengthen(); // for hop over ->
            PhpTypeCode res = node.IsMemberOf.Emit(codeGenerator);
            // If the prefix did not itself produce a runtime chain,
            // box it and wrap it into a fresh one.
            if (res != PhpTypeCode.PhpRuntimeChain)
            {
                codeGenerator.EmitBoxing(res);
                chain.EmitCreateRTChain();
            }

            // Lengthen chain for own []
            chain.Lengthen();
            chain.IsArrayItem = true;
            chain.IsLastMember = false;
            chain.EmitRTChainAddItem(node);
            chain.IsArrayItem = false;
            return result;
        }

        // 4: a[x]->...

        // Lengthen chain for itself
        chain.Lengthen(); // for own []
        chain.IsArrayItem = true;
        chain.IsLastMember = true;
        chain.EmitRTChainAddItem(node);
        chain.IsArrayItem = false;
        return result;
    }

    // 1, 2, 6, 7
    if (node.IsMemberOf != null)
    {
        // 6 , 7: ...->a[]_[]_ , ...->a_[]_

        // Preserve the quiet-read flag across Create()/Begin(), which reset it.
        bool quiet_read = chain.QuietRead;
        chain.Create();
        chain.Begin();
        chain.QuietRead = quiet_read;
        chain.Lengthen(); // for hop over ->
        PhpTypeCode res = node.IsMemberOf.Emit(codeGenerator);
        if (res != PhpTypeCode.PhpRuntimeChain)
        {
            codeGenerator.EmitBoxing(res);
            chain.EmitCreateRTChain();
        }
        chain.IsArrayItem = true;
        chain.IsLastMember = false;
        chain.EmitRTChainAddItem(node);
        chain.IsArrayItem = false;
        chain.End();
        return result;
    }

    // 1, 2
    if (node.Array is ItemUse || node.Array is DirectStFldUse || node.Array is IndirectStFldUse /* ??? */)
    {
        // 2: a[]_[]_
        bool quiet_read = chain.QuietRead;
        chain.Create();
        chain.Begin();
        chain.QuietRead = quiet_read;
        chain.IsArrayItem = true;
        chain.IsLastMember = true;
        chain.EmitRTChainAddItem(node);
        chain.IsArrayItem = false;
        chain.End();
        return result;
    }

    // 1: a_[x]_
    chain.IsArrayItem = true;
    chain.IsLastMember = true;
    chain.EmitRTChainAddItem(node);
    chain.IsArrayItem = false;
    return result;
}
/// <summary>
/// Emits code to prepare an evaluation stack for storing a value into a variable.
/// Supports operators chaining. Store is finished by calling <see cref="EmitAssign"/>.
/// For the cases that return <c>PhpTypeCode.Unknown</c> the destination array/index
/// is pushed here and the actual store is emitted later by EmitAssign.
/// </summary>
/// <param name="node">Instance.</param>
/// <param name="codeGenerator">Code generator holding the IL emitter and chain builder.</param>
/// <returns>Type code of the prepared chain state (often <c>Unknown</c>, meaning "finish in EmitAssign").</returns>
private PhpTypeCode EmitNodeWrite(ItemUse/*!*/node, CodeGenerator codeGenerator)
{
    ChainBuilder chain = codeGenerator.ChainBuilder;

    if (chain.IsArrayItem)
    {
        // 3: a_[x]_[v]
        Debug.Assert(node.IsMemberOf == null);
        return chain.EmitEnsureItem(node.Array, node.Index, true);
    }

    // 1, 2, 4, 5, 6, 7
    if (chain.IsMember)
    {
        // 4, 5
        if (node.IsMemberOf != null)
        {
            // 5: ...->a[]->...

            // Store isMemberOf for lazy emit
            chain.SetObjectForLazyEmit(node);
            chain.IsArrayItem = true;
            chain.IsLastMember = false;
        }
        else
        {
            // 4: a_[x]_->c->..., a[x]_[x]_->c->...
            chain.IsArrayItem = true;
            chain.IsLastMember = true;
        }
        PhpTypeCode result = chain.EmitEnsureItem(node.Array, node.Index, false);
        chain.IsArrayItem = false;
        return result;
    }

    // 1, 2, 6, 7
    if (node.IsMemberOf != null)
    {
        // 6, 7: ...->a[x]_[x]_
        chain.Create();
        chain.Begin();

        // Store isMemberOf for lazy emit
        chain.SetObjectForLazyEmit(node);
        chain.IsArrayItem = true;
        chain.IsLastMember = false;
        chain.Lengthen(); // for own []
        node.Array.Emit(codeGenerator);
        // indexTypeCode is a field consumed later by EmitAssign.
        indexTypeCode = codeGenerator.EmitArrayKey(chain, node.Index);

        // Note that EmitAssign will finish the work (SetArrayItem or PhpArray.Add)
        return PhpTypeCode.Unknown;
    }

    // 1, 2
    Debug.Assert(node.IsMemberOf == null);
    if (node.Array is ItemUse || node.Array is DirectStFldUse || node.Array is IndirectStFldUse /* ??? */)
    {
        // 2: a[]_[]_
        chain.Create();
        chain.Begin();
        chain.IsArrayItem = true;
        chain.IsLastMember = true;
        node.Array.Emit(codeGenerator);
        indexTypeCode = codeGenerator.EmitArrayKey(chain, node.Index);

        // Note that further work will be done by EmitAssign (SetArrayItem or PhpArray.Add)
        return PhpTypeCode.Unknown;
    }

    // 1: a_[x]_
    // Do nothing now, let the work be done in EmitAssign()
    return PhpTypeCode.Unknown;
}
/// <summary>
/// Emits code to prepare an evaluation stack for storing a reference into a variable.
/// Supports operators chaining. Store is finished by calling <see cref="EmitAssign"/>.
/// Only cases 1, 2, 6 and 7 can occur here; 3, 4 and 5 are asserted unreachable.
/// </summary>
/// <param name="node">Instance.</param>
/// <param name="codeGenerator">Code generator holding the IL emitter and chain builder.</param>
/// <returns>Always <c>PhpTypeCode.Unknown</c> — EmitAssign completes the store.</returns>
private PhpTypeCode EmitNodeWriteRef(ItemUse/*!*/node, CodeGenerator codeGenerator)
{
    ChainBuilder chain = codeGenerator.ChainBuilder;

    // Case 3: a_[x]_[x] never reached
    Debug.Assert(chain.IsArrayItem == false);

    // Case 4,5 never reached
    // 4: a[x]->...
    // 5: ...->a[]->...
    Debug.Assert(chain.IsMember == false);

    // 1, 2, 6, 7
    if (node.IsMemberOf != null)
    {
        // 6, 7: ...->a[x]_[x]_
        chain.Create();
        chain.Begin();

        // Store isMemberOf for lazy emit
        chain.SetObjectForLazyEmit(node);
        chain.IsArrayItem = true;
        chain.IsLastMember = false;
        chain.Lengthen(); // for own []
        node.Array.Emit(codeGenerator);
        // indexTypeCode is a field consumed later by EmitAssign.
        indexTypeCode = codeGenerator.EmitArrayKey(chain, node.Index);

        // Note that EmitAssign will finish the work (SetArrayItem or PhpArray.Add)
    }
    else
    {
        // 1, 2
        Debug.Assert(node.IsMemberOf == null);
        if (node.Array is ItemUse || node.Array is DirectStFldUse || node.Array is IndirectStFldUse /* ??? */)
        {
            // 2: a[]_[]_
            chain.Create();
            chain.Begin();
            chain.IsArrayItem = true;
            chain.IsLastMember = true;
            node.Array.Emit(codeGenerator);
            indexTypeCode = codeGenerator.EmitArrayKey(chain, node.Index);

            // Note that further work will be done by EmitAssign (SetArrayItem or PhpArray.Add)
        }

        // 1: a_[x]_
        // Do nothing now, let the work be done in EmitAssign()
        // Note further work will be done by EmitAssign (either SetItem or SetItemRef);
    }
    return PhpTypeCode.Unknown;
}
}
#endregion
#region StringLiteralDereferenceEx
/// <summary>
/// Compiler for string-literal dereference expressions, e.g. <c>"hello"[1]</c>.
/// Analyze() constant-folds the access when both the string and the index are
/// literals; Emit() falls back to a runtime GetItem call otherwise.
/// </summary>
[NodeCompiler(typeof(StringLiteralDereferenceEx))]
sealed class StringLiteralDereferenceExCompiler : ExpressionCompiler<StringLiteralDereferenceEx>
{
    public override Evaluation Analyze(StringLiteralDereferenceEx node, Analyzer analyzer, ExInfoFromParent info)
    {
        access = info.Access;

        // Analyze and literalize both operands first so constant folding can apply.
        node.StringExpr = node.StringExpr.Analyze(analyzer, info).Literalize();
        node.KeyExpr = node.KeyExpr.Analyze(analyzer, info).Literalize();

        IntLiteral @int = node.KeyExpr as IntLiteral;
        if (@int != null)
        {
            // NOTE(review): assumes the literal value fits into Int32 — TODO confirm
            // behavior for indexes larger than int.MaxValue.
            int key = (int)@int.Value;
            if (key >= 0)
            {
                StringLiteral str;
                BinaryStringLiteral bstr;
                if ((str = node.StringExpr as StringLiteral) != null)
                {
                    // Unicode string literal: fold to a one-character string.
                    string strValue = (string)str.Value;
                    if (key < strValue.Length)
                        return new Evaluation(node, strValue[key].ToString());
                    else
                    { }// report invalid index
                }
                else if ((bstr = node.StringExpr as BinaryStringLiteral) != null)
                {
                    // Binary string literal: fold to a one-byte PhpBytes.
                    var bytesValue = (PhpBytes)bstr.GetValue();
                    if (key < bytesValue.Length)
                        return new Evaluation(node, new PhpBytes(new byte[] { bytesValue[key] }));
                    else
                    { }// report invalid index
                }
            }
            else
            {
                // report invalid index
            }
        }

        // Not foldable — keep the node for runtime evaluation.
        return new Evaluation(node);
    }

    public override PhpTypeCode Emit(StringLiteralDereferenceEx node, CodeGenerator codeGenerator)
    {
        // Emit a simple one-step chain: GetItem(stringExpr, keyExpr).
        codeGenerator.ChainBuilder.Create();
        var typeCode = codeGenerator.ChainBuilder.EmitGetItem(node.StringExpr, node.KeyExpr, Operators.GetItemKinds.Get);
        codeGenerator.ChainBuilder.End();
        return typeCode;
    }
}
#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
namespace System.Reflection.Emit
{
using System.Runtime.InteropServices;
using System;
using CultureInfo = System.Globalization.CultureInfo;
using System.Reflection;
using System.Security.Permissions;
using System.Diagnostics.Contracts;
/// <summary>
/// Defines and represents a field of a dynamically created type. Created via
/// TypeBuilder.DefineField; most FieldInfo reflection members throw
/// NotSupportedException because the type is not yet baked.
/// </summary>
[HostProtection(MayLeakOnAbort = true)]
[ClassInterface(ClassInterfaceType.None)]
[ComDefaultInterface(typeof(_FieldBuilder))]
[System.Runtime.InteropServices.ComVisible(true)]
public sealed class FieldBuilder : FieldInfo, _FieldBuilder
{
    #region Private Data Members
    private int m_fieldTok;              // metadata token of the defined field
    private FieldToken m_tkField;        // token wrapper exposed via GetToken()
    private TypeBuilder m_typeBuilder;   // declaring (unbaked) type
    private String m_fieldName;
    private FieldAttributes m_Attributes;
    private Type m_fieldType;
    #endregion

    #region Constructor
    // Validates the arguments, builds the field signature and registers the
    // field with the module's metadata.
    [System.Security.SecurityCritical]  // auto-generated
    internal FieldBuilder(TypeBuilder typeBuilder, String fieldName, Type type,
        Type[] requiredCustomModifiers, Type[] optionalCustomModifiers, FieldAttributes attributes)
    {
        if (fieldName == null)
            throw new ArgumentNullException("fieldName");

        if (fieldName.Length == 0)
            throw new ArgumentException(Environment.GetResourceString("Argument_EmptyName"), "fieldName");

        if (fieldName[0] == '\0')
            throw new ArgumentException(Environment.GetResourceString("Argument_IllegalName"), "fieldName");

        if (type == null)
            throw new ArgumentNullException("type");

        if (type == typeof(void))
            throw new ArgumentException(Environment.GetResourceString("Argument_BadFieldType"));
        Contract.EndContractBlock();

        m_fieldName = fieldName;
        m_typeBuilder = typeBuilder;
        m_fieldType = type;
        // Reserved attribute bits are owned by the runtime and must be stripped.
        m_Attributes = attributes & ~FieldAttributes.ReservedMask;

        SignatureHelper sigHelp = SignatureHelper.GetFieldSigHelper(m_typeBuilder.Module);
        sigHelp.AddArgument(type, requiredCustomModifiers, optionalCustomModifiers);

        int sigLength;
        byte[] signature = sigHelp.InternalGetSignature(out sigLength);

        m_fieldTok = TypeBuilder.DefineField(m_typeBuilder.GetModuleBuilder().GetNativeHandle(),
            typeBuilder.TypeToken.Token, fieldName, signature, sigLength, m_Attributes);

        m_tkField = new FieldToken(m_fieldTok, type);
    }
    #endregion

    #region Internal Members
    // Writes RVA-based field data (used for fields with initial data, e.g. arrays).
    [System.Security.SecurityCritical]  // auto-generated
    internal void SetData(byte[] data, int size)
    {
        ModuleBuilder.SetFieldRVAContent(m_typeBuilder.GetModuleBuilder().GetNativeHandle(), m_tkField.Token, data, size);
    }

    internal TypeBuilder GetTypeBuilder() { return m_typeBuilder; }
    #endregion

    #region MemberInfo Overrides
    internal int MetadataTokenInternal
    {
        get { return m_fieldTok; }
    }

    public override Module Module
    {
        get { return m_typeBuilder.Module; }
    }

    public override String Name
    {
        get {return m_fieldName; }
    }

    public override Type DeclaringType
    {
        get
        {
            // Fields of the hidden global type report no declaring type.
            if (m_typeBuilder.m_isHiddenGlobalType == true)
                return null;

            return m_typeBuilder;
        }
    }

    public override Type ReflectedType
    {
        get
        {
            if (m_typeBuilder.m_isHiddenGlobalType == true)
                return null;

            return m_typeBuilder;
        }
    }
    #endregion

    #region FieldInfo Overrides
    public override Type FieldType
    {
        get { return m_fieldType; }
    }

    public override Object GetValue(Object obj)
    {
        // NOTE!!  If this is implemented, make sure that this throws
        // a NotSupportedException for Save-only dynamic assemblies.
        // Otherwise, it could cause the .cctor to be executed.

        throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
    }

    public override void SetValue(Object obj,Object val,BindingFlags invokeAttr,Binder binder,CultureInfo culture)
    {
        // NOTE!!  If this is implemented, make sure that this throws
        // a NotSupportedException for Save-only dynamic assemblies.
        // Otherwise, it could cause the .cctor to be executed.

        throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
    }

    public override RuntimeFieldHandle FieldHandle
    {
        get { throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule")); }
    }

    public override FieldAttributes Attributes
    {
        get { return m_Attributes; }
    }
    #endregion

    #region ICustomAttributeProvider Implementation
    // Custom attributes cannot be read back from an unbaked type.
    public override Object[] GetCustomAttributes(bool inherit)
    {
        throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
    }

    public override Object[] GetCustomAttributes(Type attributeType, bool inherit)
    {
        throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
    }

    public override bool IsDefined(Type attributeType, bool inherit)
    {
        throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
    }
    #endregion

    #region Public Members
    /// <summary>Returns the metadata token for this field.</summary>
    public FieldToken GetToken()
    {
        return m_tkField;
    }

#if FEATURE_CORECLR
    [System.Security.SecurityCritical] // auto-generated
#else
    [System.Security.SecuritySafeCritical]
#endif
    /// <summary>Sets the explicit layout offset of the field; the type must not be created yet.</summary>
    public void SetOffset(int iOffset)
    {
        m_typeBuilder.ThrowIfCreated();

        TypeBuilder.SetFieldLayoutOffset(m_typeBuilder.GetModuleBuilder().GetNativeHandle(), GetToken().Token, iOffset);
    }

    [System.Security.SecuritySafeCritical]  // auto-generated
    [Obsolete("An alternate API is available: Emit the MarshalAs custom attribute instead. http://go.microsoft.com/fwlink/?linkid=14202")]
    public void SetMarshal(UnmanagedMarshal unmanagedMarshal)
    {
        if (unmanagedMarshal == null)
            throw new ArgumentNullException("unmanagedMarshal");
        Contract.EndContractBlock();

        m_typeBuilder.ThrowIfCreated();

        byte[] ubMarshal = unmanagedMarshal.InternalGetBytes();

        TypeBuilder.SetFieldMarshal(m_typeBuilder.GetModuleBuilder().GetNativeHandle(), GetToken().Token, ubMarshal, ubMarshal.Length);
    }

    /// <summary>Sets the default (constant) value of the field.</summary>
    [System.Security.SecuritySafeCritical]  // auto-generated
    public void SetConstant(Object defaultValue)
    {
        m_typeBuilder.ThrowIfCreated();

        TypeBuilder.SetConstantValue(m_typeBuilder.GetModuleBuilder(), GetToken().Token, m_fieldType, defaultValue);
    }

#if FEATURE_CORECLR
    [System.Security.SecurityCritical] // auto-generated
#else
    [System.Security.SecuritySafeCritical]
#endif
    [System.Runtime.InteropServices.ComVisible(true)]
    /// <summary>Attaches a custom attribute, given its constructor and an encoded blob.</summary>
    public void SetCustomAttribute(ConstructorInfo con, byte[] binaryAttribute)
    {
        if (con == null)
            throw new ArgumentNullException("con");

        if (binaryAttribute == null)
            throw new ArgumentNullException("binaryAttribute");
        Contract.EndContractBlock();

        ModuleBuilder module = m_typeBuilder.Module as ModuleBuilder;

        m_typeBuilder.ThrowIfCreated();

        TypeBuilder.DefineCustomAttribute(module,
            m_tkField.Token, module.GetConstructorToken(con).Token, binaryAttribute, false, false);
    }

    /// <summary>Attaches a custom attribute using a CustomAttributeBuilder.</summary>
    [System.Security.SecuritySafeCritical]  // auto-generated
    public void SetCustomAttribute(CustomAttributeBuilder customBuilder)
    {
        if (customBuilder == null)
            throw new ArgumentNullException("customBuilder");
        Contract.EndContractBlock();

        m_typeBuilder.ThrowIfCreated();

        ModuleBuilder module = m_typeBuilder.Module as ModuleBuilder;

        customBuilder.CreateCustomAttribute(module, m_tkField.Token);
    }
    #endregion

#if !FEATURE_CORECLR
    // IDispatch plumbing for the COM interface; never callable from managed code.
    void _FieldBuilder.GetTypeInfoCount(out uint pcTInfo)
    {
        throw new NotImplementedException();
    }

    void _FieldBuilder.GetTypeInfo(uint iTInfo, uint lcid, IntPtr ppTInfo)
    {
        throw new NotImplementedException();
    }

    void _FieldBuilder.GetIDsOfNames([In] ref Guid riid, IntPtr rgszNames, uint cNames, uint lcid, IntPtr rgDispId)
    {
        throw new NotImplementedException();
    }

    void _FieldBuilder.Invoke(uint dispIdMember, [In] ref Guid riid, uint lcid, short wFlags, IntPtr pDispParams, IntPtr pVarResult, IntPtr pExcepInfo, IntPtr puArgErr)
    {
        throw new NotImplementedException();
    }
#endif
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Sep.Git.Tfs.Commands;
using Sep.Git.Tfs.Core;
using Sep.Git.Tfs.Core.TfsInterop;
using Sep.Git.Tfs.Util;
using StructureMap;
namespace Sep.Git.Tfs.VsFake
{
/// <summary>Simple in-memory stand-in for a TFS branch (implements IBranchObject).</summary>
public class MockBranchObject : IBranchObject
{
    public string Path { get; set; }        // server path of the branch
    public string ParentPath { get; set; }  // server path of the parent branch (null for roots)
    public bool IsRoot { get; set; }        // true when the branch has no parent
}
public class TfsHelper : ITfsHelper
{
#region misc/null
// Dependencies used to fabricate scripted changesets and workspaces.
IContainer _container;
TextWriter _stdout;
Script _script;                                     // scripted TFS history driving this fake
FakeVersionControlServer _versionControlServer;

/// <summary>Creates the fake TFS helper over a scripted history.</summary>
public TfsHelper(IContainer container, TextWriter stdout, Script script)
{
    _container = container;
    _stdout = stdout;
    _script = script;
    _versionControlServer = new FakeVersionControlServer(_script);
}
// Identifies this implementation as the fake in diagnostics.
public string TfsClientLibraryVersion { get { return "(FAKE)"; } }

public string Url { get; set; }
public string Username { get; set; }
public string Password { get; set; }

// Authentication and path resolution are no-ops for the fake.
public void EnsureAuthenticated() {}
public void SetPathResolver() {}

// The fake never shows UI.
public bool CanShowCheckinDialog { get { return false; } }

public long ShowCheckinDialog(IWorkspace workspace, IPendingChange[] pendingChanges, IEnumerable<IWorkItemCheckedInfo> checkedInfos, string checkinComment)
{
    throw new NotImplementedException();
}

/// <summary>Returns a canned identity for the well-known test user; a null identity otherwise.</summary>
public IIdentity GetIdentity(string username)
{
    if (username == "vtccds_cp")
        return new FakeIdentity { DisplayName = username, MailAddress = "b8d46dada4dd62d2ab98a2bda7310285c42e46f6qvabajY2" };

    return new NullIdentity();
}
#endregion
#region read changesets
/// <summary>Builds a changeset wrapper for the newest scripted changeset, or null when the script is empty (via Try).</summary>
public ITfsChangeset GetLatestChangeset(IGitTfsRemote remote)
{
    return _script.Changesets.LastOrDefault().Try(x => BuildTfsChangeset(x, remote));
}
/// <summary>Returns the id of the newest scripted changeset.</summary>
// NOTE(review): unlike GetLatestChangeset, this dereferences LastOrDefault()
// without a null guard and will NRE when the script has no changesets — confirm
// whether an empty script is possible for callers.
public int GetLatestChangesetId(IGitTfsRemote remote)
{
    return _script.Changesets.LastOrDefault().Id;
}
/// <summary>
/// Streams scripted changesets with id >= <paramref name="startVersion"/>.
/// When the script contains branch changesets (or no merge changesets), results are
/// additionally filtered to changes whose path overlaps <paramref name="path"/>
/// (either side being a prefix of the other, case-insensitively).
/// <paramref name="lastVersion"/> and <paramref name="byLots"/> are ignored by the fake.
/// </summary>
public IEnumerable<ITfsChangeset> GetChangesets(string path, long startVersion, IGitTfsRemote remote, long lastVersion = -1, bool byLots = false)
{
    // Merge-only scripts skip the per-branch path filter.
    if (!_script.Changesets.Any(c => c.IsBranchChangeset) && _script.Changesets.Any(c => c.IsMergeChangeset))
        return _script.Changesets.Where(x => x.Id >= startVersion).Select(x => BuildTfsChangeset(x, remote));

    var branchPath = path + "/";
    return _script.Changesets
        .Where(x => x.Id >= startVersion && x.Changes.Any(c => c.RepositoryPath.IndexOf(branchPath, StringComparison.CurrentCultureIgnoreCase) == 0 || branchPath.IndexOf(c.RepositoryPath, StringComparison.CurrentCultureIgnoreCase) == 0))
        .Select(x => BuildTfsChangeset(x, remote));
}
/// <summary>
/// Finds the changeset that was merged into <paramref name="path"/> strictly
/// before <paramref name="firstChangeset"/>, returning its pre-merge id, or -1
/// when no such scripted merge exists. <paramref name="remote"/> is unused by the fake.
/// </summary>
public int FindMergeChangesetParent(string path, long firstChangeset, GitTfsRemote remote)
{
    var candidate = _script.Changesets.FirstOrDefault(
        c => c.IsMergeChangeset
          && c.MergeChangesetDatas.MergeIntoBranch == path
          && c.MergeChangesetDatas.BeforeMergeChangesetId < firstChangeset);

    return candidate != null ? candidate.MergeChangesetDatas.BeforeMergeChangesetId : -1;
}
/// <summary>Wraps a scripted changeset into a TfsChangeset resolved through the IoC container.</summary>
private ITfsChangeset BuildTfsChangeset(ScriptedChangeset changeset, IGitTfsRemote remote)
{
    var tfsChangeset = _container.With<ITfsHelper>(this).With<IChangeset>(new Changeset(_versionControlServer, changeset)).GetInstance<TfsChangeset>();
    tfsChangeset.Summary = new TfsChangesetInfo { ChangesetId = changeset.Id, Remote = remote };
    return tfsChangeset;
}
/// <summary>Adapts a ScriptedChangeset to the IChangeset interface consumed by git-tfs.</summary>
class Changeset : IChangeset
{
    private IVersionControlServer _versionControlServer;
    private ScriptedChangeset _changeset;

    public Changeset(IVersionControlServer versionControlServer, ScriptedChangeset changeset)
    {
        _versionControlServer = versionControlServer;
        _changeset = changeset;
    }

    // Materializes a fresh Change adapter array on every access.
    public IChange[] Changes
    {
        get { return _changeset.Changes.Select(x => new Change(_versionControlServer, _changeset, x)).ToArray(); }
    }

    // Falls back to a placeholder when the script omits the committer.
    public string Committer
    {
        get { return _changeset.Committer ?? "todo"; }
    }

    public DateTime CreationDate
    {
        get { return _changeset.CheckinDate; }
    }

    // Normalizes scripted LF line endings to the CRLF TFS would deliver.
    // NOTE(review): throws if the scripted Comment is null — confirm comments are always set.
    public string Comment
    {
        get { return _changeset.Comment.Replace("\n", "\r\n"); }
    }

    public int ChangesetId
    {
        get { return _changeset.Id; }
    }

    public IVersionControlServer VersionControlServer
    {
        get { throw new NotImplementedException(); }
    }

    // Delegates the actual fetch of file contents to the workspace.
    public void Get(ITfsWorkspace workspace, IEnumerable<IChange> changes, Action<Exception> ignorableErrorHandler)
    {
        workspace.Get(this.ChangesetId, changes);
    }
}
/// <summary>
/// Adapts a ScriptedChange to both IChange and IItem (the fake item is its own change).
/// </summary>
class Change : IChange, IItem
{
    IVersionControlServer _versionControlServer;
    ScriptedChangeset _changeset;
    ScriptedChange _change;

    public Change(IVersionControlServer versionControlServer, ScriptedChangeset changeset, ScriptedChange change)
    {
        _versionControlServer = versionControlServer;
        _changeset = changeset;
        _change = change;
    }

    TfsChangeType IChange.ChangeType
    {
        get { return _change.ChangeType; }
    }

    // The change acts as its own item.
    IItem IChange.Item
    {
        get { return this; }
    }

    IVersionControlServer IItem.VersionControlServer
    {
        get { return _versionControlServer; }
    }

    int IItem.ChangesetId
    {
        get { return _changeset.Id; }
    }

    string IItem.ServerItem
    {
        get { return _change.RepositoryPath; }
    }

    // The fake never models deletions at the item level.
    int IItem.DeletionId
    {
        get { return 0; }
    }

    TfsItemType IItem.ItemType
    {
        get { return _change.ItemType; }
    }

    int IItem.ItemId
    {
        get { return _change.ItemId.Value; }
    }

    // Computes the length by materializing the content into a temp file.
    long IItem.ContentLength
    {
        get
        {
            using (var temp = ((IItem)this).DownloadFile())
                return new FileInfo(temp).Length;
        }
    }

    // Writes the scripted content into a temporary file the caller must dispose.
    TemporaryFile IItem.DownloadFile()
    {
        var temp = new TemporaryFile();
        using(var stream = File.Create(temp))
        using(var writer = new BinaryWriter(stream))
            writer.Write(_change.Content);
        return temp;
    }
}
#endregion
#region workspaces
/// <summary>
/// Runs <paramref name="action"/> against a fake TFS workspace rooted at
/// <paramref name="localDirectory"/>. The <paramref name="mappings"/> are
/// irrelevant for the fake, so this simply reuses the mapping-less overload
/// (the original duplicated that overload's body verbatim).
/// </summary>
public void WithWorkspace(string localDirectory, IGitTfsRemote remote, IEnumerable<Tuple<string, string>> mappings, TfsChangesetInfo versionToFetch, Action<ITfsWorkspace> action)
{
    // Improvement: delegate instead of duplicating the workspace wiring.
    WithWorkspace(localDirectory, remote, versionToFetch, action);
}
/// <summary>
/// Runs <paramref name="action"/> against a fake TFS workspace rooted at
/// <paramref name="directory"/>, wiring a TfsWorkspace instance through the IoC container.
/// </summary>
public void WithWorkspace(string directory, IGitTfsRemote remote, TfsChangesetInfo versionToFetch, Action<ITfsWorkspace> action)
{
    Trace.WriteLine("Setting up a TFS workspace at " + directory);
    var fakeWorkspace = new FakeWorkspace(directory, remote.TfsRepositoryPath);
    var workspace = _container.With("localDirectory").EqualTo(directory)
        .With("remote").EqualTo(remote)
        .With("contextVersion").EqualTo(versionToFetch)
        .With("workspace").EqualTo(fakeWorkspace)
        .With("tfsHelper").EqualTo(this)
        .GetInstance<TfsWorkspace>();
    action(workspace);
}
/// <summary>
/// Minimal IWorkspace fake: only supports materializing scripted file contents
/// into a local directory; every pending-change/checkin operation throws.
/// </summary>
class FakeWorkspace : IWorkspace
{
    string _directory;       // local root the files are written under
    string _repositoryRoot;  // TFS server path the local root maps to

    public FakeWorkspace(string directory, string repositoryRoot)
    {
        _directory = directory;
        _repositoryRoot = repositoryRoot;
    }

    public void GetSpecificVersion(IChangeset changeset)
    {
        GetSpecificVersion(changeset.ChangesetId, changeset.Changes);
    }

    // Writes each file change under _directory, stripping the repository root
    // from the server path. Folder changes are ignored.
    public void GetSpecificVersion(int changeset, IEnumerable<IChange> changes)
    {
        // NOTE(review): path matching is done by lower-casing both sides —
        // assumes server paths differ only by case, not by culture-sensitive characters.
        var repositoryRoot = _repositoryRoot.ToLower();
        if(!repositoryRoot.EndsWith("/")) repositoryRoot += "/";
        foreach (var change in changes)
        {
            if (change.Item.ItemType == TfsItemType.File)
            {
                var outPath = Path.Combine(_directory, change.Item.ServerItem.ToLower().Replace(repositoryRoot, ""));
                var outDir = Path.GetDirectoryName(outPath);
                if (!Directory.Exists(outDir)) Directory.CreateDirectory(outDir);
                using (var download = change.Item.DownloadFile())
                    File.WriteAllText(outPath, File.ReadAllText(download.Path));
            }
        }
    }

    #region unimplemented
    // Everything below is unsupported by the fake and throws.
    public void Merge(string sourceTfsPath, string tfsRepositoryPath)
    {
        throw new NotImplementedException();
    }

    public IPendingChange[] GetPendingChanges()
    {
        throw new NotImplementedException();
    }

    public ICheckinEvaluationResult EvaluateCheckin(TfsCheckinEvaluationOptions options, IPendingChange[] allChanges, IPendingChange[] changes, string comment, ICheckinNote checkinNote, IEnumerable<IWorkItemCheckinInfo> workItemChanges)
    {
        throw new NotImplementedException();
    }

    public ICheckinEvaluationResult EvaluateCheckin(TfsCheckinEvaluationOptions options, IPendingChange[] allChanges, IPendingChange[] changes, string comment, string authors, ICheckinNote checkinNote, IEnumerable<IWorkItemCheckinInfo> workItemChanges)
    {
        throw new NotImplementedException();
    }

    public void Shelve(IShelveset shelveset, IPendingChange[] changes, TfsShelvingOptions options)
    {
        throw new NotImplementedException();
    }

    public int Checkin(IPendingChange[] changes, string comment, string author, ICheckinNote checkinNote, IEnumerable<IWorkItemCheckinInfo> workItemChanges, TfsPolicyOverrideInfo policyOverrideInfo, bool overrideGatedCheckIn)
    {
        throw new NotImplementedException();
    }

    public int PendAdd(string path)
    {
        throw new NotImplementedException();
    }

    public int PendEdit(string path)
    {
        throw new NotImplementedException();
    }

    public int PendDelete(string path)
    {
        throw new NotImplementedException();
    }

    public int PendRename(string pathFrom, string pathTo)
    {
        throw new NotImplementedException();
    }

    public void ForceGetFile(string path, int changeset)
    {
        throw new NotImplementedException();
    }

    public void GetSpecificVersion(int changeset)
    {
        throw new NotImplementedException();
    }

    public string GetLocalItemForServerItem(string serverItem)
    {
        throw new NotImplementedException();
    }

    public string GetServerItemForLocalItem(string localItem)
    {
        throw new NotImplementedException();
    }

    public string OwnerName
    {
        get { throw new NotImplementedException(); }
    }
    #endregion
}
/// <summary>No-op: the fake allocates no real TFS workspaces to clean up.</summary>
public void CleanupWorkspaces(string workingDirectory)
{
}
/// <summary>
/// Determines whether <paramref name="path"/> currently exists in the scripted
/// history: the most recent change touching the exact path decides, and a
/// trailing delete means the item is gone. False when the path never appears.
/// </summary>
public bool IsExistingInTfs(string path)
{
    var lastChange = _script.Changesets
        .SelectMany(changeset => changeset.Changes)
        .LastOrDefault(change => change.RepositoryPath == path);

    return lastChange != null && !lastChange.ChangeType.IncludesOneOf(TfsChangeType.Delete);
}
// The fake always supports branch queries.
public bool CanGetBranchInformation { get { return true; } }

/// <summary>Wraps the scripted changeset with the given id; throws if no such changeset is scripted.</summary>
public IChangeset GetChangeset(int changesetId)
{
    return new Changeset(_versionControlServer, _script.Changesets.First(c => c.Id == changesetId));
}
/// <summary>
/// Walks the scripted branch changesets from <paramref name="tfsPathBranchToCreate"/>
/// up through its parents, collecting a RootBranch per hop (oldest first).
/// Falls back to a single sentinel entry with root changeset -1 when the branch
/// was never scripted. <paramref name="lastChangesetIdToCheck"/> and
/// <paramref name="tfsPathParentBranch"/> are ignored by the fake.
/// </summary>
public IList<RootBranch> GetRootChangesetForBranch(string tfsPathBranchToCreate, int lastChangesetIdToCheck = -1, string tfsPathParentBranch = null)
{
    var firstChangesetOfBranch = _script.Changesets.FirstOrDefault(c => c.IsBranchChangeset && c.BranchChangesetDatas.BranchPath == tfsPathBranchToCreate);
    var rootBranches = new List<RootBranch>();
    var branchChangeset = _script.Changesets.Where(c => c.IsBranchChangeset).ToList();
    if (firstChangesetOfBranch != null)
    {
        // Follow ParentBranch links until no parent branch changeset is found.
        do
        {
            var branch = new RootBranch(firstChangesetOfBranch.BranchChangesetDatas.RootChangesetId,
                firstChangesetOfBranch.BranchChangesetDatas.BranchPath);
            // A branch whose path was deleted in its own changeset is a rename.
            branch.IsRenamedBranch = DeletedBranchesPathes.Contains(branch.TfsBranchPath);
            rootBranches.Add(branch);
            firstChangesetOfBranch = branchChangeset.FirstOrDefault(
                c => c.BranchChangesetDatas.BranchPath == firstChangesetOfBranch.BranchChangesetDatas.ParentBranch);
        } while (firstChangesetOfBranch != null);
        // Collected child-to-parent; callers expect oldest ancestor first.
        rootBranches.Reverse();
        return rootBranches;
    }
    rootBranches.Add(new RootBranch(-1, tfsPathBranchToCreate));
    return rootBranches;
}
// Lazily computed cache for DeletedBranchesPathes.
private List<string> _deletedBranchesPathes;

// Parent-branch paths that were deleted within a branch changeset, i.e. the
// old names of renamed branches. Computed once from the script and cached.
List<string> DeletedBranchesPathes
{
    get
    {
        return _deletedBranchesPathes ?? (_deletedBranchesPathes = _script.Changesets.Where(c => c.IsBranchChangeset &&
            c.Changes.Any(ch => ch.ChangeType == TfsChangeType.Delete && ch.RepositoryPath == c.BranchChangesetDatas.ParentBranch))
            .Select(b => b.BranchChangesetDatas.ParentBranch).ToList());
    }
}
// Builds the branch hierarchy from the scripted history. Renamed branches are
// resolved to their real root parent; deleted branches are filtered out unless
// getDeletedBranches is true.
public IEnumerable<IBranchObject> GetBranches(bool getDeletedBranches = false)
{
    // Branch changesets whose target path later appears in the deleted set
    // represent renames; they are used to map back to the real parent.
    var renamings = _script.Changesets.Where(
        c => c.IsBranchChangeset &&
        DeletedBranchesPathes.Any(b => b == c.BranchChangesetDatas.BranchPath)).ToList();
    var branches = new List<IBranchObject>();
    // Script-level root branches have no parent.
    branches.AddRange(_script.RootBranches.Select(b => new MockBranchObject { IsRoot = true, Path = b.BranchPath, ParentPath = null }));
    // Every branch changeset contributes a child branch, parented to the
    // nearest ancestor that was not itself renamed away.
    branches.AddRange(_script.Changesets.Where(c=>c.IsBranchChangeset).Select(c => new MockBranchObject
    {
        IsRoot = false,
        Path = c.BranchChangesetDatas.BranchPath,
        ParentPath = GetRealRootBranch(renamings, c.BranchChangesetDatas.ParentBranch)
    }));
    if (!getDeletedBranches)
        branches.RemoveAll(b => DeletedBranchesPathes.Contains(b.Path));
    return branches;
}
// Follows the rename chain: while the current path was itself created by a
// changeset in the deleted (renamed) set, step up to that changeset's parent.
// Returns the first path that is not a rename target.
private string GetRealRootBranch(List<ScriptedChangeset> deletedBranches, string branchPath)
{
    var currentPath = branchPath;
    var renaming = deletedBranches.FirstOrDefault(b => b.BranchChangesetDatas.BranchPath == currentPath);
    while (renaming != null)
    {
        currentPath = renaming.BranchChangesetDatas.ParentBranch;
        renaming = deletedBranches.FirstOrDefault(b => b.BranchChangesetDatas.BranchPath == currentPath);
    }
    return currentPath;
}
#endregion
#region unimplemented
// Shelving is not supported by this scripted fake.
public IShelveset CreateShelveset(IWorkspace workspace, string shelvesetName)
{
    throw new NotImplementedException();
}
// Work-item association is not supported by this scripted fake.
public IEnumerable<IWorkItemCheckinInfo> GetWorkItemInfos(IEnumerable<string> workItems, TfsWorkItemCheckinAction checkinAction)
{
    throw new NotImplementedException();
}
// Work-item association is not supported by this scripted fake.
public IEnumerable<IWorkItemCheckedInfo> GetWorkItemCheckedInfos(IEnumerable<string> workItems, TfsWorkItemCheckinAction checkinAction)
{
    throw new NotImplementedException();
}
// Check-in notes are not supported by this scripted fake.
public ICheckinNote CreateCheckinNote(Dictionary<string, string> checkinNotes)
{
    throw new NotImplementedException();
}
// Remote-scoped changeset lookup is not supported by this scripted fake
// (use the GetChangeset(int) overload instead).
public ITfsChangeset GetChangeset(int changesetId, IGitTfsRemote remote)
{
    throw new NotImplementedException();
}
// Shelving is not supported by this scripted fake.
public bool HasShelveset(string shelvesetName)
{
    throw new NotImplementedException();
}
// Shelving is not supported by this scripted fake.
public ITfsChangeset GetShelvesetData(IGitTfsRemote remote, string shelvesetOwner, string shelvesetName)
{
    throw new NotImplementedException();
}
// Shelving is not supported by this scripted fake.
public int ListShelvesets(ShelveList shelveList, IGitTfsRemote remote)
{
    throw new NotImplementedException();
}
// The scripted fake exposes no TFS root branches through this query;
// callers get an empty (non-null) result rather than NotImplementedException.
public IEnumerable<string> GetAllTfsRootBranchesOrderedByCreation()
{
    return new List<string>();
}
// Labels are not supported by this scripted fake.
public IEnumerable<TfsLabel> GetLabels(string tfsPathBranch, string nameFilter = null)
{
    throw new NotImplementedException();
}
// Branch creation is not supported by this scripted fake.
public void CreateBranch(string sourcePath, string targetPath, int changesetId, string comment = null)
{
    throw new NotImplementedException();
}
// Root-branch creation is not supported by this scripted fake.
public void CreateTfsRootBranch(string projectName, string mainBranch, string gitRepositoryPath, bool createTeamProjectFolder)
{
    throw new NotImplementedException();
}
// Gated check-in builds are not supported by this scripted fake.
public long QueueGatedCheckinBuild(Uri value, string buildDefinitionName, string shelvesetName, string checkInTicket)
{
    throw new NotImplementedException();
}
#endregion
// Minimal IVersionControlServer over the scripted history. Only item lookup
// by numeric id is implemented; the remaining queries are unsupported.
private class FakeVersionControlServer : IVersionControlServer
{
    Script _script;

    public FakeVersionControlServer(Script script)
    {
        _script = script;
    }

    // Returns the most recent change to the item with the given id at or
    // before the given changeset number. Assumes _script.Changesets is
    // ordered by ascending changeset id -- TODO confirm against the script
    // builders. Throws InvalidOperationException when no such change exists,
    // and if an item id occurs more than once within a single changeset
    // (SingleOrDefault).
    public IItem GetItem(int itemId, int changesetNumber)
    {
        var match = _script.Changesets.AsEnumerable().Reverse()
            .SkipWhile(cs => cs.Id > changesetNumber)
            .Select(cs => new { Changeset = cs, Change = cs.Changes.SingleOrDefault(change => change.ItemId == itemId) })
            .First(x => x.Change != null);
        return new Change(this, match.Changeset, match.Change);
    }

    // Path-based lookup is not supported by this fake.
    public IItem GetItem(string itemPath, int changesetNumber)
    {
        throw new NotImplementedException();
    }

    // Recursive item queries are not supported by this fake.
    public IItem[] GetItems(string itemPath, int changesetNumber, TfsRecursionType recursionType)
    {
        throw new NotImplementedException();
    }

    // History queries are not supported by this fake.
    public IEnumerable<IChangeset> QueryHistory(string path, int version, int deletionId, TfsRecursionType recursion, string user, int versionFrom, int versionTo, int maxCount, bool includeChanges, bool slotMode, bool includeDownloadInfo)
    {
        throw new NotImplementedException();
    }
}
}
}
| |
using System;
using Xwt;
using Xwt.Drawing;
using System.Xml;
namespace Samples
{
/// <summary>
/// Main window of the Xwt demo: a sample browser with a tree of samples on the
/// left and the currently selected sample hosted on the right.
/// </summary>
public class MainWindow: Window
{
    // Tree of available samples shown in the left pane.
    TreeView samplesTree;
    TreeStore store;
    // Row icon shared by all tree entries.
    Image icon;
    // Right-hand pane hosting the currently selected sample widget.
    VBox sampleBox;
    Label title;
    Widget currentSample;
    // Tree-store columns: display name, sample descriptor and row icon.
    DataField<string> nameCol = new DataField<string> ();
    DataField<Sample> widgetCol = new DataField<Sample> ();
    DataField<Image> iconCol = new DataField<Image> ();
    StatusIcon statusIcon;

    public MainWindow ()
    {
        Title = "Xwt Demo Application";
        Width = 500;
        Height = 400;

        // Status icons are not supported on every backend, so failure here is
        // tolerated and only logged.
        try {
            statusIcon = Application.CreateStatusIcon ();
            statusIcon.Menu = new Menu ();
            statusIcon.Menu.Items.Add (new MenuItem ("Test"));
            statusIcon.Image = Image.FromResource (GetType (), "package.png");
        } catch {
            Console.WriteLine ("Status icon could not be shown");
        }

        // Main menu: File (Open/New/Close) and Edit (Copy/Cut/Paste).
        // Only Close is wired up; the other items are for demo purposes.
        Menu menu = new Menu ();

        var file = new MenuItem ("_File");
        file.SubMenu = new Menu ();
        file.SubMenu.Items.Add (new MenuItem ("_Open"));
        file.SubMenu.Items.Add (new MenuItem ("_New"));
        MenuItem mi = new MenuItem ("_Close");
        mi.Clicked += delegate {
            Application.Exit();
        };
        file.SubMenu.Items.Add (mi);
        menu.Items.Add (file);

        var edit = new MenuItem ("_Edit");
        edit.SubMenu = new Menu ();
        edit.SubMenu.Items.Add (new MenuItem ("_Copy"));
        edit.SubMenu.Items.Add (new MenuItem ("Cu_t"));
        edit.SubMenu.Items.Add (new MenuItem ("_Paste"));
        menu.Items.Add (edit);

        MainMenu = menu;

        // Horizontal split: sample tree on the left, sample host on the right.
        HPaned box = new HPaned ();

        icon = Image.FromResource (typeof(App), "document-generic.png");

        store = new TreeStore (nameCol, iconCol, widgetCol);
        samplesTree = new TreeView ();
        samplesTree.Columns.Add ("Name", iconCol, nameCol);

        // Register all samples. Category nodes (null sample type) only group
        // their children and host no widget.
        var w = AddSample (null, "Widgets", null);
        AddSample (w, "Boxes", typeof(Boxes));
        AddSample (w, "Buttons", typeof(ButtonSample));
        AddSample (w, "Calendar", typeof(CalendarSample));
        AddSample (w, "CheckBox", typeof(Checkboxes));
        AddSample (w, "Clipboard", typeof(ClipboardSample));
        AddSample (w, "ColorSelector", typeof(ColorSelectorSample));
        AddSample (w, "FontSelector", typeof(FontSelectorSample));
        AddSample (w, "ComboBox", typeof(ComboBoxes));
        AddSample (w, "DatePicker", typeof(DatePickerSample));
        // AddSample (null, "Designer", typeof(Designer));
        AddSample (w, "Expander", typeof (ExpanderSample));
        AddSample (w, "Progress bars", typeof(ProgressBarSample));
        AddSample (w, "Frames", typeof(Frames));
        AddSample (w, "Images", typeof(Images));
        AddSample (w, "Labels", typeof(Labels));
        AddSample (w, "ListBox", typeof(ListBoxSample));
        AddSample (w, "LinkLabels", typeof(LinkLabels));
        var listView = AddSample (w, "ListView", typeof(ListView1));
        AddSample (listView, "Editable checkboxes", typeof(ListView2));
        AddSample (listView, "Cell Bounds", typeof(ListViewCellBounds));
        AddSample (w, "Markdown", typeof (MarkDownSample));
        AddSample (w, "Menu", typeof(MenuSamples));
        AddSample (w, "Mnemonics", typeof (Mnemonics));
        AddSample (w, "Notebook", typeof(NotebookSample));
        AddSample (w, "Paneds", typeof(PanedViews));
        AddSample (w, "Popover", typeof(PopoverSample));
        AddSample (w, "RadioButton", typeof (RadioButtonSample));
        AddSample (w, "SpinButton", typeof (SpinButtonSample));
        AddSample (w, "Scroll View", typeof(ScrollWindowSample));
        AddSample (w, "Scrollbar", typeof(ScrollbarSample));
        AddSample (w, "Slider", typeof (SliderSample));
        AddSample (w, "Spinners", typeof (Spinners));
        AddSample (w, "Tables", typeof (Tables));
        AddSample (w, "Text Entry", typeof (TextEntries));
        AddSample (w, "Password Entry", typeof (PasswordEntries));
        var treeview = AddSample (w, "TreeView", typeof(TreeViews));
        AddSample (treeview, "Cell Bounds", typeof(TreeViewCellBounds));
        AddSample (w, "WebView", typeof(WebViewSample));

        var n = AddSample (null, "Drawing", null);
        AddSample (n, "Canvas with Widget (Linear)", typeof (CanvasWithWidget_Linear));
        AddSample (n, "Canvas with Widget (Radial)", typeof (CanvasWithWidget_Radial));
        AddSample (n, "Chart", typeof (ChartSample));
        AddSample (n, "Colors", typeof(ColorsSample));
        AddSample (n, "Figures", typeof(DrawingFigures));
        AddSample (n, "Transformations", typeof(DrawingTransforms));
        AddSample (n, "Images and Patterns", typeof(DrawingPatternsAndImages));
        AddSample (n, "Text", typeof(DrawingText));
        AddSample (n, "Partial Images", typeof (PartialImages));
        AddSample (n, "Custom Drawn Image", typeof (ImageScaling));
        AddSample (n, "9-patch Image", typeof (Image9Patch));
        AddSample (n, "Widget Rendering", typeof (WidgetRendering));
        AddSample (n, "Text Input", typeof (TextInput));

        var wf = AddSample (null, "Widget Features", null);
        AddSample (wf, "Drag & Drop", typeof(DragDrop));
        AddSample (wf, "Focus", typeof(WidgetFocus));
        AddSample (wf, "Widget Events", typeof(WidgetEvents));
        AddSample (wf, "Opacity", typeof(OpacitySample));
        AddSample (wf, "Tooltips", typeof(Tooltips));
        AddSample (wf, "Cursors", typeof(MouseCursors));

        var windows = AddSample (null, "Windows", typeof(Windows));
        AddSample (windows, "Message Dialogs", typeof(MessageDialogs));

        AddSample (null, "Screens", typeof (ScreensSample));
        AddSample (null, "Multithreading", typeof (MultithreadingSample));

        samplesTree.DataSource = store;

        box.Panel1.Content = samplesTree;

        sampleBox = new VBox ();
        title = new Label ("Sample:");
        sampleBox.PackStart (title);

        box.Panel2.Content = sampleBox;
        box.Panel2.Resize = true;
        box.Position = 160;

        Content = box;

        samplesTree.SelectionChanged += HandleSamplesTreeSelectionChanged;

        CloseRequested += HandleCloseRequested;
    }

    // Asks for confirmation before closing; exits the application when confirmed.
    void HandleCloseRequested (object sender, CloseRequestedEventArgs args)
    {
        args.AllowClose = MessageDialog.Confirm ("Samples will be closed", Command.Ok);
        if (args.AllowClose)
            Application.Exit ();
    }

    protected override void Dispose (bool disposing)
    {
        base.Dispose (disposing);

        // The status icon is a native resource and must be released explicitly.
        if (statusIcon != null) {
            statusIcon.Dispose ();
        }
    }

    // Swaps the hosted widget when the tree selection changes. Sample widgets
    // are created lazily on first selection and cached on the Sample entry.
    void HandleSamplesTreeSelectionChanged (object sender, EventArgs e)
    {
        if (samplesTree.SelectedRow != null) {
            if (currentSample != null)
                sampleBox.Remove (currentSample);
            Sample s = store.GetNavigatorAt (samplesTree.SelectedRow).GetValue (widgetCol);
            if (s.Type != null) {
                if (s.Widget == null)
                    s.Widget = (Widget)Activator.CreateInstance (s.Type);
                sampleBox.PackStart (s.Widget, true);
            }

            // Console.WriteLine (System.Xaml.XamlServices.Save (s.Widget));

            currentSample = s.Widget;
            Dump (currentSample, 0);
        }
    }

    // Debug helper: prints the widget tree with preferred sizes, indented by
    // depth. NOTE(review): callers pass Widget instances for the
    // IWidgetSurface parameter -- presumably Xwt provides a conversion; verify.
    void Dump (IWidgetSurface w, int ind)
    {
        if (w == null)
            return;
        var s = w.GetPreferredSize ();
        Console.WriteLine (new string (' ', ind * 2) + " " + w.GetType ().Name + " " + s.Width + " " + s.Height);
        foreach (var c in w.Children)
            Dump (c, ind + 1);
    }

    // Adds one entry under the given tree position (null = top level) and
    // returns the new node's position so children can be attached to it.
    TreePosition AddSample (TreePosition pos, string name, Type sampleType)
    {
        //if (page != null)
        //	page.Margin.SetAll (5);
        return store.AddNode (pos).SetValue (nameCol, name).SetValue (iconCol, icon).SetValue (widgetCol, new Sample (sampleType)).CurrentPosition;
    }
}
// Descriptor for one entry in the sample tree.
class Sample
{
    // Widget type to instantiate for this entry; null for pure category nodes.
    public Type Type;
    // Widget instance, created lazily on first selection and cached here.
    public Widget Widget;

    public Sample (Type type)
    {
        this.Type = type;
    }
}
}
| |
//-------------------------------------------------------------------------------
// <copyright file="ExtensionBase.cs" company="Appccelerate">
// Copyright (c) 2008-2015
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
//-------------------------------------------------------------------------------
namespace Appccelerate.StateMachine.Extensions
{
using System;
using Appccelerate.StateMachine.Machine;
/// <summary>
/// Base class for state machine extensions with empty implementation.
/// Derive from this class and override only the notifications you care about;
/// all members are no-op virtual methods.
/// </summary>
/// <typeparam name="TState">The type of the state.</typeparam>
/// <typeparam name="TEvent">The type of the event.</typeparam>
public class ExtensionBase<TState, TEvent> : IExtension<TState, TEvent>
    where TState : IComparable
    where TEvent : IComparable
{
    /// <summary>
    /// Called after the state machine was started.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    public virtual void StartedStateMachine(IStateMachineInformation<TState, TEvent> stateMachine)
    {
    }

    /// <summary>
    /// Called after the state machine was stopped.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    public virtual void StoppedStateMachine(IStateMachineInformation<TState, TEvent> stateMachine)
    {
    }

    /// <summary>
    /// Called after an event was queued at the end of the event queue.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="eventId">The event id.</param>
    /// <param name="eventArgument">The event argument.</param>
    public virtual void EventQueued(IStateMachineInformation<TState, TEvent> stateMachine, TEvent eventId, object eventArgument)
    {
    }

    /// <summary>
    /// Called after an event was queued with priority (front of the event queue).
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="eventId">The event id.</param>
    /// <param name="eventArgument">The event argument.</param>
    public virtual void EventQueuedWithPriority(IStateMachineInformation<TState, TEvent> stateMachine, TEvent eventId, object eventArgument)
    {
    }

    /// <summary>
    /// Called after the state machine switched states.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="oldState">The old state.</param>
    /// <param name="newState">The new state.</param>
    public virtual void SwitchedState(IStateMachineInformation<TState, TEvent> stateMachine, IState<TState, TEvent> oldState, IState<TState, TEvent> newState)
    {
    }

    /// <summary>
    /// Called when the state machine is initializing.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="initialState">The initial state. Can be replaced by the extension.</param>
    public virtual void InitializingStateMachine(IStateMachineInformation<TState, TEvent> stateMachine, ref TState initialState)
    {
    }

    /// <summary>
    /// Called when the state machine was initialized.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="initialState">The initial state.</param>
    public virtual void InitializedStateMachine(IStateMachineInformation<TState, TEvent> stateMachine, TState initialState)
    {
    }

    /// <summary>
    /// Called when the state machine enters the initial state.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="state">The state.</param>
    public virtual void EnteringInitialState(IStateMachineInformation<TState, TEvent> stateMachine, TState state)
    {
    }

    /// <summary>
    /// Called when the state machine entered the initial state.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="state">The state.</param>
    /// <param name="context">The context.</param>
    public virtual void EnteredInitialState(IStateMachineInformation<TState, TEvent> stateMachine, TState state, ITransitionContext<TState, TEvent> context)
    {
    }

    /// <summary>
    /// Called when an event is firing on the state machine.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="eventId">The event id. Can be replaced by the extension.</param>
    /// <param name="eventArgument">The event argument. Can be replaced by the extension.</param>
    public virtual void FiringEvent(IStateMachineInformation<TState, TEvent> stateMachine, ref TEvent eventId, ref object eventArgument)
    {
    }

    /// <summary>
    /// Called when an event was fired on the state machine.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="context">The transition context.</param>
    public virtual void FiredEvent(IStateMachineInformation<TState, TEvent> stateMachine, ITransitionContext<TState, TEvent> context)
    {
    }

    /// <summary>
    /// Called before an entry action exception is handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="state">The state.</param>
    /// <param name="context">The context.</param>
    /// <param name="exception">The exception. Can be replaced by the extension.</param>
    public virtual void HandlingEntryActionException(IStateMachineInformation<TState, TEvent> stateMachine, IState<TState, TEvent> state, ITransitionContext<TState, TEvent> context, ref Exception exception)
    {
    }

    /// <summary>
    /// Called after an entry action exception was handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="state">The state.</param>
    /// <param name="context">The context.</param>
    /// <param name="exception">The exception.</param>
    public virtual void HandledEntryActionException(IStateMachineInformation<TState, TEvent> stateMachine, IState<TState, TEvent> state, ITransitionContext<TState, TEvent> context, Exception exception)
    {
    }

    /// <summary>
    /// Called before an exit action exception is handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="state">The state.</param>
    /// <param name="context">The context.</param>
    /// <param name="exception">The exception. Can be replaced by the extension.</param>
    public virtual void HandlingExitActionException(IStateMachineInformation<TState, TEvent> stateMachine, IState<TState, TEvent> state, ITransitionContext<TState, TEvent> context, ref Exception exception)
    {
    }

    /// <summary>
    /// Called after an exit action exception was handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="state">The state.</param>
    /// <param name="context">The context.</param>
    /// <param name="exception">The exception.</param>
    public virtual void HandledExitActionException(IStateMachineInformation<TState, TEvent> stateMachine, IState<TState, TEvent> state, ITransitionContext<TState, TEvent> context, Exception exception)
    {
    }

    /// <summary>
    /// Called before a guard exception is handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="transitionContext">The transition context.</param>
    /// <param name="exception">The exception. Can be replaced by the extension.</param>
    public virtual void HandlingGuardException(IStateMachineInformation<TState, TEvent> stateMachine, ITransition<TState, TEvent> transition, ITransitionContext<TState, TEvent> transitionContext, ref Exception exception)
    {
    }

    /// <summary>
    /// Called after a guard exception was handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="transitionContext">The transition context.</param>
    /// <param name="exception">The exception.</param>
    public virtual void HandledGuardException(IStateMachineInformation<TState, TEvent> stateMachine, ITransition<TState, TEvent> transition, ITransitionContext<TState, TEvent> transitionContext, Exception exception)
    {
    }

    /// <summary>
    /// Called before a transition exception is handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="context">The context.</param>
    /// <param name="exception">The exception. Can be replaced by the extension.</param>
    public virtual void HandlingTransitionException(IStateMachineInformation<TState, TEvent> stateMachine, ITransition<TState, TEvent> transition, ITransitionContext<TState, TEvent> context, ref Exception exception)
    {
    }

    /// <summary>
    /// Called after a transition exception is handled.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="transitionContext">The transition context.</param>
    /// <param name="exception">The exception.</param>
    public virtual void HandledTransitionException(IStateMachineInformation<TState, TEvent> stateMachine, ITransition<TState, TEvent> transition, ITransitionContext<TState, TEvent> transitionContext, Exception exception)
    {
    }

    /// <summary>
    /// Called when a transition is skipped because its guard returned false.
    /// </summary>
    /// <param name="stateMachineInformation">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="context">The transition context.</param>
    public virtual void SkippedTransition(
        IStateMachineInformation<TState, TEvent> stateMachineInformation,
        ITransition<TState, TEvent> transition,
        ITransitionContext<TState, TEvent> context)
    {
    }

    /// <summary>
    /// Called when a transition is going to be executed. After the guard of the transition evaluated to true.
    /// </summary>
    /// <param name="stateMachine">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="transitionContext">The transition context.</param>
    public virtual void ExecutingTransition(
        IStateMachineInformation<TState, TEvent> stateMachine,
        ITransition<TState, TEvent> transition,
        ITransitionContext<TState, TEvent> transitionContext)
    {
    }

    /// <summary>
    /// Called when a transition was executed.
    /// </summary>
    /// <param name="stateMachineInformation">The state machine.</param>
    /// <param name="transition">The transition.</param>
    /// <param name="context">The transition context.</param>
    public virtual void ExecutedTransition(
        IStateMachineInformation<TState, TEvent> stateMachineInformation,
        ITransition<TState, TEvent> transition,
        ITransitionContext<TState, TEvent> context)
    {
    }
}
}
| |
/*
* Copyright (C) 2007-2014 ARGUS TV
* http://www.argus-tv.com
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GNU Make; see the file COPYING. If not, write to
* the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
* http://www.gnu.org/copyleft/gpl.html
*
*/
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using ArgusTV.DataContracts;
using ArgusTV.DataContracts.Tuning;
namespace ArgusTV.ServiceProxy
{
/// <summary>
/// Service to control/query all aspects of recording and tuning.
/// </summary>
public partial class ControlServiceProxy : RestProxyBase
{
/// <summary>
/// Constructs a channel to the service.
/// </summary>
internal ControlServiceProxy()
    : base("Control")  // "Control" is the REST route prefix handled by RestProxyBase.
{
}
#region Plugin Services
/// <summary>
/// Get all configured plugin services.
/// </summary>
/// <param name="activeOnly">Set to true to only receive active plugins.</param>
/// <returns>A list containing zero or more plugin services.</returns>
public async Task<List<PluginService>> GetAllPluginServices(bool activeOnly = true)
{
    var request = NewRequest(HttpMethod.Get, "PluginServices");
    // The server defaults to active-only; the flag is only sent when all plugins are wanted.
    if (!activeOnly)
    {
        request.AddParameter("activeOnly", false);
    }
    return await ExecuteAsync<List<PluginService>>(request).ConfigureAwait(false);
}
/// <summary>
/// Save a new or modified plugin service. A new plugin service is recognized by a Guid.Empty ID.
/// </summary>
/// <param name="pluginService">The plugin service to save.</param>
/// <returns>The saved plugin service.</returns>
public async Task<PluginService> SavePluginService(PluginService pluginService)
{
    var request = NewRequest(HttpMethod.Post, "SavePluginService");
    request.AddBody(pluginService);
    // The server echoes the saved entity (with any server-assigned fields).
    return await ExecuteAsync<PluginService>(request).ConfigureAwait(false);
}
/// <summary>
/// Delete a plugin service.
/// </summary>
/// <param name="pluginServiceId">The ID of the plugin service to delete.</param>
public async Task DeletePluginService(Guid pluginServiceId)
{
    // Deletion is exposed as a POST endpoint with the ID in the route, not as HTTP DELETE.
    var request = NewRequest(HttpMethod.Post, "DeletePluginService/{0}", pluginServiceId);
    await ExecuteAsync(request).ConfigureAwait(false);
}
/// <summary>
/// Ask ARGUS TV to test the connection to a recorder by pinging it.
/// </summary>
/// <param name="pluginService">The plugin service to ping.</param>
public async Task PingPluginService(PluginService pluginService)
{
    var request = NewRequest(HttpMethod.Post, "PingPluginService");
    request.AddBody(pluginService);
    // No result payload: a non-error response means the ping succeeded.
    await ExecuteAsync(request).ConfigureAwait(false);
}
/// <summary>
/// Check if the ARGUS TV service has the needed access rights on the recording shares of the given pluginService.
/// </summary>
/// <param name="pluginService">The pluginService.</param>
/// <returns>A list of RecordingShareAccessibilityInfos, one per recording share.</returns>
public async Task<List<RecordingShareAccessibilityInfo>> AreRecordingSharesAccessible(PluginService pluginService)
{
    var request = NewRequest(HttpMethod.Post, "AreRecordingSharesAccessible");
    request.AddBody(pluginService);
    return await ExecuteAsync<List<RecordingShareAccessibilityInfo>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get information (free disk space) from all recording disks.
/// </summary>
/// <returns>A RecordingDisksInfo entity with all disk(s) information.</returns>
public async Task<RecordingDisksInfo> GetRecordingDisksInfo()
{
    var request = NewRequest(HttpMethod.Get, "RecordingDisksInfo");
    return await ExecuteAsync<RecordingDisksInfo>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all the recording shares configured for the current recorder(s).
/// </summary>
/// <returns>A list containing zero or more recording share paths.</returns>
public async Task<List<string>> GetRecordingShares()
{
    var request = NewRequest(HttpMethod.Get, "RecordingShares");
    return await ExecuteAsync<List<string>>(request).ConfigureAwait(false);
}
#endregion
#region Recordings
/// <summary>
/// Get all recordings for the given criteria. You must specify at least one criterion other than the channel type.
/// </summary>
/// <param name="channelType">The channel-type of the recordings.</param>
/// <param name="scheduleId">The schedule ID of the recordings, or null.</param>
/// <param name="programTitle">The program title of the recordings, or null.</param>
/// <param name="category">The category of the recordings, or null.</param>
/// <param name="channelId">The channel ID of the recordings, or null.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>Returns a list of zero or more recordings.</returns>
public async Task<List<Recording>> GetFullRecordings(ChannelType channelType, Guid? scheduleId, string programTitle, string category, Guid? channelId, bool includeNonExisting = false)
{
    var request = NewRequest(HttpMethod.Post, "GetFullRecordings/{0}", channelType);
    // BUG FIX: includeNonExisting was accepted and documented but never sent,
    // so the server always fell back to its default (false). Send it the same
    // way the sibling GetRecordings* methods do: only when true.
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    request.AddBody(new
    {
        ScheduleId = scheduleId,
        ProgramTitle = programTitle,
        Category = category,
        ChannelId = channelId
    });
    return await ExecuteAsync<List<Recording>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recording groups based on the recording group-mode.
/// </summary>
/// <param name="channelType">The channel-type of the recordings.</param>
/// <param name="recordingGroupMode">The recording group-mode.</param>
/// <returns>A list of zero or more recording schedule-groups.</returns>
public async Task<List<RecordingGroup>> GetAllRecordingGroups(ChannelType channelType, RecordingGroupMode recordingGroupMode)
{
    // Both criteria are encoded in the route; there is no request body.
    var request = NewRequest(HttpMethod.Get, "RecordingGroups/{0}/{1}", channelType, recordingGroupMode);
    return await ExecuteAsync<List<RecordingGroup>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings for the given original schedule.
/// </summary>
/// <param name="scheduleId">The ID of the schedule.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more recordings.</returns>
public async Task<List<RecordingSummary>> GetRecordingsForSchedule(Guid scheduleId, bool includeNonExisting = false)
{
    var request = NewRequest(HttpMethod.Get, "RecordingsForSchedule/{0}", scheduleId);
    // Only sent when true; omitting it leaves the server at its default (false).
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    return await ExecuteAsync<List<RecordingSummary>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings for the given original schedules.
/// </summary>
/// <param name="scheduleIds">A list of schedule IDs.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more lists of zero or more recordings (summary), so a list per given schedule ID.</returns>
public async Task<List<List<RecordingSummary>>> GetRecordingsForSchedules(IEnumerable<Guid> scheduleIds, bool includeNonExisting = false)
{
    var request = NewRequest(HttpMethod.Post, "RecordingsForSchedules");
    // Only sent when true; omitting it leaves the server at its default (false).
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    // The IDs travel in the POST body rather than in the route.
    request.AddBody(new
    {
        Ids = scheduleIds
    });
    return await ExecuteAsync<List<List<RecordingSummary>>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings for the given program title.
/// </summary>
/// <param name="channelType">The channel-type of the recordings.</param>
/// <param name="programTitle">The program title.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more recordings.</returns>
public async Task<List<RecordingSummary>> GetRecordingsForProgramTitle(ChannelType channelType, string programTitle, bool includeNonExisting = false)
{
    // POST is used so the title can be sent in the body (avoids URL-encoding issues).
    var request = NewRequest(HttpMethod.Post, "RecordingsForProgramTitle/{0}", channelType);
    // Only sent when true; omitting it leaves the server at its default (false).
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    request.AddBody(new
    {
        ProgramTitle = programTitle
    });
    return await ExecuteAsync<List<RecordingSummary>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings for the given program titles.
/// </summary>
/// <param name="channelType">The channel-type of the recordings.</param>
/// <param name="programTitles">A list of program titles.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more lists of zero or more recordings (summary), so a list per given program title.</returns>
public async Task<List<List<RecordingSummary>>> GetRecordingsForProgramTitles(ChannelType channelType, IEnumerable<string> programTitles, bool includeNonExisting = false)
{
    var request = NewRequest(HttpMethod.Post, "RecordingsForProgramTitles/{0}", channelType);
    // Only sent when true; omitting it leaves the server at its default (false).
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    request.AddBody(new
    {
        ProgramTitles = programTitles
    });
    return await ExecuteAsync<List<List<RecordingSummary>>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings for the given program category.
/// </summary>
/// <param name="channelType">The channel-type of the recordings.</param>
/// <param name="category">The program category.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more recordings.</returns>
public async Task<List<RecordingSummary>> GetRecordingsForCategory(ChannelType channelType, string category, bool includeNonExisting = false)
{
    // POST is used so the category can be sent in the body (avoids URL-encoding issues).
    var request = NewRequest(HttpMethod.Post, "RecordingsForCategory/{0}", channelType);
    // Only sent when true; omitting it leaves the server at its default (false).
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    request.AddBody(new
    {
        Category = category
    });
    return await ExecuteAsync<List<RecordingSummary>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings for the given program categories.
/// </summary>
/// <param name="channelType">The channel-type of the recordings.</param>
/// <param name="categories">A list of program categories.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more lists of zero or more recordings (summary), so a list per given category.</returns>
public async Task<List<List<RecordingSummary>>> GetRecordingsForCategories(ChannelType channelType, IEnumerable<string> categories, bool includeNonExisting = false)
{
    var request = NewRequest(HttpMethod.Post, "RecordingsForCategories/{0}", channelType);
    // Only sent when true; omitting it leaves the server at its default (false).
    if (includeNonExisting)
    {
        request.AddParameter("includeNonExisting", true);
    }
    request.AddBody(new
    {
        Categories = categories
    });
    return await ExecuteAsync<List<List<RecordingSummary>>>(request).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings on the given channel.
/// </summary>
/// <param name="channelId">The ID of the channel.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>An array of zero or more recordings.</returns>
public async Task<List<RecordingSummary>> GetRecordingsOnChannel(Guid channelId, bool includeNonExisting = false)
{
    var req = NewRequest(HttpMethod.Get, "RecordingsOnChannel/{0}", channelId);
    if (includeNonExisting)
    {
        req.AddParameter("includeNonExisting", true);
    }
    return await ExecuteAsync<List<RecordingSummary>>(req).ConfigureAwait(false);
}
/// <summary>
/// Get all recordings on the given channels.
/// </summary>
/// <param name="channelIds">A list of channel IDs.</param>
/// <param name="includeNonExisting">If true also return recording entries for which the recording file is missing.</param>
/// <returns>A list of zero or more lists of zero or more recordings (summary), so a list per given channel.</returns>
public async Task<List<List<RecordingSummary>>> GetRecordingsOnChannels(IEnumerable<Guid> channelIds, bool includeNonExisting = false)
{
    var req = NewRequest(HttpMethod.Post, "RecordingsOnChannels");
    if (includeNonExisting)
    {
        req.AddParameter("includeNonExisting", true);
    }
    // One result list per requested channel ID.
    req.AddBody(new { Ids = channelIds });
    return await ExecuteAsync<List<List<RecordingSummary>>>(req).ConfigureAwait(false);
}
/// <summary>
/// Get the recording associated with the given filename.
/// </summary>
/// <param name="recordingFileName">The full path of the recording file.</param>
/// <returns>The recording, or null if none is found.</returns>
public async Task<Recording> GetRecordingByFileName(string recordingFileName)
{
    var req = NewRequest(HttpMethod.Post, "RecordingByFile");
    // The filename travels in the body (POST) because paths may contain URL-hostile characters.
    req.AddBody(new { RecordingFileName = recordingFileName });
    return await ExecuteAsync<Recording>(req).ConfigureAwait(false);
}
/// <summary>
/// Delete a recording.
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
/// <param name="deleteRecordingFile">Set to true to also delete the recording file.</param>
public async Task DeleteRecording(string recordingFileName, bool deleteRecordingFile = true)
{
    var req = NewRequest(HttpMethod.Delete, "RecordingByFile");
    // The server defaults to deleting the file, so the flag is only sent when turned off.
    if (deleteRecordingFile == false)
    {
        req.AddParameter("deleteRecordingFile", false);
    }
    req.AddBody(new { RecordingFileName = recordingFileName });
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Get the recording by its unique ID.
/// </summary>
/// <param name="recordingId">The ID of the recording.</param>
/// <returns>The recording, or null if none is found.</returns>
public async Task<Recording> GetRecordingById(Guid recordingId)
{
    var req = NewRequest(HttpMethod.Get, "RecordingById/{0}", recordingId);
    return await ExecuteAsync<Recording>(req).ConfigureAwait(false);
}
/// <summary>
/// Delete a recording by ID.
/// </summary>
/// <param name="recordingId">The ID of the recording.</param>
/// <param name="deleteRecordingFile">Set to true to also delete the recording file.</param>
public async Task DeleteRecordingById(Guid recordingId, bool deleteRecordingFile = true)
{
    var req = NewRequest(HttpMethod.Delete, "RecordingById/{0}", recordingId);
    // The server defaults to deleting the file, so the flag is only sent when turned off.
    if (deleteRecordingFile == false)
    {
        req.AddParameter("deleteRecordingFile", false);
    }
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Get the position (in seconds) to where the recording was last watched. Or null if it was never watched.
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
/// <returns>The position in seconds or null.</returns>
public async Task<int?> GetRecordingLastWatchedPosition(string recordingFileName)
{
    var req = NewRequest(HttpMethod.Post, "GetRecordingLastWatchedPosition");
    req.AddBody(new { RecordingFileName = recordingFileName });
    var envelope = await ExecuteAsync<GetRecordingLastWatchedPositionResult>(req).ConfigureAwait(false);
    return envelope.LastWatchedPositionSeconds;
}
// Deserialization envelope for the "GetRecordingLastWatchedPosition" response body.
private class GetRecordingLastWatchedPositionResult
{
// Position in seconds where the recording was last watched; null if never watched.
public int? LastWatchedPositionSeconds { get; set; }
}
/// <summary>
/// Set the position (in seconds) to where the recording was last watched, or null to reset the state to never-watched.
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
/// <param name="lastWatchedPositionSeconds">The position in seconds or null.</param>
public async Task SetRecordingLastWatchedPosition(string recordingFileName, int? lastWatchedPositionSeconds)
{
    var req = NewRequest(HttpMethod.Post, "SetRecordingLastWatchedPosition");
    // A null position resets the recording to the never-watched state.
    req.AddBody(new
    {
        RecordingFileName = recordingFileName,
        LastWatchedPositionSeconds = lastWatchedPositionSeconds
    });
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Set the number of times a recording was fully watched.
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
/// <param name="fullyWatchedCount">The number of times the recording was fully watched.</param>
public async Task SetRecordingFullyWatchedCount(string recordingFileName, int fullyWatchedCount)
{
    var req = NewRequest(HttpMethod.Post, "SetRecordingFullyWatchedCount");
    req.AddBody(new
    {
        RecordingFileName = recordingFileName,
        FullyWatchedCount = fullyWatchedCount
    });
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Mark a recording as last watched now (can be used by playback engines that don't support the last-watched position).
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
public async Task SetRecordingLastWatched(string recordingFileName)
{
    // NOTE: the REST endpoint is named "SetRecordingWatched" even though this method
    // is called SetRecordingLastWatched.
    var req = NewRequest(HttpMethod.Post, "SetRecordingWatched");
    req.AddBody(new { RecordingFileName = recordingFileName });
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Set the keep mode and value of an existing recording.
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
/// <param name="keepUntilMode">The keep until mode to use for this recording.</param>
/// <param name="keepUntilValue">The keep until value to use for this recording, or null if the mode doesn't require a value.</param>
public async Task SetRecordingKeepUntil(string recordingFileName, KeepUntilMode keepUntilMode, int? keepUntilValue)
{
    var req = NewRequest(HttpMethod.Post, "SetRecordingKeepUntil");
    // keepUntilValue may be null when the chosen mode does not need a value.
    req.AddBody(new
    {
        RecordingFileName = recordingFileName,
        KeepUntilMode = keepUntilMode,
        KeepUntilValue = keepUntilValue
    });
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Get the history of programs that have been recorded by this schedule. This list is the one used
/// to make the NewEpisodesOnly and NewTitlesOnly rules work.
/// </summary>
/// <param name="scheduleId">The ID of the schedule.</param>
/// <returns>An array containing zero or more recorded programs.</returns>
public async Task<List<ScheduleRecordedProgram>> GetPreviouslyRecordedHistory(Guid scheduleId)
{
    var req = NewRequest(HttpMethod.Get, "PreviouslyRecordedHistory/{0}", scheduleId);
    return await ExecuteAsync<List<ScheduleRecordedProgram>>(req).ConfigureAwait(false);
}
/// <summary>
/// Add an upcoming program to the list of previously recorded programs of its schedule. This list is the one used
/// to make the NewEpisodesOnly and NewTitlesOnly rules work.
/// </summary>
/// <param name="upcomingProgram">The upcoming program to add to the history.</param>
public async Task AddToPreviouslyRecordedHistory(UpcomingProgram upcomingProgram)
{
    var req = NewRequest(HttpMethod.Post, "AddToPreviouslyRecordedHistory");
    req.AddBody(upcomingProgram);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Remove an upcoming program from the list of previously recorded programs of its schedule. This list is the one used
/// to make the NewEpisodesOnly and NewTitlesOnly rules work.
/// </summary>
/// <param name="upcomingProgram">The upcoming program to remove from the history.</param>
public async Task RemoveFromPreviouslyRecordedHistory(UpcomingProgram upcomingProgram)
{
    var req = NewRequest(HttpMethod.Post, "RemoveFromPreviouslyRecordedHistory");
    req.AddBody(upcomingProgram);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Import a history of programs that have been recorded by this schedule. This list is the one used
/// to make the NewEpisodesOnly and NewTitlesOnly rules work.
/// </summary>
/// <param name="scheduleId">The ID of the schedule.</param>
/// <param name="history">An array containing zero or more recorded programs.</param>
public async Task ImportPreviouslyRecordedHistory(Guid scheduleId, IEnumerable<ScheduleRecordedProgram> history)
{
    var req = NewRequest(HttpMethod.Post, "ImportPreviouslyRecordedHistory/{0}", scheduleId);
    req.AddBody(history);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Delete a recorded program from the previously recorded history of its schedule.
/// </summary>
/// <param name="scheduleRecordedProgramId">The ID of the recorded program.</param>
public async Task DeleteFromPreviouslyRecordedHistory(int scheduleRecordedProgramId)
{
    var req = NewRequest(HttpMethod.Post, "DeleteFromPreviouslyRecordedHistory/{0}", scheduleRecordedProgramId);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Delete the previously recorded history of a schedule.
/// </summary>
/// <param name="scheduleId">The ID of the schedule.</param>
public async Task ClearPreviouslyRecordedHistory(Guid scheduleId)
{
    var req = NewRequest(HttpMethod.Post, "ClearPreviouslyRecordedHistory/{0}", scheduleId);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Import a new recording into the system. A new RecordingId will be auto-generated,
/// so this can be left Guid.Empty. If ScheduleId and ChannelId are not known, you may
/// generate your own (new unique) Guid and pass that in.
/// </summary>
/// <param name="recording">The recording to import.</param>
/// <returns>True if the recording was imported successfully, false if the recording filename was already imported.</returns>
public async Task<bool> ImportRecording(Recording recording)
{
    var req = NewRequest(HttpMethod.Post, "ImportNewRecording");
    req.AddBody(recording);
    var envelope = await ExecuteAsync<BooleanResult>(req).ConfigureAwait(false);
    return envelope.Result;
}
/// <summary>
/// Change the recording filename for an existing recording. Can be used after moving or transcoding a file.
/// </summary>
/// <param name="recordingFileName">The full path of the current recording file (UNC).</param>
/// <param name="newRecordingFileName">The full path of the new recording file to use (UNC).</param>
/// <param name="newRecordingStartTime">The new recording start-time (in case of trimming), or null to keep the existing time.</param>
/// <param name="newRecordingStopTime">The new recording stop-time (in case of trimming), or null to keep the existing time.</param>
/// <returns>True if the recording was found and modified successfully, false otherwise.</returns>
public async Task<bool> ChangeRecordingFile(string recordingFileName, string newRecordingFileName, DateTime? newRecordingStartTime, DateTime? newRecordingStopTime)
{
    // Null start/stop times keep the recording's existing times.
    var req = NewRequest(HttpMethod.Post, "ModifyRecordingFile");
    req.AddBody(new
    {
        RecordingFileName = recordingFileName,
        NewRecordingFileName = newRecordingFileName,
        NewRecordingStartTime = newRecordingStartTime,
        NewRecordingStopTime = newRecordingStopTime
    });
    var envelope = await ExecuteAsync<BooleanResult>(req).ConfigureAwait(false);
    return envelope.Result;
}
// Deserialization envelope for endpoints that return a single boolean value.
private class BooleanResult
{
// The boolean payload of the response.
public bool Result { get; set; }
}
/// <summary>
/// Schedule a processing command to run on a recording.
/// </summary>
/// <param name="recordingId">The unique ID of the recording.</param>
/// <param name="processingCommandId">The unique ID of the processing command.</param>
/// <param name="runAtTime">The time and date at which to run the command.</param>
public async Task RunProcessingCommandOnRecording(Guid recordingId, Guid processingCommandId, DateTime runAtTime)
{
    var req = NewRequest(HttpMethod.Post, "RunProcessingCommandOnRecording/{0}/{1}/{2}", recordingId, processingCommandId, runAtTime);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Get a resized recording thumbnail, if newer than the provided timestamp. The aspect ratio of the
/// original thumbnail is preserved so the returned image will be potentially either smaller than
/// the requested size, or centered on a background. If both width and height are set to 0, the full
/// size thumbnail will be returned.
/// </summary>
/// <param name="recordingId">The unique ID of the recording.</param>
/// <param name="width">The requested width, 0 to get the width according to the aspect.</param>
/// <param name="height">The requested height, 0 to get the height according to the aspect.</param>
/// <param name="argbBackground">The optional RGB color of the background, null to return the scaled image as is.</param>
/// <param name="modifiedAfterTime">Only return a thumbnail if it is newer than the given timestamp.</param>
/// <returns>A byte array containing the bytes of a JPG of the resized thumbnail, an empty array if no newer thumbnail was found or null if no thumbnail was found.</returns>
public async Task<byte[]> GetRecordingThumbnail(Guid recordingId, int width, int height, int? argbBackground, DateTime modifiedAfterTime)
{
    var request = NewRequest(HttpMethod.Get, "RecordingThumbnail/{0}/{1}/{2}/{3}", recordingId, width, height, modifiedAfterTime);
    if (argbBackground.HasValue)
    {
        request.AddParameter("argbBackground", argbBackground.Value);
    }
    using (var response = await ExecuteRequestAsync(request).ConfigureAwait(false))
    {
        switch (response.StatusCode)
        {
            case HttpStatusCode.NoContent:
                // No thumbnail exists for this recording.
                return null;
            case HttpStatusCode.NotModified:
                // A thumbnail exists but is not newer than modifiedAfterTime.
                return new byte[0];
            case HttpStatusCode.OK:
                // Await the content read instead of blocking on .Result: blocking inside an
                // async method risks deadlocks on a captured synchronization context and
                // wraps failures in AggregateException instead of the original exception.
                return await response.Content.ReadAsByteArrayAsync().ConfigureAwait(false);
        }
        throw new ArgusTVException(response.ReasonPhrase);
    }
}
/// <summary>
/// Start RTSP streaming of the given recording.
/// </summary>
/// <param name="recordingFileName">The filename of the recording.</param>
/// <returns>The RTSP url of the recording stream.</returns>
public async Task<string> StartRecordingStream(string recordingFileName)
{
    var req = NewRequest(HttpMethod.Post, "StartRecordingRtspStream");
    req.AddBody(new { RecordingFileName = recordingFileName });
    var envelope = await ExecuteAsync<StartRecordingStreamResult>(req).ConfigureAwait(false);
    return envelope.RtspUrl;
}
// Deserialization envelope for the "StartRecordingRtspStream" response body.
private class StartRecordingStreamResult
{
// The RTSP url of the started recording stream.
public string RtspUrl { get; set; }
}
/// <summary>
/// Stop RTSP streaming of the given recording.
/// </summary>
/// <param name="rtspUrl">The RTSP url of the recording stream.</param>
public async Task StopRecordingStream(string rtspUrl)
{
    var req = NewRequest(HttpMethod.Post, "StopRecordingRtspStream");
    req.AddBody(new { RtspUrl = rtspUrl });
    await ExecuteAsync(req).ConfigureAwait(false);
}
#endregion
#region Upcoming/Active Recordings
/// <summary>
/// Get all upcoming recordings.
/// </summary>
/// <param name="filter">Set filter to retrieve recordings and/or cancelled recordings.</param>
/// <param name="includeActive">Set to true to include upcoming recordings that are currently being recorded.</param>
/// <returns>An array with zero or more upcoming recordings.</returns>
public async Task<List<UpcomingRecording>> GetAllUpcomingRecordings(UpcomingRecordingsFilter filter, bool includeActive = false)
{
    // The filter enum travels as its numeric value in the URL.
    var req = NewRequest(HttpMethod.Get, "UpcomingRecordings/{0}", (int)filter);
    if (includeActive)
    {
        req.AddParameter("includeActive", true);
    }
    return await ExecuteAsync<List<UpcomingRecording>>(req).ConfigureAwait(false);
}
/// <summary>
/// Get the first upcoming recording that's not cancelled or unallocated.
/// </summary>
/// <param name="includeActive">Set to true to include upcoming recordings that are currently being recorded.</param>
/// <returns>Null or an upcoming (or active) recording.</returns>
public async Task<UpcomingRecording> GetNextUpcomingRecording(bool includeActive)
{
    var req = NewRequest(HttpMethod.Get, "NextUpcomingRecording");
    if (includeActive)
    {
        req.AddParameter("includeActive", true);
    }
    return await ExecuteAsync<UpcomingRecording>(req).ConfigureAwait(false);
}
/// <summary>
/// Get upcoming recordings for a specific schedule.
/// </summary>
/// <param name="scheduleId">The ID of the schedule.</param>
/// <param name="includeCancelled">Set to true to also retrieve cancelled programs.</param>
/// <returns>An array with zero or more upcoming recordings.</returns>
public async Task<List<UpcomingRecording>> GetUpcomingRecordings(Guid scheduleId, bool includeCancelled = false)
{
    var req = NewRequest(HttpMethod.Get, "UpcomingRecordingsForSchedule/{0}", scheduleId);
    if (includeCancelled)
    {
        req.AddParameter("includeCancelled", true);
    }
    return await ExecuteAsync<List<UpcomingRecording>>(req).ConfigureAwait(false);
}
/// <summary>
/// Get the currently active recordings.
/// </summary>
/// <returns>An array with zero or more active recordings.</returns>
public async Task<List<ActiveRecording>> GetActiveRecordings()
{
    var req = NewRequest(HttpMethod.Get, "ActiveRecordings");
    return await ExecuteAsync<List<ActiveRecording>>(req).ConfigureAwait(false);
}
/// <summary>
/// Check if a recording has started or is pending.
/// </summary>
/// <param name="upcomingProgramId">The ID of the recording's upcoming program.</param>
/// <returns>True if the recording has started (or is pending), false if it has not.</returns>
public async Task<bool> IsRecordingPendingOrActive(Guid upcomingProgramId)
{
    var req = NewRequest(HttpMethod.Get, "IsRecordingPendingOrActive/{0}", upcomingProgramId);
    var envelope = await ExecuteAsync<IsRecordingPendingOrActiveResult>(req).ConfigureAwait(false);
    return envelope.IsPendingOrActive;
}
// Deserialization envelope for the "IsRecordingPendingOrActive" response body.
private class IsRecordingPendingOrActiveResult
{
// True when the recording has started or is pending.
public bool IsPendingOrActive { get; set; }
}
/// <summary>
/// Abort an active recording.
/// </summary>
/// <param name="activeRecording">The active recording to abort.</param>
public async Task AbortActiveRecording(ActiveRecording activeRecording)
{
    var req = NewRequest(HttpMethod.Post, "AbortActiveRecording");
    req.AddBody(activeRecording);
    await ExecuteAsync(req).ConfigureAwait(false);
}
#endregion
#region Live TV/Radio
/// <summary>
/// Tune to a channel, and get a live RTSP stream to that channel.
/// </summary>
/// <param name="channel">The channel to tune to.</param>
/// <param name="liveStream">The live stream (RTSP) that is either existing or null for a new one.</param>
/// <returns>A LiveStreamResult value to indicate success or failure, and the new or updated live stream.</returns>
public async Task<TuneLiveStreamResult> TuneLiveStream(Channel channel, LiveStream liveStream)
{
    // liveStream may be null to request a brand-new stream, or an existing one to retune.
    var req = NewRequest(HttpMethod.Post, "TuneLiveStream");
    req.AddBody(new
    {
        Channel = channel,
        LiveStream = liveStream
    });
    return await ExecuteAsync<TuneLiveStreamResult>(req).ConfigureAwait(false);
}
/// <summary>
/// Stop the live stream.
/// </summary>
/// <param name="liveStream">The live stream (RTSP) of the stream to stop.</param>
public async Task StopLiveStream(LiveStream liveStream)
{
    var req = NewRequest(HttpMethod.Post, "StopLiveStream");
    req.AddBody(liveStream);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Get all live streams.
/// </summary>
/// <returns>An array of zero or more live streams.</returns>
public async Task<List<LiveStream>> GetLiveStreams()
{
    var req = NewRequest(HttpMethod.Get, "GetLiveStreams");
    return await ExecuteAsync<List<LiveStream>>(req).ConfigureAwait(false);
}
/// <summary>
/// Tell the recorder we are still showing this stream and to keep it alive. Call this every 30 seconds or so.
/// </summary>
/// <param name="liveStream">The live stream (RTSP) that is still in use.</param>
/// <returns>True if the live stream is still running, false otherwise.</returns>
public async Task<bool> KeepLiveStreamAlive(LiveStream liveStream)
{
    var req = NewRequest(HttpMethod.Post, "KeepStreamAlive");
    req.AddBody(liveStream);
    var envelope = await ExecuteAsync<KeepStreamAliveResult>(req).ConfigureAwait(false);
    return envelope.IsAlive;
}
// Deserialization envelope for the "KeepStreamAlive" response body.
private class KeepStreamAliveResult
{
// True when the live stream is still running.
public bool IsAlive { get; set; }
}
/// <summary>
/// Ask the recorder for the given live stream's tuning details (if possible).
/// </summary>
/// <param name="liveStream">The active live stream.</param>
/// <returns>The service tuning details, or null if none are available.</returns>
public async Task<ServiceTuning> GetLiveStreamTuningDetails(LiveStream liveStream)
{
    var req = NewRequest(HttpMethod.Post, "GetLiveStreamTuningDetails");
    req.AddBody(liveStream);
    return await ExecuteAsync<ServiceTuning>(req).ConfigureAwait(false);
}
/// <summary>
/// Get the live stream for a given RTSP url.
/// </summary>
/// <param name="rtspUrl">The RTSP url for which to find the live stream.</param>
/// <returns>The corresponding live stream.</returns>
public async Task<LiveStream> GetLiveStreamByRtspUrl(string rtspUrl)
{
    var req = NewRequest(HttpMethod.Post, "GetLiveStream");
    req.AddBody(new { RtspUrl = rtspUrl });
    return await ExecuteAsync<LiveStream>(req).ConfigureAwait(false);
}
/// <summary>
/// Get the live tuning state of one or more channels.
/// </summary>
/// <param name="channels">The channels to get the current state from.</param>
/// <param name="liveStream">The live stream you want to be ignored (since it's yours), or null.</param>
/// <returns>Null, or an array with the respective live state for each of the given channels.</returns>
public async Task<List<ChannelLiveState>> GetChannelsLiveState(IEnumerable<Channel> channels, LiveStream liveStream)
{
    // liveStream identifies the caller's own stream so it can be excluded from the state check.
    var req = NewRequest(HttpMethod.Post, "ChannelsLiveState");
    req.AddBody(new
    {
        Channels = channels,
        LiveStream = liveStream
    });
    return await ExecuteAsync<List<ChannelLiveState>>(req).ConfigureAwait(false);
}
#endregion
#region Teletext
/// <summary>
/// Ask the recorder whether the given live stream has teletext.
/// </summary>
/// <param name="liveStream">The live stream.</param>
/// <returns>True if teletext is present.</returns>
public async Task<bool> HasTeletext(LiveStream liveStream)
{
    // NOTE: the REST endpoint is named "CanGrabTeletext".
    var req = NewRequest(HttpMethod.Post, "CanGrabTeletext");
    req.AddBody(liveStream);
    var envelope = await ExecuteAsync<CanGrabTeletextResult>(req).ConfigureAwait(false);
    return envelope.HasTeletext;
}
// Deserialization envelope for the "CanGrabTeletext" response body.
private class CanGrabTeletextResult
{
// True when teletext is present on the live stream.
public bool HasTeletext { get; set; }
}
/// <summary>
/// Tell the recorder to start grabbing teletext for the given live stream.
/// </summary>
/// <param name="liveStream">The live stream.</param>
public async Task StartGrabbingTeletext(LiveStream liveStream)
{
    var req = NewRequest(HttpMethod.Post, "StartGrabbingTeletext");
    req.AddBody(liveStream);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Tell the recorder to stop grabbing teletext for the given live stream.
/// </summary>
/// <param name="liveStream">The live stream.</param>
public async Task StopGrabbingTeletext(LiveStream liveStream)
{
    var req = NewRequest(HttpMethod.Post, "StopGrabbingTeletext");
    req.AddBody(liveStream);
    await ExecuteAsync(req).ConfigureAwait(false);
}
/// <summary>
/// Ask the recorder whether it is grabbing teletext for the given live stream.
/// </summary>
/// <param name="liveStream">The live stream.</param>
/// <returns>True if the recorder is grabbing teletext.</returns>
public async Task<bool> IsGrabbingTeletext(LiveStream liveStream)
{
    var req = NewRequest(HttpMethod.Post, "GrabbingTeletext");
    req.AddBody(liveStream);
    var envelope = await ExecuteAsync<GrabbingTeletextResult>(req).ConfigureAwait(false);
    return envelope.IsGrabbingTeletext;
}
// Deserialization envelope for the "GrabbingTeletext" response body.
private class GrabbingTeletextResult
{
// True when the recorder is currently grabbing teletext.
public bool IsGrabbingTeletext { get; set; }
}
/// <summary>
/// Request a teletext page/subpage from the recorder for the given live stream.
/// </summary>
/// <param name="liveStream">The live stream.</param>
/// <param name="pageNumber">The teletext page number</param>
/// <param name="subPageNumber">The teletext subpage number</param>
/// <returns>The requested teletext page, or null if the page was not ready yet.</returns>
public async Task<TeletextPage> GetTeletextPage(LiveStream liveStream, int pageNumber, int subPageNumber)
{
    var req = NewRequest(HttpMethod.Post, "TeletextPage");
    req.AddBody(new
    {
        LiveStream = liveStream,
        PageNumber = pageNumber,
        SubPageNumber = subPageNumber
    });
    return await ExecuteAsync<TeletextPage>(req).ConfigureAwait(false);
}
/// <summary>
/// Request a teletext page/subpage from the recorder for the given live stream, as an image. Note that the page
/// may contain transparent parts even if 'useTransparentBackground' is set to false (e.g. subtitle or newsflash page).
/// </summary>
/// <param name="liveStream">The live stream.</param>
/// <param name="pageNumber">The teletext page number</param>
/// <param name="subPageNumber">The teletext subpage number</param>
/// <param name="imageWidth">The width of the teletext image</param>
/// <param name="imageHeight">The height of the teletext image</param>
/// <param name="useTransparentBackground">Use a transparent background instead of black.</param>
/// <param name="showHidden">Show the hidden teletext information.</param>
/// <returns>The requested teletext page in form of an image, or null if the page was not ready yet.</returns>
public async Task<byte[]> GetTeletextPageImage(LiveStream liveStream, int pageNumber, int subPageNumber, int imageWidth, int imageHeight, bool useTransparentBackground = false, bool showHidden = false)
{
    var request = NewRequest(HttpMethod.Post, "TeletextPageImage/{0}/{1}", imageWidth, imageHeight);
    // Optional flags are only sent when they deviate from the server's defaults.
    if (useTransparentBackground)
    {
        request.AddParameter("useTransparentBackground", true);
    }
    if (showHidden)
    {
        request.AddParameter("showHidden", true);
    }
    request.AddBody(new
    {
        LiveStream = liveStream,
        PageNumber = pageNumber,
        SubPageNumber = subPageNumber
    });
    using (var response = await ExecuteRequestAsync(request).ConfigureAwait(false))
    {
        switch (response.StatusCode)
        {
            case HttpStatusCode.NoContent:
                // The requested page does not exist.
                return null;
            case HttpStatusCode.NotModified:
                // The page exists but was not ready yet.
                return new byte[0];
            case HttpStatusCode.OK:
                // Await the content read instead of blocking on .Result: blocking inside an
                // async method risks deadlocks on a captured synchronization context and
                // wraps failures in AggregateException instead of the original exception.
                return await response.Content.ReadAsByteArrayAsync().ConfigureAwait(false);
        }
        throw new ArgusTVException(response.ReasonPhrase);
    }
}
#endregion
}
/// <summary>
/// Result from tuning a live stream.
/// </summary>
/// <summary>
/// Result from tuning a live stream, returned by the TuneLiveStream call.
/// </summary>
public class TuneLiveStreamResult
{
/// <summary>
/// A LiveStreamResult value to indicate success or failure.
/// </summary>
public LiveStreamResult LiveStreamResult { get; set; }
/// <summary>
/// The new or updated live stream.
/// </summary>
public LiveStream LiveStream { get; set; }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using wishlist.Areas.HelpPage.ModelDescriptions;
using wishlist.Areas.HelpPage.Models;
namespace wishlist.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Sets the documentation provider for help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
    // Replace the registered IDocumentationProvider service with the supplied one.
    var services = config.Services;
    services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Sets the objects that will be used by the formatters to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
    var generator = config.GetHelpPageSampleGenerator();
    generator.SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // "*" matches any parameter combination for this action.
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action with parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // "*" matches any parameter combination for this action.
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action with specific parameters.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
{
    // A key built from only the media type matches every action.
    HelpPageSampleKey sampleKey = new HelpPageSampleKey(mediaType);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(sampleKey, sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
    // A media-type + CLR-type key matches any action whose parameter or return type is 'type'.
    HelpPageSampleKey sampleKey = new HelpPageSampleKey(mediaType, type);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(sampleKey, sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: this mapping applies regardless of bound parameters.
    HelpPageSampleKey sampleKey = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(sampleKey, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    // Mapping is scoped to the named action parameters.
    HelpPageSampleKey sampleKey = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(sampleKey, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned by an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: this mapping applies regardless of bound parameters.
    HelpPageSampleKey sampleKey = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(sampleKey, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned by an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    // Mapping is scoped to the named action parameters.
    HelpPageSampleKey sampleKey = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(sampleKey, type);
}
/// <summary>
/// Gets the help page sample generator, creating and caching a default instance on first use.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
    // The generator is stored in the configuration's property bag, keyed by its own type.
    object generator = config.Properties.GetOrAdd(
        typeof(HelpPageSampleGenerator),
        key => new HelpPageSampleGenerator());
    return (HelpPageSampleGenerator)generator;
}
/// <summary>
/// Sets the help page sample generator, replacing any previously registered instance.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
    // AddOrUpdate guarantees the supplied generator wins whether or not one was already stored.
    config.Properties.AddOrUpdate(
        typeof(HelpPageSampleGenerator),
        key => sampleGenerator,
        (key, existing) => sampleGenerator);
}
/// <summary>
/// Gets the model description generator, initializing and caching it on first use.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
    // Cached in the configuration's property bag; built lazily from the API explorer.
    object generator = config.Properties.GetOrAdd(
        typeof(ModelDescriptionGenerator),
        key => InitializeModelDescriptionGenerator(config));
    return (ModelDescriptionGenerator)generator;
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>, or null when no API matches the given ID.
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
    string modelId = ApiModelPrefix + apiDescriptionId;
    object model;
    if (!config.Properties.TryGetValue(modelId, out model))
    {
        // Cache miss: locate the matching API description (case-insensitive friendly ID)
        // and build + cache its model when found.
        ApiDescription apiDescription = config.Services.GetApiExplorer().ApiDescriptions
            .FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
        if (apiDescription != null)
        {
            model = GenerateApiModel(apiDescription, config);
            config.Properties.TryAdd(modelId, model);
        }
    }
    return (HelpPageApiModel)model;
}
/// <summary>
/// Builds the complete help page model for one API: URI parameters, request model,
/// resource description and sample request/response payloads.
/// </summary>
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
    ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
    HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
    var apiModel = new HelpPageApiModel { ApiDescription = apiDescription };
    GenerateUriParameters(apiModel, modelGenerator);
    GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
    GenerateResourceDescription(apiModel, modelGenerator);
    GenerateSamples(apiModel, sampleGenerator);
    return apiModel;
}
/// <summary>
/// Populates <c>apiModel.UriParameters</c> from the API description's FromUri parameters.
/// Complex types that are not string-convertible are flattened into their properties;
/// all other parameters are added as single entries.
/// </summary>
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        if (apiParameter.Source == ApiParameterSource.FromUri)
        {
            HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
            Type parameterType = null;
            ModelDescription typeDescription = null;
            ComplexTypeModelDescription complexTypeDescription = null;
            if (parameterDescriptor != null)
            {
                parameterType = parameterDescriptor.ParameterType;
                typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                complexTypeDescription = typeDescription as ComplexTypeModelDescription;
            }
            // Example:
            // [TypeConverter(typeof(PointConverter))]
            // public class Point
            // {
            //     public Point(int x, int y)
            //     {
            //         X = x;
            //         Y = y;
            //     }
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
            //
            // public class Point
            // {
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Regular complex class Point will have properties X and Y added to UriParameters collection.
            if (complexTypeDescription != null
                && !IsBindableWithTypeConverter(parameterType))
            {
                // Flatten the complex type: each of its described properties becomes its own URI parameter.
                foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                {
                    apiModel.UriParameters.Add(uriParameter);
                }
            }
            else if (parameterDescriptor != null)
            {
                // Simple or converter-bindable parameter: add it directly, annotated with
                // whether it is required and with any declared default value.
                ParameterDescription uriParameter =
                    AddParameterDescription(apiModel, apiParameter, typeDescription);
                if (!parameterDescriptor.IsOptional)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                }
                object defaultValue = parameterDescriptor.DefaultValue;
                if (defaultValue != null)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                }
            }
            else
            {
                Debug.Assert(parameterDescriptor == null);
                // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                // when source is FromUri. Ignored in request model and among resource parameters but listed
                // as a simple string here.
                ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                AddParameterDescription(apiModel, apiParameter, modelDescription);
            }
        }
    }
}
/// <summary>
/// Determines whether the parameter type has a registered <see cref="TypeConverter"/>
/// capable of converting from a string (i.e. the type is URI-bindable as a single value).
/// </summary>
private static bool IsBindableWithTypeConverter(Type parameterType)
{
    // A null type cannot be bound; otherwise defer to the type's registered converter.
    return parameterType != null
        && TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
/// <summary>
/// Creates a <see cref="ParameterDescription"/> from an API parameter, appends it to the
/// model's URI parameters and returns it so callers can attach annotations.
/// </summary>
private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
    ApiParameterDescription apiParameter, ModelDescription typeDescription)
{
    var description = new ParameterDescription
    {
        Name = apiParameter.Name,
        Documentation = apiParameter.Documentation,
        TypeDescription = typeDescription,
    };
    apiModel.UriParameters.Add(description);
    return description;
}
/// <summary>
/// Determines the request body model for the API: either the FromBody parameter's declared
/// type, or — for raw <see cref="HttpRequestMessage"/> parameters — the actual content type
/// resolved by the sample generator (when one is registered).
/// </summary>
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription parameter in apiDescription.ParameterDescriptions)
    {
        if (parameter.Source == ApiParameterSource.FromBody)
        {
            // Body parameter: its declared type is the request model.
            apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameter.ParameterDescriptor.ParameterType);
            apiModel.RequestDocumentation = parameter.Documentation;
        }
        else if (parameter.ParameterDescriptor != null &&
            parameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
        {
            // Raw message parameter: the model is only known if a concrete type was registered.
            Type resolvedType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            if (resolvedType != null)
            {
                apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(resolvedType);
            }
        }
    }
}
/// <summary>
/// Sets the resource (response) description from the API's response type, preferring the
/// documented response type over the declared one. Void or unknown responses are skipped.
/// </summary>
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
    Type responseType = response.ResponseType ?? response.DeclaredType;
    if (responseType == null || responseType == typeof(void))
    {
        return; // Nothing to describe.
    }
    apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
/// <summary>
/// Fills in the model's sample requests and responses. Sample-generation failures are
/// surfaced as error messages on the model rather than propagated.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
    try
    {
        foreach (var sampleRequest in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
        {
            apiModel.SampleRequests.Add(sampleRequest.Key, sampleRequest.Value);
            LogInvalidSampleAsError(apiModel, sampleRequest.Value);
        }
        foreach (var sampleResponse in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
        {
            apiModel.SampleResponses.Add(sampleResponse.Key, sampleResponse.Value);
            LogInvalidSampleAsError(apiModel, sampleResponse.Value);
        }
    }
    catch (Exception e)
    {
        // Record the root-cause message so the help page still renders.
        apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture,
            "An exception has occurred while generating the sample. Exception message: {0}",
            HelpPageSampleGenerator.UnwrapException(e).Message));
    }
}
/// <summary>
/// Finds the parameter that carries the request body resource for an API. Raw
/// <see cref="HttpRequestMessage"/> parameters are resolved to their registered actual
/// content type; returns false when no resource parameter (or type) can be determined.
/// </summary>
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
    resourceType = null;
    parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
        p => p.Source == ApiParameterSource.FromBody ||
            (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));
    if (parameterDescription == null)
    {
        return false;
    }
    resourceType = parameterDescription.ParameterDescriptor.ParameterType;
    if (resourceType == typeof(HttpRequestMessage))
    {
        // Declared type is the raw message: ask the sample generator for the real content type.
        resourceType = config.GetHelpPageSampleGenerator().ResolveHttpRequestMessageType(apiDescription);
    }
    if (resourceType == null)
    {
        parameterDescription = null;
        return false;
    }
    return true;
}
/// <summary>
/// Creates a <see cref="ModelDescriptionGenerator"/> pre-populated with a description for
/// every request body resource type exposed by the API explorer.
/// </summary>
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
    var modelGenerator = new ModelDescriptionGenerator(config);
    foreach (ApiDescription api in config.Services.GetApiExplorer().ApiDescriptions)
    {
        ApiParameterDescription parameterDescription;
        Type parameterType;
        if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
        {
            // Warm the generator's cache so resource models render immediately.
            modelGenerator.GetOrCreateModelDescription(parameterType);
        }
    }
    return modelGenerator;
}
/// <summary>
/// If the sample failed to generate (i.e. is an <see cref="InvalidSample"/>), records its
/// error message on the model; otherwise does nothing.
/// </summary>
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    var invalidSample = sample as InvalidSample;
    if (invalidSample == null)
    {
        return;
    }
    apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
| |
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading;
using Grpc.Core.Utils;
namespace Grpc.Core.Internal
{
/// <summary>
/// Utility methods for detecting platform and architecture.
/// </summary>
/// <summary>
/// Utility methods for detecting platform and architecture.
/// All detection runs once in the static constructor and is cached in readonly fields.
/// </summary>
internal static class PlatformApis
{
    // Assembly-qualified type names probed via Type.GetType to detect Unity/Xamarin
    // at runtime without a compile-time dependency on those frameworks.
    const string UnityEngineApplicationClassName = "UnityEngine.Application, UnityEngine";
    const string XamarinAndroidObjectClassName = "Java.Lang.Object, Mono.Android";
    const string XamarinIOSObjectClassName = "Foundation.NSObject, Xamarin.iOS";
    static readonly bool isLinux;
    static readonly bool isMacOSX;
    static readonly bool isWindows;
    static readonly bool isMono;
    static readonly bool isNetCore;
    static readonly bool isUnity;
    static readonly bool isUnityIOS;
    static readonly bool isXamarin;
    static readonly bool isXamarinIOS;
    static readonly bool isXamarinAndroid;
    static PlatformApis()
    {
#if NETSTANDARD1_5
        // On netstandard1.5, RuntimeInformation gives OS and framework directly.
        isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
        isMacOSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
        isWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
        isNetCore = RuntimeInformation.FrameworkDescription.StartsWith(".NET Core");
#else
        var platform = Environment.OSVersion.Platform;
        // PlatformID.MacOSX is never returned, commonly used trick is to identify Mac is by using uname.
        isMacOSX = (platform == PlatformID.Unix && GetUname() == "Darwin");
        isLinux = (platform == PlatformID.Unix && !isMacOSX);
        isWindows = (platform == PlatformID.Win32NT || platform == PlatformID.Win32S || platform == PlatformID.Win32Windows);
        isNetCore = false;
#endif
        // Mono defines the Mono.Runtime type; its presence is the standard detection trick.
        isMono = Type.GetType("Mono.Runtime") != null;
        // Unity
        var unityApplicationClass = Type.GetType(UnityEngineApplicationClassName);
        if (unityApplicationClass != null)
        {
            isUnity = true;
            // Consult value of Application.platform via reflection
            // https://docs.unity3d.com/ScriptReference/Application-platform.html
            var platformProperty = unityApplicationClass.GetTypeInfo().GetProperty("platform");
            var unityRuntimePlatform = platformProperty?.GetValue(null)?.ToString();
            isUnityIOS = (unityRuntimePlatform == "IPhonePlayer");
        }
        else
        {
            isUnity = false;
            isUnityIOS = false;
        }
        // Xamarin: detected by the presence of platform-specific base object types.
        isXamarinIOS = Type.GetType(XamarinIOSObjectClassName) != null;
        isXamarinAndroid = Type.GetType(XamarinAndroidObjectClassName) != null;
        isXamarin = isXamarinIOS || isXamarinAndroid;
    }
    /// <summary>true if running on Linux, false otherwise.</summary>
    public static bool IsLinux
    {
        get { return isLinux; }
    }
    /// <summary>true if running on macOS, false otherwise.</summary>
    public static bool IsMacOSX
    {
        get { return isMacOSX; }
    }
    /// <summary>true if running on Windows, false otherwise.</summary>
    public static bool IsWindows
    {
        get { return isWindows; }
    }
    /// <summary>true if running on the Mono runtime, false otherwise.</summary>
    public static bool IsMono
    {
        get { return isMono; }
    }
    /// <summary>
    /// true if running on Unity platform.
    /// </summary>
    public static bool IsUnity
    {
        get { return isUnity; }
    }
    /// <summary>
    /// true if running on Unity iOS, false otherwise.
    /// </summary>
    public static bool IsUnityIOS
    {
        get { return isUnityIOS; }
    }
    /// <summary>
    /// true if running on a Xamarin platform (either Xamarin.Android or Xamarin.iOS),
    /// false otherwise.
    /// </summary>
    public static bool IsXamarin
    {
        get { return isXamarin; }
    }
    /// <summary>
    /// true if running on Xamarin.iOS, false otherwise.
    /// </summary>
    public static bool IsXamarinIOS
    {
        get { return isXamarinIOS; }
    }
    /// <summary>
    /// true if running on Xamarin.Android, false otherwise.
    /// </summary>
    public static bool IsXamarinAndroid
    {
        get { return isXamarinAndroid; }
    }
    /// <summary>
    /// true if running on .NET Core (CoreCLR), false otherwise.
    /// </summary>
    public static bool IsNetCore
    {
        get { return isNetCore; }
    }
    /// <summary>true when the process is 64-bit (pointer size is 8 bytes).</summary>
    public static bool Is64Bit
    {
        get { return IntPtr.Size == 8; }
    }
    /// <summary>
    /// Returns <c>UnityEngine.Application.platform</c> as a string.
    /// See https://docs.unity3d.com/ScriptReference/Application-platform.html for possible values.
    /// Value is obtained via reflection to avoid compile-time dependency on Unity.
    /// This method should only be called if <c>IsUnity</c> is <c>true</c>.
    /// </summary>
    public static string GetUnityRuntimePlatform()
    {
        GrpcPreconditions.CheckState(IsUnity, "Not running on Unity.");
#if NETSTANDARD1_5
        return Type.GetType(UnityEngineApplicationClassName).GetTypeInfo().GetProperty("platform").GetValue(null).ToString();
#else
        return Type.GetType(UnityEngineApplicationClassName).GetProperty("platform").GetValue(null).ToString();
#endif
    }
    // P/Invoke into libc's uname(2); buf receives the utsname struct.
    [DllImport("libc")]
    static extern int uname(IntPtr buf);
    // Best-effort wrapper around uname(2); returns the sysname field (e.g. "Darwin")
    // or an empty string if the call is unavailable or fails. Never throws.
    static string GetUname()
    {
        // 8 KB comfortably exceeds any platform's utsname struct size.
        var buffer = Marshal.AllocHGlobal(8192);
        try
        {
            if (uname(buffer) == 0)
            {
                // sysname is the first NUL-terminated field of utsname.
                return Marshal.PtrToStringAnsi(buffer);
            }
            return string.Empty;
        }
        catch
        {
            // Deliberate swallow: platform detection must not crash the process.
            return string.Empty;
        }
        finally
        {
            if (buffer != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(buffer);
            }
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Threading;
namespace Apache.Geode.Client.UnitTests
{
using NUnit.Framework;
using Apache.Geode.DUnitFramework;
using Apache.Geode.Client.Tests;
using Apache.Geode.Client;
using QueryStatics = Apache.Geode.Client.Tests.QueryStatics;
using QueryCategory = Apache.Geode.Client.Tests.QueryCategory;
using QueryStrings = Apache.Geode.Client.Tests.QueryStrings;
/// <summary>
/// DUnit regression tests for remote parameterized queries that return result sets,
/// exercised against both PDX and classic DataSerializable domain objects.
/// The fixture starts a Java locator/server, populates test regions from a client
/// process, then runs supported and unsupported parameterized queries.
/// </summary>
[TestFixture]
[Category("group1")]
[Category("unicast_only")]
[Category("generics")]
public class ThinClientRemoteParamQueryResultSetTests : ThinClientRegionSteps
{
    #region Private members
    // Client processes the fixture drives; every test step executes inside one of these.
    private UnitProcess m_client1;
    private UnitProcess m_client2;
    // Server-side region names referenced by the canned parameterized queries.
    private static string[] QueryRegionNames = { "Portfolios", "Positions", "Portfolios2",
      "Portfolios3" };
    #endregion
    // Supplies the client processes to the DUnit framework.
    protected override ClientBase[] GetClients()
    {
        m_client1 = new UnitProcess();
        m_client2 = new UnitProcess();
        return new ClientBase[] { m_client1, m_client2 };
    }
    [TestFixtureSetUp]
    public override void InitTests()
    {
        base.InitTests();
    }
    // Closes both clients and stops any Java servers still running after each test.
    [TearDown]
    public override void EndTest()
    {
        m_client1.Call(Close);
        m_client2.Call(Close);
        CacheHelper.StopJavaServers();
        base.EndTest();
    }
    [SetUp]
    public override void InitTest()
    {
        m_client1.Call(InitClient);
        m_client2.Call(InitClient);
    }
    #region Functions invoked by the tests
    // Initializes the client cache and registers both the DataSerializable and the
    // PDX flavors of the Portfolio/Position test types.
    public void InitClient()
    {
        CacheHelper.Init();
        CacheHelper.DCache.TypeRegistry.RegisterType(Portfolio.CreateDeserializable, 8);
        CacheHelper.DCache.TypeRegistry.RegisterType(Position.CreateDeserializable, 7);
        CacheHelper.DCache.TypeRegistry.RegisterPdxType(PortfolioPdx.CreateDeserializable);
        CacheHelper.DCache.TypeRegistry.RegisterPdxType(PositionPdx.CreateDeserializable);
    }
    // Creates the four pool-backed test regions plus a Positions subregion under Portfolios.
    public void StepOne(string locators, bool isPdx)
    {
        m_isPdx = isPdx;
        CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[0], true, true,
            null, locators, "__TESTPOOL1_", true);
        CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[1], true, true,
            null, locators, "__TESTPOOL1_", true);
        CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[2], true, true,
            null, locators, "__TESTPOOL1_", true);
        CacheHelper.CreateTCRegion_Pool<object, object>(QueryRegionNames[3], true, true,
            null, locators, "__TESTPOOL1_", true);
        IRegion<object, object> region = CacheHelper.GetRegion<object, object>(QueryRegionNames[0]);
        Apache.Geode.Client.RegionAttributes<object, object> regattrs = region.Attributes;
        region.CreateSubRegion(QueryRegionNames[1], regattrs);
    }
    // Populates every region (and the subregion) with Portfolio/Position data,
    // choosing PDX or DataSerializable objects according to isPdx.
    public void StepTwo(bool isPdx)
    {
        m_isPdx = isPdx;
        IRegion<object, object> region0 = CacheHelper.GetRegion<object, object>(QueryRegionNames[0]);
        IRegion<object, object> subRegion0 = (IRegion<object, object>)region0.GetSubRegion(QueryRegionNames[1]);
        IRegion<object, object> region1 = CacheHelper.GetRegion<object, object>(QueryRegionNames[1]);
        IRegion<object, object> region2 = CacheHelper.GetRegion<object, object>(QueryRegionNames[2]);
        IRegion<object, object> region3 = CacheHelper.GetRegion<object, object>(QueryRegionNames[3]);
        QueryHelper<object, object> qh = QueryHelper<object, object>.GetHelper(CacheHelper.DCache);
        Util.Log("SetSize {0}, NumSets {1}.", qh.PortfolioSetSize,
            qh.PortfolioNumSets);
        if (!m_isPdx)
        {
            qh.PopulatePortfolioData(region0, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePositionData(subRegion0, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePositionData(region1, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePortfolioData(region2, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePortfolioData(region3, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
        }
        else
        {
            qh.PopulatePortfolioPdxData(region0, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePositionPdxData(subRegion0, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePositionPdxData(region1, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePortfolioPdxData(region2, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
            qh.PopulatePortfolioPdxData(region3, qh.PortfolioSetSize,
                qh.PortfolioNumSets);
        }
    }
    // Executes every supported parameterized result-set query, verifies the returned
    // row counts and logs each result object; fails the test if any query misbehaves.
    public void StepThreePQRS()
    {
        bool ErrorOccurred = false;
        QueryHelper<object, object> qh = QueryHelper<object, object>.GetHelper(CacheHelper.DCache);
        var qs = CacheHelper.DCache.GetPoolManager().Find("__TESTPOOL1_").GetQueryService();
        int qryIdx = 0;
        foreach (QueryStrings paramqrystr in QueryStatics.ResultSetParamQueries)
        {
            if (paramqrystr.Category == QueryCategory.Unsupported)
            {
                Util.Log("Skipping query index {0} because it is unsupported.", qryIdx);
                qryIdx++;
                continue;
            }
            Util.Log("Evaluating query index {0}. {1}", qryIdx, paramqrystr.Query);
            Query<object> query = qs.NewQuery<object>(paramqrystr.Query);
            //Populate the parameter list (paramList) for the query.
            object[] paramList = new object[QueryStatics.NoOfQueryParam[qryIdx]];
            int numVal = 0;
            for (int ind = 0; ind < QueryStatics.NoOfQueryParam[qryIdx]; ind++)
            {
                //Util.Log("NIL::PQRS:: QueryStatics.QueryParamSet[{0},{1}] = {2}", qryIdx, ind, QueryStatics.QueryParamSet[qryIdx][ind]);
                try
                {
                    // Numeric parameter strings are bound as ints; everything else as strings.
                    numVal = Convert.ToInt32(QueryStatics.QueryParamSet[qryIdx][ind]);
                    paramList[ind] = numVal;
                    //Util.Log("NIL::PQRS::361 Integer Args:: paramList[0] = {1}", ind, paramList[ind]);
                }
                catch (FormatException)
                {
                    // Param string is not a sequence of digits; bind it as a string.
                    paramList[ind] = (System.String)QueryStatics.QueryParamSet[qryIdx][ind];
                    //Util.Log("NIL::PQRS:: Integer Args:: routingObj[0] = {1}", ind, routingObj[ind].ToString());
                }
            }
            ISelectResults<object> results = query.Execute(paramList);
            // Verify the result row count (constant, or scaled by the number of data sets).
            int expectedRowCount = qh.IsExpectedRowsConstantPQRS(qryIdx) ?
                QueryStatics.ResultSetPQRowCounts[qryIdx] : QueryStatics.ResultSetPQRowCounts[qryIdx] * qh.PortfolioNumSets;
            if (!qh.VerifyRS(results, expectedRowCount))
            {
                ErrorOccurred = true;
                Util.Log("Query verify failed for query index {0}.", qryIdx);
                qryIdx++;
                continue;
            }
            ResultSet<object> rs = results as ResultSet<object>;
            foreach (object item in rs)
            {
                if (!m_isPdx)
                {
                    Portfolio port = item as Portfolio;
                    if (port == null)
                    {
                        Position pos = item as Position;
                        if (pos == null)
                        {
                            string cs = item as string;
                            if (cs == null)
                            {
                                Util.Log("Query got other/unknown object.");
                            }
                            else
                            {
                                Util.Log("Query got string : {0}.", cs);
                            }
                        }
                        else
                        {
                            Util.Log("Query got Position object with secId {0}, shares {1}.", pos.SecId, pos.SharesOutstanding);
                        }
                    }
                    else
                    {
                        Util.Log("Query got Portfolio object with ID {0}, pkid {1}.", port.ID, port.Pkid);
                    }
                }
                else
                {
                    PortfolioPdx port = item as PortfolioPdx;
                    if (port == null)
                    {
                        PositionPdx pos = item as PositionPdx;
                        if (pos == null)
                        {
                            string cs = item as string;
                            if (cs == null)
                            {
                                Util.Log("Query got other/unknown object.");
                            }
                            else
                            {
                                Util.Log("Query got string : {0}.", cs);
                            }
                        }
                        else
                        {
                            Util.Log("Query got PositionPdx object with secId {0}, shares {1}.", pos.secId, pos.getSharesOutstanding);
                        }
                    }
                    else
                    {
                        Util.Log("Query got PortfolioPdx object with ID {0}, pkid {1}.", port.ID, port.Pkid);
                    }
                }
            }
            qryIdx++;
        }
        Assert.IsFalse(ErrorOccurred, "One or more query validation errors occurred.");
    }
    // Executes every UNSUPPORTED parameterized query and asserts that each one raises
    // a GeodeException; any success or unexpected exception type fails the test.
    public void StepFourPQRS()
    {
        bool ErrorOccurred = false;
        QueryHelper<object, object> qh = QueryHelper<object, object>.GetHelper(CacheHelper.DCache);
        var qs = CacheHelper.DCache.GetPoolManager().Find("__TESTPOOL1_").GetQueryService();
        int qryIdx = 0;
        foreach (QueryStrings qrystr in QueryStatics.ResultSetParamQueries)
        {
            if (qrystr.Category != QueryCategory.Unsupported)
            {
                qryIdx++;
                continue;
            }
            Util.Log("Evaluating unsupported query index {0}.", qryIdx);
            Query<object> query = qs.NewQuery<object>(qrystr.Query);
            object[] paramList = new object[QueryStatics.NoOfQueryParam[qryIdx]];
            Int32 numVal = 0;
            for (Int32 ind = 0; ind < QueryStatics.NoOfQueryParam[qryIdx]; ind++)
            {
                //Util.Log("NIL::PQRS:: QueryStatics.QueryParamSet[{0},{1}] = {2}", qryIdx, ind, QueryStatics.QueryParamSet[qryIdx, ind]);
                try
                {
                    // Numeric parameter strings are bound as ints; everything else as strings.
                    numVal = Convert.ToInt32(QueryStatics.QueryParamSet[qryIdx][ind]);
                    paramList[ind] = numVal;
                    //Util.Log("NIL::PQRS:: Integer Args:: paramList[0] = {1}", ind, paramList[ind]);
                }
                catch (FormatException)
                {
                    // Param string is not a sequence of digits; bind it as a string.
                    paramList[ind] = (System.String)QueryStatics.QueryParamSet[qryIdx][ind];
                    //Util.Log("NIL::PQRS:: Integer Args:: paramList[0] = {1}", ind, paramList[ind].ToString());
                }
            }
            try
            {
                ISelectResults<object> results = query.Execute(paramList);
                Util.Log("Query exception did not occur for index {0}.", qryIdx);
                ErrorOccurred = true;
                qryIdx++;
            }
            catch (GeodeException)
            {
                // ok, exception expected, do nothing.
                qryIdx++;
            }
            catch (Exception)
            {
                Util.Log("Query unexpected exception occurred for index {0}.", qryIdx);
                ErrorOccurred = true;
                qryIdx++;
            }
        }
        Assert.IsFalse(ErrorOccurred, "Query expected exceptions did not occur.");
    }
    // Stops Java cacheserver 1 (used by kill-server scenarios).
    public void KillServer()
    {
        CacheHelper.StopJavaServer(1);
        Util.Log("Cacheserver 1 stopped.");
    }
    public delegate void KillServerDelegate();
    #endregion
    // Full scenario: start locator + server, create and populate regions, run the
    // supported (StepThree) and unsupported (StepFour) query steps, then tear down.
    void runRemoteParamQueryRS()
    {
        CacheHelper.SetupJavaServers(true, "remotequeryN.xml");
        CacheHelper.StartJavaLocator(1, "GFELOC");
        Util.Log("Locator started");
        CacheHelper.StartJavaServerWithLocators(1, "GFECS1", 1);
        Util.Log("Cacheserver 1 started.");
        m_client1.Call(StepOne, CacheHelper.Locators, m_isPdx);
        Util.Log("StepOne complete.");
        m_client1.Call(StepTwo, m_isPdx);
        Util.Log("StepTwo complete.");
        m_client1.Call(StepThreePQRS);
        Util.Log("StepThree complete.");
        m_client1.Call(StepFourPQRS);
        Util.Log("StepFour complete.");
        m_client1.Call(Close);
        CacheHelper.StopJavaServer(1);
        Util.Log("Cacheserver 1 stopped.");
        CacheHelper.StopJavaLocator(1);
        Util.Log("Locator stopped");
    }
    // Serialization mode for the current run; toggled by the two [Test] entry points.
    static bool m_isPdx = false;
    [Test]
    public void RemoteParamQueryRSWithPdx()
    {
        m_isPdx = true;
        runRemoteParamQueryRS();
    }
    [Test]
    public void RemoteParamQueryRSWithoutPdx()
    {
        m_isPdx = false;
        runRemoteParamQueryRS();
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace PlatformProject.WebApp.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
// Number of elements generated for arrays, collections, dictionaries and queryables.
private const int DefaultCollectionSize = 3;
// Generator delegated to for simple types (primitives, string, DateTime, Uri, ...).
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
    // Start with a fresh reference map; it lets the recursive overload reuse
    // already-created instances instead of recursing forever on cyclic types.
    var createdObjectReferences = new Dictionary<Type, object>();
    return GenerateObject(type, createdObjectReferences);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
    // Dispatch by category. Order matters: specific checks (simple types, arrays,
    // generics) run before the broader non-generic interface checks, and the exact
    // interface matches (IDictionary, IList/IEnumerable/ICollection) run before the
    // IsAssignableFrom fallbacks.
    try
    {
        if (SimpleTypeObjectGenerator.CanGenerateObject(type))
        {
            return SimpleObjectGenerator.GenerateObject(type);
        }
        if (type.IsArray)
        {
            return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
        }
        if (type.IsGenericType)
        {
            // Covers Nullable<>, KeyValuePair<,>, tuples and generic collections.
            return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
        }
        if (type == typeof(IDictionary))
        {
            // The interface itself gets a concrete Hashtable.
            return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
        }
        if (typeof(IDictionary).IsAssignableFrom(type))
        {
            return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
        }
        if (type == typeof(IList) ||
            type == typeof(IEnumerable) ||
            type == typeof(ICollection))
        {
            // These non-generic collection interfaces get a concrete ArrayList.
            return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
        }
        if (typeof(IList).IsAssignableFrom(type))
        {
            return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
        }
        if (type == typeof(IQueryable))
        {
            return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
        }
        if (type.IsEnum)
        {
            return GenerateEnum(type);
        }
        if (type.IsPublic || type.IsNestedPublic)
        {
            // Anything else that is publicly constructible is treated as a POCO.
            return GenerateComplexObject(type, createdObjectReferences);
        }
    }
    catch
    {
        // Returns null if anything fails
        return null;
    }
    // Non-public or otherwise unsupported type.
    return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
#region Disclaimer/Info
///////////////////////////////////////////////////////////////////////////////////////////////////
// Subtext WebLog
//
// Subtext is an open source weblog system that is a fork of the .TEXT
// weblog system.
//
// For updated news and information please visit http://subtextproject.com/
// Subtext is hosted at Google Code at http://code.google.com/p/subtext/
// The development mailing list is at subtext@googlegroups.com
//
// This project is licensed under the BSD license. See the License.txt file for more information.
///////////////////////////////////////////////////////////////////////////////////////////////////
#endregion
using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.Linq;
using Subtext.Extensibility;
using Subtext.Extensibility.Interfaces;
using Subtext.Framework.Components;
using Subtext.Framework.Configuration;
using Subtext.Framework.Text;
namespace Subtext.Framework.Data
{
public partial class DatabaseObjectProvider
{
    /// <summary>
    /// Returns a pageable collection of entries ordered by the id descending.
    /// This is used in the admin section.
    /// </summary>
    public override IPagedCollection<EntryStatsView> GetEntries(PostType postType, int? categoryId, int pageIndex, int pageSize)
    {
        using (IDataReader reader = _procedures.GetEntries(blogId: BlogId, categoryId: categoryId, pageIndex: pageIndex, postType: (int)postType, pageSize: pageSize))
        {
            return reader.ReadPagedCollection(r => reader.ReadEntryStatsView());
        }
    }

    /// <summary>
    /// Gets the entries that meet the specific <see cref="PostType"/>
    /// and the <see cref="PostConfig"/> flags.
    /// </summary>
    /// <remarks>
    /// This is called to get the main syndicated entries and supports MetaWeblog API.
    /// </remarks>
    /// <param name="itemCount">Item count.</param>
    /// <param name="postType">The type of post to retrieve.</param>
    /// <param name="postConfig">Post configuration options.</param>
    /// <param name="includeCategories">Whether or not to include categories</param>
    /// <returns></returns>
    public override ICollection<Entry> GetEntries(int itemCount, PostType postType, PostConfig postConfig, bool includeCategories)
    {
        using (IDataReader reader = _procedures.GetConditionalEntries(itemCount,
                                                                     (int)postType,
                                                                     (int)postConfig,
                                                                     BlogId,
                                                                     includeCategories,
                                                                     CurrentDateTimeUtc))
        {
            return reader.ReadEntryCollection();
        }
    }

    /// <summary>
    /// Returns up to <paramref name="itemCount"/> entries in the specified category.
    /// </summary>
    /// <param name="itemCount">Maximum number of entries to return.</param>
    /// <param name="categoryId">Id of the category to filter by.</param>
    /// <param name="activeOnly">Whether to return only active (published) entries.</param>
    public override ICollection<Entry> GetEntriesByCategory(int itemCount, int categoryId, bool activeOnly)
    {
        using (IDataReader reader = _procedures.GetPostsByCategoryID(itemCount, categoryId, activeOnly, BlogId, CurrentDateTimeUtc))
        {
            return reader.ReadEntryCollection();
        }
    }

    /// <summary>
    /// Returns up to <paramref name="itemCount"/> active entries tagged with the specified tag.
    /// </summary>
    /// <param name="itemCount">Maximum number of entries to return.</param>
    /// <param name="tagName">Tag to filter by.</param>
    public override ICollection<Entry> GetEntriesByTag(int itemCount, string tagName)
    {
        using (IDataReader reader = _procedures.GetPostsByTag(itemCount, tagName, BlogId, true, CurrentDateTimeUtc))
        {
            return reader.ReadEntryCollection();
        }
    }

    /// <summary>
    /// Returns the most popular entries within the given date filter window
    /// (last week/month/year, or all time when no filter applies).
    /// </summary>
    public override ICollection<EntryStatsView> GetPopularEntries(int blogId, DateFilter filter)
    {
        // Translate the filter enum into a lower-bound date; null means "no lower bound".
        DateTime? minDate = null;
        if (filter == DateFilter.LastMonth)
        {
            minDate = CurrentDateTimeUtc.AddMonths(-1);
        }
        else if (filter == DateFilter.LastWeek)
        {
            minDate = CurrentDateTimeUtc.AddDays(-7);
        }
        else if (filter == DateFilter.LastYear)
        {
            minDate = CurrentDateTimeUtc.AddYears(-1);
        }
        using (IDataReader reader = _procedures.GetPopularPosts(BlogId, minDate))
        {
            return reader.ReadCollection(r =>
            {
                var entry = r.ReadEntryStatsView();
                entry.PostType = PostType.BlogPost;
                return entry;
            });
        }
    }

    /// <summary>
    /// Returns a page of entries for export, with each entry's comments and
    /// trackbacks (read from subsequent result sets) attached.
    /// </summary>
    public override IPagedCollection<EntryStatsView> GetEntriesForExport(int pageIndex, int pageSize)
    {
        using (IDataReader reader = _procedures.GetEntriesForExport(BlogId, pageIndex, pageSize))
        {
            var entries = reader.ReadEntryCollection<EntryStatsView, IPagedCollection<EntryStatsView>>(r => r.ReadPagedCollection(innerReader => innerReader.ReadEntryStatsView()));
            // Second result set: comments. Third result set: trackbacks.
            if (reader.NextResult())
            {
                var comments = reader.ReadEnumerable(r => r.ReadFeedbackItem());
                entries.Accumulate(comments, entry => entry.Id, comment => comment.EntryId,
                                   (entry, comment) => { entry.Comments.Add(comment); comment.Entry = entry; });
                if (reader.NextResult())
                {
                    var trackBacks = reader.ReadEnumerable(r => r.ReadFeedbackItem());
                    entries.Accumulate(trackBacks, entry => entry.Id, trackback => trackback.EntryId,
                                       (entry, trackback) => { entry.Comments.Add(trackback); trackback.Entry = entry; });
                }
            }
            return entries;
        }
    }

    /// <summary>
    /// Returns an <see cref="EntryDay"/> containing the active blog posts
    /// published on the calendar day of <paramref name="dateTime"/>.
    /// </summary>
    public override EntryDay GetEntryDay(DateTime dateTime)
    {
        using (IDataReader reader = _procedures.GetEntriesByDayRange(dateTime.Date, dateTime.Date.AddDays(1), (int)PostType.BlogPost, true, BlogId, CurrentDateTimeUtc))
        {
            var entryDay = new EntryDay(dateTime);
            while (reader.Read())
            {
                entryDay.Add(reader.ReadEntry());
            }
            return entryDay;
        }
    }

    /// <summary>
    /// Returns the previous and next entry to the specified entry.
    /// </summary>
    /// <param name="entryId"></param>
    /// <returns></returns>
    /// <param name="postType"></param>
    public override ICollection<EntrySummary> GetPreviousAndNextEntries(int entryId, PostType postType)
    {
        using (IDataReader reader = _procedures.GetEntryPreviousNext(entryId, (int)postType, BlogId, CurrentDateTimeUtc))
        {
            return reader.ReadCollection<EntrySummary>();
        }
    }

    /// <summary>
    /// Returns the posts for the specified month for the Month Archive section.
    /// </summary>
    /// <param name="month"></param>
    /// <param name="year"></param>
    /// <returns></returns>
    public override ICollection<Entry> GetPostsByMonth(int month, int year)
    {
        using (IDataReader reader = _procedures.GetPostsByMonth(month, year, BlogId, CurrentDateTimeUtc))
        {
            return reader.ReadEntryCollection();
        }
    }

    /// <summary>
    /// Returns the posts in the given day range. The bounds are normalized so
    /// callers may pass <paramref name="start"/> and <paramref name="stop"/> in either order.
    /// </summary>
    public override ICollection<Entry> GetPostsByDayRange(DateTime start, DateTime stop, PostType postType, bool activeOnly)
    {
        DateTime min = start;
        DateTime max = stop;
        if (stop < start)
        {
            min = stop;
            max = start;
        }
        using (IDataReader reader = _procedures.GetEntriesByDayRange(min, max, (int)postType, activeOnly, BlogId, CurrentDateTimeUtc))
        {
            return reader.ReadEntryCollection();
        }
    }

    /// <summary>
    /// Returns an <see cref="Entry" /> with the specified id.
    /// </summary>
    /// <param name="id">Id of the entry</param>
    /// <param name="activeOnly">Whether or not to only return the entry if it is active.</param>
    /// <param name="includeCategories">Whether the entry should have its Categories property populated</param>
    /// <returns>The entry, or null when no matching entry exists.</returns>
    public override Entry GetEntry(int id, bool activeOnly, bool includeCategories)
    {
        using (IDataReader reader = _procedures.GetEntryReader(BlogId, id, activeOnly, includeCategories))
        {
            if (reader.Read())
            {
                return DataHelper.ReadEntryWithCategories(reader);
            }
            return null;
        }
    }

    /// <summary>
    /// Returns an <see cref="Entry" /> with the specified entry name.
    /// </summary>
    /// <param name="entryName">Url friendly entry name.</param>
    /// <param name="activeOnly">Whether or not to only return the entry if it is active.</param>
    /// <param name="includeCategories">Whether the entry should have its Categories property populated</param>
    /// <returns>The entry, or null when no matching entry exists.</returns>
    public override Entry GetEntry(string entryName, bool activeOnly, bool includeCategories)
    {
        using (IDataReader reader = _procedures.GetEntryReader(BlogId,
                                                               entryName,
                                                               activeOnly,
                                                               includeCategories))
        {
            if (reader.Read())
            {
                return DataHelper.ReadEntryWithCategories(reader);
            }
            return null;
        }
    }

    /// <summary>
    /// Deletes the specified entry.
    /// </summary>
    /// <param name="entryId">The entry id.</param>
    /// <returns></returns>
    public override bool DeleteEntry(int entryId)
    {
        return _procedures.DeletePost(entryId, CurrentDateTimeUtc);
    }

    /// <summary>
    /// Creates the specified entry in the back end data store attaching
    /// the specified category ids.
    /// </summary>
    /// <param name="entry">Entry.</param>
    /// <param name="categoryIds">Category I ds.</param>
    /// <returns>The id assigned to the new entry.</returns>
    public override int Create(Entry entry, IEnumerable<int> categoryIds)
    {
        ValidateEntry(entry);
        // Default the creation date to "now" when the caller did not supply one.
        entry.DateCreatedUtc = entry.DateCreatedUtc.IsNull() ? CurrentDateTimeUtc : entry.DateCreatedUtc;
        entry.Id = _procedures.InsertEntry(entry.Title
                                           , entry.Body.NullIfEmpty()
                                           , (int)entry.PostType
                                           , entry.Author.NullIfEmpty()
                                           , entry.Email.NullIfEmpty()
                                           , entry.Description.NullIfEmpty()
                                           , BlogId
                                           , entry.DateCreatedUtc
                                           , (int)entry.PostConfig
                                           , entry.EntryName.NullIfEmpty()
                                           , entry.DatePublishedUtc.NullIfEmpty());
        if (categoryIds != null)
        {
            SetEntryCategoryList(entry.Id, categoryIds);
        }
        if (entry.Id > -1)
        {
            Config.CurrentBlog.DateModifiedUtc = entry.DateCreatedUtc;
        }
        return entry.Id;
    }

    /// <summary>
    /// Saves the categories for the specified post.
    /// </summary>
    public override bool SetEntryCategoryList(int entryId, IEnumerable<int> categoryIds)
    {
        if (categoryIds == null)
        {
            // Null means "clear the category list".
            return _procedures.InsertLinkCategoryList(string.Empty, entryId, BlogId);
        }
        var idsAsStrings = categoryIds.Select(id => id.ToString(CultureInfo.InvariantCulture));
        string catList = string.Join(",", idsAsStrings.ToArray());
        return _procedures.InsertLinkCategoryList(catList, entryId, BlogId);
    }

    /// <summary>
    /// Saves the tags for the specified post
    /// </summary>
    /// <param name="postId">The EntryId for the post to update</param>
    /// <param name="tags">
    /// An array of tag strings for the associated post. If there are no tags
    /// associated with the post, pass tags with length zero to remove post tags
    /// if present.
    /// </param>
    /// <returns></returns>
    public override bool SetEntryTagList(int postId, IEnumerable<string> tags)
    {
        if (tags == null)
        {
            throw new ArgumentNullException("tags");
        }
        // string.Join replaces the previous manual concatenate-then-trim loop;
        // an empty sequence yields an empty list, matching SetEntryCategoryList.
        string tagList = string.Join(",", tags.ToArray());
        return _procedures.InsertEntryTagList(postId, BlogId, tagList);
    }

    /// <summary>
    /// Saves changes to the specified entry attaching the specified categories.
    /// </summary>
    /// <param name="entry">Entry.</param>
    /// <param name="categoryIds">Category Ids.</param>
    /// <returns>True when the entry was updated; otherwise false.</returns>
    public override bool Update(Entry entry, IEnumerable<int> categoryIds)
    {
        ValidateEntry(entry);
        // An entry becoming active gets its publish date stamped now.
        if (entry.IsActive && entry.DatePublishedUtc.IsNull())
        {
            entry.DatePublishedUtc = CurrentDateTimeUtc;
        }
        entry.DateModifiedUtc = entry.DateModifiedUtc.IsNull() ? CurrentDateTimeUtc : entry.DateModifiedUtc;
        bool updated = _procedures.UpdateEntry(
            entry.Id
            , entry.Title ?? string.Empty
            , entry.Body.NullIfEmpty()
            , (int)entry.PostType
            , entry.Author.NullIfEmpty()
            , entry.Email.NullIfEmpty()
            , entry.Description.NullIfEmpty()
            , entry.DateModifiedUtc
            , (int)entry.PostConfig
            , entry.EntryName.NullIfEmpty()
            , entry.DatePublishedUtc.NullIfEmpty()
            , BlogId);
        if (!updated)
        {
            return false;
        }
        if (!categoryIds.IsNullOrEmpty())
        {
            SetEntryCategoryList(entry.Id, categoryIds);
        }
        if (Config.Settings.Tracking.UseTrackingServices)
        {
            if (entry.Id > -1)
            {
                Config.CurrentBlog.DateModifiedUtc = entry.DateModifiedUtc;
            }
        }
        return true;
    }

    /// <summary>
    /// Returns the per-month post counts for the archive section.
    /// </summary>
    public override ICollection<ArchiveCount> GetPostCountsByMonth()
    {
        using (IDataReader reader = _procedures.GetPostsByMonthArchive(BlogId, CurrentDateTimeUtc))
        {
            ICollection<ArchiveCount> acc = DataHelper.ReadArchiveCount(reader);
            return acc;
        }
    }

    /// <summary>
    /// Returns the per-year post counts for the archive section.
    /// </summary>
    public override ICollection<ArchiveCount> GetPostCountsByYear()
    {
        using (IDataReader reader = _procedures.GetPostsByYearArchive(BlogId, CurrentDateTimeUtc))
        {
            ICollection<ArchiveCount> acc = DataHelper.ReadArchiveCount(reader);
            return acc;
        }
    }

    /// <summary>
    /// Returns the per-category post counts for the archive section.
    /// </summary>
    public override ICollection<ArchiveCount> GetPostCountsByCategory()
    {
        using (IDataReader reader = _procedures.GetPostsByCategoriesArchive(BlogId))
        {
            ICollection<ArchiveCount> acc = DataHelper.ReadArchiveCount(reader);
            return acc;
        }
    }
}
}
| |
/*
Azure Media Services REST API v2 Function
This function submits a job with encoding and/or analytics.
Input:
{
"assetId" : "nb:cid:UUID:2d0d78a2-685a-4b14-9cf0-9afb0bb5dbfc", // Mandatory, Id of the source asset
"mes" : // Optional but required to encode with Media Encoder Standard (MES)
{
"preset" : "Content Adaptive Multiple Bitrate MP4", // Optional but required to encode with Media Encoder Standard (MES). If MESPreset contains an extension "H264 Multiple Bitrate 720p with thumbnail.json" then it loads this file from ..\Presets
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
}
"mesThumbnails" : // Optional but required to generate thumbnails with Media Encoder Standard (MES)
{
"start" : "{Best}", // Optional. Start time/mode. Default is "{Best}"
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
}
"mepw" : // Optional but required to encode with Premium Workflow Encoder
{
"workflowAssetId" : "nb:cid:UUID:2d0d78a2-685a-4b14-9cf0-9afb0bb5dbfc", // Required. Id for the workflow asset
"workflowConfig" : "", // Optional. Premium Workflow Config for the task
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"indexV1" : // Optional but required to index audio with Media Indexer v1
{
"language" : "English", // Optional. Default is "English"
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"indexV2" : // Optional but required to index audio with Media Indexer v2
{
"language" : "EnUs", // Optional. Default is EnUs
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"ocr" : // Optional but required to do OCR
{
"language" : "AutoDetect", // Optional (Autodetect is the default)
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"faceDetection" : // Optional but required to do Face Detection
{
"mode" : "PerFaceEmotion", // Optional (PerFaceEmotion is the default)
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"faceRedaction" : // Optional but required to do Face Redaction
{
"mode" : "analyze" // Optional (analyze is the default)
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"motionDetection" : // Optional but required to do Motion Detection
{
"level" : "medium", // Optional (medium is the default)
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"summarization" : // Optional but required to do Motion Detection
{
"duration" : "0.0", // Optional (0.0 is the default)
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
"videoAnnotation" : // Optional but required to do Video Annotator
{
"outputStorage" : "amsstorage01" // Optional. Storage account name where to put the output asset (attached to AMS account)
},
// General job properties
"priority" : 10, // Optional, priority of the job
"useEncoderOutputForAnalytics" : true, // Optional, use generated asset by MES or Premium Workflow as a source for media analytics
"jobName" : "" // Optional, job name
// For compatibility only with old workflows. Do not use anymore!
"mesPreset" : "Adaptive Streaming", // Optional but required to encode with Media Encoder Standard (MES). If MESPreset contains an extension "H264 Multiple Bitrate 720p with thumbnail.json" then it loads this file from ..\Presets
"workflowAssetId" : "nb:cid:UUID:2d0d78a2-685a-4b14-9cf0-9afb0bb5dbfc", // Optional, but required to encode the asset with Premium Workflow Encoder. Id for the workflow asset
"workflowConfig" : "" // Optional. Premium Workflow Config for the task
"indexV1Language" : "English", // Optional but required to index the asset with Indexer v1
"indexV2Language" : "EnUs", // Optional but required to index the asset with Indexer v2
"ocrLanguage" : "AutoDetect" or "English", // Optional but required to do OCR
"faceDetectionMode" : "PerFaceEmotion, // Optional but required to trigger face detection
"faceRedactionMode" : "analyze", // Optional, but required for face redaction
"motionDetectionLevel" : "medium", // Optional, required for motion detection
"summarizationDuration" : "0.0" // Optional. Required to create video summarization. "0.0" for automatic
}
Output:
{
"jobId" : // job id
"otherJobsQueue" = 3 // number of jobs in the queue
"mes" : // Output asset generated by MES (if mesPreset was specified)
{
assetId : "",
taskId : ""
},
"mesThumbnails" :// Output asset generated by MES
{
assetId : "",
taskId : ""
},
"mepw" : // Output asset generated by Premium Workflow Encoder
{
assetId : "",
taskId : ""
},
"indexV1" : // Output asset generated by Indexer v1
{
assetId : "",
taskId : "",
language : ""
},
"indexV2" : // Output asset generated by Indexer v2
{
assetId : "",
taskId : "",
language : ""
},
"ocr" : // Output asset generated by OCR
{
assetId : "",
taskId : ""
},
"faceDetection" : // Output asset generated by Face detection
{
assetId : ""
taskId : ""
},
"faceRedaction" : // Output asset generated by Face redaction
{
assetId : ""
taskId : ""
},
"motionDetection" : // Output asset generated by motion detection
{
assetId : "",
taskId : ""
},
"summarization" : // Output asset generated by video summarization
{
assetId : "",
taskId : ""
},
"videoAnnotation" :// Output asset generated by Video Annotator
{
assetId : "",
taskId : ""
}
}
*/
using System;
using System.Net;
using System.Net.Http;
using Newtonsoft.Json;
using Microsoft.WindowsAzure.MediaServices.Client;
using System.Linq;
using System.Threading.Tasks;
using System.IO;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Host;
namespace media_functions_for_logic_app
{
public static class submit_job
{
// Field for service context.
private static CloudMediaContext _context = null;
[FunctionName("submit-job")]
public static async Task<object> Run([HttpTrigger(WebHookType = "genericJson")]HttpRequestMessage req, TraceWriter log, Microsoft.Azure.WebJobs.ExecutionContext execContext)
{
int taskindex = 0;
bool useEncoderOutputForAnalytics = false;
IAsset outputEncoding = null;
log.Info($"Webhook was triggered!");
string triggerStart = DateTime.UtcNow.ToString("o");
string jsonContent = await req.Content.ReadAsStringAsync();
dynamic data = JsonConvert.DeserializeObject(jsonContent);
log.Info(jsonContent);
log.Info($"asset id : {data.assetId}");
if (data.assetId == null)
{
// for test
// data.assetId = "nb:cid:UUID:2d0d78a2-685a-4b14-9cf0-9afb0bb5dbfc";
return req.CreateResponse(HttpStatusCode.BadRequest, new
{
error = "Please pass asset ID in the input object (assetId)"
});
}
IJob job = null;
ITask taskEncoding = null;
int OutputMES = -1;
int OutputMEPW = -1;
int OutputIndex1 = -1;
int OutputIndex2 = -1;
int OutputOCR = -1;
int OutputFaceDetection = -1;
int OutputMotion = -1;
int OutputSummarization = -1;
int OutputFaceRedaction = -1;
int OutputMesThumbnails = -1;
int OutputVideoAnnotation = -1;
int NumberJobsQueue = 0;
MediaServicesCredentials amsCredentials = new MediaServicesCredentials();
log.Info($"Using Azure Media Service Rest API Endpoint : {amsCredentials.AmsRestApiEndpoint}");
try
{
AzureAdTokenCredentials tokenCredentials = new AzureAdTokenCredentials(amsCredentials.AmsAadTenantDomain,
new AzureAdClientSymmetricKey(amsCredentials.AmsClientId, amsCredentials.AmsClientSecret),
AzureEnvironments.AzureCloudEnvironment);
AzureAdTokenProvider tokenProvider = new AzureAdTokenProvider(tokenCredentials);
_context = new CloudMediaContext(amsCredentials.AmsRestApiEndpoint, tokenProvider);
// find the Asset
string assetid = (string)data.assetId;
IAsset asset = _context.Assets.Where(a => a.Id == assetid).FirstOrDefault();
if (asset == null)
{
log.Info($"Asset not found {assetid}");
return req.CreateResponse(HttpStatusCode.BadRequest, new
{
error = "Asset not found"
});
}
if (data.useEncoderOutputForAnalytics != null && ((bool)data.useEncoderOutputForAnalytics) && (data.mesPreset != null || data.mes != null)) // User wants to use encoder output for media analytics
{
useEncoderOutputForAnalytics = (bool)data.useEncoderOutputForAnalytics;
}
// Declare a new encoding job with the Standard encoder
int priority = 10;
if (data.priority != null)
{
priority = (int)data.priority;
}
job = _context.Jobs.Create(((string)data.jobName) ?? "Azure Functions Job", priority);
if (data.mes != null || data.mesPreset != null) // MES Task
{
// Get a media processor reference, and pass to it the name of the
// processor to use for the specific task.
IMediaProcessor processorMES = MediaServicesHelper.GetLatestMediaProcessorByName(_context, "Media Encoder Standard");
string preset = null;
if (data.mes != null)
{
preset = (string)data.mes.preset;
}
else
{
preset = (string)data.mesPreset; // Compatibility mode
}
if (preset == null)
{
preset = "Content Adaptive Multiple Bitrate MP4"; // the default preset
}
if (preset.ToUpper().EndsWith(".JSON"))
{
// Build the folder path to the preset
string presetPath = Path.Combine(System.IO.Directory.GetParent(execContext.FunctionDirectory).FullName, "presets", preset);
log.Info("presetPath= " + presetPath);
preset = File.ReadAllText(presetPath);
}
// Create a task with the encoding details, using a string preset.
// In this case "H264 Multiple Bitrate 720p" system defined preset is used.
taskEncoding = job.Tasks.AddNew("MES encoding task",
processorMES,
preset,
TaskOptions.None);
// Specify the input asset to be encoded.
taskEncoding.InputAssets.Add(asset);
OutputMES = taskindex++;
// Add an output asset to contain the results of the job.
// This output is specified as AssetCreationOptions.None, which
// means the output asset is not encrypted.
outputEncoding = taskEncoding.OutputAssets.AddNew(asset.Name + " MES encoded", JobHelpers.OutputStorageFromParam(data.mes), AssetCreationOptions.None);
}
if (data.mepw != null || data.workflowAssetId != null) // Premium Encoder Task
{
//find the workflow asset
string workflowassetid = null;
if (data.mepw != null)
{
workflowassetid = (string)data.mepw.workflowAssetId;
}
else
{
workflowassetid = (string)data.workflowAssetId; // compatibility mode
}
IAsset workflowAsset = _context.Assets.Where(a => a.Id == workflowassetid).FirstOrDefault();
if (workflowAsset == null)
{
log.Info($"Workflow not found {workflowassetid}");
return req.CreateResponse(HttpStatusCode.BadRequest, new
{
error = "Workflow not found"
});
}
// Get a media processor reference, and pass to it the name of the
// processor to use for the specific task.
IMediaProcessor processorMEPW = MediaServicesHelper.GetLatestMediaProcessorByName(_context, "Media Encoder Premium Workflow");
string premiumConfiguration = "";
if (data.mepw != null && data.mepw.workflowConfig != null)
{
premiumConfiguration = (string)data.mepw.workflowConfig;
}
else if (data.workflowConfig != null)
{
premiumConfiguration = (string)data.workflowConfig; // compatibility mode
}
// In some cases, a configuration can be loaded and passed to the task to tune the workflow
// premiumConfiguration=File.ReadAllText(Path.Combine(System.IO.Directory.GetParent(execContext.FunctionDirectory).FullName, "presets", "SetRuntime.xml")).Replace("VideoFileName", VideoFile.Name).Replace("AudioFileName", AudioFile.Name);
// Create a task
taskEncoding = job.Tasks.AddNew("Premium Workflow encoding task",
processorMEPW,
premiumConfiguration,
TaskOptions.None);
log.Info("task created");
// Specify the input asset to be encoded.
taskEncoding.InputAssets.Add(workflowAsset); // first add the Workflow
taskEncoding.InputAssets.Add(asset); // Then add the video asset
OutputMEPW = taskindex++;
// Add an output asset to contain the results of the job.
// This output is specified as AssetCreationOptions.None, which
// means the output asset is not encrypted.
outputEncoding = taskEncoding.OutputAssets.AddNew(asset.Name + " Premium encoded", JobHelpers.OutputStorageFromParam(data.mepw), AssetCreationOptions.None);
}
IAsset an_asset = useEncoderOutputForAnalytics ? outputEncoding : asset;
// Media Analytics
OutputIndex1 = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.indexV1 == null) ? (string)data.indexV1Language : ((string)data.indexV1.language ?? "English"), "Azure Media Indexer", "IndexerV1.xml", "English", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.indexV1));
OutputIndex2 = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.indexV2 == null) ? (string)data.indexV2Language : ((string)data.indexV2.language ?? "EnUs"), "Azure Media Indexer 2 Preview", "IndexerV2.json", "EnUs", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.indexV2));
OutputOCR = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.ocr == null) ? (string)data.ocrLanguage : ((string)data.ocr.language ?? "AutoDetect"), "Azure Media OCR", "OCR.json", "AutoDetect", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.ocr));
OutputFaceDetection = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.faceDetection == null) ? (string)data.faceDetectionMode : ((string)data.faceDetection.mode ?? "PerFaceEmotion"), "Azure Media Face Detector", "FaceDetection.json", "PerFaceEmotion", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.faceDetection));
OutputFaceRedaction = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.faceRedaction == null) ? (string)data.faceRedactionMode : ((string)data.faceRedaction.mode ?? "comined"), "Azure Media Redactor", "FaceRedaction.json", "combined", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.faceRedaction));
OutputMotion = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.motionDetection == null) ? (string)data.motionDetectionLevel : ((string)data.motionDetection.level ?? "medium"), "Azure Media Motion Detector", "MotionDetection.json", "medium", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.motionDetection));
OutputSummarization = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.summarization == null) ? (string)data.summarizationDuration : ((string)data.summarization.duration ?? "0.0"), "Azure Media Video Thumbnails", "Summarization.json", "0.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.summarization));
OutputVideoAnnotation = JobHelpers.AddTask(execContext, _context, job, an_asset, (data.videoAnnotation != null) ? "1.0" : null, "Azure Media Video Annotator", "VideoAnnotation.json", "1.0", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.videoAnnotation));
// MES Thumbnails
OutputMesThumbnails = JobHelpers.AddTask(execContext, _context, job, asset, (data.mesThumbnails != null) ? ((string)data.mesThumbnails.Start ?? "{Best}") : null, "Media Encoder Standard", "MesThumbnails.json", "{Best}", ref taskindex, specifiedStorageAccountName: JobHelpers.OutputStorageFromParam(data.mesThumbnails));
job.Submit();
log.Info("Job Submitted");
NumberJobsQueue = _context.Jobs.Where(j => j.State == JobState.Queued).Count();
}
catch (Exception ex)
{
string message = ex.Message + ((ex.InnerException != null) ? Environment.NewLine + MediaServicesHelper.GetErrorMessage(ex) : "");
log.Info($"ERROR: Exception {message}");
return req.CreateResponse(HttpStatusCode.InternalServerError, new { error = message });
}
job = _context.Jobs.Where(j => j.Id == job.Id).FirstOrDefault(); // Let's refresh the job
log.Info("Job Id: " + job.Id);
log.Info("OutputAssetMESId: " + JobHelpers.ReturnId(job, OutputMES));
log.Info("OutputAssetMEPWId: " + JobHelpers.ReturnId(job, OutputMEPW));
log.Info("OutputAssetIndexV1Id: " + JobHelpers.ReturnId(job, OutputIndex1));
log.Info("OutputAssetIndexV2Id: " + JobHelpers.ReturnId(job, OutputIndex2));
log.Info("OutputAssetOCRId: " + JobHelpers.ReturnId(job, OutputOCR));
log.Info("OutputAssetFaceDetectionId: " + JobHelpers.ReturnId(job, OutputFaceDetection));
log.Info("OutputAssetFaceRedactionId: " + JobHelpers.ReturnId(job, OutputFaceRedaction));
log.Info("OutputAssetMotionDetectionId: " + JobHelpers.ReturnId(job, OutputMotion));
log.Info("OutputAssetSummarizationId: " + JobHelpers.ReturnId(job, OutputSummarization));
log.Info("OutputMesThumbnailsId: " + JobHelpers.ReturnId(job, OutputMesThumbnails));
log.Info("OutputAssetVideoAnnotationId: " + JobHelpers.ReturnId(job, OutputVideoAnnotation));
return req.CreateResponse(HttpStatusCode.OK, new
{
jobId = job.Id,
otherJobsQueue = NumberJobsQueue,
mes = new
{
assetId = JobHelpers.ReturnId(job, OutputMES),
taskId = JobHelpers.ReturnTaskId(job, OutputMES)
},
mepw = new
{
assetId = JobHelpers.ReturnId(job, OutputMEPW),
taskId = JobHelpers.ReturnTaskId(job, OutputMEPW)
},
indexV1 = new
{
assetId = JobHelpers.ReturnId(job, OutputIndex1),
taskId = JobHelpers.ReturnTaskId(job, OutputIndex1),
language = (string)data.indexV1Language
},
indexV2 = new
{
assetId = JobHelpers.ReturnId(job, OutputIndex2),
taskId = JobHelpers.ReturnTaskId(job, OutputIndex2),
language = (string)data.indexV2Language
},
ocr = new
{
assetId = JobHelpers.ReturnId(job, OutputOCR),
taskId = JobHelpers.ReturnTaskId(job, OutputOCR)
},
faceDetection = new
{
assetId = JobHelpers.ReturnId(job, OutputFaceDetection),
taskId = JobHelpers.ReturnTaskId(job, OutputFaceDetection)
},
faceRedaction = new
{
assetId = JobHelpers.ReturnId(job, OutputFaceRedaction),
taskId = JobHelpers.ReturnTaskId(job, OutputFaceRedaction)
},
motionDetection = new
{
assetId = JobHelpers.ReturnId(job, OutputMotion),
taskId = JobHelpers.ReturnTaskId(job, OutputMotion)
},
summarization = new
{
assetId = JobHelpers.ReturnId(job, OutputSummarization),
taskId = JobHelpers.ReturnTaskId(job, OutputSummarization)
},
mesThumbnails = new
{
assetId = JobHelpers.ReturnId(job, OutputMesThumbnails),
taskId = JobHelpers.ReturnTaskId(job, OutputMesThumbnails)
},
videoAnnotation = new
{
assetId = JobHelpers.ReturnId(job, OutputVideoAnnotation),
taskId = JobHelpers.ReturnTaskId(job, OutputVideoAnnotation)
}
});
}
}
}
| |
using System;
using System.Collections.Generic;
using GitVersion;
using GitVersion.Model.Configuration;
using GitVersion.VersionCalculation;
using GitVersionCore.Tests.Helpers;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using NSubstitute;
using NUnit.Framework;
using Shouldly;
namespace GitVersionCore.Tests.VersionCalculation
{
[TestFixture]
public class BaseVersionCalculatorTests : TestBase
{
    [Test]
    public void ChoosesHighestVersionReturnedFromStrategies()
    {
        // Two strategies produce 1.0.0 and 2.0.0; the calculator must pick 2.0.0.
        var dateTimeOffset = DateTimeOffset.Now;
        var sut = GetBaseVersionCalculator(builder => builder.OverrideServices(services =>
        {
            services.RemoveAll<IVersionStrategy>();
            services.AddSingleton<IVersionStrategy>(new V1Strategy(DateTimeOffset.Now));
            services.AddSingleton<IVersionStrategy>(new V2Strategy(dateTimeOffset));
        }));

        var result = sut.GetBaseVersion();

        result.SemanticVersion.ToString().ShouldBe("2.0.0");
        result.ShouldIncrement.ShouldBe(true);
        result.BaseVersionSource.When.ShouldBe(dateTimeOffset);
    }

    [Test]
    public void UsesWhenFromNextBestMatchIfHighestDoesntHaveWhen()
    {
        // The winning 2.0.0 strategy carries no commit; the date must come from the 1.0.0 one.
        var when = DateTimeOffset.Now;
        var sut = GetBaseVersionCalculator(builder => builder.OverrideServices(services =>
        {
            services.RemoveAll<IVersionStrategy>();
            services.AddSingleton<IVersionStrategy>(new V1Strategy(when));
            services.AddSingleton<IVersionStrategy>(new V2Strategy(null));
        }));

        var result = sut.GetBaseVersion();

        result.SemanticVersion.ToString().ShouldBe("2.0.0");
        result.ShouldIncrement.ShouldBe(true);
        result.BaseVersionSource.When.ShouldBe(when);
    }

    [Test]
    public void UsesWhenFromNextBestMatchIfHighestDoesntHaveWhenReversedOrder()
    {
        // Same as above, but with the strategies registered in the opposite order.
        var when = DateTimeOffset.Now;
        var sut = GetBaseVersionCalculator(builder => builder.OverrideServices(services =>
        {
            services.RemoveAll<IVersionStrategy>();
            services.AddSingleton<IVersionStrategy>(new V1Strategy(null));
            services.AddSingleton<IVersionStrategy>(new V2Strategy(when));
        }));

        var result = sut.GetBaseVersion();

        result.SemanticVersion.ToString().ShouldBe("2.0.0");
        result.ShouldIncrement.ShouldBe(true);
        result.BaseVersionSource.When.ShouldBe(when);
    }

    [Test]
    public void ShouldNotFilterVersion()
    {
        // A version whose source does not match the ignore filter passes through untouched.
        var ignoreConfig = new TestIgnoreConfig(new ExcludeSourcesContainingExclude());
        var version = new BaseVersion("dummy", false, new SemanticVersion(2), GitToolsTestingExtensions.CreateMockCommit(), null);
        var sut = GetBaseVersionCalculator(builder => builder
            .WithConfig(new Config { Ignore = ignoreConfig })
            .OverrideServices(services =>
            {
                services.RemoveAll<IVersionStrategy>();
                services.AddSingleton<IVersionStrategy>(new TestVersionStrategy(version));
            }));

        var result = sut.GetBaseVersion();

        result.Source.ShouldBe(version.Source);
        result.ShouldIncrement.ShouldBe(version.ShouldIncrement);
        result.SemanticVersion.ShouldBe(version.SemanticVersion);
    }

    [Test]
    public void ShouldFilterVersion()
    {
        // The higher version's source matches the ignore filter, so the lower one must win.
        var ignoreConfig = new TestIgnoreConfig(new ExcludeSourcesContainingExclude());
        var higherVersion = new BaseVersion("exclude", false, new SemanticVersion(2), GitToolsTestingExtensions.CreateMockCommit(), null);
        var lowerVersion = new BaseVersion("dummy", false, new SemanticVersion(1), GitToolsTestingExtensions.CreateMockCommit(), null);
        var sut = GetBaseVersionCalculator(builder => builder
            .WithConfig(new Config { Ignore = ignoreConfig })
            .OverrideServices(services =>
            {
                services.RemoveAll<IVersionStrategy>();
                services.AddSingleton<IVersionStrategy>(new TestVersionStrategy(higherVersion, lowerVersion));
            }));

        var result = sut.GetBaseVersion();

        result.Source.ShouldNotBe(higherVersion.Source);
        result.SemanticVersion.ShouldNotBe(higherVersion.SemanticVersion);
        result.Source.ShouldBe(lowerVersion.Source);
        result.SemanticVersion.ShouldBe(lowerVersion.SemanticVersion);
    }

    [Test]
    public void ShouldIgnorePreReleaseVersionInMainlineMode()
    {
        // In mainline mode pre-release candidates are dropped in favour of release versions.
        var ignoreConfig = new TestIgnoreConfig(new ExcludeSourcesContainingExclude());
        var lowerVersion = new BaseVersion("dummy", false, new SemanticVersion(1), GitToolsTestingExtensions.CreateMockCommit(), null);
        var preReleaseVersion = new BaseVersion(
            "prerelease",
            false,
            new SemanticVersion(1, 0, 1)
            {
                PreReleaseTag = new SemanticVersionPreReleaseTag
                {
                    Name = "alpha",
                    Number = 1
                }
            },
            GitToolsTestingExtensions.CreateMockCommit(),
            null
        );
        var sut = GetBaseVersionCalculator(builder => builder
            .WithConfig(new Config { VersioningMode = VersioningMode.Mainline, Ignore = ignoreConfig })
            .OverrideServices(services =>
            {
                services.RemoveAll<IVersionStrategy>();
                services.AddSingleton<IVersionStrategy>(new TestVersionStrategy(preReleaseVersion, lowerVersion));
            }));

        var result = sut.GetBaseVersion();

        result.Source.ShouldNotBe(preReleaseVersion.Source);
        result.SemanticVersion.ShouldNotBe(preReleaseVersion.SemanticVersion);
        result.Source.ShouldBe(lowerVersion.Source);
        result.SemanticVersion.ShouldBe(lowerVersion.SemanticVersion);
    }

    // Builds a test context (optionally customised by the caller) and resolves the
    // calculator under test from its service provider.
    private static IBaseVersionCalculator GetBaseVersionCalculator(Action<GitVersionContextBuilder> contextBuilderAction)
    {
        var builder = new GitVersionContextBuilder();
        contextBuilderAction?.Invoke(builder);
        builder.Build();
        return builder.ServicesProvider.GetService<IBaseVersionCalculator>();
    }

    // Ignore configuration stub that always yields the single supplied filter.
    private class TestIgnoreConfig : IgnoreConfig
    {
        private readonly IVersionFilter filter;

        public TestIgnoreConfig(IVersionFilter filter) => this.filter = filter;

        public override bool IsEmpty => false;

        public override IEnumerable<IVersionFilter> ToFilters()
        {
            yield return filter;
        }
    }

    // Excludes any base version whose source string contains "exclude".
    private class ExcludeSourcesContainingExclude : IVersionFilter
    {
        public bool Exclude(BaseVersion version, out string reason)
        {
            if (version.Source.Contains("exclude"))
            {
                reason = "was excluded";
                return true;
            }

            reason = null;
            return false;
        }
    }

    // Yields a single 1.0.0 base version, optionally anchored to a mock commit dated "when".
    private sealed class V1Strategy : IVersionStrategy
    {
        private readonly ICommit when;

        public V1Strategy(DateTimeOffset? when)
        {
            if (when == null)
            {
                this.when = null;
            }
            else
            {
                this.when = GitToolsTestingExtensions.CreateMockCommit();
                this.when.When.Returns(when.Value);
            }
        }

        public IEnumerable<BaseVersion> GetVersions()
        {
            yield return new BaseVersion("Source 1", false, new SemanticVersion(1), when, null);
        }
    }

    // Yields a single 2.0.0 base version (with increment), optionally anchored to a mock commit.
    private sealed class V2Strategy : IVersionStrategy
    {
        private readonly ICommit when;

        public V2Strategy(DateTimeOffset? when)
        {
            if (when == null)
            {
                this.when = null;
            }
            else
            {
                this.when = GitToolsTestingExtensions.CreateMockCommit();
                this.when.When.Returns(when.Value);
            }
        }

        public IEnumerable<BaseVersion> GetVersions()
        {
            yield return new BaseVersion("Source 2", true, new SemanticVersion(2), when, null);
        }
    }

    // Returns exactly the versions handed to the constructor, in order.
    private sealed class TestVersionStrategy : IVersionStrategy
    {
        private readonly IEnumerable<BaseVersion> versions;

        public TestVersionStrategy(params BaseVersion[] versions) => this.versions = versions;

        public IEnumerable<BaseVersion> GetVersions() => versions;
    }
}
}
| |
/******************************************************************************
* The MIT License
* Copyright (c) 2003 Novell Inc. www.novell.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the Software), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*******************************************************************************/
//
// Novell.Directory.Ldap.Events.Edir.EventData.GeneralDSEventData.cs
//
// Author:
// Anil Bhatia (banil@novell.com)
//
// (C) 2003 Novell, Inc (http://www.novell.com)
//
using System.IO;
using System.Text;
using Novell.Directory.Ldap.Asn1;
namespace Novell.Directory.Ldap.Events.Edir.EventData
{
/// <summary>
///     The class represents the data for General DS Events.
///     The ASN.1 payload is decoded field by field in the constructor;
///     the decode calls are order-dependent and must match the wire layout:
///     time, milliseconds, verb, current process, perpetrator DN, then an
///     optional integer list followed by an optional string list.
/// </summary>
public class GeneralDSEventData : BaseEdirEventData
{
    protected int ds_time;

    /// <summary> DS time stamp reported with the event. </summary>
    public int DSTime
    {
        get { return ds_time; }
    }

    protected int milli_seconds;

    /// <summary> Millisecond component reported with the event. </summary>
    public int MilliSeconds
    {
        get { return milli_seconds; }
    }

    protected int nVerb;

    /// <summary> Numeric verb (operation code) of the event. </summary>
    public int Verb
    {
        get { return nVerb; }
    }

    protected int current_process;

    /// <summary> Identifier of the process that generated the event. </summary>
    public int CurrentProcess
    {
        get { return current_process; }
    }

    protected string strPerpetratorDN;

    /// <summary> DN of the entry that caused the event. </summary>
    public string PerpetratorDN
    {
        get { return strPerpetratorDN; }
    }

    protected int[] integer_values;

    /// <summary> Optional integer values; null when the message carried none. </summary>
    public int[] IntegerValues
    {
        get { return integer_values; }
    }

    protected string[] string_values;

    /// <summary> Optional string values; null when the message carried none. </summary>
    public string[] StringValues
    {
        get { return string_values; }
    }

    public GeneralDSEventData(EdirEventDataType eventDataType, Asn1Object message)
        : base(eventDataType, message)
    {
        var length = new int[1];

        // The following decodes MUST stay in this exact order: each call
        // consumes the next tagged element from the shared decodedData stream.
        ds_time = getTaggedIntValue(
            (Asn1Tagged) decoder.decode(decodedData, length),
            GeneralEventField.EVT_TAG_GEN_DSTIME);
        milli_seconds = getTaggedIntValue(
            (Asn1Tagged) decoder.decode(decodedData, length),
            GeneralEventField.EVT_TAG_GEN_MILLISEC);
        nVerb = getTaggedIntValue(
            (Asn1Tagged) decoder.decode(decodedData, length),
            GeneralEventField.EVT_TAG_GEN_VERB);
        current_process = getTaggedIntValue(
            (Asn1Tagged) decoder.decode(decodedData, length),
            GeneralEventField.EVT_TAG_GEN_CURRPROC);
        strPerpetratorDN = getTaggedStringValue(
            (Asn1Tagged) decoder.decode(decodedData, length),
            GeneralEventField.EVT_TAG_GEN_PERP);

        var temptaggedvalue =
            (Asn1Tagged) decoder.decode(decodedData, length);

        if (temptaggedvalue.getIdentifier().Tag
            == (int) GeneralEventField.EVT_TAG_GEN_INTEGERS)
        {
            // Integer list is present; unpack it.
            var inteseq = getTaggedSequence(temptaggedvalue, GeneralEventField.EVT_TAG_GEN_INTEGERS);
            var intobject = inteseq.toArray();
            integer_values = new int[intobject.Length];

            for (var i = 0; i < intobject.Length; i++)
            {
                integer_values[i] = ((Asn1Integer) intobject[i]).intValue();
            }

            // Advance to the next element, which may be the string list.
            temptaggedvalue = (Asn1Tagged) decoder.decode(decodedData, length);
        }
        else
        {
            integer_values = null;
        }

        if (temptaggedvalue.getIdentifier().Tag
            == (int) GeneralEventField.EVT_TAG_GEN_STRINGS
            && temptaggedvalue.getIdentifier().Constructed)
        {
            // String list is present; unpack it.
            var inteseq =
                getTaggedSequence(temptaggedvalue, GeneralEventField.EVT_TAG_GEN_STRINGS);
            var stringobject = inteseq.toArray();
            string_values = new string[stringobject.Length];

            for (var i = 0; i < stringobject.Length; i++)
            {
                string_values[i] =
                    ((Asn1OctetString) stringobject[i]).stringValue();
            }
        }
        else
        {
            string_values = null;
        }

        DataInitDone();
    }

    /// <summary>
    ///     Extracts the integer value from a tagged element, verifying that the
    ///     element actually carries the expected tag.
    /// </summary>
    /// <exception cref="IOException">The element's tag does not match <paramref name="tagid"/>.</exception>
    protected int getTaggedIntValue(Asn1Tagged tagvalue, GeneralEventField tagid)
    {
        var obj = tagvalue.taggedValue();

        if ((int) tagid != tagvalue.getIdentifier().Tag)
        {
            throw new IOException("Unknown Tagged Data");
        }

        var dbytes = SupportClass.ToByteArray(((Asn1OctetString) obj).byteValue());
        var data = new MemoryStream(dbytes);

        var dec = new LBERDecoder();

        var length = dbytes.Length;

        return (int) dec.decodeNumeric(data, length);
    }

    /// <summary>
    ///     Extracts the string value from a tagged element, verifying that the
    ///     element actually carries the expected tag.
    /// </summary>
    /// <exception cref="IOException">The element's tag does not match <paramref name="tagid"/>.</exception>
    protected string getTaggedStringValue(Asn1Tagged tagvalue, GeneralEventField tagid)
    {
        var obj = tagvalue.taggedValue();

        if ((int) tagid != tagvalue.getIdentifier().Tag)
        {
            throw new IOException("Unknown Tagged Data");
        }

        var dbytes = SupportClass.ToByteArray(((Asn1OctetString) obj).byteValue());
        var data = new MemoryStream(dbytes);

        var dec = new LBERDecoder();

        var length = dbytes.Length;

        return (string) dec.decodeCharacterString(data, length);
    }

    /// <summary>
    ///     Extracts a nested ASN.1 sequence from a tagged element, verifying that
    ///     the element actually carries the expected tag.
    /// </summary>
    /// <exception cref="IOException">The element's tag does not match <paramref name="tagid"/>.</exception>
    protected Asn1Sequence getTaggedSequence(Asn1Tagged tagvalue, GeneralEventField tagid)
    {
        var obj = tagvalue.taggedValue();

        if ((int) tagid != tagvalue.getIdentifier().Tag)
        {
            throw new IOException("Unknown Tagged Data");
        }

        var dbytes = SupportClass.ToByteArray(((Asn1OctetString) obj).byteValue());
        var data = new MemoryStream(dbytes);

        var dec = new LBERDecoder();
        var length = dbytes.Length;

        return new Asn1Sequence(dec, data, length);
    }

    /// <summary>
    /// Returns a string representation of the object.
    /// </summary>
    public override string ToString()
    {
        var buf = new StringBuilder();

        buf.Append("[GeneralDSEventData");
        buf.AppendFormat("(DSTime={0})", ds_time);
        buf.AppendFormat("(MilliSeconds={0})", milli_seconds);
        buf.AppendFormat("(verb={0})", nVerb);
        buf.AppendFormat("(currentProcess={0})", current_process);
        // Fixed typo: was "PerpetartorDN".
        buf.AppendFormat("(PerpetratorDN={0})", strPerpetratorDN);
        // Fixed: the arrays previously rendered as their type names
        // (e.g. "System.Int32[]") instead of their contents.
        buf.AppendFormat("(Integer Values={0})", integer_values == null ? "null" : string.Join(",", integer_values));
        buf.AppendFormat("(String Values={0})", string_values == null ? "null" : string.Join(",", string_values));
        buf.Append("]");

        return buf.ToString();
    }
}
}
| |
#region Header
/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* Copyright (c) 2007-2008 James Nies and NArrange contributors.
* All rights reserved.
*
* This program and the accompanying materials are made available under
* the terms of the Common Public License v1.0 which accompanies this
* distribution.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*<author>James Nies</author>
*<contributor>Everton Elvio Koser</contributor>
*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/
#endregion Header
namespace NArrange.Core
{
using System;
#region Enumerations
/// <summary>
/// Binary expression operator used when evaluating condition expressions.
/// </summary>
public enum BinaryExpressionOperator
{
    /// <summary>
    /// Equality operator.
    /// </summary>
    Equal,
    /// <summary>
    /// Not equal operator.
    /// </summary>
    NotEqual,
    /// <summary>
    /// Contains a substring.
    /// </summary>
    Contains,
    /// <summary>
    /// Matches a substring. NOTE(review): presumably a pattern/regex match as
    /// opposed to <see cref="Contains"/> — confirm against the expression evaluator.
    /// </summary>
    Matches,
    /// <summary>
    /// Logical And.
    /// </summary>
    And,
    /// <summary>
    /// Logical Or.
    /// </summary>
    Or
}
/// <summary>
/// Code access level. Values are flags and may be combined bitwise
/// (e.g. Protected | Internal).
/// </summary>
[Flags]
public enum CodeAccess
{
    /// <summary>
    /// None/Not specified.
    /// </summary>
    None = 0,
    /// <summary>
    /// Private accessibility.
    /// </summary>
    Private = 1,
    /// <summary>
    /// Protected/family accessibility.
    /// </summary>
    Protected = 2,
    /// <summary>
    /// Internal/assembly accessibility.
    /// </summary>
    Internal = 4,
    /// <summary>
    /// Public accessibility.
    /// </summary>
    Public = 8
}
/// <summary>
/// Represents a level (scope) in a code file.
/// </summary>
public enum CodeLevel
{
    /// <summary>
    /// None, not specified.
    /// </summary>
    None,
    /// <summary>
    /// File level.
    /// </summary>
    File,
    /// <summary>
    /// Namespace level.
    /// </summary>
    Namespace
}
/// <summary>
/// Comment type.
/// </summary>
public enum CommentType
{
    /// <summary>
    /// Single line comment.
    /// </summary>
    Line,
    /// <summary>
    /// Single line XML documentation comment.
    /// </summary>
    XmlLine,
    /// <summary>
    /// Block (multi-line) comment.
    /// </summary>
    Block
}
/// <summary>
/// Scope an element attribute is evaluated against.
/// </summary>
public enum ElementAttributeScope
{
    /// <summary>
    /// The element itself.
    /// </summary>
    Element,
    /// <summary>
    /// The element's parent.
    /// </summary>
    Parent
}
/// <summary>
/// Element attribute referenced by condition/filter expressions.
/// </summary>
public enum ElementAttributeType
{
    /// <summary>
    /// None/Not specified.
    /// </summary>
    None,
    /// <summary>
    /// Name attribute.
    /// </summary>
    Name,
    /// <summary>
    /// Access attribute.
    /// </summary>
    Access,
    /// <summary>
    /// Modifier attribute.
    /// </summary>
    Modifier,
    /// <summary>
    /// Element Type attribute.
    /// </summary>
    ElementType,
    /// <summary>
    /// Type attribute.
    /// </summary>
    Type,
    /// <summary>
    /// Attributes attribute.
    /// </summary>
    Attributes
}
/// <summary>
/// Code element type.
/// </summary>
public enum ElementType
{
    /// <summary>
    /// Not specified.
    /// </summary>
    NotSpecified,
    /// <summary>
    /// Comment element type.
    /// </summary>
    Comment,
    /// <summary>
    /// Attribute element type.
    /// </summary>
    Attribute,
    /// <summary>
    /// Using statement element type.
    /// </summary>
    Using,
    /// <summary>
    /// Namespace element type.
    /// </summary>
    Namespace,
    /// <summary>
    /// Region element type.
    /// </summary>
    Region,
    /// <summary>
    /// Condition directive element type.
    /// </summary>
    ConditionDirective,
    /// <summary>
    /// Field element type.
    /// </summary>
    Field,
    /// <summary>
    /// Constructor element type.
    /// </summary>
    Constructor,
    /// <summary>
    /// Property element type.
    /// </summary>
    Property,
    /// <summary>
    /// Method element type.
    /// </summary>
    Method,
    /// <summary>
    /// Event element type.
    /// </summary>
    Event,
    /// <summary>
    /// Delegate element type.
    /// </summary>
    Delegate,
    /// <summary>
    /// Type element type (class, struct, interface, enum, module).
    /// </summary>
    Type,
}
/// <summary>
/// File attribute referenced by condition/filter expressions.
/// </summary>
public enum FileAttributeType
{
    /// <summary>
    /// None/Not specified.
    /// </summary>
    None,
    /// <summary>
    /// File name.
    /// </summary>
    Name,
    /// <summary>
    /// File path.
    /// </summary>
    Path,
    /// <summary>
    /// Attributes for the file.
    /// </summary>
    Attributes
}
/// <summary>
/// Grouping separator type.
/// </summary>
public enum GroupSeparatorType
{
    /// <summary>
    /// New line separator.
    /// </summary>
    NewLine,
    /// <summary>
    /// Custom separator string.
    /// </summary>
    Custom
}
/// <summary>
/// Handler type.
/// </summary>
public enum HandlerType
{
    /// <summary>
    /// Source (code file) handler.
    /// </summary>
    Source,
    /// <summary>
    /// Project handler.
    /// </summary>
    Project
}
/// <summary>
/// Enumeration for interface implementation types.
/// </summary>
public enum InterfaceReferenceType
{
    /// <summary>
    /// None/Unknown reference type.
    /// </summary>
    None,
    /// <summary>
    /// Base class implementation.
    /// </summary>
    Class,
    /// <summary>
    /// Interface implementation.
    /// </summary>
    Interface
}
/// <summary>
/// Log level, ordered from most to least severe.
/// </summary>
public enum LogLevel
{
    /// <summary>
    /// Error message log level.
    /// </summary>
    Error,
    /// <summary>
    /// Warning message log level.
    /// </summary>
    Warning,
    /// <summary>
    /// Informational message log level.
    /// </summary>
    Info,
    /// <summary>
    /// Verbose log level.
    /// </summary>
    Verbose,
    /// <summary>
    /// Trace log level.
    /// </summary>
    Trace
}
/// <summary>
/// Member modifiers. Values are flags (powers of two) and may be
/// combined bitwise (e.g. Static | ReadOnly).
/// </summary>
[Flags]
public enum MemberModifiers
{
    /// <summary>
    /// None/Not specified.
    /// </summary>
    None = 0,
    /// <summary>
    /// Abstract member.
    /// </summary>
    Abstract = 1,
    /// <summary>
    /// Sealed member.
    /// </summary>
    Sealed = 2,
    /// <summary>
    /// Static member.
    /// </summary>
    Static = 4,
    /// <summary>
    /// Unsafe member.
    /// </summary>
    Unsafe = 8,
    /// <summary>
    /// Virtual member.
    /// </summary>
    Virtual = 16,
    /// <summary>
    /// Override member.
    /// </summary>
    Override = 32,
    /// <summary>
    /// New (shadowing) member.
    /// </summary>
    New = 64,
    /// <summary>
    /// ReadOnly member.
    /// </summary>
    ReadOnly = 128,
    /// <summary>
    /// Constant member.
    /// </summary>
    Constant = 256,
    /// <summary>
    /// External member.
    /// </summary>
    External = 512,
    /// <summary>
    /// Partial member.
    /// </summary>
    Partial = 1024
}
/// <summary>
/// Conversion operator type.
/// </summary>
public enum OperatorType
{
    /// <summary>
    /// None/Not specified.
    /// </summary>
    None = 0,
    /// <summary>
    /// Explicit conversion operator.
    /// </summary>
    Explicit = 1,
    /// <summary>
    /// Implicit conversion operator.
    /// </summary>
    Implicit = 2
}
/// <summary>
/// Region style, i.e. how grouped members are delimited in the output.
/// </summary>
public enum RegionStyle
{
    /// <summary>
    /// Default region style.
    /// </summary>
    Default,
    /// <summary>
    /// Use region directives around region members.
    /// </summary>
    Directive,
    /// <summary>
    /// Commented directives around region members.
    /// </summary>
    CommentDirective,
    /// <summary>
    /// No directives around region members, just group.
    /// </summary>
    NoDirective
}
/// <summary>
/// Sort direction.
/// </summary>
public enum SortDirection
{
    /// <summary>
    /// None, do not sort.
    /// </summary>
    None,
    /// <summary>
    /// Sort in ascending order.
    /// </summary>
    Ascending,
    /// <summary>
    /// Sort in descending order.
    /// </summary>
    Descending
}
/// <summary>
/// Tabbing (indentation) style.
/// </summary>
public enum TabStyle
{
    /// <summary>
    /// Uses spaces when writing elements.
    /// </summary>
    Spaces,
    /// <summary>
    /// Use tabs when writing elements.
    /// </summary>
    Tabs
}
/// <summary>
/// Kind of type element.
/// </summary>
public enum TypeElementType
{
    /// <summary>
    /// Module element type.
    /// </summary>
    Module,
    /// <summary>
    /// Class element type.
    /// </summary>
    Class,
    /// <summary>
    /// Structure element type.
    /// </summary>
    Structure,
    /// <summary>
    /// Interface element type.
    /// </summary>
    Interface,
    /// <summary>
    /// Enumeration element type.
    /// </summary>
    Enum
}
/// <summary>
/// Type attributes.
/// </summary>
/// <remarks>
/// This is a subset of member attributes that apply to types.
/// Each value aliases the corresponding <see cref="MemberModifiers"/> value,
/// so the two enums can be compared/converted without remapping.
/// Flags enumeration; values may be combined bitwise.
/// </remarks>
[Flags]
public enum TypeModifiers
{
    /// <summary>
    /// None, no modifiers specified.
    /// </summary>
    None = MemberModifiers.None,

    /// <summary>
    /// Abstract type declaration.
    /// </summary>
    Abstract = MemberModifiers.Abstract,

    /// <summary>
    /// Sealed type declaration.
    /// </summary>
    Sealed = MemberModifiers.Sealed,

    /// <summary>
    /// Static type declaration.
    /// </summary>
    Static = MemberModifiers.Static,

    /// <summary>
    /// Unsafe type declaration.
    /// </summary>
    Unsafe = MemberModifiers.Unsafe,

    /// <summary>
    /// Partial type declaration.
    /// </summary>
    Partial = MemberModifiers.Partial,

    /// <summary>
    /// New type declaration.
    /// </summary>
    New = MemberModifiers.New
}
/// <summary>
/// Unary expression operator.
/// </summary>
public enum UnaryExpressionOperator
{
    /// <summary>
    /// Logical negate.
    /// </summary>
    Negate
}
/// <summary>
/// Using type: distinguishes the two forms of a using directive.
/// </summary>
public enum UsingType
{
    /// <summary>
    /// Namespace import (e.g. "using System;").
    /// </summary>
    Namespace,

    /// <summary>
    /// Redefinition of namespace or type (e.g. "using Alias = Some.Type;").
    /// </summary>
    Alias
}
/// <summary>
/// Whitespace character types.
/// </summary>
/// <remarks>Flags enumeration; values may be combined bitwise.</remarks>
[Flags]
public enum WhiteSpaceTypes
{
    /// <summary>
    /// None, do not include any whitespace.
    /// </summary>
    None = 0,

    /// <summary>
    /// Include spaces.
    /// </summary>
    Space = 1,

    /// <summary>
    /// Include tabs.
    /// </summary>
    Tab = 2,

    /// <summary>
    /// Include carriage returns.
    /// </summary>
    CarriageReturn = 4,

    /// <summary>
    /// Include line feeds.
    /// </summary>
    Linefeed = 8,

    /// <summary>
    /// Include spaces and tabs (Space | Tab).
    /// </summary>
    SpaceAndTab = Space | Tab,

    /// <summary>
    /// Include carriage returns and line feeds (CarriageReturn | Linefeed).
    /// </summary>
    CarriageReturnAndLinefeed = CarriageReturn | Linefeed,

    /// <summary>
    /// Include all whitespace characters.
    /// </summary>
    All = SpaceAndTab | CarriageReturnAndLinefeed
}
#endregion Enumerations
}
| |
// **********************************************************************************
// CassiniDev - http://cassinidev.codeplex.com
//
// Copyright (c) 2010 Sky Sanders. All rights reserved.
//
// This source code is subject to terms and conditions of the Microsoft Public
// License (Ms-PL). A copy of the license can be found in the license.txt file
// included in this distribution.
//
// You must not remove this notice, or any other, from this software.
//
// **********************************************************************************
#region
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using System.Threading;
#endregion
namespace CassiniDev
{
/// <summary>
/// Command line arguments for the CassiniDev web server.
///
/// fixed 5/24/10 - quoted embedded spaces in ToString
/// </summary>
/// <remarks>
/// Fields are bound from the command line via <c>[Argument]</c> attributes.
/// <see cref="ToArgs"/>/<see cref="ToString"/> round-trip the values back into
/// command-line form; values that still equal their defaults are omitted.
/// </remarks>
public class CommandLineArguments
{
    #region Properties

    /// <summary>Relative virtual path used to open the default browser (optional).</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "br", LongName = "browse",
        HelpText = "Relative virtual path to open default browser.")]
    public string Browse;

    /// <summary>If true, add an entry to the Windows hosts file (requires write access).</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "ah", DefaultValue = false,
        HelpText = "If true add entry to Windows hosts file. Requires write permissions to hosts file.")]
    public bool AddHost;

    /// <summary>Physical location of the web content to serve.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "a", LongName = "path",
        HelpText = "Physical location of content.")]
    public string ApplicationPath;

    /// <summary>Enables request logging when true.</summary>
    [Argument(ArgumentType.AtMostOnce, LongName = "log", DefaultValue = false, HelpText = "Enable logging.")]
    public bool EnableLogging;

    /// <summary>Host name used for the app root URL; required when <see cref="AddHost"/> is true.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "h", LongName = "host",
        HelpText = "Host name used for app root url. Optional unless AddHost is true.")]
    public string HostName;

    /// <summary>IP address to listen on; only used when <see cref="IPMode"/> is Specific.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "i", LongName = "ip",
        HelpText = "IP address to listen to. Ignored if IPMode != Specific")]
    // ReSharper disable InconsistentNaming
    public string IPAddress;
    // ReSharper restore InconsistentNaming

    /// <summary>How the listen address is chosen (Loopback, Any or Specific).</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "im", DefaultValue = IPMode.Loopback, HelpText = "",
        LongName = "ipMode")]
    // ReSharper disable InconsistentNaming
    public IPMode IPMode;
    // ReSharper restore InconsistentNaming

    /// <summary>Use the IPv6 loopback/any address when <see cref="IPMode"/> is Loopback or Any.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "v6", DefaultValue = false,
        HelpText = "If IPMode 'Any' or 'LoopBack' are specified use the V6 address", LongName = "ipV6")]
    // ReSharper disable InconsistentNaming
    public bool IPv6;
    // ReSharper restore InconsistentNaming

    /// <summary>Disables directory listing when true.</summary>
    [Argument(ArgumentType.AtMostOnce, LongName = "nodirlist", DefaultValue = false,
        HelpText = "Disable directory listing")]
    public bool Nodirlist;

    /// <summary>Run under the current Windows identity (NTLM) when true.</summary>
    [Argument(ArgumentType.AtMostOnce, LongName = "ntlm", DefaultValue = false, HelpText = "Run as current identity"
        )]
    public bool Ntlm;

    /// <summary>Specific port to listen on; ignored when <see cref="PortMode"/> is FirstAvailable.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "p", LongName = "port",
        HelpText = "Port to listen to. Ignored if PortMode=FirstAvailable.", DefaultValue = 0)]
    public int Port;

    /// <summary>How the listen port is chosen (FirstAvailable scans a range; Specific uses <see cref="Port"/>).</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "pm", HelpText = "", LongName = "portMode",
        DefaultValue = PortMode.FirstAvailable)]
    public PortMode PortMode;

    /// <summary>End of the scanned port range; ignored unless <see cref="PortMode"/> is FirstAvailable.</summary>
    // NOTE(review): field initializer (9000) differs from the attribute DefaultValue (65535)
    // that ToArgs() compares against — looks intentional for the parser but verify.
    [Argument(ArgumentType.AtMostOnce, ShortName = "pre", DefaultValue = 65535, LongName = "highPort",
        HelpText = "End of port range. Ignored if PortMode != FirstAvailable")]
    public int PortRangeEnd = 9000;

    /// <summary>Start of the scanned port range; ignored unless <see cref="PortMode"/> is FirstAvailable.</summary>
    // NOTE(review): field initializer (8080) differs from the attribute DefaultValue (32768)
    // that ToArgs() compares against — looks intentional for the parser but verify.
    [Argument(ArgumentType.AtMostOnce, ShortName = "prs", DefaultValue = 32768, LongName = "lowPort",
        HelpText = "Start of port range. Ignored if PortMode != FirstAvailable")]
    public int PortRangeStart = 8080;

    /// <summary>Top-level run mode: Server (default) or Hostsfile maintenance.</summary>
    [DefaultArgument(ArgumentType.AtMostOnce, DefaultValue = RunMode.Server, HelpText = "[Server|Hostsfile]")]
    public RunMode RunMode;

    /// <summary>Suppress error output when true.</summary>
    [Argument(ArgumentType.AtMostOnce, LongName = "silent", DefaultValue = false, HelpText = "Fail silently")]
    public bool Silent;

    /// <summary>Idle timeout in ms before the server stops; 0 means no timeout.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "t", DefaultValue = 0, LongName = "timeout",
        HelpText = "Length of time, in ms, to wait for a request before stopping the server. 0 = no timeout.")]
    public int TimeOut;

    /// <summary>Virtual path of the application root; normalized to start with '/'.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "v", LongName = "vpath", DefaultValue = "/",
        HelpText = "Optional. default value '/'"
        )]
    public string VirtualPath = "/";

    /// <summary>Run in Visual Studio Development Server compatibility mode.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "vs", DefaultValue = false,
        HelpText = "If true run in Visual Studio Development Server mode - readonly UI with single option to quit.."
        )]
    public bool VisualStudio;

    /// <summary>Time in ms to wait for a specific port to become free; 0 means don't wait.</summary>
    [Argument(ArgumentType.AtMostOnce, ShortName = "w", DefaultValue = 0, LongName = "wait",
        HelpText =
            "Length of time, in ms, to wait for a specific port before throwing an exception or exiting. 0 = don't wait."
        )]
    public int WaitForPort;

    #endregion

    /// <summary>
    /// Converts the current values back into an argument array, omitting values
    /// that equal their defaults. Values containing embedded spaces are quoted.
    /// </summary>
    /// <returns>Command-line arguments equivalent to this instance.</returns>
    public string[] ToArgs()
    {
        List<string> result = new List<string>();
        if (RunMode != RunMode.Server)
        {
            result.Add(string.Format("{0}", RunMode));
        }
        // BUGFIX: was Contains("") (always true, quoting every value); the intent —
        // per the class history note and the retired ToString implementation —
        // is to quote only values with embedded spaces.
        if (!string.IsNullOrEmpty(ApplicationPath))
        {
            result.Add(string.Format("/a:{0}", ApplicationPath.Contains(" ") ? String.Format("\"{0}\"", ApplicationPath) : ApplicationPath));
        }
        result.Add(string.Format("/v:{0}", VirtualPath.Contains(" ") ? String.Format("\"{0}\"", VirtualPath) : VirtualPath));
        if (!string.IsNullOrEmpty(HostName))
        {
            result.Add(string.Format("/h:{0}", HostName.Contains(" ") ? String.Format("\"{0}\"", HostName) : HostName));
        }
        if (AddHost)
        {
            result.Add("/ah");
        }
        if (IPMode != IPMode.Loopback)
        {
            result.Add(string.Format("/im:{0}", IPMode));
        }
        if (!string.IsNullOrEmpty(IPAddress))
        {
            result.Add(string.Format("/i:{0}", IPAddress));
        }
        if (IPv6)
        {
            result.Add("/v6");
        }
        if (VisualStudio)
        {
            result.Add("/vs");
        }
        if (PortMode != PortMode.FirstAvailable)
        {
            result.Add(string.Format("/pm:{0}", PortMode));
        }
        if (Port != 0)
        {
            result.Add(string.Format("/p:{0}", Port));
        }
        if (PortRangeStart != 32768)
        {
            result.Add(string.Format("/prs:{0}", PortRangeStart));
        }
        if (PortRangeEnd != 65535)
        {
            result.Add(string.Format("/pre:{0}", PortRangeEnd));
        }
        if (TimeOut > 0)
        {
            result.Add(string.Format("/t:{0}", TimeOut));
        }
        if (WaitForPort > 0)
        {
            result.Add(string.Format("/w:{0}", WaitForPort));
        }
        if (Ntlm)
        {
            result.Add("/ntlm");
        }
        if (Silent)
        {
            result.Add("/silent");
        }
        if (Nodirlist)
        {
            result.Add("/nodirlist");
        }
        if (EnableLogging)
        {
            result.Add("/log");
        }
        if (!string.IsNullOrEmpty(Browse))
        {
            result.Add(string.Format("/br:{0}", Browse));
        }
        return result.ToArray();
    }

    /// <summary>
    /// Returns the space-joined command line equivalent of this instance.
    /// </summary>
    /// <returns>
    /// A <see cref="T:System.String"/> that represents the current <see cref="T:System.Object"/>.
    /// </returns>
    /// <filterpriority>2</filterpriority>
    public override string ToString()
    {
        return string.Join(" ", ToArgs());
    }

    /// <summary>
    /// Validates and normalizes the parsed arguments, resolving the IP address
    /// and (unless in Visual Studio mode) the listen port.
    /// </summary>
    /// <exception cref="CassiniException">When a required value is missing or invalid.</exception>
    internal void Validate()
    {
        if (string.IsNullOrEmpty(ApplicationPath))
        {
            throw new CassiniException(SR.ErrApplicationPathIsNull, ErrorField.ApplicationPath);
        }
        try
        {
            ApplicationPath = Path.GetFullPath(ApplicationPath);
        }
        catch
        {
            // Best effort: leave the path as supplied; the existence check below
            // reports a meaningful error for unusable paths.
        }
        if (!Directory.Exists(ApplicationPath))
        {
            throw new CassiniException(SR.WebdevDirNotExist, ErrorField.ApplicationPath);
        }
        ApplicationPath = ApplicationPath.Trim('\"').TrimEnd('\\');

        // Normalize the virtual path so it always starts with exactly one '/'.
        if (!string.IsNullOrEmpty(VirtualPath))
        {
            VirtualPath = VirtualPath.Trim('\"');
            VirtualPath = VirtualPath.Trim('/');
            VirtualPath = "/" + VirtualPath;
        }
        else
        {
            VirtualPath = "/";
        }
        if (!VirtualPath.StartsWith("/"))
        {
            VirtualPath = "/" + VirtualPath;
        }
        if (AddHost && string.IsNullOrEmpty(HostName))
        {
            throw new CassiniException(SR.ErrInvalidHostname, ErrorField.HostName);
        }
        IPAddress = ParseIP(IPMode, IPv6, IPAddress).ToString();
        if (VisualStudio) // then STOP HERE.
        {
            // It is fortunate that in order to provide api parity with WebDev
            // we do not need to port scan. Visual Studio balks and refuses to
            // attach if we monkey around and open ports.
            Port = Port == 0 ? 80 : Port;
            PortMode = PortMode.Specific;
            return;
        }
        switch (PortMode)
        {
            case PortMode.FirstAvailable:
                if (PortRangeStart < 1)
                {
                    throw new CassiniException(SR.ErrInvalidPortRangeValue, ErrorField.PortRangeStart);
                }
                if (PortRangeEnd < 1)
                {
                    throw new CassiniException(SR.ErrInvalidPortRangeValue, ErrorField.PortRangeEnd);
                }
                if (PortRangeStart > PortRangeEnd)
                {
                    throw new CassiniException(SR.ErrPortRangeEndMustBeEqualOrGreaterThanPortRangeSta,
                                               ErrorField.PortRange);
                }
                Port = CassiniNetworkUtils.GetAvailablePort(PortRangeStart, PortRangeEnd,
                                                            System.Net.IPAddress.Parse(IPAddress), true);
                if (Port == 0)
                {
                    throw new CassiniException(SR.ErrNoAvailablePortFound, ErrorField.PortRange);
                }
                break;
            case PortMode.Specific:
                if ((Port < 1) || (Port > 0xffff))
                {
                    throw new CassiniException(SR.ErrPortOutOfRange, ErrorField.Port);
                }
                // start waiting....
                //TODO: design this hack away.... why am I waiting in a validation method?
                int now = Environment.TickCount;
                // wait until either 1) the specified port is available or 2) the specified amount of time has passed
                while (Environment.TickCount < now + WaitForPort &&
                       CassiniNetworkUtils.GetAvailablePort(Port, Port, System.Net.IPAddress.Parse(IPAddress), true) !=
                       Port)
                {
                    Thread.Sleep(100);
                }
                // is the port available?
                if (CassiniNetworkUtils.GetAvailablePort(Port, Port, System.Net.IPAddress.Parse(IPAddress), true) !=
                    Port)
                {
                    throw new CassiniException(SR.ErrPortIsInUse, ErrorField.Port);
                }
                break;
            default:
                throw new CassiniException(SR.ErrInvalidPortMode, ErrorField.None);
        }
    }

    /// <summary>
    /// Converts CommandLineArgument values to an IP address if possible.
    /// Throws Exception if not.
    /// </summary>
    /// <param name="ipmode">How the address should be chosen.</param>
    /// <param name="v6">Use the IPv6 variant for Loopback/Any modes.</param>
    /// <param name="ipString">Literal address; only consulted when <paramref name="ipmode"/> is Specific.</param>
    /// <returns>The resolved <see cref="System.Net.IPAddress"/>.</returns>
    /// <exception cref="CassiniException">If IPMode is invalid</exception>
    /// <exception cref="CassiniException">If IPMode is 'Specific' and ipString is invalid</exception>
    public static IPAddress ParseIP(IPMode ipmode, bool v6, string ipString)
    {
        IPAddress ip;
        switch (ipmode)
        {
            case IPMode.Loopback:
                ip = v6 ? System.Net.IPAddress.IPv6Loopback : System.Net.IPAddress.Loopback;
                break;
            case IPMode.Any:
                ip = v6 ? System.Net.IPAddress.IPv6Any : System.Net.IPAddress.Any;
                break;
            case IPMode.Specific:
                if (!System.Net.IPAddress.TryParse(ipString, out ip))
                {
                    throw new CassiniException(SR.ErrInvalidIPAddress, ErrorField.IPAddress);
                }
                break;
            default:
                throw new CassiniException(SR.ErrInvalidIPMode, ErrorField.None);
        }
        return ip;
    }
}
}
| |
/*
* Qa full api
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: all
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace HostMe.Sdk.Model
{
/// <summary>
/// MemberUser
/// </summary>
[DataContract]
public partial class MemberUser : IEquatable<MemberUser>, IValidatableObject
{
    /// <summary>
    /// Initializes a new instance of the <see cref="MemberUser" /> class.
    /// </summary>
    /// <param name="Email">Email.</param>
    /// <param name="CustomerName">CustomerName.</param>
    /// <param name="Language">Language.</param>
    /// <param name="Phone">Phone.</param>
    /// <param name="Gender">Gender.</param>
    /// <param name="Dob">Dob.</param>
    public MemberUser(string Email = null, string CustomerName = null, string Language = null, string Phone = null, string Gender = null, DateTimeOffset? Dob = null)
    {
        this.Email = Email;
        this.CustomerName = CustomerName;
        this.Language = Language;
        this.Phone = Phone;
        this.Gender = Gender;
        this.Dob = Dob;
    }

    /// <summary>
    /// Gets or Sets Email
    /// </summary>
    [DataMember(Name="email", EmitDefaultValue=true)]
    public string Email { get; set; }

    /// <summary>
    /// Gets or Sets CustomerName
    /// </summary>
    [DataMember(Name="customerName", EmitDefaultValue=true)]
    public string CustomerName { get; set; }

    /// <summary>
    /// Gets or Sets Language
    /// </summary>
    [DataMember(Name="language", EmitDefaultValue=true)]
    public string Language { get; set; }

    /// <summary>
    /// Gets or Sets Phone
    /// </summary>
    [DataMember(Name="phone", EmitDefaultValue=true)]
    public string Phone { get; set; }

    /// <summary>
    /// Gets or Sets Gender
    /// </summary>
    [DataMember(Name="gender", EmitDefaultValue=true)]
    public string Gender { get; set; }

    /// <summary>
    /// Gets or Sets Dob
    /// </summary>
    [DataMember(Name="dob", EmitDefaultValue=true)]
    public DateTimeOffset? Dob { get; set; }

    /// <summary>
    /// Returns the string presentation of the object
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        // Emit one "  Name: value" line per property, in declaration order.
        return new StringBuilder()
            .Append("class MemberUser {\n")
            .Append("  Email: ").Append(Email).Append("\n")
            .Append("  CustomerName: ").Append(CustomerName).Append("\n")
            .Append("  Language: ").Append(Language).Append("\n")
            .Append("  Phone: ").Append(Phone).Append("\n")
            .Append("  Gender: ").Append(Gender).Append("\n")
            .Append("  Dob: ").Append(Dob).Append("\n")
            .Append("}\n")
            .ToString();
    }

    /// <summary>
    /// Returns the JSON string presentation of the object
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson()
    {
        return JsonConvert.SerializeObject(this, Formatting.Indented);
    }

    /// <summary>
    /// Returns true if objects are equal
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        var typed = obj as MemberUser;
        return Equals(typed);
    }

    /// <summary>
    /// Returns true if MemberUser instances are equal
    /// </summary>
    /// <param name="other">Instance of MemberUser to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(MemberUser other)
    {
        if (other == null)
        {
            return false;
        }

        // Property-wise comparison; null-safe value equality on every member.
        return string.Equals(Email, other.Email)
            && string.Equals(CustomerName, other.CustomerName)
            && string.Equals(Language, other.Language)
            && string.Equals(Phone, other.Phone)
            && string.Equals(Gender, other.Gender)
            && Nullable.Equals(Dob, other.Dob);
    }

    /// <summary>
    /// Gets the hash code
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        unchecked // Overflow is fine, just wrap
        {
            // Fold each non-null property into the hash with the same
            // 41/59 scheme the generator uses.
            var hash = 41;
            foreach (var component in new object[] { Email, CustomerName, Language, Phone, Gender, Dob })
            {
                if (component != null)
                {
                    hash = hash * 59 + component.GetHashCode();
                }
            }
            return hash;
        }
    }

    public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
    {
        // No model-level validation rules.
        return Enumerable.Empty<ValidationResult>();
    }
}
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
using System;
using System.Data;
using System.Data.Common;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using Pivotal.Data.GemFireXD;
namespace AdoNetTest.BIN.Parameter
{
/// <summary>
/// Verifies that DATE, TIME and TIMESTAMP columns can each be queried via a
/// typed command parameter, using a shared set of probe timestamps.
/// </summary>
class SelectDateTimeColumnsAsParameters : GFXDTest
{
    // Timestamp literals probed against every date/time column. Each one is
    // expected to match at least one row inserted by CreateTable().
    private static readonly string[] ProbeTimestamps =
    {
        "2009-09-09 09:09:09",
        "2010-10-10 10:10:10",
        "2011-11-11 11:11:11",
        "2011-04-24 12:12:12",
        "2011-04-26 12:12:12"
    };

    public SelectDateTimeColumnsAsParameters(ManualResetEvent resetEvent)
        : base(resetEvent)
    {
    }

    /// <summary>
    /// Creates the test table, probes each date/time column with parameterized
    /// queries, and drops the table again. Failures are reported via Fail().
    /// </summary>
    public override void Run(object context)
    {
        try
        {
            Log("Connection String = " + Connection.ConnectionString);
            CreateTable();

            Command.CommandText = "SELECT * FROM datetime_test ORDER BY id ASC";
            ReadData();

            // Same probe values, three column/DbType combinations.
            ProbeColumn("type_datetime", DbType.DateTime);
            ProbeColumn("type_date", DbType.Date);
            ProbeColumn("type_time", DbType.Time);
        }
        catch (Exception e)
        {
            Fail(e);
        }
        finally
        {
            try
            {
                DropTable();
            }
            catch (Exception e)
            {
                Fail(e);
            }
            base.Run(context);
        }
    }

    /// <summary>
    /// Runs one parameterized SELECT per probe timestamp against the given
    /// column and fails if any query returns no rows.
    /// </summary>
    private void ProbeColumn(string columnName, DbType parameterType)
    {
        Command.CommandText = string.Format(
            "SELECT * FROM datetime_test WHERE {0}=?", columnName);
        foreach (string timestamp in ProbeTimestamps)
        {
            Command.Parameters.Clear();
            GFXDParameter param = Command.CreateParameter();
            param.DbType = parameterType;
            param.Value = DateTime.Parse(timestamp);
            Command.Parameters.Add(param);
            if (ReadData() < 1)
                Fail("DataReader returns no rows");
            Log("===============================================================================");
        }
    }

    /// <summary>
    /// (Re)creates the datetime_test table and inserts six fixed rows.
    /// </summary>
    private void CreateTable()
    {
        DropTable();
        Command.CommandText = "CREATE TABLE datetime_test "
            + "(id INT primary key, "
            + "type_date DATE, "
            + "type_time TIME,"
            + "type_datetime TIMESTAMP)";
        Log(Command.CommandText);
        Command.ExecuteNonQuery();
        Command.CommandText = "INSERT INTO datetime_test VALUES" +
            " (1001, '2009-09-09', '09:09:09', '2009-09-09 09:09:09')," +
            " (1002, '2010-10-10', '10:10:10', '2010-10-10 10:10:10')," +
            " (1003, '2011-11-11', '11:11:11', '2011-11-11 11:11:11')," +
            " (1004, '2012-12-12', '12:12:12', '2012-12-12 12:12:12')," +
            " (1005, '2011-04-24', '12:12:12', '2011-04-24 12:12:12')," +
            " (1006, '2011-04-26', '12:12:12', '2011-04-26 12:12:12')";
        Log(Command.CommandText);
        Command.ExecuteNonQuery();
    }

    /// <summary>
    /// Executes the current command, logs each row, and returns the row count.
    /// </summary>
    private int ReadData()
    {
        int rows = 0;
        Log(String.Format("{0}", Command.CommandText));
        DataReader = Command.ExecuteReader();
        while (DataReader.Read())
        {
            rows += 1;
            Log(String.Format("{0}, {1}, {2}, {3}", DataReader.GetString(0),
                DataReader.GetString(1), DataReader.GetString(2), DataReader.GetString(3)));
        }
        DataReader.Close();
        return rows;
    }

    /// <summary>
    /// Drops the datetime_test table if it exists.
    /// </summary>
    private void DropTable()
    {
        if (DbHelper.TableExists("DATETIME_TEST"))
        {
            Command.CommandText = "DROP TABLE DATETIME_TEST";
            Command.Parameters.Clear();
            Command.ExecuteNonQuery();
        }
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Internal.Resources
{
using Azure;
using Management;
using Internal;
using Rest;
using Rest.Azure;
using Rest.Azure.OData;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for ResourceGroupsOperations.
/// </summary>
public static partial class ResourceGroupsOperationsExtensions
{
/// <summary>
/// Get all the resources for a resource group.
/// </summary>
/// <remarks>
/// Synchronous wrapper: blocks the calling thread until
/// <see cref="ListResourcesAsync"/> completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group with the resources to get.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <returns>The first page of resources in the resource group.</returns>
public static IPage<GenericResource> ListResources(this IResourceGroupsOperations operations, string resourceGroupName, ODataQuery<GenericResourceFilter> odataQuery = default(ODataQuery<GenericResourceFilter>))
{
    return operations.ListResourcesAsync(resourceGroupName, odataQuery).GetAwaiter().GetResult();
}
/// <summary>
/// Get all the resources for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group with the resources to get.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>A task yielding the first page of resources in the resource group.</returns>
public static async Task<IPage<GenericResource>> ListResourcesAsync(this IResourceGroupsOperations operations, string resourceGroupName, ODataQuery<GenericResourceFilter> odataQuery = default(ODataQuery<GenericResourceFilter>), CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP operation response after extracting its body.
    using (var _result = await operations.ListResourcesWithHttpMessagesAsync(resourceGroupName, odataQuery, null, cancellationToken).ConfigureAwait(false))
    {
        return _result.Body;
    }
}
/// <summary>
/// Checks whether a resource group exists.
/// </summary>
/// <remarks>
/// Synchronous wrapper: blocks the calling thread until
/// <see cref="CheckExistenceAsync"/> completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to check. The name is case insensitive.
/// </param>
/// <returns>True if the resource group exists; otherwise false.</returns>
public static bool CheckExistence(this IResourceGroupsOperations operations, string resourceGroupName)
{
    return operations.CheckExistenceAsync(resourceGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Checks whether a resource group exists.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to check. The name is case insensitive.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>A task yielding true if the resource group exists; otherwise false.</returns>
public static async Task<bool> CheckExistenceAsync(this IResourceGroupsOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP operation response after extracting its body.
    using (var _result = await operations.CheckExistenceWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false))
    {
        return _result.Body;
    }
}
/// <summary>
/// Creates a resource group.
/// </summary>
/// <remarks>
/// Synchronous wrapper: blocks the calling thread until
/// <see cref="CreateOrUpdateAsync"/> completes.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to create or update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the create or update a resource group.
/// </param>
/// <returns>The created or updated resource group.</returns>
public static ResourceGroup CreateOrUpdate(this IResourceGroupsOperations operations, string resourceGroupName, ResourceGroup parameters)
{
    return operations.CreateOrUpdateAsync(resourceGroupName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Creates a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to create or update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the create or update a resource group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>A task yielding the created or updated resource group.</returns>
public static async Task<ResourceGroup> CreateOrUpdateAsync(this IResourceGroupsOperations operations, string resourceGroupName, ResourceGroup parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP operation response after extracting its body.
    using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, parameters, null, cancellationToken).ConfigureAwait(false))
    {
        return _result.Body;
    }
}
/// <summary>
/// Deletes a resource group.
/// </summary>
/// <remarks>
/// When you delete a resource group, all of its resources are also deleted.
/// Deleting a resource group deletes all of its template deployments and
/// currently stored operations.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to delete. The name is case insensitive.
/// </param>
public static void Delete(this IResourceGroupsOperations operations, string resourceGroupName) =>
    operations.DeleteAsync(resourceGroupName).GetAwaiter().GetResult();
/// <summary>
/// Deletes a resource group.
/// </summary>
/// <remarks>
/// When you delete a resource group, all of its resources are also deleted.
/// Deleting a resource group deletes all of its template deployments and
/// currently stored operations.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to delete. The name is case insensitive.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IResourceGroupsOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the operation response: every other wrapper in this class disposes
    // its result via 'using', but the original awaited this call and leaked the
    // IDisposable response (and its underlying HttpResponseMessage).
    (await operations.DeleteWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Gets a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to get. The name is case insensitive.
/// </param>
public static ResourceGroup Get(this IResourceGroupsOperations operations, string resourceGroupName) =>
    operations.GetAsync(resourceGroupName).GetAwaiter().GetResult();
/// <summary>
/// Gets a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to get. The name is case insensitive.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ResourceGroup> GetAsync(this IResourceGroupsOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.GetWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP response wrapper once the body has been read.
    using (response)
    {
        return response.Body;
    }
}
/// <summary>
/// Updates a resource group.
/// </summary>
/// <remarks>
/// Resource groups can be updated through a simple PATCH operation to a group
/// address. The format of the request is the same as that for creating a
/// resource group. If a field is unspecified, the current value is retained.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to update. The name is case insensitive.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update a resource group.
/// </param>
public static ResourceGroup Patch(this IResourceGroupsOperations operations, string resourceGroupName, ResourceGroup parameters) =>
    operations.PatchAsync(resourceGroupName, parameters).GetAwaiter().GetResult();
/// <summary>
/// Updates a resource group.
/// </summary>
/// <remarks>
/// Resource groups can be updated through a simple PATCH operation to a group
/// address. The format of the request is the same as that for creating a
/// resource group. If a field is unspecified, the current value is retained.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to update. The name is case insensitive.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update a resource group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ResourceGroup> PatchAsync(this IResourceGroupsOperations operations, string resourceGroupName, ResourceGroup parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.PatchWithHttpMessagesAsync(resourceGroupName, parameters, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP response wrapper once the body has been read.
    using (response)
    {
        return response.Body;
    }
}
/// <summary>
/// Captures the specified resource group as a template.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to export as a template.
/// </param>
/// <param name='parameters'>
/// Parameters for exporting the template.
/// </param>
public static ResourceGroupExportResult ExportTemplate(this IResourceGroupsOperations operations, string resourceGroupName, ExportTemplateRequest parameters) =>
    operations.ExportTemplateAsync(resourceGroupName, parameters).GetAwaiter().GetResult();
/// <summary>
/// Captures the specified resource group as a template.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to export as a template.
/// </param>
/// <param name='parameters'>
/// Parameters for exporting the template.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ResourceGroupExportResult> ExportTemplateAsync(this IResourceGroupsOperations operations, string resourceGroupName, ExportTemplateRequest parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.ExportTemplateWithHttpMessagesAsync(resourceGroupName, parameters, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP response wrapper once the body has been read.
    using (response)
    {
        return response.Body;
    }
}
/// <summary>
/// Gets all the resource groups for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
public static IPage<ResourceGroup> List(this IResourceGroupsOperations operations, ODataQuery<ResourceGroupFilter> odataQuery = default(ODataQuery<ResourceGroupFilter>)) =>
    operations.ListAsync(odataQuery).GetAwaiter().GetResult();
/// <summary>
/// Gets all the resource groups for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ResourceGroup>> ListAsync(this IResourceGroupsOperations operations, ODataQuery<ResourceGroupFilter> odataQuery = default(ODataQuery<ResourceGroupFilter>), CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.ListWithHttpMessagesAsync(odataQuery, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP response wrapper once the body has been read.
    using (response)
    {
        return response.Body;
    }
}
/// <summary>
/// Deletes a resource group.
/// </summary>
/// <remarks>
/// When you delete a resource group, all of its resources are also deleted.
/// Deleting a resource group deletes all of its template deployments and
/// currently stored operations.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to delete. The name is case insensitive.
/// </param>
public static void BeginDelete(this IResourceGroupsOperations operations, string resourceGroupName) =>
    operations.BeginDeleteAsync(resourceGroupName).GetAwaiter().GetResult();
/// <summary>
/// Deletes a resource group.
/// </summary>
/// <remarks>
/// When you delete a resource group, all of its resources are also deleted.
/// Deleting a resource group deletes all of its template deployments and
/// currently stored operations.
/// </remarks>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to delete. The name is case insensitive.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDeleteAsync(this IResourceGroupsOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the operation response: every other wrapper in this class disposes
    // its result via 'using', but the original awaited this call and leaked the
    // IDisposable response (and its underlying HttpResponseMessage).
    (await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Get all the resources for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<GenericResource> ListResourcesNext(this IResourceGroupsOperations operations, string nextPageLink) =>
    operations.ListResourcesNextAsync(nextPageLink).GetAwaiter().GetResult();
/// <summary>
/// Get all the resources for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<GenericResource>> ListResourcesNextAsync(this IResourceGroupsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.ListResourcesNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP response wrapper once the body has been read.
    using (response)
    {
        return response.Body;
    }
}
/// <summary>
/// Gets all the resource groups for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<ResourceGroup> ListNext(this IResourceGroupsOperations operations, string nextPageLink) =>
    operations.ListNextAsync(nextPageLink).GetAwaiter().GetResult();
/// <summary>
/// Gets all the resource groups for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ResourceGroup>> ListNextAsync(this IResourceGroupsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    var response = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
    // Dispose the HTTP response wrapper once the body has been read.
    using (response)
    {
        return response.Body;
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace angah.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
// Start with empty lookup tables; callers populate them via HelpPageConfig-style registration.
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
// DefaultSampleObjectFactory is the built-in fallback; callers may insert
// overrides before it or append fallbacks after it.
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
// NOTE(review): Add throws on duplicate media types; assumes at most one direct
// action sample per media type matches this action — confirm with registrations.
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
// Direct action samples added above take precedence over generated ones.
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
// May return null if every factory returns null or throws; the caller
// treats a null sample object as "no sample to render".
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
// The resolved formatters are discarded here; only the type is needed.
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
// A registered "actual" message type overrides what the ApiDescription reports.
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
// Only body-bound parameters have a serializable request type.
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
// Blocking on the async write is acceptable here: this runs at help-page
// render time over an in-memory stream, not on a request hot path.
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
// Pretty-print known text formats; fall back to the raw string on parse failure.
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
/// <summary>
/// Unwraps an <see cref="AggregateException"/> (as thrown by <c>Task.Wait</c>) to its
/// first inner exception; returns any other exception unchanged.
/// </summary>
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
/// <summary>
/// Re-serializes a JSON string with indentation; returns the input unchanged if it cannot be parsed.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
/// <summary>
/// Re-serializes an XML string with indentation; returns the input unchanged if it cannot be parsed.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
/// <summary>
/// Determines whether the formatter can handle the type for the given direction
/// (read for requests, write for responses).
/// </summary>
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
/// <summary>
/// Enumerates the entries in <see cref="ActionSamples"/> whose key matches the given
/// controller, action, parameter set (or the "*" wildcard) and direction.
/// Name comparisons are case-insensitive; parameter-name order is ignored.
/// </summary>
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase)
}
| |
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
namespace RefactoringEssentials.CSharp.Diagnostics
{
[DiagnosticAnalyzer(LanguageNames.CSharp)]
/// <summary>
/// Finds redundant 'private' modifiers.
/// </summary>
public class RedundantPrivateAnalyzer : DiagnosticAnalyzer
{
// Descriptor for the "redundant 'private' modifier" diagnostic. Hidden severity
// plus the Unnecessary custom tag lets IDEs fade the token instead of flagging it.
static readonly DiagnosticDescriptor descriptor = new DiagnosticDescriptor(
CSharpDiagnosticIDs.RedundantPrivateAnalyzerID,
GettextCatalog.GetString("Removes 'private' modifiers that are not required"),
GettextCatalog.GetString("'private' modifier is redundant"),
DiagnosticAnalyzerCategories.RedundanciesInCode,
DiagnosticSeverity.Hidden,
isEnabledByDefault: true,
helpLinkUri: HelpLink.CreateFor(CSharpDiagnosticIDs.RedundantPrivateAnalyzerID),
customTags: DiagnosticCustomTags.Unnecessary
);
// This analyzer reports exactly one diagnostic kind.
public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(descriptor);
/// <summary>
/// Registers a syntax-node callback for every declaration kind that can carry a
/// 'private' modifier; each match is checked by <c>TryGetDiagnostic</c>.
/// </summary>
public override void Initialize(AnalysisContext context)
{
// Declaration kinds on which a redundant 'private' modifier may appear.
var handledKinds = new SyntaxKind[] {
SyntaxKind.MethodDeclaration,
SyntaxKind.FieldDeclaration,
SyntaxKind.PropertyDeclaration,
SyntaxKind.IndexerDeclaration,
SyntaxKind.EventDeclaration,
SyntaxKind.ConstructorDeclaration,
SyntaxKind.OperatorDeclaration,
SyntaxKind.ClassDeclaration,
SyntaxKind.InterfaceDeclaration,
SyntaxKind.StructDeclaration,
SyntaxKind.EnumDeclaration,
SyntaxKind.DelegateDeclaration
};
context.RegisterSyntaxNodeAction(ctx =>
{
Diagnostic diagnostic;
if (TryGetDiagnostic(ctx, out diagnostic))
{
ctx.ReportDiagnostic(diagnostic);
}
}, handledKinds);
}
static bool TryGetDiagnostic(SyntaxNodeAnalysisContext nodeContext, out Diagnostic diagnostic)
{
diagnostic = default(Diagnostic);
if (nodeContext.IsFromGeneratedCode())
return false;
var methodDeclaration = nodeContext.Node as MethodDeclarationSyntax;
if (methodDeclaration != null && methodDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, methodDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var fieldDeclaration = nodeContext.Node as FieldDeclarationSyntax;
if (fieldDeclaration != null && fieldDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, fieldDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var propertyDeclaration = nodeContext.Node as PropertyDeclarationSyntax;
if (propertyDeclaration != null && propertyDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, propertyDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var indexerDeclaration = nodeContext.Node as IndexerDeclarationSyntax;
if (indexerDeclaration != null && indexerDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, indexerDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var eventDeclaration = nodeContext.Node as EventDeclarationSyntax;
if (eventDeclaration != null && eventDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, eventDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var constructorDeclaration = nodeContext.Node as ConstructorDeclarationSyntax;
if (constructorDeclaration != null && constructorDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, constructorDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var operatorDeclaration = nodeContext.Node as OperatorDeclarationSyntax;
if (operatorDeclaration != null && operatorDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, operatorDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var delegateDeclaration = nodeContext.Node as DelegateDeclarationSyntax;
if (delegateDeclaration != null && delegateDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, delegateDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var enumDeclaration = nodeContext.Node as EnumDeclarationSyntax;
if (enumDeclaration != null && enumDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, enumDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var structDeclaration = nodeContext.Node as StructDeclarationSyntax;
if (structDeclaration != null && structDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, structDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var interfaceDeclaration = nodeContext.Node as InterfaceDeclarationSyntax;
if (interfaceDeclaration != null && interfaceDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, interfaceDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
var classDeclaration = nodeContext.Node as ClassDeclarationSyntax;
if (classDeclaration != null && classDeclaration.Modifiers.Any(m => m.IsKind(SyntaxKind.PrivateKeyword)))
{
diagnostic = Diagnostic.Create(descriptor, classDeclaration.Modifiers.FirstOrDefault(m => m.IsKind(SyntaxKind.PrivateKeyword)).GetLocation());
return true;
}
return false;
}
/// <summary>
/// Returns a copy of <paramref name="node"/> with every modifier token of kind
/// <paramref name="modifier"/> removed, preserving the node's leading trivia.
/// Declaration kinds without a supported modifier list are returned unchanged.
/// </summary>
/// <param name="node">The declaration node to rewrite.</param>
/// <param name="modifier">The modifier kind (e.g. <c>SyntaxKind.PrivateKeyword</c>) to strip.</param>
/// <returns>A new node without the modifier, or the original node if its kind is unsupported.</returns>
public static SyntaxNode RemoveModifierFromNode(SyntaxNode node, SyntaxKind modifier)
{
    // Roslyn exposes no common base type with WithModifiers, so each
    // declaration kind has to be handled explicitly.
    MethodDeclarationSyntax methodNode = node as MethodDeclarationSyntax;
    if (methodNode != null)
        return methodNode.WithModifiers(SyntaxFactory.TokenList(methodNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(methodNode.GetLeadingTrivia());
    FieldDeclarationSyntax fieldNode = node as FieldDeclarationSyntax;
    if (fieldNode != null)
        return fieldNode.WithModifiers(SyntaxFactory.TokenList(fieldNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(fieldNode.GetLeadingTrivia());
    PropertyDeclarationSyntax propertyNode = node as PropertyDeclarationSyntax;
    if (propertyNode != null)
        return propertyNode.WithModifiers(SyntaxFactory.TokenList(propertyNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(propertyNode.GetLeadingTrivia());
    IndexerDeclarationSyntax indexerNode = node as IndexerDeclarationSyntax;
    if (indexerNode != null)
        return indexerNode.WithModifiers(SyntaxFactory.TokenList(indexerNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(indexerNode.GetLeadingTrivia());
    EventDeclarationSyntax eventNode = node as EventDeclarationSyntax;
    if (eventNode != null)
        return eventNode.WithModifiers(SyntaxFactory.TokenList(eventNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(eventNode.GetLeadingTrivia());
    // Field-like events ("event EventHandler E;") are EventFieldDeclarationSyntax,
    // NOT EventDeclarationSyntax; without this case they were returned unmodified.
    EventFieldDeclarationSyntax eventFieldNode = node as EventFieldDeclarationSyntax;
    if (eventFieldNode != null)
        return eventFieldNode.WithModifiers(SyntaxFactory.TokenList(eventFieldNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(eventFieldNode.GetLeadingTrivia());
    ConstructorDeclarationSyntax ctrNode = node as ConstructorDeclarationSyntax;
    if (ctrNode != null)
        return ctrNode.WithModifiers(SyntaxFactory.TokenList(ctrNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(ctrNode.GetLeadingTrivia());
    OperatorDeclarationSyntax opNode = node as OperatorDeclarationSyntax;
    if (opNode != null)
        return opNode.WithModifiers(SyntaxFactory.TokenList(opNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(opNode.GetLeadingTrivia());
    ClassDeclarationSyntax classNode = node as ClassDeclarationSyntax;
    if (classNode != null)
        return classNode.WithModifiers(SyntaxFactory.TokenList(classNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(classNode.GetLeadingTrivia());
    InterfaceDeclarationSyntax interfaceNode = node as InterfaceDeclarationSyntax;
    if (interfaceNode != null)
        return interfaceNode.WithModifiers(SyntaxFactory.TokenList(interfaceNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(interfaceNode.GetLeadingTrivia());
    StructDeclarationSyntax structNode = node as StructDeclarationSyntax;
    if (structNode != null)
        return structNode.WithModifiers(SyntaxFactory.TokenList(structNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(structNode.GetLeadingTrivia());
    var enumNode = node as EnumDeclarationSyntax;
    if (enumNode != null)
        return enumNode.WithModifiers(SyntaxFactory.TokenList(enumNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(enumNode.GetLeadingTrivia());
    var delegateNode = node as DelegateDeclarationSyntax;
    if (delegateNode != null)
        return delegateNode.WithModifiers(SyntaxFactory.TokenList(delegateNode.Modifiers.Where(m => !m.IsKind(modifier))))
            .WithLeadingTrivia(delegateNode.GetLeadingTrivia());
    // Unsupported node kind: hand back the input untouched.
    return node;
}
}
}
| |
//
// ManagerTest.cs
//
// Author:
// Zachary Gramana <zack@xamarin.com>
//
// Copyright (c) 2014 Xamarin Inc
// Copyright (c) 2014 .NET Foundation
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//
// Copyright (c) 2014 Couchbase, Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
//
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Couchbase.Lite;
using NUnit.Framework;
using Sharpen;
using Couchbase.Lite.Db;
using System;
namespace Couchbase.Lite
{
/// <summary>
/// Tests for the Couchbase Lite <c>Manager</c>: database creation/lookup,
/// legacy-file upgrade, and database replacement from bundled assets.
/// Relies on LiteTestCase fixtures (manager, database, GetAsset, RootDirectory).
/// </summary>
public class ManagerTest : LiteTestCase
{
    /// <summary>
    /// Creating, opening, listing, closing and deleting a database named "foo".
    /// </summary>
    [Test]
    public void TestServer()
    {
        //to ensure this test is easily repeatable we will explicitly remove
        //any stale foo.cblite
        var mustExist = true;
        Database old = manager.GetDatabaseWithoutOpening("foo", mustExist);
        if (old != null)
        {
            old.Delete();
        }
        // mustExist == false: a handle is returned even though no file exists yet.
        mustExist = false;
        var db = manager.GetDatabaseWithoutOpening("foo", mustExist);
        Assert.IsNotNull(db);
        Assert.AreEqual("foo", db.Name);
        Assert.IsTrue(db.Path.StartsWith(GetServerPath()));
        Assert.IsFalse(db.Exists());
        // because foo doesn't exist yet
        List<string> databaseNames = manager.AllDatabaseNames.ToList();
        Assert.IsTrue(!databaseNames.Contains("foo"));
        // Opening creates the on-disk file; it must then appear in the listing.
        Assert.IsTrue(db.Open());
        Assert.IsTrue(db.Exists());
        databaseNames = manager.AllDatabaseNames.ToList();
        Assert.IsTrue(databaseNames.Contains("foo"));
        db.Close();
        db.Delete();
    }
    /// <summary>
    /// Copies a legacy-format database plus its old-style attachments directory
    /// into a fresh manager directory and verifies the upgrade migrates both.
    /// </summary>
    /// <exception cref="System.Exception"></exception>
    [Test]
    public void TestUpgradeOldDatabaseFiles()
    {
        // Build an isolated directory so leftovers from other tests can't interfere.
        var testDirName = "test-directory-" + Runtime.CurrentTimeMillis();
        var rootDirPath = RootDirectory.FullName;
        var testDirPath = Path.Combine(rootDirPath, testDirName);
        var testDirInfo = Directory.CreateDirectory(testDirPath);
        // Copy the bundled legacy database file into the test directory.
        var dbStream = GetAsset("withattachments.cblite");
        var destStream = File.OpenWrite(Path.Combine(testDirPath, "withattachments" + Manager.DatabaseSuffix));
        dbStream.CopyTo(destStream);
        dbStream.Dispose();
        destStream.Dispose();
        // Recreate the old-style "<db>/attachments" layout with one blob.
        var attStream = GetAsset("attachment.blob");
        Directory.CreateDirectory(Path.Combine(testDirPath, "withattachments/attachments"));
        destStream = File.OpenWrite(Path.Combine(testDirPath, "withattachments/attachments/356a192b7913b04c54574d18c28d46e6395428ab.blob"));
        attStream.CopyTo(destStream);
        destStream.Dispose();
        attStream.Dispose();
        // Restart with a manager rooted at the test directory; opening the DB
        // is expected to trigger the upgrade.
        StopCBLite();
        manager = new Manager(testDirInfo, Manager.DefaultOptions);
        var db = manager.GetDatabaseWithoutOpening("withattachments", true);
        int version = DatabaseUpgraderFactory.SchemaVersion(db.Path);
        Assert.IsTrue(version >= 101, "Upgrade failed");
        Assert.IsFalse(Directory.Exists(Path.Combine(testDirPath, "withattachments/attachments")), "Failed to remove old attachments dir");
        // Note the space: the upgraded layout uses a "<db> attachments" sibling directory.
        Assert.IsTrue(Directory.Exists(Path.Combine(testDirPath, "withattachments attachments")), "Failed to create new attachments dir");
    }
    /// <summary>
    /// ReplaceDatabase from an attachment-less asset; the replaced DB must
    /// contain the asset's documents.
    /// </summary>
    [Test]
    public void TestReplaceDatabaseNamedNoAttachments() {
        //Copy database from assets to local storage
        var dbStream = GetAsset("noattachments.cblite");
        manager.ReplaceDatabase("replaced", dbStream, null);
        dbStream.Dispose();
        //Now validate the number of documents in the replaced DB
        var db = manager.GetDatabase("replaced");
        Assert.AreEqual(10, db.DocumentCount);
        db.Dispose();
    }
    /// <summary>
    /// ReplaceDatabase with an explicit attachment map; the known document and
    /// its attachment content must be retrievable afterwards.
    /// </summary>
    [Test]
    public void TestReplaceDatabaseNamedWithAttachments() {
        var dbStream = GetAsset("withattachments.cblite");
        // Keys are blob-store file names (SHA-1 digest + ".blob").
        var attachments = new Dictionary<string, Stream>();
        attachments["356a192b7913b04c54574d18c28d46e6395428ab.blob"] = GetAsset("attachment.blob");
        manager.ReplaceDatabase("replaced", dbStream, attachments);
        dbStream.Dispose();
        //Validate the number of documents in the DB
        Assert.AreEqual(1, manager.GetDatabase("replaced").DocumentCount);
        var doc = manager.GetDatabase("replaced").GetExistingDocument("168e0c56-4588-4df4-8700-4d5115fa9c74");
        Assert.IsNotNull(doc);
        Assert.IsNotNull(doc.CurrentRevision.Attachments.ElementAt(0));
        Assert.IsNotNull(doc.CurrentRevision.Attachments.ElementAt(0).Content);
    }
    /// <summary>
    /// Imports zipped databases produced by iOS builds (1.0.4 and 1.1.0
    /// layouts) and verifies documents, attachments and view queries work.
    /// </summary>
    [Test]
    public void TestReplaceWithIosDatabase() {
        using (var assetStream = GetAsset("ios104.zip")) {
            manager.ReplaceDatabase("iosdb", assetStream, true);
        }
        var db = manager.GetExistingDatabase("iosdb");
        Assert.IsNotNull(db, "Failed to import database");
        var doc = db.GetExistingDocument("BC38EA44-E153-429A-A698-0CBE6B0090C4");
        Assert.IsNotNull(doc, "Failed to get doc from imported database");
        Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 2, "Failed to get attachments from imported database");
        using(var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
            Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
        }
        // A map-only view keyed on the known doc id should produce exactly one row.
        var view = db.GetView("view");
        view.SetMap((d, emit) => {
            if(d["_id"].Equals("BC38EA44-E153-429A-A698-0CBE6B0090C4")) {
                emit(d["_id"], null);
            }
        }, "1");
        var result = view.CreateQuery().Run();
        Assert.AreEqual(1, result.Count);
        db.Dispose();
        // Repeat for the 1.1.0-era archive, which uses a different doc id.
        using (var assetStream = GetAsset("ios110.zip")) {
            manager.ReplaceDatabase("iosdb", assetStream, true);
        }
        db = manager.GetExistingDatabase("iosdb");
        Assert.IsNotNull(db, "Failed to import database");
        doc = db.GetExistingDocument("-iTji_n2zmHpmgYecaRHqZE");
        Assert.IsNotNull(doc, "Failed to get doc from imported database");
        Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 2, "Failed to get attachments from imported database");
        using(var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
            Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
        }
        view = db.GetView("view");
        view.SetMap((d, emit) => {
            if(d["_id"].Equals("-iTji_n2zmHpmgYecaRHqZE")) {
                emit(d["_id"], null);
            }
        }, "1");
        result = view.CreateQuery().Run();
        Assert.AreEqual(1, result.Count);
    }
    /// <summary>
    /// Same as the iOS test but for Android-produced archives (1.0.4 / 1.1.0).
    /// </summary>
    [Test]
    public void TestReplaceWithAndroidDatabase() {
        using (var assetStream = GetAsset("android104.zip")) {
            manager.ReplaceDatabase("androiddb", assetStream, true);
        }
        var db = manager.GetExistingDatabase("androiddb");
        Assert.IsNotNull(db, "Failed to import database");
        var doc = db.GetExistingDocument("66ac306d-de93-46c8-b60f-946c16ac4a1d");
        Assert.IsNotNull(doc, "Failed to get doc from imported database");
        Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 1, "Failed to get attachments from imported database");
        using(var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
            Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
        }
        var view = db.GetView("view");
        view.SetMap((d, emit) => {
            if(d["_id"].Equals("66ac306d-de93-46c8-b60f-946c16ac4a1d")) {
                emit(d["_id"], null);
            }
        }, "1");
        var result = view.CreateQuery().Run();
        Assert.AreEqual(1, result.Count);
        db.Dispose();
        using (var assetStream = GetAsset("android110.zip")) {
            manager.ReplaceDatabase("androiddb", assetStream, true);
        }
        db = manager.GetExistingDatabase("androiddb");
        Assert.IsNotNull(db, "Failed to import database");
        doc = db.GetExistingDocument("d3e80747-2568-47c8-81e8-a04ba1b5c5d4");
        Assert.IsNotNull(doc, "Failed to get doc from imported database");
        Assert.AreEqual(doc.CurrentRevision.AttachmentNames.Count(), 1, "Failed to get attachments from imported database");
        using(var attachment = doc.CurrentRevision.Attachments.ElementAt(0)) {
            Assert.IsNotNull(attachment.Content, "Failed to get attachment data");
        }
        view = db.GetView("view");
        view.SetMap((d, emit) => {
            if(d["_id"].Equals("d3e80747-2568-47c8-81e8-a04ba1b5c5d4")) {
                emit(d["_id"], null);
            }
        }, "1");
        result = view.CreateQuery().Run();
        Assert.AreEqual(1, result.Count);
    }
    /// <summary>
    /// ReplaceDatabase over an existing database must throw (non-forced) and
    /// leave the original contents intact.
    /// </summary>
    [Test]
    public void TestReplaceFailure()
    {
        var doc = database.CreateDocument();
        doc.PutProperties(new Dictionary<string, object> {
            { "foo", "bar" }
        });
        Assert.Throws(typeof(ArgumentException), () =>
        {
            using (var assetStream = GetAsset("android104.zip")) {
                manager.ReplaceDatabase(database.Name, assetStream, false);
            }
        });
        // Verify that the original DB is intact
        doc = database.GetExistingDocument(doc.Id);
        Assert.IsNotNull(doc, "Failed to get original document");
        Assert.AreEqual("bar", doc.UserProperties["foo"]);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using Internal.Cryptography;
using Microsoft.Win32.SafeHandles;
using ErrorCode=Interop.NCrypt.ErrorCode;
namespace System.Security.Cryptography
{
/// <summary>
/// Minimal helper over the Windows NCrypt API for creating, importing and
/// exporting ephemeral keys in the Microsoft software key storage provider.
/// All keys produced here are marked as plaintext-exportable.
/// </summary>
internal static class CngKeyLite
{
    // Well-known NCrypt/BCrypt property-name strings (see ncrypt.h).
    private static class KeyPropertyName
    {
        internal const string ECCCurveName = "ECCCurveName"; // NCRYPT_ECC_CURVE_NAME
        internal const string ECCParameters = "ECCParameters"; // BCRYPT_ECC_PARAMETERS
        internal const string ExportPolicy = "Export Policy"; // NCRYPT_EXPORT_POLICY_PROPERTY
        internal const string Length = "Length"; // NCRYPT_LENGTH_PROPERTY
        internal const string PublicKeyLength = "PublicKeyLength"; // NCRYPT_PUBLIC_KEY_LENGTH (Win10+)
    }
    // Shared handle to the Microsoft software KSP, opened once at type init.
    private static readonly SafeNCryptProviderHandle s_microsoftSoftwareProviderHandle =
        OpenNCryptProvider("Microsoft Software Key Storage Provider"); // MS_KEY_STORAGE_PROVIDER
    /// <summary>
    /// Imports a key blob of the given NCrypt blob type into the software KSP
    /// and marks the resulting key as plaintext-exportable.
    /// </summary>
    internal static SafeNCryptKeyHandle ImportKeyBlob(string blobType, byte[] keyBlob)
    {
        SafeNCryptKeyHandle keyHandle;
        ErrorCode errorCode = Interop.NCrypt.NCryptImportKey(
            s_microsoftSoftwareProviderHandle,
            IntPtr.Zero,
            blobType,
            IntPtr.Zero,
            out keyHandle,
            keyBlob,
            keyBlob.Length,
            0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        Debug.Assert(keyHandle != null);
        SetExportable(keyHandle);
        return keyHandle;
    }
    /// <summary>
    /// Imports an ECC key blob for a named curve (delegates the NCrypt call to
    /// ECCng) and marks the resulting key as plaintext-exportable.
    /// </summary>
    internal static SafeNCryptKeyHandle ImportKeyBlob(string blobType, byte[] keyBlob, string curveName)
    {
        SafeNCryptKeyHandle keyHandle;
        keyHandle = ECCng.ImportKeyBlob(blobType, keyBlob, curveName, s_microsoftSoftwareProviderHandle);
        Debug.Assert(keyHandle != null);
        SetExportable(keyHandle);
        return keyHandle;
    }
    /// <summary>
    /// Exports the key as a blob of the requested type using the standard
    /// two-call pattern: query the required size, then fill the buffer.
    /// </summary>
    internal static byte[] ExportKeyBlob(SafeNCryptKeyHandle keyHandle, string blobType)
    {
        Debug.Assert(!keyHandle.IsInvalid);
        int numBytesNeeded;
        // First call with a null buffer only retrieves the required size.
        ErrorCode errorCode = Interop.NCrypt.NCryptExportKey(
            keyHandle,
            IntPtr.Zero,
            blobType,
            IntPtr.Zero,
            null,
            0,
            out numBytesNeeded,
            0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        byte[] buffer = new byte[numBytesNeeded];
        errorCode = Interop.NCrypt.NCryptExportKey(
            keyHandle,
            IntPtr.Zero,
            blobType,
            IntPtr.Zero,
            buffer,
            buffer.Length,
            out numBytesNeeded,
            0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        // The second call may write fewer bytes than the first one estimated.
        Array.Resize(ref buffer, numBytesNeeded);
        return buffer;
    }
    /// <summary>
    /// Creates a new exportable key of the given algorithm and bit length.
    /// </summary>
    internal static SafeNCryptKeyHandle GenerateNewExportableKey(string algorithm, int keySize)
    {
        // Despite NCryptCreatePersistedKey's name, passing a null key name
        // makes the key ephemeral.
        SafeNCryptKeyHandle keyHandle;
        ErrorCode errorCode = Interop.NCrypt.NCryptCreatePersistedKey(
            s_microsoftSoftwareProviderHandle,
            out keyHandle,
            algorithm,
            null,
            0,
            CngKeyCreationOptions.None);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        Debug.Assert(!keyHandle.IsInvalid);
        // Properties must be set before NCryptFinalizeKey makes the key usable.
        SetExportable(keyHandle);
        SetKeyLength(keyHandle, keySize);
        errorCode = Interop.NCrypt.NCryptFinalizeKey(keyHandle, 0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        return keyHandle;
    }
    /// <summary>
    /// Creates a new exportable ECC key on the given named curve.
    /// </summary>
    internal static SafeNCryptKeyHandle GenerateNewExportableKey(string algorithm, string curveName)
    {
        // Despite NCryptCreatePersistedKey's name, passing a null key name
        // makes the key ephemeral.
        SafeNCryptKeyHandle keyHandle;
        ErrorCode errorCode = Interop.NCrypt.NCryptCreatePersistedKey(
            s_microsoftSoftwareProviderHandle,
            out keyHandle,
            algorithm,
            null,
            0,
            CngKeyCreationOptions.None);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        Debug.Assert(!keyHandle.IsInvalid);
        SetExportable(keyHandle);
        SetCurveName(keyHandle, curveName);
        errorCode = Interop.NCrypt.NCryptFinalizeKey(keyHandle, 0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        return keyHandle;
    }
    /// <summary>
    /// Creates a new exportable ECC key from explicitly supplied curve parameters.
    /// </summary>
    internal static SafeNCryptKeyHandle GenerateNewExportableKey(string algorithm, ref ECCurve explicitCurve)
    {
        // Despite NCryptCreatePersistedKey's name, passing a null key name
        // makes the key ephemeral.
        SafeNCryptKeyHandle keyHandle;
        ErrorCode errorCode = Interop.NCrypt.NCryptCreatePersistedKey(
            s_microsoftSoftwareProviderHandle,
            out keyHandle,
            algorithm,
            null,
            0,
            CngKeyCreationOptions.None);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        Debug.Assert(!keyHandle.IsInvalid);
        SetExportable(keyHandle);
        // Serialize the explicit (prime-field) curve into a BCRYPT_ECC_PARAMETERS blob.
        byte[] parametersBlob = ECCng.GetPrimeCurveParameterBlob(ref explicitCurve);
        SetProperty(keyHandle, KeyPropertyName.ECCParameters, parametersBlob);
        errorCode = Interop.NCrypt.NCryptFinalizeKey(keyHandle, 0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        return keyHandle;
    }
    /// <summary>
    /// Sets the persisted export policy on the key to allow plaintext export.
    /// </summary>
    private static void SetExportable(SafeNCryptKeyHandle keyHandle)
    {
        Debug.Assert(!keyHandle.IsInvalid);
        CngExportPolicies exportPolicy = CngExportPolicies.AllowPlaintextExport;
        unsafe
        {
            ErrorCode errorCode = Interop.NCrypt.NCryptSetProperty(
                keyHandle,
                KeyPropertyName.ExportPolicy,
                &exportPolicy,
                sizeof(CngExportPolicies),
                CngPropertyOptions.Persist);
            if (errorCode != ErrorCode.ERROR_SUCCESS)
            {
                throw errorCode.ToCryptographicException();
            }
        }
    }
    /// <summary>
    /// Sets the key's bit length; only valid before the key is finalized.
    /// </summary>
    private static void SetKeyLength(SafeNCryptKeyHandle keyHandle, int keySize)
    {
        Debug.Assert(!keyHandle.IsInvalid);
        unsafe
        {
            ErrorCode errorCode = Interop.NCrypt.NCryptSetProperty(
                keyHandle,
                KeyPropertyName.Length,
                &keySize,
                sizeof(int),
                CngPropertyOptions.Persist);
            if (errorCode != ErrorCode.ERROR_SUCCESS)
            {
                throw errorCode.ToCryptographicException();
            }
        }
    }
    /// <summary>
    /// Returns the key's bit length, preferring the Win10+ PublicKeyLength
    /// property which reports the correct value for ECC keys.
    /// </summary>
    internal static unsafe int GetKeyLength(SafeNCryptKeyHandle keyHandle)
    {
        Debug.Assert(!keyHandle.IsInvalid);
        int keySize = 0;
        // Attempt to use PublicKeyLength first as it returns the correct value for ECC keys
        ErrorCode errorCode = Interop.NCrypt.NCryptGetIntProperty(
            keyHandle,
            KeyPropertyName.PublicKeyLength,
            ref keySize);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            // Fall back to Length (< Windows 10)
            errorCode = Interop.NCrypt.NCryptGetIntProperty(
                keyHandle,
                KeyPropertyName.Length,
                ref keySize);
        }
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        return keySize;
    }
    /// <summary>
    /// Opens the named NCrypt storage provider, throwing on failure.
    /// </summary>
    private static SafeNCryptProviderHandle OpenNCryptProvider(string providerName)
    {
        SafeNCryptProviderHandle providerHandle;
        ErrorCode errorCode = Interop.NCrypt.NCryptOpenStorageProvider(out providerHandle, providerName, 0);
        if (errorCode != ErrorCode.ERROR_SUCCESS)
        {
            throw errorCode.ToCryptographicException();
        }
        Debug.Assert(!providerHandle.IsInvalid);
        return providerHandle;
    }
    /// <summary>
    /// Returns a CNG key property.
    /// </summary>
    /// <returns>
    /// null - if property not defined on key.
    /// throws - for any other type of error.
    /// </returns>
    private static byte[] GetProperty(SafeNCryptHandle ncryptHandle, string propertyName, CngPropertyOptions options)
    {
        Debug.Assert(!ncryptHandle.IsInvalid);
        unsafe
        {
            // Two-call pattern: size query with a null buffer, then the real read.
            int numBytesNeeded;
            ErrorCode errorCode = Interop.NCrypt.NCryptGetProperty(ncryptHandle, propertyName, null, 0, out numBytesNeeded, options);
            if (errorCode == ErrorCode.NTE_NOT_FOUND)
                return null;
            if (errorCode != ErrorCode.ERROR_SUCCESS)
                throw errorCode.ToCryptographicException();
            byte[] propertyValue = new byte[numBytesNeeded];
            fixed (byte* pPropertyValue = propertyValue)
            {
                errorCode = Interop.NCrypt.NCryptGetProperty(ncryptHandle, propertyName, pPropertyValue, propertyValue.Length, out numBytesNeeded, options);
            }
            if (errorCode == ErrorCode.NTE_NOT_FOUND)
                return null;
            if (errorCode != ErrorCode.ERROR_SUCCESS)
                throw errorCode.ToCryptographicException();
            // Trim in case the second call reported a smaller size.
            Array.Resize(ref propertyValue, numBytesNeeded);
            return propertyValue;
        }
    }
    /// <summary>
    /// Retrieve a well-known CNG string property. (Note: desktop compat: this helper likes to return special values rather than throw exceptions for missing
    /// or ill-formatted property values. Only use it for well-known properties that are unlikely to be ill-formatted.)
    /// </summary>
    private static string GetPropertyAsString(SafeNCryptHandle ncryptHandle, string propertyName, CngPropertyOptions options)
    {
        Debug.Assert(!ncryptHandle.IsInvalid);
        byte[] value = GetProperty(ncryptHandle, propertyName, options);
        if (value == null)
            return null; // Desktop compat: return null if key not present.
        if (value.Length == 0)
            return string.Empty; // Desktop compat: return empty if property value is 0-length.
        unsafe
        {
            fixed (byte* pValue = value)
            {
                // The blob is a null-terminated UTF-16 string.
                string valueAsString = Marshal.PtrToStringUni((IntPtr)pValue);
                return valueAsString;
            }
        }
    }
    /// <summary>
    /// Returns the key's named curve, or null if the property is absent.
    /// </summary>
    internal static string GetCurveName(SafeNCryptHandle ncryptHandle)
    {
        Debug.Assert(!ncryptHandle.IsInvalid);
        return GetPropertyAsString(ncryptHandle, KeyPropertyName.ECCCurveName, CngPropertyOptions.None);
    }
    /// <summary>
    /// Sets the key's named curve property from a managed string.
    /// </summary>
    internal static void SetCurveName(SafeNCryptHandle keyHandle, string curveName)
    {
        unsafe
        {
            byte[] curveNameBytes = new byte[(curveName.Length + 1) * sizeof(char)]; // +1 to add trailing null
            System.Text.Encoding.Unicode.GetBytes(curveName, 0, curveName.Length, curveNameBytes, 0);
            SetProperty(keyHandle, KeyPropertyName.ECCCurveName, curveNameBytes);
        }
    }
    /// <summary>
    /// Sets a byte-blob property on the handle (non-persisted options).
    /// </summary>
    private static void SetProperty(SafeNCryptHandle ncryptHandle, string propertyName, byte[] value)
    {
        Debug.Assert(!ncryptHandle.IsInvalid);
        unsafe
        {
            fixed (byte* pBlob = value)
            {
                ErrorCode errorCode = Interop.NCrypt.NCryptSetProperty(
                    ncryptHandle,
                    propertyName,
                    pBlob,
                    value.Length,
                    CngPropertyOptions.None);
                if (errorCode != ErrorCode.ERROR_SUCCESS)
                {
                    throw errorCode.ToCryptographicException();
                }
            }
        }
    }
}
// Limited version of CngPropertyOptions from the Cng contract.
[Flags]
internal enum CngPropertyOptions : int
{
    None = 0,
    Persist = unchecked((int)0x80000000), //NCRYPT_PERSIST_FLAG (The property should be persisted.)
}
// Limited version of CngKeyCreationOptions from the Cng contract.
// Only the default (no flags) creation mode is needed here.
[Flags]
internal enum CngKeyCreationOptions : int
{
    None = 0x00000000,
}
// Limited version of CngKeyOpenOptions from the Cng contract.
// Only the default (no flags) open mode is needed here.
[Flags]
internal enum CngKeyOpenOptions : int
{
    None = 0x00000000,
}
// Limited version of CngExportPolicies from the Cng contract.
// Only plaintext export is required by CngKeyLite.
[Flags]
internal enum CngExportPolicies : int
{
    None = 0x00000000,
    AllowPlaintextExport = 0x00000002, // NCRYPT_ALLOW_PLAINTEXT_EXPORT_FLAG
}
}
// Internal, lightweight versions of the SafeNCryptHandle types which are public in CNG.
namespace Microsoft.Win32.SafeHandles
{
/// <summary>
/// Lightweight internal counterpart of the public SafeNCryptHandle: owns an
/// NCrypt handle and releases it with NCryptFreeObject.
/// </summary>
internal class SafeNCryptHandle : SafeHandle
{
    public SafeNCryptHandle()
        : base(IntPtr.Zero, ownsHandle: true)
    {
    }
    protected override bool ReleaseHandle()
    {
        ErrorCode errorCode = Interop.NCrypt.NCryptFreeObject(handle);
        bool success = (errorCode == ErrorCode.ERROR_SUCCESS);
        Debug.Assert(success);
        // Clear the raw handle so IsInvalid reports true after release.
        handle = IntPtr.Zero;
        return success;
    }
    public override bool IsInvalid
    {
        get { return handle == IntPtr.Zero; }
    }
}
// Marker type distinguishing key handles from other NCrypt object handles.
internal class SafeNCryptKeyHandle : SafeNCryptHandle
{
}
// Marker type distinguishing storage-provider handles from other NCrypt handles.
internal class SafeNCryptProviderHandle : SafeNCryptHandle
{
}
/// <summary>
/// A non-owning alias of an existing key handle: it pins the original alive
/// via DangerousAddRef and only releases that reference (never the native
/// handle itself) when disposed.
/// </summary>
internal class DuplicateSafeNCryptKeyHandle : SafeNCryptKeyHandle
{
    public DuplicateSafeNCryptKeyHandle(SafeNCryptKeyHandle original)
        : base()
    {
        bool success = false;
        original.DangerousAddRef(ref success);
        if (!success)
            throw new CryptographicException(); // DangerousAddRef() never actually sets success to false, so no need to expend a resource string here.
        // Share the raw native handle; ownership stays with 'original'.
        SetHandle(original.DangerousGetHandle());
        _original = original;
    }
    protected override bool ReleaseHandle()
    {
        // Drop our ref on the original; do NOT free the native handle here.
        _original.DangerousRelease();
        SetHandle(IntPtr.Zero);
        return true;
    }
    private readonly SafeNCryptKeyHandle _original;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Globalization;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Threading;
namespace System.Linq.Expressions
{
/// <summary>
/// The base type for all nodes in Expression Trees.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1506:AvoidExcessiveClassCoupling")]
public abstract partial class Expression
{
private static readonly CacheDict<Type, MethodInfo> s_lambdaDelegateCache = new CacheDict<Type, MethodInfo>(40);
private static volatile CacheDict<Type, Func<Expression, string, bool, ReadOnlyCollection<ParameterExpression>, LambdaExpression>> s_lambdaFactories;
// For 4.0, many frequently used Expression nodes have had their memory
// footprint reduced by removing the Type and NodeType fields. This has
// large performance benefits to all users of Expression Trees.
//
// To support the 3.5 protected constructor, we store the fields that
// used to be here in a ConditionalWeakTable.
// Holds the (NodeType, Type) pair for nodes constructed through the obsolete
// protected Expression(ExpressionType, Type) constructor; stored out-of-band
// in s_legacyCtorSupportTable to keep modern nodes small.
private class ExtensionInfo
{
    public ExtensionInfo(ExpressionType nodeType, Type type)
    {
        NodeType = nodeType;
        Type = type;
    }
    internal readonly ExpressionType NodeType;
    internal readonly Type Type;
}
private static ConditionalWeakTable<Expression, ExtensionInfo> s_legacyCtorSupportTable;
/// <summary>
/// Constructs a new instance of <see cref="Expression"/>.
/// </summary>
/// <param name="nodeType">The <see cref="ExpressionType"/> of the <see cref="Expression"/>.</param>
/// <param name="type">The <see cref="Type"/> of the <see cref="Expression"/>.</param>
[Obsolete("use a different constructor that does not take ExpressionType. Then override NodeType and Type properties to provide the values that would be specified to this constructor.")]
protected Expression(ExpressionType nodeType, Type type)
{
    // Can't enforce anything that V1 didn't
    // Lazily create the shared table; CompareExchange makes initialization
    // race-safe (exactly one table wins, losers are discarded).
    if (s_legacyCtorSupportTable == null)
    {
        Interlocked.CompareExchange(
            ref s_legacyCtorSupportTable,
            new ConditionalWeakTable<Expression, ExtensionInfo>(),
            comparand: null
        );
    }
    // Record this node's NodeType/Type out-of-band; the weak table lets the
    // entry die with the node.
    s_legacyCtorSupportTable.Add(this, new ExtensionInfo(nodeType, type));
}
/// <summary>
/// Constructs a new instance of <see cref="Expression"/>.
/// </summary>
protected Expression()
{
    // Nothing is stored on the base instance; subclasses supply NodeType and
    // Type by overriding the corresponding virtual properties.
}
/// <summary>
/// The <see cref="ExpressionType"/> of the <see cref="Expression"/>.
/// </summary>
public virtual ExpressionType NodeType
{
    get
    {
        // Nodes built via the obsolete constructor have their node type
        // stashed in the legacy support table.
        var table = s_legacyCtorSupportTable;
        ExtensionInfo extInfo;
        if (table != null && table.TryGetValue(this, out extInfo))
        {
            return extInfo.NodeType;
        }

        // the extension expression failed to override NodeType
        throw Error.ExtensionNodeMustOverrideProperty("Expression.NodeType");
    }
}
/// <summary>
/// The <see cref="Type"/> of the value represented by this <see cref="Expression"/>.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1721:PropertyNamesShouldNotMatchGetMethods")]
public virtual Type Type
{
    get
    {
        // Nodes built via the obsolete constructor have their static type
        // stashed in the legacy support table.
        var table = s_legacyCtorSupportTable;
        ExtensionInfo extInfo;
        if (table != null && table.TryGetValue(this, out extInfo))
        {
            return extInfo.Type;
        }

        // the extension expression failed to override Type
        throw Error.ExtensionNodeMustOverrideProperty("Expression.Type");
    }
}
/// <summary>
/// Indicates that the node can be reduced to a simpler node. If this
/// returns true, Reduce() can be called to produce the reduced form.
/// </summary>
// Base nodes are not reducible; reducible subclasses override this.
public virtual bool CanReduce
{
    get { return false; }
}
/// <summary>
/// Reduces this node to a simpler expression. If CanReduce returns
/// true, this should return a valid expression. This method is
/// allowed to return another node which itself must be reduced.
/// </summary>
/// <returns>The reduced expression.</returns>
public virtual Expression Reduce()
{
    if (!CanReduce)
    {
        // A non-reducible node reduces to itself.
        return this;
    }

    // Any node claiming CanReduce must provide its own Reduce override.
    throw Error.ReducibleMustOverrideReduce();
}
/// <summary>
/// Reduces the node and then calls the <see cref="ExpressionVisitor.Visit(Expression)"/> method passing the reduced expression.
/// Throws an exception if the node isn't reducible.
/// </summary>
/// <param name="visitor">An instance of <see cref="ExpressionVisitor"/>.</param>
/// <returns>The expression being visited, or an expression which should replace it in the tree.</returns>
/// <remarks>
/// Override this method to provide logic to walk the node's children.
/// A typical implementation will call visitor.Visit on each of its
/// children, and if any of them change, should return a new copy of
/// itself with the modified children.
/// </remarks>
protected internal virtual Expression VisitChildren(ExpressionVisitor visitor)
{
    if (CanReduce)
    {
        // Default behavior: reduce (with invariant checks) and visit the result.
        return visitor.Visit(ReduceAndCheck());
    }

    throw Error.MustBeReducible();
}
/// <summary>
/// Dispatches to the specific visit method for this node type. For
/// example, <see cref="MethodCallExpression"/> will call into
/// <see cref="ExpressionVisitor.VisitMethodCall"/>.
/// </summary>
/// <param name="visitor">The visitor to visit this node with.</param>
/// <returns>The result of visiting this node.</returns>
/// <remarks>
/// This default implementation for <see cref="ExpressionType.Extension"/>
/// nodes will call <see cref="ExpressionVisitor.VisitExtension"/>.
/// Override this method to call into a more specific method on a derived
/// visitor class of ExpressionVisitor. However, it should still
/// support unknown visitors by calling VisitExtension.
/// </remarks>
protected internal virtual Expression Accept(ExpressionVisitor visitor)
{
return visitor.VisitExtension(this);
}
/// <summary>
/// Reduces this node to a simpler expression, validating the result. If
/// CanReduce returns true, this returns a valid expression; the returned
/// node may itself need further reduction.
/// </summary>
/// <returns>The reduced expression.</returns>
/// <remarks>
/// Unlike Reduce, this method checks that the reduced node satisfies
/// certain invariants.
/// </remarks>
public Expression ReduceAndCheck()
{
    if (!CanReduce)
    {
        throw Error.MustBeReducible();
    }
    Expression reduced = Reduce();
    // Invariant 1: reduction must produce a new, non-null node.
    if (reduced == null || ReferenceEquals(reduced, this))
    {
        throw Error.MustReduceToDifferent();
    }
    // Invariant 2: the reduced node's result type must be assignable to the
    // type of the original node.
    if (!TypeUtils.AreReferenceAssignable(Type, reduced.Type))
    {
        throw Error.ReducedNotCompatible();
    }
    return reduced;
}
/// <summary>
/// Reduces the expression to a known node type (i.e. not an Extension node)
/// or simply returns the expression if it is already a known type.
/// </summary>
/// <returns>The reduced expression.</returns>
public Expression ReduceExtensions()
{
    // Keep reducing (with invariant checks) until we reach a node whose
    // type the rest of the system understands.
    Expression result = this;
    while (result.NodeType == ExpressionType.Extension)
    {
        result = result.ReduceAndCheck();
    }
    return result;
}
/// <summary>
/// Creates a <see cref="String"/> representation of the Expression.
/// </summary>
/// <returns>A <see cref="String"/> representation of the Expression.</returns>
public override string ToString()
{
    // Single-line rendering for display purposes; contrast with DebugView,
    // which produces the multi-line debugger form.
    return ExpressionStringBuilder.ExpressionToString(this);
}
/// <summary>
/// Gets a detailed, multi-line, debugger-friendly representation of the
/// Expression (distinct from <see cref="ToString"/>).
/// </summary>
private string DebugView
{
    // Note that this property is often accessed using reflection. As such it will have more dependencies than one
    // might surmise from its being internal, and removing it requires greater caution than with other internal methods.
    get
    {
        using (System.IO.StringWriter writer = new System.IO.StringWriter(CultureInfo.CurrentCulture))
        {
            DebugViewWriter.WriteTo(this, writer);
            return writer.ToString();
        }
    }
}
// Verifies that every expression in the list is readable, throwing if any
// is not. paramName is used to build the exception message.
private static void RequiresCanRead(IReadOnlyList<Expression> items, string paramName)
{
    Debug.Assert(items != null);
    // Called frequently; an index-based loop avoids allocating an enumerator.
    int itemCount = items.Count;
    for (int index = 0; index < itemCount; index++)
    {
        ExpressionUtils.RequiresCanRead(items[index], paramName, index);
    }
}
// Verifies that an expression may appear as an assignment target: a writable
// indexer, a settable property, a non-readonly/non-const field, or a parameter.
// Throws for anything else.
private static void RequiresCanWrite(Expression expression, string paramName)
{
    if (expression == null)
    {
        throw new ArgumentNullException(paramName);
    }
    bool writeable = false;
    switch (expression.NodeType)
    {
        case ExpressionType.Index:
            // An index without an Indexer is an array access, which is writable.
            PropertyInfo indexer = ((IndexExpression)expression).Indexer;
            writeable = indexer == null || indexer.CanWrite;
            break;
        case ExpressionType.MemberAccess:
            MemberInfo member = ((MemberExpression)expression).Member;
            PropertyInfo property = member as PropertyInfo;
            if (property != null)
            {
                writeable = property.CanWrite;
            }
            else
            {
                // Members are only properties or fields.
                Debug.Assert(member is FieldInfo);
                FieldInfo field = (FieldInfo)member;
                writeable = !(field.IsInitOnly || field.IsLiteral);
            }
            break;
        case ExpressionType.Parameter:
            writeable = true;
            break;
    }
    if (!writeable)
    {
        throw Error.ExpressionMustBeWriteable(paramName);
    }
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
/*============================================================
**
**
**
**
** Purpose: Generic hash table implementation
**
** #DictionaryVersusHashtableThreadSafety
** Hashtable has multiple reader/single writer (MR/SW) thread safety built into
** certain methods and properties, whereas Dictionary doesn't. If you're
** converting framework code that formerly used Hashtable to Dictionary, it's
** important to consider whether callers may have taken a dependence on MR/SW
** thread safety. If a reader writer lock is available, then that may be used
** with a Dictionary to get the same thread safety guarantee.
**
** Reader writer locks don't exist in silverlight, so we do the following as a
** result of removing non-generic collections from silverlight:
** 1. If the Hashtable was fully synchronized, then we replace it with a
** Dictionary with full locks around reads/writes (same thread safety
** guarantee).
** 2. Otherwise, the Hashtable has the default MR/SW thread safety behavior,
** so we do one of the following on a case-by-case basis:
** a. If the race condition can be addressed by rearranging the code and using a temp
** variable (for example, it's only populated immediately after created)
** then we address the race condition this way and use Dictionary.
** b. If there's concern about degrading performance with the increased
** locking, we ifdef with FEATURE_NONGENERIC_COLLECTIONS so we can at
** least use Hashtable in the desktop build, but Dictionary with full
** locks in silverlight builds. Note that this is heavier locking than
** MR/SW, but this is the only option without rewriting (or adding back)
** the reader writer lock.
** c. If there's no performance concern (e.g. debug-only code) we
** consistently replace Hashtable with Dictionary plus full locks to
** reduce complexity.
** d. Most of serialization is dead code in silverlight. Instead of updating
**      those Hashtable occurrences in serialization, we carved out references
** to serialization such that this code doesn't need to build in
** silverlight.
===========================================================*/
namespace System.Collections.Generic {
using System;
using System.Collections;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.Serialization;
using System.Security.Permissions;
[DebuggerTypeProxy(typeof(Mscorlib_DictionaryDebugView<,>))]
[DebuggerDisplay("Count = {Count}")]
[Serializable]
[System.Runtime.InteropServices.ComVisible(false)]
public class Dictionary<TKey,TValue>: IDictionary<TKey,TValue>, IDictionary, IReadOnlyDictionary<TKey, TValue>, ISerializable, IDeserializationCallback {
// A single slot in the entries array. Occupied slots chain to other slots in
// the same bucket through 'next'; freed slots are linked into the free list
// and marked with hashCode == -1.
private struct Entry {
    public int hashCode; // Lower 31 bits of hash code, -1 if unused
    public int next; // Index of next entry, -1 if last
    public TKey key; // Key of entry
    public TValue value; // Value of entry
}
private int[] buckets; // buckets[h % length] = index of first entry in that chain, or -1
private Entry[] entries; // flat entry storage; same length as buckets
private int count; // number of slots ever used (includes freed slots)
private int version; // bumped on every mutation; enumerators use it to detect changes
private int freeList; // head of the freed-slot chain, -1 if none
private int freeCount; // number of freed slots available for reuse
private IEqualityComparer<TKey> comparer; // never null after construction
private KeyCollection keys; // lazily created key view, cached
private ValueCollection values; // lazily created value view, cached
private Object _syncRoot; // lazily created object returned by ICollection.SyncRoot
// constants for serialization
private const String VersionName = "Version";
private const String HashSizeName = "HashSize"; // Must save buckets.Length
private const String KeyValuePairsName = "KeyValuePairs";
private const String ComparerName = "Comparer";
public Dictionary(): this(0, null) {}
public Dictionary(int capacity): this(capacity, null) {}
public Dictionary(IEqualityComparer<TKey> comparer): this(0, comparer) {}
// Core constructor. A zero capacity defers array allocation until the first
// insert (buckets stays null).
public Dictionary(int capacity, IEqualityComparer<TKey> comparer) {
    if (capacity < 0) ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.capacity);
    if (capacity > 0) Initialize(capacity);
    this.comparer = comparer ?? EqualityComparer<TKey>.Default;
#if FEATURE_RANDOMIZED_STRING_HASHING && FEATURE_CORECLR
    // Start string keys on the cheaper non-randomized comparer; Insert switches
    // to randomized hashing if a chain exceeds the collision threshold.
    if (HashHelpers.s_UseRandomizedStringHashing && comparer == EqualityComparer<string>.Default)
    {
        this.comparer = (IEqualityComparer<TKey>) NonRandomizedStringEqualityComparer.Default;
    }
#endif // FEATURE_RANDOMIZED_STRING_HASHING && FEATURE_CORECLR
}
public Dictionary(IDictionary<TKey,TValue> dictionary): this(dictionary, null) {}
// Copy constructor: pre-sizes the table from dictionary.Count so no resize
// happens while copying.
public Dictionary(IDictionary<TKey,TValue> dictionary, IEqualityComparer<TKey> comparer):
    this(dictionary != null? dictionary.Count: 0, comparer) {
    if( dictionary == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.dictionary);
    }
    // It is likely that the passed-in dictionary is Dictionary<TKey,TValue>. When this is the case,
    // avoid the enumerator allocation and overhead by looping through the entries array directly.
    // We only do this when dictionary is Dictionary<TKey,TValue> and not a subclass, to maintain
    // back-compat with subclasses that may have overridden the enumerator behavior.
    if (dictionary.GetType() == typeof(Dictionary<TKey,TValue>)) {
        Dictionary<TKey,TValue> d = (Dictionary<TKey,TValue>)dictionary;
        int count = d.count;
        Entry[] entries = d.entries;
        for (int i = 0; i < count; i++) {
            if (entries[i].hashCode >= 0) {
                Add(entries[i].key, entries[i].value);
            }
        }
        return;
    }
    foreach (KeyValuePair<TKey,TValue> pair in dictionary) {
        Add(pair.Key, pair.Value);
    }
}
// Deserialization constructor; see #DictionaryVersusHashtableThreadSafety notes above.
protected Dictionary(SerializationInfo info, StreamingContext context) {
    //We can't do anything with the keys and values until the entire graph has been deserialized
    //and we have a reasonable estimate that GetHashCode is not going to fail. For the time being,
    //we'll just cache this. The graph is not valid until OnDeserialization has been called.
    HashHelpers.SerializationInfoTable.Add(this, info);
}
// The comparer supplied at construction (or EqualityComparer<TKey>.Default).
public IEqualityComparer<TKey> Comparer {
    get {
        return comparer;
    }
}
// Live entries only: slots ever used minus slots sitting on the free list.
public int Count {
    get { return count - freeCount; }
}
// Key/value views are created lazily and cached. NOTE(review): the caching is
// not synchronized; see the #DictionaryVersusHashtableThreadSafety notes above.
public KeyCollection Keys {
    get {
        Contract.Ensures(Contract.Result<KeyCollection>() != null);
        if (keys == null) keys = new KeyCollection(this);
        return keys;
    }
}
ICollection<TKey> IDictionary<TKey, TValue>.Keys {
    get {
        if (keys == null) keys = new KeyCollection(this);
        return keys;
    }
}
IEnumerable<TKey> IReadOnlyDictionary<TKey, TValue>.Keys {
    get {
        if (keys == null) keys = new KeyCollection(this);
        return keys;
    }
}
public ValueCollection Values {
    get {
        Contract.Ensures(Contract.Result<ValueCollection>() != null);
        if (values == null) values = new ValueCollection(this);
        return values;
    }
}
ICollection<TValue> IDictionary<TKey, TValue>.Values {
    get {
        if (values == null) values = new ValueCollection(this);
        return values;
    }
}
IEnumerable<TValue> IReadOnlyDictionary<TKey, TValue>.Values {
    get {
        if (values == null) values = new ValueCollection(this);
        return values;
    }
}
// Getter throws KeyNotFoundException for a missing key (contrast TryGetValue);
// setter inserts or overwrites (add == false).
public TValue this[TKey key] {
    get {
        int i = FindEntry(key);
        if (i >= 0) return entries[i].value;
        ThrowHelper.ThrowKeyNotFoundException();
        // Unreachable (the helper always throws); satisfies the compiler.
        return default(TValue);
    }
    set {
        Insert(key, value, false);
    }
}
// Throws on a duplicate key (add == true).
public void Add(TKey key, TValue value) {
    Insert(key, value, true);
}
void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> keyValuePair) {
    Add(keyValuePair.Key, keyValuePair.Value);
}
// True only when the key is present AND the stored value equals the pair's
// value under EqualityComparer<TValue>.Default.
bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> keyValuePair) {
    int entryIndex = FindEntry(keyValuePair.Key);
    return entryIndex >= 0 && EqualityComparer<TValue>.Default.Equals(entries[entryIndex].value, keyValuePair.Value);
}
// Removes the entry only when both key and value match; a key whose stored
// value differs from the pair's value is left untouched.
bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> keyValuePair) {
    int entryIndex = FindEntry(keyValuePair.Key);
    if (entryIndex < 0 || !EqualityComparer<TValue>.Default.Equals(entries[entryIndex].value, keyValuePair.Value)) {
        return false;
    }
    Remove(keyValuePair.Key);
    return true;
}
// Resets the table in place; the allocated arrays keep their size.
public void Clear() {
    if (count > 0) {
        for (int i = 0; i < buckets.Length; i++) buckets[i] = -1;
        Array.Clear(entries, 0, count); // release key/value references for the GC
        freeList = -1;
        count = 0;
        freeCount = 0;
        version++; // invalidate live enumerators
    }
}
public bool ContainsKey(TKey key) {
    return FindEntry(key) >= 0;
}
// Linear O(n) scan over the entries array. A null search value is matched by
// a null check; otherwise EqualityComparer<TValue>.Default decides equality.
public bool ContainsValue(TValue value) {
    if (value == null) {
        for (int slot = 0; slot < count; slot++) {
            // Skip free-list slots (hashCode == -1).
            if (entries[slot].hashCode >= 0 && entries[slot].value == null) {
                return true;
            }
        }
        return false;
    }
    EqualityComparer<TValue> valueComparer = EqualityComparer<TValue>.Default;
    for (int slot = 0; slot < count; slot++) {
        if (entries[slot].hashCode >= 0 && valueComparer.Equals(entries[slot].value, value)) {
            return true;
        }
    }
    return false;
}
// Copies the live entries (in entry-array order) into array starting at index.
private void CopyTo(KeyValuePair<TKey,TValue>[] array, int index) {
    if (array == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
    }
    if (index < 0 || index > array.Length ) {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (array.Length - index < Count) {
        ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall);
    }
    int count = this.count;
    Entry[] entries = this.entries;
    for (int i = 0; i < count; i++) {
        if (entries[i].hashCode >= 0) {
            array[index++] = new KeyValuePair<TKey,TValue>(entries[i].key, entries[i].value);
        }
    }
}
public Enumerator GetEnumerator() {
    return new Enumerator(this, Enumerator.KeyValuePair);
}
// Explicit interface version returns the same struct enumerator (boxed).
IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator() {
    return new Enumerator(this, Enumerator.KeyValuePair);
}
// Serializes version, comparer, bucket count, and the live pairs as an array.
[System.Security.SecurityCritical]  // auto-generated_required
public virtual void GetObjectData(SerializationInfo info, StreamingContext context) {
    if (info==null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.info);
    }
    info.AddValue(VersionName, version);
#if FEATURE_RANDOMIZED_STRING_HASHING
    info.AddValue(ComparerName, HashHelpers.GetEqualityComparerForSerialization(comparer), typeof(IEqualityComparer<TKey>));
#else
    info.AddValue(ComparerName, comparer, typeof(IEqualityComparer<TKey>));
#endif
    info.AddValue(HashSizeName, buckets == null ? 0 : buckets.Length); //This is the length of the bucket array.
    if( buckets != null) {
        KeyValuePair<TKey, TValue>[] array = new KeyValuePair<TKey, TValue>[Count];
        CopyTo(array, 0);
        info.AddValue(KeyValuePairsName, array, typeof(KeyValuePair<TKey, TValue>[]));
    }
}
// Core lookup: hash -> bucket -> walk the collision chain. Returns the entry
// index, or -1 when the key is absent (or the table was never allocated).
private int FindEntry(TKey key) {
    if( key == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key);
    }
    if (buckets != null) {
        int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF; // strip sign bit; -1 is reserved for free slots
        for (int i = buckets[hashCode % buckets.Length]; i >= 0; i = entries[i].next) {
            if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key)) return i;
        }
    }
    return -1;
}
// Allocates buckets/entries at the next prime >= capacity; all buckets empty.
private void Initialize(int capacity) {
    int size = HashHelpers.GetPrime(capacity);
    buckets = new int[size];
    for (int i = 0; i < buckets.Length; i++) buckets[i] = -1;
    entries = new Entry[size];
    freeList = -1;
}
// Adds (add == true, throws on duplicate) or sets (add == false, overwrites)
// the value for key. Reuses free-list slots before growing; resizes when the
// entries array is full.
private void Insert(TKey key, TValue value, bool add) {
    if( key == null ) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key);
    }
    if (buckets == null) Initialize(0); // lazy allocation for zero-capacity dictionaries
    int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
    int targetBucket = hashCode % buckets.Length;
#if FEATURE_RANDOMIZED_STRING_HASHING
    int collisionCount = 0;
#endif
    // First pass: does the key already exist in this bucket's chain?
    for (int i = buckets[targetBucket]; i >= 0; i = entries[i].next) {
        if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key)) {
            if (add) {
#if FEATURE_CORECLR
                ThrowHelper.ThrowAddingDuplicateWithKeyArgumentException(key);
#else
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_AddingDuplicate);
#endif
            }
            entries[i].value = value;
            version++;
            return;
        }
#if FEATURE_RANDOMIZED_STRING_HASHING
        collisionCount++;
#endif
    }
    // Pick a slot: reuse the free list first, otherwise append (growing if full).
    int index;
    if (freeCount > 0) {
        index = freeList;
        freeList = entries[index].next;
        freeCount--;
    }
    else {
        if (count == entries.Length)
        {
            Resize();
            targetBucket = hashCode % buckets.Length; // bucket count changed
        }
        index = count;
        count++;
    }
    // Link the new entry at the head of its bucket chain.
    entries[index].hashCode = hashCode;
    entries[index].next = buckets[targetBucket];
    entries[index].key = key;
    entries[index].value = value;
    buckets[targetBucket] = index;
    version++;
#if FEATURE_RANDOMIZED_STRING_HASHING
#if FEATURE_CORECLR
    // In case we hit the collision threshold we'll need to switch to the comparer which is using randomized string hashing
    // in this case will be EqualityComparer<string>.Default.
    // Note, randomized string hashing is turned on by default on coreclr so EqualityComparer<string>.Default will
    // be using randomized string hashing
    if (collisionCount > HashHelpers.HashCollisionThreshold && comparer == NonRandomizedStringEqualityComparer.Default)
    {
        comparer = (IEqualityComparer<TKey>) EqualityComparer<string>.Default;
        Resize(entries.Length, true);
    }
#else
    if(collisionCount > HashHelpers.HashCollisionThreshold && HashHelpers.IsWellKnownEqualityComparer(comparer))
    {
        comparer = (IEqualityComparer<TKey>) HashHelpers.GetRandomizedEqualityComparer(comparer);
        Resize(entries.Length, true);
    }
#endif // FEATURE_CORECLR
#endif
}
// Rebuilds the table from the state cached by the serialization constructor.
public virtual void OnDeserialization(Object sender) {
    SerializationInfo siInfo;
    HashHelpers.SerializationInfoTable.TryGetValue(this, out siInfo);
    if (siInfo==null) {
        // It might be necessary to call OnDeserialization from a container if the container object also implements
        // OnDeserialization. However, remoting will call OnDeserialization again.
        // We can return immediately if this function is called twice.
        // Note we remove the serialization info from the table at the end of this method.
        return;
    }
    int realVersion = siInfo.GetInt32(VersionName);
    int hashsize = siInfo.GetInt32(HashSizeName);
    comparer = (IEqualityComparer<TKey>)siInfo.GetValue(ComparerName, typeof(IEqualityComparer<TKey>));
    if( hashsize != 0) {
        buckets = new int[hashsize];
        for (int i = 0; i < buckets.Length; i++) buckets[i] = -1;
        entries = new Entry[hashsize];
        freeList = -1;
        KeyValuePair<TKey, TValue>[] array = (KeyValuePair<TKey, TValue>[])
            siInfo.GetValue(KeyValuePairsName, typeof(KeyValuePair<TKey, TValue>[]));
        if (array==null) {
            ThrowHelper.ThrowSerializationException(ExceptionResource.Serialization_MissingKeys);
        }
        // Re-insert each pair so hash codes are computed with the live comparer.
        for (int i=0; i<array.Length; i++) {
            if ( array[i].Key == null) {
                ThrowHelper.ThrowSerializationException(ExceptionResource.Serialization_NullKey);
            }
            Insert(array[i].Key, array[i].Value, true);
        }
    }
    else {
        buckets = null; // empty dictionary was serialized; stay unallocated
    }
    version = realVersion;
    HashHelpers.SerializationInfoTable.Remove(this);
}
private void Resize() {
    Resize(HashHelpers.ExpandPrime(count), false);
}
// Grows to newSize and rebuilds every bucket chain. When forceNewHashCodes is
// true (comparer switch), hash codes are recomputed before re-bucketing.
private void Resize(int newSize, bool forceNewHashCodes) {
    Contract.Assert(newSize >= entries.Length);
    int[] newBuckets = new int[newSize];
    for (int i = 0; i < newBuckets.Length; i++) newBuckets[i] = -1;
    Entry[] newEntries = new Entry[newSize];
    Array.Copy(entries, 0, newEntries, 0, count);
    if(forceNewHashCodes) {
        for (int i = 0; i < count; i++) {
            if(newEntries[i].hashCode != -1) {
                newEntries[i].hashCode = (comparer.GetHashCode(newEntries[i].key) & 0x7FFFFFFF);
            }
        }
    }
    // Re-link all live entries into the new, larger bucket array.
    for (int i = 0; i < count; i++) {
        if (newEntries[i].hashCode >= 0) {
            int bucket = newEntries[i].hashCode % newSize;
            newEntries[i].next = newBuckets[bucket];
            newBuckets[bucket] = i;
        }
    }
    buckets = newBuckets;
    entries = newEntries;
}
// Unlinks the entry from its bucket chain, clears it, and pushes the slot onto
// the free list. Returns false when the key is absent.
public bool Remove(TKey key) {
    if(key == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key);
    }
    if (buckets != null) {
        int hashCode = comparer.GetHashCode(key) & 0x7FFFFFFF;
        int bucket = hashCode % buckets.Length;
        int last = -1; // previous entry in the chain; -1 means head
        for (int i = buckets[bucket]; i >= 0; last = i, i = entries[i].next) {
            if (entries[i].hashCode == hashCode && comparer.Equals(entries[i].key, key)) {
                if (last < 0) {
                    buckets[bucket] = entries[i].next;
                }
                else {
                    entries[last].next = entries[i].next;
                }
                entries[i].hashCode = -1; // mark slot free
                entries[i].next = freeList;
                entries[i].key = default(TKey); // drop references for the GC
                entries[i].value = default(TValue);
                freeList = i;
                freeCount++;
                version++;
                return true;
            }
        }
    }
    return false;
}
// Non-throwing lookup: sets value and returns true when the key exists,
// otherwise sets value to default(TValue) and returns false.
public bool TryGetValue(TKey key, out TValue value) {
    int entryIndex = FindEntry(key);
    if (entryIndex < 0) {
        value = default(TValue);
        return false;
    }
    value = entries[entryIndex].value;
    return true;
}
// Convenience for internal callers converted from Hashtable, which combined
// "key missing" and "key present with null value" checks. Behaves like
// TryGetValue without the out parameter.
internal TValue GetValueOrDefault(TKey key) {
    int entryIndex = FindEntry(key);
    return entryIndex >= 0 ? entries[entryIndex].value : default(TValue);
}
bool ICollection<KeyValuePair<TKey,TValue>>.IsReadOnly {
    get { return false; }
}
void ICollection<KeyValuePair<TKey,TValue>>.CopyTo(KeyValuePair<TKey,TValue>[] array, int index) {
    CopyTo(array, index);
}
// Non-generic copy: accepts KeyValuePair[], DictionaryEntry[], or object[];
// anything else is rejected as an invalid array type.
void ICollection.CopyTo(Array array, int index) {
    if (array == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
    }
    if (array.Rank != 1) {
        ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported);
    }
    if( array.GetLowerBound(0) != 0 ) {
        ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_NonZeroLowerBound);
    }
    if (index < 0 || index > array.Length) {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (array.Length - index < Count) {
        ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall);
    }
    KeyValuePair<TKey,TValue>[] pairs = array as KeyValuePair<TKey,TValue>[];
    if (pairs != null) {
        CopyTo(pairs, index);
    }
    else if( array is DictionaryEntry[]) {
        DictionaryEntry[] dictEntryArray = array as DictionaryEntry[];
        Entry[] entries = this.entries;
        for (int i = 0; i < count; i++) {
            if (entries[i].hashCode >= 0) {
                dictEntryArray[index++] = new DictionaryEntry(entries[i].key, entries[i].value);
            }
        }
    }
    else {
        object[] objects = array as object[];
        if (objects == null) {
            ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
        }
        try {
            int count = this.count;
            Entry[] entries = this.entries;
            for (int i = 0; i < count; i++) {
                if (entries[i].hashCode >= 0) {
                    objects[index++] = new KeyValuePair<TKey,TValue>(entries[i].key, entries[i].value);
                }
            }
        }
        catch(ArrayTypeMismatchException) {
            // e.g. a string[] passed as object[]; report as invalid array type.
            ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
        }
    }
}
IEnumerator IEnumerable.GetEnumerator() {
    return new Enumerator(this, Enumerator.KeyValuePair);
}
bool ICollection.IsSynchronized {
    get { return false; }
}
// Lazily creates the sync-root object; the CompareExchange makes the lazy
// creation itself race-free even though the dictionary is not synchronized.
object ICollection.SyncRoot {
    get {
        if( _syncRoot == null) {
            System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
        }
        return _syncRoot;
    }
}
bool IDictionary.IsFixedSize {
    get { return false; }
}
bool IDictionary.IsReadOnly {
    get { return false; }
}
ICollection IDictionary.Keys {
    get { return (ICollection)Keys; }
}
ICollection IDictionary.Values {
    get { return (ICollection)Values; }
}
// Non-generic indexer: getter returns null (rather than throwing) for a
// missing or type-incompatible key; setter validates both casts and reports
// the failing one.
object IDictionary.this[object key] {
    get {
        if( IsCompatibleKey(key)) {
            int i = FindEntry((TKey)key);
            if (i >= 0) {
                return entries[i].value;
            }
        }
        return null;
    }
    set {
        if (key == null)
        {
            ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key);
        }
        ThrowHelper.IfNullAndNullsAreIllegalThenThrow<TValue>(value, ExceptionArgument.value);
        try {
            TKey tempKey = (TKey)key;
            // Inner try distinguishes a bad value cast from a bad key cast.
            try {
                this[tempKey] = (TValue)value;
            }
            catch (InvalidCastException) {
                ThrowHelper.ThrowWrongValueTypeArgumentException(value, typeof(TValue));
            }
        }
        catch (InvalidCastException) {
            ThrowHelper.ThrowWrongKeyTypeArgumentException(key, typeof(TKey));
        }
    }
}
// Throws for null; otherwise true iff the object is a TKey.
private static bool IsCompatibleKey(object key) {
    if( key == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key);
    }
    return (key is TKey);
}
void IDictionary.Add(object key, object value) {
    if (key == null)
    {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.key);
    }
    ThrowHelper.IfNullAndNullsAreIllegalThenThrow<TValue>(value, ExceptionArgument.value);
    try {
        TKey tempKey = (TKey)key;
        // Inner try distinguishes a bad value cast from a bad key cast.
        try {
            Add(tempKey, (TValue)value);
        }
        catch (InvalidCastException) {
            ThrowHelper.ThrowWrongValueTypeArgumentException(value, typeof(TValue));
        }
    }
    catch (InvalidCastException) {
        ThrowHelper.ThrowWrongKeyTypeArgumentException(key, typeof(TKey));
    }
}
bool IDictionary.Contains(object key) {
    if(IsCompatibleKey(key)) {
        return ContainsKey((TKey)key);
    }
    return false;
}
IDictionaryEnumerator IDictionary.GetEnumerator() {
    return new Enumerator(this, Enumerator.DictEntry);
}
void IDictionary.Remove(object key) {
    if(IsCompatibleKey(key)) {
        Remove((TKey)key);
    }
}
// Struct enumerator over the dictionary. Snapshots 'version' at construction
// and fails fast (InvalidOperation) if the dictionary mutates during
// enumeration. getEnumeratorRetType selects what the non-generic Current
// returns (DictionaryEntry vs KeyValuePair) for IDictionary callers.
[Serializable]
public struct Enumerator: IEnumerator<KeyValuePair<TKey,TValue>>,
    IDictionaryEnumerator
{
    private Dictionary<TKey,TValue> dictionary;
    private int version; // dictionary.version at construction
    private int index; // 0 = before first; count + 1 = finished
    private KeyValuePair<TKey,TValue> current;
    private int getEnumeratorRetType;  // What should Enumerator.Current return?
    internal const int DictEntry = 1;
    internal const int KeyValuePair = 2;
    internal Enumerator(Dictionary<TKey,TValue> dictionary, int getEnumeratorRetType) {
        this.dictionary = dictionary;
        version = dictionary.version;
        index = 0;
        this.getEnumeratorRetType = getEnumeratorRetType;
        current = new KeyValuePair<TKey, TValue>();
    }
    public bool MoveNext() {
        if (version != dictionary.version) {
            ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
        }
        // Use unsigned comparison since we set index to dictionary.count+1 when the enumeration ends.
        // dictionary.count+1 could be negative if dictionary.count is Int32.MaxValue
        while ((uint)index < (uint)dictionary.count) {
            if (dictionary.entries[index].hashCode >= 0) {
                current = new KeyValuePair<TKey, TValue>(dictionary.entries[index].key, dictionary.entries[index].value);
                index++;
                return true;
            }
            index++; // skip free-list slots
        }
        index = dictionary.count + 1;
        current = new KeyValuePair<TKey, TValue>();
        return false;
    }
    public KeyValuePair<TKey,TValue> Current {
        get { return current; }
    }
    public void Dispose() {
    }
    // Non-generic Current throws when positioned before the first or after the
    // last element; the generic Current (above) does not, for perf.
    object IEnumerator.Current {
        get {
            if( index == 0 || (index == dictionary.count + 1)) {
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
            }
            if (getEnumeratorRetType == DictEntry) {
                return new System.Collections.DictionaryEntry(current.Key, current.Value);
            } else {
                return new KeyValuePair<TKey, TValue>(current.Key, current.Value);
            }
        }
    }
    void IEnumerator.Reset() {
        if (version != dictionary.version) {
            ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
        }
        index = 0;
        current = new KeyValuePair<TKey, TValue>();
    }
    DictionaryEntry IDictionaryEnumerator.Entry {
        get {
            if( index == 0 || (index == dictionary.count + 1)) {
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
            }
            return new DictionaryEntry(current.Key, current.Value);
        }
    }
    object IDictionaryEnumerator.Key {
        get {
            if( index == 0 || (index == dictionary.count + 1)) {
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
            }
            return current.Key;
        }
    }
    object IDictionaryEnumerator.Value {
        get {
            if( index == 0 || (index == dictionary.count + 1)) {
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
            }
            return current.Value;
        }
    }
}
// Read-only live view over the dictionary's keys. Mutating members throw
// NotSupportedException; enumeration reflects the dictionary's current state.
[DebuggerTypeProxy(typeof(Mscorlib_DictionaryKeyCollectionDebugView<,>))]
[DebuggerDisplay("Count = {Count}")]
[Serializable]
public sealed class KeyCollection: ICollection<TKey>, ICollection, IReadOnlyCollection<TKey>
{
    private Dictionary<TKey,TValue> dictionary;
    public KeyCollection(Dictionary<TKey,TValue> dictionary) {
        if (dictionary == null) {
            ThrowHelper.ThrowArgumentNullException(ExceptionArgument.dictionary);
        }
        this.dictionary = dictionary;
    }
    public Enumerator GetEnumerator() {
        return new Enumerator(dictionary);
    }
    public void CopyTo(TKey[] array, int index) {
        if (array == null) {
            ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
        }
        if (index < 0 || index > array.Length) {
            ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (array.Length - index < dictionary.Count) {
            ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall);
        }
        int count = dictionary.count;
        Entry[] entries = dictionary.entries;
        for (int i = 0; i < count; i++) {
            // Copy only live entries (hashCode == -1 marks a free slot).
            if (entries[i].hashCode >= 0) array[index++] = entries[i].key;
        }
    }
    public int Count {
        get { return dictionary.Count; }
    }
    bool ICollection<TKey>.IsReadOnly {
        get { return true; }
    }
    void ICollection<TKey>.Add(TKey item){
        ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_KeyCollectionSet);
    }
    void ICollection<TKey>.Clear(){
        ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_KeyCollectionSet);
    }
    bool ICollection<TKey>.Contains(TKey item){
        return dictionary.ContainsKey(item);
    }
    bool ICollection<TKey>.Remove(TKey item){
        ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_KeyCollectionSet);
        return false;
    }
    IEnumerator<TKey> IEnumerable<TKey>.GetEnumerator() {
        return new Enumerator(dictionary);
    }
    IEnumerator IEnumerable.GetEnumerator() {
        return new Enumerator(dictionary);
    }
    // Non-generic copy: accepts TKey[] or object[]; anything else is rejected.
    void ICollection.CopyTo(Array array, int index) {
        if (array==null) {
            ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
        }
        if (array.Rank != 1) {
            ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported);
        }
        if( array.GetLowerBound(0) != 0 ) {
            ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_NonZeroLowerBound);
        }
        if (index < 0 || index > array.Length) {
            ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
        }
        if (array.Length - index < dictionary.Count) {
            ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall);
        }
        TKey[] keys = array as TKey[];
        if (keys != null) {
            CopyTo(keys, index);
        }
        else {
            object[] objects = array as object[];
            if (objects == null) {
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
            }
            int count = dictionary.count;
            Entry[] entries = dictionary.entries;
            try {
                for (int i = 0; i < count; i++) {
                    if (entries[i].hashCode >= 0) objects[index++] = entries[i].key;
                }
            }
            catch(ArrayTypeMismatchException) {
                ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
            }
        }
    }
    bool ICollection.IsSynchronized {
        get { return false; }
    }
    Object ICollection.SyncRoot {
        get { return ((ICollection)dictionary).SyncRoot; }
    }
    // Struct enumerator; version-checked like the main Dictionary enumerator.
    [Serializable]
    public struct Enumerator : IEnumerator<TKey>, System.Collections.IEnumerator
    {
        private Dictionary<TKey, TValue> dictionary;
        private int index; // 0 = before first; count + 1 = finished
        private int version; // dictionary.version at construction
        private TKey currentKey;
        internal Enumerator(Dictionary<TKey, TValue> dictionary) {
            this.dictionary = dictionary;
            version = dictionary.version;
            index = 0;
            currentKey = default(TKey);
        }
        public void Dispose() {
        }
        public bool MoveNext() {
            if (version != dictionary.version) {
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
            }
            while ((uint)index < (uint)dictionary.count) {
                if (dictionary.entries[index].hashCode >= 0) {
                    currentKey = dictionary.entries[index].key;
                    index++;
                    return true;
                }
                index++; // skip free-list slots
            }
            index = dictionary.count + 1;
            currentKey = default(TKey);
            return false;
        }
        public TKey Current {
            get {
                return currentKey;
            }
        }
        // Non-generic Current throws when positioned outside the sequence.
        Object System.Collections.IEnumerator.Current {
            get {
                if( index == 0 || (index == dictionary.count + 1)) {
                    ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
                }
                return currentKey;
            }
        }
        void System.Collections.IEnumerator.Reset() {
            if (version != dictionary.version) {
                ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
            }
            index = 0;
            currentKey = default(TKey);
        }
    }
}
[DebuggerTypeProxy(typeof(Mscorlib_DictionaryValueCollectionDebugView<,>))]
[DebuggerDisplay("Count = {Count}")]
[Serializable]
public sealed class ValueCollection: ICollection<TValue>, ICollection, IReadOnlyCollection<TValue>
{
private Dictionary<TKey,TValue> dictionary;
public ValueCollection(Dictionary<TKey,TValue> dictionary) {
    if (dictionary == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.dictionary);
    }
    this.dictionary = dictionary;
}
public Enumerator GetEnumerator() {
    return new Enumerator(dictionary);
}
public void CopyTo(TValue[] array, int index) {
    if (array == null) {
        ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
    }
    if (index < 0 || index > array.Length) {
        ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
    }
    if (array.Length - index < dictionary.Count) {
        ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall);
    }
    int count = dictionary.count;
    Entry[] entries = dictionary.entries;
    for (int i = 0; i < count; i++) {
        // Copy only live entries (hashCode == -1 marks a free slot).
        if (entries[i].hashCode >= 0) array[index++] = entries[i].value;
    }
}
public int Count {
    get { return dictionary.Count; }
}
bool ICollection<TValue>.IsReadOnly {
    get { return true; }
}
// Mutating members are unsupported: this is a read-only live view.
void ICollection<TValue>.Add(TValue item){
    ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_ValueCollectionSet);
}
bool ICollection<TValue>.Remove(TValue item){
    ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_ValueCollectionSet);
    return false;
}
void ICollection<TValue>.Clear(){
    ThrowHelper.ThrowNotSupportedException(ExceptionResource.NotSupported_ValueCollectionSet);
}
bool ICollection<TValue>.Contains(TValue item){
    return dictionary.ContainsValue(item); // O(n) scan
}
IEnumerator<TValue> IEnumerable<TValue>.GetEnumerator() {
    return new Enumerator(dictionary);
}
IEnumerator IEnumerable.GetEnumerator() {
    return new Enumerator(dictionary);
}
void ICollection.CopyTo(Array array, int index) {
if (array == null) {
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
}
if (array.Rank != 1) {
ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_RankMultiDimNotSupported);
}
if( array.GetLowerBound(0) != 0 ) {
ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_NonZeroLowerBound);
}
if (index < 0 || index > array.Length) {
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index, ExceptionResource.ArgumentOutOfRange_NeedNonNegNum);
}
if (array.Length - index < dictionary.Count)
ThrowHelper.ThrowArgumentException(ExceptionResource.Arg_ArrayPlusOffTooSmall);
TValue[] values = array as TValue[];
if (values != null) {
CopyTo(values, index);
}
else {
object[] objects = array as object[];
if (objects == null) {
ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
}
int count = dictionary.count;
Entry[] entries = dictionary.entries;
try {
for (int i = 0; i < count; i++) {
if (entries[i].hashCode >= 0) objects[index++] = entries[i].value;
}
}
catch(ArrayTypeMismatchException) {
ThrowHelper.ThrowArgumentException(ExceptionResource.Argument_InvalidArrayType);
}
}
}
bool ICollection.IsSynchronized {
get { return false; }
}
Object ICollection.SyncRoot {
get { return ((ICollection)dictionary).SyncRoot; }
}
[Serializable]
public struct Enumerator : IEnumerator<TValue>, System.Collections.IEnumerator
{
private Dictionary<TKey, TValue> dictionary;
private int index;
private int version;
private TValue currentValue;
internal Enumerator(Dictionary<TKey, TValue> dictionary) {
this.dictionary = dictionary;
version = dictionary.version;
index = 0;
currentValue = default(TValue);
}
public void Dispose() {
}
public bool MoveNext() {
if (version != dictionary.version) {
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
}
while ((uint)index < (uint)dictionary.count) {
if (dictionary.entries[index].hashCode >= 0) {
currentValue = dictionary.entries[index].value;
index++;
return true;
}
index++;
}
index = dictionary.count + 1;
currentValue = default(TValue);
return false;
}
public TValue Current {
get {
return currentValue;
}
}
Object System.Collections.IEnumerator.Current {
get {
if( index == 0 || (index == dictionary.count + 1)) {
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumOpCantHappen);
}
return currentValue;
}
}
void System.Collections.IEnumerator.Reset() {
if (version != dictionary.version) {
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_EnumFailedVersion);
}
index = 0;
currentValue = default(TValue);
}
}
}
}
}
| |
using System;
using System.Text;
using VisualHint.SmartPropertyGrid;
using System.Globalization;
using System.Resources;
using System.Reflection;
using System.Collections.Generic;
using System.Xml;
#region Notes on File
//These classes take a Runfile in XML form and convert it into a Runfile object containing strings.
//1.) RunfileXMLtoString
//2.) RunfileString
//3.) Runfile
//-------------------------------------------------------------------->
/*
Runfile XML to String Object
1.) Meta Data
2.) Module
3.) Input
4.) Output
5.) Parameter (info as strings)
*/
#endregion
namespace RunfileEditor
{
#region Notes on EarlabRunfile Class
// Runfile Data Structure
// ----> (a class RunfileInformation with data properties) a single RunfileInformation
// ----> (a class that is the list of Module Descriptors) used to create data params
// ----> (a class EarlabModule w/ string I-O-P and data I-O-P) a list of EarlabModules
// ----> (a Module Count, uses RF ModuleDescriptors) a count of the # of Modules
// ---------------->(Module: MetaData, list of Inputs, list of Outputs, list of Params)
//
//
//This is the RunfileInformation
//Contains the metadata about the Runfile Model that is used
#endregion
public class EarlabRunfile
{
    #region Data Members
    /// <summary>The validated runfile XML document, when one has been attached.</summary>
    public XmlDocument RunfileXML;
    /// <summary>The runfile's Modules node, when one has been attached.</summary>
    public XmlNode RunfileModulesNode;
    /// <summary>The runfile XML document title/text in string form.</summary>
    public string RunfileXMLString;
    // Tracks whether the runfile has been modified since it was loaded.
    private bool changed1 = false;
    /// <summary>
    /// Directory of module descriptors (fetched via the EFI) for the unique
    /// module executables used by this runfile.
    /// </summary>
    public ModuleDirectory ModuleDirectory;
    /// <summary>Metadata (author, abstract, edit date, image) describing the model.</summary>
    public RunfileInformation RunfileInformation;
    /// <summary>Modules as read from the runfile XML (string form).</summary>
    public List<RunfileModule> RunfileModules = new List<RunfileModule>();
    /// <summary>Modules combining runfile data with module-descriptor data (typed form).</summary>
    public List<EarlabModule> EarlabModules = new List<EarlabModule>();
    #endregion

    #region Constructors
    public EarlabRunfile()
    {
    }

    public EarlabRunfile(XmlDocument Runfile)
    {
        Initialize(Runfile);
    }

    /// <summary>
    /// Populates this object from a validated runfile XML document:
    /// reads the RunfileInformation node, builds one RunfileModule per Module
    /// node, resolves the module descriptors for the unique executables, then
    /// pairs each RunfileModule with its descriptor to build EarlabModules.
    /// </summary>
    public void Initialize(XmlDocument Runfile)
    {
        // 1.) Runfile metadata (first RunfileInformation element in the document).
        XmlNodeList XList = Runfile.GetElementsByTagName("RunfileInformation");
        RunfileInformation = new RunfileInformation(XList[0]);

        // 2.) One string-form RunfileModule per Module element.
        XmlNodeList MList = Runfile.GetElementsByTagName("Module");
        foreach (XmlNode theModule in MList)
        {
            RunfileModules.Add(new RunfileModule(theModule));
        }

        // 3.) Fetch the descriptors for the unique module executables used here.
        ModuleDirectory = new ModuleDirectory(UniqueEarlabModuleXMLs);

        // 4.) Pair each RunfileModule with its descriptor to build the typed modules.
        foreach (RunfileModule sModule in RunfileModules)
        {
            // Descriptor lookup is keyed by the lower-cased executable name.
            string ModuleName = sModule.ModuleInformation.ExecutableName.ToLower();
            EarlabModules.Add(new EarlabModule(sModule, ModuleDirectory[ModuleName]));
        }
    }
    #endregion Constructors

    #region Properties
    /// <summary>Number of modules read from the runfile.</summary>
    public int ModuleCount { get { return RunfileModules.Count; } }

    /// <summary>
    /// Whether the runfile has been modified.
    /// Fix: the original setter silently discarded the assigned value, so this
    /// flag could never be changed by callers; it now stores the value.
    /// </summary>
    public bool HasChanged
    {
        get { return changed1; }
        set { changed1 = value; }
    }

    /// <summary>
    /// The distinct module executable names used by this runfile, in order of
    /// first appearance. Used to ask the EFI for the matching module XMLs.
    /// </summary>
    public string[] UniqueEarlabModuleXMLs
    {
        get
        {
            List<string> Uniques = new List<string>();
            foreach (RunfileModule RfMd in RunfileModules)
            {
                // List.Contains replaces the original hand-rolled inner scan.
                if (!Uniques.Contains(RfMd.ModuleInformation.ExecutableName))
                    Uniques.Add(RfMd.ModuleInformation.ExecutableName);
            }
            return Uniques.ToArray();
        }
    }

    /// <summary>Module at the given index; throws when the index is out of range.</summary>
    public RunfileModule this[int ModuleIndex]
    {
        get
        {
            if ((ModuleIndex < 0) || (ModuleIndex >= RunfileModules.Count))
                throw new IndexOutOfRangeException("The requested module at index " + ModuleIndex + " was not found in the Runfile");
            return RunfileModules[ModuleIndex];
        }
    }

    /// <summary>
    /// First module whose executable name matches <paramref name="ModuleName"/>.
    /// NOTE: only the stored name is lower-cased before comparing, so callers
    /// must pass an already lower-cased name (as Initialize does).
    /// </summary>
    public RunfileModule this[string ModuleName]
    {
        get
        {
            foreach (RunfileModule currentModule in RunfileModules)
            {
                if (currentModule.ModuleInformation.ExecutableName.ToLower() == ModuleName)
                    return currentModule;
            }
            throw new IndexOutOfRangeException("The requested module executable \"" + ModuleName + "\" was not found in the Runfile");
        }
    }
    #endregion

    /// <summary>
    /// Serializes this runfile to a new XML document: xml declaration, Runfile
    /// root, RunfileInformation block, then one Module element (ModuleInformation,
    /// Inputs, Outputs, Parameters) per EarlabModule.
    /// </summary>
    public XmlDocument RunfileXMLCreate()
    {
        XmlDocument NewRunfile = new XmlDocument();
        // <?xml version="1.0" encoding="utf-8" ?>
        NewRunfile.AppendChild(NewRunfile.CreateXmlDeclaration("1.0", "utf-8", ""));
        XmlNode root = NewRunfile.CreateElement("Runfile");
        NewRunfile.AppendChild(root);

        WriteRunfileInformation(NewRunfile, root);

        XmlNode ModulesRoot = NewRunfile.CreateElement("Modules");
        root.AppendChild(ModulesRoot);
        foreach (EarlabModule Module in this.EarlabModules)
        {
            ModulesRoot.AppendChild(WriteModule(NewRunfile, Module));
        }
        // TODO: validate the document against the schema before returning it.
        return NewRunfile;
    }

    // Writes the <RunfileInformation> block (Author/Abstract/EditDate/ImageLocation).
    private void WriteRunfileInformation(XmlDocument doc, XmlNode root)
    {
        XmlNode eRunfileInformation = doc.CreateElement("RunfileInformation");
        root.AppendChild(eRunfileInformation);
        eRunfileInformation.AppendChild(CreateTextElement(doc, "Author", this.RunfileInformation.RunfileInformationAuthor));
        eRunfileInformation.AppendChild(CreateTextElement(doc, "Abstract", this.RunfileInformation.RunfileInformationAbstract));
        eRunfileInformation.AppendChild(CreateTextElement(doc, "EditDate", this.RunfileInformation.RunfileInformationEditDate));
        eRunfileInformation.AppendChild(CreateTextElement(doc, "ImageLocation", this.RunfileInformation.RunfileInformationImageLocation));
    }

    // Builds one <Module> element: ModuleInformation, Inputs, Outputs, Parameters.
    private XmlNode WriteModule(XmlDocument doc, EarlabModule Module)
    {
        XmlNode ModuleRoot = doc.CreateElement("Module");

        XmlNode eModuleInfo = doc.CreateElement("ModuleInformation");
        ModuleRoot.AppendChild(eModuleInfo);
        eModuleInfo.AppendChild(CreateTextElement(doc, "InstanceName", Module.theEarlabModuleInformation.InstanceName.ToString()));
        eModuleInfo.AppendChild(CreateTextElement(doc, "ExecutableName", Module.theEarlabModuleInformation.ExecutableName.ToString()));

        // Input/output serialization is not implemented yet (the original loops
        // were empty); empty container elements are still emitted so the
        // document shape stays the same.
        ModuleRoot.AppendChild(doc.CreateElement("Inputs"));
        ModuleRoot.AppendChild(doc.CreateElement("Outputs"));

        XmlNode eParams = doc.CreateElement("Parameters");
        foreach (EarlabParameter elParam in Module.EarlabParameters)
        {
            eParams.AppendChild(WriteParameter(doc, elParam));
        }
        ModuleRoot.AppendChild(eParams);
        return ModuleRoot;
    }

    // Builds one <Parameter> element, converting the typed parameter value back to text.
    private XmlNode WriteParameter(XmlDocument doc, EarlabParameter elParam)
    {
        XmlNode ParamRoot = doc.CreateElement("Parameter");
        ParamRoot.AppendChild(CreateTextElement(doc, "Name", elParam.PName.ToString()));
        XmlNode ePType = CreateTextElement(doc, "Type", elParam.PType.ToString());
        ParamRoot.AppendChild(ePType);

        // The stored type string decides which concrete parameter class holds the value.
        string TestType = ePType.InnerXml.ToString().ToLower();
        XmlNode ePValue = doc.CreateElement("Value");
        switch (TestType)
        {
            case "integer":
            case "int":
                ePValue.InnerText = ((EarlabParameterInteger)elParam).PValue.ToString();
                ParamRoot.AppendChild(ePValue);
                break;
            case "float":
            case "double":
            case "dbl":
            case "fl":
                ePValue.InnerText = ((EarlabParameterDouble)elParam).PValue.ToString();
                ParamRoot.AppendChild(ePValue);
                break;
            case "str":
            case "string":
                ePValue.InnerText = ((EarlabParameterString)elParam).PValue.ToString();
                ParamRoot.AppendChild(ePValue);
                break;
            case "bool":
            case "boolean":
                ePValue.InnerText = ((EarlabParameterBoolean)elParam).PValue.ToString();
                ParamRoot.AppendChild(ePValue);
                break;
            case "integer[]":
            case "int[]":
                {
                    // Arrays are written as <Element> children of <Value>.
                    EarlabParameterIntegerArray IntParam = (EarlabParameterIntegerArray)elParam;
                    for (int counter = 0; counter < IntParam.PValue.Length; counter++)
                    {
                        XmlNode Element1 = doc.CreateElement("Element");
                        Element1.InnerText = IntParam.PValue[counter].ToString();
                        ePValue.AppendChild(Element1);
                    }
                    ParamRoot.AppendChild(ePValue);
                }
                break;
            case "double[]":
            case "float[]":
            case "dbl[]":
            case "fl[]":
                {
                    EarlabParameterDoubleArray DblParam = (EarlabParameterDoubleArray)elParam;
                    for (int counter = 0; counter < DblParam.PValue.Length; counter++)
                    {
                        XmlNode Element1 = doc.CreateElement("Element");
                        Element1.InnerText = DblParam.PValue[counter].ToString();
                        ePValue.AppendChild(Element1);
                    }
                    ParamRoot.AppendChild(ePValue);
                }
                break;
            default:
                // Unknown type: the Parameter is emitted with no <Value> child,
                // matching the original (silent) behavior.
                break;
        }
        return ParamRoot;
    }

    // Small helper: creates <name>text</name>.
    private XmlElement CreateTextElement(XmlDocument doc, string name, string text)
    {
        XmlElement e = doc.CreateElement(name);
        e.InnerText = text;
        return e;
    }
}
public class RunfileInformation
{
    // The raw RunfileInformation node this metadata was read from.
    public XmlNode RunfileInformationNode;
    // Model metadata, kept as plain strings exactly as they appear in the XML.
    public string RunfileInformationAuthor;
    public string RunfileInformationAbstract;
    public string RunfileInformationEditDate;
    public string RunfileInformationImageLocation;

    /// <summary>Creates an empty instance; fields stay null until populated.</summary>
    public RunfileInformation()
    {
    }

    /// <summary>
    /// Reads Author, Abstract, ImageLocation and EditDate from the given
    /// RunfileInformation node and stores them as strings.
    /// </summary>
    public RunfileInformation(XmlNode theRunfileInfoNode)
    {
        RunfileInformationNode = theRunfileInfoNode;
        RunfileInformationAuthor = theRunfileInfoNode["Author"].InnerText;
        RunfileInformationAbstract = theRunfileInfoNode["Abstract"].InnerText;
        RunfileInformationImageLocation = theRunfileInfoNode["ImageLocation"].InnerText;
        RunfileInformationEditDate = theRunfileInfoNode["EditDate"].InnerText;
    }
}
#region ModuleInformation Notes
//Takes in the ModuleInformation Node on a module and gives a data structure
/*
<ModuleInformation>
<InstanceName>Left_Pinna</InstanceName>
<ExecutableName>DataSource</ExecutableName>
</ModuleInformation>
*/
#endregion
#region RunfileModule Notes
/*
RunfileModule -- contains all data from XML in Object form, in the string format.
* RunfileModuleInformation -- contains module info
* RunfileInputs
* RunfileOutputs
* RunfileParameters
*/
#endregion
#region Data Classes for RunfileModule
//public class RunfileModuleDescriptor
//{
// //Data members
// public RunfileModuleInformation ModuleInformation;
// public List<RunfileParameter> RunfileParameters = new List<RunfileParameter>();
// public RunfileModuleDescriptor(XmlNode ModuleNode)
// {
// //1.) Get Module Information using RunfileModuleInformation class
// //(xml statement)
// ModuleInformation = new RunfileModuleInformation(ModuleNode["ModuleInformation"]);
// //2.) Get Run File Parameters using foreach and RunfileParameter class
// //(xml statement) //doesn't work!
// foreach (XmlNode ParameterNode in ModuleNode["Parameters"].ChildNodes)
// {
// //1.) Create a Runfile Parameter and add it to list
// RunfileParameters.Add(new RunfileParameter(ParameterNode));
// }
// }
//}
public class RunfileModule
{
    #region Data Members
    // The raw <Module> node this object was built from.
    public XmlNode theRunfileModuleNode;
    // Contents of the <ModuleInformation> child (instance/executable names).
    public RunfileModuleInformation ModuleInformation;
    // String-form inputs, outputs and parameters read from the module node.
    public List<RunfileInput> theRunfileInputs = new List<RunfileInput>();
    public List<RunfileOutput> theRunfileOutputs = new List<RunfileOutput>();
    public List<RunfileParameter> RunfileParameters = new List<RunfileParameter>();
    #endregion

    #region Constructors
    public RunfileModule()
    {
    }

    /// <summary>
    /// Builds the module from a <Module> node: reads ModuleInformation, then
    /// creates one entry per child of the Inputs, Outputs and Parameters
    /// containers. Assumes all four child elements are present — TODO confirm
    /// and add guards for malformed runfiles.
    /// </summary>
    public RunfileModule(XmlNode ModuleNode)
    {
        theRunfileModuleNode = ModuleNode;
        ModuleInformation = new RunfileModuleInformation(ModuleNode["ModuleInformation"]);

        foreach (XmlNode inputNode in ModuleNode["Inputs"].ChildNodes)
            theRunfileInputs.Add(new RunfileInput(inputNode));

        foreach (XmlNode outputNode in ModuleNode["Outputs"].ChildNodes)
            theRunfileOutputs.Add(new RunfileOutput(outputNode));

        foreach (XmlNode parameterNode in ModuleNode["Parameters"].ChildNodes)
            RunfileParameters.Add(new RunfileParameter(parameterNode));
    }
    #endregion
}
public class RunfileModuleInformation
{
    // Human-readable instance name of the module (e.g. "Left_Pinna").
    public string InstanceName;
    // Name of the executable implementing the module (e.g. "DataSource").
    public string ExecutableName;

    public RunfileModuleInformation()
    {
    }

    /// <summary>
    /// Reads InstanceName and ExecutableName from a ModuleInformation node.
    /// </summary>
    public RunfileModuleInformation(XmlNode ModuleInformationNode)
    {
        var instanceElement = ModuleInformationNode["InstanceName"];
        InstanceName = instanceElement.InnerText;
        var executableElement = ModuleInformationNode["ExecutableName"];
        ExecutableName = executableElement.InnerText;
    }
}
// Placeholder for one <Input> entry of a module.
// Parsing is not implemented yet; the node is currently ignored.
public class RunfileInput
{
public RunfileInput(XmlNode Input)
{
}
}
// Placeholder for one <Output> entry of a module.
// Parsing is not implemented yet; the node is currently ignored.
public class RunfileOutput
{
public RunfileOutput(XmlNode Output)
{
}
}
// String-form view of one <Parameter> node, e.g.:
//   <Parameter><Name>SampleRate</Name><Type>Double</Type><Value>44100</Value></Parameter>
// Array-typed parameters (Type ending in "[]") carry their values as
// <Element> children of the <Value> node instead of plain text:
//   <Parameter><Name>IntArrayParam</Name><Type>Integer[]</Type>
//     <Value><Element>1</Element><Element>2</Element></Value></Parameter>
public class RunfileParameter
{
    // True when the parameter's Type denotes an array (type string ends in "[]").
    public bool isArray;
    public string ParameterName;
    // Type string with all whitespace stripped (e.g. "Integer[]").
    public string ParameterType;
    // The <Value> node itself: plain text for scalars, <Element> children for arrays.
    public XmlNode ParameterValue;

    /// <summary>
    /// Reads Name, Type and the Value node from a Parameter element.
    /// </summary>
    public RunfileParameter(XmlNode ParameterNode)
    {
        ParameterName = ParameterNode["Name"].InnerText;
        // Strip embedded whitespace so "Integer []" and "Integer[]" compare equal.
        ParameterType = ParameterNode["Type"].InnerText.Replace(" ", "");
        ParameterValue = ParameterNode["Value"];
        // Fix: the original compared against a hard-coded list ("integer[]",
        // "double[]", "string[]", "bool[]", "float[]") that missed the type
        // synonyms accepted elsewhere in this file ("int[]", "dbl[]", "fl[]",
        // "boolean[]"), misclassifying those parameters as scalars. Any type
        // ending in "[]" is an array.
        isArray = ParameterType.EndsWith("[]", StringComparison.Ordinal);
    }
}
#endregion Data Classes for Runfile
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System.Collections.Generic;
using System.Compiler;
using System.Diagnostics;
using System.Diagnostics.Contracts;
namespace Microsoft.Contracts.Foxtrot
{
internal sealed class ReplaceResult : StandardVisitor
{
/// <summary>
/// Result starts out as a Local that is created by the client of this class. But if during
/// the replacement of Result&lt;T&gt;(), an occurrence is found within an anonymous delegate,
/// then the local is replaced with a member binding.
///
/// If the delegate is within a closure class, then the member binding is "this.f", where
/// f is a new field that is added to the top-level closure class and "this" will be the
/// type of the top-level closure class.
///
/// If the delegate is a static method added to the class itself, then the member binding
/// is "null.f" where f is a new static field that is added to the class containing the method
/// that contained the contract in which this occurrence of Result was found.
///
/// When this happens, a client should use the value of ReplaceResult.Result
/// to assign the return value of the method to.
///
/// Note: The client should assign the value that the method returns to *both* the local
/// originally passed in to the constructor of this visitor *and* to the member binding
/// (if Result is modified because there is an occurrence of Result&lt;T&gt;()
/// in a delegate) because if any occurrences are found before visiting the closure,
/// those occurrences will be replaced by the local and not the member binding!
///
/// When Result is found *not* within a delegate, then it is always replaced with
/// the original local that the client handed to the constructor.
/// </summary>
private readonly Local originalLocalForResult;
/// <summary>
/// if non-null, used to store result in declaring type.
/// NOTE: the field type is the type of the
/// result in the instantiated context. For generic closure classes (which are generated by the
/// rewriter during copying of OOB contracts), the closure potentially expects a generic form for
/// the result value. We thus have to generate access to the result by casting (box InstanceType, unbox GenericType).
/// </summary>
private Field topLevelStaticResultField;
/// <summary>
/// if non-null, used to store result in top-level closure instance.
/// NOTE: the field type is the type of the
/// result in the instantiated context. For generic closure classes (which are generated by the
/// rewriter during copying of OOB contracts), the closure potentially expects a generic form for
/// the result value. We thus have to generate access to the result by casting (box InstanceType, unbox GenericType).
/// </summary>
private Field topLevelClosureResultField;
/// <summary>
/// Yields the member binding(s) the client must initialize with the method's
/// return value: the static result field (if one was created) and/or the
/// per-closure instance result field (if one was created).
/// <paramref name="closureLocals"/> maps closure types to the locals holding
/// their instances; when no local exists for the top-level closure instance,
/// the binding goes through "this" of the closure type.
/// </summary>
public IEnumerable<MemberBinding> NecessaryResultInitialization(Dictionary<TypeNode, Local> closureLocals)
{
if (topLevelStaticResultField != null)
{
// Static field: no receiver needed.
yield return new MemberBinding(null, topLevelStaticResultField);
}
if (topLevelClosureResultField != null)
{
// note: this field is the field in the generic context of the closure. For the method to initialize this
// we need the instantiated form, which we remember in topLevelClosureClassInstance
Local local;
if (closureLocals.TryGetValue(topLevelClosureClassInstance, out local))
{
yield return
new MemberBinding(local,
GetInstanceField(this.originalLocalForResult.Type, topLevelClosureResultField,
topLevelClosureClassInstance));
}
else
{
// No local holds the closure instance: access it as "this" of the closure type.
var access = new This(topLevelClosureClassInstance);
yield return
new MemberBinding(access,
GetInstanceField(this.originalLocalForResult.Type, topLevelClosureResultField,
topLevelClosureClassInstance));
}
}
}
/// <summary>
/// Async variant of result initialization. Here <paramref name="closureLocals"/>
/// maps locals to member bindings, so the top-level closure access is found by
/// unspecializing each key's type and comparing it against the top-level
/// closure class definition. Asserts (debug-only) that an access was found
/// whenever an instance result field exists.
/// </summary>
public IEnumerable<MemberBinding> NecessaryResultInitializationAsync(Dictionary<Local, MemberBinding> closureLocals)
{
if (topLevelStaticResultField != null)
{
// Static field: no receiver needed.
yield return new MemberBinding(null, topLevelStaticResultField);
}
if (topLevelClosureResultField != null)
{
// note: this field is the field in the generic context of the closure. For the method to initialize this
// we need the instantiated form, which we remember in topLevelClosureClassInstance
MemberBinding mb = null;
foreach (var pair in closureLocals)
{
var keyType = HelperMethods.Unspecialize(pair.Key.Type);
if (keyType == topLevelClosureClassDefinition)
{
mb = pair.Value;
yield return
new MemberBinding(mb,
GetInstanceField(this.originalLocalForResult.Type, topLevelClosureResultField,
topLevelClosureClassInstance));
break;
}
}
Debug.Assert(mb != null, "Should have found access");
}
}
// Assembly the rewriter is operating on (set once in the constructor).
private readonly Module assemblyBeingRewritten;
// Closure method currently being visited.
private Method currentClosureMethod; // definition
// Expression that reaches the top-level closure instance from the current scope;
// used as the receiver when binding the closure's result field.
private Expression currentAccessToTopLevelClosure;
// Top-level closure class, as definition and as instantiated form.
private TypeNode topLevelClosureClassDefinition;
private TypeNode topLevelClosureClassInstance;
// Closure class whose method is currently being visited (instantiated form).
private TypeNode currentClosureClassInstance;
// Template parameters of the method whose contract is being rewritten.
private readonly TypeNodeList topLevelMethodFormals;
/// <summary>
/// Used when the method with the closure is generic and the field ends up on the corresponding generic closure class
/// </summary>
private TypeNode properlyInstantiatedFieldType;
// Current anonymous-delegate nesting depth (0 = not inside a delegate).
private int delegateNestingLevel;
private readonly TypeNode declaringType; // needed to copy anonymous delegates into
/// <summary>
/// Creates a visitor that replaces occurrences of Contract.Result in
/// <paramref name="containingMethod"/>'s contract with
/// <paramref name="originalLocalForResult"/>, or with a field access when an
/// occurrence sits inside an anonymous delegate (see the comment on
/// originalLocalForResult for the full protocol).
/// </summary>
public ReplaceResult(Method containingMethod, Local originalLocalForResult, Module assemblyBeingRewritten)
{
Contract.Requires(containingMethod != null);
this.assemblyBeingRewritten = assemblyBeingRewritten;
this.declaringType = containingMethod.DeclaringType;
this.topLevelMethodFormals = containingMethod.TemplateParameters;
this.originalLocalForResult = originalLocalForResult;
this.delegateNestingLevel = 0;
}
/// <summary>
/// True if Contract.Result&lt;T&gt;() was captured in a static context.
/// For instance, the following code leads to this situation:
/// public string Method(params string[] strings)
/// {
///    Contract.Ensures(Contract.ForAll(strings, s => Contract.Result&lt;string&gt;() == s));
///    return "42";
/// }
///
/// In this case, the caller of this code can decide to emit a warning, because the
/// current behavior (storing the result in a static field) could lead to issues in
/// a multithreaded environment.
/// </summary>
public bool ContractResultWasCapturedInStaticContext { get; private set; }
/// <summary>
/// Replaces one occurrence of Contract.Result. Outside any anonymous delegate
/// it becomes the original result local. Inside a static closure it becomes a
/// static result field on the declaring type (and flags
/// ContractResultWasCapturedInStaticContext). Inside an instance closure it
/// becomes a result field on the top-level closure class, reached through
/// currentAccessToTopLevelClosure.
/// </summary>
public override Expression VisitReturnValue(ReturnValue returnValue)
{
if (this.delegateNestingLevel == 0)
{
// not inside a closure method
return this.originalLocalForResult;
}
if (this.currentClosureMethod.IsStatic || IsRoslynBasedStaticClosure())
{
// This is a hack and we should notify about it!
ContractResultWasCapturedInStaticContext = true;
// static closure: no place to store result. Current hack is to use a static field of the
// declaring type. However, if we have a static closure inside a non-static closure, this
// breaks down, as we support only one kind of storage for return values.
var field = GetReturnValueClosureField(this.declaringType, this.originalLocalForResult.Type,
FieldFlags.CompilerControlled | FieldFlags.Private | FieldFlags.Static,
originalLocalForResult.UniqueKey);
Contract.Assume(returnValue != null);
return CreateProperResultAccess(returnValue, null, field);
}
Debug.Assert(this.currentAccessToTopLevelClosure != null);
Debug.Assert(this.topLevelClosureClassDefinition != null);
{
// Return an expression that is the value of the field defined in the
// top-level closure class to hold the method's return value.
// This will be this.up.Result where "up" is the field C#
// generated to point to the instance of the top-level closure class.
// "Result" is the field defined in this visitor's VisitConstruct when
// it finds a reference to an anonymous delegate.
var field = GetReturnValueClosureField(this.topLevelClosureClassDefinition,
this.properlyInstantiatedFieldType, FieldFlags.CompilerControlled | FieldFlags.Assembly,
this.topLevelClosureClassDefinition.UniqueKey);
Contract.Assume(returnValue != null);
return CreateProperResultAccess(returnValue, this.currentAccessToTopLevelClosure, field);
}
}
/// <summary>
/// Roslyn-based compilers changed the pattern used for caching static (i.e. non-capturing)
/// lambdas. Returns true when the current closure class instance is a static closure class
/// generated by a Roslyn-based compiler.
/// </summary>
private bool IsRoslynBasedStaticClosure()
{
    return this.currentClosureClassInstance.IsRoslynBasedStaticClosure();
}
/// <summary>
/// Builds the expression that reads the stored return value: a plain field access when
/// the field's type matches the expected type, otherwise a box/unbox.any pair that
/// coerces the stored value to the generic type expected in this context.
/// </summary>
/// <param name="returnValue">The Contract.Result node being replaced.</param>
/// <param name="closureObject">Receiver for the field access; null for a static field.</param>
/// <param name="resultField">Field that holds the captured return value.</param>
private static Expression CreateProperResultAccess(ReturnValue returnValue, Expression closureObject, Field resultField)
{
    Contract.Requires(returnValue != null);
    Contract.Requires(resultField != null);
    var fieldAccess = new MemberBinding(closureObject, resultField);
    if (resultField.Type != returnValue.Type)
    {
        // must cast to generic type expected in this context (box instance unbox.any Generic)
        return
            new BinaryExpression(
                new BinaryExpression(fieldAccess, new Literal(resultField.Type), NodeType.Box),
                new Literal(returnValue.Type), NodeType.UnboxAny);
    }
    return fieldAccess;
}
/// <summary>
/// If there is an anonymous delegate within a postcondition, then there
/// will be a call to a delegate constructor.
/// That call looks like "d..ctor(o,m)" where d is the type of the delegate.
/// There are two cases depending on whether the anonymous delegate captured
/// anything. In both cases, m is the method implementing the anonymous delegate.
/// (1) It does capture something. Then o is the instance of the closure class
/// implementing the delegate, and m is an instance method in the closure
/// class.
/// (2) It does *not* capture anything. Then o is the literal for null and
/// m is a static method that was added directly to the class.
///
/// This method will cause the method (i.e., m) to be visited to collect any
/// Result&lt;T&gt;() expressions that occur in it.
/// </summary>
/// <param name="cons">The AST representing the call to the constructor
/// of the delegate type.</param>
/// <returns>Whatever the base visitor returns</returns>
public override Expression VisitConstruct(Construct cons)
{
    if (cons.Type is DelegateNode)
    {
        // Operand 1 is the load of the method that implements the delegate.
        UnaryExpression ue = cons.Operands[1] as UnaryExpression;
        if (ue == null) goto JustVisit;
        MemberBinding mb = ue.Operand as MemberBinding;
        if (mb == null) goto JustVisit;
        Method m = mb.BoundMember as Method;
        // Only compiler-generated methods can be anonymous-delegate bodies.
        // NOTE(review): m may be null here (failed cast); presumably
        // HelperMethods.IsCompilerGenerated(null) returns false — confirm.
        if (!HelperMethods.IsCompilerGenerated(m)) goto JustVisit;
        Contract.Assume(m != null);
        m = Definition(m);
        this.delegateNestingLevel++;
        // Save visitor state so nested and sequential delegates are handled independently.
        TypeNode savedClosureClass = this.currentClosureClassInstance;
        Method savedClosureMethod = this.currentClosureMethod;
        Expression savedCurrentAccessToTopLevelClosure = this.currentAccessToTopLevelClosure;
        try
        {
            this.currentClosureMethod = m;
            if (m.IsStatic)
            {
                this.currentClosureClassInstance = null; // no closure object
            }
            else
            {
                // Operand 0 is the closure instance the delegate is bound to.
                this.currentClosureClassInstance = cons.Operands[0].Type;
                if (savedClosureClass == null)
                {
                    // Then this is the top-level closure class.
                    this.topLevelClosureClassInstance = this.currentClosureClassInstance;
                    this.topLevelClosureClassDefinition = Definition(this.topLevelClosureClassInstance);
                    this.currentAccessToTopLevelClosure = new This(this.topLevelClosureClassDefinition);
                    this.properlyInstantiatedFieldType = this.originalLocalForResult.Type;
                    if (this.topLevelMethodFormals != null)
                    {
                        Contract.Assume(this.topLevelClosureClassDefinition.IsGeneric);
                        Contract.Assume(topLevelClosureClassDefinition.TemplateParameters.Count >=
                            this.topLevelMethodFormals.Count);
                        // replace method type parameters in result properly with last n corresponding type parameters of closure class
                        TypeNodeList closureFormals = topLevelClosureClassDefinition.TemplateParameters;
                        if (closureFormals.Count > this.topLevelMethodFormals.Count)
                        {
                            int offset = closureFormals.Count - this.topLevelMethodFormals.Count;
                            closureFormals = new TypeNodeList(this.topLevelMethodFormals.Count);
                            for (int i = 0; i < this.topLevelMethodFormals.Count; i++)
                            {
                                closureFormals.Add(topLevelClosureClassDefinition.TemplateParameters[i + offset]);
                            }
                        }
                        Duplicator dup = new Duplicator(this.declaringType.DeclaringModule, this.declaringType);
                        Specializer spec = new Specializer(this.declaringType.DeclaringModule,
                            topLevelMethodFormals, closureFormals);
                        var type = dup.VisitTypeReference(this.originalLocalForResult.Type);
                        type = spec.VisitTypeReference(type);
                        this.properlyInstantiatedFieldType = type;
                    }
                }
                else
                {
                    // Nested closure: normalize to the template definition first.
                    while (currentClosureClassInstance.Template != null)
                        currentClosureClassInstance = currentClosureClassInstance.Template;
                    // Find the field in this.closureClass that the C# compiler generated
                    // to point to the top-level closure
                    foreach (Member mem in this.currentClosureClassInstance.Members)
                    {
                        Field f = mem as Field;
                        if (f == null) continue;
                        if (f.Type == this.topLevelClosureClassDefinition)
                        {
                            var consolidatedTemplateParams = this.currentClosureClassInstance.ConsolidatedTemplateParameters;
                            TypeNode thisType;
                            // Re-instantiate the generic closure when it carries template parameters.
                            if (consolidatedTemplateParams != null && consolidatedTemplateParams.Count > 0)
                            {
                                thisType =
                                    this.currentClosureClassInstance.GetGenericTemplateInstance(
                                        this.assemblyBeingRewritten, consolidatedTemplateParams);
                            }
                            else
                            {
                                thisType = this.currentClosureClassInstance;
                            }
                            this.currentAccessToTopLevelClosure = new MemberBinding(new This(thisType), f);
                            break;
                        }
                    }
                }
            }
            // Visit the delegate's body to rewrite any Result<T>() references inside it.
            this.VisitBlock(m.Body);
        }
        finally
        {
            // Restore the saved state even if visiting the body throws.
            this.delegateNestingLevel--;
            this.currentClosureMethod = savedClosureMethod;
            this.currentClosureClassInstance = savedClosureClass;
            this.currentAccessToTopLevelClosure = savedCurrentAccessToTopLevelClosure;
        }
    }
JustVisit:
    return base.VisitConstruct(cons);
}
/// <summary>
/// Walks the template chain to find the generic method definition
/// (or the method itself when it is not an instantiation).
/// </summary>
private static Method Definition(Method m)
{
    Contract.Requires(m != null);
    var definition = m;
    while (definition.Template != null)
    {
        definition = definition.Template;
    }
    return definition;
}
/// <summary>
/// Walks the template chain to find the generic type definition
/// (or the type itself when it is not an instantiation).
/// </summary>
private static TypeNode Definition(TypeNode t)
{
    Contract.Requires(t != null);
    var definition = t;
    while (definition.Template != null)
    {
        definition = definition.Template;
    }
    return definition;
}
/// <summary>
/// Gets (or lazily creates) the field on <paramref name="declaringType"/> used to hold
/// the method's return value for Contract.Result accesses inside closures.
/// </summary>
/// <param name="declaringType">Type (a template definition, asserted below) that owns the field.</param>
/// <param name="resultType">Type of the return value stored in the field.</param>
/// <param name="flags">Field flags; includes Static for static-closure storage.</param>
/// <param name="uniqueKey">Key used to make the field name unique within the type.</param>
private Field GetReturnValueClosureField(TypeNode declaringType, TypeNode resultType, FieldFlags flags, int uniqueKey)
{
    Contract.Requires(declaringType != null);
    Contract.Assume(declaringType.Template == null);
    Identifier name = Identifier.For("_result" + uniqueKey.ToString()); // unique name for this field
    Field f = declaringType.GetField(name);
    if (f != null) return f;
    f = new Field(declaringType,
        null,
        flags,
        name,
        resultType,
        null);
    declaringType.Members.Add(f);
    // remember we added it so we can make it part of initializations
    if (f.IsStatic)
    {
        topLevelStaticResultField = f;
    }
    else
    {
        topLevelClosureResultField = f;
    }
    return f;
}
/// <summary>
/// Maps a field declared on a generic template type to the corresponding field
/// reference on a particular instantiation of that type.
/// </summary>
/// <param name="originalReturnType">Unused in the active code path; only referenced by
/// the disabled block below. Kept for interface compatibility.</param>
/// <param name="possiblyGenericField">Field declared on the template definition.</param>
/// <param name="instanceDeclaringType">The (possibly instantiated) declaring type.</param>
private static Field GetInstanceField(TypeNode originalReturnType, Field possiblyGenericField, TypeNode instanceDeclaringType)
{
    Contract.Requires(instanceDeclaringType != null);
    // Not an instantiation: the template field itself is the right reference.
    if (instanceDeclaringType.Template == null) return possiblyGenericField;
    var declaringTemplate = instanceDeclaringType;
    while (declaringTemplate.Template != null)
    {
        declaringTemplate = declaringTemplate.Template;
    }
    Contract.Assume(declaringTemplate == possiblyGenericField.DeclaringType);
    return Rewriter.GetFieldInstanceReference(possiblyGenericField, instanceDeclaringType);
    // Dead code retained under "#if false": an older approach that materialized a
    // pseudo-instance field directly on the instantiated type. Consider deleting.
#if false
    Field f = instanceDeclaringType.GetField(possiblyGenericField.Name);
    if (f != null)
    {
        // already instantiated
        return f;
    }
    // pseudo instance
    Field instance = new Field(instanceDeclaringType, possiblyGenericField.Attributes, possiblyGenericField.Flags, possiblyGenericField.Name, originalReturnType, null);
    instanceDeclaringType.Members.Add(instance);
    return instance;
#endif
}
}
}
| |
/*
Copyright (c) 2010-2015 by Genstein and Jason Lautzenheiser.
This file is (or was originally) part of Trizbort, the Interactive Fiction Mapper.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
namespace Trizbort.Export
{
internal abstract class CodeExporter : IDisposable
{
/// <summary>
/// The collection of locations to export, indexed by their corresponding room.
/// </summary>
private readonly Dictionary<Room, Location> mMapRoomToLocation = new Dictionary<Room, Location>();

/// <summary>
/// Initializes the export-order collections; derived exporters populate them
/// via prepareContent() during Export().
/// </summary>
protected CodeExporter()
{
    LocationsInExportOrder = new List<Location>();
    RegionsInExportOrder = new List<ExportRegion>();
}
public abstract string FileDialogTitle { get; }
public abstract List<KeyValuePair<string, string>> FileDialogFilters { get; }
protected virtual Encoding Encoding => Encoding.UTF8;
protected abstract IEnumerable<string> ReservedWords { get; }
/// <summary>
/// Enumerates every automap direction, in enum declaration order.
/// </summary>
protected static IEnumerable<AutomapDirection> AllDirections
{
    get { return Enum.GetValues(typeof (AutomapDirection)).Cast<AutomapDirection>(); }
}
/// <summary>
/// The collection of locations on the map, in the order in which they should be exported.
/// </summary>
protected List<Location> LocationsInExportOrder { get; private set; }
protected List<ExportRegion> RegionsInExportOrder { get; private set; }
/// <summary>
/// Releases resources held by the exporter via the virtual Dispose(bool) hook.
/// </summary>
public void Dispose()
{
    Dispose(true);
    // Standard dispose pattern (CA1816): suppress finalization so a finalizer in a
    // derived exporter cannot run after the instance has already been disposed.
    GC.SuppressFinalize(this);
}
/// <summary>
/// Override in derived exporters to release resources; the base implementation holds none.
/// </summary>
/// <param name="disposing">True when invoked from Dispose(); false would indicate finalization.</param>
protected virtual void Dispose(bool disposing)
{
}
/// <summary>
/// Exports the current project to a string, substituting default title/author
/// values when the project does not provide them.
/// </summary>
/// <returns>The full exported source text.</returns>
public string Export()
{
    using (var writer = new StringWriter())
    {
        var project = Project.Current;
        var title = project.Title;
        if (string.IsNullOrEmpty(title))
        {
            // Fall back to the project's file name, then to a generic default.
            title = PathHelper.SafeGetFilenameWithoutExtension(project.FileName);
        }
        if (string.IsNullOrEmpty(title))
        {
            title = "A Trizbort Map";
        }
        var author = string.IsNullOrEmpty(project.Author) ? "A Trizbort User" : project.Author;
        // Capture history before building the export model.
        var history = project.History;
        prepareContent();
        ExportHeader(writer, title, author, project.Description ?? string.Empty, history);
        ExportContent(writer);
        return writer.ToString();
    }
}
/// <summary>
/// Exports the current project to the named file, substituting default title/author
/// values when the project does not provide them.
/// </summary>
/// <param name="fileName">Path of the file to (over)write.</param>
public void Export(string fileName)
{
    using (var writer = Create(fileName))
    {
        var project = Project.Current;
        var title = project.Title;
        if (string.IsNullOrEmpty(title))
        {
            // Fall back to the project's file name, then to a generic default.
            title = PathHelper.SafeGetFilenameWithoutExtension(project.FileName);
        }
        if (string.IsNullOrEmpty(title))
        {
            title = "A Trizbort Map";
        }
        var author = string.IsNullOrEmpty(project.Author) ? "A Trizbort User" : project.Author;
        // Capture history before building the export model.
        var history = project.History;
        prepareContent();
        ExportHeader(writer, title, author, project.Description ?? string.Empty, history);
        ExportContent(writer);
    }
}
/// <summary>
/// Creates the output writer used by Export(string); overridable by derived exporters.
/// </summary>
/// <param name="fileName">Path of the file to (over)write.</param>
protected virtual StreamWriter Create(string fileName)
{
    // BUG FIX: the buffer size was previously written as "2 ^ 16", but in C# the ^
    // operator is XOR, so the writer was created with an 18-byte buffer instead of
    // the intended 64 KB (2 to the 16th power). Use a shift to get 65536.
    return new StreamWriter(fileName, false, Encoding, 1 << 16);
}
protected abstract void ExportHeader(TextWriter writer, string title, string author, string description, string history);
protected abstract void ExportContent(TextWriter writer);
protected abstract string GetExportName(Room room, int? suffix);
protected abstract string GetExportName(string displayName, int? suffix);
/// <summary>
/// Builds the exportable model (regions, rooms, exits, things) from the current project.
/// Order matters: rooms need region names reserved first, exits need rooms, and best
/// exits can only be picked once all exits are filed.
/// </summary>
private void prepareContent()
{
    findRegions();
    findRooms();
    findExits();
    pickBestExits();
    findThings();
}
/// <summary>
/// Assigns each map region a unique, non-reserved export name and records it
/// in export order.
/// </summary>
private void findRegions()
{
    // Names already taken; reserved words map to null so they can never be reused.
    var takenNames = new Dictionary<string, Region>(StringComparer.InvariantCultureIgnoreCase);
    foreach (var reservedWord in ReservedWords)
    {
        takenNames.Add(reservedWord, null);
    }
    foreach (var region in Settings.Regions)
    {
        var exportName = GetExportName(region.RegionName, null);
        if (exportName == string.Empty)
        {
            exportName = "region";
        }
        // Append an increasing numeric suffix until the name is unique.
        for (var suffix = 2; takenNames.ContainsKey(exportName); ++suffix)
        {
            exportName = GetExportName(region.RegionName, suffix);
        }
        takenNames[exportName] = region;
        RegionsInExportOrder.Add(new ExportRegion(region, exportName));
    }
}
/// <summary>
/// Assigns each room a unique export name (avoiding reserved words and region names)
/// and creates its corresponding Location in export order.
/// </summary>
private void findRooms()
{
    var takenNames = new Dictionary<string, Room>(StringComparer.InvariantCultureIgnoreCase);
    // Prevent use of reserved words and of names already claimed by regions.
    foreach (var reservedWord in ReservedWords)
    {
        takenNames.Add(reservedWord, null);
    }
    foreach (var region in RegionsInExportOrder)
    {
        takenNames.Add(region.ExportName, null);
    }
    foreach (var room in Project.Current.Elements.OfType<Room>())
    {
        // Assign each room a unique export name.
        var exportName = GetExportName(room, null);
        if (exportName == string.Empty)
        {
            exportName = "object";
        }
        for (var suffix = 2; takenNames.ContainsKey(exportName); ++suffix)
        {
            exportName = GetExportName(room, suffix);
        }
        takenNames[exportName] = room;
        var location = new Location(room, exportName);
        LocationsInExportOrder.Add(location);
        mMapRoomToLocation[room] = location;
    }
}
/// <summary>
/// Files each docked map connection as one or two Exit objects on the locations it links.
/// </summary>
private void findExits()
{
    // find the exits from each room,
    // file them by room, and assign them priorities.
    // don't decide yet which exit is "the" from a room in a particular direction,
    // since we need to compare all a room's exits for that.
    foreach (var connection in Project.Current.Elements.OfType<Connection>())
    {
        CompassPoint sourceCompassPoint, targetCompassPoint;
        var sourceRoom = connection.GetSourceRoom(out sourceCompassPoint);
        var targetRoom = connection.GetTargetRoom(out targetCompassPoint);
        if (sourceRoom == null || targetRoom == null)
        {
            // ignore fully or partially undocked connections
            continue;
        }
        if (sourceRoom == targetRoom && sourceCompassPoint == targetCompassPoint)
        {
            // ignore stub connections, such as from automapping
            continue;
        }
        Location sourceLocation, targetLocation;
        if (mMapRoomToLocation.TryGetValue(sourceRoom, out sourceLocation) &&
            mMapRoomToLocation.TryGetValue(targetRoom, out targetLocation))
        {
            sourceLocation.AddExit(new Exit(sourceLocation, targetLocation, sourceCompassPoint, connection.StartText, connection.Style));
            // Two-way connections also yield a reciprocal exit on the target room.
            if (connection.Flow == ConnectionFlow.TwoWay)
            {
                targetLocation.AddExit(new Exit(targetLocation, sourceLocation, targetCompassPoint, connection.EndText, connection.Style));
            }
        }
    }
}
/// <summary>
/// Parses each room's "objects" text into Thing instances, assigning unique export
/// names and deducing containment from leading-space indentation.
/// </summary>
private void findThings()
{
    var mapExportNameToThing = new Dictionary<string, Thing>(StringComparer.InvariantCultureIgnoreCase);
    // prevent use of reserved words
    foreach (var reservedWord in ReservedWords)
    {
        mapExportNameToThing.Add(reservedWord, null);
    }
    // ...and of names already claimed by rooms or regions.
    foreach (var rooms in LocationsInExportOrder)
    {
        mapExportNameToThing.Add(rooms.ExportName, null);
    }
    foreach (var region in RegionsInExportOrder)
    {
        mapExportNameToThing.Add(region.ExportName, null);
    }
    foreach (var location in LocationsInExportOrder)
    {
        var objectsText = location.Room.Objects;
        if (string.IsNullOrEmpty(objectsText))
        {
            continue;
        }
        // One object per line; strip CRs so Windows line endings are tolerated.
        var objectNames = objectsText.Replace("\r", string.Empty).Split(new[] {'\n'}, StringSplitOptions.RemoveEmptyEntries);
        foreach (var objectName in objectNames)
        {
            // the display name is simply the object name without indentation
            var displayName = objectName.Trim();
            if (string.IsNullOrEmpty(displayName))
            {
                continue;
            }
            // assign each thing a unique export name.
            // NOTE(review): unlike findRooms/findRegions, an empty export name is not
            // replaced with a default here — confirm GetExportName never returns an
            // empty string for a non-empty display name.
            var exportName = GetExportName(displayName, null);
            var index = 2;
            while (mapExportNameToThing.ContainsKey(exportName))
            {
                exportName = GetExportName(displayName, index++);
            }
            // on each line, indentation denotes containment;
            // work out how much indentation there is
            var indent = 0;
            while (indent < objectName.Length && objectName[indent] == ' ')
            {
                ++indent;
            }
            // compare indentations to deduce containment:
            // the nearest earlier thing with strictly less indentation is the container.
            Thing container = null;
            for (var thingIndex = location.Things.Count - 1; thingIndex >= 0; --thingIndex)
            {
                var priorThing = location.Things[thingIndex];
                if (indent > priorThing.Indent)
                {
                    container = priorThing;
                    break;
                }
            }
            var thing = new Thing(displayName, exportName, location, container, indent);
            mapExportNameToThing.Add(exportName, thing);
            location.Things.Add(thing);
        }
    }
}
/// <summary>
/// For every direction from every room, picks the single best exit (if any)
/// in that direction.
/// </summary>
private void pickBestExits()
{
    LocationsInExportOrder.ForEach(location => location.PickBestExits());
}
/// <summary>
/// A map region paired with the unique name under which it is exported.
/// </summary>
protected class ExportRegion
{
    public ExportRegion(Region region, string exportName)
    {
        Region = region;
        ExportName = exportName;
    }

    /// <summary>The underlying map region.</summary>
    public Region Region { get; private set; }

    /// <summary>The unique, collision-free name used in exported source.</summary>
    public string ExportName { get; private set; }

    /// <summary>Set by derived exporters once this region has been written out.</summary>
    public bool Exported { get; set; }
}
/// <summary>
/// A room paired with its export name, its outgoing exits, and a per-direction
/// cache of the best exit in each direction.
/// </summary>
protected class Location
{
    // All exits filed from this room (re-sorted by pickBestExit).
    private readonly List<Exit> mExits = new List<Exit>();
    // Cache of the winning exit per direction, rebuilt by PickBestExits().
    private readonly Dictionary<AutomapDirection, Exit> mMapDirectionToBestExit = new Dictionary<AutomapDirection, Exit>();

    public Location(Room room, string exportName)
    {
        Room = room;
        ExportName = exportName;
    }

    /// <summary>The underlying map room.</summary>
    public Room Room { get; private set; }

    /// <summary>The unique, collision-free name used in exported source.</summary>
    public string ExportName { get; private set; }

    /// <summary>Set by derived exporters once this location has been written out.</summary>
    public bool Exported { get; set; }

    /// <summary>Objects in this room, in the order they were parsed.</summary>
    public List<Thing> Things { get; } = new List<Thing>();

    public void AddExit(Exit exit)
    {
        mExits.Add(exit);
    }

    /// <summary>
    /// Rebuilds the per-direction cache of best exits; call after all exits are filed.
    /// </summary>
    public void PickBestExits()
    {
        mMapDirectionToBestExit.Clear();
        foreach (var direction in AllDirections)
        {
            var exit = pickBestExit(direction);
            if (exit != null)
            {
                mMapDirectionToBestExit.Add(direction, exit);
            }
        }
    }

    private Exit pickBestExit(AutomapDirection direction)
    {
        // sort exits by priority for this direction only
        // (descending; priorities are small, so the subtraction cannot overflow)
        mExits.Sort((Exit a, Exit b) =>
        {
            var one = a.GetPriority(direction);
            var two = b.GetPriority(direction);
            return two - one;
        });
        // pick the highest priority exit if its direction matches;
        // if the highest priority exit's direction doesn't match,
        // there's no exit in this direction.
        if (mExits.Count > 0)
        {
            var exit = mExits[0];
            if (exit.PrimaryDirection == direction || exit.SecondaryDirection == direction)
            {
                return exit;
            }
        }
        return null;
    }

    /// <summary>
    /// Returns the cached best exit in the given direction, or null if there is none.
    /// </summary>
    public Exit GetBestExit(AutomapDirection direction)
    {
        Exit exit;
        if (mMapDirectionToBestExit.TryGetValue(direction, out exit))
        {
            return exit;
        }
        return null;
    }
}
/// <summary>
/// An exit leading from one location toward another, derived from a docked map
/// connection, with the data needed to rank competing exits per direction.
/// </summary>
protected class Exit
{
    /// <summary>
    /// The priority of this exit's primary direction, compared to other exits which may
    /// go in the same direction from the same room.
    /// </summary>
    /// <remarks>
    /// Since multiple exits may lead the same way from the same room, priorities are
    /// used to decide which exit is the "best" exit in any direction.
    /// For example, a northerly exit which is docked to the N compass point and which
    /// does not go up, down, in or out is a higher priority than a northerly exit
    /// docked to the NNE compass point and which also goes up.
    /// NOTE(review): assignPrimaryPriority() runs before assignSecondaryDirection() in
    /// the constructor, so its SecondaryDirection checks always see null and the
    /// penalty branches never fire. Naively reordering the calls would change which
    /// exits win GetPriority comparisons (a negative priority ranks below a
    /// non-matching exit's -1), so the existing order is deliberately preserved —
    /// confirm the intended semantics before changing it.
    /// </remarks>
    private int mPrimaryPriority;

    public Exit(Location source, Location target, CompassPoint visualCompassPoint, string connectionText, ConnectionStyle connectionStyle)
    {
        Source = source;
        Target = target;
        VisualCompassPoint = visualCompassPoint;
        // Dashed connections denote conditional passages.
        Conditional = connectionStyle == ConnectionStyle.Dashed;
        assignPrimaryPriority();
        assignSecondaryDirection(connectionText);
        // An up/down/in/out annotation overrides the compass-derived primary direction.
        if (SecondaryDirection != null)
            PrimaryDirection = (AutomapDirection)SecondaryDirection;
        else
            assignPrimaryDirection();
    }

    /// <summary>
    /// The room from which this exit leads.
    /// </summary>
    public Location Source { get; private set; }

    /// <summary>
    /// The room to which this exit leads.
    /// </summary>
    public Location Target { get; private set; }

    /// <summary>
    /// The compass point in Trizbort at which this exit is docked to the starting room.
    /// </summary>
    /// <remarks>
    /// Naturally this may include compass points such as SouthSouthWest which need to be
    /// translated into an exportable direction; see PrimaryDirection and SecondaryDirection.
    /// </remarks>
    public CompassPoint VisualCompassPoint { get; private set; }

    /// <summary>
    /// The primary direction of this exit: N, S, E, W, NE, NW, SE, SW.
    /// Deduced from VisualCompassPoint, unless overridden by a secondary direction.
    /// </summary>
    public AutomapDirection PrimaryDirection { get; private set; }

    /// <summary>
    /// The secondary direction of this exit, if any: either up, down, in or out.
    /// </summary>
    public AutomapDirection? SecondaryDirection { get; private set; }

    /// <summary>
    /// True if this exit requires some in-game action from the player to be used; false otherwise.
    /// </summary>
    public bool Conditional { get; private set; }

    /// <summary>
    /// True if this exit has been exported; false otherwise.
    /// </summary>
    public bool Exported { get; set; }

    /// <summary>
    /// Get the priority of the exit, in the given direction, with respect to other exits.
    /// Higher priorities indicate more suitable exits.
    /// </summary>
    public int GetPriority(AutomapDirection direction)
    {
        if (direction == PrimaryDirection)
        {
            return mPrimaryPriority;
        }
        if (direction == SecondaryDirection)
        {
            return 1;
        }
        // This exit does not lead in the requested direction at all.
        return -1;
    }

    private void assignPrimaryDirection()
    {
        // Collapse the sixteen compass points onto the eight exportable directions.
        switch (VisualCompassPoint)
        {
            case CompassPoint.NorthNorthWest:
            case CompassPoint.North:
            case CompassPoint.NorthNorthEast:
                PrimaryDirection = AutomapDirection.North;
                break;
            case CompassPoint.NorthEast:
                PrimaryDirection = AutomapDirection.NorthEast;
                break;
            case CompassPoint.EastNorthEast:
            case CompassPoint.East:
            case CompassPoint.EastSouthEast:
                PrimaryDirection = AutomapDirection.East;
                break;
            case CompassPoint.SouthEast:
                PrimaryDirection = AutomapDirection.SouthEast;
                break;
            case CompassPoint.SouthSouthEast:
            case CompassPoint.South:
            case CompassPoint.SouthSouthWest:
                PrimaryDirection = AutomapDirection.South;
                break;
            case CompassPoint.SouthWest:
                PrimaryDirection = AutomapDirection.SouthWest;
                break;
            case CompassPoint.WestSouthWest:
            case CompassPoint.West:
            case CompassPoint.WestNorthWest:
                PrimaryDirection = AutomapDirection.West;
                break;
            case CompassPoint.NorthWest:
                PrimaryDirection = AutomapDirection.NorthWest;
                break;
            default:
                // BUG FIX: this message previously ended mid-sentence ("found on ");
                // report the offending compass point so failures are diagnosable.
                throw new InvalidOperationException(string.Format("Unexpected compass point found on exit: {0}", VisualCompassPoint));
        }
    }

    private void assignSecondaryDirection(string connectionText)
    {
        // The connection's start/end label can mark the exit as up/down/in/out.
        switch (connectionText)
        {
            case Connection.Up:
                SecondaryDirection = AutomapDirection.Up;
                break;
            case Connection.Down:
                SecondaryDirection = AutomapDirection.Down;
                break;
            case Connection.In:
                SecondaryDirection = AutomapDirection.In;
                break;
            case Connection.Out:
                SecondaryDirection = AutomapDirection.Out;
                break;
            default:
                SecondaryDirection = null;
                break;
        }
    }

    private void assignPrimaryPriority()
    {
        mPrimaryPriority = 0;
        switch (VisualCompassPoint)
        {
            // Exactly-aligned compass points outrank off-axis ones (e.g. N beats NNE).
            case CompassPoint.North:
            case CompassPoint.South:
            case CompassPoint.East:
            case CompassPoint.West:
            case CompassPoint.NorthEast:
            case CompassPoint.SouthEast:
            case CompassPoint.SouthWest:
            case CompassPoint.NorthWest:
                if (SecondaryDirection == null)
                {
                    mPrimaryPriority += 4;
                }
                else
                {
                    mPrimaryPriority -= 2;
                }
                break;
            default:
                if (SecondaryDirection == null)
                {
                    mPrimaryPriority += 3;
                }
                else
                {
                    mPrimaryPriority -= 1;
                }
                break;
        }
    }

    /// <summary>
    /// Test whether an exit is reciprocated in the other direction; i.e. is there a bidirectional connection.
    /// </summary>
    public static bool IsReciprocated(Location source, AutomapDirection direction, Location target)
    {
        if (target != null)
        {
            var oppositeDirection = CompassPointHelper.GetOpposite(direction);
            var reciprocal = target.GetBestExit(oppositeDirection);
            if (reciprocal != null)
            {
                Debug.Assert(reciprocal.PrimaryDirection == oppositeDirection || reciprocal.SecondaryDirection == oppositeDirection, "Alleged opposite direction appears to lead somewhere else. Something went wrong whilst building the set of exits from each room.");
                return reciprocal.Target == source;
            }
        }
        return false;
    }
}
/// <summary>
/// An object found in a room's objects list, possibly contained within another
/// object in the same room (containment deduced from indentation).
/// </summary>
protected class Thing
{
    public Thing(string displayName, string exportName, Location location, Thing container, int indent)
    {
        Debug.Assert(container == null || container.Location == location, "Thing's container is not located in the same room as the thing.");
        DisplayName = displayName;
        ExportName = exportName;
        Location = location;
        Container = container;
        Indent = indent;
        Contents = new List<Thing>();
        // Register this thing with its container, if it has one.
        if (container != null)
        {
            container.Contents.Add(this);
        }
    }

    /// <summary>The name as shown to the user, without indentation.</summary>
    public string DisplayName { get; private set; }

    /// <summary>The unique, collision-free name used in exported source.</summary>
    public string ExportName { get; private set; }

    /// <summary>The room this thing belongs to.</summary>
    public Location Location { get; private set; }

    /// <summary>The thing containing this one, or null if it sits directly in the room.</summary>
    public Thing Container { get; private set; }

    /// <summary>Leading-space count from the source line; used to deduce containment.</summary>
    public int Indent { get; private set; }

    /// <summary>Things contained within this thing.</summary>
    public List<Thing> Contents { get; private set; }
}
}
}
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System.Collections;
using System.Management.Automation;
using Microsoft.Azure.Commands.ResourceManager.Common.Tags;
using Microsoft.Azure.Management.Storage;
using Microsoft.Azure.Management.Storage.Models;
using StorageModels = Microsoft.Azure.Management.Storage.Models;
using Microsoft.Azure.Commands.Management.Storage.Models;
using System.Collections.Generic;
using System;
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
namespace Microsoft.Azure.Commands.Management.Storage
{
[Cmdlet(VerbsCommon.Remove, StorageAccountRuleNounStr, SupportsShouldProcess = true, DefaultParameterSetName = NetWorkRuleStringParameterSet)]
[OutputType(typeof(PSVirtualNetworkRule), ParameterSetName = new string[] { NetWorkRuleStringParameterSet, NetworkRuleObjectParameterSet })]
[OutputType(typeof(PSIpRule), ParameterSetName = new string[] { IpRuleStringParameterSet, IpRuleObjectParameterSet })]
public class RemoveAzureStorageAccountNetworkRuleCommand : StorageAccountBaseCmdlet
{
/// <summary>
/// NetWorkRule in String parameter set name
/// </summary>
private const string NetWorkRuleStringParameterSet = "NetWorkRuleString";
/// <summary>
/// IpRule in String parameter set name
/// </summary>
private const string IpRuleStringParameterSet = "IpRuleString";
/// <summary>
/// NetWorkRule Objects pipeline parameter set
/// </summary>
private const string NetworkRuleObjectParameterSet = "NetworkRuleObject";
/// <summary>
/// IpRule Objects pipeline parameter set
/// </summary>
private const string IpRuleObjectParameterSet = "IpRuleObject";
[Parameter(
Position = 0,
Mandatory = true,
ValueFromPipelineByPropertyName = true,
HelpMessage = "Resource Group Name.")]
[ResourceGroupCompleter]
[ValidateNotNullOrEmpty]
public string ResourceGroupName { get; set; }
[Parameter(
Position = 1,
Mandatory = true,
ValueFromPipelineByPropertyName = true,
HelpMessage = "Storage Account Name.")]
[Alias(StorageAccountNameAlias, AccountNameAlias)]
[ValidateNotNullOrEmpty]
public string Name { get; set; }
[Parameter(
Mandatory = true,
HelpMessage = "Storage Account NetworkRule IPRules.",
ValueFromPipeline = true, ParameterSetName = IpRuleObjectParameterSet)]
public PSIpRule[] IPRule { get; set; }
[Parameter(
Mandatory = true,
HelpMessage = "Storage Account NetworkRule VirtualNetworkRules.",
ValueFromPipeline = true, ParameterSetName = NetworkRuleObjectParameterSet)]
public PSVirtualNetworkRule[] VirtualNetworkRule { get; set; }
[Parameter(
Mandatory = true,
HelpMessage = "Storage Account NetworkRule IPRules IPAddressOrRange in string.",
ParameterSetName = IpRuleStringParameterSet)]
public string[] IPAddressOrRange { get; set; }
[Parameter(
Mandatory = true,
HelpMessage = "Storage Account NetworkRule VirtualNetworkRules VirtualNetworkResourceId in string.",
ParameterSetName = NetWorkRuleStringParameterSet)]
[Alias("SubnetId", "VirtualNetworkId")]
public string[] VirtualNetworkResourceId { get; set; }
[Parameter(Mandatory = false, HelpMessage = "Run cmdlet in the background")]
public SwitchParameter AsJob { get; set; }
/// <summary>
/// Removes the requested IP and/or virtual-network rules from the storage account's
/// NetworkRuleSet, submits the update, then writes the refreshed rule collection.
/// Throws ArgumentOutOfRangeException when a rule to remove does not exist.
/// </summary>
public override void ExecuteCmdlet()
{
    base.ExecuteCmdlet();
    if (ShouldProcess(this.Name, "Remove Storage Account Networkrules"))
    {
        var storageAccount = this.StorageClient.StorageAccounts.GetProperties(
            this.ResourceGroupName,
            this.Name);
        NetworkRuleSet storageACL = storageAccount.NetworkRuleSet;
        if (storageACL == null)
        {
            // Account has no ACL yet; start from an empty rule set so the
            // "not exist" errors below are raised consistently.
            storageACL = new NetworkRuleSet();
        }
        // Each parameter set supplies the rules-to-remove in a different shape
        // (resource-id strings, IP strings, or PS rule objects).
        switch (ParameterSetName)
        {
            case NetWorkRuleStringParameterSet:
                if (storageACL.VirtualNetworkRules == null)
                    storageACL.VirtualNetworkRules = new List<VirtualNetworkRule>();
                foreach (string s in VirtualNetworkResourceId)
                {
                    VirtualNetworkRule rule = new VirtualNetworkRule(s);
                    if (!RemoveNetworkRule(storageACL.VirtualNetworkRules, rule))
                        throw new ArgumentOutOfRangeException("VirtualNetworkResourceId", String.Format("Can't remove VirtualNetworkRule with specific ResourceId since not exist: {0}", rule.VirtualNetworkResourceId));
                }
                break;
            case IpRuleStringParameterSet:
                if (storageACL.IpRules == null)
                    storageACL.IpRules = new List<IPRule>();
                foreach (string s in IPAddressOrRange)
                {
                    IPRule rule = new IPRule(s);
                    if (!RemoveIpRule(storageACL.IpRules, rule))
                        throw new ArgumentOutOfRangeException("IPAddressOrRange", String.Format("Can't remove IpRule with specific IPAddressOrRange since not exist: {0}", rule.IPAddressOrRange));
                }
                break;
            case NetworkRuleObjectParameterSet:
                if (storageACL.VirtualNetworkRules == null)
                    storageACL.VirtualNetworkRules = new List<VirtualNetworkRule>();
                foreach (PSVirtualNetworkRule rule in VirtualNetworkRule)
                {
                    if (!RemoveNetworkRule(storageACL.VirtualNetworkRules, PSNetworkRuleSet.ParseStorageNetworkRuleVirtualNetworkRule(rule)))
                        throw new ArgumentOutOfRangeException("VirtualNetworkRule", String.Format("Can't remove VirtualNetworkRule with specific ResourceId since not exist: {0}", rule.VirtualNetworkResourceId));
                }
                break;
            case IpRuleObjectParameterSet:
                if (storageACL.IpRules == null)
                    storageACL.IpRules = new List<IPRule>();
                foreach (PSIpRule rule in IPRule)
                {
                    if (!RemoveIpRule(storageACL.IpRules, PSNetworkRuleSet.ParseStorageNetworkRuleIPRule(rule)))
                        throw new ArgumentOutOfRangeException("IPRule", String.Format("Can't remove IpRule with specific IPAddressOrRange since not exist: {0}", rule.IPAddressOrRange));
                }
                break;
        }
        StorageAccountUpdateParameters updateParameters = new StorageAccountUpdateParameters();
        updateParameters.NetworkRuleSet = storageACL;
        var updatedAccountResponse = this.StorageClient.StorageAccounts.Update(
            this.ResourceGroupName,
            this.Name,
            updateParameters);
        // Re-fetch so the output reflects the service's view after the update.
        storageAccount = this.StorageClient.StorageAccounts.GetProperties(this.ResourceGroupName, this.Name);
        switch (ParameterSetName)
        {
            case NetWorkRuleStringParameterSet:
            case NetworkRuleObjectParameterSet:
                WriteObject(PSNetworkRuleSet.ParsePSNetworkRule(storageAccount.NetworkRuleSet).VirtualNetworkRules);
                break;
            case IpRuleStringParameterSet:
            case IpRuleObjectParameterSet:
                WriteObject(PSNetworkRuleSet.ParsePSNetworkRule(storageAccount.NetworkRuleSet).IpRules);
                break;
        }
    }
}
/// <summary>
/// Remove one IpRule from an IpRule list.
/// </summary>
/// <param name="ruleList">The IpRule list to modify</param>
/// <param name="ruleToRemove">The IP rule to remove</param>
/// <returns>true if removal succeeded; false if no matching rule was found</returns>
public bool RemoveIpRule(IList<IPRule> ruleList, IPRule ruleToRemove)
{
    // Index-based scan so removal never touches an active enumerator.
    for (var i = 0; i < ruleList.Count; i++)
    {
        if (ruleList[i].IPAddressOrRange == ruleToRemove.IPAddressOrRange)
        {
            ruleList.RemoveAt(i);
            return true;
        }
    }
    return false;
}
/// <summary>
/// Remove one NetworkRule from a NetworkRule list.
/// </summary>
/// <param name="ruleList">The NetworkRule list to modify</param>
/// <param name="ruleToRemove">The NetworkRule to remove</param>
/// <returns>true if removal succeeded; false if no matching rule was found</returns>
public bool RemoveNetworkRule(IList<VirtualNetworkRule> ruleList, VirtualNetworkRule ruleToRemove)
{
    // Index-based scan so removal never touches an active enumerator.
    for (var i = 0; i < ruleList.Count; i++)
    {
        if (ruleList[i].VirtualNetworkResourceId == ruleToRemove.VirtualNetworkResourceId)
        {
            ruleList.RemoveAt(i);
            return true;
        }
    }
    return false;
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using static Python.Runtime.Runtime;
namespace Python.Runtime
{
/// <summary>
/// Saves ("stashes") and restores Python.NET runtime state across a runtime
/// reload. State is serialized with an <see cref="IFormatter"/> and parked in
/// a Python capsule stored under sys attribute "clr_data".
/// </summary>
public static class RuntimeData
{
    // Backing field for FormatterType; null means "use BinaryFormatter".
    private static Type _formatterType;

    /// <summary>
    /// Formatter type used to serialize/deserialize the stashed payload.
    /// Must implement <see cref="IFormatter"/>.
    /// </summary>
    public static Type FormatterType
    {
        get => _formatterType;
        set
        {
            if (!typeof(IFormatter).IsAssignableFrom(value))
            {
                throw new ArgumentException("Not a type implemented IFormatter");
            }
            _formatterType = value;
        }
    }

    /// <summary>Optional external store for CLR object wrappers; may be null.</summary>
    public static ICLRObjectStorer WrappersStorer { get; set; }

    /// <summary>
    /// Clears the old "clr_data" entry if a previous one is present.
    /// </summary>
    static void ClearCLRData ()
    {
        BorrowedReference capsule = PySys_GetObject("clr_data");
        if (!capsule.IsNull)
        {
            // Free the raw buffer the capsule points at before reuse.
            IntPtr oldData = PyCapsule_GetPointer(capsule, IntPtr.Zero);
            PyMem_Free(oldData);
            PyCapsule_SetPointer(capsule, IntPtr.Zero);
        }
    }

    /// <summary>
    /// Serializes meta type, import hook, types, classes, managed modules and
    /// live objects into a single payload and stores it in the "clr_data"
    /// capsule. Memory layout of the capsule buffer: [IntPtr length][payload bytes].
    /// </summary>
    internal static void Stash()
    {
        var metaStorage = new RuntimeDataStorage();
        MetaType.SaveRuntimeData(metaStorage);
        var importStorage = new RuntimeDataStorage();
        ImportHook.SaveRuntimeData(importStorage);
        var typeStorage = new RuntimeDataStorage();
        TypeManager.SaveRuntimeData(typeStorage);
        var clsStorage = new RuntimeDataStorage();
        ClassManager.SaveRuntimeData(clsStorage);
        var moduleStorage = new RuntimeDataStorage();
        SaveRuntimeDataModules(moduleStorage);
        var objStorage = new RuntimeDataStorage();
        SaveRuntimeDataObjects(objStorage);

        // Bundle every sub-storage under well-known keys; RestoreRuntimeDataImpl
        // reads them back by the same names.
        var runtimeStorage = new RuntimeDataStorage();
        runtimeStorage.AddValue("meta", metaStorage);
        runtimeStorage.AddValue("import", importStorage);
        runtimeStorage.AddValue("types", typeStorage);
        runtimeStorage.AddValue("classes", clsStorage);
        runtimeStorage.AddValue("modules", moduleStorage);
        runtimeStorage.AddValue("objs", objStorage);

        IFormatter formatter = CreateFormatter();
        var ms = new MemoryStream();
        formatter.Serialize(ms, runtimeStorage);

        // Length is written as an IntPtr prefix, so it must fit in an int for
        // the Marshal.Copy below.
        Debug.Assert(ms.Length <= int.MaxValue);
        byte[] data = ms.GetBuffer();
        // TODO: use buffer api instead
        IntPtr mem = PyMem_Malloc(ms.Length + IntPtr.Size);
        Marshal.WriteIntPtr(mem, (IntPtr)ms.Length);
        Marshal.Copy(data, 0, mem + IntPtr.Size, (int)ms.Length);

        ClearCLRData();

        NewReference capsule = PyCapsule_New(mem, IntPtr.Zero, IntPtr.Zero);
        PySys_SetObject("clr_data", capsule);
        // Let the dictionary own the reference
        capsule.Dispose();
    }

    /// <summary>
    /// Restores stashed state; the "clr_data" entry is always cleared
    /// afterwards, even when restoration throws.
    /// </summary>
    internal static void RestoreRuntimeData()
    {
        try
        {
            RestoreRuntimeDataImpl();
        }
        finally
        {
            ClearStash();
        }
    }

    // Deserializes the capsule payload and rehydrates each subsystem in the
    // reverse dependency order of Stash. No-op when no capsule is present.
    private static void RestoreRuntimeDataImpl()
    {
        BorrowedReference capsule = PySys_GetObject("clr_data");
        if (capsule.IsNull)
        {
            return;
        }
        // Buffer layout matches Stash: [IntPtr length][payload bytes].
        IntPtr mem = PyCapsule_GetPointer(capsule, IntPtr.Zero);
        int length = (int)Marshal.ReadIntPtr(mem);
        byte[] data = new byte[length];
        Marshal.Copy(mem + IntPtr.Size, data, 0, length);
        var ms = new MemoryStream(data);
        var formatter = CreateFormatter();
        var storage = (RuntimeDataStorage)formatter.Deserialize(ms);

        var objs = RestoreRuntimeDataObjects(storage.GetStorage("objs"));
        RestoreRuntimeDataModules(storage.GetStorage("modules"));
        TypeManager.RestoreRuntimeData(storage.GetStorage("types"));
        var clsObjs = ClassManager.RestoreRuntimeData(storage.GetStorage("classes"));
        ImportHook.RestoreRuntimeData(storage.GetStorage("import"));
        PyCLRMetaType = MetaType.RestoreRuntimeData(storage.GetStorage("meta"));

        foreach (var item in objs)
        {
            item.Value.ExecutePostActions();
            // Release the extra reference taken in SaveRuntimeDataObjects.
            XDecref(item.Key.pyHandle);
        }
        foreach (var item in clsObjs)
        {
            item.Value.ExecutePostActions();
        }
    }

    /// <summary>Returns true when a "clr_data" capsule is currently stashed.</summary>
    public static bool HasStashData()
    {
        return !PySys_GetObject("clr_data").IsNull;
    }

    /// <summary>Removes the "clr_data" entry from sys.</summary>
    public static void ClearStash()
    {
        PySys_SetObject("clr_data", default);
    }

    // True only when the object's type and its whole base-type chain are
    // marked [Serializable]; used to filter what can be stashed.
    static bool CheckSerializable (object o)
    {
        Type type = o.GetType();
        do
        {
            if (!type.IsSerializable)
            {
                return false;
            }
        } while ((type = type.BaseType) != null);
        return true;
    }

    // Collects extension objects and CLR wrappers, takes an extra reference on
    // each saved Python handle (released in RestoreRuntimeDataImpl), and
    // records a per-handle InterDomainContext.
    private static void SaveRuntimeDataObjects(RuntimeDataStorage storage)
    {
        var objs = ManagedType.GetManagedObjects();
        var extensionObjs = new List<ManagedType>();
        // inst -> all CLRObject wrappers around that instance
        var wrappers = new Dictionary<object, List<CLRObject>>();
        var serializeObjs = new CLRWrapperCollection();
        var contexts = new Dictionary<IntPtr, InterDomainContext>();
        foreach (var entry in objs)
        {
            var obj = entry.Key;
            XIncref(obj.pyHandle);
            switch (entry.Value)
            {
                case ManagedType.TrackTypes.Extension:
                    Debug.Assert(CheckSerializable(obj));
                    var context = new InterDomainContext();
                    contexts[obj.pyHandle] = context;
                    obj.Save(context);
                    extensionObjs.Add(obj);
                    break;
                case ManagedType.TrackTypes.Wrapper:
                    // Wrapper must be the CLRObject
                    var clrObj = (CLRObject)obj;
                    object inst = clrObj.inst;
                    CLRMappedItem item;
                    List<CLRObject> mappedObjs;
                    if (!serializeObjs.TryGetValue(inst, out item))
                    {
                        // First wrapper seen for this instance.
                        item = new CLRMappedItem(inst)
                        {
                            Handles = new List<IntPtr>()
                        };
                        serializeObjs.Add(item);

                        Debug.Assert(!wrappers.ContainsKey(inst));
                        mappedObjs = new List<CLRObject>();
                        wrappers.Add(inst, mappedObjs);
                    }
                    else
                    {
                        mappedObjs = wrappers[inst];
                    }
                    item.Handles.Add(clrObj.pyHandle);
                    mappedObjs.Add(clrObj);
                    break;
                default:
                    break;
            }
        }

        // Give the external storer (if any) first crack at the wrappers.
        var wrapperStorage = new RuntimeDataStorage();
        WrappersStorer?.Store(serializeObjs, wrapperStorage);

        // Anything the storer did not handle is stashed internally, provided
        // the instance is serializable; otherwise it is silently dropped.
        var internalStores = new List<CLRObject>();
        foreach (var item in serializeObjs)
        {
            if (!item.Stored)
            {
                if (!CheckSerializable(item.Instance))
                {
                    continue;
                }
                internalStores.AddRange(wrappers[item.Instance]);
            }
            foreach (var clrObj in wrappers[item.Instance])
            {
                XIncref(clrObj.pyHandle);
                var context = new InterDomainContext();
                contexts[clrObj.pyHandle] = context;
                clrObj.Save(context);
            }
        }
        storage.AddValue("internalStores", internalStores);
        storage.AddValue("extensions", extensionObjs);
        storage.AddValue("wrappers", wrapperStorage);
        storage.AddValue("contexts", contexts);
    }

    // Reloads extension objects and wrappers; returns each restored object
    // paired with its context so post-actions can run after all subsystems
    // are back up.
    private static Dictionary<ManagedType, InterDomainContext> RestoreRuntimeDataObjects(RuntimeDataStorage storage)
    {
        var extensions = storage.GetValue<List<ManagedType>>("extensions");
        var internalStores = storage.GetValue<List<CLRObject>>("internalStores");
        var contexts = storage.GetValue <Dictionary<IntPtr, InterDomainContext>>("contexts");
        var storedObjs = new Dictionary<ManagedType, InterDomainContext>();
        foreach (var obj in Enumerable.Union(extensions, internalStores))
        {
            var context = contexts[obj.pyHandle];
            obj.Load(context);
            storedObjs.Add(obj, context);
        }
        if (WrappersStorer != null)
        {
            // Externally stored instances: rebuild one CLRObject per saved handle.
            var wrapperStorage = storage.GetStorage("wrappers");
            var handle2Obj = WrappersStorer.Restore(wrapperStorage);
            foreach (var item in handle2Obj)
            {
                object obj = item.Instance;
                foreach (var handle in item.Handles)
                {
                    var context = contexts[handle];
                    var co = CLRObject.Restore(obj, handle, context);
                    storedObjs.Add(co, context);
                }
            }
        }
        return storedObjs;
    }

    // Records every entry of sys.modules whose module is a managed type,
    // taking a reference on both name and module (released on restore).
    private static void SaveRuntimeDataModules(RuntimeDataStorage storage)
    {
        var pyModules = PyImport_GetModuleDict();
        var items = PyDict_Items(pyModules);
        long length = PyList_Size(items);
        var modules = new Dictionary<IntPtr, IntPtr>(); ;
        for (long i = 0; i < length; i++)
        {
            var item = PyList_GetItem(items, i);
            // Each item is a (name, module) tuple from the dict.
            var name = PyTuple_GetItem(item.DangerousGetAddress(), 0);
            var module = PyTuple_GetItem(item.DangerousGetAddress(), 1);
            if (ManagedType.IsManagedType(module))
            {
                XIncref(name);
                XIncref(module);
                modules.Add(name, module);
            }
        }
        items.Dispose();
        storage.AddValue("modules", modules);
    }

    // Re-inserts saved managed modules into sys.modules and releases the
    // references taken by SaveRuntimeDataModules.
    private static void RestoreRuntimeDataModules(RuntimeDataStorage storage)
    {
        var modules = storage.GetValue<Dictionary<IntPtr, IntPtr>>("modules");
        var pyMoudles = PyImport_GetModuleDict();
        foreach (var item in modules)
        {
            var moduleName = new BorrowedReference(item.Key);
            var module = new BorrowedReference(item.Value);
            int res = PyDict_SetItem(pyMoudles, moduleName, module);
            PythonException.ThrowIfIsNotZero(res);
            XDecref(item.Key);
            XDecref(item.Value);
        }
        modules.Clear();
    }

    // Instantiates the configured formatter, falling back to BinaryFormatter.
    // NOTE(review): BinaryFormatter is insecure/deprecated in modern .NET;
    // callers can supply a safer IFormatter via FormatterType.
    private static IFormatter CreateFormatter()
    {
        return FormatterType != null ?
            (IFormatter)Activator.CreateInstance(FormatterType)
            : new BinaryFormatter();
    }
}
/// <summary>
/// Serializable bag of values used to carry state across a runtime reload.
/// Holds both named values and a LIFO stack of anonymous values; both
/// containers are created lazily.
/// </summary>
[Serializable]
public class RuntimeDataStorage
{
    // Lazily-created LIFO store for PushValue/PopValue.
    private Stack _stack;
    // Lazily-created map for AddValue/GetValue.
    private Dictionary<string, object> _namedValues;

    /// <summary>Stores <paramref name="value"/> under <paramref name="name"/> and returns it.</summary>
    public T AddValue<T>(string name, T value)
    {
        Dictionary<string, object> map = _namedValues;
        if (map == null)
        {
            map = new Dictionary<string, object>();
            _namedValues = map;
        }
        map.Add(name, value);
        return value;
    }

    /// <summary>Returns the value stored under <paramref name="name"/>.</summary>
    public object GetValue(string name) => _namedValues[name];

    /// <summary>Returns the value stored under <paramref name="name"/>, cast to <typeparamref name="T"/>.</summary>
    public T GetValue<T>(string name) => (T)GetValue(name);

    /// <summary>Fetches the named value into <paramref name="value"/> and also returns it.</summary>
    public T GetValue<T>(string name, out T value)
    {
        value = GetValue<T>(name);
        return value;
    }

    /// <summary>Returns the nested storage stored under <paramref name="name"/>.</summary>
    public RuntimeDataStorage GetStorage(string name) => GetValue<RuntimeDataStorage>(name);

    /// <summary>Pushes <paramref name="value"/> onto the stack and returns it.</summary>
    public T PushValue<T>(T value)
    {
        Stack stack = _stack;
        if (stack == null)
        {
            stack = new Stack();
            _stack = stack;
        }
        stack.Push(value);
        return value;
    }

    /// <summary>Pops and returns the most recently pushed value.</summary>
    public object PopValue() => _stack.Pop();

    /// <summary>Pops the most recently pushed value, cast to <typeparamref name="T"/>.</summary>
    public T PopValue<T>() => (T)PopValue();

    /// <summary>Pops the most recently pushed value into <paramref name="value"/> and returns it.</summary>
    public T PopValue<T>(out T value) => value = (T)PopValue();
}
/// <summary>
/// Per-object state carried across a runtime reload, plus a list of actions
/// to run once restoration has completed.
/// </summary>
[Serializable]
class InterDomainContext
{
    private RuntimeDataStorage _storage;

    /// <summary>Lazily-created storage bag for this context.</summary>
    public RuntimeDataStorage Storage
    {
        get
        {
            if (_storage == null)
            {
                _storage = new RuntimeDataStorage();
            }
            return _storage;
        }
    }

    /// <summary>
    /// Actions after loaded. Not serialized; recreated lazily on first use.
    /// </summary>
    [NonSerialized]
    private List<Action> _postActions;

    public List<Action> PostActions
    {
        get
        {
            if (_postActions == null)
            {
                _postActions = new List<Action>();
            }
            return _postActions;
        }
    }

    /// <summary>Queues an action to run after the restore pass.</summary>
    public void AddPostAction(Action action) => PostActions.Add(action);

    /// <summary>Runs every queued post-load action; no-op when none were registered.</summary>
    public void ExecutePostActions()
    {
        List<Action> actions = _postActions;
        if (actions == null)
        {
            return;
        }
        for (int i = 0; i < actions.Count; i++)
        {
            actions[i]();
        }
    }
}
/// <summary>
/// Pairs a wrapped CLR instance with all Python handles that refer to it.
/// </summary>
public class CLRMappedItem
{
    /// <summary>The wrapped CLR object.</summary>
    public object Instance { get; private set; }

    /// <summary>Python handles that wrap <see cref="Instance"/>.</summary>
    public IList<IntPtr> Handles { get; set; }

    /// <summary>Set when a storer has persisted this item.</summary>
    public bool Stored { get; set; }

    public CLRMappedItem(object instance) => Instance = instance;
}
/// <summary>
/// Pluggable store used to persist and restore CLR object wrappers across a
/// runtime reload (assigned via <see cref="RuntimeData.WrappersStorer"/>).
/// </summary>
public interface ICLRObjectStorer
{
    /// <summary>Persists the given wrappers into <paramref name="storage"/>; returns the items that were stored.</summary>
    ICollection<CLRMappedItem> Store(CLRWrapperCollection wrappers, RuntimeDataStorage storage);

    /// <summary>Restores previously stored wrappers from <paramref name="storage"/>.</summary>
    CLRWrapperCollection Restore(RuntimeDataStorage storage);
}
/// <summary>
/// Collection of <see cref="CLRMappedItem"/>s keyed by the wrapped CLR instance.
/// </summary>
public class CLRWrapperCollection : KeyedCollection<object, CLRMappedItem>
{
    /// <summary>
    /// Tries to find the mapped item for <paramref name="key"/>. Returns false
    /// when the key is absent or the backing dictionary has not been created
    /// yet (KeyedCollection builds it lazily).
    /// </summary>
    public bool TryGetValue(object key, out CLRMappedItem value)
    {
        var lookup = Dictionary;
        if (lookup != null)
        {
            return lookup.TryGetValue(key, out value);
        }
        value = null;
        return false;
    }

    /// <summary>The wrapped CLR instance serves as the item's key.</summary>
    protected override object GetKeyForItem(CLRMappedItem item) => item.Instance;
}
}
| |
namespace Elasticsearch.Client
{
    ///<summary><see href="https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html"/></summary>
    // Fluent query-string parameter builder for the _delete_by_query API.
    // Boolean values are emitted as the literals "true"/"false" rather than
    // via ToString().ToLower(), which is culture-sensitive and allocates.
    public class DeleteByQueryParameters : Parameters
    {
        ///<summary>The analyzer to use for the query string</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters analyzer(string value)
        {
            SetValue("analyzer", value);
            return this;
        }

        ///<summary>Specify whether wildcard and prefix queries should be analyzed (default: false)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters analyze_wildcard(bool value)
        {
            SetValue("analyze_wildcard", value ? "true" : "false");
            return this;
        }

        ///<summary>The default operator for query string query (AND or OR)</summary>
        ///<param name="value"><para>Options: AND,OR</para><para>Default: OR</para></param>
        public virtual DeleteByQueryParameters default_operator(string value)
        {
            SetValue("default_operator", value);
            return this;
        }

        ///<summary>The field to use as default where no field prefix is given in the query string</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters df(string value)
        {
            SetValue("df", value);
            return this;
        }

        ///<summary>Starting offset (default: 0)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters from(long value)
        {
            SetValue("from", value);
            return this;
        }

        ///<summary>Whether specified concrete indices should be ignored when unavailable (missing or closed)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters ignore_unavailable(bool value)
        {
            SetValue("ignore_unavailable", value ? "true" : "false");
            return this;
        }

        ///<summary>Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters allow_no_indices(bool value)
        {
            SetValue("allow_no_indices", value ? "true" : "false");
            return this;
        }

        ///<summary>What to do when the delete-by-query hits version conflicts?</summary>
        ///<param name="value"><para>Options: abort,proceed</para><para>Default: abort</para></param>
        public virtual DeleteByQueryParameters conflicts(string value)
        {
            SetValue("conflicts", value);
            return this;
        }

        ///<summary>Whether to expand wildcard expression to concrete indices that are open, closed or both.</summary>
        ///<param name="value"><para>Options: open,closed,none,all</para><para>Default: open</para></param>
        public virtual DeleteByQueryParameters expand_wildcards(string value)
        {
            SetValue("expand_wildcards", value);
            return this;
        }

        ///<summary>Specify whether format-based query failures (such as providing text to a numeric field) should be ignored</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters lenient(bool value)
        {
            SetValue("lenient", value ? "true" : "false");
            return this;
        }

        ///<summary>Specify the node or shard the operation should be performed on (default: random)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters preference(string value)
        {
            SetValue("preference", value);
            return this;
        }

        ///<summary>Query in the Lucene query string syntax</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters q(string value)
        {
            SetValue("q", value);
            return this;
        }

        ///<summary>A comma-separated list of specific routing values</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters routing(string value)
        {
            SetValue("routing", value);
            return this;
        }

        ///<summary>Specify how long a consistent view of the index should be maintained for scrolled search</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters scroll(string value)
        {
            SetValue("scroll", value);
            return this;
        }

        ///<summary>Search operation type</summary>
        ///<param name="value"><para>Options: query_then_fetch,dfs_query_then_fetch</para></param>
        public virtual DeleteByQueryParameters search_type(string value)
        {
            SetValue("search_type", value);
            return this;
        }

        ///<summary>Explicit timeout for each search request. Defaults to no timeout.</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters search_timeout(string value)
        {
            SetValue("search_timeout", value);
            return this;
        }

        ///<summary>Number of hits to return (default: 10)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters size(long value)
        {
            SetValue("size", value);
            return this;
        }

        ///<summary>A comma-separated list of &lt;field&gt;:&lt;direction&gt; pairs</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters sort(string value)
        {
            SetValue("sort", value);
            return this;
        }

        ///<summary>True or false to return the _source field or not, or a list of fields to return</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters _source(string value)
        {
            SetValue("_source", value);
            return this;
        }

        ///<summary>A list of fields to exclude from the returned _source field</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters _source_exclude(string value)
        {
            SetValue("_source_exclude", value);
            return this;
        }

        ///<summary>A list of fields to extract and return from the _source field</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters _source_include(string value)
        {
            SetValue("_source_include", value);
            return this;
        }

        ///<summary>The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early.</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters terminate_after(long value)
        {
            SetValue("terminate_after", value);
            return this;
        }

        ///<summary>Specific 'tag' of the request for logging and statistical purposes</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters stats(string value)
        {
            SetValue("stats", value);
            return this;
        }

        ///<summary>Specify whether to return document version as part of a hit</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters version(bool value)
        {
            SetValue("version", value ? "true" : "false");
            return this;
        }

        ///<summary>Specify if request cache should be used for this request or not, defaults to index level setting</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters request_cache(bool value)
        {
            SetValue("request_cache", value ? "true" : "false");
            return this;
        }

        ///<summary>Should the effected indexes be refreshed?</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters refresh(bool value)
        {
            SetValue("refresh", value ? "true" : "false");
            return this;
        }

        ///<summary>Time each individual bulk request should wait for shards that are unavailable.</summary>
        ///<param name="value"><para>Default: 1m</para></param>
        public virtual DeleteByQueryParameters timeout(string value)
        {
            SetValue("timeout", value);
            return this;
        }

        ///<summary>Sets the number of shard copies that must be active before proceeding with the delete by query operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters wait_for_active_shards(string value)
        {
            SetValue("wait_for_active_shards", value);
            return this;
        }

        ///<summary>Size on the scroll request powering the update_by_query</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters scroll_size(long value)
        {
            SetValue("scroll_size", value);
            return this;
        }

        ///<summary>Should the request should block until the delete-by-query is complete.</summary>
        ///<param name="value"><para>Default: True</para></param>
        public virtual DeleteByQueryParameters wait_for_completion(bool value)
        {
            SetValue("wait_for_completion", value ? "true" : "false");
            return this;
        }

        ///<summary>The throttle for this request in sub-requests per second. -1 means no throttle.</summary>
        ///<param name="value"><para>Default: 0</para></param>
        public virtual DeleteByQueryParameters requests_per_second(long value)
        {
            SetValue("requests_per_second", value);
            return this;
        }

        ///<summary>The number of slices this task should be divided into. Defaults to 1 meaning the task isn't sliced into subtasks.</summary>
        ///<param name="value"><para>Default: 1</para></param>
        public virtual DeleteByQueryParameters slices(long value)
        {
            SetValue("slices", value);
            return this;
        }

        ///<summary>Pretty format the returned JSON response.</summary>
        ///<param name="value"><para>Default: False</para></param>
        public virtual DeleteByQueryParameters pretty(bool value)
        {
            SetValue("pretty", value ? "true" : "false");
            return this;
        }

        ///<summary>Return human readable values for statistics.</summary>
        ///<param name="value"><para>Default: True</para></param>
        public virtual DeleteByQueryParameters human(bool value)
        {
            SetValue("human", value ? "true" : "false");
            return this;
        }

        ///<summary>Include the stack trace of returned errors.</summary>
        ///<param name="value"><para>Default: False</para></param>
        public virtual DeleteByQueryParameters error_trace(bool value)
        {
            SetValue("error_trace", value ? "true" : "false");
            return this;
        }

        ///<summary>The URL-encoded request definition. Useful for libraries that do not accept a request body for non-POST requests.</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters source(string value)
        {
            SetValue("source", value);
            return this;
        }

        ///<summary>A comma-separated list of filters used to reduce the response.</summary>
        ///<param name="value"></param>
        public virtual DeleteByQueryParameters filter_path(string value)
        {
            SetValue("filter_path", value);
            return this;
        }
    }
}
| |
using System;
using System.Collections;
using NBitcoin.BouncyCastle.Crypto;
using NBitcoin.BouncyCastle.Crypto.Digests;
using NBitcoin.BouncyCastle.Crypto.Parameters;
using NBitcoin.BouncyCastle.Utilities;
namespace NBitcoin.BouncyCastle.Crypto.Signers
{
    /// <summary> ISO9796-2 - mechanism using a hash function with recovery (scheme 1)</summary>
    public class Iso9796d2Signer : ISignerWithRecovery
    {
        /// <summary>
        /// Return a reference to the recoveredMessage message.
        /// </summary>
        /// <returns>The full/partial recoveredMessage message.</returns>
        /// <seealso cref="ISignerWithRecovery.GetRecoveredMessage"/>
        public byte[] GetRecoveredMessage()
        {
            return recoveredMessage;
        }

        // ISO 9796-2 trailer constants. TrailerImplicit (0xBC) identifies the
        // digest implicitly; the two-byte trailers name a specific hash.
        public const int TrailerImplicit = 0xBC;
        public const int TrailerRipeMD160 = 0x31CC;
        public const int TrailerRipeMD128 = 0x32CC;
        public const int TrailerSha1 = 0x33CC;
        public const int TrailerSha256 = 0x34CC;
        public const int TrailerSha512 = 0x35CC;
        public const int TrailerSha384 = 0x36CC;
        public const int TrailerWhirlpool = 0x37CC;

        // Maps digest algorithm names to their explicit trailer values.
        private static IDictionary trailerMap = Platform.CreateHashtable();

        static Iso9796d2Signer()
        {
            trailerMap.Add("RIPEMD128", TrailerRipeMD128);
            trailerMap.Add("RIPEMD160", TrailerRipeMD160);

            trailerMap.Add("SHA-1", TrailerSha1);
            trailerMap.Add("SHA-256", TrailerSha256);
            trailerMap.Add("SHA-384", TrailerSha384);
            trailerMap.Add("SHA-512", TrailerSha512);

            trailerMap.Add("Whirlpool", TrailerWhirlpool);
        }

        private IDigest digest;
        private IAsymmetricBlockCipher cipher;

        private int trailer;            // selected trailer value (implicit or per-digest)
        private int keyBits;            // modulus size in bits, set in Init
        private byte[] block;           // working buffer sized to the modulus
        private byte[] mBuf;            // buffer holding the recoverable part of the message
        private int messageLength;      // total message length fed in so far
        private bool fullMessage;       // true when the whole message is recoverable
        private byte[] recoveredMessage;

        private byte[] preSig;          // signature passed to UpdateWithRecoveredMessage
        private byte[] preBlock;        // decrypted block corresponding to preSig

        /// <summary>
        /// Generate a signer for the with either implicit or explicit trailers
        /// for ISO9796-2.
        /// </summary>
        /// <param name="cipher">base cipher to use for signature creation/verification</param>
        /// <param name="digest">digest to use.</param>
        /// <param name="isImplicit">whether or not the trailer is implicit or gives the hash.</param>
        /// <exception cref="ArgumentException">if an explicit trailer is requested for an unsupported digest.</exception>
        public Iso9796d2Signer(
            IAsymmetricBlockCipher cipher,
            IDigest digest,
            bool isImplicit)
        {
            this.cipher = cipher;
            this.digest = digest;

            if (isImplicit)
            {
                trailer = TrailerImplicit;
            }
            else
            {
                string digestName = digest.AlgorithmName;

                if (trailerMap.Contains(digestName))
                {
                    trailer = (int)trailerMap[digest.AlgorithmName];
                }
                else
                {
                    throw new System.ArgumentException("no valid trailer for digest");
                }
            }
        }

        /// <summary> Constructor for a signer with an explicit digest trailer.
        ///
        /// </summary>
        /// <param name="cipher">cipher to use.
        /// </param>
        /// <param name="digest">digest to sign with.
        /// </param>
        public Iso9796d2Signer(IAsymmetricBlockCipher cipher, IDigest digest)
            : this(cipher, digest, false)
        {
        }

        /// <summary>Algorithm name in the form "&lt;digest&gt;withISO9796-2S1".</summary>
        public string AlgorithmName
        {
            get { return digest.AlgorithmName + "with" + "ISO9796-2S1"; }
        }

        /// <summary>
        /// Initialize for signing or verification with an RSA key; sizes the
        /// working buffers from the modulus length and resets all state.
        /// </summary>
        public virtual void Init(bool forSigning, ICipherParameters parameters)
        {
            RsaKeyParameters kParam = (RsaKeyParameters) parameters;

            cipher.Init(forSigning, kParam);

            keyBits = kParam.Modulus.BitLength;

            block = new byte[(keyBits + 7) / 8];

            // Implicit trailer needs 1 trailer byte + 1 header byte less than
            // the 2-byte explicit trailer.
            if (trailer == TrailerImplicit)
            {
                mBuf = new byte[block.Length - digest.GetDigestSize() - 2];
            }
            else
            {
                mBuf = new byte[block.Length - digest.GetDigestSize() - 3];
            }

            Reset();
        }

        /// <summary> compare two byte arrays - constant time.</summary>
        // NOTE(review): only the byte loop below is constant-time; the length
        // mismatch checks return early.
        private bool IsSameAs(byte[] a, byte[] b)
        {
            int checkLen;
            if (messageLength > mBuf.Length)
            {
                // Message overflowed mBuf: only the recoverable prefix is comparable.
                if (mBuf.Length > b.Length)
                {
                    return false;
                }

                checkLen = mBuf.Length;
            }
            else
            {
                if (messageLength != b.Length)
                {
                    return false;
                }

                checkLen = b.Length;
            }

            bool isOkay = true;

            for (int i = 0; i != checkLen; i++)
            {
                if (a[i] != b[i])
                {
                    isOkay = false;
                }
            }

            return isOkay;
        }

        /// <summary> clear possible sensitive data</summary>
        private void ClearBlock(
            byte[] block)
        {
            Array.Clear(block, 0, block.Length);
        }

        /// <summary>
        /// Decrypts the signature up-front so the recoverable part of the
        /// message can be read before verification; the signature and block
        /// are cached for the subsequent VerifySignature call.
        /// </summary>
        /// <exception cref="InvalidCipherTextException">if the block structure is malformed.</exception>
        public virtual void UpdateWithRecoveredMessage(
            byte[] signature)
        {
            byte[] block = cipher.ProcessBlock(signature, 0, signature.Length);

            // Top two bits of the first byte must be 01.
            if (((block[0] & 0xC0) ^ 0x40) != 0)
                throw new InvalidCipherTextException("malformed signature");

            // Low nibble of the last byte must be 0xC.
            if (((block[block.Length - 1] & 0xF) ^ 0xC) != 0)
                throw new InvalidCipherTextException("malformed signature");

            int delta = 0;

            if (((block[block.Length - 1] & 0xFF) ^ 0xBC) == 0)
            {
                // Implicit trailer: one byte.
                delta = 1;
            }
            else
            {
                // Explicit trailer: two bytes naming the digest; must match ours.
                int sigTrail = ((block[block.Length - 2] & 0xFF) << 8) | (block[block.Length - 1] & 0xFF);

                string digestName = digest.AlgorithmName;
                if (!trailerMap.Contains(digestName))
                    throw new ArgumentException("unrecognised hash in signature");
                if (sigTrail != (int)trailerMap[digestName])
                    throw new InvalidOperationException("signer initialised with wrong digest for trailer " + sigTrail);

                delta = 2;
            }

            //
            // find out how much padding we've got
            //
            int mStart = 0;
            for (mStart = 0; mStart != block.Length; mStart++)
            {
                // Padding ends at the first byte whose low nibble is 0xA.
                if (((block[mStart] & 0x0f) ^ 0x0a) == 0)
                    break;
            }
            mStart++;

            int off = block.Length - delta - digest.GetDigestSize();

            //
            // there must be at least one byte of message string
            //
            if ((off - mStart) <= 0)
                throw new InvalidCipherTextException("malformed block");

            //
            // if we contain the whole message as well, check the hash of that.
            //
            if ((block[0] & 0x20) == 0)
            {
                fullMessage = true;

                recoveredMessage = new byte[off - mStart];
                Array.Copy(block, mStart, recoveredMessage, 0, recoveredMessage.Length);
            }
            else
            {
                fullMessage = false;

                recoveredMessage = new byte[off - mStart];
                Array.Copy(block, mStart, recoveredMessage, 0, recoveredMessage.Length);
            }

            preSig = signature;
            preBlock = block;

            // Seed the digest and message buffer with the recovered prefix.
            digest.BlockUpdate(recoveredMessage, 0, recoveredMessage.Length);
            messageLength = recoveredMessage.Length;
            recoveredMessage.CopyTo(mBuf, 0);
        }

        /// <summary> update the internal digest with the byte b</summary>
        public void Update(
            byte input)
        {
            digest.Update(input);

            // Keep the recoverable prefix of the message in mBuf.
            if (messageLength < mBuf.Length)
            {
                mBuf[messageLength] = input;
            }

            messageLength++;
        }

        /// <summary> update the internal digest with the byte array in</summary>
        public void BlockUpdate(
            byte[] input,
            int inOff,
            int length)
        {
            // Route bytes through Update until mBuf is full, then feed the
            // remainder straight to the digest.
            while (length > 0 && messageLength < mBuf.Length)
            {
                //for (int i = 0; i < length && (i + messageLength) < mBuf.Length; i++)
                //{
                //    mBuf[messageLength + i] = input[inOff + i];
                //}
                this.Update(input[inOff]);
                inOff++;
                length--;
            }

            digest.BlockUpdate(input, inOff, length);
            messageLength += length;
        }

        /// <summary> reset the internal state</summary>
        public virtual void Reset()
        {
            digest.Reset();
            messageLength = 0;
            ClearBlock(mBuf);

            if (recoveredMessage != null)
            {
                ClearBlock(recoveredMessage);
            }

            recoveredMessage = null;
            fullMessage = false;

            if (preSig != null)
            {
                preSig = null;
                ClearBlock(preBlock);
                preBlock = null;
            }
        }

        /// <summary> Generate a signature for the loaded message using the key we were
        /// initialised with.
        /// </summary>
        public virtual byte[] GenerateSignature()
        {
            int digSize = digest.GetDigestSize();

            int t = 0;          // trailer size in bits
            int delta = 0;      // offset of the digest within the block

            if (trailer == TrailerImplicit)
            {
                t = 8;
                delta = block.Length - digSize - 1;
                digest.DoFinal(block, delta);
                block[block.Length - 1] = (byte) TrailerImplicit;
            }
            else
            {
                t = 16;
                delta = block.Length - digSize - 2;
                digest.DoFinal(block, delta);
                block[block.Length - 2] = (byte) ((uint)trailer >> 8);
                block[block.Length - 1] = (byte) trailer;
            }

            byte header = 0;
            // x > 0 means the message does not fully fit: partial recovery.
            int x = (digSize + messageLength) * 8 + t + 4 - keyBits;

            if (x > 0)
            {
                int mR = messageLength - ((x + 7) / 8);
                // 0x60: partial message recovery.
                header = (byte) (0x60);

                delta -= mR;

                Array.Copy(mBuf, 0, block, delta, mR);
            }
            else
            {
                // 0x40: full message recovery.
                header = (byte) (0x40);
                delta -= messageLength;

                Array.Copy(mBuf, 0, block, delta, messageLength);
            }

            if ((delta - 1) > 0)
            {
                // Fill the gap with 0xBB padding, terminated by 0xBA / leading 0x0B.
                for (int i = delta - 1; i != 0; i--)
                {
                    block[i] = (byte) 0xbb;
                }
                block[delta - 1] ^= (byte) 0x01;
                block[0] = (byte) 0x0b;
                block[0] |= header;
            }
            else
            {
                // No padding needed: low nibble 0xA marks "message starts here".
                block[0] = (byte) 0x0a;
                block[0] |= header;
            }

            byte[] b = cipher.ProcessBlock(block, 0, block.Length);

            ClearBlock(mBuf);
            ClearBlock(block);

            return b;
        }

        /// <summary> return true if the signature represents a ISO9796-2 signature
        /// for the passed in message.
        /// </summary>
        public virtual bool VerifySignature(byte[] signature)
        {
            byte[] block;

            if (preSig == null)
            {
                try
                {
                    block = cipher.ProcessBlock(signature, 0, signature.Length);
                }
                catch (Exception)
                {
                    return false;
                }
            }
            else
            {
                // UpdateWithRecoveredMessage already decrypted this signature;
                // it must be the same one, and its cached block is consumed here.
                if (!Arrays.AreEqual(preSig, signature))
                    throw new InvalidOperationException("updateWithRecoveredMessage called on different signature");

                block = preBlock;

                preSig = null;
                preBlock = null;
            }

            // Structural checks mirror UpdateWithRecoveredMessage.
            if (((block[0] & 0xC0) ^ 0x40) != 0)
                return ReturnFalse(block);

            if (((block[block.Length - 1] & 0xF) ^ 0xC) != 0)
                return ReturnFalse(block);

            int delta = 0;

            if (((block[block.Length - 1] & 0xFF) ^ 0xBC) == 0)
            {
                delta = 1;
            }
            else
            {
                int sigTrail = ((block[block.Length - 2] & 0xFF) << 8) | (block[block.Length - 1] & 0xFF);

                string digestName = digest.AlgorithmName;
                if (!trailerMap.Contains(digestName))
                    throw new ArgumentException("unrecognised hash in signature");
                if (sigTrail != (int)trailerMap[digestName])
                    throw new InvalidOperationException("signer initialised with wrong digest for trailer " + sigTrail);

                delta = 2;
            }

            //
            // find out how much padding we've got
            //
            int mStart = 0;
            for (; mStart != block.Length; mStart++)
            {
                if (((block[mStart] & 0x0f) ^ 0x0a) == 0)
                {
                    break;
                }
            }

            mStart++;

            //
            // check the hashes
            //
            byte[] hash = new byte[digest.GetDigestSize()];
            int off = block.Length - delta - hash.Length;

            //
            // there must be at least one byte of message string
            //
            if ((off - mStart) <= 0)
            {
                return ReturnFalse(block);
            }

            //
            // if we contain the whole message as well, check the hash of that.
            //
            if ((block[0] & 0x20) == 0)
            {
                fullMessage = true;

                // check right number of bytes passed in.
                if (messageLength > off - mStart)
                {
                    return ReturnFalse(block);
                }

                // Full recovery: the hash is over the recovered message itself.
                digest.Reset();
                digest.BlockUpdate(block, mStart, off - mStart);
                digest.DoFinal(hash, 0);

                bool isOkay = true;

                // XOR-compare against the hash embedded in the block; scans
                // every byte rather than exiting early.
                for (int i = 0; i != hash.Length; i++)
                {
                    block[off + i] ^= hash[i];
                    if (block[off + i] != 0)
                    {
                        isOkay = false;
                    }
                }

                if (!isOkay)
                {
                    return ReturnFalse(block);
                }

                recoveredMessage = new byte[off - mStart];
                Array.Copy(block, mStart, recoveredMessage, 0, recoveredMessage.Length);
            }
            else
            {
                fullMessage = false;

                // Partial recovery: the hash is over the full streamed message.
                digest.DoFinal(hash, 0);

                bool isOkay = true;

                for (int i = 0; i != hash.Length; i++)
                {
                    block[off + i] ^= hash[i];
                    if (block[off + i] != 0)
                    {
                        isOkay = false;
                    }
                }

                if (!isOkay)
                {
                    return ReturnFalse(block);
                }

                recoveredMessage = new byte[off - mStart];
                Array.Copy(block, mStart, recoveredMessage, 0, recoveredMessage.Length);
            }

            //
            // if they've input a message check what we've recovered against
            // what was input.
            //
            if (messageLength != 0)
            {
                if (!IsSameAs(mBuf, recoveredMessage))
                {
                    return ReturnFalse(block);
                }
            }

            ClearBlock(mBuf);
            ClearBlock(block);

            return true;
        }

        // Common failure path: scrub sensitive buffers, then report failure.
        private bool ReturnFalse(byte[] block)
        {
            ClearBlock(mBuf);
            ClearBlock(block);

            return false;
        }

        /// <summary>
        /// Return true if the full message was recoveredMessage.
        /// </summary>
        /// <returns> true on full message recovery, false otherwise.</returns>
        /// <seealso cref="ISignerWithRecovery.HasFullMessage"/>
        public virtual bool HasFullMessage()
        {
            return fullMessage;
        }
    }
}
| |
using System;
using System.Collections;
using System.Diagnostics;
using System.Linq;
namespace BloomFilter
{
public class Filter<T>
{
/// <summary>
/// A function that can be used to hash input.
/// </summary>
/// <param name="input">The values to be hashed.</param>
/// <returns>The resulting hash code.</returns>
// Delegate type accepted by the constructor overloads that take a custom hash function.
public delegate int HashFunction(T input);
/// <summary>
/// Creates a new Bloom filter, specifying an error rate of 1/capacity, using the optimal size for the underlying data structure based on the desired capacity and error rate, as well as the optimal number of hash functions.
/// A secondary hash function will be provided for you if your type T is either string or int. Otherwise an exception will be thrown. If you are not using these types please use the overload that supports custom hash functions.
/// </summary>
/// <param name="capacity">The anticipated number of items to be added to the filter. More than this number of items can be added, but the error rate will exceed what is expected.</param>
// Chains to the (capacity, HashFunction) overload with a null hash function,
// which in turn selects a default error rate and hash for supported types.
public Filter(int capacity) : this(capacity, null) { }
/// <summary>
/// Creates a new Bloom filter, using the optimal size for the underlying data structure based on the desired capacity and error rate, as well as the optimal number of hash functions.
/// A secondary hash function will be provided for you if your type T is either string or int. Otherwise an exception will be thrown. If you are not using these types please use the overload that supports custom hash functions.
/// </summary>
/// <param name="capacity">The anticipated number of items to be added to the filter. More than this number of items can be added, but the error rate will exceed what is expected.</param>
/// <param name="errorRate">The acceptable false-positive rate (e.g., 0.01F = 1%)</param>
// FIX: errorRate was declared as int, making every meaningful rate (a fraction
// in (0, 1), e.g. 0.01F as its own doc states) impossible to pass. Widened to
// float; existing callers passing an int literal still compile via the
// implicit int-to-float conversion.
public Filter(int capacity, float errorRate) : this(capacity, errorRate, null) { }
/// <summary>
/// Creates a new Bloom filter, specifying an error rate of 1/capacity, using the optimal size for the underlying data structure based on the desired capacity and error rate, as well as the optimal number of hash functions.
/// </summary>
/// <param name="capacity">The anticipated number of items to be added to the filter. More than this number of items can be added, but the error rate will exceed what is expected.</param>
/// <param name="hashFunction">The function to hash the input values. Do not use GetHashCode(). If it is null, and T is string or int a hash function will be provided for you.</param>
public Filter(int capacity, HashFunction hashFunction) : this(capacity, BestErrorRate(capacity), hashFunction) { }
/// <summary>
/// Creates a new Bloom filter, using the optimal size for the underlying data structure based on the desired capacity and error rate, as well as the optimal number of hash functions.
/// </summary>
/// <param name="capacity">The anticipated number of items to be added to the filter. More than this number of items can be added, but the error rate will exceed what is expected.</param>
/// <param name="errorRate">The accepable false-positive rate (e.g., 0.01F = 1%)</param>
/// <param name="hashFunction">The function to hash the input values. Do not use GetHashCode(). If it is null, and T is string or int a hash function will be provided for you.</param>
public Filter(int capacity, float errorRate, HashFunction hashFunction) : this(capacity, errorRate, hashFunction, BestM(capacity, errorRate), BestK(capacity, errorRate)) { }
/// <summary>
/// Creates a new Bloom filter.
/// </summary>
/// <param name="capacity">The anticipated number of items to be added to the filter. More than this number of items can be added, but the error rate will exceed what is expected.</param>
/// <param name="errorRate">The accepable false-positive rate (e.g., 0.01F = 1%)</param>
/// <param name="hashFunction">The function to hash the input values. Do not use GetHashCode(). If it is null, and T is string or int a hash function will be provided for you.</param>
/// <param name="m">The number of elements in the BitArray.</param>
/// <param name="k">The number of hash functions to use.</param>
public Filter(int capacity, float errorRate, HashFunction hashFunction, int m, int k)
{
// validate the params are in range
if (capacity < 1)
throw new ArgumentOutOfRangeException(nameof(capacity), capacity, "capacity must be > 0");
if (errorRate >= 1 || errorRate <= 0)
throw new ArgumentOutOfRangeException(nameof(errorRate), errorRate, $"errorRate must be between 0 and 1, exclusive. Was {errorRate}");
if (m < 1) // from overflow in bestM calculation
throw new ArgumentOutOfRangeException($"The provided capacity and errorRate values would result in an array of length > int.MaxValue. Please reduce either of these values. Capacity: {capacity}, Error rate: {errorRate}");
// set the secondary hash function
if (hashFunction == null)
{
if (typeof(T) == typeof(String))
{
_secondaryHash = HashString;
}
else if (typeof(T) == typeof(int))
{
_secondaryHash = HashInt32;
}
else
{
throw new ArgumentNullException(nameof(hashFunction), "Please provide a hash function for your type T, when T is not a string or int.");
}
}
else
_secondaryHash = hashFunction;
_hashFunctionCount = k;
_hashBits = new BitArray(m);
}
/// <summary>
/// Adds a new item to the filter. It cannot be removed.
/// </summary>
/// <param name="item"></param>
public void Add(T item)
{
// start flipping bits for each hash of item
for (var i = 0; i < _hashFunctionCount; i++)
{
var hash = ComputeHash(item.GetHashCode(), _secondaryHash(item), i);
_hashBits[hash] = true;
}
}
/// <summary>
/// Checks for the existance of the item in the filter for a given probability.
/// </summary>
/// <param name="item"></param>
/// <returns></returns>
public bool Contains(T item)
{
for (var i = 0; i < _hashFunctionCount; i++)
{
var hash = ComputeHash(item.GetHashCode(), _secondaryHash(item), i);
if (_hashBits[hash] == false)
return false;
}
return true;
}
/// <summary>
/// The ratio of false to true bits in the filter. E.g., 1 true bit in a 10 bit filter means a truthiness of 0.1.
/// </summary>
public double Truthiness => (double) TrueBits() / _hashBits.Count;
private int TrueBits()
{
return _hashBits.Cast<bool>().Count(bit => bit);
}
/// <summary>
/// Performs Dillinger and Manolios double hashing.
/// </summary>
private int ComputeHash(int primaryHash, int secondaryHash, int i)
{
var resultingHash = (primaryHash + (i * secondaryHash)) % _hashBits.Count;
return Math.Abs(resultingHash);
}
private readonly int _hashFunctionCount;
private readonly BitArray _hashBits;
private readonly HashFunction _secondaryHash;
private static int BestK(int capacity, float errorRate)
{
return (int)Math.Round(Math.Log(2.0) * BestM(capacity, errorRate) / capacity);
}
private static int BestM(int capacity, float errorRate)
{
return (int)Math.Ceiling(capacity * Math.Log(errorRate, (1.0 / Math.Pow(2, Math.Log(2.0)))));
}
private static float BestErrorRate(int capacity)
{
var c = (float)(1.0 / capacity);
if (Math.Abs(c) > 0.0000000001)
return c;
return (float)Math.Pow(0.6185, int.MaxValue / capacity); // http://www.cs.princeton.edu/courses/archive/spring02/cs493/lec7.pdf
}
/// <summary>
/// Hashes a 32-bit signed int using Thomas Wang's method v3.1 (http://www.concentric.net/~Ttwang/tech/inthash.htm).
/// Runtime is suggested to be 11 cycles.
/// </summary>
/// <param name="input">The integer to hash.</param>
/// <returns>The hashed result.</returns>
private static int HashInt32(T input)
{
var x = input as uint?;
unchecked
{
x = ~x + (x << 15); // x = (x << 15) - x- 1, as (~x) + y is equivalent to y - x - 1 in two's complement representation
x = x ^ (x >> 12);
x = x + (x << 2);
x = x ^ (x >> 4);
x = x * 2057; // x = (x + (x << 3)) + (x<< 11);
x = x ^ (x >> 16);
Debug.Assert(x != null, "x != null");
return (int)x;
}
}
/// <summary>
/// Hashes a string using Bob Jenkin's "One At A Time" method from Dr. Dobbs (http://burtleburtle.net/bob/hash/doobs.html).
/// Runtime is suggested to be 9x+9, where x = input.Length.
/// </summary>
/// <param name="input">The string to hash.</param>
/// <returns>The hashed result.</returns>
private static int HashString(T input)
{
var s = input as string;
var hash = 0;
Debug.Assert(s != null, "s != null");
foreach (var t in s)
{
hash += t;
hash += (hash << 10);
hash ^= (hash >> 6);
}
hash += (hash << 3);
hash ^= (hash >> 11);
hash += (hash << 15);
return hash;
}
}
}
| |
namespace FakeItEasy.Specs
{
using System.Diagnostics.CodeAnalysis;
using FluentAssertions;
using Xbehave;
/// <summary>
/// Scenarios verifying the default behavior of fakes that have not been
/// explicitly configured: virtual members return default/dummy values,
/// calls are recorded, and identity members (ToString/Equals) behave sanely.
/// </summary>
public static class UnconfiguredFakeSpecs
{
    /// <summary>A fakeable interface; a DummyFactory below marks its dummies.</summary>
    public interface IFoo
    {
        bool IsADummy { get; }
    }

    // An unconfigured fake intercepts virtual calls made from the constructor,
    // returning defaults instead of the real implementation's values.
    [Scenario]
    public static void VirtualMethodCalledDuringConstruction(
        MakesVirtualCallInConstructor fake)
    {
        "Given a type with a default constructor"
            .x(() => { }); // see MakesVirtualCallInConstructor

        "And the constructor calls a virtual method"
            .x(() => { }); // see MakesVirtualCallInConstructor.ctor()

        "And the method returns a non-default value"
            .x(() => { }); // see MakesVirtualCallInConstructor.VirtualMethod()

        "When I create a fake of the type"
            .x(() => fake = A.Fake<MakesVirtualCallInConstructor>());

        "Then the method will return a default value"
            .x(() => fake.VirtualMethodValueDuringConstructorCall.Should().Be(string.Empty));

        "And the method call will be recorded"
            .x(() => A.CallTo(() => fake.VirtualMethod("call in constructor")).MustHaveHappened());
    }

    // Calls made after construction are likewise intercepted and recorded.
    [Scenario]
    public static void VirtualMethodCalledAfterConstruction(
        MakesVirtualCallInConstructor fake,
        string result)
    {
        "Given a type with a virtual method"
            .x(() => { }); // see MakesVirtualCallInConstructor

        "And the method returns a non-default value"
            .x(() => { }); // see MakesVirtualCallInConstructor.VirtualMethod

        "And a fake of that type"
            .x(() => fake = A.Fake<MakesVirtualCallInConstructor>());

        "When I call the method"
            .x(() => result = fake.VirtualMethod("call after constructor"));

        "Then it will return a default value"
            .x(() => result.Should().Be(string.Empty));

        "And the method call will be recorded"
            .x(() => A.CallTo(() => fake.VirtualMethod("call after constructor")).MustHaveHappened());
    }

    // Property reads during construction also return defaults, for both
    // reference-type and value-type properties.
    [Scenario]
    public static void VirtualPropertiesCalledDuringConstruction(
        FakedClass fake)
    {
        "Given a type with a default constructor"
            .x(() => { }); // see FakedClass

        "And the constructor calls some virtual properties"
            .x(() => { }); // see FakedClass.ctor()

        "And the properties return non-default values"
            .x(() => { }); // see FakedClass.StringProperty, FakedClass.ValueTypeProperty

        "When I create a fake of the type"
            .x(() => fake = A.Fake<FakedClass>());

        "Then the reference-type property will return a default value"
            .x(() => fake.StringPropertyValueDuringConstructorCall.Should().Be(string.Empty));

        "And the value-type property will return a default value"
            .x(() => fake.ValueTypePropertyValueDuringConstructorCall.Should().Be(0));
    }

    // Values written to virtual properties during construction are retained.
    [Scenario]
    public static void VirtualReferenceTypeProperty(
        FakedClass fake,
        string result)
    {
        "Given a type with a default constructor"
            .x(() => { }); // see FakedClass

        "And the constructor assigns a value to a virtual reference-type property"
            .x(() => { }); // see FakedClass.ctor()

        "And a fake of that type"
            .x(() => fake = A.Fake<FakedClass>());

        "When I fetch the property value"
            .x(() => result = fake.StringProperty);

        "Then it will be the value assigned during construction"
            .x(() => result.Should().Be("value set in constructor"));
    }

    [Scenario]
    public static void VirtualValueTypeProperty(
        FakedClass fake,
        int result)
    {
        "Given a type with a default constructor"
            .x(() => { }); // see FakedClass

        "And the constructor assigns a value to a virtual value-type property"
            .x(() => { }); // see FakedClass.ctor()

        "And a fake of that type"
            .x(() => fake = A.Fake<FakedClass>());

        "When I fetch the property value"
            .x(() => result = fake.ValueTypeProperty);

        "Then it will be the value assigned during construction"
            .x(() => result.Should().Be(123456));
    }

    // A virtual property of a fakeable type yields a Dummy (see FooFactory).
    [Scenario]
    public static void FakeableProperty(
        FakedClass fake,
        IFoo? result)
    {
        "Given a type with a virtual fakeable-type property"
            .x(() => { }); // see FakedClass

        "And a fake of that type"
            .x(() => fake = A.Fake<FakedClass>());

        "When I get the property value"
            .x(() => result = fake.FakeableProperty);

        "Then the property will not be null"
            .x(() => result.Should().NotBeNull());

        "And it will be a Dummy"
            .x(() => result!.IsADummy.Should().BeTrue("because the property value should be a Dummy"));
    }

    [Scenario]
    public static void ToStringDescribesFake(FakedClass fake, string? toStringValue)
    {
        "Given a faked class instance"
            .x(() => fake = A.Fake<FakedClass>());

        "When I call ToString on it"
            .x(() => toStringValue = fake.ToString());

        "Then it indicates that it's a fake"
            .x(() => toStringValue.Should().Be("Faked FakeItEasy.Specs.UnconfiguredFakeSpecs+FakedClass"));
    }

    [Scenario]
    public static void EqualsSameFake(FakedClass fake, bool equalsResult)
    {
        "Given a faked class instance"
            .x(() => fake = A.Fake<FakedClass>());

        "When I compare it to itself using Equals"
            .x(() => equalsResult = fake.Equals(fake));

        "Then it compares as equal"
            .x(() => equalsResult.Should().BeTrue());
    }

    [Scenario]
    public static void EqualsDifferentInstanceOfSameFakedType(FakedClass fake1, FakedClass fake2, bool equalsResult)
    {
        "Given a faked class instance"
            .x(() => fake1 = A.Fake<FakedClass>());

        "And a second faked class instance of the same type"
            .x(() => fake2 = A.Fake<FakedClass>());

        "When I compare the instances using Equals"
            .x(() => equalsResult = fake1.Equals(fake2));

        "Then they compare as unequal"
            .x(() => equalsResult.Should().BeFalse());
    }

    /// <summary>
    /// Test subject: records what its virtual properties returned while the
    /// constructor ran, then assigns non-default values to them.
    /// </summary>
    public class FakedClass
    {
        [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors", Justification = "This anti-pattern is part of the the tested scenario.")]
        public FakedClass()
        {
            this.StringPropertyValueDuringConstructorCall = this.StringProperty;
            this.ValueTypePropertyValueDuringConstructorCall = this.ValueTypeProperty;

            this.StringProperty = "value set in constructor";
            this.ValueTypeProperty = 123456;
        }

        public virtual string StringProperty { get; set; }

        public string? StringPropertyValueDuringConstructorCall { get; }

        public virtual int ValueTypeProperty { get; set; }

        public int ValueTypePropertyValueDuringConstructorCall { get; }

        public virtual IFoo? FakeableProperty { get; set; }
    }

    /// <summary>Concrete IFoo used as the Dummy implementation.</summary>
    public class Foo : IFoo
    {
        public bool IsADummy { get; set; }
    }

    /// <summary>Registers Foo (flagged IsADummy) as the Dummy for IFoo.</summary>
    public class FooFactory : DummyFactory<IFoo>
    {
        protected override IFoo Create()
        {
            return new Foo { IsADummy = true };
        }
    }
}
}
| |
/**
* Repairs missing pb_Object and pb_Entity references. It is based
* on this article by Unity Gems: http://unitygems.com/lateral1/
*/
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using System.Linq;
using ProBuilder2.Common;
using ProBuilder2.MeshOperations;
namespace ProBuilder2.EditorCommon
{
/**
 * Extends MonoBehaviour Inspector, automatically fixing missing script
 * references (typically caused by ProBuilder upgrade process).
 */
[CustomEditor(typeof(MonoBehaviour))]
public class pb_MissingScriptEditor : Editor
{
#region Members

	static bool applyDummyScript = true;	///< If true, any null components that can't be set will have this script applied to their reference, allowing us to later remove them.
	static float index = 0;					///< general idea of where we are in terms of processing this scene.
	static float total;						///< general idea of how many missing script references are in this scene.
	static bool doFix = false;				///< while true, the inspector will attempt to cycle to broken gameobjects until none are found.
	static List<GameObject> unfixable = new List<GameObject>();	///< if a non-pb missing reference is encountered, need to let the iterator know not to bother,

	static MonoScript _mono_pb;		///< MonoScript assets
	static MonoScript _mono_pe;		///< MonoScript assets
	static MonoScript _mono_dummy;	///< MonoScript assets

	/**
	 * Load the pb_Object and pb_Entity classes to MonoScript assets. Saves us from having to fall back on Reflection.
	 */
	static void LoadMonoScript()
	{
		// Build a throwaway GameObject just to obtain MonoScript references,
		// then destroy it immediately.
		GameObject go = new GameObject();

		pb_Object pb = go.AddComponent<pb_Object>();
		pb_Entity pe = go.GetComponent<pb_Entity>();
		if(pe == null)
			pe = go.AddComponent<pb_Entity>();
		pb_DummyScript du = go.AddComponent<pb_DummyScript>();

		_mono_pb = MonoScript.FromMonoBehaviour( pb );
		_mono_pe = MonoScript.FromMonoBehaviour( pe );
		_mono_dummy = MonoScript.FromMonoBehaviour( du );

		DestroyImmediate(go);
	}

	/// Lazily-loaded MonoScript for pb_Object.
	public MonoScript pb_monoscript
	{
		get
		{
			if(_mono_pb == null) LoadMonoScript();
			return _mono_pb;
		}
	}

	/// Lazily-loaded MonoScript for pb_Entity.
	public MonoScript pe_monoscript
	{
		get
		{
			if(_mono_pe == null) LoadMonoScript();
			return _mono_pe;
		}
	}

	/// Lazily-loaded MonoScript for pb_DummyScript.
	public MonoScript dummy_monoscript
	{
		get
		{
			if(_mono_dummy == null) LoadMonoScript();
			return _mono_dummy;
		}
	}
#endregion

	[MenuItem("Tools/" + pb_Constant.PRODUCT_NAME + "/Repair/Repair Missing Script References")]
	public static void MenuRepairMissingScriptReferences()
	{
		FixAllScriptReferencesInScene();
	}

	static void FixAllScriptReferencesInScene()
	{
		EditorApplication.ExecuteMenuItem("Window/Inspector");

		Object[] all = Resources.FindObjectsOfTypeAll(typeof(GameObject)).Where(x => ((GameObject)x).GetComponents<Component>().Any(n => n == null) ).ToArray();

		total = all.Length;
		unfixable.Clear();

		// FIX: was `total > 1`, which skipped repair (and reported "no missing
		// references found") when exactly one broken object existed in the scene.
		if(total > 0)
		{
			Undo.RecordObjects(all, "Fix missing script references");

			index = 0;
			doFix = true;
			Next();
		}
		else
		{
			if( applyDummyScript )
				DeleteDummyScripts();

			EditorUtility.DisplayDialog("Success", "No missing ProBuilder script references found.", "Okay");
		}
	}

	/**
	 * Advance to the next gameobject with missing components. If none are found, display dialog and exit.
	 */
	static void Next()
	{
		bool earlyExit = false;

		if( EditorUtility.DisplayCancelableProgressBar("Repair ProBuilder Script References", "Fixing " + (int)Mathf.Floor(index+1) + " out of " + total + " objects in scene.", ((float)index/total) ) )
		{
			earlyExit = true;
			doFix = false;
		}

		if(!earlyExit)
		{
			// Cycle through FindObjectsOfType on every Next() because using a static list didn't work for some reason.
			foreach(GameObject go in Resources.FindObjectsOfTypeAll(typeof(GameObject)))
			{
				if(go.GetComponents<Component>().Any(x => x == null) && !unfixable.Contains(go))
				{
					// Prefab instances whose source prefab still has its pb
					// components intact will repair themselves; skip them.
					if( (PrefabUtility.GetPrefabType(go) == PrefabType.PrefabInstance ||
						PrefabUtility.GetPrefabType(go) == PrefabType.Prefab ) )
					{
						GameObject pref = (GameObject)PrefabUtility.GetPrefabParent(go);

						if(pref && (pref.GetComponent<pb_Object>() || pref.GetComponent<pb_Entity>()))
						{
							unfixable.Add(go);
							continue;
						}
					}

					// Hidden objects are not user content; leave them alone.
					if(go.hideFlags != HideFlags.None)
					{
						unfixable.Add(go);
						continue;
					}

					// Selecting the object re-enters OnInspectorGUI, which
					// performs the actual reconnection.
					Selection.activeObject = go;
					return;
				}
			}
		}

		// No more broken objects: rebuild all pb meshes so the repaired
		// references take effect.
		pb_Object[] pbs = (pb_Object[])Resources.FindObjectsOfTypeAll(typeof(pb_Object));

		for(int i = 0; i < pbs.Length; i++)
		{
			// FIX: progress text previously printed `total` (count of broken
			// objects) as the denominator instead of the number of meshes.
			EditorUtility.DisplayProgressBar("Checking ProBuilder Meshes", "Refresh " + (i+1) + " out of " + pbs.Length + " objects in scene.", ((float)i/pbs.Length) );

			try
			{
				pbs[i].ToMesh();
				pbs[i].Refresh();
				pbs[i].Optimize();
			} catch (System.Exception e)
			{
				Debug.LogWarning("Failed reconstituting " + pbs[i].name + ". Proceeding with upgrade anyways. Usually this means a prefab is already fixed, and just needs to be instantiated to take effect.\n" + e.ToString());
			}
		}

		EditorUtility.ClearProgressBar();

		if( applyDummyScript )
			DeleteDummyScripts();

		EditorUtility.DisplayDialog("Success", "Successfully repaired " + total + " ProBuilder objects.", "Okay");

		if(!pb_EditorSceneUtility.SaveCurrentSceneIfUserWantsTo())
			Debug.LogWarning("Repaired script references will be lost on exit if this scene is not saved!");

		doFix = false;
		skipEvent = true;
	}

	/**
	 * SerializedProperty names found in pb_Object.
	 */
	List<string> PB_OBJECT_SCRIPT_PROPERTIES = new List<string>()
	{
		"_sharedIndices",
		"_vertices",
		"_uv",
		"_sharedIndicesUV",
		"_quads"
	};

	/**
	 * SerializedProperty names found in pb_Entity.
	 */
	List<string> PB_ENTITY_SCRIPT_PROPERTIES = new List<string>()
	{
		"pb",
		"userSetDimensions",
		"_entityType",
		"forceConvex"
	};

	// Prevents ArgumentException after displaying 'Done' dialog. For some reason the Event loop skips layout phase after DisplayDialog.
	private static bool skipEvent = false;

	public override void OnInspectorGUI()
	{
		if(skipEvent && Event.current.type == EventType.Repaint)
		{
			skipEvent = false;
			return;
		}

		SerializedProperty scriptProperty = this.serializedObject.FindProperty("m_Script");

		// Script reference is intact; either continue the fix loop or draw
		// the default inspector.
		if(scriptProperty == null || scriptProperty.objectReferenceValue != null)
		{
			if(doFix)
			{
				if(Event.current.type == EventType.Repaint)
				{
					Next();
				}
			}
			else
			{
				base.OnInspectorGUI();
			}

			return;
		}

		// Identify which pb type this broken component was by counting how
		// many of its serialized property names match each known set.
		int pbObjectMatches = 0, pbEntityMatches = 0;

		// Shows a detailed tree view of all the properties in this serializedobject.
		// GUILayout.Label( SerializedObjectToString(this.serializedObject) );

		SerializedProperty iterator = this.serializedObject.GetIterator();

		iterator.Next(true);

		while( iterator.Next(true) )
		{
			if( PB_OBJECT_SCRIPT_PROPERTIES.Contains(iterator.name) )
				pbObjectMatches++;

			if( PB_ENTITY_SCRIPT_PROPERTIES.Contains(iterator.name) )
				pbEntityMatches++;
		}

		// If we can fix it, show the help box, otherwise just default inspector it up.
		if(pbObjectMatches >= 3 || pbEntityMatches >= 3)
		{
			EditorGUILayout.HelpBox("Missing Script Reference\n\nProBuilder can automatically fix this missing reference. To fix all references in the scene, click \"Fix All in Scene\". To fix just this one, click \"Reconnect\".", MessageType.Warning);
		}
		else
		{
			if(doFix)
			{
				if( applyDummyScript )
				{
					// Not a pb component; tag it with the dummy script so it
					// can be found and removed (or ProBuilderized) later.
					index += .5f;
					scriptProperty.objectReferenceValue = dummy_monoscript;
					scriptProperty.serializedObject.ApplyModifiedProperties();
					scriptProperty = this.serializedObject.FindProperty("m_Script");
					scriptProperty.serializedObject.Update();
				}
				else
				{
					unfixable.Add( ((Component)target).gameObject );
				}

				Next();
				GUIUtility.ExitGUI();
				return;
			}
			else
			{
				base.OnInspectorGUI();
			}

			return;
		}

		GUI.backgroundColor = Color.green;

		if(!doFix)
		{
			if(GUILayout.Button("Fix All in Scene"))
			{
				FixAllScriptReferencesInScene();
				return;
			}
		}

		GUI.backgroundColor = Color.cyan;

		if((doFix && Event.current.type == EventType.Repaint) || GUILayout.Button("Reconnect"))
		{
			if(pbObjectMatches >= 3)	// only increment for pb_Object otherwise the progress bar will fill 2x faster than it should
			{
				index++;
			}
			else
			{
				// Make sure that pb_Object is fixed first if we're automatically cycling objects.
				if(doFix && ((Component)target).gameObject.GetComponent<pb_Object>() == null)
					return;
			}

			if(!doFix)
			{
				Undo.RegisterCompleteObjectUndo(target, "Fix missing reference.");
			}

			// Debug.Log("Fix: " + (pbObjectMatches > 2 ? "pb_Object" : "pb_Entity") + " " + ((Component)target).gameObject.name);

			scriptProperty.objectReferenceValue = pbObjectMatches >= 3 ? pb_monoscript : pe_monoscript;
			scriptProperty.serializedObject.ApplyModifiedProperties();
			scriptProperty = this.serializedObject.FindProperty("m_Script");
			scriptProperty.serializedObject.Update();

			if(doFix)
				Next();

			GUIUtility.ExitGUI();
		}

		GUI.backgroundColor = Color.white;
	}

	/**
	 * Scan the scene for gameObjects referencing `pb_DummyScript` and delete them.
	 */
	static void DeleteDummyScripts()
	{
		pb_DummyScript[] dummies = (pb_DummyScript[])Resources.FindObjectsOfTypeAll(typeof(pb_DummyScript));

		// Ignore hidden/internal objects.
		dummies = dummies.Where(x => x.hideFlags == HideFlags.None).ToArray();

		if(dummies.Length > 0)
		{
			int ret = EditorUtility.DisplayDialogComplex("Found Unrepairable Objects", "Repair script found " + dummies.Length + " missing components that could not be repaired. Would you like to delete those components now, or attempt to rebuild (ProBuilderize) them?", "Delete", "Cancel", "ProBuilderize");

			switch(ret)
			{
				case 1:	// cancel
					{}
					break;

				default:
				{
					// Delete and ProBuilderize
					if(ret == 2)
					{
						// Only interested in objects that have 2 null components (pb_Object and pb_Entity)
						Object[] broken = (Object[])Resources.FindObjectsOfTypeAll(typeof(GameObject))
							.Where(x => !x.Equals(null) &&
								x is GameObject &&
								((GameObject)x).GetComponents<pb_DummyScript>().Length == 2 &&
								((GameObject)x).GetComponent<MeshRenderer>() != null &&
								((GameObject)x).GetComponent<MeshFilter>() != null &&
								((GameObject)x).GetComponent<MeshFilter>().sharedMesh != null
								).ToArray();

						broken = broken.Distinct().ToArray();

						ProBuilder2.Actions.ProBuilderize.DoProBuilderize(
							System.Array.ConvertAll(broken, x => (GameObject) x)
								.Select(x => x.GetComponent<MeshFilter>()),
							pb_MeshImporter.Settings.Default);
					}

					// Always delete components
					Undo.RecordObjects(dummies.Select(x=>x.gameObject).ToArray(), "Delete Broken Scripts");

					for(int i = 0; i < dummies.Length; i++)
						GameObject.DestroyImmediate( dummies[i] );
				}
				break;
			}
		}
	}

	/**
	 * Returns a formatted string with all properties in serialized object.
	 */
	static string SerializedObjectToString(SerializedObject serializedObject)
	{
		System.Text.StringBuilder sb = new System.Text.StringBuilder();

		if(serializedObject == null)
		{
			sb.Append("NULL");
			return sb.ToString();
		}

		SerializedProperty iterator = serializedObject.GetIterator();

		iterator.Next(true);

		while( iterator.Next(true) )
		{
			string tabs = "";
			for(int i = 0; i < iterator.depth; i++) tabs += "\t";

			sb.AppendLine(tabs + iterator.name + (iterator.propertyType == SerializedPropertyType.ObjectReference && iterator.type.Contains("Component") && iterator.objectReferenceValue == null ? " -> NULL" : "") );

			tabs += "  - ";

			sb.AppendLine(tabs + "Type: (" + iterator.type + " / " + iterator.propertyType + " / " + " / " + iterator.name + ")");
			sb.AppendLine(tabs + iterator.propertyPath);
			sb.AppendLine(tabs + "Value: " + SerializedPropertyValue(iterator));
		}

		return sb.ToString();
	}

	/**
	 * Return a string from the value of a SerializedProperty.
	 */
	static string SerializedPropertyValue(SerializedProperty sp)
	{
		switch(sp.propertyType)
		{
			case SerializedPropertyType.Integer:
				return sp.intValue.ToString();

			case SerializedPropertyType.Boolean:
				return sp.boolValue.ToString();

			case SerializedPropertyType.Float:
				return sp.floatValue.ToString();

			case SerializedPropertyType.String:
				return sp.stringValue.ToString();

			case SerializedPropertyType.Color:
				return sp.colorValue.ToString();

			case SerializedPropertyType.ObjectReference:
				return (sp.objectReferenceValue == null ? "null" : sp.objectReferenceValue.name);

			case SerializedPropertyType.LayerMask:
				return sp.intValue.ToString();

			case SerializedPropertyType.Enum:
				return sp.enumValueIndex.ToString();

			case SerializedPropertyType.Vector2:
				return sp.vector2Value.ToString();

			case SerializedPropertyType.Vector3:
				return sp.vector3Value.ToString();

			// Not public api as of 4.3?
			// case SerializedPropertyType.Vector4:
			// 	return sp.vector4Value.ToString();

			case SerializedPropertyType.Rect:
				return sp.rectValue.ToString();

			case SerializedPropertyType.ArraySize:
				return sp.intValue.ToString();

			case SerializedPropertyType.Character:
				return "Character";

			case SerializedPropertyType.AnimationCurve:
				return sp.animationCurveValue.ToString();

			case SerializedPropertyType.Bounds:
				return sp.boundsValue.ToString();

			case SerializedPropertyType.Gradient:
				return "Gradient";

			default:
				return "Unknown type";
		}
	}
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
#pragma warning disable 1634, 1691
#pragma warning disable 56506
using System;
using Dbg = System.Management.Automation;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Management.Automation;
using System.Management.Automation.Provider;
using System.Security;
namespace Microsoft.PowerShell.Commands
{
/// <summary>
/// This is the base class for all the providers that produce a view
/// on session state data (Variables, Aliases, and Functions)
/// </summary>
public abstract class SessionStateProviderBase : ContainerCmdletProvider, IContentCmdletProvider
{
#region tracer
/// <summary>
/// An instance of the PSTraceSource class used for trace output.
/// </summary>
[Dbg.TraceSourceAttribute(
    "SessionStateProvider",
    "Providers that produce a view of session state data.")]
private static readonly Dbg.PSTraceSource s_tracer =
    Dbg.PSTraceSource.GetTracer("SessionStateProvider",
    "Providers that produce a view of session state data.");
#endregion tracer
#region protected members
/// <summary>
/// Derived classes must override to get items from session state.
/// </summary>
///
/// <param name="name">
/// The name of the item to get.
/// </param>
///
/// <returns>
/// The item of the given name in the appropriate session state table,
/// or null if no item of that name exists.
/// </returns>
///
internal abstract object GetSessionStateItem(string name);
/// <summary>
/// Sets a session state item in the appropriate session state table.
/// Derived classes must override this method to set the item in the
/// proper table. Passing a null value with writeItem false is how
/// ClearItem clears an entry.
/// </summary>
///
/// <param name="name">
/// The name of the item to set.
/// </param>
///
/// <param name="value">
/// The new value for the item.
/// </param>
///
/// <param name="writeItem">
/// If true, the item that was set should be written to WriteItemObject.
/// </param>
///
internal abstract void SetSessionStateItem(string name, object value, bool writeItem);
/// <summary>
/// Removes a session state item from the appropriate session state table.
/// Derived classes must override this method to remove items from the
/// proper table.
/// </summary>
///
/// <param name="name">
/// The name of the item to remove.
/// </param>
///
internal abstract void RemoveSessionStateItem(string name);
/// <summary>
/// Gets all the items in the appropriate session state table.
/// </summary>
///
/// <returns>
/// An IDictionary representing the items in the session state table.
/// The key is the name of the item and the value is the value.
/// </returns>
///
internal abstract IDictionary GetSessionStateTable();
/// <summary>
/// Since items are often more than their value, this method should
/// be overridden to provide the value for an item.
/// </summary>
///
/// <param name="item">
/// The item to extract the value from.
/// </param>
///
/// <returns>
/// The value of the specified item.
/// </returns>
///
/// <remarks>
/// The default implementation unwraps a DictionaryEntry and returns
/// its Value; any other item is returned unchanged.
/// </remarks>
///
internal virtual object GetValueOfItem(object item)
{
    Dbg.Diagnostics.Assert(
        item != null,
        "Caller should verify the item parameter");

    if (item is DictionaryEntry)
    {
        return ((DictionaryEntry)item).Value;
    }

    return item;
} // GetValueOfItem
/// <summary>
/// Determines if the item can be renamed. Derived classes that need
/// to perform a check should override this method.
/// </summary>
///
/// <param name="item">
/// The item to verify if it can be renamed.
/// </param>
///
/// <returns>
/// true if the item can be renamed or false otherwise.
/// </returns>
///
internal virtual bool CanRenameItem(object item)
{
    // By default there are no restrictions; subclasses override to veto.
    return true;
}
#endregion protected members
#region ItemCmdletProvider overrides
/// <summary>
/// Gets an item from session state. With an empty or null name the
/// whole value collection is written as a container; otherwise only
/// the named entry is written.
/// </summary>
///
/// <param name="name">
/// Name of the item to get.
/// </param>
///
/// <remarks>
/// The item instance is written to the WriteObject
/// method. Items hidden from the current command origin are not written.
/// </remarks>
///
protected override void GetItem(string name)
{
    IDictionary table = GetSessionStateTable();

    object item = null;
    bool isContainer = false;

    if (table != null)
    {
        if (!String.IsNullOrEmpty(name))
        {
            item = table[name];
        }
        else
        {
            // No name: the whole table acts as the (container) item.
            isContainer = true;
            item = table.Values;
        }
    }

    if (item == null)
    {
        return;
    }

    // Respect visibility rules for the current command origin.
    if (SessionState.IsVisible(this.Context.Origin, item))
    {
        WriteItemObject(item, name, isContainer);
    }
} // GetItem
/// <summary>
/// Sets a session state item to a given value.
/// </summary>
///
/// <param name="name">
/// Name of the item to set.
/// </param>
///
/// <param name="value">
/// The value to which to set the item.
/// </param>
///
/// <returns>
/// Nothing. The item that was set is written to the
/// WriteObject method.
/// </returns>
///
protected override void SetItem(
    string name,
    object value)
{
    // Guard: a set without a name is an error, not an exception.
    if (String.IsNullOrEmpty(name))
    {
        WriteError(new ErrorRecord(
            PSTraceSource.NewArgumentNullException("name"),
            "SetItemNullName",
            ErrorCategory.InvalidArgument,
            name));
        return;
    }

    try
    {
        // Confirm the set item with the user (ShouldProcess honors -WhatIf/-Confirm).
        string action = SessionStateProviderBaseStrings.SetItemAction;

        string resource = String.Format(
            Host.CurrentCulture,
            SessionStateProviderBaseStrings.SetItemResourceTemplate,
            name,
            value);

        if (ShouldProcess(resource, action))
        {
            SetSessionStateItem(name, value, true);
        }
    }
    catch (SessionStateException e)
    {
        WriteError(new ErrorRecord(e.ErrorRecord, e));
    }
    catch (PSArgumentException argException)
    {
        WriteError(new ErrorRecord(argException.ErrorRecord, argException));
    }
} // SetItem
/// <summary>
/// Clears the named session state item by setting its value to null
/// (without writing the item back to the pipeline).
/// </summary>
/// <param name="path">The name of the item to clear.</param>
protected override void ClearItem(string path)
{
    // Guard: a clear without a name is an error, not an exception.
    if (String.IsNullOrEmpty(path))
    {
        WriteError(new ErrorRecord(
            PSTraceSource.NewArgumentNullException("path"),
            "ClearItemNullPath",
            ErrorCategory.InvalidArgument,
            path));
        return;
    }

    try
    {
        // Confirm the clear item with the user (ShouldProcess honors -WhatIf/-Confirm).
        string action = SessionStateProviderBaseStrings.ClearItemAction;

        string resource = String.Format(
            Host.CurrentCulture,
            SessionStateProviderBaseStrings.ClearItemResourceTemplate,
            path);

        if (ShouldProcess(resource, action))
        {
            // Clearing is modeled as setting the item to null, no write-back.
            SetSessionStateItem(path, null, false);
        }
    }
    catch (SessionStateException e)
    {
        WriteError(new ErrorRecord(e.ErrorRecord, e));
    }
    catch (PSArgumentException argException)
    {
        WriteError(new ErrorRecord(argException.ErrorRecord, argException));
    }
} // ClearItem
#endregion ItemCmdletProvider overrides
#region ContainerCmdletProvider overrides
/// <summary>
/// Gets the item(s) at the given path. When <paramref name="path"/> is
/// null or empty, every visible entry in the session state table is
/// written out, sorted case-insensitively by key; otherwise only the
/// single named item is written (if present and visible).
/// </summary>
///
/// <param name="path">
/// The name of the item to retrieve, or all if empty or null.
/// </param>
///
/// <param name="recurse">
/// Ignored.
/// </param>
///
protected override void GetChildItems(string path, bool recurse)
{
    // Visibility checks below are made relative to the caller's origin.
    CommandOrigin origin = this.Context.Origin;
    if (String.IsNullOrEmpty(path))
    {
        IDictionary dictionary = null;
        try
        {
            dictionary = GetSessionStateTable();
        }
        catch (SecurityException e)
        {
            WriteError(
                new ErrorRecord(
                    e,
                    "GetTableSecurityException",
                    ErrorCategory.ReadError,
                    path));
            return;
        }

        // bug Windows7 #300974 says that we should sort
        List<DictionaryEntry> sortedEntries = new List<DictionaryEntry>(dictionary.Count + 1);
        foreach (DictionaryEntry entry in dictionary)
        {
            sortedEntries.Add(entry);
        }

        // Sort by key, case-insensitively, using the current culture.
        sortedEntries.Sort(
            delegate (DictionaryEntry left, DictionaryEntry right)
            {
                string leftKey = (string)left.Key;
                string rightKey = (string)right.Key;
                IComparer<string> stringComparer = StringComparer.CurrentCultureIgnoreCase;
                return stringComparer.Compare(leftKey, rightKey);
            });

        // Now write out each object; any failure aborts the remaining entries.
        foreach (DictionaryEntry entry in sortedEntries)
        {
            try
            {
                if (SessionState.IsVisible(origin, entry.Value))
                {
                    WriteItemObject(entry.Value, (string)entry.Key, false);
                }
            }
            catch (PSArgumentException argException)
            {
                WriteError(
                    new ErrorRecord(
                        argException.ErrorRecord,
                        argException));
                return;
            }
            catch (SecurityException securityException)
            {
                WriteError(
                    new ErrorRecord(
                        securityException,
                        "GetItemSecurityException",
                        ErrorCategory.PermissionDenied,
                        (string)entry.Key));
                return;
            }
        }
    }
    else
    {
        // Single-item case: fetch, then write it only if non-null and visible.
        object item = null;
        try
        {
            item = GetSessionStateItem(path);
        }
        catch (PSArgumentException argException)
        {
            WriteError(
                new ErrorRecord(
                    argException.ErrorRecord,
                    argException));
            return;
        }
        catch (SecurityException securityException)
        {
            WriteError(
                new ErrorRecord(
                    securityException,
                    "GetItemSecurityException",
                    ErrorCategory.PermissionDenied,
                    path));
            return;
        }

        if (item != null)
        {
            if (SessionState.IsVisible(origin, item))
            {
                WriteItemObject(item, path, false);
            }
        }
    }
} // GetChildItems
/// <summary>
/// Gets the name(s) of the item(s) at the given path. When
/// <paramref name="path"/> is null or empty, the keys of every visible
/// entry in the session state table are written (unsorted, unlike
/// GetChildItems); otherwise the path itself is written back if the
/// named item exists and is visible.
/// </summary>
///
/// <param name="path">
/// The name of the item to retrieve, or all if empty or null.
/// </param>
///
/// <param name="returnContainers">
/// Ignored.
/// </param>
///
protected override void GetChildNames(string path, ReturnContainers returnContainers)
{
    // Visibility checks below are made relative to the caller's origin.
    CommandOrigin origin = this.Context.Origin;
    if (String.IsNullOrEmpty(path))
    {
        IDictionary dictionary = null;
        try
        {
            dictionary = GetSessionStateTable();
        }
        catch (SecurityException e)
        {
            WriteError(
                new ErrorRecord(
                    e,
                    "GetChildNamesSecurityException",
                    ErrorCategory.ReadError,
                    path));
            return;
        }

        // Now write out each object's key; any failure aborts the rest.
        foreach (DictionaryEntry entry in dictionary)
        {
            try
            {
                // Visibility is determined by the value, but only the key is written.
                if (SessionState.IsVisible(origin, entry.Value))
                {
                    WriteItemObject(entry.Key, (string)entry.Key, false);
                }
            }
            catch (PSArgumentException argException)
            {
                WriteError(
                    new ErrorRecord(
                        argException.ErrorRecord,
                        argException));
                return;
            }
            catch (SecurityException securityException)
            {
                WriteError(
                    new ErrorRecord(
                        securityException,
                        "GetItemSecurityException",
                        ErrorCategory.PermissionDenied,
                        (string)entry.Key));
                return;
            }
        }
    }
    else
    {
        object item = null;
        try
        {
            item = GetSessionStateItem(path);
        }
        catch (SecurityException e)
        {
            WriteError(
                new ErrorRecord(
                    e,
                    "GetChildNamesSecurityException",
                    ErrorCategory.ReadError,
                    path));
            return;
        }

        if (item != null)
        {
            if (SessionState.IsVisible(origin, item))
            {
                WriteItemObject(path, path, false);
            }
        }
    }
} // GetChildNames
/// <summary>
/// Determines if there are any items.
/// </summary>
///
/// <param name="path">
/// The container to check to see if there are any children.
/// </param>
///
/// <returns>
/// True if <paramref name="path"/> is empty or null and the session state
/// table is non-empty, false otherwise.
/// </returns>
///
protected override bool HasChildItems(string path)
{
    // Only the root (null/empty path) is a container; named items never
    // have children.
    if (!String.IsNullOrEmpty(path))
    {
        return false;
    }

    try
    {
        return GetSessionStateTable().Count > 0;
    }
    catch (SecurityException e)
    {
        WriteError(
            new ErrorRecord(
                e,
                "HasChildItemsSecurityException",
                ErrorCategory.ReadError,
                path));
    }

    return false;
} // HasChildItems
/// <summary>
/// Determines if the specified item exists.
/// </summary>
///
/// <param name="path">
/// The path to the item to check. If this is null or empty, the item
/// container is used (and always exists).
/// </param>
///
/// <returns>
/// True if the item exists, false otherwise.
/// </returns>
///
protected override bool ItemExists(string path)
{
    // The root container itself always exists.
    if (String.IsNullOrEmpty(path))
    {
        return true;
    }

    object item = null;
    try
    {
        item = GetSessionStateItem(path);
    }
    catch (SecurityException e)
    {
        // Report the failure but treat the item as absent.
        WriteError(
            new ErrorRecord(
                e,
                "ItemExistsSecurityException",
                ErrorCategory.ReadError,
                path));
    }

    return item != null;
} // ItemExists
/// <summary>
/// Determines if the specified path is syntactically and semantically valid.
/// </summary>
///
/// <param name="path">
/// The path to validate.
/// </param>
///
/// <returns>
/// True if the path is non-null and non-empty, false otherwise.
/// </returns>
///
/// <remarks>
/// NOTE(review): earlier documentation listed . ( ) : as disallowed
/// characters, but no character-level validation is performed here —
/// confirm whether that restriction is enforced elsewhere.
/// </remarks>
///
protected override bool IsValidPath(string path)
{
    // A null or empty string can never name a session state item.
    if (String.IsNullOrEmpty(path))
    {
        return false;
    }

    return true;
}
/// <summary>
/// Removes the item at the specified path, after confirming the operation
/// with the user via ShouldProcess.
/// </summary>
///
/// <param name="path">
/// The name of the item to be removed.
/// </param>
///
/// <param name="recurse">
/// Ignored.
/// </param>
///
protected override void RemoveItem(string path, bool recurse)
{
    if (String.IsNullOrEmpty(path))
    {
        // Cannot remove an unnamed item.
        WriteError(new ErrorRecord(
            PSTraceSource.NewArgumentException("path"),
            "RemoveItemNullPath",
            ErrorCategory.InvalidArgument,
            path));
        return;
    }

    // Confirm the remove item with the user.
    string confirmAction = SessionStateProviderBaseStrings.RemoveItemAction;
    string confirmResource =
        String.Format(
            Host.CurrentCulture,
            SessionStateProviderBaseStrings.RemoveItemResourceTemplate,
            path);

    if (!ShouldProcess(confirmResource, confirmAction))
    {
        return;
    }

    try
    {
        RemoveSessionStateItem(path);
    }
    catch (SessionStateException sessionStateException)
    {
        WriteError(
            new ErrorRecord(
                sessionStateException.ErrorRecord,
                sessionStateException));
    }
    catch (SecurityException securityException)
    {
        WriteError(
            new ErrorRecord(
                securityException,
                "RemoveItemSecurityException",
                ErrorCategory.PermissionDenied,
                path));
    }
    catch (PSArgumentException argumentException)
    {
        WriteError(
            new ErrorRecord(
                argumentException.ErrorRecord,
                argumentException));
    }
}
/// <summary>
/// Creates a new item if one of the same name doesn't already exist
/// (or if -Force is specified), after confirming with the user.
/// </summary>
///
/// <param name="path">
/// The name of the item to create.
/// </param>
///
/// <param name="type">
/// Ignored except for inclusion in the confirmation message.
/// </param>
///
/// <param name="newItem">
/// The value of the new item. Must not be null.
/// </param>
///
protected override void NewItem(string path, string type, object newItem)
{
    if (String.IsNullOrEmpty(path))
    {
        Exception e =
            PSTraceSource.NewArgumentException("path");
        WriteError(new ErrorRecord(
            e,
            "NewItemNullPath",
            ErrorCategory.InvalidArgument,
            path));
        return;
    }

    if (newItem == null)
    {
        ArgumentNullException argException =
            PSTraceSource.NewArgumentNullException("value");
        WriteError(
            new ErrorRecord(
                argException,
                "NewItemValueNotSpecified",
                ErrorCategory.InvalidArgument,
                path));
        return;
    }

    // Refuse to overwrite an existing item unless -Force was given.
    if (ItemExists(path) && !Force)
    {
        PSArgumentException e =
            (PSArgumentException)
            PSTraceSource.NewArgumentException(
                "path",
                SessionStateStrings.NewItemAlreadyExists,
                path);
        WriteError(
            new ErrorRecord(
                e.ErrorRecord,
                e));
        return;
    }
    else
    {
        // Confirm the new item with the user
        string action = SessionStateProviderBaseStrings.NewItemAction;
        string resourceTemplate = SessionStateProviderBaseStrings.NewItemResourceTemplate;
        string resource =
            String.Format(
                Host.CurrentCulture,
                resourceTemplate,
                path,
                type,
                newItem);
        if (ShouldProcess(resource, action))
        {
            // Creation is delegated to SetItem, which performs the actual write.
            SetItem(path, newItem);
        }
    }
} // NewItem
/// <summary>
/// Copies the specified item to a new name. Copying an item onto itself
/// (null/empty <paramref name="copyPath"/>) is a no-op that simply emits
/// the source item for -PassThru.
/// </summary>
///
/// <param name="path">
/// The name of the item to copy. Must exist.
/// </param>
///
/// <param name="copyPath">
/// The name of the item to create.
/// </param>
///
/// <param name="recurse">
/// Ignored.
/// </param>
///
protected override void CopyItem(string path, string copyPath, bool recurse)
{
    if (String.IsNullOrEmpty(path))
    {
        Exception e =
            PSTraceSource.NewArgumentException("path");
        WriteError(new ErrorRecord(
            e,
            "CopyItemNullPath",
            ErrorCategory.InvalidArgument,
            path));
        return;
    }

    // If copyPath is null or empty, that means we are trying to copy
    // the item to itself so it should be a no-op.
    if (String.IsNullOrEmpty(copyPath))
    {
        // Just get the item for -passthru
        GetItem(path);
        return;
    }

    object item = null;
    try
    {
        item = GetSessionStateItem(path);
    }
    catch (SecurityException e)
    {
        WriteError(
            new ErrorRecord(
                e,
                "CopyItemSecurityException",
                ErrorCategory.ReadError,
                path));
        return;
    }

    if (item != null)
    {
        // Confirm the new item with the user
        string action = SessionStateProviderBaseStrings.CopyItemAction;
        string resourceTemplate = SessionStateProviderBaseStrings.CopyItemResourceTemplate;
        string resource =
            String.Format(
                Host.CurrentCulture,
                resourceTemplate,
                path,
                copyPath);
        if (ShouldProcess(resource, action))
        {
            try
            {
                // Copy the underlying value, not the wrapper item.
                SetSessionStateItem(copyPath, GetValueOfItem(item), true);
            }
            catch (SessionStateException e)
            {
                WriteError(
                    new ErrorRecord(
                        e.ErrorRecord,
                        e));
                return;
            }
            catch (PSArgumentException argException)
            {
                WriteError(
                    new ErrorRecord(
                        argException.ErrorRecord,
                        argException));
                return;
            }
        }
    }
    else
    {
        // Source item was not found — report and stop.
        PSArgumentException e =
            (PSArgumentException)
            PSTraceSource.NewArgumentException(
                "path",
                SessionStateStrings.CopyItemDoesntExist,
                path);
        WriteError(
            new ErrorRecord(
                e.ErrorRecord,
                e));
        return;
    }
} // CopyItem
/// <summary>
/// Renames the specified item. The source must exist, the destination
/// must not already exist (unless -Force), and the item must be
/// renameable (CanRenameItem). A rename to the same name (case-insensitive)
/// is a no-op that emits the item for -PassThru.
/// </summary>
///
/// <param name="name">
/// The name of the item to rename.
/// </param>
///
/// <param name="newName">
/// The new name of the item.
/// </param>
///
protected override void RenameItem(string name, string newName)
{
    if (String.IsNullOrEmpty(name))
    {
        Exception e =
            PSTraceSource.NewArgumentException("name");
        WriteError(new ErrorRecord(
            e,
            "RenameItemNullPath",
            ErrorCategory.InvalidArgument,
            name));
        return;
    }

    object item = null;
    try
    {
        item = GetSessionStateItem(name);
    }
    catch (SecurityException e)
    {
        WriteError(
            new ErrorRecord(
                e,
                "RenameItemSecurityException",
                ErrorCategory.ReadError,
                name));
        return;
    }

    if (item != null)
    {
        // Refuse to clobber an existing destination unless -Force was given.
        if (ItemExists(newName) && !Force)
        {
            PSArgumentException e =
                (PSArgumentException)
                PSTraceSource.NewArgumentException(
                    "newName",
                    SessionStateStrings.NewItemAlreadyExists,
                    newName);
            WriteError(
                new ErrorRecord(
                    e.ErrorRecord,
                    e));
            return;
        }
        else
        {
            try
            {
                // CanRenameItem may throw SessionStateException for items
                // that cannot be renamed; handled by the outer catch below.
                if (CanRenameItem(item))
                {
                    // Confirm the new item with the user
                    string action = SessionStateProviderBaseStrings.RenameItemAction;
                    string resourceTemplate = SessionStateProviderBaseStrings.RenameItemResourceTemplate;
                    string resource =
                        String.Format(
                            Host.CurrentCulture,
                            resourceTemplate,
                            name,
                            newName);
                    if (ShouldProcess(resource, action))
                    {
                        if (String.Equals(name, newName, StringComparison.OrdinalIgnoreCase))
                        {
                            // This is a no-op. Just get the item for -passthru
                            GetItem(newName);
                            return;
                        }

                        try
                        {
                            // Rename = create under the new name, then remove the old.
                            SetSessionStateItem(newName, item, true);
                            RemoveSessionStateItem(name);
                        }
                        catch (SessionStateException e)
                        {
                            WriteError(
                                new ErrorRecord(
                                    e.ErrorRecord,
                                    e));
                            return;
                        }
                        catch (PSArgumentException argException)
                        {
                            WriteError(
                                new ErrorRecord(
                                    argException.ErrorRecord,
                                    argException));
                            return;
                        }
                        catch (SecurityException securityException)
                        {
                            WriteError(
                                new ErrorRecord(
                                    securityException,
                                    "RenameItemSecurityException",
                                    ErrorCategory.PermissionDenied,
                                    name));
                            return;
                        }
                    }
                }
            }
            catch (SessionStateException e)
            {
                WriteError(
                    new ErrorRecord(
                        e.ErrorRecord,
                        e));
                return;
            }
        }
    }
    else
    {
        // Source item was not found — report and stop.
        PSArgumentException e =
            (PSArgumentException)
            PSTraceSource.NewArgumentException(
                "name",
                SessionStateStrings.RenameItemDoesntExist,
                name);
        WriteError(
            new ErrorRecord(
                e.ErrorRecord,
                e));
        return;
    }
} // RenameItem
#endregion ContainerCmdletProvider overrides
#region IContentCmdletProvider methods
/// <summary>
/// Gets an instance of the content reader for this provider for the
/// specified path.
/// </summary>
///
/// <param name="path">
/// The path to get the content reader for.
/// </param>
///
/// <returns>
/// An instance of an IContentReader for the given path.
/// </returns>
///
public IContentReader GetContentReader(string path)
{
    // Reading and writing share one implementation that wraps this provider.
    var readerWriter = new SessionStateProviderBaseContentReaderWriter(path, this);
    return readerWriter;
}
/// <summary>
/// Gets an instance of the content writer for this provider for the
/// specified path.
/// </summary>
///
/// <param name="path">
/// The path to get the content writer for.
/// </param>
///
/// <returns>
/// An instance of an IContentWriter for the given path.
/// </returns>
///
public IContentWriter GetContentWriter(string path)
{
    // Reading and writing share one implementation that wraps this provider.
    var readerWriter = new SessionStateProviderBaseContentReaderWriter(path, this);
    return readerWriter;
}
/// <summary>
/// Always throws a NotSupportedException: clearing content is not
/// meaningful for session state items.
/// </summary>
///
/// <param name="path">
/// Ignored.
/// </param>
///
/// <exception cref="NotSupportedException">
/// This exception is always thrown.
/// </exception>
///
public void ClearContent(string path)
{
    throw PSTraceSource.NewNotSupportedException(
        SessionStateStrings.IContent_Clear_NotSupported);
}
#region dynamic parameters
// For now, none of the derived providers need dynamic parameters
// so these methods just return null
/// <summary>
/// Always returns null: no derived provider declares dynamic parameters
/// for Get-Content.
/// </summary>
/// <param name="path">Ignored.</param>
/// <returns>null</returns>
///
public object GetContentReaderDynamicParameters(string path)
{
    return null;
}
/// <summary>
/// Always returns null: no derived provider declares dynamic parameters
/// for Set-Content.
/// </summary>
/// <param name="path">Ignored.</param>
/// <returns>null</returns>
///
public object GetContentWriterDynamicParameters(string path)
{
    return null;
}
/// <summary>
/// Always returns null: no derived provider declares dynamic parameters
/// for Clear-Content.
/// </summary>
/// <param name="path">Ignored.</param>
/// <returns>null</returns>
///
public object ClearContentDynamicParameters(string path)
{
    return null;
}
#endregion
#endregion
} // SessionStateProviderBase
/// <summary>
/// The content reader/writer used by all providers deriving from
/// SessionStateProviderBase.
/// </summary>
///
public class SessionStateProviderBaseContentReaderWriter : IContentReader, IContentWriter
{
    // Path of the session state item whose content is read or written.
    private string _path;

    // Provider that owns the item.
    private SessionStateProviderBase _provider;

    // True once Read has handed out the item's content; later reads return null.
    private bool _contentRead;

    /// <summary>
    /// Constructs a content reader/writer for the specified provider using
    /// the specified path to read or write the content.
    /// </summary>
    ///
    /// <param name="path">
    /// The path to the session state item which the content will be read or written.
    /// </param>
    ///
    /// <param name="provider">
    /// The SessionStateProviderBase derived provider that the content will be
    /// read or written from/to.
    /// </param>
    ///
    /// <exception cref="ArgumentException">
    /// if <paramref name="path"/> is null or empty.
    /// </exception>
    ///
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="provider"/> is null.
    /// </exception>
    ///
    internal SessionStateProviderBaseContentReaderWriter(string path, SessionStateProviderBase provider)
    {
        if (String.IsNullOrEmpty(path))
        {
            throw PSTraceSource.NewArgumentException("path");
        }

        if (provider == null)
        {
            throw PSTraceSource.NewArgumentNullException("provider");
        }

        _path = path;
        _provider = provider;
    }

    /// <summary>
    /// Reads the content from the item.
    /// </summary>
    ///
    /// <param name="readCount">
    /// Not used by this implementation: the item's whole value is returned
    /// as a single read.
    /// </param>
    ///
    /// <returns>
    /// The item's value (wrapped in an array if it isn't already a list) on
    /// the first call; null on subsequent calls or when the item or its
    /// value is missing.
    /// </returns>
    ///
    public IList Read(long readCount)
    {
        // The value is handed out exactly once per reader instance.
        if (_contentRead)
        {
            return null;
        }

        object item = _provider.GetSessionStateItem(_path);
        if (item == null)
        {
            return null;
        }

        _contentRead = true;

        object itemValue = _provider.GetValueOfItem(item);
        if (itemValue == null)
        {
            return null;
        }

        return itemValue as IList ?? new object[] { itemValue };
    }

    /// <summary>
    /// Writes content to the item.
    /// </summary>
    ///
    /// <param name="content">
    /// An array of content "blocks" to be written to the item.
    /// </param>
    ///
    /// <returns>
    /// The blocks of content that were successfully written to the item.
    /// </returns>
    ///
    public IList Write(IList content)
    {
        if (content == null)
        {
            throw PSTraceSource.NewArgumentNullException("content");
        }

        // A single-element list is unwrapped so the item stores the scalar
        // value itself rather than a one-element collection.
        object valueToSet = content.Count == 1 ? content[0] : (object)content;

        _provider.SetSessionStateItem(_path, valueToSet, false);
        return content;
    }

    /// <summary>
    /// None of the derived providers supports seeking, so this always
    /// throws a NotSupportedException.
    /// </summary>
    ///
    /// <param name="offset">
    /// ignored
    /// </param>
    ///
    /// <param name="origin">
    /// ignored
    /// </param>
    ///
    /// <exception cref="NotSupportedException">
    /// This exception is always thrown.
    /// </exception>
    ///
    public void Seek(long offset, SeekOrigin origin)
    {
        throw PSTraceSource.NewNotSupportedException(
            SessionStateStrings.IContent_Seek_NotSupported);
    }

    /// <summary>
    /// Closes the reader. None of the derived providers need to close
    /// their reader, so this does nothing.
    /// </summary>
    ///
    public void Close()
    {
    }

    /// <summary>
    /// Disposes the reader/writer; nothing to release beyond Close.
    /// </summary>
    ///
    public void Dispose()
    {
        Close();
        GC.SuppressFinalize(this);
    }
}
}
#pragma warning restore 56506
| |
using System;
using System.Collections;
using System.Collections.Generic;
namespace Reference.Lib.DataStructures.Trees
{
/// <summary>
/// A general-purpose binary tree. <see cref="Add(T)"/> inserts breadth-first
/// into the first node with a free child slot, so the tree fills level by
/// level, left to right.
/// </summary>
public class BinaryTree<T> : IEnumerable<T>
{
    /// <summary>Traversal order used when enumerating the tree.</summary>
    public TreeTraversalMethod TraversalMethod { get; set; } = TreeTraversalMethod.BreadthFirst;

    /// <summary>Number of nodes currently in the tree.</summary>
    public int Count { get; protected set; }

    /// <summary>Root node, or null when the tree is empty.</summary>
    public BinaryTreeNode<T> Root { get; private set; }

    public bool IsEmpty => Root == null;

    /// <summary>
    /// A tree is Degenerate if every node has one child
    /// (leaves are also accepted — see IsDegenerateDelegate).
    /// </summary>
    /// <returns>true if the entire Tree is degenerate; otherwise false</returns>
    public bool IsDegenerate => Root != null && VerifyProperty(IsDegenerateDelegate, Root);

    /// <summary>
    /// A Tree is full if every node has either 0 or 2 children
    /// </summary>
    /// <returns>true if Tree is full; otherwise false</returns>
    public bool IsFull => Root != null && VerifyProperty(IsFullDelegate, Root);

    /// <summary>
    /// A 'Complete' tree has the property that every level,
    /// possibly excluding the last level, is completely filled.
    /// All nodes in the last level must be as far left as possible.
    /// </summary>
    /// <returns></returns>
    public bool IsComplete
    {
        get
        {
            // Heights start at 1 (see SetRoot), so `last` is the level of the
            // parents of the deepest level. Every level above `last` must be
            // full; on level `last`, once the first non-full node is seen
            // (`final` flips), all remaining nodes on that level must be
            // leaves, which forces the deepest level to be packed left.
            var last = Height - 1;
            var final = false;
            foreach (var node in BreadthFirstTraversal())
                // NOTE: brace-less dangling-else chain — the first `else`
                // binds to `if (!final)`, the `else if` to the outer check.
                if (node.Height == last)
                    if (!final)
                    {
                        if (node.Children != 2)
                            final = true;
                    }
                    else
                    {
                        if (!node.IsLeaf)
                            return false;
                    }
                else if (node.Height < last)
                    if (node.Children < 2)
                        return false;
            return true;
        }
    }

    /// <summary>Height of the deepest node; maintained by the Set* helpers.</summary>
    public int Height { get; internal set; }

    public bool IsHeightBalanced => Height <= OptimalHeight;

    /// <summary>
    /// * Left and right sub-tree heights differ by at most 1
    /// * Left sub-tree is balanced
    /// * Right sub-tree is balanced
    /// </summary>
    /// <returns></returns>
    public bool IsBalanced => IsBalancedNode(Root);

    /// <summary>Minimum possible height for Count nodes.</summary>
    public int OptimalHeight => (int) Math.Log(Count, 2) + 1;

    /// <summary>
    /// A 'Perfect' tree has the property that all interior nodes
    /// have two children and all leaves have the same depth/level.
    /// NOTE(review): this is computed as IsFull &amp;&amp; IsComplete, which
    /// does not reject a full, complete tree whose last level is only
    /// partially filled — confirm intended semantics. (IsPerfectDelegate
    /// below looks like an unused alternative implementation.)
    /// </summary>
    /// <returns></returns>
    public bool IsPerfect => IsFull && IsComplete;

    /// <summary>
    /// Enumerates node values in the order selected by TraversalMethod.
    /// </summary>
    public virtual IEnumerator<T> GetEnumerator()
    {
        if (Root == null)
            yield break;

        // Pick the traversal; anything unrecognised falls back to depth-first.
        Func<IEnumerable<BinaryTreeNode<T>>> method;
        switch (TraversalMethod)
        {
            case TreeTraversalMethod.InOrder:
                method = InOrderTraversal;
                break;
            case TreeTraversalMethod.PreOrder:
                method = PreOrderTraversal;
                break;
            case TreeTraversalMethod.PostOrder:
                method = PostOrderTraversal;
                break;
            case TreeTraversalMethod.BreadthFirst:
                method = BreadthFirstTraversal;
                break;
            default:
                method = DepthFirstTraversal;
                break;
        }

        foreach (var res in method())
            yield return res.Value;
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    private bool IsBalancedNode(BinaryTreeNode<T> node)
    {
        if (node == null) return true;

        // this fails to get correct height
        // NOTE(review): pre-existing warning above. GetTreeHeightFrom derives
        // subtree height from the absolute Height values stored at insert
        // time; verify they remain accurate for the intended usage.
        var leftH = node.HasLeftChild ? GetTreeHeightFrom(node.Left) : 0;
        var rightH = node.HasRightChild ? GetTreeHeightFrom(node.Right) : 0;

        return Math.Abs(leftH - rightH) <= 1 && IsBalancedNode(node.Right) && IsBalancedNode(node.Left);
    }

    /// <summary>
    /// Height of the subtree rooted at <paramref name="node"/>, in edges,
    /// computed from the absolute Height values stored on its descendants.
    /// </summary>
    protected int GetTreeHeightFrom(BinaryTreeNode<T> node)
    {
        var offset = node.Height;
        var max = 0;

        // Scan the subtree and take the deepest descendant relative to node.
        foreach (var child in InOrder(node))
        {
            var adjusted = child.Height - offset;
            max = adjusted > max ? adjusted : max;
        }

        return max;
    }

    /// <summary>
    /// Inserts a value breadth-first into the first free child slot
    /// (left before right), keeping the tree level-packed.
    /// </summary>
    public virtual void Add(T value)
    {
        if (Root == null)
        {
            SetRoot(value);
            return;
        }

        foreach (var node in BreadthFirstTraversal())
            if (node.Children < 2)
            {
                if (!node.HasLeftChild)
                    SetAsLeftChild(node, value);
                else
                    SetAsRightChild(node, value);
                break;
            }
    }

    /// <summary>Inserts each value in order via <see cref="Add(T)"/>.</summary>
    public void Add(params T[] values)
    {
        foreach (var value in values)
            Add(value);
    }

    /// <summary>
    /// Empties the tree.
    /// NOTE(review): Height is not reset here — confirm whether that is
    /// intentional.
    /// </summary>
    public void Clear()
    {
        Count = 0;
        Root = null;
    }

    // Returns true when `method` holds for every node of the subtree at `node`.
    private bool VerifyProperty(Func<BinaryTreeNode<T>, bool> method, BinaryTreeNode<T> node)
    {
        if (node == null)
            return true;

        if (!method(node))
            return false;

        return VerifyProperty(method, node.Left) && VerifyProperty(method, node.Right);
    }

    // Installs the first node; node heights are 1-based from the root.
    protected void SetRoot(T value)
    {
        Root = new BinaryTreeNode<T>(value) {Height = 1};
        ++Count;
    }

    // Attaches `child` as the left child of `parent`, maintaining Count/Height.
    protected void SetAsLeftChild(BinaryTreeNode<T> parent, T child)
    {
        parent.Left = new BinaryTreeNode<T>(child, parent.Height + 1);
        ++Count;
        if (parent.Left.Height > Height)
            Height = parent.Left.Height;
    }

    // Attaches `child` as the right child of `parent`, maintaining Count/Height.
    protected void SetAsRightChild(BinaryTreeNode<T> parent, T child)
    {
        parent.Right = new BinaryTreeNode<T>(child, parent.Height + 1);
        ++Count;
        if (parent.Right.Height > Height)
            Height = parent.Right.Height;
    }

    /// <summary>
    /// InOrder (Left, Root, Right)
    /// </summary>
    /// <returns></returns>
    protected IEnumerable<BinaryTreeNode<T>> InOrderTraversal()
    {
        return InOrder(Root);
    }

    /// <summary>
    /// PreOrder (Root, Left, Right) — implemented as the iterative
    /// depth-first traversal, which visits in pre-order.
    /// </summary>
    /// <returns></returns>
    protected IEnumerable<BinaryTreeNode<T>> PreOrderTraversal()
    {
        return DepthFirstTraversal();
    }

    /// <summary>
    /// PostOrder (Left, Right, Root)
    /// </summary>
    /// <returns></returns>
    protected IEnumerable<BinaryTreeNode<T>> PostOrderTraversal()
    {
        return PostOrder(Root);
    }

    // Recursive post-order: children first, then the node itself.
    private IEnumerable<BinaryTreeNode<T>> PostOrder(BinaryTreeNode<T> root)
    {
        if (root == null)
            yield break;

        if (root.HasLeftChild)
            foreach (var left in PostOrder(root.Left))
                yield return left;

        if (root.HasRightChild)
            foreach (var right in PostOrder(root.Right))
                yield return right;

        yield return root;
    }

    // Recursive in-order: left subtree, node, right subtree.
    private IEnumerable<BinaryTreeNode<T>> InOrder(BinaryTreeNode<T> root)
    {
        if (root == null)
            yield break;

        if (root.HasLeftChild)
            foreach (var left in InOrder(root.Left))
                yield return left;

        yield return root;

        if (root.HasRightChild)
            foreach (var right in InOrder(root.Right))
                yield return right;
    }

    // Level-order traversal using an explicit queue.
    protected IEnumerable<BinaryTreeNode<T>>
        BreadthFirstTraversal()
    {
        if (Root == null)
            yield break;

        var queue = new Queue<BinaryTreeNode<T>>();
        queue.Enqueue(Root);

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            yield return current;

            if (current.HasLeftChild)
                queue.Enqueue(current.Left);
            if (current.HasRightChild)
                queue.Enqueue(current.Right);
        }
    }

    // Iterative pre-order traversal using an explicit stack.
    protected IEnumerable<BinaryTreeNode<T>> DepthFirstTraversal()
    {
        if (Root == null)
            yield break;

        var stack = new Stack<BinaryTreeNode<T>>();
        stack.Push(Root);

        while (stack.Count > 0)
        {
            var current = stack.Pop();
            yield return current;

            // LIFO: push right first so the left child is visited first.
            if (current.HasRightChild)
                stack.Push(current.Right);
            if (current.HasLeftChild)
                stack.Push(current.Left);
        }
    }

    // Predicate for IsFull: each node must have 0 or 2 children.
    private static bool IsFullDelegate(BinaryTreeNode<T> node)
    {
        return node.IsFull;
    }

    // Predicate for IsDegenerate: each node has one child, or is a leaf.
    private static bool IsDegenerateDelegate(BinaryTreeNode<T> node)
    {
        return node.IsDegenerate || node.IsLeaf;
    }

    // Currently unreferenced within this class; kept for compatibility.
    private static bool IsPerfectDelegate(int? leafHeight, BinaryTreeNode<T> node)
    {
        if (!node.IsLeaf)
            return node.Children == 2;

        if (leafHeight == null)
            return true;

        return node.Height == (int) leafHeight;
    }
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.IO;
using System.Reflection;
using System.Xml;
using System.Xml.Xsl;
using SIL.Lift.Merging.xmldiff;
using SIL.Lift.Validation;
namespace SIL.Lift
{
/// <summary>
/// This contains various static utility methods related to Lift file processing.
/// </summary>
public class Utilities
{
/// <summary>
/// Add guids
/// </summary>
/// <param name="inputPath"></param>
/// <returns>path to a processed version</returns>
static public string ProcessLiftForLaterMerging(string inputPath)
{
if (inputPath == null)
{
throw new ArgumentNullException("inputPath");
}
string outputOfPassOne = Path.GetTempFileName();
XmlWriterSettings settings = new XmlWriterSettings();
settings.Indent = true;
settings.NewLineOnAttributes = true;//ugly, but great for merging with revision control systems
// nb: don't use XmlTextWriter.Create, that's broken. Ignores the indent setting
using (XmlWriter writer = XmlWriter.Create(outputOfPassOne /*Console.Out*/, settings))
{
using (XmlReader reader = XmlReader.Create(inputPath))
{
//bool elementWasReplaced = false;
while (!reader.EOF)
{
ProcessNode(reader, writer);
}
}
}
XslCompiledTransform transform = new XslCompiledTransform();
using (Stream canonicalizeXsltStream = Assembly.GetExecutingAssembly().GetManifestResourceStream("SIL.Lift.canonicalizeLift.xsl"))
{
if (canonicalizeXsltStream != null)
{
using (XmlReader xsltReader = XmlReader.Create(canonicalizeXsltStream))
{
transform.Load(xsltReader);
xsltReader.Close();
}
canonicalizeXsltStream.Close();
}
}
string outputOfPassTwo = Path.GetTempFileName();
using (Stream output = File.Create(outputOfPassTwo))
{
transform.Transform(outputOfPassOne, new XsltArgumentList(), output);
}
TempFileCollection tempfiles = transform.TemporaryFiles;
if (tempfiles != null) // tempfiles will be null when debugging is not enabled
{
tempfiles.Delete();
}
File.Delete(outputOfPassOne);
return outputOfPassTwo;
}
private static void ProcessNode(XmlReader reader, XmlWriter writer)
{
switch (reader.NodeType)
{
case XmlNodeType.EndElement:
case XmlNodeType.Element:
ProcessElement(reader, writer);
break;
default:
WriteShallowNode(reader, writer);
break;
}
}
private static void ProcessElement(XmlReader reader, XmlWriter writer)
{
if (reader.Name == "entry")
{
string guid = reader.GetAttribute("guid");
if (String.IsNullOrEmpty(guid))
{
guid = Guid.NewGuid().ToString();
writer.WriteStartElement(reader.Prefix, reader.LocalName, reader.NamespaceURI);
writer.WriteAttributes(reader, true);
writer.WriteAttributeString("guid", guid);
string s = reader.ReadInnerXml();//this seems to be enough to get the reader to the next element
writer.WriteRaw(s);
writer.WriteEndElement();
}
else
{
writer.WriteNode(reader, true);
}
}
else
{
WriteShallowNode(reader, writer);
}
}
///<summary>
/// Create an empty Lift file, unless a file of the given name exists already and
/// doOverwriteIfExists is false.
///</summary>
static public void CreateEmptyLiftFile(string path, string producerString, bool doOverwriteIfExists)
{
if (File.Exists(path))
{
if (doOverwriteIfExists)
{
File.Delete(path);
}
else
{
return;
}
}
XmlWriterSettings settings = new XmlWriterSettings();
settings.Indent = true;
settings.NewLineOnAttributes = true;//ugly, but great for merging with revision control systems
using (XmlWriter writer = XmlWriter.Create(path, settings))
{
writer.WriteStartDocument();
writer.WriteStartElement("lift");
writer.WriteAttributeString("version", Validator.LiftVersion);
writer.WriteAttributeString("producer", producerString);
writer.WriteEndElement();
writer.WriteEndDocument();
}
}
//came from a blog somewhere
static internal void WriteShallowNode(XmlReader reader, XmlWriter writer)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
if (writer == null)
{
throw new ArgumentNullException("writer");
}
switch (reader.NodeType)
{
case XmlNodeType.Element:
writer.WriteStartElement(reader.Prefix, reader.LocalName, reader.NamespaceURI);
writer.WriteAttributes(reader, true);
if (reader.IsEmptyElement)
{
writer.WriteEndElement();
}
break;
case XmlNodeType.Text:
writer.WriteString(reader.Value);
break;
case XmlNodeType.Whitespace:
case XmlNodeType.SignificantWhitespace:
writer.WriteWhitespace(reader.Value);
break;
case XmlNodeType.CDATA:
writer.WriteCData(reader.Value);
break;
case XmlNodeType.EntityReference:
writer.WriteEntityRef(reader.Name);
break;
case XmlNodeType.XmlDeclaration:
case XmlNodeType.ProcessingInstruction:
writer.WriteProcessingInstruction(reader.Name, reader.Value);
break;
case XmlNodeType.DocumentType:
writer.WriteDocType(reader.Name, reader.GetAttribute("PUBLIC"), reader.GetAttribute("SYSTEM"),
reader.Value);
break;
case XmlNodeType.Comment:
writer.WriteComment(reader.Value);
break;
case XmlNodeType.EndElement:
writer.WriteFullEndElement();
break;
}
reader.Read();
}
/// <summary>
/// Check wether the two XML elements (given as strings) are equal.
/// </summary>
public static bool AreXmlElementsEqual(string ours, string theirs)
{
StringReader osr = new StringReader(ours);
XmlReader or = XmlReader.Create(osr);
XmlDocument od = new XmlDocument();
XmlNode on = od.ReadNode(or);
if (on != null)
on.Normalize();
StringReader tsr = new StringReader(theirs);
XmlReader tr = XmlReader.Create(tsr);
XmlDocument td = new XmlDocument();
XmlNode tn = td.ReadNode(tr);
if (tn != null)
{
tn.Normalize();//doesn't do much
// StringBuilder builder = new StringBuilder();
// XmlWriter w = XmlWriter.Create(builder);
return AreXmlElementsEqual(on, tn);
}
return false;
}
/// <summary>
/// Check whether the two XML elements are equal.
/// </summary>
public static bool AreXmlElementsEqual(XmlNode ours, XmlNode theirs)
{
if (ours.NodeType == XmlNodeType.Text)
{
if (ours.NodeType != XmlNodeType.Text)
{
return false;
}
bool oursIsEmpty = (ours.InnerText.Trim() == string.Empty);
bool theirsIsEmpty = (theirs.InnerText.Trim() == string.Empty);
if(oursIsEmpty != theirsIsEmpty)
{
return false;
}
return ours.InnerText.Trim() == theirs.InnerText.Trim();
}
// DiffConfiguration config = new DiffConfiguration(WhitespaceHandling.None);
var diff = new XmlDiff(new XmlInput(ours.OuterXml), new XmlInput(theirs.OuterXml));//, config);
DiffResult d = diff.Compare();
return (d == null || d.Difference == null || !d.Difference.HasMajorDifference);
}
/// <summary>
/// Get an attribute value from the XML element, or throw an exception if it isn't there.
/// </summary>
public static string GetStringAttribute(XmlNode form, string attr)
{
try
{
if (form.Attributes != null)
return form.Attributes[attr].Value;
}
catch(NullReferenceException)
{
}
throw new LiftFormatException(string.Format("Expected a {0} attribute on {1}.", attr, form.OuterXml));
}
/// <summary>
/// Get an attribute value from the XML element, or return null if it isn't there.
/// </summary>
public static string GetOptionalAttributeString(XmlNode xmlNode, string attributeName)
{
if (xmlNode.Attributes == null)
return null;
XmlAttribute attr = xmlNode.Attributes[attributeName];
if (attr == null)
return null;
return attr.Value;
}
/// <summary>
/// Make the string safe for writing in an XML attribute value.
/// </summary>
public static string MakeSafeXmlAttribute(string sInput)
{
string sOutput = sInput;
if (sOutput != null && sOutput.Length != 0)
{
sOutput = sOutput.Replace("&", "&");
sOutput = sOutput.Replace("\"", """);
sOutput = sOutput.Replace("'", "'");
sOutput = sOutput.Replace("<", "<");
sOutput = sOutput.Replace(">", ">");
}
return sOutput;
}
/// <summary>
/// Parse a string into a new XmlNode that belongs to the same XmlDocument as
/// nodeMaker (which may be either the XmlDocument or a child XmlNode).
/// </summary>
/// <summary>
/// Parse a string into a new XmlNode that belongs to the same XmlDocument as
/// nodeMaker (which may be either the XmlDocument or a child XmlNode).
/// </summary>
/// <param name="outerXml">The raw XML to parse; must be non-empty.</param>
/// <param name="nodeMaker">Document, or any node owned by the target document.</param>
/// <returns>The parsed node, owned by nodeMaker's document.</returns>
public static XmlNode GetDocumentNodeFromRawXml(string outerXml, XmlNode nodeMaker)
{
    if (string.IsNullOrEmpty(outerXml))
    {
        throw new ArgumentException("string outerXml is null or empty");
    }

    // Resolve the owning document whether we were handed the document
    // itself or one of its child nodes.
    XmlDocument document = nodeMaker as XmlDocument ?? nodeMaker.OwnerDocument;
    if (document == null)
    {
        throw new ArgumentException("Could not get XmlDocument from XmlNode nodeMaker");
    }

    using (StringReader stringReader = new StringReader(outerXml))
    {
        using (XmlReader xmlReader = XmlReader.Create(stringReader))
        {
            // Advance to the first node, then materialize it in the target document.
            xmlReader.Read();
            return document.ReadNode(xmlReader);
        }
    }
}
}
}
| |
#region Apache License
//
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to you under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
#if !NETMF
using System.Configuration;
#endif
using System.Reflection;
#if NETMF
using Microsoft.SPOT;
#endif
using log4net.Util;
using log4net.Repository;
namespace log4net.Core
{
/// <summary>
/// Static manager that controls the creation of repositories
/// </summary>
/// <remarks>
/// <para>
/// Static manager that controls the creation of repositories
/// </para>
/// <para>
/// This class is used by the wrapper managers (e.g. <see cref="log4net.LogManager"/>)
/// to provide access to the <see cref="ILogger"/> objects.
/// </para>
/// <para>
/// This manager also holds the <see cref="IRepositorySelector"/> that is used to
/// lookup and create repositories. The selector can be set either programmatically using
/// the <see cref="RepositorySelector"/> property, or by setting the <c>log4net.RepositorySelector</c>
/// AppSetting in the applications config file to the fully qualified type name of the
/// selector to use.
/// </para>
/// </remarks>
/// <author>Nicko Cadell</author>
/// <author>Gert Driesen</author>
public sealed class LoggerManager
{
#region Private Instance Constructors
/// <summary>
/// Private constructor to prevent instances. Only static methods should be used.
/// </summary>
/// <remarks>
/// <para>
/// Private constructor to prevent instances. Only static methods should be used.
/// </para>
/// </remarks>
private LoggerManager()
{
    // Intentionally empty: this type is a static facade and must never
    // be instantiated.
}
#endregion Private Instance Constructors
#region Static Constructor
/// <summary>
/// Hook the shutdown event
/// </summary>
/// <remarks>
/// <para>
/// On the full .NET runtime, the static constructor hooks up the
/// <c>AppDomain.ProcessExit</c> and <c>AppDomain.DomainUnload</c> events.
/// These are used to shutdown the log4net system as the application exits.
/// </para>
/// </remarks>
static LoggerManager()
{
    try
    {
        // Register the AppDomain events, note we have to do this with a
        // method call rather than directly here because the AppDomain
        // makes a LinkDemand which throws the exception during the JIT phase.
        RegisterAppDomainEvents();
    }
    catch(System.Security.SecurityException)
    {
        // Partial-trust callers lack ControlAppDomain permission; continue
        // without the hooks — Shutdown() must then be invoked manually.
        LogLog.Debug(declaringType, "Security Exception (ControlAppDomain LinkDemand) while trying "+
            "to register Shutdown handler with the AppDomain. LoggerManager.Shutdown() "+
            "will not be called automatically when the AppDomain exits. It must be called "+
            "programmatically.");
    }

    // Dump out our assembly version into the log if debug is enabled
    LogLog.Debug(declaringType, GetVersionInfo());

    // Set the default repository selector
#if NETCF
    s_repositorySelector = new CompactRepositorySelector(typeof(log4net.Repository.Hierarchy.Hierarchy));
#else
    // Look for the RepositorySelector type specified in the AppSettings 'log4net.RepositorySelector'
    string appRepositorySelectorTypeName = SystemInfo.GetAppSetting("log4net.RepositorySelector");
    if (appRepositorySelectorTypeName != null && appRepositorySelectorTypeName.Length > 0)
    {
        // Resolve the config string into a Type
        Type appRepositorySelectorType = null;
        try
        {
            appRepositorySelectorType = SystemInfo.GetTypeFromString(appRepositorySelectorTypeName, false, true);
        }
        catch(Exception ex)
        {
            // Bad type name in config: report it and fall back to the default below.
            LogLog.Error(declaringType, "Exception while resolving RepositorySelector Type ["+appRepositorySelectorTypeName+"]", ex);
        }

        if (appRepositorySelectorType != null)
        {
            // Create an instance of the RepositorySelectorType
            object appRepositorySelectorObj = null;
            try
            {
                appRepositorySelectorObj = Activator.CreateInstance(appRepositorySelectorType);
            }
            catch(Exception ex)
            {
                LogLog.Error(declaringType, "Exception while creating RepositorySelector ["+appRepositorySelectorType.FullName+"]", ex);
            }

            if (appRepositorySelectorObj != null && appRepositorySelectorObj is IRepositorySelector)
            {
                s_repositorySelector = (IRepositorySelector)appRepositorySelectorObj;
            }
            else
            {
                LogLog.Error(declaringType, "RepositorySelector Type ["+appRepositorySelectorType.FullName+"] is not an IRepositorySelector");
            }
        }
    }

    // Create the DefaultRepositorySelector if not configured above
    if (s_repositorySelector == null)
    {
        s_repositorySelector = new DefaultRepositorySelector(typeof(log4net.Repository.Hierarchy.Hierarchy));
    }
#endif
}
/// <summary>
/// Register for ProcessExit and DomainUnload events on the AppDomain
/// </summary>
/// <remarks>
/// <para>
/// This needs to be in a separate method because the events make
/// a LinkDemand for the ControlAppDomain SecurityPermission. Because
/// this is a LinkDemand it is demanded at JIT time. Therefore we cannot
/// catch the exception in the method itself, we have to catch it in the
/// caller.
/// </para>
/// </remarks>
private static void RegisterAppDomainEvents()
{
#if !NETCF
    // ProcessExit seems to be fired if we are part of the default domain
    AppDomain.CurrentDomain.ProcessExit += new EventHandler(OnProcessExit);

    // Otherwise DomainUnload is fired
    AppDomain.CurrentDomain.DomainUnload += new EventHandler(OnDomainUnload);
#endif
    // On the Compact Framework neither event exists, so this is a no-op there.
}
#endregion Static Constructor
#region Public Static Methods
/// <summary>
/// Return the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <param name="repository">the repository to lookup in</param>
/// <returns>Return the default <see cref="ILoggerRepository"/> instance</returns>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the <paramref name="repository"/> argument.
/// </para>
/// </remarks>
[Obsolete("Use GetRepository instead of GetLoggerRepository")]
public static ILoggerRepository GetLoggerRepository(string repository)
{
    // Obsolete alias kept for backward compatibility; forwards to GetRepository.
    return GetRepository(repository);
}
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <returns>The default <see cref="ILoggerRepository"/> instance.</returns>
[Obsolete("Use GetRepository instead of GetLoggerRepository")]
public static ILoggerRepository GetLoggerRepository(Assembly repositoryAssembly)
{
    // Obsolete alias kept for backward compatibility; forwards to GetRepository.
    return GetRepository(repositoryAssembly);
}
/// <summary>
/// Return the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <param name="repository">the repository to lookup in</param>
/// <returns>Return the default <see cref="ILoggerRepository"/> instance</returns>
/// <remarks>
/// <para>
/// Gets the <see cref="ILoggerRepository"/> for the repository specified
/// by the <paramref name="repository"/> argument.
/// </para>
/// </remarks>
public static ILoggerRepository GetRepository(string repository)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }

    // Let the configured selector resolve the named repository.
    IRepositorySelector selector = RepositorySelector;
    return selector.GetRepository(repository);
}
/// <summary>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <returns>The default <see cref="ILoggerRepository"/> instance.</returns>
/// <remarks>
/// <para>
/// Returns the default <see cref="ILoggerRepository"/> instance.
/// </para>
/// </remarks>
public static ILoggerRepository GetRepository(Assembly repositoryAssembly)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }

    // The selector maps the assembly to its associated repository.
    IRepositorySelector selector = RepositorySelector;
    return selector.GetRepository(repositoryAssembly);
}
/// <summary>
/// Returns the named logger if it exists.
/// </summary>
/// <param name="repository">The repository to lookup in.</param>
/// <param name="name">The fully qualified logger name to look for.</param>
/// <returns>
/// The logger found, or <c>null</c> if the named logger does not exist in the
/// specified repository.
/// </returns>
/// <remarks>
/// <para>
/// If the named logger exists (in the specified repository) then it
/// returns a reference to the logger, otherwise it returns
/// <c>null</c>.
/// </para>
/// </remarks>
public static ILogger Exists(string repository, string name)
{
    // Validate arguments in the documented order: repository first, then name.
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }
    if (null == name)
    {
        throw new ArgumentNullException("name");
    }

    ILoggerRepository rep = RepositorySelector.GetRepository(repository);
    return rep.Exists(name);
}
/// <summary>
/// Returns the named logger if it exists.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <param name="name">The fully qualified logger name to look for.</param>
/// <returns>
/// The logger found, or <c>null</c> if the named logger does not exist in the
/// specified assembly's repository.
/// </returns>
/// <remarks>
/// <para>
/// If the named logger exists (in the specified assembly's repository) then it
/// returns a reference to the logger, otherwise it returns
/// <c>null</c>.
/// </para>
/// </remarks>
public static ILogger Exists(Assembly repositoryAssembly, string name)
{
    // Validate arguments in the documented order: assembly first, then name.
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }
    if (null == name)
    {
        throw new ArgumentNullException("name");
    }

    ILoggerRepository rep = RepositorySelector.GetRepository(repositoryAssembly);
    return rep.Exists(name);
}
/// <summary>
/// Returns all the currently defined loggers in the specified repository.
/// </summary>
/// <param name="repository">The repository to lookup in.</param>
/// <returns>All the defined loggers.</returns>
/// <remarks>
/// <para>
/// The root logger is <b>not</b> included in the returned array.
/// </para>
/// </remarks>
public static ILogger[] GetCurrentLoggers(string repository)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }

    // Ask the named repository for its currently defined loggers
    // (the root logger is not included).
    ILoggerRepository rep = RepositorySelector.GetRepository(repository);
    return rep.GetCurrentLoggers();
}
/// <summary>
/// Returns all the currently defined loggers in the specified assembly's repository.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <returns>All the defined loggers.</returns>
/// <remarks>
/// <para>
/// The root logger is <b>not</b> included in the returned array.
/// </para>
/// </remarks>
public static ILogger[] GetCurrentLoggers(Assembly repositoryAssembly)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }

    // Resolve the assembly's repository, then enumerate its loggers
    // (the root logger is not included).
    ILoggerRepository rep = RepositorySelector.GetRepository(repositoryAssembly);
    return rep.GetCurrentLoggers();
}
/// <summary>
/// Retrieves or creates a named logger.
/// </summary>
/// <param name="repository">The repository to lookup in.</param>
/// <param name="name">The name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
/// <remarks>
/// <para>
/// Retrieves a logger named as the <paramref name="name"/>
/// parameter. If the named logger already exists, then the
/// existing instance will be returned. Otherwise, a new instance is
/// created.
/// </para>
/// <para>
/// By default, loggers do not have a set level but inherit
/// it from the hierarchy. This is one of the central features of
/// log4net.
/// </para>
/// </remarks>
public static ILogger GetLogger(string repository, string name)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }
    if (null == name)
    {
        throw new ArgumentNullException("name");
    }

    // The repository creates the logger on first request and caches it
    // for subsequent calls.
    ILoggerRepository rep = RepositorySelector.GetRepository(repository);
    return rep.GetLogger(name);
}
/// <summary>
/// Retrieves or creates a named logger.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <param name="name">The name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
/// <remarks>
/// <para>
/// Retrieves a logger named as the <paramref name="name"/>
/// parameter. If the named logger already exists, then the
/// existing instance will be returned. Otherwise, a new instance is
/// created.
/// </para>
/// <para>
/// By default, loggers do not have a set level but inherit
/// it from the hierarchy. This is one of the central features of
/// log4net.
/// </para>
/// </remarks>
public static ILogger GetLogger(Assembly repositoryAssembly, string name)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }
    if (null == name)
    {
        throw new ArgumentNullException("name");
    }

    // Retrieve-or-create semantics are provided by the repository itself.
    ILoggerRepository rep = RepositorySelector.GetRepository(repositoryAssembly);
    return rep.GetLogger(name);
}
/// <summary>
/// Shorthand for <see cref="M:LogManager.GetLogger(string)"/>.
/// </summary>
/// <param name="repository">The repository to lookup in.</param>
/// <param name="type">The <paramref name="type"/> of which the fullname will be used as the name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
/// <remarks>
/// <para>
/// Gets the logger for the fully qualified name of the type specified.
/// </para>
/// </remarks>
public static ILogger GetLogger(string repository, Type type)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }
    if (null == type)
    {
        throw new ArgumentNullException("type");
    }

    // The logger is named after the type's fully qualified name.
    ILoggerRepository rep = RepositorySelector.GetRepository(repository);
    return rep.GetLogger(type.FullName);
}
/// <summary>
/// Shorthand for <see cref="M:LogManager.GetLogger(string)"/>.
/// </summary>
/// <param name="repositoryAssembly">the assembly to use to lookup the repository</param>
/// <param name="type">The <paramref name="type"/> of which the fullname will be used as the name of the logger to retrieve.</param>
/// <returns>The logger with the name specified.</returns>
/// <remarks>
/// <para>
/// Gets the logger for the fully qualified name of the type specified.
/// </para>
/// </remarks>
public static ILogger GetLogger(Assembly repositoryAssembly, Type type)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }
    if (null == type)
    {
        throw new ArgumentNullException("type");
    }

    // The logger is named after the type's fully qualified name.
    ILoggerRepository rep = RepositorySelector.GetRepository(repositoryAssembly);
    return rep.GetLogger(type.FullName);
}
/// <summary>
/// Shuts down the log4net system.
/// </summary>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in all the
/// default repositories.
/// </para>
/// <para>
/// Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>
/// The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
public static void Shutdown()
{
    // Shut down every known repository; each repository closes its own
    // appenders (nested appenders first).
    ILoggerRepository[] repositories = GetAllRepositories();
    for (int i = 0; i < repositories.Length; i++)
    {
        repositories[i].Shutdown();
    }
}
/// <summary>
/// Shuts down the repository for the repository specified.
/// </summary>
/// <param name="repository">The repository to shutdown.</param>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in the
/// repository for the <paramref name="repository"/> specified.
/// </para>
/// <para>
/// Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>
/// The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
public static void ShutdownRepository(string repository)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }

    // Resolve the named repository and shut only that one down.
    ILoggerRepository rep = RepositorySelector.GetRepository(repository);
    rep.Shutdown();
}
/// <summary>
/// Shuts down the repository for the repository specified.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository.</param>
/// <remarks>
/// <para>
/// Calling this method will <b>safely</b> close and remove all
/// appenders in all the loggers including root contained in the
/// repository for the repository. The repository is looked up using
/// the <paramref name="repositoryAssembly"/> specified.
/// </para>
/// <para>
/// Some appenders need to be closed before the application exits.
/// Otherwise, pending logging events might be lost.
/// </para>
/// <para>
/// The <c>shutdown</c> method is careful to close nested
/// appenders before closing regular appenders. This allows
/// configurations where a regular appender is attached to a logger
/// and again to a nested appender.
/// </para>
/// </remarks>
public static void ShutdownRepository(Assembly repositoryAssembly)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }

    // Resolve the assembly's repository and shut only that one down.
    ILoggerRepository rep = RepositorySelector.GetRepository(repositoryAssembly);
    rep.Shutdown();
}
/// <summary>
/// Resets all values contained in this repository instance to their defaults.
/// </summary>
/// <param name="repository">The repository to reset.</param>
/// <remarks>
/// <para>
/// Resets all values contained in the repository instance to their
/// defaults. This removes all appenders from all loggers, sets
/// the level of all non-root loggers to <c>null</c>,
/// sets their additivity flag to <c>true</c> and sets the level
/// of the root logger to <see cref="Level.Debug"/>. Moreover,
/// message disabling is set to its default "off" value.
/// </para>
/// </remarks>
public static void ResetConfiguration(string repository)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }

    // The repository itself knows how to restore its default configuration.
    ILoggerRepository rep = RepositorySelector.GetRepository(repository);
    rep.ResetConfiguration();
}
/// <summary>
/// Resets all values contained in this repository instance to their defaults.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to lookup the repository to reset.</param>
/// <remarks>
/// <para>
/// Resets all values contained in the repository instance to their
/// defaults. This removes all appenders from all loggers, sets
/// the level of all non-root loggers to <c>null</c>,
/// sets their additivity flag to <c>true</c> and sets the level
/// of the root logger to <see cref="Level.Debug"/>. Moreover,
/// message disabling is set to its default "off" value.
/// </para>
/// </remarks>
public static void ResetConfiguration(Assembly repositoryAssembly)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }

    // The repository itself knows how to restore its default configuration.
    ILoggerRepository rep = RepositorySelector.GetRepository(repositoryAssembly);
    rep.ResetConfiguration();
}
/// <summary>
/// Creates a repository with the specified name.
/// </summary>
/// <param name="repository">The name of the repository, this must be unique amongst repositories.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// Creates the default type of <see cref="ILoggerRepository"/> which is a
/// <see cref="log4net.Repository.Hierarchy.Hierarchy"/> object.
/// </para>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An <see cref="Exception"/> will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <exception cref="LogException">The specified repository already exists.</exception>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(string repository)
{
    // Obsolete alias kept for backward compatibility; forwards to CreateRepository.
    return CreateRepository(repository);
}
/// <summary>
/// Creates a repository with the specified name.
/// </summary>
/// <param name="repository">The name of the repository, this must be unique amongst repositories.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// Creates the default type of <see cref="ILoggerRepository"/> which is a
/// <see cref="log4net.Repository.Hierarchy.Hierarchy"/> object.
/// </para>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An <see cref="Exception"/> will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <exception cref="LogException">The specified repository already exists.</exception>
public static ILoggerRepository CreateRepository(string repository)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }

    // A null repository type makes the selector use its default
    // ILoggerRepository implementation; the selector throws if the
    // repository already exists.
    IRepositorySelector selector = RepositorySelector;
    return selector.CreateRepository(repository, null);
}
/// <summary>
/// Creates a repository with the specified name and repository type.
/// </summary>
/// <param name="repository">The name of the repository, this must be unique to the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An Exception will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <exception cref="LogException">The specified repository already exists.</exception>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(string repository, Type repositoryType)
{
    // Obsolete alias kept for backward compatibility; forwards to CreateRepository.
    return CreateRepository(repository, repositoryType);
}
/// <summary>
/// Creates a repository with the specified name and repository type.
/// </summary>
/// <param name="repository">The name of the repository, this must be unique to the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// The <paramref name="repository"/> name must be unique. Repositories cannot be redefined.
/// An Exception will be thrown if the repository already exists.
/// </para>
/// </remarks>
/// <exception cref="LogException">The specified repository already exists.</exception>
public static ILoggerRepository CreateRepository(string repository, Type repositoryType)
{
    if (null == repository)
    {
        throw new ArgumentNullException("repository");
    }
    if (null == repositoryType)
    {
        throw new ArgumentNullException("repositoryType");
    }

    // The selector instantiates repositoryType and throws if the name
    // is already taken (see the documented LogException).
    IRepositorySelector selector = RepositorySelector;
    return selector.CreateRepository(repository, repositoryType);
}
/// <summary>
/// Creates a repository for the specified assembly and repository type.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to get the name of the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// <b>CreateDomain is obsolete. Use CreateRepository instead of CreateDomain.</b>
/// </para>
/// <para>
/// The <see cref="ILoggerRepository"/> created will be associated with the repository
/// specified such that a call to <see cref="M:GetRepository(Assembly)"/> with the
/// same assembly specified will return the same repository instance.
/// </para>
/// </remarks>
[Obsolete("Use CreateRepository instead of CreateDomain")]
public static ILoggerRepository CreateDomain(Assembly repositoryAssembly, Type repositoryType)
{
    // Obsolete alias kept for backward compatibility; forwards to CreateRepository.
    return CreateRepository(repositoryAssembly, repositoryType);
}
/// <summary>
/// Creates a repository for the specified assembly and repository type.
/// </summary>
/// <param name="repositoryAssembly">The assembly to use to get the name of the repository.</param>
/// <param name="repositoryType">A <see cref="Type"/> that implements <see cref="ILoggerRepository"/>
/// and has a no arg constructor. An instance of this type will be created to act
/// as the <see cref="ILoggerRepository"/> for the repository specified.</param>
/// <returns>The <see cref="ILoggerRepository"/> created for the repository.</returns>
/// <remarks>
/// <para>
/// The <see cref="ILoggerRepository"/> created will be associated with the repository
/// specified such that a call to <see cref="M:GetRepository(Assembly)"/> with the
/// same assembly specified will return the same repository instance.
/// </para>
/// </remarks>
public static ILoggerRepository CreateRepository(Assembly repositoryAssembly, Type repositoryType)
{
    if (null == repositoryAssembly)
    {
        throw new ArgumentNullException("repositoryAssembly");
    }
    if (null == repositoryType)
    {
        throw new ArgumentNullException("repositoryType");
    }

    // The selector associates the new repository with the assembly so a
    // later GetRepository(Assembly) returns the same instance.
    IRepositorySelector selector = RepositorySelector;
    return selector.CreateRepository(repositoryAssembly, repositoryType);
}
/// <summary>
/// Gets an array of all currently defined repositories.
/// </summary>
/// <returns>An array of all the known <see cref="ILoggerRepository"/> objects.</returns>
/// <remarks>
/// <para>
/// Gets an array of all currently defined repositories.
/// </para>
/// </remarks>
public static ILoggerRepository[] GetAllRepositories()
{
    // Delegates to the configured selector, which holds the set of
    // currently defined repositories.
    return RepositorySelector.GetAllRepositories();
}
/// <summary>
/// Gets or sets the repository selector used by the <see cref="LogManager" />.
/// </summary>
/// <value>
/// The repository selector used by the <see cref="LogManager" />.
/// </value>
/// <remarks>
/// <para>
/// The repository selector (<see cref="IRepositorySelector"/>) is used by
/// the <see cref="LogManager"/> to create and select repositories
/// (<see cref="ILoggerRepository"/>).
/// </para>
/// <para>
/// The caller to <see cref="LogManager"/> supplies either a string name
/// or an assembly (if not supplied the assembly is inferred using
/// <see cref="M:Assembly.GetCallingAssembly()"/>).
/// </para>
/// <para>
/// This context is used by the selector to lookup a specific repository.
/// </para>
/// <para>
/// For the full .NET Framework, the default repository is <c>DefaultRepositorySelector</c>;
/// for the .NET Compact Framework <c>CompactRepositorySelector</c> is the default
/// repository.
/// </para>
/// </remarks>
public static IRepositorySelector RepositorySelector
{
    // NOTE(review): access to s_repositorySelector is not synchronized;
    // callers appear expected to set this once during startup — confirm.
    get { return s_repositorySelector; }
    set { s_repositorySelector = value; }
}
#endregion Public Static Methods
#region Private Static Methods
/// <summary>
/// Internal method to get pertinent version info.
/// </summary>
/// <returns>A string of version info.</returns>
private static string GetVersionInfo()
{
    System.Text.StringBuilder sb = new System.Text.StringBuilder();

    // Grab the currently executing assembly
    Assembly myAssembly = Assembly.GetExecutingAssembly();

    // Build Up message
    sb.Append("log4net assembly [").Append(myAssembly.FullName).Append("]. ");
    sb.Append("Loaded from [").Append(SystemInfo.AssemblyLocationInfo(myAssembly)).Append("]. ");
    sb.Append("(.NET Runtime [").Append(Environment.Version.ToString()).Append("]");
#if (!SSCLI)
    // OSVersion is unavailable on the shared-source CLI build.
    sb.Append(" on ").Append(Environment.OSVersion.ToString());
#endif
    sb.Append(")");

    return sb.ToString();
}
#if (!NETCF)
/// <summary>
/// Called when the <see cref="AppDomain.DomainUnload"/> event fires
/// </summary>
/// <param name="sender">the <see cref="AppDomain"/> that is exiting</param>
/// <param name="e">null</param>
/// <remarks>
/// <para>
/// Called when the <see cref="AppDomain.DomainUnload"/> event fires.
/// </para>
/// <para>
/// When the event is triggered the log4net system is <see cref="M:Shutdown()"/>.
/// </para>
/// </remarks>
private static void OnDomainUnload(object sender, EventArgs e)
{
    // Shut down all repositories so appenders flush and close before
    // the domain is torn down.
    Shutdown();
}
/// <summary>
/// Called when the <see cref="AppDomain.ProcessExit"/> event fires
/// </summary>
/// <param name="sender">the <see cref="AppDomain"/> that is exiting</param>
/// <param name="e">null</param>
/// <remarks>
/// <para>
/// Called when the <see cref="AppDomain.ProcessExit"/> event fires.
/// </para>
/// <para>
/// When the event is triggered the log4net system is <see cref="M:Shutdown()"/>.
/// </para>
/// </remarks>
private static void OnProcessExit(object sender, EventArgs e)
{
    // Shut down all repositories so appenders flush and close before
    // the process terminates.
    Shutdown();
}
#endif
#endregion Private Static Methods
#region Private Static Fields
/// <summary>
/// The fully qualified type of the LoggerManager class.
/// </summary>
/// <remarks>
/// Used by the internal logger to record the Type of the
/// log message.
/// </remarks>
private readonly static Type declaringType = typeof(LoggerManager);
/// <summary>
/// Initialize the default repository selector
/// </summary>
private static IRepositorySelector s_repositorySelector;
#endregion Private Static Fields
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Batch
{
using System.Linq;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
public partial class BatchManagementClient : Microsoft.Rest.ServiceClient<BatchManagementClient>, IBatchManagementClient, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public System.Uri BaseUri { get; set; }

/// <summary>
/// Gets the json serialization settings.
/// </summary>
public Newtonsoft.Json.JsonSerializerSettings SerializationSettings { get; private set; }

/// <summary>
/// Gets the json deserialization settings.
/// </summary>
public Newtonsoft.Json.JsonSerializerSettings DeserializationSettings { get; private set; }

/// <summary>
/// Gets the credentials needed for the client to connect to Azure.
/// </summary>
public Microsoft.Rest.ServiceClientCredentials Credentials { get; private set; }

/// <summary>
/// A unique identifier of a Microsoft Azure subscription. The subscription id
/// forms part of the URI for every service call.
/// </summary>
public string SubscriptionId { get; set; }

/// <summary>
/// Gets the client API Version sent with each request.
/// </summary>
public string ApiVersion { get; private set; }

/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }

/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }

/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }

/// <summary>
/// Gets the IBatchAccountOperations.
/// </summary>
public virtual IBatchAccountOperations BatchAccount { get; private set; }

/// <summary>
/// Gets the IApplicationPackageOperations.
/// </summary>
public virtual IApplicationPackageOperations ApplicationPackage { get; private set; }

/// <summary>
/// Gets the IApplicationOperations.
/// </summary>
public virtual IApplicationOperations Application { get; private set; }

/// <summary>
/// Gets the ILocationOperations.
/// </summary>
public virtual ILocationOperations Location { get; private set; }
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected BatchManagementClient(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected BatchManagementClient(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected BatchManagementClient(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected BatchManagementClient(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public BatchManagementClient(Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public BatchManagementClient(Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public BatchManagementClient(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the BatchManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public BatchManagementClient(System.Uri baseUri, Microsoft.Rest.ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// An optional partial-method to perform custom initialization.
/// </summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
this.BatchAccount = new BatchAccountOperations(this);
this.ApplicationPackage = new ApplicationPackageOperations(this);
this.Application = new ApplicationOperations(this);
this.Location = new LocationOperations(this);
this.BaseUri = new System.Uri("https://management.azure.com");
this.ApiVersion = "2015-12-01";
this.AcceptLanguage = "en-US";
this.LongRunningOperationRetryTimeout = 30;
this.GenerateClientRequestId = true;
SerializationSettings = new Newtonsoft.Json.JsonSerializerSettings
{
Formatting = Newtonsoft.Json.Formatting.Indented,
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
{
new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
}
};
SerializationSettings.Converters.Add(new Microsoft.Rest.Serialization.TransformationJsonConverter());
DeserializationSettings = new Newtonsoft.Json.JsonSerializerSettings
{
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new Microsoft.Rest.Serialization.ReadOnlyJsonContractResolver(),
Converters = new System.Collections.Generic.List<Newtonsoft.Json.JsonConverter>
{
new Microsoft.Rest.Serialization.Iso8601TimeSpanConverter()
}
};
CustomInitialize();
DeserializationSettings.Converters.Add(new Microsoft.Rest.Serialization.TransformationJsonConverter());
DeserializationSettings.Converters.Add(new Microsoft.Rest.Azure.CloudErrorJsonConverter());
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Threading;
using BTDB.Buffer;
using BTDB.Collections;
using BTDB.FieldHandler;
using BTDB.IL;
using BTDB.KVDBLayer;
using BTDB.StreamLayer;
using Extensions = BTDB.FieldHandler.Extensions;
namespace BTDB.ODBLayer
{
public class RelationInfo
{
    // Relation identity and static configuration captured at construction time.
    readonly uint _id; // numeric id of the relation inside the database
    readonly string _name; // persistent name of the relation
    readonly IRelationInfoResolver _relationInfoResolver; // supplies field-handler factory, converters and (optional) DI container
    readonly Type _interfaceType; // user-declared relation interface
    readonly Type _clientType; // item (row) type stored in the relation
    readonly object _defaultClientObject; // pristine instance of the item type (builder.PristineItemInstance)
    // Indexed by persisted version number; slots may be null for unknown versions.
    RelationVersionInfo?[] _relationVersions = Array.Empty<RelationVersionInfo?>();
    // IL-generated savers; assigned in CreateCreatorLoadersAndSavers.
    Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object> _primaryKeysSaver;
    Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object> _valueSaver;
    // One loader bundle per item type requested by the relation interface.
    internal StructList<ItemLoaderInfo> ItemLoaderInfos;
/// <summary>
/// Per-item-type bundle of IL-generated loaders for one relation: an eagerly
/// built primary-key loader plus value loaders created lazily for each
/// persisted relation version.
/// </summary>
public class ItemLoaderInfo
{
    readonly RelationInfo _owner;
    readonly Type _itemType;
    public ItemLoaderInfo(RelationInfo owner, Type itemType)
    {
        _owner = owner;
        _itemType = itemType;
        // One lazily-filled slot per known relation version (see GetValueLoader).
        _valueLoaders = new Action<IInternalObjectDBTransaction, AbstractBufferedReader, object>?[_owner._relationVersions.Length];
        _primaryKeysLoader = CreatePkLoader(itemType, _owner.ClientRelationVersionInfo.PrimaryKeyFields.Span,
            $"RelationKeyLoader_{_owner.Name}_{itemType.ToSimpleName()}");
    }
    // Materializes one item: primary-key fields come from keyBytes; the value
    // payload starts with a VUInt32 version number selecting the value loader.
    internal object CreateInstance(IInternalObjectDBTransaction tr, ByteBuffer keyBytes, ByteBuffer valueBytes)
    {
        var reader = new ByteBufferReader(keyBytes);
        var obj = _primaryKeysLoader(tr, reader);
        reader.Restart(valueBytes);
        var version = reader.ReadVUInt32();
        GetValueLoader(version)(tr, reader, obj);
        return obj;
    }
    readonly Func<IInternalObjectDBTransaction, AbstractBufferedReader, object> _primaryKeysLoader;
    readonly Action<IInternalObjectDBTransaction, AbstractBufferedReader, object>?[] _valueLoaders;
    // Returns the value loader for the given stored version, generating it on
    // first use; the CAS loop publishes exactly one instance under concurrency.
    Action<IInternalObjectDBTransaction, AbstractBufferedReader, object> GetValueLoader(uint version)
    {
        Action<IInternalObjectDBTransaction, AbstractBufferedReader, object>? res;
        do
        {
            res = _valueLoaders[version];
            if (res != null) return res;
            res = CreateLoader(_itemType,
                _owner._relationVersions[version]!.Fields.Span, $"RelationValueLoader_{_owner.Name}_{version}_{_itemType.ToSimpleName()}");
        } while (Interlocked.CompareExchange(ref _valueLoaders[version], res, null) != null);
        return res;
    }
    // Builds a delegate that creates a fresh instance (through the DI container
    // when it can resolve Func<instanceType>, otherwise via the parameterless
    // constructor) and then loads all primary-key fields into it.
    Func<IInternalObjectDBTransaction, AbstractBufferedReader, object> CreatePkLoader(Type instanceType,
        ReadOnlySpan<TableFieldInfo> fields, string loaderName)
    {
        var thatType = typeof(Func<>).MakeGenericType(instanceType);
        var method =
            ILBuilder.Instance.NewMethod(
                loaderName, typeof(Func<IInternalObjectDBTransaction, AbstractBufferedReader, object>), typeof(Func<object>));
        var ilGenerator = method.Generator;
        var container = _owner._relationInfoResolver.Container;
        object that = null;
        if (container != null)
        {
            that = container.ResolveOptional(thatType);
        }
        ilGenerator.DeclareLocal(instanceType);
        if (that == null)
        {
            ilGenerator
                .Newobj(instanceType.GetConstructor(Type.EmptyTypes)!)
                .Stloc(0);
        }
        else
        {
            // arg 0 is the factory delegate bound by method.Create(that) below.
            ilGenerator
                .Ldarg(0)
                .Callvirt(thatType.GetMethod(nameof(Func<object>.Invoke))!)
                .Stloc(0);
        }
        // (handler, converter, setter); a null converter means the stored field has
        // no compatible settable property and its bytes will only be skipped.
        var loadInstructions = new StructList<(IFieldHandler, Action<IILGen>?, MethodInfo?)>();
        var props = instanceType.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
        foreach (var srcFieldInfo in fields)
        {
            var fieldInfo = props.FirstOrDefault(p => GetPersistentName(p) == srcFieldInfo.Name);
            if (fieldInfo != null)
            {
                var setterMethod = fieldInfo.GetSetMethod(true);
                var fieldType = setterMethod!.GetParameters()[0].ParameterType;
                var specializedSrcHandler =
                    srcFieldInfo.Handler!.SpecializeLoadForType(fieldType, null);
                var willLoad = specializedSrcHandler.HandledType();
                var converterGenerator =
                    _owner._relationInfoResolver.TypeConvertorGenerator.GenerateConversion(willLoad, fieldType);
                if (converterGenerator != null)
                {
                    loadInstructions.Add((specializedSrcHandler, converterGenerator, setterMethod));
                    continue;
                }
            }
            loadInstructions.Add((srcFieldInfo.Handler!, null, null));
        }
        // Remove useless skips from end
        while (loadInstructions.Count > 0 && loadInstructions.Last.Item2 == null)
        {
            loadInstructions.RemoveAt(^1);
        }
        var anyNeedsCtx = false;
        for (var i = 0; i < loadInstructions.Count; i++)
        {
            if (!loadInstructions[i].Item1.NeedsCtx()) continue;
            anyNeedsCtx = true;
            break;
        }
        if (anyNeedsCtx)
        {
            // Some handler needs an IReaderCtx: build one from (arg1 = transaction, arg2 = reader).
            ilGenerator.DeclareLocal(typeof(IReaderCtx));
            ilGenerator
                .Ldarg(1)
                .Ldarg(2)
                .Newobj(() => new DBReaderCtx(null, null))
                .Stloc(1);
        }
        for (var i = 0; i < loadInstructions.Count; i++)
        {
            ref var loadInstruction = ref loadInstructions[i];
            Action<IILGen> readerOrCtx;
            if (loadInstruction.Item1.NeedsCtx())
                readerOrCtx = il => il.Ldloc(1);
            else
                readerOrCtx = il => il.Ldarg(2);
            if (loadInstruction.Item2 != null)
            {
                // Emits: instance.setter(convert(handler.Load(readerOrCtx)))
                ilGenerator.Ldloc(0);
                loadInstruction.Item1.Load(ilGenerator, readerOrCtx);
                loadInstruction.Item2(ilGenerator);
                ilGenerator.Call(loadInstruction.Item3!);
                continue;
            }
            loadInstruction.Item1.Skip(ilGenerator, readerOrCtx);
        }
        ilGenerator.Ldloc(0).Ret();
        return (Func<IInternalObjectDBTransaction, AbstractBufferedReader, object>)method.Create(that);
    }
    // Builds a delegate that fills an existing instance (arg 2) with the value
    // fields stored under a given version: each stored field is either loaded and
    // converted into its property or skipped; instance fields absent from the
    // stored version get IFieldHandlerWithInit.Init when the handler requires it.
    Action<IInternalObjectDBTransaction, AbstractBufferedReader, object> CreateLoader(Type instanceType,
        ReadOnlySpan<TableFieldInfo> fields, string loaderName)
    {
        var method =
            ILBuilder.Instance.NewMethod<Action<IInternalObjectDBTransaction, AbstractBufferedReader, object>>(
                loaderName);
        var ilGenerator = method.Generator;
        ilGenerator.DeclareLocal(instanceType);
        ilGenerator
            .Ldarg(2)
            .Castclass(instanceType)
            .Stloc(0);
        var instanceTableFieldInfos = new StructList<TableFieldInfo>();
        // (handler, converter, setter, Init); Init marks fields to initialize rather than load.
        var loadInstructions = new StructList<(IFieldHandler, Action<IILGen>?, MethodInfo?, bool Init)>();
        var props = instanceType.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
        var persistentNameToPropertyInfo = new RefDictionary<string, PropertyInfo>();
        var publicFields = instanceType.GetFields(BindingFlags.Public | BindingFlags.Instance);
        foreach (var field in publicFields)
        {
            // Public fields are not supported for storage; they must be opted out explicitly.
            if (field.GetCustomAttribute<NotStoredAttribute>(true)!=null) continue;
            throw new BTDBException($"Public field {instanceType.ToSimpleName()}.{field.Name} must have NotStoredAttribute. It is just intermittent, until they can start to be supported.");
        }
        foreach (var pi in props)
        {
            if (pi.GetCustomAttributes(typeof(NotStoredAttribute), true).Length != 0) continue;
            if (pi.GetIndexParameters().Length != 0) continue;
            var tfi = TableFieldInfo.Build(_owner.Name, pi, _owner._relationInfoResolver.FieldHandlerFactory,
                FieldHandlerOptions.None);
            instanceTableFieldInfos.Add(tfi);
            persistentNameToPropertyInfo.GetOrAddValueRef(tfi.Name) = pi;
        }
        foreach (var srcFieldInfo in fields)
        {
            var fieldInfo = persistentNameToPropertyInfo.GetOrFakeValueRef(srcFieldInfo.Name);
            if (fieldInfo != null)
            {
                var setterMethod = fieldInfo.GetSetMethod(true);
                var fieldType = setterMethod!.GetParameters()[0].ParameterType;
                var specializedSrcHandler =
                    srcFieldInfo.Handler!.SpecializeLoadForType(fieldType, null);
                var willLoad = specializedSrcHandler.HandledType();
                var converterGenerator =
                    _owner._relationInfoResolver.TypeConvertorGenerator.GenerateConversion(willLoad, fieldType);
                if (converterGenerator != null)
                {
                    // Field will be loaded; remove it from the "may need Init" candidates.
                    for (var i = 0; i < instanceTableFieldInfos.Count; i++)
                    {
                        if (instanceTableFieldInfos[i].Name != srcFieldInfo.Name) continue;
                        instanceTableFieldInfos.RemoveAt(i);
                        break;
                    }
                    loadInstructions.Add((specializedSrcHandler, converterGenerator, setterMethod, false));
                    continue;
                }
            }
            loadInstructions.Add((srcFieldInfo.Handler!, null, null, false));
        }
        // Remove useless skips from end
        while (loadInstructions.Count > 0 && loadInstructions.Last.Item2 == null)
        {
            loadInstructions.RemoveAt(^1);
        }
        // Instance fields not present in the stored version may still need Init.
        foreach (var srcFieldInfo in instanceTableFieldInfos)
        {
            var iFieldHandlerWithInit = srcFieldInfo.Handler as IFieldHandlerWithInit;
            if (iFieldHandlerWithInit == null) continue;
            var specializedSrcHandler = srcFieldInfo.Handler;
            var willLoad = specializedSrcHandler.HandledType();
            var fieldInfo = persistentNameToPropertyInfo.GetOrFakeValueRef(srcFieldInfo.Name);
            var setterMethod = fieldInfo.GetSetMethod(true);
            var converterGenerator =
                _owner._relationInfoResolver.TypeConvertorGenerator.GenerateConversion(willLoad,
                    setterMethod!.GetParameters()[0].ParameterType);
            if (converterGenerator == null) continue;
            if (!iFieldHandlerWithInit.NeedInit()) continue;
            loadInstructions.Add((specializedSrcHandler, converterGenerator, setterMethod, true));
        }
        var anyNeedsCtx = false;
        for (var i = 0; i < loadInstructions.Count; i++)
        {
            if (!loadInstructions[i].Item1.NeedsCtx()) continue;
            anyNeedsCtx = true;
            break;
        }
        if (anyNeedsCtx)
        {
            // Some handler needs an IReaderCtx: build one from (arg0 = transaction, arg1 = reader).
            ilGenerator.DeclareLocal(typeof(IReaderCtx));
            ilGenerator
                .Ldarg(0)
                .Ldarg(1)
                .Newobj(() => new DBReaderCtx(null, null))
                .Stloc(1);
        }
        for (var i = 0; i < loadInstructions.Count; i++)
        {
            ref var loadInstruction = ref loadInstructions[i];
            Action<IILGen> readerOrCtx;
            if (loadInstruction.Item1.NeedsCtx())
                readerOrCtx = il => il.Ldloc(1);
            else
                readerOrCtx = il => il.Ldarg(1);
            if (loadInstruction.Item2 != null)
            {
                ilGenerator.Ldloc(0);
                if (loadInstruction.Init)
                {
                    ((IFieldHandlerWithInit)loadInstruction.Item1).Init(ilGenerator, readerOrCtx);
                }
                else
                {
                    loadInstruction.Item1.Load(ilGenerator, readerOrCtx);
                }
                loadInstruction.Item2(ilGenerator);
                ilGenerator.Call(loadInstruction.Item3!);
                continue;
            }
            loadInstruction.Item1.Skip(ilGenerator, readerOrCtx);
        }
        ilGenerator.Ret();
        return method.Create();
    }
}
// Per-version finders collecting ids of IDictionaries referenced from stored
// values (free-content support); slots created lazily via GetIDictFinder.
Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>>?[]
    _valueIDictFinders =
        Array.Empty<Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>>?>();
//SK
Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object>[]
    _secondaryKeysSavers; //secondary key idx => sk key saver
// Savers converting stored pk+value bytes to secondary-key bytes, keyed by a
// composite ulong. NOTE(review): key composition is defined where these caches
// are populated, outside this chunk — confirm there.
readonly ConcurrentDictionary<ulong, Action<IInternalObjectDBTransaction, AbstractBufferedWriter,
        AbstractBufferedReader, AbstractBufferedReader, object>>
    _secondaryKeysConvertSavers =
        new ConcurrentDictionary<ulong, Action<IInternalObjectDBTransaction, AbstractBufferedWriter,
            AbstractBufferedReader, AbstractBufferedReader, object>>();
// Loaders reconstructing primary-key bytes from secondary-key + value readers.
readonly ConcurrentDictionary<ulong,
        Action<AbstractBufferedReader, AbstractBufferedReader, AbstractBufferedWriter>>
    _secondaryKeyValueToPKLoader =
        new ConcurrentDictionary<ulong,
            Action<AbstractBufferedReader, AbstractBufferedReader, AbstractBufferedWriter>>();
/// <summary>
/// Cache key pairing a field handler with the concrete CLR type it loads into
/// (used as the key of the _simpleLoader cache). Equality compares both
/// components by reference identity.
/// </summary>
public readonly struct SimpleLoaderType : IEquatable<SimpleLoaderType>
{
    public IFieldHandler FieldHandler { get; }
    public Type RealType { get; }
    public SimpleLoaderType(IFieldHandler fieldHandler, Type realType)
    {
        FieldHandler = fieldHandler;
        RealType = realType;
    }
    public bool Equals(SimpleLoaderType other)
    {
        return FieldHandler == other.FieldHandler && RealType == other.RealType;
    }
    // Fix: the struct implemented IEquatable<T>.Equals without the matching
    // object.Equals/GetHashCode overrides even though it is used as a
    // ConcurrentDictionary key; provide overrides consistent with Equals.
    public override bool Equals(object? obj)
    {
        return obj is SimpleLoaderType other && Equals(other);
    }
    public override int GetHashCode()
    {
        return HashCode.Combine(FieldHandler, RealType);
    }
}
readonly
    ConcurrentDictionary<SimpleLoaderType, object> //object is of type Action<AbstractBufferedReader, IReaderCtx, (object or value type same as in conc. dic. key)>
    _simpleLoader = new ConcurrentDictionary<SimpleLoaderType, object>();
// Lists of dictionary ids gathered for free-content processing — presumably
// old vs. new value comparison; verify at usage sites outside this chunk.
internal readonly List<ulong> FreeContentOldDict = new List<ulong>();
internal readonly List<ulong> FreeContentNewDict = new List<ulong>();
// Key prefixes of this relation's primary data and secondary-key areas
// (computed by CalculatePrefix, outside this chunk).
internal byte[] Prefix;
internal byte[] PrefixSecondary;
// Tri-state cache for NeedImplementFreeContent (null = not yet computed).
bool? _needImplementFreeContent;
// Mapping related to "prime" secondary-key indexes — TODO confirm semantics at usage sites.
internal byte[]? PrimeSK2Real;
/// <summary>
/// Loads the persisted version descriptors of this relation, matches them
/// against the client's current schema and either adopts the last persisted
/// version (when equal) or persists the client schema as a new version,
/// migrating secondary keys as needed.
/// </summary>
public RelationInfo(uint id, string name, RelationBuilder builder, IInternalObjectDBTransaction tr)
{
    _id = id;
    _name = name;
    _relationInfoResolver = builder.RelationInfoResolver;
    _interfaceType = builder.InterfaceType;
    _clientType = builder.ItemType;
    _defaultClientObject = builder.PristineItemInstance;
    CalculatePrefix();
    LoadUnresolvedVersionInfos(tr.KeyValueDBTransaction);
    ResolveVersionInfos();
    ClientRelationVersionInfo = CreateVersionInfoFromPrime(builder.ClientRelationVersionInfo);
    Extensions.RegisterFieldHandlers(ClientRelationVersionInfo.GetAllFields().ToArray().Select(a=>a.Handler), tr.Owner);
    ApartFields = builder.ApartFields;
    foreach (var loadType in builder.LoadTypes)
    {
        ItemLoaderInfos.Add(new ItemLoaderInfo(this, loadType));
    }
    if (LastPersistedVersion > 0 &&
        RelationVersionInfo.Equal(_relationVersions[LastPersistedVersion]!, ClientRelationVersionInfo))
    {
        // Schema unchanged: reuse the last persisted version as the client version.
        _relationVersions[LastPersistedVersion] = ClientRelationVersionInfo;
        ClientTypeVersion = LastPersistedVersion;
        CreateCreatorLoadersAndSavers();
        CheckSecondaryKeys(tr, ClientRelationVersionInfo);
    }
    else
    {
        // Schema changed (or first use): persist the client schema under a new version number.
        ClientTypeVersion = LastPersistedVersion + 1;
        _relationVersions[ClientTypeVersion] = ClientRelationVersionInfo;
        var writerKey = new ByteBufferWriter();
        writerKey.WriteByteArrayRaw(ObjectDB.RelationVersionsPrefix);
        writerKey.WriteVUInt32(_id);
        writerKey.WriteVUInt32(ClientTypeVersion);
        var writerValue = new ByteBufferWriter();
        ClientRelationVersionInfo.Save(writerValue);
        tr.KeyValueDBTransaction.SetKeyPrefix(ByteBuffer.NewEmpty());
        tr.KeyValueDBTransaction.CreateOrUpdateKeyValue(writerKey.Data, writerValue.Data);
        CreateCreatorLoadersAndSavers();
        if (LastPersistedVersion > 0)
        {
            // Validate/migrate against the previously persisted schema.
            CheckThatPrimaryKeyHasNotChanged(tr, name, ClientRelationVersionInfo,
                _relationVersions[LastPersistedVersion]!);
            UpdateSecondaryKeys(tr, ClientRelationVersionInfo, _relationVersions[LastPersistedVersion]!);
        }
    }
}
// Verifies the primary key layout did not change between the previous and the
// current schema. Incompatible changes either wipe the relation's data (when
// SelfHealing is on) or throw a BTDBException.
void CheckThatPrimaryKeyHasNotChanged(IInternalObjectDBTransaction tr, string name,
    RelationVersionInfo info, RelationVersionInfo previousInfo)
{
    var db = tr.Owner;
    var currentPk = info.PrimaryKeyFields;
    var formerPk = previousInfo.PrimaryKeyFields;
    if (currentPk.Length != formerPk.Length)
    {
        if (!db.ActualOptions.SelfHealing)
            throw new BTDBException(
                $"Change of primary key in relation '{name}' is not allowed. Field count {currentPk.Length} != {formerPk.Length}.");
        db.Logger?.ReportIncompatiblePrimaryKey(name, $"{currentPk.Length}!={formerPk.Length}");
        ClearRelationData(tr, previousInfo);
        return;
    }
    for (var idx = 0; idx < currentPk.Length; idx++)
    {
        if (ArePrimaryKeyFieldsCompatible(currentPk.Span[idx].Handler!, formerPk.Span[idx].Handler!))
            continue;
        // Incompatibility is always logged, regardless of the self-healing setting.
        db.Logger?.ReportIncompatiblePrimaryKey(name, currentPk.Span[idx].Name);
        if (!db.ActualOptions.SelfHealing)
            throw new BTDBException(
                $"Change of primary key in relation '{name}' is not allowed. Field '{currentPk.Span[idx].Name}' is not compatible.");
        ClearRelationData(tr, previousInfo);
        return;
    }
}
// Two primary-key handlers are compatible when they handle the same type, or
// both handle enums whose binary representations are subset-compatible.
static bool ArePrimaryKeyFieldsCompatible(IFieldHandler newHandler, IFieldHandler previousHandler)
{
    var currentType = newHandler.HandledType();
    var formerType = previousHandler.HandledType();
    if (currentType == formerType)
        return true;
    if (!currentType.IsEnum || !formerType.IsEnum)
        return false;
    var formerCfg =
        new EnumFieldHandler.EnumConfiguration(((EnumFieldHandler) previousHandler).Configuration);
    var currentCfg = new EnumFieldHandler.EnumConfiguration(((EnumFieldHandler) newHandler).Configuration);
    return formerCfg.IsBinaryRepresentationSubsetOf(currentCfg);
}
// Lazily computed answer to "does any stored version reference content that
// must be freed?"; CalcNeedImplementFreeContent always leaves a value behind.
public bool NeedImplementFreeContent()
{
    if (_needImplementFreeContent == null)
        CalcNeedImplementFreeContent();
    return _needImplementFreeContent!.Value;
}
// Walks all known versions; building an IDict finder may set
// _needImplementFreeContent as a side effect, in which case we stop early.
void CalcNeedImplementFreeContent()
{
    var versionIndex = 0;
    while (versionIndex < _relationVersions.Length)
    {
        if (_relationVersions[versionIndex] != null)
        {
            GetIDictFinder((uint) versionIndex);
            if (_needImplementFreeContent.HasValue)
                return;
        }
        versionIndex++;
    }
    _needImplementFreeContent = false;
}
// Rebuilds any secondary index whose entry count disagrees with the relation's
// primary data (a sign of a previously interrupted write).
void CheckSecondaryKeys(IInternalObjectDBTransaction tr, RelationVersionInfo info)
{
    var expectedCount = GetRelationCount(tr);
    var toRebuild = new StructList<KeyValuePair<uint, SecondaryKeyInfo>>();
    foreach (var secondaryKey in info.SecondaryKeys)
    {
        if (!WrongCountInSecondaryKey(tr.KeyValueDBTransaction, expectedCount, secondaryKey.Key))
            continue;
        DeleteSecondaryKey(tr.KeyValueDBTransaction, secondaryKey.Key);
        toRebuild.Add(secondaryKey);
    }
    if (toRebuild.Count != 0)
        CalculateSecondaryKey(tr, toRebuild);
}
// Number of items in the relation = number of keys under the primary prefix.
long GetRelationCount(IInternalObjectDBTransaction tr)
{
    var kvTransaction = tr.KeyValueDBTransaction;
    kvTransaction.SetKeyPrefix(Prefix);
    return kvTransaction.GetKeyValueCount();
}
// Migrates secondary indexes after a schema change: removes indexes that no
// longer exist, rebuilds new ones and any existing index with a wrong count.
void UpdateSecondaryKeys(IInternalObjectDBTransaction tr, RelationVersionInfo info,
    RelationVersionInfo previousInfo)
{
    var expectedCount = GetRelationCount(tr);
    foreach (var formerIndex in previousInfo.SecondaryKeys.Keys)
    {
        if (info.SecondaryKeys.ContainsKey(formerIndex))
            continue;
        DeleteSecondaryKey(tr.KeyValueDBTransaction, formerIndex);
    }
    var toBuild = new StructList<KeyValuePair<uint, SecondaryKeyInfo>>();
    foreach (var secondaryKey in info.SecondaryKeys)
    {
        if (!previousInfo.SecondaryKeys.ContainsKey(secondaryKey.Key))
        {
            // Brand new index: nothing to delete, just build it.
            toBuild.Add(secondaryKey);
        }
        else if (WrongCountInSecondaryKey(tr.KeyValueDBTransaction, expectedCount, secondaryKey.Key))
        {
            // Existing index is inconsistent: wipe and rebuild.
            DeleteSecondaryKey(tr.KeyValueDBTransaction, secondaryKey.Key);
            toBuild.Add(secondaryKey);
        }
    }
    if (toBuild.Count != 0)
        CalculateSecondaryKey(tr, toBuild);
}
// An index is suspect when its entry count differs from the relation's item count.
bool WrongCountInSecondaryKey(IKeyValueDBTransaction tr, long count, uint index)
{
    SetPrefixToSecondaryKey(tr, index);
    return tr.GetKeyValueCount() != count;
}
// Self-healing path: drops every secondary index and then all primary data of
// this relation.
void ClearRelationData(IInternalObjectDBTransaction tr, RelationVersionInfo info)
{
    foreach (var index in info.SecondaryKeys.Keys)
        DeleteSecondaryKey(tr.KeyValueDBTransaction, index);
    var prefixWriter = new ByteBufferWriter();
    prefixWriter.WriteBlock(ObjectDB.AllRelationsPKPrefix);
    prefixWriter.WriteVUInt32(Id);
    var kvTransaction = tr.KeyValueDBTransaction;
    kvTransaction.SetKeyPrefix(prefixWriter.Data);
    kvTransaction.EraseAll();
}
// Positions the transaction on the given index's prefix and wipes everything under it.
void DeleteSecondaryKey(IKeyValueDBTransaction keyValueTr, uint index)
{
    SetPrefixToSecondaryKey(keyValueTr, index);
    keyValueTr.EraseAll();
}
// Secondary-key prefix = relation SK prefix followed by one byte of index.
void SetPrefixToSecondaryKey(IKeyValueDBTransaction keyValueTr, uint index)
{
    var prefixWriter = new ByteBufferWriter();
    prefixWriter.WriteBlock(PrefixSecondary);
    prefixWriter.WriteUInt8((byte) index);
    keyValueTr.SetKeyPrefix(prefixWriter.Data);
}
// Rebuilds the given secondary indexes by enumerating every item in the
// relation and writing freshly computed secondary-key bytes for each index.
void CalculateSecondaryKey(IInternalObjectDBTransaction tr, ReadOnlySpan<KeyValuePair<uint, SecondaryKeyInfo>> indexes)
{
    var keyWriter = new ByteBufferWriter();
    var enumeratorType = typeof(RelationEnumerator<>).MakeGenericType(_clientType);
    keyWriter.WriteByteArrayRaw(Prefix);
    // keyWriter is reused below; GetDataAndRewind hands out the prefix and resets it.
    var enumerator = (IEnumerator) Activator.CreateInstance(enumeratorType, tr, this,
        keyWriter.GetDataAndRewind().ToAsyncSafe(), new SimpleModificationCounter(), 0);
    // One IL-generated key saver per index being rebuilt.
    var keySavers = new Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object>[indexes.Length];
    for (var i = 0; i < indexes.Length; i++)
    {
        keySavers[i] = CreateSaver(ClientRelationVersionInfo.GetSecondaryKeyFields(indexes[i].Key),
            $"Relation_{Name}_Upgrade_SK_{indexes[i].Value.Name}_KeySaver");
    }
    while (enumerator!.MoveNext())
    {
        var obj = enumerator.Current;
        tr.TransactionProtector.Start();
        tr.KeyValueDBTransaction.SetKeyPrefix(PrefixSecondary);
        for (var i = 0; i < indexes.Length; i++)
        {
            // Key layout: one byte of index number followed by the saved key fields.
            keyWriter.WriteUInt8((byte)indexes[i].Key);
            keySavers[i](tr, keyWriter, obj);
            var keyBytes = keyWriter.GetDataAndRewind();
            if (!tr.KeyValueDBTransaction.CreateOrUpdateKeyValue(keyBytes, ByteBuffer.NewEmpty()))
                throw new BTDBException("Internal error, secondary key bytes must be always unique.");
        }
    }
}
// Reads all persisted version descriptors of this relation (field handlers not
// yet resolved) and records the highest persisted version number.
void LoadUnresolvedVersionInfos(IKeyValueDBTransaction tr)
{
    LastPersistedVersion = 0;
    var writer = new ByteBufferWriter();
    writer.WriteByteArrayRaw(ObjectDB.RelationVersionsPrefix);
    writer.WriteVUInt32(_id);
    tr.SetKeyPrefix(writer.Data);
    var relationVersions = new Dictionary<uint, RelationVersionInfo>();
    if (tr.FindFirstKey())
    {
        var keyReader = new KeyValueDBKeyReader(tr);
        var valueReader = new KeyValueDBValueReader(tr);
        do
        {
            keyReader.Restart();
            valueReader.Restart();
            // Key suffix is the version number; value holds the serialized schema.
            LastPersistedVersion = keyReader.ReadVUInt32();
            var relationVersionInfo = RelationVersionInfo.LoadUnresolved(valueReader, _name);
            relationVersions[LastPersistedVersion] = relationVersionInfo;
        } while (tr.FindNextKey());
    }
    // +2 leaves a slot for a potential new client version at LastPersistedVersion + 1.
    _relationVersions = new RelationVersionInfo[LastPersistedVersion + 2];
    foreach (var (key, value) in relationVersions)
    {
        _relationVersions[key] = value;
    }
    _valueIDictFinders = new Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>>?[_relationVersions.Length];
}
// Resolves field handlers for every loaded version descriptor (null slots skipped).
void ResolveVersionInfos()
{
    for (var i = 0; i < _relationVersions.Length; i++)
    {
        _relationVersions[i]?.ResolveFieldHandlers(_relationInfoResolver.FieldHandlerFactory);
    }
}
internal uint Id => _id; // relation id inside the database
internal string Name => _name; // persistent relation name
internal Type ClientType => _clientType; // stored item type
internal Type? InterfaceType => _interfaceType; // user relation interface
internal object DefaultClientObject => _defaultClientObject; // pristine template instance
// Schema as declared by the current client code.
internal RelationVersionInfo ClientRelationVersionInfo { get; }
// Highest version number found persisted in the database (0 = none yet).
internal uint LastPersistedVersion { get; set; }
// Version number the current client schema is stored under.
internal uint ClientTypeVersion { get; }
// Field name => interface getter for fields stored "apart" on the relation interface.
internal IDictionary<string, MethodInfo> ApartFields { get; }
// Creates the IL-generated savers for values, primary keys and (when present)
// every secondary key of the current client schema.
void CreateCreatorLoadersAndSavers()
{
    _valueSaver = CreateSaver(ClientRelationVersionInfo.Fields.Span, $"RelationValueSaver_{Name}");
    _primaryKeysSaver = CreateSaverWithApartFields(ClientRelationVersionInfo.PrimaryKeyFields.Span,
        $"RelationKeySaver_{Name}");
    if (ClientRelationVersionInfo.SecondaryKeys.Count > 0)
    {
        // Array indexed directly by secondary-key index; unused slots stay null.
        _secondaryKeysSavers =
            new Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object>
                [ClientRelationVersionInfo.SecondaryKeys.Keys.Max() + 1];
        foreach (var (idx, secondaryKeyInfo) in ClientRelationVersionInfo.SecondaryKeys)
        {
            _secondaryKeysSavers[idx] = CreateSaverWithApartFields(
                ClientRelationVersionInfo.GetSecondaryKeyFields(idx),
                $"Relation_{Name}_SK_{secondaryKeyInfo.Name}_KeySaver");
        }
    }
}
// Saver for the value part of an item.
internal Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object> ValueSaver => _valueSaver;
// Saver for the primary-key part; extra object argument carries the relation
// interface instance for "apart" fields.
internal Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object> PrimaryKeysSaver =>
    _primaryKeysSaver;
// Emits IL that saves the given fields of an instance: each value is read via
// its property getter (or an "apart" getter on the relation interface),
// converted to the handler's type and written through the handler.
void CreateSaverIl(IILGen ilGen, ReadOnlySpan<TableFieldInfo> fields,
    Action<IILGen> pushInstance, Action<IILGen>? pushRelationIface,
    Action<IILGen> pushWriter, Action<IILGen> pushTransaction)
{
    // Non-null only when some handler needs an IWriterCtx.
    var writerCtxLocal = CreateWriterCtx(ilGen, fields, pushWriter, pushTransaction);
    var props = ClientType.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
    foreach (var field in fields)
    {
        var getter = props.First(p => GetPersistentName(p) == field.Name).GetGetMethod(true);
        Action<IILGen> writerOrCtx;
        var handler = field.Handler!.SpecializeSaveForType(getter!.ReturnType);
        if (handler.NeedsCtx())
            writerOrCtx = il => il.Ldloc(writerCtxLocal!);
        else
            writerOrCtx = pushWriter;
        // "Apart" fields are read from the relation interface instead of the instance.
        MethodInfo apartFieldGetter = null;
        if (pushRelationIface != null)
            ApartFields.TryGetValue(field.Name, out apartFieldGetter);
        handler.Save(ilGen, writerOrCtx, il =>
        {
            if (apartFieldGetter != null)
            {
                il.Do(pushRelationIface!);
                // Note: getter is swapped to the apart getter inside this closure.
                getter = apartFieldGetter;
            }
            else
            {
                il.Do(pushInstance);
            }
            il.Callvirt(getter);
            _relationInfoResolver.TypeConvertorGenerator.GenerateConversion(getter.ReturnType,
                handler.HandledType())!(il);
        });
    }
}
// Declares and initializes an IWriterCtx local when any of the fields' handlers
// requires one; returns null otherwise.
static IILLocal? CreateWriterCtx(IILGen ilGenerator, ReadOnlySpan<TableFieldInfo> fields,
    Action<IILGen> pushWriter, Action<IILGen> pushTransaction)
{
    var ctxRequired = false;
    for (var i = 0; i < fields.Length; i++)
    {
        if (!fields[i].Handler!.NeedsCtx()) continue;
        ctxRequired = true;
        break;
    }
    if (!ctxRequired)
        return null;
    var ctxLocal = ilGenerator.DeclareLocal(typeof(IWriterCtx));
    ilGenerator
        .Do(pushTransaction)
        .Do(pushWriter)
        .Newobj(() => new DBWriterCtx(null, null))
        .Stloc(ctxLocal);
    return ctxLocal;
}
// Emits: load argument argIdx, cast it to the given class type and store it
// into local slot locIdx.
static void StoreNthArgumentOfTypeIntoLoc(IILGen il, ushort argIdx, Type type, ushort locIdx)
{
il.Ldarg(argIdx);
il.Castclass(type);
il.Stloc(locIdx);
}
// Pairs an IL local that holds an already-loaded field value with the
// handler specialized to save it later (used for out-of-order SK parts).
struct LocalAndHandler
{
public IILLocal Local;
public IFieldHandler Handler;
}
// Builds a delegate that assembles one secondary-key row from already
// serialized primary-key bytes (arg 2) and value bytes (arg 3) of the given
// relation version. Arg 4 carries a default instance used when an older
// value version lacks a SK field. Fields that must be read before their SK
// position are parked in locals (outOfOrderSkParts) and flushed in SK order.
Action<IInternalObjectDBTransaction, AbstractBufferedWriter, AbstractBufferedReader, AbstractBufferedReader,
object> CreateBytesToSKSaver(
uint version, uint secondaryKeyIndex, string saverName)
{
var method =
ILBuilder.Instance
.NewMethod<Action<IInternalObjectDBTransaction, AbstractBufferedWriter, AbstractBufferedReader,
AbstractBufferedReader, object>>(saverName);
var ilGenerator = method.Generator;
IILLocal defaultObjectLoc = null;
static void PushWriter(IILGen il) => il.Ldarg(1);
var firstBuffer = new BufferInfo(); //pk's
var secondBuffer = new BufferInfo(); //values
var outOfOrderSkParts = new Dictionary<int, LocalAndHandler>(); //local and specialized saver
var pks = ClientRelationVersionInfo.PrimaryKeyFields.Span;
var skFieldIds = ClientRelationVersionInfo.SecondaryKeys[secondaryKeyIndex].Fields;
var skFields = ClientRelationVersionInfo.GetSecondaryKeyFields(secondaryKeyIndex).ToArray();
var valueFields = _relationVersions[version]!.Fields.Span;
var writerCtxLocal = CreateWriterCtx(ilGenerator, skFields, PushWriter, il => il.Ldarg(0));
for (var skFieldIdx = 0; skFieldIdx < skFieldIds.Count; skFieldIdx++)
{
// This SK part was already loaded into a local while earlier data was skipped.
if (outOfOrderSkParts.TryGetValue(skFieldIdx, out var saveLocalInfo))
{
var writerOrCtx = WriterOrContextForHandler(saveLocalInfo.Handler, writerCtxLocal, PushWriter);
saveLocalInfo.Handler.Save(ilGenerator, writerOrCtx, il => il.Ldloc(saveLocalInfo.Local));
continue;
}
var skf = skFieldIds[skFieldIdx];
if (skf.IsFromPrimaryKey)
{
InitializeBuffer(2, ref firstBuffer, ilGenerator, pks, true);
//firstBuffer.ActualFieldIdx == number of processed PK's
for (var pkIdx = firstBuffer.ActualFieldIdx; pkIdx < skf.Index; pkIdx++)
{
//all PK parts are contained in SK
FindPosition(pkIdx, skFieldIds, 0, out _, out var skFieldIdxForPk);
StoreIntoLocal(ilGenerator, pks[pkIdx].Handler!, firstBuffer, outOfOrderSkParts, skFieldIdxForPk,
skFields[skFieldIdxForPk].Handler!);
}
CopyToOutput(ilGenerator, pks[(int) skf.Index].Handler!, writerCtxLocal!, PushWriter,
skFields[skFieldIdx].Handler!, firstBuffer);
firstBuffer.ActualFieldIdx = (int) skf.Index + 1;
}
else
{
InitializeBuffer(3, ref secondBuffer, ilGenerator, valueFields, false);
// Locate this SK field inside the (possibly older) value version by name.
var valueFieldIdx = -1;
for (var i = 0; i < valueFields.Length; i++)
{
if (valueFields[i].Name == skFields[skFieldIdx].Name)
{
valueFieldIdx = i;
break;
}
}
if (valueFieldIdx >= 0)
{
// Skip preceding value fields; park any that another SK position needs.
for (var valueIdx = secondBuffer.ActualFieldIdx; valueIdx < valueFieldIdx; valueIdx++)
{
var valueField = valueFields[valueIdx];
var storeForSkIndex = -1;
for (var i = 0; i < skFields.Length; i++)
{
if (skFields[i].Name == valueField.Name)
{
storeForSkIndex = i;
break;
}
}
if (storeForSkIndex == -1)
valueField.Handler!.Skip(ilGenerator,
valueField.Handler.NeedsCtx() ? secondBuffer.PushCtx : secondBuffer.PushReader);
else
StoreIntoLocal(ilGenerator, valueField.Handler!, secondBuffer, outOfOrderSkParts,
storeForSkIndex, skFields[storeForSkIndex].Handler!);
}
CopyToOutput(ilGenerator, valueFields[valueFieldIdx].Handler!, writerCtxLocal!, PushWriter,
skFields[skFieldIdx].Handler!, secondBuffer);
secondBuffer.ActualFieldIdx = valueFieldIdx + 1;
}
else
{
//older version of value does not contain sk field - store field from default value (can be initialized in constructor)
if (defaultObjectLoc == null)
{
defaultObjectLoc = ilGenerator.DeclareLocal(ClientType);
ilGenerator.Ldarg(4)
.Castclass(ClientType)
.Stloc(defaultObjectLoc);
}
var loc = defaultObjectLoc;
CreateSaverIl(ilGenerator,
new[] {ClientRelationVersionInfo.GetSecondaryKeyField((int) skf.Index)},
il => il.Ldloc(loc), null, PushWriter, il => il.Ldarg(0));
}
}
}
ilGenerator.Ret();
return method.Create();
}
// Emits IL that reads the current field from the buffer with valueHandler
// and immediately writes it out through the SK handler specialized to the
// value's CLR type (re-encoding it for the secondary key format).
static void CopyToOutput(IILGen ilGenerator, IFieldHandler valueHandler, IILLocal writerCtxLocal,
Action<IILGen> pushWriter,
IFieldHandler skHandler, BufferInfo buffer)
{
var writerOrCtx = WriterOrContextForHandler(valueHandler, writerCtxLocal, pushWriter);
skHandler.SpecializeSaveForType(valueHandler.HandledType()).Save(ilGenerator, writerOrCtx,
il =>
{
// Value is loaded from the buffer's reader (or reader ctx) inline.
valueHandler.Load(ilGenerator, valueHandler.NeedsCtx() ? buffer.PushCtx : buffer.PushReader);
});
}
// Emits IL that loads the current field into a fresh local and records that
// local (with the SK handler specialized to the value type) so it can be
// saved later at SK position skFieldIdx.
static void StoreIntoLocal(IILGen ilGenerator, IFieldHandler valueHandler, BufferInfo bufferInfo,
Dictionary<int, LocalAndHandler> outOfOrderSkParts, int skFieldIdx, IFieldHandler skFieldHandler)
{
var local = ilGenerator.DeclareLocal(valueHandler.HandledType());
valueHandler.Load(ilGenerator, valueHandler.NeedsCtx() ? bufferInfo.PushCtx : bufferInfo.PushReader);
ilGenerator.Stloc(local);
outOfOrderSkParts[skFieldIdx] = new LocalAndHandler
{
Handler = skFieldHandler.SpecializeSaveForType(valueHandler.HandledType()),
Local = local
};
}
// Chooses where the handler saves to: the IWriterCtx local when it needs a
// context, otherwise the plain writer.
static Action<IILGen> WriterOrContextForHandler(IFieldHandler handler, IILLocal? writerCtxLocal,
Action<IILGen> pushWriter)
{
if (!handler.NeedsCtx()) return pushWriter;
return il => il.Ldloc(writerCtxLocal!);
}
// Lazily prepares reading from the buffer in method argument bufferArgIdx:
// binds PushReader, skips one prefix byte when requested (the
// AllRelationsPKPrefix marker) and then a leading VUInt32 (relation id or
// version, depending on which buffer this is — confirm against callers),
// and creates an IReaderCtx local (transaction is arg 0) when any field
// handler requires a context. Idempotent via ReaderCreated.
static void InitializeBuffer(ushort bufferArgIdx, ref BufferInfo bufferInfo, IILGen ilGenerator,
ReadOnlySpan<TableFieldInfo> fields, bool skipAllRelationsPKPrefix)
{
if (bufferInfo.ReaderCreated) return;
bufferInfo.ReaderCreated = true;
bufferInfo.PushReader = il => il.Ldarg(bufferArgIdx);
if (skipAllRelationsPKPrefix)
ilGenerator
.Do(bufferInfo.PushReader)
.Call(() => default(AbstractBufferedReader).SkipInt8()); //ObjectDB.AllRelationsPKPrefix
ilGenerator
.Do(bufferInfo.PushReader).Call(() => default(AbstractBufferedReader).SkipVUInt32());
var anyNeedsCtx = false;
foreach (var fieldInfo in fields)
{
if (fieldInfo.Handler!.NeedsCtx())
{
anyNeedsCtx = true;
break;
}
}
if (anyNeedsCtx)
{
var readerCtxLocal = ilGenerator.DeclareLocal(typeof(IReaderCtx));
ilGenerator
.Ldarg(0) //tr
.Ldarg(bufferArgIdx)
.Newobj(() => new DBReaderCtx(null, null))
.Stloc(readerCtxLocal);
bufferInfo.PushCtx = il => il.Ldloc(readerCtxLocal);
}
}
// Builds a saver delegate whose object arguments are (instance, relation
// interface). Local 0 holds the typed instance; when apart fields exist,
// local 1 holds the typed relation interface they are read from.
Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object> CreateSaverWithApartFields(
ReadOnlySpan<TableFieldInfo> fields, string saverName)
{
var method = ILBuilder.Instance.NewMethod<
Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object>>(saverName);
var ilGenerator = method.Generator;
ilGenerator.DeclareLocal(ClientType);
StoreNthArgumentOfTypeIntoLoc(ilGenerator, 2, ClientType, 0);
var hasApartFields = ApartFields.Any();
if (hasApartFields)
{
ilGenerator.DeclareLocal(_interfaceType);
StoreNthArgumentOfTypeIntoLoc(ilGenerator, 3, _interfaceType, 1);
}
CreateSaverIl(ilGenerator, fields,
il => il.Ldloc(0), hasApartFields ? il => il.Ldloc(1) : (Action<IILGen>) null,
il => il.Ldarg(1), il => il.Ldarg(0));
ilGenerator
.Ret();
return method.Create();
}
// Builds a plain saver delegate (no apart fields): casts arg 2 to
// ClientType into local 0 and emits field saves via CreateSaverIl.
Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object> CreateSaver(
ReadOnlySpan<TableFieldInfo> fields, string saverName)
{
var method =
ILBuilder.Instance.NewMethod<Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object>>(
saverName);
var ilGenerator = method.Generator;
ilGenerator.DeclareLocal(ClientType);
StoreNthArgumentOfTypeIntoLoc(ilGenerator, 2, ClientType, 0);
CreateSaverIl(ilGenerator, fields,
il => il.Ldloc(0), null, il => il.Ldarg(1), il => il.Ldarg(0));
ilGenerator
.Ret();
return method.Create();
}
// True when both secondary-key definitions contain equal fields in the same
// order (same length and pairwise TableFieldInfo equality).
static bool SecondaryIndexHasSameDefinition(ReadOnlySpan<TableFieldInfo> currFields, ReadOnlySpan<TableFieldInfo> prevFields)
{
if (currFields.Length != prevFields.Length)
return false;
var idx = 0;
while (idx < currFields.Length)
{
if (!TableFieldInfo.Equal(currFields[idx], prevFields[idx]))
return false;
idx++;
}
return true;
}
// Maps the "prime" (code-declared) version info onto real secondary-key
// indexes. A key whose name and field definition match the previously
// persisted version keeps its old index; otherwise the first index unused by
// both the previous version and the keys assigned so far is allocated.
// PrimeSK2Real records the prime-index -> real-index translation.
RelationVersionInfo CreateVersionInfoFromPrime(RelationVersionInfo prime)
{
var secondaryKeys = new Dictionary<uint, SecondaryKeyInfo>();
PrimeSK2Real = new byte[prime.SecondaryKeys.Count];
if (LastPersistedVersion > 0)
{
var prevVersion = _relationVersions[LastPersistedVersion];
foreach (var primeSecondaryKey in prime.SecondaryKeys)
{
if (prevVersion!.SecondaryKeysNames.TryGetValue(primeSecondaryKey.Value.Name, out var index))
{
var prevFields = prevVersion.GetSecondaryKeyFields(index);
var currFields = prime.GetSecondaryKeyFields(primeSecondaryKey.Key);
if (SecondaryIndexHasSameDefinition(currFields, prevFields))
goto existing; // same name and same definition => reuse old index
}
// Name unknown or definition changed: find a fresh, unused index.
// (index is 0 here when the name lookup failed.)
while (prevVersion.SecondaryKeys.ContainsKey(index) || secondaryKeys.ContainsKey(index))
index++;
existing:
PrimeSK2Real[primeSecondaryKey.Key] = (byte)index;
secondaryKeys.Add(index, primeSecondaryKey.Value);
}
}
else
{
// Nothing persisted yet: prime indexes are used verbatim.
foreach (var primeSecondaryKey in prime.SecondaryKeys)
{
PrimeSK2Real[primeSecondaryKey.Key] = (byte)primeSecondaryKey.Key;
secondaryKeys.Add(primeSecondaryKey.Key, primeSecondaryKey.Value);
}
}
return new RelationVersionInfo(prime.PrimaryKeyFields, secondaryKeys, prime.SecondaryKeyFields, prime.Fields);
}
// Marker interfaces that merely express enumerability are ignored when a
// relation interface is scanned for methods and properties.
static bool IsIgnoredType(Type type)
{
if (!type.IsGenericType)
return type == typeof(IEnumerable);
var def = type.GetGenericTypeDefinition();
return def == typeof(IEnumerable<>) || def == typeof(IReadOnlyCollection<>);
}
// Enumerates abstract methods declared directly on the interface, followed
// by all methods of its non-ignored inherited interfaces (inherited methods
// are not filtered by IsAbstract, matching the original behavior).
public static IEnumerable<MethodInfo> GetMethods(Type interfaceType)
{
if (IsIgnoredType(interfaceType)) yield break;
foreach (var method in interfaceType.GetMethods())
{
if (method.IsAbstract)
yield return method;
}
foreach (var iface in interfaceType.GetInterfaces())
{
if (IsIgnoredType(iface)) continue;
foreach (var method in iface.GetMethods())
yield return method;
}
}
// Enumerates public instance properties of the interface and its
// non-ignored inherited interfaces, skipping the internal chain property.
public static IEnumerable<PropertyInfo> GetProperties(Type interfaceType)
{
if (IsIgnoredType(interfaceType)) yield break;
foreach (var property in interfaceType.GetProperties(BindingFlags.Instance | BindingFlags.Public))
{
if (property.Name != nameof(IRelation.BtdbInternalNextInChain))
yield return property;
}
foreach (var iface in interfaceType.GetInterfaces())
{
if (IsIgnoredType(iface)) continue;
foreach (var property in iface.GetProperties(BindingFlags.Instance | BindingFlags.Public))
{
if (property.Name != nameof(IRelation.BtdbInternalNextInChain))
yield return property;
}
}
}
// Returns the memoized dictionary finder for a version. On a cache miss the
// finder is built and published with CompareExchange; if another thread
// published first, the loop re-reads the slot and returns the winner's
// delegate (the freshly built one is discarded).
Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>> GetIDictFinder(uint version)
{
Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>>? res;
do
{
res = _valueIDictFinders[version];
if (res != null) return res;
res = CreateIDictFinder(version);
} while (Interlocked.CompareExchange(ref _valueIDictFinders[version], res, null) != null);
return res;
}
// Returns the cached key saver for the given secondary key index.
internal Action<IInternalObjectDBTransaction, AbstractBufferedWriter, object, object> GetSecondaryKeysKeySaver
(uint secondaryKeyIndex) => _secondaryKeysSavers[secondaryKeyIndex];
// Returns (and caches) a merger that builds secondary-key bytes from PK and
// value bytes of a given version. The cache key packs both ids into one
// ulong as secondaryKeyIndex + version * 10000 — assumes fewer than 10000
// secondary keys per relation; TODO confirm that bound holds elsewhere.
internal Action<IInternalObjectDBTransaction, AbstractBufferedWriter, AbstractBufferedReader,
AbstractBufferedReader, object> GetPKValToSKMerger
(uint version, uint secondaryKeyIndex)
{
var h = secondaryKeyIndex + version * 10000ul;
return _secondaryKeysConvertSavers.GetOrAdd(h,
(_, ver, secKeyIndex, relationInfo) => CreateBytesToSKSaver(ver, secKeyIndex,
$"Relation_{relationInfo.Name}_PkVal_to_SK_{relationInfo.ClientRelationVersionInfo.SecondaryKeys[secKeyIndex].Name}_v{ver}"),
version, secondaryKeyIndex, this);
}
//takes secondaryKey key & value bytes and restores primary key bytes
// Cached per (secondaryKeyIndex, paramFieldCountInFirstBuffer); the key
// packs both into one ulong as 10000 * secKeyIndex + paramFieldCount —
// assumes fewer than 10000 parameter fields; TODO confirm that bound.
public Action<AbstractBufferedReader, AbstractBufferedReader, AbstractBufferedWriter> GetSKKeyValueToPKMerger
(uint secondaryKeyIndex, uint paramFieldCountInFirstBuffer)
{
var h = 10000ul * secondaryKeyIndex + paramFieldCountInFirstBuffer;
return _secondaryKeyValueToPKLoader.GetOrAdd(h,
(_, secKeyIndex, relationInfo, paramFieldCount) => relationInfo.CreatePrimaryKeyFromSKDataMerger(
secKeyIndex, paramFieldCount,
$"Relation_SK_to_PK_{relationInfo.ClientRelationVersionInfo.SecondaryKeys[secKeyIndex].Name}_p{paramFieldCount}"),
secondaryKeyIndex, this, (int) paramFieldCountInFirstBuffer);
}
// Remembers where a skipped field started and how many bytes it occupied,
// so it can be raw-copied later out of order.
struct MemorizedPositionWithLength
{
public int BufferIndex { get; set; } //0 first, 1 second
public IILLocal Pos { get; set; } // IMemorizedPosition
public IILLocal Length { get; set; } // int
}
// Per-buffer reading state used while generating merger/saver IL.
struct BufferInfo
{
public bool ReaderCreated; // lazily initialized exactly once
public Action<IILGen> PushReader; // pushes the buffer's reader argument
public Action<IILGen> PushCtx; // pushes the IReaderCtx local (when created)
public int ActualFieldIdx; // index of the next unread field
}
// Builds a merger that reads SK key bytes (arg 0) and SK value bytes
// (arg 1) and writes the primary-key bytes (arg 2) in PK order. The first
// paramFieldCountInFirstBuffer SK fields live in the first buffer, the rest
// in the second. PK parts encountered out of order are memorized
// (position + length) and raw-copied when their PK position comes up.
Action<AbstractBufferedReader, AbstractBufferedReader, AbstractBufferedWriter> CreatePrimaryKeyFromSKDataMerger(
uint secondaryKeyIndex,
int paramFieldCountInFirstBuffer, string mergerName)
{
var method =
ILBuilder.Instance
.NewMethod<Action<AbstractBufferedReader, AbstractBufferedReader, AbstractBufferedWriter>>(
mergerName);
var ilGenerator = method.Generator;
void PushWriter(IILGen il) => il.Ldarg(2);
var skFields = ClientRelationVersionInfo.SecondaryKeys[secondaryKeyIndex].Fields;
var positionLoc = ilGenerator.DeclareLocal(typeof(ulong)); //stored position
var memoPositionLoc = ilGenerator.DeclareLocal(typeof(IMemorizedPosition));
var firstBuffer = new BufferInfo();
var secondBuffer = new BufferInfo {ActualFieldIdx = paramFieldCountInFirstBuffer};
var outOfOrderPKParts =
new Dictionary<int, MemorizedPositionWithLength>(); //index -> bufferIdx, IMemorizedPosition, length
var pks = ClientRelationVersionInfo.PrimaryKeyFields.Span;
for (var pkIdx = 0; pkIdx < pks.Length; pkIdx++)
{
// Already memorized while scanning past it: rewind and copy its bytes.
if (outOfOrderPKParts.ContainsKey(pkIdx))
{
var memo = outOfOrderPKParts[pkIdx];
var pushReader = GetBufferPushAction(memo.BufferIndex, firstBuffer.PushReader,
secondBuffer.PushReader);
CopyFromMemorizedPosition(ilGenerator, pushReader, PushWriter, memo, memoPositionLoc);
continue;
}
FindPosition(pkIdx, skFields, paramFieldCountInFirstBuffer, out var bufferIdx, out var skFieldIdx);
if (bufferIdx == 0)
{
MergerInitializeFirstBufferReader(ilGenerator, ref firstBuffer);
CopyFromBuffer(ilGenerator, bufferIdx, skFieldIdx, ref firstBuffer, outOfOrderPKParts, pks,
skFields, positionLoc,
memoPositionLoc, PushWriter);
}
else
{
MergerInitializeBufferReader(ref secondBuffer, 1);
CopyFromBuffer(ilGenerator, bufferIdx, skFieldIdx, ref secondBuffer, outOfOrderPKParts, pks,
skFields, positionLoc,
memoPositionLoc, PushWriter);
}
}
ilGenerator.Ret();
return method.Create();
}
// Emits IL that advances buffer bi up to skFieldIdx: PK parts passed on the
// way are memorized for a later out-of-order copy, non-PK SK fields are
// simply skipped; finally the wanted PK part is raw-copied to the writer.
void CopyFromBuffer(IILGen ilGenerator, int bufferIdx, int skFieldIdx, ref BufferInfo bi,
Dictionary<int, MemorizedPositionWithLength> outOfOrderPKParts,
ReadOnlySpan<TableFieldInfo> pks, IList<FieldId> skFields, IILLocal positionLoc, IILLocal memoPositionLoc,
Action<IILGen> pushWriter)
{
for (var idx = bi.ActualFieldIdx; idx < skFieldIdx; idx++)
{
var field = skFields[idx];
if (field.IsFromPrimaryKey)
{
outOfOrderPKParts[(int) field.Index] = SkipWithMemorizing(bufferIdx, ilGenerator, bi.PushReader,
pks[(int) field.Index].Handler!, positionLoc);
}
else
{
var f = ClientRelationVersionInfo.GetSecondaryKeyField((int) field.Index);
f.Handler!.Skip(ilGenerator, bi.PushReader);
}
}
var skField = skFields[skFieldIdx];
GenerateCopyFieldFromByteBufferToWriterIl(ilGenerator, pks[(int) skField.Index].Handler!, bi.PushReader,
pushWriter, positionLoc, memoPositionLoc);
bi.ActualFieldIdx = skFieldIdx + 1;
}
// Locates primary-key part pkIdx inside the secondary-key field list;
// bufferIdx tells whether it lives in the first (parameter) buffer or the
// second one. Throws when the PK part is not part of the SK at all.
static void FindPosition(int pkIdx, IList<FieldId> skFields, int paramFieldCountInFirstBuffer,
out int bufferIdx, out int skFieldIdx)
{
var count = skFields.Count;
for (var i = 0; i < count; i++)
{
var field = skFields[i];
if (field.IsFromPrimaryKey && field.Index == pkIdx)
{
skFieldIdx = i;
bufferIdx = i < paramFieldCountInFirstBuffer ? 0 : 1;
return;
}
}
throw new BTDBException("Secondary key relation processing error.");
}
// Lazily binds the buffer's reader to method argument arg; idempotent.
static void MergerInitializeBufferReader(ref BufferInfo bi, ushort arg)
{
if (bi.ReaderCreated) return;
bi.ReaderCreated = true;
bi.PushReader = il => il.Ldarg(arg);
}
// The first buffer contains the whole SK key, which starts with the
// AllRelationsSKPrefix bytes, the relation id and the secondary key index;
// those are skipped here before field data is read. Idempotent.
static void MergerInitializeFirstBufferReader(IILGen ilGenerator, ref BufferInfo bi)
{
if (bi.ReaderCreated)
return;
MergerInitializeBufferReader(ref bi, 0);
ilGenerator
//skip all relations
.Do(bi.PushReader)
.LdcI4(ObjectDB.AllRelationsSKPrefix.Length)
.Callvirt(() => default(AbstractBufferedReader).SkipBlock(0))
//skip relation id
.Do(bi.PushReader).Call(() => default(AbstractBufferedReader).SkipVUInt32())
//skip secondary key index
.Do(bi.PushReader).Call(() => default(AbstractBufferedReader).SkipVUInt32());
}
// Selects the reader-push action matching the buffer index (0 = first).
Action<IILGen> GetBufferPushAction(int bufferIndex, Action<IILGen> pushReaderFirst,
Action<IILGen> pushReaderSecond)
{
if (bufferIndex == 0) return pushReaderFirst;
return pushReaderSecond;
}
// Emits IL that skips one field while memorizing where it started and
// measuring how many bytes it occupied (new position minus old), so the
// field can be raw-copied later out of order.
MemorizedPositionWithLength SkipWithMemorizing(int activeBuffer, IILGen ilGenerator, Action<IILGen> pushReader,
IFieldHandler handler, IILLocal tempPosition)
{
var memoPos = ilGenerator.DeclareLocal(typeof(IMemorizedPosition));
var memoLen = ilGenerator.DeclareLocal(typeof(int));
var position = new MemorizedPositionWithLength
{BufferIndex = activeBuffer, Pos = memoPos, Length = memoLen};
MemorizeCurrentPosition(ilGenerator, pushReader, memoPos);
StoreCurrentPosition(ilGenerator, pushReader, tempPosition);
handler.Skip(ilGenerator, pushReader);
ilGenerator
.Do(pushReader) //[VR]
.Callvirt(() => default(AbstractBufferedReader).GetCurrentPosition()) //[posNew];
.Ldloc(tempPosition) //[posNew, posOld]
.Sub() //[readLen]
.ConvI4() //[readLen(i)]
.Stloc(memoLen); //[]
return position;
}
// Emits IL that temporarily rewinds the reader to a memorized field start,
// raw-copies memo.Length bytes into the writer, then restores the reader to
// the position it had before the copy.
void CopyFromMemorizedPosition(IILGen ilGenerator, Action<IILGen> pushReader, Action<IILGen> pushWriter,
MemorizedPositionWithLength memo,
IILLocal memoPositionLoc)
{
MemorizeCurrentPosition(ilGenerator, pushReader, memoPositionLoc);
ilGenerator
.Do(pushWriter) //[W]
.Do(pushReader) //[W,VR]
.Ldloc(memo.Length) //[W, VR, readLen]
.Ldloc(memo.Pos) //[W, VR, readLen, Memorize]
.Callvirt(() => default(IMemorizedPosition).Restore()) //[W, VR]
.Call(() => default(AbstractBufferedReader).ReadByteArrayRaw(0)) //[W, byte[]]
.Call(() => default(AbstractBufferedWriter).WriteByteArrayRaw(null)) //[]
.Ldloc(memoPositionLoc) //[Memorize]
.Callvirt(() => default(IMemorizedPosition).Restore()); //[]
}
// Emits IL that captures the reader's current position into an
// IMemorizedPosition local; requires the reader to actually be a
// ByteBufferReader (the cast would throw otherwise).
void MemorizeCurrentPosition(IILGen ilGenerator, Action<IILGen> pushReader, IILLocal memoPositionLoc)
{
ilGenerator
.Do(pushReader)
.Castclass(typeof(ByteBufferReader))
.Call(() => default(ByteBufferReader).MemorizeCurrentPosition())
.Stloc(memoPositionLoc);
}
// Emits IL that stores the reader's current byte position (ulong) into the
// given local.
void StoreCurrentPosition(IILGen ilGenerator, Action<IILGen> pushReader, IILLocal positionLoc)
{
ilGenerator
.Do(pushReader)
.Callvirt(() => default(AbstractBufferedReader).GetCurrentPosition())
.Stloc(positionLoc);
}
// Emits IL that measures one field by skipping it (length = new position -
// old position), rewinds to the memorized start and raw-copies exactly
// those bytes from the reader to the writer.
void GenerateCopyFieldFromByteBufferToWriterIl(IILGen ilGenerator, IFieldHandler handler,
Action<IILGen> pushReader,
Action<IILGen> pushWriter, IILLocal positionLoc, IILLocal memoPositionLoc)
{
MemorizeCurrentPosition(ilGenerator, pushReader, memoPositionLoc);
StoreCurrentPosition(ilGenerator, pushReader, positionLoc);
handler.Skip(ilGenerator, pushReader);
ilGenerator
.Do(pushWriter) //[W]
.Do(pushReader) //[W,VR]
.Dup() //[W, VR, VR]
.Callvirt(() => default(AbstractBufferedReader).GetCurrentPosition()) //[W, VR, posNew];
.Ldloc(positionLoc) //[W, VR, posNew, posOld]
.Sub() //[W, VR, readLen]
.ConvI4() //[W, VR, readLen(i)]
.Ldloc(memoPositionLoc) //[W, VR, readLen, Memorize]
.Callvirt(() => default(IMemorizedPosition).Restore()) //[W, VR, readLen]
.Call(() => default(AbstractBufferedReader).ReadByteArrayRaw(0)) //[W, byte[]]
.Call(() => default(AbstractBufferedWriter).WriteByteArrayRaw(null)); //[]
}
// Memoized factory for simple loader delegates keyed by SimpleLoaderType.
public object GetSimpleLoader(SimpleLoaderType handler) => _simpleLoader.GetOrAdd(handler, CreateSimpleLoader);
// Builds a Func<AbstractBufferedReader, IReaderCtx, T> that loads a value
// with the field handler (from the ctx argument 1 when the handler needs a
// context, otherwise from the reader argument 0) and converts it to the
// requested real type.
object CreateSimpleLoader(SimpleLoaderType loaderType)
{
var delegateType = typeof(Func<,,>).MakeGenericType(typeof(AbstractBufferedReader), typeof(IReaderCtx),
loaderType.RealType);
var dm = ILBuilder.Instance.NewMethod(loaderType.FieldHandler.Name + "SimpleReader", delegateType);
var ilGenerator = dm.Generator;
void PushReaderOrCtx(IILGen il) => il.Ldarg((ushort) (loaderType.FieldHandler.NeedsCtx() ? 1 : 0));
loaderType.FieldHandler.Load(ilGenerator, PushReaderOrCtx);
ilGenerator
.Do(_relationInfoResolver.TypeConvertorGenerator.GenerateConversion(loaderType.FieldHandler.HandledType(),
loaderType.RealType)!)
.Ret();
return dm.Create();
}
// Persisted name of a property: the PersistedNameAttribute value when
// present, otherwise the CLR property name.
static string GetPersistentName(PropertyInfo p)
{
var attr = p.GetCustomAttribute<PersistedNameAttribute>();
if (attr == null)
return p.Name;
return attr.Name;
}
// Resolves the persisted name for a CLR property name; falls back to the
// given name when no matching property exists.
internal static string GetPersistentName(string name, PropertyInfo[] properties)
{
for (var i = 0; i < properties.Length; i++)
{
if (properties[i].Name == name)
return GetPersistentName(properties[i]);
}
return name;
}
// Frees every dictionary referenced by one serialized value. Reuses the
// FreeContentOldDict scratch list, clearing it before each scan.
public void FreeContent(IInternalObjectDBTransaction tr, ByteBuffer valueBytes)
{
FreeContentOldDict.Clear();
FindUsedObjectsToFree(tr, valueBytes, FreeContentOldDict);
foreach (var dictId in FreeContentOldDict)
{
FreeIDictionary(tr, dictId);
}
}
// Erases the whole key range of one dictionary: builds the prefix
// AllDictionariesPrefix + VUInt(dictId) and erases everything under it.
internal static void FreeIDictionary(IInternalObjectDBTransaction tr, ulong dictId)
{
var o = ObjectDB.AllDictionariesPrefix.Length;
var prefix = new byte[o + PackUnpack.LengthVUInt(dictId)];
Array.Copy(ObjectDB.AllDictionariesPrefix, prefix, o);
// PackVUInt advances o past the packed id (o passed by ref).
PackUnpack.PackVUInt(prefix, ref o, dictId);
tr.TransactionProtector.Start();
tr.KeyValueDBTransaction.SetKeyPrefixUnsafe(prefix);
tr.KeyValueDBTransaction.EraseAll();
}
// Scans serialized value bytes (version-prefixed) and collects the ids of
// owned dictionaries that would need freeing.
public void FindUsedObjectsToFree(IInternalObjectDBTransaction tr, ByteBuffer valueBytes,
IList<ulong> dictionaries)
{
var reader = new ByteBufferReader(valueBytes);
var serializedVersion = reader.ReadVUInt32();
GetIDictFinder(serializedVersion)(tr, reader, dictionaries);
}
// Builds a delegate that walks a serialized value of the given version and
// reports contained dictionary ids. A dry run against a throw-away
// generator first finds the last field whose handler can own freeable
// content; when none does, a no-op delegate is returned. All fields up to
// that last one are still visited so the reader position stays in sync.
Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>> CreateIDictFinder(uint version)
{
var method = ILBuilder.Instance
.NewMethod<Action<IInternalObjectDBTransaction, AbstractBufferedReader, IList<ulong>>>(
$"Relation{Name}_IDictFinder");
var ilGenerator = method.Generator;
var relationVersionInfo = _relationVersions[version];
var needGenerateFreeFor = 0;
var fakeMethod = ILBuilder.Instance.NewMethod<Action>("Relation_fake");
var fakeGenerator = fakeMethod.Generator;
var valueFields = relationVersionInfo!.Fields.ToArray();
for (var i = 0; i < valueFields.Length; i++)
{
// Dry run: the emitted IL is discarded, only the answer is kept.
var needsFreeContent = valueFields[i].Handler!.FreeContent(fakeGenerator, _ => { });
if (needsFreeContent != NeedsFreeContent.No)
needGenerateFreeFor = i + 1;
}
if (needGenerateFreeFor == 0)
{
return (a, b, c) => { };
}
_needImplementFreeContent = true;
if (relationVersionInfo.NeedsCtx())
{
// Reader ctx collects dictionary ids into the IList<ulong> argument.
ilGenerator.DeclareLocal(typeof(IReaderCtx)); //loc 0
ilGenerator
.Ldarg(0)
.Ldarg(1)
.Ldarg(2)
.Newobj(() => new DBReaderWithFreeInfoCtx(null, null, null))
.Stloc(0);
}
for (var i = 0; i < needGenerateFreeFor; i++)
{
Action<IILGen> readerOrCtx;
if (valueFields[i].Handler!.NeedsCtx())
readerOrCtx = il => il.Ldloc(0);
else
readerOrCtx = il => il.Ldarg(1);
valueFields[i].Handler.FreeContent(ilGenerator, readerOrCtx);
}
ilGenerator.Ret();
return method.Create();
}
// Precomputes the key prefixes for this relation's primary and secondary
// key spaces: a one-byte space marker followed by the VUInt-packed Id.
void CalculatePrefix()
{
var len = PackUnpack.LengthVUInt(Id);
var pkPrefix = new byte[1 + len];
pkPrefix[0] = ObjectDB.AllRelationsPKPrefixByte;
PackUnpack.UnsafePackVUInt(ref pkPrefix[1], Id, len);
Prefix = pkPrefix;
var skPrefix = new byte[1 + len];
skPrefix[0] = ObjectDB.AllRelationsSKPrefixByte;
PackUnpack.UnsafePackVUInt(ref skPrefix[1], Id, len);
PrefixSecondary = skPrefix;
}
// Human-readable identification: relation name, client type and id.
public override string ToString() => $"{Name} {ClientType} Id:{Id}";
}
// Reader context that, while deserializing for free-content analysis,
// collects ids of dictionaries to free and follows references to native
// objects so their owned content is freed too.
public class DBReaderWithFreeInfoCtx : DBReaderCtx
{
readonly IList<ulong> _freeDictionaries;
// Tracks back-referenced inline objects already visited (indexed by ido).
StructList<bool> _seenObjects;
public DBReaderWithFreeInfoCtx(IInternalObjectDBTransaction transaction, AbstractBufferedReader reader,
IList<ulong> freeDictionaries)
: base(transaction, reader)
{
_freeDictionaries = freeDictionaries;
}
public IList<ulong> DictIds => _freeDictionaries;
public override void RegisterDict(ulong dictId)
{
_freeDictionaries.Add(dictId);
}
public override void FreeContentInNativeObject()
{
// id encoding: 0 = null reference; positive (and the extreme negative
// range <= int.MinValue — NOTE(review): confirm that bound is intended)
// = oid of a stored object; small negative = back-reference to an
// inline object encoded as -(index + 1).
var id = _reader!.ReadVInt64();
if (id == 0)
{
}
else if (id <= int.MinValue || id > 0)
{
// Load the referenced object's bytes and run its table's free-content
// walker, which registers any owned dictionaries into _freeDictionaries.
Transaction.TransactionProtector.Start();
Transaction.KeyValueDBTransaction.SetKeyPrefix(ObjectDB.AllObjectsPrefix);
if (!Transaction.KeyValueDBTransaction.FindExactKey(ObjectDBTransaction.BuildKeyFromOid((ulong) id)))
return;
var reader = new ByteBufferReader(Transaction.KeyValueDBTransaction.GetValue());
var tableId = reader.ReadVUInt32();
var tableInfo = ((ObjectDB) Transaction.Owner).TablesInfo.FindById(tableId);
if (tableInfo == null)
return;
var tableVersion = reader.ReadVUInt32();
var freeContentTuple = tableInfo.GetFreeContent(tableVersion);
if (freeContentTuple.Item1 != NeedsFreeContent.No)
{
freeContentTuple.Item2(Transaction, null, reader, _freeDictionaries);
}
}
else
{
// Inline back-reference: recurse only on first visit to avoid cycles.
var ido = (int) (-id) - 1;
if (!AlreadyProcessedInstance(ido))
Transaction.FreeContentInNativeObject(this);
}
}
// Marks instance ido as seen; returns whether it was seen before.
bool AlreadyProcessedInstance(int ido)
{
while (_seenObjects.Count <= ido) _seenObjects.Add(false);
var res = _seenObjects[ido];
_seenObjects[ido] = true;
return res;
}
}
// No-op modification counter for contexts where enumeration-invalidation
// checks are unnecessary; the counter never changes.
class SimpleModificationCounter : IRelationModificationCounter
{
public int ModificationCounter
{
get { return 0; }
}

public void CheckModifiedDuringEnum(int prevModification)
{
// intentionally empty: nothing is ever modified
}

public void MarkModification()
{
// intentionally empty
}
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Globalization;
using System.IO;
#if HAVE_BIG_INTEGER
using System.Numerics;
#endif
namespace Microsoft.Identity.Json.Serialization
{
internal class TraceJsonWriter : JsonWriter
{
private readonly JsonWriter _innerWriter;
private readonly JsonTextWriter _textWriter;
private readonly StringWriter _sw;
// Wraps innerWriter and mirrors everything written to it into an indented
// JSON text buffer so the serialized payload can be captured for tracing.
public TraceJsonWriter(JsonWriter innerWriter)
{
_innerWriter = innerWriter;
_sw = new StringWriter(CultureInfo.InvariantCulture);
// prefix the message in the stringwriter to avoid concat with a potentially large JSON string
_sw.Write("Serialized JSON: " + Environment.NewLine);
var traceWriter = new JsonTextWriter(_sw);
traceWriter.Formatting = Formatting.Indented;
traceWriter.Culture = innerWriter.Culture;
traceWriter.DateFormatHandling = innerWriter.DateFormatHandling;
traceWriter.DateFormatString = innerWriter.DateFormatString;
traceWriter.DateTimeZoneHandling = innerWriter.DateTimeZoneHandling;
traceWriter.FloatFormatHandling = innerWriter.FloatFormatHandling;
_textWriter = traceWriter;
}
// Returns the accumulated trace text (prefix plus mirrored JSON).
public string GetSerializedJsonMessage() => _sw.ToString();
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(decimal value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable decimal; null is recorded as an undefined
// token in this writer's own state.
public override void WriteValue(decimal? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(bool value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable bool; null is recorded as an undefined token.
public override void WriteValue(bool? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(byte value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable byte; null is recorded as an undefined token.
public override void WriteValue(byte? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(char value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable char; null is recorded as an undefined token.
public override void WriteValue(char? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Trace and forward a byte array; null is recorded as an undefined token.
public override void WriteValue(byte[] value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value != null)
{
base.WriteValue(value);
}
else
{
base.WriteUndefined();
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(DateTime value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable DateTime; null is recorded as an undefined token.
public override void WriteValue(DateTime? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
#if HAVE_DATE_TIME_OFFSET
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(DateTimeOffset value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable DateTimeOffset; null is recorded as an
// undefined token.
public override void WriteValue(DateTimeOffset? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
#endif
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(double value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable double; null is recorded as an undefined token.
public override void WriteValue(double? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo an undefined token to the trace writer and the wrapped writer, then
// record it in this writer's own token state.
public override void WriteUndefined()
{
_textWriter.WriteUndefined();
_innerWriter.WriteUndefined();
base.WriteUndefined();
}
// Echo a null token to the trace writer and the wrapped writer, then record
// it in this writer's own token state.
// Fixed: previously called base.WriteUndefined(), recording an undefined
// token in this writer's state even though both underlying writers emitted
// null; base.WriteNull() keeps the recorded token consistent with the
// pattern used by every other override in this class.
public override void WriteNull()
{
_textWriter.WriteNull();
_innerWriter.WriteNull();
base.WriteNull();
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(float value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable float; null is recorded as an undefined token.
public override void WriteValue(float? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(Guid value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable Guid; null is recorded as an undefined token.
public override void WriteValue(Guid? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(int value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable int; null is recorded as an undefined token.
public override void WriteValue(int? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(long value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
// Trace and forward a nullable long; null is recorded as an undefined token.
public override void WriteValue(long? value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value is null)
{
base.WriteUndefined();
}
else
{
base.WriteValue(value.GetValueOrDefault());
}
}
// Trace and forward an arbitrary boxed value. When compiled with BigInteger
// support, a BigInteger is recorded as an integer token; null is recorded
// as undefined; anything else is recorded as a string token because
// base.WriteValue(object) would throw for types it does not recognize.
public override void WriteValue(object value)
{
#if HAVE_BIG_INTEGER
if (value is BigInteger)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
InternalWriteValue(JsonToken.Integer);
}
else
#endif
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
if (value == null)
{
base.WriteUndefined();
}
else
{
// base.WriteValue(value) will error
InternalWriteValue(JsonToken.String);
}
}
}
// Echo the value to the trace writer and the wrapped writer, then record it
// in this writer's own token state.
public override void WriteValue(sbyte value)
{
_textWriter.WriteValue(value);
_innerWriter.WriteValue(value);
base.WriteValue(value);
}
/// <summary>Writes a nullable <see cref="sbyte"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(sbyte? value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value == null)
    {
        base.WriteUndefined();
    }
    else
    {
        base.WriteValue(value.GetValueOrDefault());
    }
}
/// <summary>Writes a <see cref="short"/> value to the trace text writer and the wrapped writer, then records the token via the base writer.</summary>
public override void WriteValue(short value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    base.WriteValue(value);
}
/// <summary>Writes a nullable <see cref="short"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(short? value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value == null)
    {
        base.WriteUndefined();
    }
    else
    {
        base.WriteValue(value.GetValueOrDefault());
    }
}
/// <summary>Writes a <see cref="string"/> value to the trace text writer and the wrapped writer, then records the token via the base writer.</summary>
public override void WriteValue(string value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    base.WriteValue(value);
}
/// <summary>Writes a <see cref="TimeSpan"/> value to the trace text writer and the wrapped writer, then records the token via the base writer.</summary>
public override void WriteValue(TimeSpan value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    base.WriteValue(value);
}
/// <summary>Writes a nullable <see cref="TimeSpan"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(TimeSpan? value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value == null)
    {
        base.WriteUndefined();
    }
    else
    {
        base.WriteValue(value.GetValueOrDefault());
    }
}
/// <summary>Writes a <see cref="uint"/> value to the trace text writer and the wrapped writer, then records the token via the base writer.</summary>
public override void WriteValue(uint value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    base.WriteValue(value);
}
/// <summary>Writes a nullable <see cref="uint"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(uint? value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value == null)
    {
        base.WriteUndefined();
    }
    else
    {
        base.WriteValue(value.GetValueOrDefault());
    }
}
/// <summary>Writes a <see cref="ulong"/> value to the trace text writer and the wrapped writer, then records the token via the base writer.</summary>
public override void WriteValue(ulong value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    base.WriteValue(value);
}
/// <summary>Writes a nullable <see cref="ulong"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(ulong? value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value == null)
    {
        base.WriteUndefined();
    }
    else
    {
        base.WriteValue(value.GetValueOrDefault());
    }
}
/// <summary>Writes a <see cref="Uri"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(Uri value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value != null)
    {
        base.WriteValue(value);
    }
    else
    {
        base.WriteUndefined();
    }
}
/// <summary>Writes a <see cref="ushort"/> value to the trace text writer and the wrapped writer, then records the token via the base writer.</summary>
public override void WriteValue(ushort value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    base.WriteValue(value);
}
/// <summary>Writes a nullable <see cref="ushort"/> to both wrapped writers, recording an undefined token via the base writer when the value is null.</summary>
public override void WriteValue(ushort? value)
{
    _textWriter.WriteValue(value);
    _innerWriter.WriteValue(value);
    if (value == null)
    {
        base.WriteUndefined();
    }
    else
    {
        base.WriteValue(value.GetValueOrDefault());
    }
}
/// <summary>Writes whitespace to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteWhitespace(string ws)
{
    _textWriter.WriteWhitespace(ws);
    _innerWriter.WriteWhitespace(ws);
    base.WriteWhitespace(ws);
}
/// <summary>Writes a comment to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteComment(string text)
{
    _textWriter.WriteComment(text);
    _innerWriter.WriteComment(text);
    base.WriteComment(text);
}
/// <summary>Writes an array-start token to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteStartArray()
{
    _textWriter.WriteStartArray();
    _innerWriter.WriteStartArray();
    base.WriteStartArray();
}
/// <summary>Writes an array-end token to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteEndArray()
{
    _textWriter.WriteEndArray();
    _innerWriter.WriteEndArray();
    base.WriteEndArray();
}
/// <summary>Writes a constructor-start token to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteStartConstructor(string name)
{
    _textWriter.WriteStartConstructor(name);
    _innerWriter.WriteStartConstructor(name);
    base.WriteStartConstructor(name);
}
/// <summary>Writes a constructor-end token to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteEndConstructor()
{
    _textWriter.WriteEndConstructor();
    _innerWriter.WriteEndConstructor();
    base.WriteEndConstructor();
}
/// <summary>Writes a property name to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WritePropertyName(string name)
{
    _textWriter.WritePropertyName(name);
    _innerWriter.WritePropertyName(name);
    base.WritePropertyName(name);
}
/// <summary>
/// Writes a property name with optional escaping to both wrapped writers. The base writer is
/// updated via the plain overload because the escaping overload errors (see inline comment).
/// </summary>
public override void WritePropertyName(string name, bool escape)
{
    _textWriter.WritePropertyName(name, escape);
    _innerWriter.WritePropertyName(name, escape);
    // method with escape will error
    base.WritePropertyName(name);
}
/// <summary>Writes an object-start token to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteStartObject()
{
    _textWriter.WriteStartObject();
    _innerWriter.WriteStartObject();
    base.WriteStartObject();
}
/// <summary>Writes an object-end token to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteEndObject()
{
    _textWriter.WriteEndObject();
    _innerWriter.WriteEndObject();
    base.WriteEndObject();
}
/// <summary>
/// Writes raw JSON as a value to both wrapped writers. The base token state is updated
/// directly instead of calling the base method, which would emit the JSON a second time.
/// </summary>
public override void WriteRawValue(string json)
{
    _textWriter.WriteRawValue(json);
    _innerWriter.WriteRawValue(json);
    // calling base method will write json twice
    InternalWriteValue(JsonToken.Undefined);
}
/// <summary>Writes raw JSON fragment to the trace text writer and the wrapped writer, then via the base writer.</summary>
public override void WriteRaw(string json)
{
    _textWriter.WriteRaw(json);
    _innerWriter.WriteRaw(json);
    base.WriteRaw(json);
}
/// <summary>Closes the trace text writer and the wrapped writer, then the base writer.</summary>
public override void Close()
{
    _textWriter.Close();
    _innerWriter.Close();
    base.Close();
}
/// <summary>Flushes the trace text writer and the wrapped writer. No base call is made here, unlike the other overrides.</summary>
public override void Flush()
{
    _textWriter.Flush();
    _innerWriter.Flush();
}
}
}
| |
/* Genuine Channels product.
*
* Copyright (c) 2002-2007 Dmitry Belikov. All rights reserved.
*
* This source code comes under and must be used and distributed according to the Genuine Channels license agreement.
*/
using System;
using System.Collections;
using System.Diagnostics;
using System.Net;
using System.Net.Sockets;
using System.IO;
using System.Threading;
using Belikov.Common.ThreadProcessing;
using Belikov.GenuineChannels.Connection;
using Belikov.GenuineChannels.DotNetRemotingLayer;
using Belikov.GenuineChannels.Logbook;
using Belikov.GenuineChannels.Messaging;
using Belikov.GenuineChannels.Parameters;
using Belikov.GenuineChannels.Receiving;
using Belikov.GenuineChannels.Security;
using Belikov.GenuineChannels.TransportContext;
namespace Belikov.GenuineChannels.Connection
{
/// <summary>
/// Represents a connection providing sending-receiving services via appropriate
/// Transport.
/// </summary>
internal abstract class PhysicalConnection
{
    /// <summary>
    /// Constructs an instance of the PhysicalConnection class.
    /// </summary>
    public PhysicalConnection()
    {
#if DEBUG
        // Debug builds tag each connection with a unique sequential number (used by ToString).
        this._connectionNumber = Interlocked.Increment(ref _dbg_ConnectionCounter);
#endif
    }

    /// <summary>
    /// Connection-level Security Session.
    /// </summary>
    public SecuritySession ConnectionLevelSecurity;

    /// <summary>
    /// To guarantee atomic access to local members.
    /// </summary>
    protected object _accessToLocalMembers = new object();

    #region -- Debug information ---------------------------------------------------------------

#if DEBUG
    // Process-wide counter handing out unique connection numbers (debug builds only).
    private static int _dbg_ConnectionCounter = 0;
    // This connection's unique number, assigned in the constructor.
    private int _connectionNumber = 0;
    // Free-form description of the underlying socket; may remain null if never set.
    public string TypeOfSocket;

    /// <summary>
    /// Returns a String that represents the current Object.
    /// </summary>
    /// <returns>A String that represents the current Object.</returns>
    public override string ToString()
    {
        return string.Format("(No: {0}. Type: {1}.)", this._connectionNumber,
            this.TypeOfSocket == null ? "<not specified>" : this.TypeOfSocket);
    }
#endif

    #endregion

    #region -- Locking -------------------------------------------------------------------------

    /// <summary>
    /// This lock must be obtain during all operations that may change the state of the connection.
    /// </summary>
    public object PhysicalConnectionStateLock = new object();

    /// <summary>
    /// The state of the connection.
    /// True value of this member indicates that the connection is ready for the action.
    /// </summary>
    public bool ConnectionAvailable = false;

    /// <summary>
    /// Acquires access to the connection.
    /// Test-and-clear under the state lock, so at most one caller can win the connection.
    /// </summary>
    /// <returns>True if access is acquired.</returns>
    public bool AcquireIfAvailable()
    {
        lock (this.PhysicalConnectionStateLock)
        {
            if (! this.ConnectionAvailable)
                return false;

            this.ConnectionAvailable = false;
            return true;
        }
    }

    /// <summary>
    /// Releases the connection.
    /// </summary>
    public void MarkAsAvailable()
    {
        lock (this.PhysicalConnectionStateLock)
        {
            this.ConnectionAvailable = true;
        }
    }

    #endregion

    #region -- Status of reestablishing --------------------------------------------------------

    /// <summary>
    /// Gets an indication whether the physical connection is being reestablished.
    /// </summary>
    public bool Reestablish_IsBeingReestablished
    {
        get
        {
            lock (this._accessToLocalMembers)
                return this._reestablish_IsBeingReestablished;
        }
    }

    // Backing field; always read/written under _accessToLocalMembers.
    private bool _reestablish_IsBeingReestablished = false;

    /// <summary>
    /// Answers true if a lock for connection reestablishing has been obtained.
    /// Only one caller at a time can obtain the reestablishing status (test-and-set).
    /// </summary>
    /// <returns>True if a lock for connection reestablishing has been obtained.</returns>
    public bool Reestablish_ObtainStatus()
    {
        lock (this._accessToLocalMembers)
        {
            if (_reestablish_IsBeingReestablished)
                return false;

            _reestablish_IsBeingReestablished = true;
            return true;
        }
    }

    /// <summary>
    /// Resets the status of reestablishing.
    /// Logs an implementation warning if the connection was not marked as being reestablished,
    /// then clears the flag unconditionally.
    /// </summary>
    public void Reestablish_ResetStatus()
    {
        lock (this._accessToLocalMembers)
        {
            if (! _reestablish_IsBeingReestablished)
            {
                // Resetting a status that was never obtained indicates a logic error elsewhere.
                BinaryLogWriter binaryLogWriter = GenuineLoggingServices.BinaryLogWriter;
                if (binaryLogWriter != null)
                {
                    binaryLogWriter.WriteImplementationWarningEvent("PhysicalConnection.Reestablish_ResetStatus",
                        LogMessageType.Error, GenuineExceptions.Get_Debugging_GeneralWarning("The connection is not being reestablished!"),
                        GenuineUtility.CurrentThreadId, Thread.CurrentThread.Name,
                        "The connection is not being reestablished!");
                }
            }

            _reestablish_IsBeingReestablished = false;
        }
    }

    #endregion

    #region -- Disposing -----------------------------------------------------------------------

    /// <summary>
    /// Indicates whether this instance was disposed.
    /// Guarded by DisposeLock (reader lock for get, writer lock for set).
    /// </summary>
    internal bool IsDisposed
    {
        get
        {
            using (new ReaderAutoLocker(this.DisposeLock))
                return this.__disposed;
        }
        set
        {
            using (new WriterAutoLocker(this.DisposeLock))
                this.__disposed = value;
        }
    }

    // Backing field; access it only through IsDisposed (or while holding DisposeLock).
    private bool __disposed = false;

    /// <summary>
    /// The reason of the disposing.
    /// </summary>
    internal Exception _disposeReason = null;

    /// <summary>
    /// Dispose lock.
    /// </summary>
    internal ReaderWriterLock DisposeLock = new ReaderWriterLock();

    /// <summary>
    /// Releases all resources.
    /// Uses a double-checked pattern: IsDisposed is tested outside and again inside the writer
    /// lock so only one caller proceeds to InternalDispose. NOTE(review): the IsDisposed setter
    /// re-acquires the writer lock while it is already held here; this relies on
    /// ReaderWriterLock's same-thread recursion — confirm if replacing the lock type.
    /// </summary>
    /// <param name="reason">The reason of disposing.</param>
    public void Dispose(Exception reason)
    {
        if (this.IsDisposed)
            return ;

        if (reason == null)
            reason = GenuineExceptions.Get_Processing_TransportConnectionFailed();

        // stop the processing
        using (new WriterAutoLocker(this.DisposeLock))
        {
            if (this.IsDisposed)
                return ;

            this.IsDisposed = true;
            this._disposeReason = reason;
        }

        InternalDispose(reason);
    }

    /// <summary>
    /// Releases resources.
    /// </summary>
    /// <param name="reason">The reason of disposing.</param>
    public abstract void InternalDispose(Exception reason);

    #endregion
}
}
| |
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
// Builds and drives the on-screen debug panel UI (categories, fields, control buttons).
// All UI is created programmatically in CreateUI(); the panel mirrors the state kept in
// the DebugPanel singleton (categories, parameters, ignoreDefLog, colors).
public class DebugPanelViewport : MonoBehaviour {

	// Singleton-style reference, assigned in Awake.
	public static DebugPanelViewport main;

	// Cached rect transforms of the generated UI, filled in by CreateUI().
	RectTransform categoriesRect;
	RectTransform fieldsRect;
	RectTransform clearButton;
	RectTransform ignoreButton;
	RectTransform showAllButton;
	RectTransform hideAllButton;
	Image ignoreButtonImg;
	CanvasGroup panelGroup;
	CanvasGroup rootGroup;
	public static Font arial;

	// When true the panel is "locked": controls hidden, raycasts blocked off, alpha dimmed.
	bool uiLock = false;
	bool fieldsListChanged = false;

	Dictionary<string, Button> categories = new Dictionary<string, Button>();
	Dictionary<string, DebugPanelField> fields = new Dictionary<string, DebugPanelField>();
	List<string> keys = new List<string> ();

	// Colors
	Color systemColor = new Color (0.4f, 1f, 0.6f, 1);
	Color warningColor = new Color (0.9f, 0.9f, 0.4f, 1);
	Color errorColor = new Color (1f, 0.4f, 0.4f, 1);

	void Awake () {
		main = this;
		arial = Resources.GetBuiltinResource<Font>("Arial.ttf");
		CreateUI ();
		Lock (DebugPanel.main.hideOnAwake);
		// NOTE(review): "IngorDefLog" is a typo of "IgnoreDefLog" that also exists on DebugPanel;
		// renaming would require touching both classes.
		IngorDefLog (DebugPanel.main.ignoreDefLog);
	}

	void Update () {
		UpdateFields (DebugPanel.main.parameters);
		// Category buttons are frozen while the panel is locked.
		if (!uiLock)
			UpdateCategories (DebugPanel.main.categories);
	}

	// Creates the whole panel hierarchy: root canvas, background panel, category grid,
	// field list and the column of control buttons. Order of creation matters for layout.
	void CreateUI () {
		Vector2 controlButtonSize = new Vector2 (60, 60);

		// Root canvas
		GameObject root = new GameObject ();
		root.transform.name = "ViewportDebugPanel";
		root.transform.parent = transform;
		root.transform.SetSiblingIndex (0);
		Canvas rootCanvas = root.AddComponent<Canvas>();
		rootCanvas.sortingOrder = 32767; // render above everything else
		rootCanvas.renderMode = RenderMode.ScreenSpaceOverlay;
		rootCanvas.pixelPerfect = true;
		CanvasScaler scaler = root.AddComponent<CanvasScaler> ();
		scaler.uiScaleMode = CanvasScaler.ScaleMode.ScaleWithScreenSize;
		scaler.screenMatchMode = CanvasScaler.ScreenMatchMode.MatchWidthOrHeight;
		scaler.matchWidthOrHeight = 0.7f;
		scaler.referencePixelsPerUnit = 100;
		rootGroup = root.AddComponent<CanvasGroup> ();
		root.AddComponent<GraphicRaycaster> ();

		// Panel
		GameObject panel = new GameObject();
		panel.name = "Panel";
		panel.transform.parent = root.transform;
		ContentSizeFitter csFitter = panel.AddComponent<ContentSizeFitter> ();
		csFitter.verticalFit = ContentSizeFitter.FitMode.PreferredSize;
		VerticalLayoutGroup vLayout = panel.AddComponent<VerticalLayoutGroup> ();
		vLayout.childForceExpandWidth = true;
		vLayout.childForceExpandHeight = true;
		// Right padding leaves room for the control-button column.
		vLayout.padding = new RectOffset (0, (int) controlButtonSize.x + 5, 0, 0);
		Image background = panel.AddComponent<Image> ();
		background.sprite = null;
		background.color = DebugPanel.main.bgColor;
		panelGroup = panel.AddComponent<CanvasGroup> ();
		RectTransform rect = (RectTransform) panel.transform;
		rect.SetInsetAndSizeFromParentEdge (RectTransform.Edge.Right, 0, 0);
		rect.anchorMin = new Vector2 (0, 1);
		rect.anchorMax = new Vector2 (1, 1);
		rect.pivot = new Vector2 (0, 1);
		rect.anchoredPosition = new Vector2(0, 0);

		// Categories Layout
		GameObject categoriesLayout = new GameObject();
		categoriesLayout.name = "Categories";
		categoriesLayout.transform.parent = panel.transform;
		GridLayoutGroup gLayout = categoriesLayout.AddComponent<GridLayoutGroup>();
		gLayout.padding = new RectOffset(5, 5, 5, 5);
		gLayout.cellSize = new Vector2 (100, 40);
		gLayout.spacing = new Vector2 (4, 4);
		categoriesRect = (RectTransform)gLayout.transform;
		categoriesRect.pivot = new Vector2 (0, 1);

		// Fields Layout
		GameObject fieldsLayout = new GameObject();
		fieldsLayout.name = "Fields";
		fieldsLayout.transform.parent = panel.transform;
		vLayout = fieldsLayout.AddComponent<VerticalLayoutGroup>();
		vLayout.padding = new RectOffset (5, 5, 0, 5);
		vLayout.spacing = 3;
		vLayout.childForceExpandWidth = false;
		vLayout.childForceExpandHeight = false;
		fieldsRect = (RectTransform)vLayout.transform;
		fieldsRect.pivot = new Vector2 (0, 1);

		// Controls Layout
		GameObject controlsLayout = new GameObject();
		controlsLayout.name = "Controls";
		controlsLayout.transform.parent = root.transform;
		vLayout = controlsLayout.AddComponent<VerticalLayoutGroup> ();
		vLayout.padding = new RectOffset (5, 5, 5, 5);
		vLayout.spacing = 3;
		vLayout.childAlignment = TextAnchor.UpperRight;
		vLayout.childForceExpandHeight = false;
		vLayout.childForceExpandWidth = false;
		csFitter = controlsLayout.AddComponent<ContentSizeFitter> ();
		csFitter.verticalFit = ContentSizeFitter.FitMode.PreferredSize;
		csFitter.horizontalFit = ContentSizeFitter.FitMode.PreferredSize;
		rect = (RectTransform)vLayout.transform;
		rect.pivot = new Vector2 (1, 1);
		rect.anchorMin = new Vector2 (1, 1);
		rect.anchorMax = new Vector2 (1, 1);
		rect.anchoredPosition = new Vector2 (0, 0);

		// Lock Button
		Button button = CreateButton("Lock", "LOCK", new Color(1,1,1,0.5f));
		button.transform.SetParent(controlsLayout.transform);
		rect = (RectTransform)button.transform;
		rect.pivot = new Vector2 (1, 1);
		LayoutElement lElement = button.gameObject.AddComponent<LayoutElement> ();
		lElement.minWidth = controlButtonSize.x;
		lElement.minHeight = controlButtonSize.y;
		button.onClick.AddListener(() => Lock ());

		// Clear Button
		button = CreateButton("Clear", "CLEAR", new Color(1,1,1,0.5f));
		button.transform.SetParent(controlsLayout.transform);
		clearButton = (RectTransform)button.transform;
		clearButton.pivot = new Vector2 (1, 1);
		lElement = button.gameObject.AddComponent<LayoutElement> ();
		lElement.minWidth = controlButtonSize.x;
		lElement.minHeight = controlButtonSize.y;
		button.onClick.AddListener(() => DebugPanel.Clear ());

		// Ignore Deflog Button
		button = CreateButton("IgnoreDefLog", "IGNORE\nDEFLOG", new Color(1,1,1,0.5f));
		ignoreButtonImg = button.gameObject.GetComponent<Image> ();
		button.transform.SetParent(controlsLayout.transform);
		ignoreButton = (RectTransform)button.transform;
		ignoreButton.pivot = new Vector2 (1, 1);
		lElement = button.gameObject.AddComponent<LayoutElement> ();
		lElement.minWidth = controlButtonSize.x;
		lElement.minHeight = controlButtonSize.y;
		button.onClick.AddListener(() => IngorDefLog(!DebugPanel.main.ignoreDefLog));

		// Show-all Button
		button = CreateButton("ShowAll", "SHOW\nALL", new Color(1,1,1,0.5f));
		button.transform.SetParent(controlsLayout.transform);
		showAllButton = (RectTransform)button.transform;
		showAllButton.pivot = new Vector2 (1, 1);
		lElement = button.gameObject.AddComponent<LayoutElement> ();
		lElement.minWidth = controlButtonSize.x;
		lElement.minHeight = controlButtonSize.y;
		button.onClick.AddListener(() => DebugPanel.TurnAll (true));

		// Hide-all Button
		button = CreateButton("HideAll", "HIDE\nALL", new Color(1,1,1,0.5f));
		button.transform.SetParent(controlsLayout.transform);
		hideAllButton = (RectTransform)button.transform;
		hideAllButton.pivot = new Vector2 (1, 1);
		lElement = button.gameObject.AddComponent<LayoutElement> ();
		lElement.minWidth = controlButtonSize.x;
		lElement.minHeight = controlButtonSize.y;
		button.onClick.AddListener(() => DebugPanel.TurnAll (false));

		Canvas.ForceUpdateCanvases ();
	}

	// Synchronizes the category button row with the given category map:
	// creates buttons for new categories, destroys buttons for removed ones,
	// and re-sorts the row when anything changed.
	public void UpdateCategories(Dictionary<string, bool> target) {
		keys = new List<string> (target.Keys);
		Button button;
		Color color;
		bool changed = false;
		foreach (string key in keys) {
			if (!categories.ContainsKey(key)) {
				if (key == "") continue;
				if (DebugPanel.main.ignoreDefLog && DebugPanel.IsDeflog(key)) continue;
				color = GetColorbyCategory(key);
				color.a = 0.6f;
				button = CreateButton(key, key, color);
				button.name = key;
				button.transform.SetParent(categoriesRect.transform);
				button.transform.localScale = new Vector3(1, 1, 1);
				AddCategoryListener(button, key);
				categories.Add(key, button);
				changed = true;
			}
		}
		keys = new List<string> (categories.Keys);
		foreach (string key in keys) {
			if (!target.ContainsKey(key)) {
				Destroy(categories[key].gameObject);
				categories.Remove(key);
				changed = true;
			}
		}
		if (changed) {
			keys = new List<string> (categories.Keys);
			// Alphabetical, with Warning/Error/System pushed to the end of the sorted list.
			keys.Sort (delegate(string x, string y) {
				if (x == "Warning") return 1;
				if (y == "Warning") return -1;
				if (x == "Error") return 1;
				if (y == "Error") return -1;
				if (x == "System") return 1;
				if (y == "System") return -1;
				return x.CompareTo(y);
			});
			// Inserting each at sibling index 0 reverses the sorted order in the UI,
			// so the special categories end up first.
			for (int i = 0; i < keys.Count; i++)
				categories[keys[i]].transform.SetSiblingIndex(0);
		}
	}

	// Wires a category button to toggle its category on/off and to dim the button when off.
	void AddCategoryListener (Button button, string key) {
		button.onClick.AddListener(() => {
			DebugPanel.main.categories[key] = !DebugPanel.main.categories[key];
			Image img = button.gameObject.GetComponent<Image>();
			Color color = GetColorbyCategory(button.name);
			if (!DebugPanel.main.categories[key]) color = Color.Lerp(color, Color.black, 0.5f);
			color.a = 0.6f;
			img.color = color;
		});
	}

	// Maps the well-known category names to their display colors; everything else is white.
	Color GetColorbyCategory(string key) {
		Color color;
		switch (key) {
		case "System": color = systemColor; break;
		case "Warning": color = warningColor; break;
		case "Error": color = errorColor; break;
		default: color = Color.white; break;
		}
		return color;
	}

	// Toggles "ignore default log" mode ("Ingor" [sic] — matches DebugPanel.IngorDefLog).
	// When enabled, the Error and Warning category buttons are removed from the row.
	void IngorDefLog (bool i) {
		DebugPanel.IngorDefLog (i);
		ignoreButtonImg.color = DebugPanel.main.ignoreDefLog ? systemColor : new Color (1, 1, 1, 0.5f);
		if (i) {
			keys = new List<string>();
			keys.Add("Error");
			keys.Add("Warning");
			foreach (string key in keys) {
				if (categories.ContainsKey(key)) {
					Destroy(categories[key].gameObject);
					categories.Remove(key);
				}
			}
		}
	}

	// Synchronizes the visible field list with the given parameters map; fields are
	// created for visible categories and destroyed once their GameObject is deactivated.
	public void UpdateFields(Dictionary<string, DebugPanel.Field> target) {
		keys = new List<string> (target.Keys);
		foreach (string key in keys) {
			if (!fields.ContainsKey(key) && DebugPanel.main.categories[target[key].category]) {
				fields.Add(key, CreateField(key));
				fieldsListChanged = true;
			}
		}
		foreach (Transform child in fieldsRect) {
			// Deactivation (see RemoveField) marks a field for destruction.
			if (!child.gameObject.activeSelf) {
				fields.Remove(child.name);
				Destroy(child.gameObject);
				fieldsListChanged = true;
			}
		}
		if (fieldsListChanged) {
			StartCoroutine("SortFields");
		}
	}

	// Reorders field rows one frame later (yield return 0) so newly created rows exist:
	// grouped by category (special categories first — note the reversed x/y comparison),
	// then alphabetically by field name within each category.
	IEnumerator SortFields() {
		yield return 0;
		keys = new List<string> (categories.Keys);
		keys.Add("");
		keys.Sort (delegate(string y, string x) {
			if (x == "Warning") return 1;
			if (y == "Warning") return -1;
			if (x == "Error") return 1;
			if (y == "Error") return -1;
			if (x == "System") return 1;
			if (y == "System") return -1;
			return x.CompareTo(y);
		});
		List<string> keys2 = new List<string>(fields.Keys);
		keys2.Sort(delegate(string x, string y) {
			return x.CompareTo(y);
		});
		int index = 0;
		foreach(string cat in keys)
			foreach(string field in keys2)
				if (DebugPanel.main.parameters.ContainsKey(field) && DebugPanel.main.parameters[field].category == cat) {
					fields[field].transform.SetSiblingIndex(index);
					index ++;
				}
		fieldsListChanged = false;
	}

	// Deactivates a field row; the actual Destroy happens in the next UpdateFields pass.
	public static void RemoveField (string key) {
		if (!main.fields.ContainsKey(key)) return;
		main.fields [key].gameObject.SetActive (false);
	}

	// Creates a DebugPanelField row under the fields layout, colored by its category.
	DebugPanelField CreateField(string key) {
		GameObject layout = new GameObject ();
		layout.name = key;
		layout.transform.parent = fieldsRect;
		Color color;
		switch (DebugPanel.main.parameters[key].category) {
		case "System": color = systemColor; break;
		case "Warning": color = warningColor; break;
		case "Error": color = errorColor; break;
		default: color = Color.white; break;
		}
		DebugPanelField field = layout.AddComponent<DebugPanelField> ();
		field.color = color;
		return field;
	}

	// Locks/unlocks the panel: hides all control buttons and the category row,
	// stops raycast blocking, and dims the whole canvas when locked.
	void Lock(bool l) {
		uiLock = l;
		clearButton.gameObject.SetActive (!l);
		showAllButton.gameObject.SetActive (!l);
		hideAllButton.gameObject.SetActive (!l);
		ignoreButton.gameObject.SetActive (!l);
		categoriesRect.gameObject.SetActive (!l);
		panelGroup.blocksRaycasts = !l;
		rootGroup.alpha = l ? DebugPanel.main.lockAlphaMultiplier : 1.0f;
	}

	// Toggles the lock state (used by the LOCK button).
	void Lock() {
		Lock (!uiLock);
	}

	// Builds a simple Image+Button with a centered black Arial label stretched to the button.
	Button CreateButton(string buttonName, string label, Color color) {
		GameObject buttonGO = new GameObject();
		buttonGO.name = buttonName;
		Image img = buttonGO.AddComponent<Image> ();
		img.sprite = null;
		img.color = color;
		Button button = buttonGO.AddComponent<Button> ();
		GameObject labelGO = new GameObject();
		labelGO.name = "Label";
		labelGO.transform.parent = buttonGO.transform;
		Text text = labelGO.AddComponent<Text>();
		text.text = label;
		text.font = arial;
		text.fontSize = 12;
		text.color = Color.black;
		text.alignment = TextAnchor.MiddleCenter;
		RectTransform rect = (RectTransform) labelGO.transform;
		rect.anchorMin = new Vector2 (0, 0);
		rect.anchorMax = new Vector2 (1, 1);
		rect.offsetMin = new Vector2 (0, 0);
		rect.offsetMax = new Vector2 (0, 0);
		return button;
	}
}
| |
// <copyright file="DenseVectorTests.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2013 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Complex32;
using NUnit.Framework;
using System;
using System.Collections.Generic;
namespace MathNet.Numerics.UnitTests.LinearAlgebraTests.Complex32
{
using Numerics;
/// <summary>
/// Dense vector tests.
/// </summary>
public class DenseVectorTests : VectorTests
{
    /// <summary>
    /// Creates a new instance of the Vector class.
    /// </summary>
    /// <param name="size">The size of the <strong>Vector</strong> to construct.</param>
    /// <returns>The new <c>Vector</c>.</returns>
    protected override Vector<Complex32> CreateVector(int size)
    {
        return new DenseVector(size);
    }

    /// <summary>
    /// Creates a new instance of the Vector class.
    /// </summary>
    /// <param name="data">The array to create this vector from.</param>
    /// <returns>The new <c>Vector</c>.</returns>
    protected override Vector<Complex32> CreateVector(IList<Complex32> data)
    {
        var vector = new DenseVector(data.Count);
        for (var index = 0; index < data.Count; index++)
        {
            vector[index] = data[index];
        }

        return vector;
    }

    /// <summary>
    /// Can create a dense vector form array.
    /// </summary>
    [Test]
    public void CanCreateDenseVectorFromArray()
    {
        var data = new Complex32[Data.Length];
        Array.Copy(Data, data, Data.Length);
        var vector = new DenseVector(data);

        for (var i = 0; i < data.Length; i++)
        {
            Assert.AreEqual(data[i], vector[i]);
        }

        // The vector wraps the array by reference: writing through the vector
        // must be visible in the original array.
        vector[0] = new Complex32(10.0f, 1);
        Assert.AreEqual(new Complex32(10.0f, 1), data[0]);
    }

    /// <summary>
    /// Can create a dense vector from another dense vector.
    /// </summary>
    [Test]
    public void CanCreateDenseVectorFromAnotherDenseVector()
    {
        var vector = new DenseVector(Data);
        var other = DenseVector.OfVector(vector);

        Assert.AreNotSame(vector, other);
        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(vector[i], other[i]);
        }
    }

    /// <summary>
    /// Can create a dense vector from another vector.
    /// </summary>
    [Test]
    public void CanCreateDenseVectorFromAnotherVector()
    {
        var vector = (Vector<Complex32>) new DenseVector(Data);
        var other = DenseVector.OfVector(vector);

        Assert.AreNotSame(vector, other);
        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(vector[i], other[i]);
        }
    }

    /// <summary>
    /// Can create a dense vector from user defined vector.
    /// </summary>
    [Test]
    public void CanCreateDenseVectorFromUserDefinedVector()
    {
        var vector = new UserDefinedVector(Data);
        var other = DenseVector.OfVector(vector);

        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(vector[i], other[i]);
        }
    }

    /// <summary>
    /// Can create a dense vector with constant values.
    /// </summary>
    [Test]
    public void CanCreateDenseVectorWithConstantValues()
    {
        var vector = DenseVector.Create(5, 5);
        foreach (var t in vector)
        {
            Assert.AreEqual(t, new Complex32(5.0f, 0));
        }
    }

    /// <summary>
    /// Can create a dense matrix.
    /// </summary>
    [Test]
    public void CanCreateDenseMatrix()
    {
        var vector = new DenseVector(3);
        var matrix = Matrix<Complex32>.Build.SameAs(vector, 2, 3);
        Assert.IsInstanceOf<DenseMatrix>(matrix);
        Assert.AreEqual(2, matrix.RowCount);
        Assert.AreEqual(3, matrix.ColumnCount);
    }

    /// <summary>
    /// Can convert a dense vector to an array.
    /// </summary>
    [Test]
    public void CanConvertDenseVectorToArray()
    {
        var vector = new DenseVector(Data);
        var array = (Complex32[]) vector;
        Assert.IsInstanceOf(typeof (Complex32[]), array);
        CollectionAssert.AreEqual(vector, array);
    }

    /// <summary>
    /// Can convert an array to a dense vector.
    /// </summary>
    [Test]
    public void CanConvertArrayToDenseVector()
    {
        var array = new[] {new Complex32(1, 1), new Complex32(2, 1), new Complex32(3, 1), new Complex32(4, 1)};
        var vector = (DenseVector) array;
        Assert.IsInstanceOf(typeof (DenseVector), vector);
        // FIX: compare the converted vector against the source array; the original
        // compared the array to itself (CollectionAssert.AreEqual(array, array)),
        // which always passes and never exercises the conversion.
        CollectionAssert.AreEqual(array, vector);
    }

    /// <summary>
    /// Can call unary plus operator on a vector.
    /// </summary>
    [Test]
    public void CanCallUnaryPlusOperatorOnDenseVector()
    {
        var vector = new DenseVector(Data);
        var other = +vector;
        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(vector[i], other[i]);
        }
    }

    /// <summary>
    /// Can add two dense vectors using "+" operator.
    /// </summary>
    [Test]
    public void CanAddTwoDenseVectorsUsingOperator()
    {
        var vector = new DenseVector(Data);
        var other = new DenseVector(Data);
        var result = vector + other;
        CollectionAssert.AreEqual(Data, vector, "Making sure the original vector wasn't modified.");
        CollectionAssert.AreEqual(Data, other, "Making sure the original vector wasn't modified.");

        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(Data[i]*2.0f, result[i]);
        }
    }

    /// <summary>
    /// Can call unary negate operator on a dense vector.
    /// </summary>
    [Test]
    public void CanCallUnaryNegationOperatorOnDenseVector()
    {
        var vector = new DenseVector(Data);
        var other = -vector;
        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(-Data[i], other[i]);
        }
    }

    /// <summary>
    /// Can subtract two dense vectors using "-" operator.
    /// </summary>
    [Test]
    public void CanSubtractTwoDenseVectorsUsingOperator()
    {
        var vector = new DenseVector(Data);
        var other = new DenseVector(Data);
        var result = vector - other;
        CollectionAssert.AreEqual(Data, vector, "Making sure the original vector wasn't modified.");
        CollectionAssert.AreEqual(Data, other, "Making sure the original vector wasn't modified.");

        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(Complex32.Zero, result[i]);
        }
    }

    /// <summary>
    /// Can multiply a dense vector by a Complex using "*" operator.
    /// </summary>
    [Test]
    public void CanMultiplyDenseVectorByComplexUsingOperators()
    {
        var vector = new DenseVector(Data);
        vector = vector*new Complex32(2.0f, 1);

        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(Data[i]*new Complex32(2.0f, 1), vector[i]);
        }

        // Multiplying by 1 must be the identity.
        vector = vector*1.0f;
        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(Data[i]*new Complex32(2.0f, 1), vector[i]);
        }

        vector = new DenseVector(Data);
        vector = new Complex32(2.0f, 1)*vector;

        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(Data[i]*new Complex32(2.0f, 1), vector[i]);
        }

        vector = 1.0f*vector;
        for (var i = 0; i < Data.Length; i++)
        {
            Assert.AreEqual(Data[i]*new Complex32(2.0f, 1), vector[i]);
        }
    }

    /// <summary>
    /// Can divide a dense vector by a Complex using "/" operator.
    /// </summary>
    [Test]
    public void CanDivideDenseVectorByComplexUsingOperators()
    {
        var vector = new DenseVector(Data);
        vector = vector/new Complex32(2.0f, 1);

        for (var i = 0; i < Data.Length; i++)
        {
            AssertHelpers.AlmostEqualRelative(Data[i]/new Complex32(2.0f, 1), vector[i], 6);
        }

        // Dividing by 1 must be the identity.
        vector = vector/1.0f;
        for (var i = 0; i < Data.Length; i++)
        {
            AssertHelpers.AlmostEqualRelative(Data[i]/new Complex32(2.0f, 1), vector[i], 6);
        }
    }

    /// <summary>
    /// Can calculate an outer product for a dense vector.
    /// </summary>
    [Test]
    public void CanCalculateOuterProductForDenseVector()
    {
        var vector1 = CreateVector(Data);
        var vector2 = CreateVector(Data);
        var m = Vector<Complex32>.OuterProduct(vector1, vector2);
        for (var i = 0; i < vector1.Count; i++)
        {
            for (var j = 0; j < vector2.Count; j++)
            {
                Assert.AreEqual(m[i, j], vector1[i]*vector2[j]);
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using Xunit;
using Xunit.NetCore.Extensions;
namespace System.Diagnostics.Tests
{
public partial class ProcessTests : ProcessTestBase
{
// Test helper: a Process subclass that records whether Dispose was reached
// via the finalizer (disposing == false) rather than an explicit Dispose call.
private class FinalizingProcess : Process
{
    // volatile: set on the finalizer thread, read by the test thread.
    public static volatile bool WasFinalized;

    // Creates an instance without keeping any reference, making it immediately GC-eligible.
    public static void CreateAndRelease()
    {
        new FinalizingProcess();
    }

    protected override void Dispose(bool disposing)
    {
        if (!disposing)
        {
            // Reached from the finalizer path.
            WasFinalized = true;
        }
        base.Dispose(disposing);
    }
}
// Sets the test process's priority class, refreshes the cached process info,
// and asserts that BasePriority reflects the expected value.
private void SetAndCheckBasePriority(ProcessPriorityClass exPriorityClass, int priority)
{
    _process.PriorityClass = exPriorityClass;
    _process.Refresh(); // re-read process information so BasePriority is current
    Assert.Equal(priority, _process.BasePriority);
}
// Asserts the platform-specific expectation for a counter value:
// non-zero on Windows, exactly zero on Unix-like systems.
private void AssertNonZeroWindowsZeroUnix(long value)
{
    bool onWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
    if (!onWindows)
    {
        Assert.Equal(0, value);
    }
    else
    {
        Assert.NotEqual(0, value);
    }
}
// Verifies the Windows mapping from ProcessPriorityClass to base priority
// (High -> 13, Idle -> 4, Normal -> 8), restoring the original class afterwards.
[ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsNanoServer))]
[PlatformSpecific(TestPlatforms.Windows)] // Expected behavior varies on Windows and Unix
public void TestBasePriorityOnWindows()
{
    ProcessPriorityClass originalPriority = _process.PriorityClass;
    Assert.Equal(ProcessPriorityClass.Normal, originalPriority);

    try
    {
        // We are not checking for RealTime case here, as RealTime priority process can
        // preempt the threads of all other processes, including operating system processes
        // performing important tasks, which may cause the machine to be unresponsive.

        //SetAndCheckBasePriority(ProcessPriorityClass.RealTime, 24);

        SetAndCheckBasePriority(ProcessPriorityClass.High, 13);
        SetAndCheckBasePriority(ProcessPriorityClass.Idle, 4);
        SetAndCheckBasePriority(ProcessPriorityClass.Normal, 8);
    }
    finally
    {
        // Always restore so later tests see the default priority.
        _process.PriorityClass = originalPriority;
    }
}
// Verifies the Unix mapping from PriorityClass to nice values
// (High -> -11, Idle -> 19, Normal -> 0). Raising priority (negative nice)
// requires elevation, hence the RequiresElevation trait.
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Expected behavior varies on Windows and Unix
[OuterLoop]
[Trait(XunitConstants.Category, XunitConstants.RequiresElevation)]
public void TestBasePriorityOnUnix()
{
    ProcessPriorityClass originalPriority = _process.PriorityClass;
    Assert.Equal(ProcessPriorityClass.Normal, originalPriority);
    try
    {
        SetAndCheckBasePriority(ProcessPriorityClass.High, -11);
        SetAndCheckBasePriority(ProcessPriorityClass.Idle, 19);
        SetAndCheckBasePriority(ProcessPriorityClass.Normal, 0);
    }
    finally
    {
        _process.PriorityClass = originalPriority;
    }
}
// Exercises EnableRaisingEvents in three states: explicitly true, explicitly
// false, and unset (null => default false). The Exited handler must fire only
// when events are enabled.
[Theory]
[InlineData(true)]
[InlineData(false)]
[InlineData(null)]
public void TestEnableRaiseEvents(bool? enable)
{
    bool exitedInvoked = false;
    Process p = CreateProcessLong();
    if (enable.HasValue)
    {
        p.EnableRaisingEvents = enable.Value;
    }
    p.Exited += delegate { exitedInvoked = true; };
    StartSleepKillWait(p);
    if (enable.GetValueOrDefault())
    {
        // There's no guarantee that the Exited callback will be invoked by
        // the time Process.WaitForExit completes, though it's extremely likely.
        // There could be a race condition where WaitForExit is returning from
        // its wait and sees that the callback is already running asynchronously,
        // at which point it returns to the caller even if the callback hasn't
        // entirely completed. As such, we spin until the value is set.
        Assert.True(SpinWait.SpinUntil(() => exitedInvoked, WaitInMS));
    }
    else
    {
        Assert.False(exitedInvoked);
    }
}
// A process that runs to completion reports SuccessExitCode; a killed
// process reports a non-zero exit code.
[Fact]
public void TestExitCode()
{
    {
        Process p = CreateProcess();
        p.Start();
        Assert.True(p.WaitForExit(WaitInMS));
        Assert.Equal(SuccessExitCode, p.ExitCode);
    }
    {
        Process p = CreateProcessLong();
        StartSleepKillWait(p);
        Assert.NotEqual(0, p.ExitCode);
    }
}
// On Unix, UseShellExecute routes through the shell, so the shell builtin
// "exit 42" runs and the exit code propagates back to the Process object.
[PlatformSpecific(TestPlatforms.AnyUnix)] // Tests UseShellExecute with ProcessStartInfo
[Fact]
public void TestUseShellExecute_Unix_Succeeds()
{
    using (var p = Process.Start(new ProcessStartInfo { UseShellExecute = true, FileName = "exit", Arguments = "42" }))
    {
        Assert.True(p.WaitForExit(WaitInMS));
        Assert.Equal(42, p.ExitCode);
    }
}
// ExitTime must throw while the process is running and, after exit, report a
// timestamp no earlier than (slightly before) process start.
[Fact]
public void TestExitTime()
{
    // ExitTime resolution on some platforms is less accurate than our DateTime.UtcNow resolution, so
    // we subtract ms from the begin time to account for it.
    DateTime timeBeforeProcessStart = DateTime.UtcNow.AddMilliseconds(-25);
    Process p = CreateProcessLong();
    p.Start();
    Assert.Throws<InvalidOperationException>(() => p.ExitTime);
    p.Kill();
    Assert.True(p.WaitForExit(WaitInMS));
    // NOTE(review): the last diagnostic line evaluates DateTime.Now twice, so
    // the printed Ticks are local-time ticks next to a universal time — the
    // message is diagnostic-only, so this is cosmetic.
    Assert.True(p.ExitTime.ToUniversalTime() >= timeBeforeProcessStart,
        $@"TestExitTime is incorrect. " +
        $@"TimeBeforeStart {timeBeforeProcessStart} Ticks={timeBeforeProcessStart.Ticks}, " +
        $@"ExitTime={p.ExitTime}, Ticks={p.ExitTime.Ticks}, " +
        $@"ExitTimeUniversal {p.ExitTime.ToUniversalTime()} Ticks={p.ExitTime.ToUniversalTime().Ticks}, " +
        $@"NowUniversal {DateTime.Now.ToUniversalTime()} Ticks={DateTime.Now.Ticks}");
}
// Cross-checks Process.Id: on Windows against the Id derived from the native
// handle, on Unix against the ids enumerated by name for the host runner.
[Fact]
public void TestId()
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        Assert.Equal(_process.Id, Interop.GetProcessId(_process.SafeHandle));
    }
    else
    {
        IEnumerable<int> testProcessIds = Process.GetProcessesByName(HostRunnerName).Select(p => p.Id);
        Assert.Contains(_process.Id, testProcessIds);
    }
}
// HasExited must be true after a process runs to completion, false while a
// long-running process is alive, and true again once it has been killed.
[Fact]
public void TestHasExited()
{
    {
        Process p = CreateProcess();
        p.Start();
        Assert.True(p.WaitForExit(WaitInMS));
        Assert.True(p.HasExited, "TestHasExited001 failed");
    }
    {
        Process p = CreateProcessLong();
        p.Start();
        try
        {
            Assert.False(p.HasExited, "TestHasExited002 failed");
        }
        finally
        {
            // Always tear the long-running child down, even if the assert fails.
            p.Kill();
            Assert.True(p.WaitForExit(WaitInMS));
        }
        Assert.True(p.HasExited, "TestHasExited003 failed");
    }
}
[Fact]
public void TestMachineName()
{
    // MachineName should always yield some value for the test process.
    string machineName = _process.MachineName;
    Assert.NotNull(machineName);
}
[Fact]
public void TestMainModuleOnNonOSX()
{
    // The test host is "dotnet" ("dotnet.exe" on Windows); the main module
    // should be that host and report a matching name, path, and ToString().
    string fileName = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? "dotnet.exe" : "dotnet";
    Process p = Process.GetCurrentProcess();
    Assert.True(p.Modules.Count > 0);
    ProcessModule mainModule = p.MainModule;
    Assert.Equal(fileName, mainModule.ModuleName);
    Assert.EndsWith(fileName, mainModule.FileName);
    Assert.Equal(string.Format("System.Diagnostics.ProcessModule ({0})", fileName), mainModule.ToString());
}
// Reads MaxWorkingSet for the current and the test process; on Windows also
// sets a new maximum and cross-checks the effective value via the native
// GetProcessWorkingSetSizeEx, restoring the original on exit.
[Fact]
public void TestMaxWorkingSet()
{
    using (Process p = Process.GetCurrentProcess())
    {
        Assert.True((long)p.MaxWorkingSet > 0);
        Assert.True((long)p.MinWorkingSet >= 0);
    }
    if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
        return; // doesn't support getting/setting working set for other processes
    long curValue = (long)_process.MaxWorkingSet;
    Assert.True(curValue >= 0);
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        try
        {
            // Bump the max by 1 KiB, then read back what the OS actually
            // applied (the OS may adjust the requested value).
            _process.MaxWorkingSet = (IntPtr)((int)curValue + 1024);
            IntPtr min, max;
            uint flags;
            Interop.GetProcessWorkingSetSizeEx(_process.SafeHandle, out min, out max, out flags);
            curValue = (int)max;
            _process.Refresh();
            Assert.Equal(curValue, (int)_process.MaxWorkingSet);
        }
        finally
        {
            _process.MaxWorkingSet = (IntPtr)curValue;
        }
    }
}
// Mirror of TestMaxWorkingSet for MinWorkingSet: on Windows lowers the
// minimum by 1 KiB, reads back the OS-applied value natively, verifies the
// managed property agrees, and restores the original.
[Fact]
public void TestMinWorkingSet()
{
    using (Process p = Process.GetCurrentProcess())
    {
        Assert.True((long)p.MaxWorkingSet > 0);
        Assert.True((long)p.MinWorkingSet >= 0);
    }
    if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
        return; // doesn't support getting/setting working set for other processes
    long curValue = (long)_process.MinWorkingSet;
    Assert.True(curValue >= 0);
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        try
        {
            _process.MinWorkingSet = (IntPtr)((int)curValue - 1024);
            IntPtr min, max;
            uint flags;
            Interop.GetProcessWorkingSetSizeEx(_process.SafeHandle, out min, out max, out flags);
            curValue = (int)min;
            _process.Refresh();
            Assert.Equal(curValue, (int)_process.MinWorkingSet);
        }
        finally
        {
            _process.MinWorkingSet = (IntPtr)curValue;
        }
    }
}
// Enumerates the current process's loaded modules and checks that the basic
// identity properties are populated and the address/size getters don't throw.
[Fact]
public void TestModules()
{
    ProcessModuleCollection moduleCollection = Process.GetCurrentProcess().Modules;
    foreach (ProcessModule pModule in moduleCollection)
    {
        // Validated that we can get a value for each of the following.
        Assert.NotNull(pModule);
        Assert.NotNull(pModule.FileName);
        Assert.NotNull(pModule.ModuleName);
        // Just make sure these don't throw
        IntPtr baseAddr = pModule.BaseAddress;
        IntPtr entryAddr = pModule.EntryPointAddress;
        int memSize = pModule.ModuleMemorySize;
    }
}
// Non-paged system memory: real value on Windows, 0 on Unix.
[Fact]
public void TestNonpagedSystemMemorySize64()
{
    AssertNonZeroWindowsZeroUnix(_process.NonpagedSystemMemorySize64);
}
// Paged memory: real value on Windows, 0 on Unix.
[Fact]
public void TestPagedMemorySize64()
{
    AssertNonZeroWindowsZeroUnix(_process.PagedMemorySize64);
}
// Paged system memory: real value on Windows, 0 on Unix.
[Fact]
public void TestPagedSystemMemorySize64()
{
    AssertNonZeroWindowsZeroUnix(_process.PagedSystemMemorySize64);
}
// Peak paged memory: real value on Windows, 0 on Unix.
[Fact]
public void TestPeakPagedMemorySize64()
{
    AssertNonZeroWindowsZeroUnix(_process.PeakPagedMemorySize64);
}
// Peak virtual memory: real value on Windows, 0 on Unix.
[Fact]
public void TestPeakVirtualMemorySize64()
{
    AssertNonZeroWindowsZeroUnix(_process.PeakVirtualMemorySize64);
}
// Peak working set: real value on Windows, 0 on Unix.
[Fact]
public void TestPeakWorkingSet64()
{
    AssertNonZeroWindowsZeroUnix(_process.PeakWorkingSet64);
}
// Private memory: real value on Windows, 0 on Unix.
[Fact]
public void TestPrivateMemorySize64()
{
    AssertNonZeroWindowsZeroUnix(_process.PrivateMemorySize64);
}
// Virtual memory size is populated on every platform.
[Fact]
public void TestVirtualMemorySize64()
{
    Assert.True(_process.VirtualMemorySize64 > 0);
}
// Working set is positive everywhere except OSX, where resident memory can
// legitimately report 0.
[Fact]
public void TestWorkingSet64()
{
    if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
    {
        // resident memory can be 0 on OSX.
        Assert.True(_process.WorkingSet64 >= 0);
        return;
    }
    Assert.True(_process.WorkingSet64 > 0);
}
// Processor-time counters must be non-negative, and TotalProcessorTime must
// be monotonically non-decreasing: a sample taken halfway through a CPU spin
// must lie between the before-spin and after-spin samples.
[Fact]
public void TestProcessorTime()
{
    Assert.True(_process.UserProcessorTime.TotalSeconds >= 0);
    Assert.True(_process.PrivilegedProcessorTime.TotalSeconds >= 0);
    double processorTimeBeforeSpin = Process.GetCurrentProcess().TotalProcessorTime.TotalSeconds;
    double processorTimeAtHalfSpin = 0;
    // Perform loop to occupy cpu, takes less than a second.
    int i = int.MaxValue / 16;
    while (i > 0)
    {
        i--;
        if (i == int.MaxValue / 32)
            processorTimeAtHalfSpin = Process.GetCurrentProcess().TotalProcessorTime.TotalSeconds;
    }
    Assert.InRange(processorTimeAtHalfSpin, processorTimeBeforeSpin, Process.GetCurrentProcess().TotalProcessorTime.TotalSeconds);
}
// Spawns a child that prints its own StartTime (UTC) and verifies the value
// lies within a tolerance window around the spawn.
// NOTE(review): DateTime.Parse here uses the current culture on both sides;
// this assumes parent and child share a culture — confirm if tests may run
// under differing cultures.
[ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // https://github.com/Microsoft/BashOnWindows/issues/974
public void TestProcessStartTime()
{
    TimeSpan allowedWindow = TimeSpan.FromSeconds(3);
    DateTime testStartTime = DateTime.UtcNow;
    using (var remote = RemoteInvoke(() => { Console.Write(Process.GetCurrentProcess().StartTime.ToUniversalTime()); return SuccessExitCode; },
        new RemoteInvokeOptions { StartInfo = new ProcessStartInfo { RedirectStandardOutput = true } }))
    {
        DateTime remoteStartTime = DateTime.Parse(remote.Process.StandardOutput.ReadToEnd());
        DateTime curTime = DateTime.UtcNow;
        Assert.InRange(remoteStartTime, testStartTime - allowedWindow, curTime + allowedWindow);
    }
}
// Pins the test process to CPU 0 via ProcessorAffinity, verifies the value
// round-trips, and restores (and re-verifies) the original mask.
[ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // https://github.com/Microsoft/BashOnWindows/issues/968
[PlatformSpecific(~TestPlatforms.OSX)] // getting/setting affinity not supported on OSX
public void TestProcessorAffinity()
{
    IntPtr curProcessorAffinity = _process.ProcessorAffinity;
    try
    {
        _process.ProcessorAffinity = new IntPtr(0x1);
        Assert.Equal(new IntPtr(0x1), _process.ProcessorAffinity);
    }
    finally
    {
        _process.ProcessorAffinity = curProcessorAffinity;
        Assert.Equal(curProcessorAffinity, _process.ProcessorAffinity);
    }
}
// PriorityBoostEnabled must round-trip both true and false; the original
// value is restored afterwards.
[Fact]
public void TestPriorityBoostEnabled()
{
    bool isPriorityBoostEnabled = _process.PriorityBoostEnabled;
    try
    {
        _process.PriorityBoostEnabled = true;
        Assert.True(_process.PriorityBoostEnabled, "TestPriorityBoostEnabled001 failed");
        _process.PriorityBoostEnabled = false;
        Assert.False(_process.PriorityBoostEnabled, "TestPriorityBoostEnabled002 failed");
    }
    finally
    {
        _process.PriorityBoostEnabled = isPriorityBoostEnabled;
    }
}
// PriorityClass must round-trip High and Normal on Unix; raising priority
// requires elevation. The original value is restored afterwards.
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Expected behavior varies on Windows and Unix
[OuterLoop]
[Trait(XunitConstants.Category, XunitConstants.RequiresElevation)]
public void TestPriorityClassUnix()
{
    ProcessPriorityClass priorityClass = _process.PriorityClass;
    try
    {
        // Fixed: xUnit's Assert.Equal takes (expected, actual); the original
        // had the arguments reversed, producing misleading failure messages.
        _process.PriorityClass = ProcessPriorityClass.High;
        Assert.Equal(ProcessPriorityClass.High, _process.PriorityClass);
        _process.PriorityClass = ProcessPriorityClass.Normal;
        Assert.Equal(ProcessPriorityClass.Normal, _process.PriorityClass);
    }
    finally
    {
        _process.PriorityClass = priorityClass;
    }
}
// PriorityClass must round-trip High and Normal on Windows; the original
// value is restored afterwards.
[Fact, PlatformSpecific(TestPlatforms.Windows)] // Expected behavior varies on Windows and Unix
public void TestPriorityClassWindows()
{
    ProcessPriorityClass priorityClass = _process.PriorityClass;
    try
    {
        // Fixed: xUnit's Assert.Equal takes (expected, actual); the original
        // had the arguments reversed, producing misleading failure messages.
        _process.PriorityClass = ProcessPriorityClass.High;
        Assert.Equal(ProcessPriorityClass.High, _process.PriorityClass);
        _process.PriorityClass = ProcessPriorityClass.Normal;
        Assert.Equal(ProcessPriorityClass.Normal, _process.PriorityClass);
    }
    finally
    {
        _process.PriorityClass = priorityClass;
    }
}
[Fact]
public void TestInvalidPriorityClass()
{
    // Or-ing two distinct priority classes yields an undefined enum value,
    // which the setter must reject.
    var p = new Process();
    ProcessPriorityClass invalid = ProcessPriorityClass.Normal | ProcessPriorityClass.Idle;
    Assert.Throws<ArgumentException>(() => { p.PriorityClass = invalid; });
}
// ProcessName (minus extension) must match the host runner's file name,
// compared case-insensitively for Windows' benefit.
[Fact]
public void TestProcessName()
{
    Assert.Equal(Path.GetFileNameWithoutExtension(_process.ProcessName), Path.GetFileNameWithoutExtension(HostRunner), StringComparer.OrdinalIgnoreCase);
}
// The test process's SafeHandle must be a valid OS handle.
[Fact]
public void TestSafeHandle()
{
    Assert.False(_process.SafeHandle.IsInvalid);
}
// Cross-checks SessionId against the native source of truth:
// ProcessIdToSessionId on Windows, getsid(2) on Unix.
[Fact]
public void TestSessionId()
{
    uint sessionId;
    if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
    {
        Interop.ProcessIdToSessionId((uint)_process.Id, out sessionId);
    }
    else
    {
        sessionId = (uint)Interop.getsid(_process.Id);
    }
    Assert.Equal(sessionId, (uint)_process.SessionId);
}
// GetCurrentProcess must return a non-null object whose Id matches the
// native current-process id (GetCurrentProcessId / getpid).
[Fact]
public void TestGetCurrentProcess()
{
    Process current = Process.GetCurrentProcess();
    Assert.NotNull(current);
    int currentProcessId = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ?
        Interop.GetCurrentProcessId() :
        Interop.getpid();
    Assert.Equal(currentProcessId, current.Id);
}
// Looking up the test process by Id must return a Process with matching
// Id and ProcessName.
[Fact]
public void TestGetProcessById()
{
    Process p = Process.GetProcessById(_process.Id);
    Assert.Equal(_process.Id, p.Id);
    Assert.Equal(_process.ProcessName, p.ProcessName);
}
// On Unix, pid 1 (init/systemd) always exists and should be retrievable.
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Uses P/Invokes to get process Id
public void TestRootGetProcessById()
{
    Process p = Process.GetProcessById(1);
    Assert.Equal(1, p.Id);
}
[Fact]
public void TestGetProcesses()
{
    Process currentProcess = Process.GetCurrentProcess();
    // Get all the processes running on the machine and verify the current
    // process appears among them — first via the local overload...
    bool foundCurrentProcess = Process.GetProcesses()
        .Any(p => (p.Id == currentProcess.Id) && (p.ProcessName.Equals(currentProcess.ProcessName)));
    Assert.True(foundCurrentProcess, "TestGetProcesses001 failed");
    // ...then via the machine-name overload.
    foundCurrentProcess = Process.GetProcesses(currentProcess.MachineName)
        .Any(p => (p.Id == currentProcess.Id) && (p.ProcessName.Equals(currentProcess.ProcessName)));
    Assert.True(foundCurrentProcess, "TestGetProcesses002 failed");
}
// Both GetProcessesByName overloads must find the current process by name.
[Fact]
public void TestGetProcessesByName()
{
    // Get the current process using its name
    Process currentProcess = Process.GetCurrentProcess();
    // Any() instead of Count() > 0: stops at the first match.
    Assert.True(Process.GetProcessesByName(currentProcess.ProcessName).Any(), "TestGetProcessesByName001 failed");
    // Fixed copy/paste bug: the second assertion previously reused the
    // "...001" failure message, making failures indistinguishable.
    Assert.True(Process.GetProcessesByName(currentProcess.ProcessName, currentProcess.MachineName).Any(), "TestGetProcessesByName002 failed");
}
// MemberData source: pairs the current process with the "remote" view of the
// same process obtained via the 127.0.0.1 machine-name overloads.
public static IEnumerable<object[]> GetTestProcess()
{
    Process currentProcess = Process.GetCurrentProcess();
    yield return new object[] { currentProcess, Process.GetProcessById(currentProcess.Id, "127.0.0.1") };
    yield return new object[] { currentProcess, Process.GetProcessesByName(currentProcess.ProcessName, "127.0.0.1").Where(p => p.Id == currentProcess.Id).Single() };
}
// Gate for the remote-machine tests: reads the registry to determine whether
// the per-process performance counters are enabled; on any failure assumes
// they are. (Name keeps the historical "Peformance" typo because the
// ConditionalTheory attribute below references it via nameof.)
private static bool ProcessPeformanceCounterEnabled()
{
    try
    {
        using (Microsoft.Win32.RegistryKey perfKey = Microsoft.Win32.Registry.LocalMachine.OpenSubKey(@"SYSTEM\CurrentControlSet\Services\PerfProc\Performance"))
        {
            if (perfKey == null)
                return false;
            // "Disable Performance Counters" == 0 or absent means enabled.
            int? value = (int?)perfKey.GetValue("Disable Performance Counters", null);
            return !value.HasValue || value.Value == 0;
        }
    }
    catch (Exception)
    {
        // Ignore exceptions, and just assume the counter is enabled.
    }
    return true;
}
// A process fetched via the remote-machine ("127.0.0.1") overloads must agree
// with the local view on the cheap properties, report the remote machine
// name, and refuse MainModule (remote-only restriction).
[PlatformSpecific(TestPlatforms.Windows)] // Behavior differs on Windows and Unix
[ConditionalTheory(nameof(ProcessPeformanceCounterEnabled))]
[MemberData(nameof(GetTestProcess))]
public void TestProcessOnRemoteMachineWindows(Process currentProcess, Process remoteProcess)
{
    Assert.Equal(currentProcess.Id, remoteProcess.Id);
    Assert.Equal(currentProcess.BasePriority, remoteProcess.BasePriority);
    Assert.Equal(currentProcess.EnableRaisingEvents, remoteProcess.EnableRaisingEvents);
    Assert.Equal("127.0.0.1", remoteProcess.MachineName);
    // This property throws exception only on remote processes.
    Assert.Throws<NotSupportedException>(() => remoteProcess.MainModule);
}
// Remote-machine overloads are unsupported on Unix and must throw
// PlatformNotSupportedException.
[Fact, PlatformSpecific(TestPlatforms.AnyUnix)] // Behavior differs on Windows and Unix
public void TestProcessOnRemoteMachineUnix()
{
    Process currentProcess = Process.GetCurrentProcess();
    Assert.Throws<PlatformNotSupportedException>(() => Process.GetProcessesByName(currentProcess.ProcessName, "127.0.0.1"));
    Assert.Throws<PlatformNotSupportedException>(() => Process.GetProcessById(currentProcess.Id, "127.0.0.1"));
}
// StartInfo contract: readable after Start, not replaceable on a started
// process, settable before Start, never null, and unavailable on a Process
// object this code did not start (GetCurrentProcess).
[Fact]
public void TestStartInfo()
{
    {
        // Readable after Start and reflects the launched file name.
        Process process = CreateProcessLong();
        process.Start();
        Assert.Equal(HostRunner, process.StartInfo.FileName);
        process.Kill();
        Assert.True(process.WaitForExit(WaitInMS));
    }
    {
        // Cannot be replaced once the process has started.
        Process process = CreateProcessLong();
        process.Start();
        Assert.Throws<System.InvalidOperationException>(() => (process.StartInfo = new ProcessStartInfo()));
        process.Kill();
        Assert.True(process.WaitForExit(WaitInMS));
    }
    {
        // Settable before Start.
        Process process = new Process();
        process.StartInfo = new ProcessStartInfo(TestConsoleApp);
        Assert.Equal(TestConsoleApp, process.StartInfo.FileName);
    }
    {
        // Null is rejected.
        Process process = new Process();
        Assert.Throws<ArgumentNullException>(() => process.StartInfo = null);
    }
    {
        // Unavailable for processes not started by this component.
        Process process = Process.GetCurrentProcess();
        Assert.Throws<System.InvalidOperationException>(() => process.StartInfo);
    }
}
// Round-trips a raw argument string through a child process and verifies the
// child observed the expected argv (comma-joined), covering the Windows
// quote/backslash escaping rules.
[Theory]
[InlineData(@"""abc"" d e", @"abc,d,e")]
[InlineData(@"""abc"" d e", @"abc,d,e")]
[InlineData("\"abc\"\t\td\te", @"abc,d,e")]
[InlineData(@"a\\b d""e f""g h", @"a\\b,de fg,h")]
[InlineData(@"\ \\ \\\", @"\,\\,\\\")]
[InlineData(@"a\\\""b c d", @"a\""b,c,d")]
[InlineData(@"a\\\\""b c"" d e", @"a\\b c,d,e")]
[InlineData(@"a""b c""d e""f g""h i""j k""l", @"ab cd,ef gh,ij kl")]
[InlineData(@"a b c""def", @"a,b,cdef")]
[InlineData(@"""\a\"" \\""\\\ b c", @"\a"" \\\\,b,c")]
public void TestArgumentParsing(string inputArguments, string expectedArgv)
{
    using (var handle = RemoteInvokeRaw((Func<string, string, string, int>)ConcatThreeArguments,
        inputArguments,
        new RemoteInvokeOptions { Start = true, StartInfo = new ProcessStartInfo { RedirectStandardOutput = true } }))
    {
        Assert.Equal(expectedArgv, handle.Process.StandardOutput.ReadToEnd());
    }
}
// Child-side helper for TestArgumentParsing: echoes the three parsed argv
// entries comma-separated so the parent can compare against the expected argv.
private static int ConcatThreeArguments(string one, string two, string three)
{
    string joined = string.Join(",", one, two, three);
    Console.Write(joined);
    return SuccessExitCode;
}
// Diagnostic helper, intentionally not a [Fact]: dumps all processes to the
// console, disposing each after use.
// [Fact] // uncomment for diagnostic purposes to list processes to console
public void TestDiagnosticsWithConsoleWriteLine()
{
    foreach (var p in Process.GetProcesses().OrderBy(p => p.Id))
    {
        Console.WriteLine("{0} : \"{1}\" (Threads: {2})", p.Id, p.ProcessName, p.Threads.Count);
        p.Dispose();
    }
}
// An unreferenced Process instance must be finalizable: after a forced GC
// cycle, FinalizingProcess's Dispose(false) path must have run.
[Fact]
public void CanBeFinalized()
{
    FinalizingProcess.CreateAndRelease();
    GC.Collect();
    GC.WaitForPendingFinalizers();
    Assert.True(FinalizingProcess.WasFinalized);
}
// Starting a nonexistent file (relative or absolute path) must throw a
// Win32Exception carrying a native error code.
[Theory]
[InlineData(false)]
[InlineData(true)]
public void TestStartWithMissingFile(bool fullPath)
{
    // Random name guarantees the file does not exist.
    string path = Guid.NewGuid().ToString("N");
    if (fullPath)
    {
        path = Path.GetFullPath(path);
        Assert.True(Path.IsPathRooted(path));
    }
    else
    {
        Assert.False(Path.IsPathRooted(path));
    }
    Assert.False(File.Exists(path));
    Win32Exception e = Assert.Throws<Win32Exception>(() => Process.Start(path));
    Assert.NotEqual(0, e.NativeErrorCode);
}
// On Windows, starting an empty (non-executable) file must fail with a
// Win32Exception (ERROR_BAD_EXE_FORMAT-style failure).
[PlatformSpecific(TestPlatforms.Windows)] // Needs permissions on Unix
// NativeErrorCode not 193 on Windows Nano for ERROR_BAD_EXE_FORMAT, issue #10290
[ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsNanoServer))]
public void TestStartOnWindowsWithBadFileFormat()
{
    string path = GetTestFilePath();
    File.Create(path).Dispose();
    Win32Exception e = Assert.Throws<Win32Exception>(() => Process.Start(path));
    Assert.NotEqual(0, e.NativeErrorCode);
}
// On Unix, starting a file without execute permission must fail with a
// Win32Exception carrying a native error code.
[PlatformSpecific(TestPlatforms.AnyUnix)] // Uses P/Invokes to set permissions
[Fact]
public void TestStartOnUnixWithBadPermissions()
{
    string path = GetTestFilePath();
    File.Create(path).Dispose();
    // chmod(2) takes an octal mode, but C# has no octal literals; the original
    // passed decimal 644 (== octal 1204), not the intended rw-r--r--.
    Assert.Equal(0, chmod(path, 0x1A4)); // 0x1A4 == octal 644: no execute permissions
    Win32Exception e = Assert.Throws<Win32Exception>(() => Process.Start(path));
    Assert.NotEqual(0, e.NativeErrorCode);
}
// On Unix, an executable-but-empty file starts (via the shell fallback) but
// must exit with a non-zero code because it is not a valid program.
[PlatformSpecific(TestPlatforms.AnyUnix)] // Uses P/Invokes to set permissions
[Fact]
public void TestStartOnUnixWithBadFormat()
{
    string path = GetTestFilePath();
    File.Create(path).Dispose();
    // chmod(2) takes an octal mode, but C# has no octal literals; the original
    // passed decimal 744 (== octal 1350), not the intended rwxr--r--.
    Assert.Equal(0, chmod(path, 0x1E4)); // 0x1E4 == octal 744: execute permissions
    using (Process p = Process.Start(path))
    {
        p.WaitForExit();
        Assert.NotEqual(0, p.ExitCode);
    }
}
// P/Invoke to chmod(2); mode is the raw numeric mode bits (octal semantics).
[DllImport("libc")]
private static extern int chmod(string path, int mode);
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace System.Globalization
{
// List of calendar data
// Note the we cache overrides.
// Note that localized names (resource names) aren't available from here.
//
// NOTE: Calendars depend on the locale name that creates it. Only a few
// properties are available without locales using CalendarData.GetCalendar(CalendarData)
internal partial class CalendarData
{
// Max calendars
internal const int MAX_CALENDARS = 23;
// Identity
internal String sNativeName; // Calendar Name for the locale
// Formats
internal String[] saShortDates; // Short Data format, default first
internal String[] saYearMonths; // Year/Month Data format, default first
internal String[] saLongDates; // Long Data format, default first
internal String sMonthDay; // Month/Day format
// Calendar Parts Names
internal String[] saEraNames; // Names of Eras
internal String[] saAbbrevEraNames; // Abbreviated Era Names
internal String[] saAbbrevEnglishEraNames; // Abbreviated Era Names in English
internal String[] saDayNames; // Day Names, null to use locale data, starts on Sunday
internal String[] saAbbrevDayNames; // Abbrev Day Names, null to use locale data, starts on Sunday
internal String[] saSuperShortDayNames; // Super short Day of week names
internal String[] saMonthNames; // Month Names (13)
internal String[] saAbbrevMonthNames; // Abbrev Month Names (13)
internal String[] saMonthGenitiveNames; // Genitive Month Names (13)
internal String[] saAbbrevMonthGenitiveNames; // Genitive Abbrev Month Names (13)
internal String[] saLeapYearMonthNames; // Multiple strings for the month names in a leap year.
// Integers at end to make marshaller happier
internal int iTwoDigitYearMax = 2029; // Max 2 digit year (for Y2K bug data entry)
internal int iCurrentEra = 0; // current era # (usually 1)
// Use overrides?
internal bool bUseUserOverrides; // True if we want user overrides.
// Static invariant for the invariant locale
internal static readonly CalendarData Invariant = CreateInvariant();
// Private constructor
private CalendarData() { }
// Invariant factory
// Builds the hard-coded invariant (US English Gregorian) calendar data used
// both as the invariant-culture calendar and as a fallback when OS data is
// missing. Deliberately loads no resources: invariant data never changes.
private static CalendarData CreateInvariant()
{
    // Set our default/gregorian US calendar data
    // Calendar IDs are 1-based, arrays are 0 based.
    CalendarData invariant = new CalendarData();
    // Set default data for calendar
    // Note that we don't load resources since this IS NOT supposed to change (by definition)
    invariant.sNativeName = "Gregorian Calendar"; // Calendar Name
    // Year
    invariant.iTwoDigitYearMax = 2029; // Max 2 digit year (for Y2K bug data entry)
    invariant.iCurrentEra = 1; // Current era #
    // Formats
    invariant.saShortDates = new String[] { "MM/dd/yyyy", "yyyy-MM-dd" }; // short date format
    invariant.saLongDates = new String[] { "dddd, dd MMMM yyyy" }; // long date format
    invariant.saYearMonths = new String[] { "yyyy MMMM" }; // year month format
    invariant.sMonthDay = "MMMM dd"; // Month day pattern
    // Calendar Parts Names
    invariant.saEraNames = new String[] { "A.D." }; // Era names
    invariant.saAbbrevEraNames = new String[] { "AD" }; // Abbreviated Era names
    invariant.saAbbrevEnglishEraNames = new String[] { "AD" }; // Abbreviated era names in English
    invariant.saDayNames = new String[] { "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" };// day names
    invariant.saAbbrevDayNames = new String[] { "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" }; // abbreviated day names
    invariant.saSuperShortDayNames = new String[] { "Su", "Mo", "Tu", "We", "Th", "Fr", "Sa" }; // The super short day names
    // Month arrays carry a 13th empty slot for 13-month calendars.
    invariant.saMonthNames = new String[] { "January", "February", "March", "April", "May", "June",
                                            "July", "August", "September", "October", "November", "December", String.Empty}; // month names
    invariant.saAbbrevMonthNames = new String[] { "Jan", "Feb", "Mar", "Apr", "May", "Jun",
                                                  "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", String.Empty}; // abbreviated month names
    invariant.saMonthGenitiveNames = invariant.saMonthNames; // Genitive month names (same as month names for invariant)
    invariant.saAbbrevMonthGenitiveNames = invariant.saAbbrevMonthNames; // Abbreviated genitive month names (same as abbrev month names for invariant)
    invariant.saLeapYearMonthNames = invariant.saMonthNames; // leap year month names are unused in Gregorian English (invariant)
    invariant.bUseUserOverrides = false;
    return invariant;
}
//
// Get a bunch of data for a calendar
//
// Loads calendar data for a (locale, calendar) pair from the OS, backfilling
// any missing pieces from the hard-coded Invariant data, then normalizes the
// Taiwan native name, genitive/leap-month fallbacks, and era names.
internal CalendarData(String localeName, CalendarId calendarId, bool bUseUserOverrides)
{
    this.bUseUserOverrides = bUseUserOverrides;
    Debug.Assert(!GlobalizationMode.Invariant);
    if (!LoadCalendarDataFromSystem(localeName, calendarId))
    {
        Debug.Fail("[CalendarData] LoadCalendarDataFromSystem call isn't expected to fail for calendar " + calendarId + " locale " + localeName);
        // Something failed, try invariant for missing parts
        // This is really not good, but we don't want the callers to crash.
        if (this.sNativeName == null) this.sNativeName = String.Empty; // Calendar Name for the locale.
        // Formats
        if (this.saShortDates == null) this.saShortDates = Invariant.saShortDates; // Short Data format, default first
        if (this.saYearMonths == null) this.saYearMonths = Invariant.saYearMonths; // Year/Month Data format, default first
        if (this.saLongDates == null) this.saLongDates = Invariant.saLongDates; // Long Data format, default first
        if (this.sMonthDay == null) this.sMonthDay = Invariant.sMonthDay; // Month/Day format
        // Calendar Parts Names
        if (this.saEraNames == null) this.saEraNames = Invariant.saEraNames; // Names of Eras
        if (this.saAbbrevEraNames == null) this.saAbbrevEraNames = Invariant.saAbbrevEraNames; // Abbreviated Era Names
        if (this.saAbbrevEnglishEraNames == null) this.saAbbrevEnglishEraNames = Invariant.saAbbrevEnglishEraNames; // Abbreviated Era Names in English
        if (this.saDayNames == null) this.saDayNames = Invariant.saDayNames; // Day Names, null to use locale data, starts on Sunday
        if (this.saAbbrevDayNames == null) this.saAbbrevDayNames = Invariant.saAbbrevDayNames; // Abbrev Day Names, null to use locale data, starts on Sunday
        if (this.saSuperShortDayNames == null) this.saSuperShortDayNames = Invariant.saSuperShortDayNames; // Super short Day of week names
        if (this.saMonthNames == null) this.saMonthNames = Invariant.saMonthNames; // Month Names (13)
        if (this.saAbbrevMonthNames == null) this.saAbbrevMonthNames = Invariant.saAbbrevMonthNames; // Abbrev Month Names (13)
        // Genitive and Leap names can follow the fallback below
    }
    if (calendarId == CalendarId.TAIWAN)
    {
        if (SystemSupportsTaiwaneseCalendar())
        {
            // We got the month/day names from the OS (same as gregorian), but the native name is wrong
            this.sNativeName = "\x4e2d\x83ef\x6c11\x570b\x66c6";
        }
        else
        {
            this.sNativeName = String.Empty;
        }
    }
    // Check for null genitive names (in case unmanaged side skips it for non-gregorian calendars, etc)
    if (this.saMonthGenitiveNames == null || this.saMonthGenitiveNames.Length == 0 || String.IsNullOrEmpty(this.saMonthGenitiveNames[0]))
        this.saMonthGenitiveNames = this.saMonthNames; // Genitive month names (same as month names for invariant)
    if (this.saAbbrevMonthGenitiveNames == null || this.saAbbrevMonthGenitiveNames.Length == 0 || String.IsNullOrEmpty(this.saAbbrevMonthGenitiveNames[0]))
        this.saAbbrevMonthGenitiveNames = this.saAbbrevMonthNames; // Abbreviated genitive month names (same as abbrev month names for invariant)
    if (this.saLeapYearMonthNames == null || this.saLeapYearMonthNames.Length == 0 || String.IsNullOrEmpty(this.saLeapYearMonthNames[0]))
        this.saLeapYearMonthNames = this.saMonthNames;
    InitializeEraNames(localeName, calendarId);
    InitializeAbbreviatedEraNames(localeName, calendarId);
    // Abbreviated English Era Names are only used for the Japanese calendar.
    if (calendarId == CalendarId.JAPAN)
    {
        this.saAbbrevEnglishEraNames = JapaneseCalendar.EnglishEraNames();
    }
    else
    {
        // For all others just use the an empty string (doesn't matter we'll never ask for it for other calendars)
        this.saAbbrevEnglishEraNames = new String[] { "" };
    }
    // Japanese is the only thing with > 1 era. Its current era # is how many ever
    // eras are in the array. (And the others all have 1 string in the array)
    this.iCurrentEra = this.saEraNames.Length;
}
// Assigns saEraNames per calendar: Gregorian/Persian keep OS data when
// present and fall back to constants; all other calendars use fixed strings
// (escape sequences are the native-script era names).
private void InitializeEraNames(string localeName, CalendarId calendarId)
{
    // Note that the saEraNames only include "A.D." We don't have localized names for other calendars available from windows
    switch (calendarId)
    {
        // For Localized Gregorian we really expect the data from the OS.
        case CalendarId.GREGORIAN:
            // Fallback for CoreCLR < Win7 or culture.dll missing
            if (this.saEraNames == null || this.saEraNames.Length == 0 || String.IsNullOrEmpty(this.saEraNames[0]))
            {
                this.saEraNames = new String[] { "A.D." };
            }
            break;
        // The rest of the calendars have constant data, so we'll just use that
        case CalendarId.GREGORIAN_US:
        case CalendarId.JULIAN:
            this.saEraNames = new String[] { "A.D." };
            break;
        case CalendarId.HEBREW:
            this.saEraNames = new String[] { "C.E." };
            break;
        case CalendarId.HIJRI:
        case CalendarId.UMALQURA:
            if (localeName == "dv-MV")
            {
                // Special case for Divehi
                this.saEraNames = new String[] { "\x0780\x07a8\x0796\x07b0\x0783\x07a9" };
            }
            else
            {
                this.saEraNames = new String[] { "\x0628\x0639\x062F \x0627\x0644\x0647\x062C\x0631\x0629" };
            }
            break;
        case CalendarId.GREGORIAN_ARABIC:
        case CalendarId.GREGORIAN_XLIT_ENGLISH:
        case CalendarId.GREGORIAN_XLIT_FRENCH:
            // These are all the same:
            this.saEraNames = new String[] { "\x0645" };
            break;
        case CalendarId.GREGORIAN_ME_FRENCH:
            this.saEraNames = new String[] { "ap. J.-C." };
            break;
        case CalendarId.TAIWAN:
            if (SystemSupportsTaiwaneseCalendar())
            {
                this.saEraNames = new String[] { "\x4e2d\x83ef\x6c11\x570b" };
            }
            else
            {
                this.saEraNames = new String[] { String.Empty };
            }
            break;
        case CalendarId.KOREA:
            this.saEraNames = new String[] { "\xb2e8\xae30" };
            break;
        case CalendarId.THAI:
            this.saEraNames = new String[] { "\x0e1e\x002e\x0e28\x002e" };
            break;
        case CalendarId.JAPAN:
        case CalendarId.JAPANESELUNISOLAR:
            // Japanese era names come from the (updatable) JapaneseCalendar data.
            this.saEraNames = JapaneseCalendar.EraNames();
            break;
        case CalendarId.PERSIAN:
            if (this.saEraNames == null || this.saEraNames.Length == 0 || String.IsNullOrEmpty(this.saEraNames[0]))
            {
                this.saEraNames = new String[] { "\x0647\x002e\x0634" };
            }
            break;
        default:
            // Most calendars are just "A.D."
            this.saEraNames = Invariant.saEraNames;
            break;
    }
}
// Assigns saAbbrevEraNames per calendar, mirroring InitializeEraNames; the
// Taiwan case derives the abbreviation from the full era name set earlier,
// so this must run after InitializeEraNames.
private void InitializeAbbreviatedEraNames(string localeName, CalendarId calendarId)
{
    // Note that the saAbbrevEraNames only include "AD" We don't have localized names for other calendars available from windows
    switch (calendarId)
    {
        // For Localized Gregorian we really expect the data from the OS.
        case CalendarId.GREGORIAN:
            // Fallback for CoreCLR < Win7 or culture.dll missing
            if (this.saAbbrevEraNames == null || this.saAbbrevEraNames.Length == 0 || String.IsNullOrEmpty(this.saAbbrevEraNames[0]))
            {
                this.saAbbrevEraNames = new String[] { "AD" };
            }
            break;
        // The rest of the calendars have constant data, so we'll just use that
        case CalendarId.GREGORIAN_US:
        case CalendarId.JULIAN:
            this.saAbbrevEraNames = new String[] { "AD" };
            break;
        case CalendarId.JAPAN:
        case CalendarId.JAPANESELUNISOLAR:
            this.saAbbrevEraNames = JapaneseCalendar.AbbrevEraNames();
            break;
        case CalendarId.HIJRI:
        case CalendarId.UMALQURA:
            if (localeName == "dv-MV")
            {
                // Special case for Divehi
                this.saAbbrevEraNames = new String[] { "\x0780\x002e" };
            }
            else
            {
                this.saAbbrevEraNames = new String[] { "\x0647\x0640" };
            }
            break;
        case CalendarId.TAIWAN:
            // Get era name and abbreviate it
            this.saAbbrevEraNames = new String[1];
            if (this.saEraNames[0].Length == 4)
            {
                // Drop the first two characters of the 4-character native name.
                this.saAbbrevEraNames[0] = this.saEraNames[0].Substring(2, 2);
            }
            else
            {
                this.saAbbrevEraNames[0] = this.saEraNames[0];
            }
            break;
        case CalendarId.PERSIAN:
            if (this.saAbbrevEraNames == null || this.saAbbrevEraNames.Length == 0 || String.IsNullOrEmpty(this.saAbbrevEraNames[0]))
            {
                this.saAbbrevEraNames = this.saEraNames;
            }
            break;
        default:
            // Most calendars just use the full name
            this.saAbbrevEraNames = this.saEraNames;
            break;
    }
}
// Gets the data for a calendar id. Calendar data is only reachable through a
// locale in the OS, so we map the calendar to a representative culture and
// pull the calendar from that culture's data.
internal static CalendarData GetCalendarData(CalendarId calendarId)
{
    // TODO: Note that this doesn't handle the new calendars (lunisolar, etc)
    String cultureName = CalendarIdToCultureName(calendarId);
    CultureInfo culture = CultureInfo.GetCultureInfo(cultureName);
    return culture._cultureData.GetCalendar(calendarId);
}
/// <summary>
/// Maps a calendar id to the name of a culture whose OS locale data carries
/// that calendar; unrecognized calendars default to Gregorian "en-US".
/// </summary>
private static String CalendarIdToCultureName(CalendarId calendarId)
{
    switch (calendarId)
    {
        case CalendarId.GREGORIAN_US:
            return "fa-IR";                     // Iran
        case CalendarId.JAPAN:
            return "ja-JP";                     // Japan
        case CalendarId.TAIWAN:
            return "zh-TW";                     // Taiwan
        case CalendarId.KOREA:
            return "ko-KR";                     // Korea
        case CalendarId.HIJRI:
        case CalendarId.GREGORIAN_ARABIC:
        case CalendarId.UMALQURA:
            return "ar-SA";                     // Saudi Arabia
        case CalendarId.THAI:
            return "th-TH";                     // Thailand
        case CalendarId.HEBREW:
            return "he-IL";                     // Israel
        case CalendarId.GREGORIAN_ME_FRENCH:
            return "ar-DZ";                     // Algeria
        case CalendarId.GREGORIAN_XLIT_ENGLISH:
        case CalendarId.GREGORIAN_XLIT_FRENCH:
            return "ar-IQ";                     // Iraq
        default:
            // Default to Gregorian en-US.
            return "en-US";
    }
}
}
}
| |
namespace KabMan.Client.Modules.Switch
{
// Windows Forms designer partial class for the "switch detail list update"
// dialog. The layout below is serialized by the designer — do not hand-edit
// InitializeComponent; changes there will be overwritten on regeneration.
partial class frmSwitchDetailListeUpdate
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule2 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule3 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
this.layoutControlItem2 = new DevExpress.XtraLayout.LayoutControlItem();
this.txtKabelURMLC = new DevExpress.XtraEditors.TextEdit();
this.layoutControl1 = new DevExpress.XtraLayout.LayoutControl();
this.txtPatchcabel = new DevExpress.XtraEditors.TextEdit();
this.txtCableMultiTrunk = new DevExpress.XtraEditors.TextEdit();
this.btnSchliessen = new DevExpress.XtraEditors.SimpleButton();
this.btnSpeichern = new DevExpress.XtraEditors.SimpleButton();
this.txtVTPort = new DevExpress.XtraEditors.TextEdit();
this.txtBlech = new DevExpress.XtraEditors.TextEdit();
this.layoutControlGroup1 = new DevExpress.XtraLayout.LayoutControlGroup();
this.layoutControlItem1 = new DevExpress.XtraLayout.LayoutControlItem();
this.emptySpaceItem1 = new DevExpress.XtraLayout.EmptySpaceItem();
this.layoutControlItem5 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem7 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem8 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem9 = new DevExpress.XtraLayout.LayoutControlItem();
this.layoutControlItem3 = new DevExpress.XtraLayout.LayoutControlItem();
this.dxValidationProvider1 = new DevExpress.XtraEditors.DXErrorProvider.DXValidationProvider(this.components);
this.styleController1 = new DevExpress.XtraEditors.StyleController(this.components);
this.layoutControlItem4 = new DevExpress.XtraLayout.LayoutControlItem();
this.lkpSAN = new DevExpress.XtraEditors.LookUpEdit();
this.txtFinishUpdate = new DevExpress.XtraEditors.TextEdit();
this.layoutControlItem10 = new DevExpress.XtraLayout.LayoutControlItem();
this.textEdit1 = new DevExpress.XtraEditors.TextEdit();
this.layoutControlItem6 = new DevExpress.XtraLayout.LayoutControlItem();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.txtKabelURMLC.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).BeginInit();
this.layoutControl1.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.txtPatchcabel.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.txtCableMultiTrunk.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.txtVTPort.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.txtBlech.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.emptySpaceItem1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem5)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem7)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem8)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem9)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.dxValidationProvider1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.styleController1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem4)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.lkpSAN.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.txtFinishUpdate.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem10)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.textEdit1.Properties)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem6)).BeginInit();
this.SuspendLayout();
//
// layoutControlItem2
//
this.layoutControlItem2.Control = this.txtKabelURMLC;
this.layoutControlItem2.CustomizationFormText = "Kabel URM-LC";
this.layoutControlItem2.Location = new System.Drawing.Point(0, 31);
this.layoutControlItem2.Name = "layoutControlItem2";
this.layoutControlItem2.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem2.Text = "Cable LC-URM";
this.layoutControlItem2.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem2.TextSize = new System.Drawing.Size(86, 20);
//
// txtKabelURMLC
//
this.txtKabelURMLC.Location = new System.Drawing.Point(98, 38);
this.txtKabelURMLC.Name = "txtKabelURMLC";
this.txtKabelURMLC.Size = new System.Drawing.Size(231, 20);
this.txtKabelURMLC.StyleController = this.layoutControl1;
this.txtKabelURMLC.TabIndex = 12;
//
// layoutControl1
//
this.layoutControl1.AllowCustomizationMenu = false;
this.layoutControl1.Controls.Add(this.textEdit1);
this.layoutControl1.Controls.Add(this.txtFinishUpdate);
this.layoutControl1.Controls.Add(this.lkpSAN);
this.layoutControl1.Controls.Add(this.txtPatchcabel);
this.layoutControl1.Controls.Add(this.txtCableMultiTrunk);
this.layoutControl1.Controls.Add(this.txtKabelURMLC);
this.layoutControl1.Controls.Add(this.btnSchliessen);
this.layoutControl1.Controls.Add(this.btnSpeichern);
this.layoutControl1.Controls.Add(this.txtVTPort);
this.layoutControl1.Controls.Add(this.txtBlech);
this.layoutControl1.Dock = System.Windows.Forms.DockStyle.Fill;
this.layoutControl1.HiddenItems.AddRange(new DevExpress.XtraLayout.BaseLayoutItem[] {
this.layoutControlItem4});
this.layoutControl1.Location = new System.Drawing.Point(0, 0);
this.layoutControl1.Name = "layoutControl1";
this.layoutControl1.Root = this.layoutControlGroup1;
this.layoutControl1.Size = new System.Drawing.Size(335, 253);
this.layoutControl1.TabIndex = 1;
this.layoutControl1.Text = "layoutControl1";
//
// txtPatchcabel
//
this.txtPatchcabel.Location = new System.Drawing.Point(98, 162);
this.txtPatchcabel.Name = "txtPatchcabel";
this.txtPatchcabel.Size = new System.Drawing.Size(231, 20);
this.txtPatchcabel.StyleController = this.layoutControl1;
this.txtPatchcabel.TabIndex = 14;
//
// txtCableMultiTrunk
//
this.txtCableMultiTrunk.Location = new System.Drawing.Point(98, 100);
this.txtCableMultiTrunk.Name = "txtCableMultiTrunk";
this.txtCableMultiTrunk.Size = new System.Drawing.Size(231, 20);
this.txtCableMultiTrunk.StyleController = this.layoutControl1;
this.txtCableMultiTrunk.TabIndex = 13;
//
// btnSchliessen
//
this.btnSchliessen.Location = new System.Drawing.Point(260, 224);
this.btnSchliessen.Name = "btnSchliessen";
this.btnSchliessen.Size = new System.Drawing.Size(69, 22);
this.btnSchliessen.StyleController = this.layoutControl1;
this.btnSchliessen.TabIndex = 11;
this.btnSchliessen.Text = "Close";
this.btnSchliessen.Click += new System.EventHandler(this.btnSchliessen_Click);
//
// btnSpeichern
//
this.btnSpeichern.Location = new System.Drawing.Point(179, 224);
this.btnSpeichern.Name = "btnSpeichern";
this.btnSpeichern.Size = new System.Drawing.Size(70, 22);
this.btnSpeichern.StyleController = this.layoutControl1;
this.btnSpeichern.TabIndex = 10;
this.btnSpeichern.Text = "Save";
this.btnSpeichern.Click += new System.EventHandler(this.btnSpeichern_Click);
//
// txtVTPort
//
this.txtVTPort.Location = new System.Drawing.Point(98, 131);
this.txtVTPort.Name = "txtVTPort";
this.txtVTPort.Properties.MaxLength = 20;
this.txtVTPort.Size = new System.Drawing.Size(231, 20);
this.txtVTPort.StyleController = this.layoutControl1;
this.txtVTPort.TabIndex = 8;
conditionValidationRule2.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
conditionValidationRule2.ErrorText = "This value is not valid";
conditionValidationRule2.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
this.dxValidationProvider1.SetValidationRule(this.txtVTPort, conditionValidationRule2);
//
// txtBlech
//
this.txtBlech.Location = new System.Drawing.Point(98, 69);
this.txtBlech.Name = "txtBlech";
this.txtBlech.Properties.MaxLength = 20;
this.txtBlech.Size = new System.Drawing.Size(231, 20);
this.txtBlech.StyleController = this.layoutControl1;
this.txtBlech.TabIndex = 4;
conditionValidationRule3.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
conditionValidationRule3.ErrorText = "This value is not valid";
conditionValidationRule3.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
this.dxValidationProvider1.SetValidationRule(this.txtBlech, conditionValidationRule3);
//
// layoutControlGroup1
//
this.layoutControlGroup1.CustomizationFormText = "layoutControlGroup1";
this.layoutControlGroup1.Items.AddRange(new DevExpress.XtraLayout.BaseLayoutItem[] {
this.layoutControlItem1,
this.emptySpaceItem1,
this.layoutControlItem5,
this.layoutControlItem7,
this.layoutControlItem8,
this.layoutControlItem9,
this.layoutControlItem3,
this.layoutControlItem2,
this.layoutControlItem10,
this.layoutControlItem6});
this.layoutControlGroup1.Location = new System.Drawing.Point(0, 0);
this.layoutControlGroup1.Name = "Root";
this.layoutControlGroup1.Size = new System.Drawing.Size(335, 253);
this.layoutControlGroup1.Spacing = new DevExpress.XtraLayout.Utils.Padding(0, 0, 0, 0);
this.layoutControlGroup1.Text = "Root";
this.layoutControlGroup1.TextVisible = false;
//
// layoutControlItem1
//
this.layoutControlItem1.Control = this.txtBlech;
this.layoutControlItem1.CustomizationFormText = "1HE-Blech";
this.layoutControlItem1.Location = new System.Drawing.Point(0, 62);
this.layoutControlItem1.Name = "layoutControlItem1";
this.layoutControlItem1.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem1.Text = "VT Port - SC Blech";
this.layoutControlItem1.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem1.TextSize = new System.Drawing.Size(86, 20);
//
// emptySpaceItem1
//
this.emptySpaceItem1.CustomizationFormText = "emptySpaceItem1";
this.emptySpaceItem1.Location = new System.Drawing.Point(0, 217);
this.emptySpaceItem1.Name = "emptySpaceItem1";
this.emptySpaceItem1.Size = new System.Drawing.Size(172, 34);
this.emptySpaceItem1.Text = "emptySpaceItem1";
this.emptySpaceItem1.TextSize = new System.Drawing.Size(0, 0);
//
// layoutControlItem5
//
this.layoutControlItem5.Control = this.txtVTPort;
this.layoutControlItem5.CustomizationFormText = "VT Port";
this.layoutControlItem5.Location = new System.Drawing.Point(0, 124);
this.layoutControlItem5.Name = "layoutControlItem5";
this.layoutControlItem5.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem5.Text = "VT Port Switch";
this.layoutControlItem5.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem5.TextSize = new System.Drawing.Size(86, 20);
//
// layoutControlItem7
//
this.layoutControlItem7.Control = this.btnSpeichern;
this.layoutControlItem7.CustomizationFormText = "layoutControlItem7";
this.layoutControlItem7.Location = new System.Drawing.Point(172, 217);
this.layoutControlItem7.Name = "layoutControlItem7";
this.layoutControlItem7.Size = new System.Drawing.Size(81, 34);
this.layoutControlItem7.Text = "layoutControlItem7";
this.layoutControlItem7.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem7.TextSize = new System.Drawing.Size(0, 0);
this.layoutControlItem7.TextToControlDistance = 0;
this.layoutControlItem7.TextVisible = false;
//
// layoutControlItem8
//
this.layoutControlItem8.Control = this.btnSchliessen;
this.layoutControlItem8.CustomizationFormText = "layoutControlItem8";
this.layoutControlItem8.Location = new System.Drawing.Point(253, 217);
this.layoutControlItem8.Name = "layoutControlItem8";
this.layoutControlItem8.Size = new System.Drawing.Size(80, 34);
this.layoutControlItem8.Text = "layoutControlItem8";
this.layoutControlItem8.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem8.TextSize = new System.Drawing.Size(0, 0);
this.layoutControlItem8.TextToControlDistance = 0;
this.layoutControlItem8.TextVisible = false;
//
// layoutControlItem9
//
this.layoutControlItem9.Control = this.txtCableMultiTrunk;
this.layoutControlItem9.CustomizationFormText = "Kabel Multi Trunk";
this.layoutControlItem9.Location = new System.Drawing.Point(0, 93);
this.layoutControlItem9.Name = "layoutControlItem9";
this.layoutControlItem9.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem9.Text = "Trunk Cable";
this.layoutControlItem9.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem9.TextSize = new System.Drawing.Size(86, 20);
//
// layoutControlItem3
//
this.layoutControlItem3.Control = this.txtPatchcabel;
this.layoutControlItem3.CustomizationFormText = "Patchkabel URM-URM";
this.layoutControlItem3.Location = new System.Drawing.Point(0, 155);
this.layoutControlItem3.Name = "layoutControlItem3";
this.layoutControlItem3.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem3.Text = "Cable URM-URM";
this.layoutControlItem3.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem3.TextSize = new System.Drawing.Size(86, 20);
//
// dxValidationProvider1
//
this.dxValidationProvider1.ValidationMode = DevExpress.XtraEditors.DXErrorProvider.ValidationMode.Manual;
//
// layoutControlItem4
//
this.layoutControlItem4.Control = this.lkpSAN;
this.layoutControlItem4.CustomizationFormText = "SAN";
this.layoutControlItem4.Location = new System.Drawing.Point(0, 155);
this.layoutControlItem4.Name = "layoutControlItem4";
this.layoutControlItem4.Size = new System.Drawing.Size(717, 31);
this.layoutControlItem4.Text = "SAN";
this.layoutControlItem4.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem4.TextSize = new System.Drawing.Size(103, 20);
//
// lkpSAN
//
this.lkpSAN.Location = new System.Drawing.Point(115, 162);
this.lkpSAN.Name = "lkpSAN";
this.lkpSAN.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
this.lkpSAN.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
new DevExpress.XtraEditors.Controls.LookUpColumnInfo("SAN", "SAN", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
this.lkpSAN.Properties.NullText = "";
this.lkpSAN.Size = new System.Drawing.Size(598, 20);
this.lkpSAN.StyleController = this.layoutControl1;
this.lkpSAN.TabIndex = 15;
//
// txtFinishUpdate
//
this.txtFinishUpdate.Location = new System.Drawing.Point(98, 193);
this.txtFinishUpdate.Name = "txtFinishUpdate";
this.txtFinishUpdate.Size = new System.Drawing.Size(231, 20);
this.txtFinishUpdate.StyleController = this.layoutControl1;
this.txtFinishUpdate.TabIndex = 2;
//
// layoutControlItem10
//
this.layoutControlItem10.Control = this.txtFinishUpdate;
this.layoutControlItem10.CustomizationFormText = "Finish";
this.layoutControlItem10.Location = new System.Drawing.Point(0, 186);
this.layoutControlItem10.Name = "layoutControlItem10";
this.layoutControlItem10.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem10.Text = "Finish";
this.layoutControlItem10.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem10.TextSize = new System.Drawing.Size(86, 20);
//
// textEdit1
//
this.textEdit1.Location = new System.Drawing.Point(98, 7);
this.textEdit1.Name = "textEdit1";
this.textEdit1.Size = new System.Drawing.Size(231, 20);
this.textEdit1.StyleController = this.layoutControl1;
this.textEdit1.TabIndex = 2;
//
// layoutControlItem6
//
this.layoutControlItem6.Control = this.textEdit1;
this.layoutControlItem6.CustomizationFormText = "Port";
this.layoutControlItem6.Location = new System.Drawing.Point(0, 0);
this.layoutControlItem6.Name = "layoutControlItem6";
this.layoutControlItem6.Size = new System.Drawing.Size(333, 31);
this.layoutControlItem6.Text = "Port";
this.layoutControlItem6.TextLocation = DevExpress.Utils.Locations.Left;
this.layoutControlItem6.TextSize = new System.Drawing.Size(86, 20);
//
// frmSwitchDetailListeUpdate
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(335, 253);
this.Controls.Add(this.layoutControl1);
this.Name = "frmSwitchDetailListeUpdate";
this.Text = "frmSwitchDetailListeUpdate";
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.txtKabelURMLC.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).EndInit();
this.layoutControl1.ResumeLayout(false);
((System.ComponentModel.ISupportInitialize)(this.txtPatchcabel.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.txtCableMultiTrunk.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.txtVTPort.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.txtBlech.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.emptySpaceItem1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem5)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem7)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem8)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem9)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.dxValidationProvider1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.styleController1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem4)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.lkpSAN.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.txtFinishUpdate.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem10)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.textEdit1.Properties)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.layoutControlItem6)).EndInit();
this.ResumeLayout(false);
}
#endregion
// Designer-managed control fields; names mix German (Schliessen/Speichern/
// Blech) and English — presumably historical; do not rename without the
// designer, or the .resx/serialization will drift.
private DevExpress.XtraLayout.LayoutControl layoutControl1;
private DevExpress.XtraEditors.TextEdit txtFinishUpdate;
private DevExpress.XtraEditors.LookUpEdit lkpSAN;
private DevExpress.XtraEditors.TextEdit txtPatchcabel;
private DevExpress.XtraEditors.TextEdit txtCableMultiTrunk;
private DevExpress.XtraEditors.TextEdit txtKabelURMLC;
private DevExpress.XtraEditors.SimpleButton btnSchliessen;
private DevExpress.XtraEditors.SimpleButton btnSpeichern;
private DevExpress.XtraEditors.TextEdit txtVTPort;
private DevExpress.XtraEditors.DXErrorProvider.DXValidationProvider dxValidationProvider1;
private DevExpress.XtraEditors.TextEdit txtBlech;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem4;
private DevExpress.XtraLayout.LayoutControlGroup layoutControlGroup1;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem1;
private DevExpress.XtraLayout.EmptySpaceItem emptySpaceItem1;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem5;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem7;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem8;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem9;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem3;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem2;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem10;
private DevExpress.XtraEditors.StyleController styleController1;
private DevExpress.XtraEditors.TextEdit textEdit1;
private DevExpress.XtraLayout.LayoutControlItem layoutControlItem6;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace System.Net.Sockets
{
internal static partial class SocketPal
{
// The API that uses this information is not supported on *nix, and will throw
// PlatformNotSupportedException instead.
public const int ProtocolInformationSize = 0;
public const bool SupportsMultipleConnectAttempts = false;
private readonly static bool SupportsDualModeIPv4PacketInfo = GetPlatformSupportsDualModeIPv4PacketInfo();
// Asks the native shim (once, via the static field initializer) whether the
// platform can deliver IPv4 packet information on dual-mode sockets.
private static bool GetPlatformSupportsDualModeIPv4PacketInfo() =>
    Interop.Sys.PlatformSupportsDualModeIPv4PacketInfo();
// Translates a native errno-style error code into the managed SocketError.
public static SocketError GetSocketErrorForErrorCode(Interop.Error errorCode) =>
    SocketErrorPal.GetSocketErrorForNativeError(errorCode);
// Throws when a dual-mode IPv6 socket attempts a receive-from style operation
// on a platform that cannot provide IPv4 packet information for it.
public static void CheckDualModeReceiveSupport(Socket socket)
{
    if (SupportsDualModeIPv4PacketInfo)
    {
        return;
    }
    if (socket.AddressFamily == AddressFamily.InterNetworkV6 && socket.DualMode)
    {
        throw new PlatformNotSupportedException(SR.net_sockets_dualmode_receivefrom_notsupported);
    }
}
// Extracts IP packet information (source address + interface index) from a
// received message header; yields an empty value when neither address family
// was requested or the native extraction fails.
private static unsafe IPPacketInformation GetIPPacketInformation(Interop.Sys.MessageHeader* messageHeader, bool isIPv4, bool isIPv6)
{
    if (isIPv4 || isIPv6)
    {
        Interop.Sys.IPPacketInformation nativePacketInfo;
        if (Interop.Sys.TryGetIPPacketInformation(messageHeader, isIPv4, &nativePacketInfo))
        {
            return new IPPacketInformation(nativePacketInfo.Address.GetIPAddress(), nativePacketInfo.InterfaceIndex);
        }
    }
    return default(IPPacketInformation);
}
// Creates a new native socket handle for the given family/type/protocol;
// the returned SocketError reports success or the mapped native failure.
public static SocketError CreateSocket(AddressFamily addressFamily, SocketType socketType, ProtocolType protocolType, out SafeCloseSocket socket) =>
    SafeCloseSocket.CreateSocket(addressFamily, socketType, protocolType, out socket);
// Receives into a single buffer slice via the native recvmsg shim, optionally
// capturing the sender's address. Returns the byte count on success, or -1
// with errno set; socketAddressLen is only updated on success.
private static unsafe int Receive(SafeCloseSocket socket, SocketFlags flags, byte[] buffer, int offset, int count, byte[] socketAddress, ref int socketAddressLen, out SocketFlags receivedFlags, out Interop.Error errno)
{
Debug.Assert(socketAddress != null || socketAddressLen == 0, $"Unexpected values: socketAddress={socketAddress}, socketAddressLen={socketAddressLen}");
long received;
int sockAddrLen = 0;
if (socketAddress != null)
{
sockAddrLen = socketAddressLen;
}
fixed (byte* sockAddr = socketAddress)
fixed (byte* b = buffer)
{
// Single iovec spanning the caller's [offset, offset+count) slice.
var iov = new Interop.Sys.IOVector {
Base = &b[offset],
Count = (UIntPtr)count
};
var messageHeader = new Interop.Sys.MessageHeader {
SocketAddress = sockAddr,
SocketAddressLen = sockAddrLen,
IOVectors = &iov,
IOVectorCount = 1
};
errno = Interop.Sys.ReceiveMessage(socket, &messageHeader, flags, &received);
receivedFlags = messageHeader.Flags;
// Native call may shrink the address length to the actual size written.
sockAddrLen = messageHeader.SocketAddressLen;
}
if (errno != Interop.Error.SUCCESS)
{
return -1;
}
socketAddressLen = sockAddrLen;
return checked((int)received);
}
// Sends a slice of a single buffer via the native sendmsg shim, optionally to
// the given destination address. On success advances offset/count by the bytes
// actually sent (partial sends are possible) and returns that count; on
// failure returns -1 with errno set, leaving offset/count untouched.
private static unsafe int Send(SafeCloseSocket socket, SocketFlags flags, byte[] buffer, ref int offset, ref int count, byte[] socketAddress, int socketAddressLen, out Interop.Error errno)
{
int sent;
int sockAddrLen = 0;
if (socketAddress != null)
{
sockAddrLen = socketAddressLen;
}
fixed (byte* sockAddr = socketAddress)
fixed (byte* b = buffer)
{
// Single iovec covering the caller's remaining slice.
var iov = new Interop.Sys.IOVector {
Base = &b[offset],
Count = (UIntPtr)count
};
var messageHeader = new Interop.Sys.MessageHeader {
SocketAddress = sockAddr,
SocketAddressLen = sockAddrLen,
IOVectors = &iov,
IOVectorCount = 1
};
long bytesSent;
errno = Interop.Sys.SendMessage(socket, &messageHeader, flags, &bytesSent);
sent = checked((int)bytesSent);
}
if (errno != Interop.Error.SUCCESS)
{
return -1;
}
offset += sent;
count -= sent;
return sent;
}
// Sends the remaining portion of a list of buffer segments (starting at
// bufferIndex/offset) in one vectored sendmsg call. On success returns the
// bytes sent and advances bufferIndex/offset past the consumed data; on
// failure returns -1 with errno set.
private static unsafe int Send(SafeCloseSocket socket, SocketFlags flags, IList<ArraySegment<byte>> buffers, ref int bufferIndex, ref int offset, byte[] socketAddress, int socketAddressLen, out Interop.Error errno)
{
// Pin buffers and set up iovecs.
int startIndex = bufferIndex, startOffset = offset;
int sockAddrLen = 0;
if (socketAddress != null)
{
sockAddrLen = socketAddressLen;
}
int maxBuffers = buffers.Count - startIndex;
var handles = new GCHandle[maxBuffers];
var iovecs = new Interop.Sys.IOVector[maxBuffers];
int sent;
// NOTE(review): toSend is accumulated below but never read afterwards —
// looks vestigial; confirm before removing.
int toSend = 0, iovCount = maxBuffers;
try
{
// startOffset only applies to the first segment; later ones start at 0.
for (int i = 0; i < maxBuffers; i++, startOffset = 0)
{
ArraySegment<byte> buffer = buffers[startIndex + i];
Debug.Assert(buffer.Offset + startOffset < buffer.Array.Length, $"Unexpected values: Offset={buffer.Offset}, startOffset={startOffset}, Length={buffer.Array.Length}");
handles[i] = GCHandle.Alloc(buffer.Array, GCHandleType.Pinned);
iovecs[i].Base = &((byte*)handles[i].AddrOfPinnedObject())[buffer.Offset + startOffset];
toSend += (buffer.Count - startOffset);
iovecs[i].Count = (UIntPtr)(buffer.Count - startOffset);
}
// Make the call
fixed (byte* sockAddr = socketAddress)
fixed (Interop.Sys.IOVector* iov = iovecs)
{
var messageHeader = new Interop.Sys.MessageHeader {
SocketAddress = sockAddr,
SocketAddressLen = sockAddrLen,
IOVectors = iov,
IOVectorCount = iovCount
};
long bytesSent;
errno = Interop.Sys.SendMessage(socket, &messageHeader, flags, &bytesSent);
sent = checked((int)bytesSent);
}
}
finally
{
// Free GC handles.
for (int i = 0; i < iovCount; i++)
{
if (handles[i].IsAllocated)
{
handles[i].Free();
}
}
}
if (errno != Interop.Error.SUCCESS)
{
return -1;
}
// Update position.
// Walk forward through the segments, consuming `sent` bytes; a partial
// send may land mid-segment (endOffset) or exactly on a boundary.
int endIndex = bufferIndex, endOffset = offset, unconsumed = sent;
for (; endIndex < buffers.Count && unconsumed > 0; endIndex++, endOffset = 0)
{
int space = buffers[endIndex].Count - endOffset;
if (space > unconsumed)
{
endOffset += unconsumed;
break;
}
unconsumed -= space;
}
bufferIndex = endIndex;
offset = endOffset;
return sent;
}
// Receives into a list of buffer segments in one vectored recvmsg call,
// sizing the iovec set by the number of bytes currently available on the
// socket (at least 1). Returns bytes received or -1 with errno set;
// socketAddressLen is only updated on success.
private static unsafe int Receive(SafeCloseSocket socket, SocketFlags flags, IList<ArraySegment<byte>> buffers, byte[] socketAddress, ref int socketAddressLen, out SocketFlags receivedFlags, out Interop.Error errno)
{
int available;
errno = Interop.Sys.GetBytesAvailable(socket, &available);
if (errno != Interop.Error.SUCCESS)
{
receivedFlags = 0;
return -1;
}
if (available == 0)
{
// Always request at least one byte.
available = 1;
}
// Pin buffers and set up iovecs.
int maxBuffers = buffers.Count;
var handles = new GCHandle[maxBuffers];
var iovecs = new Interop.Sys.IOVector[maxBuffers];
int sockAddrLen = 0;
if (socketAddress != null)
{
sockAddrLen = socketAddressLen;
}
long received = 0;
int toReceive = 0, iovCount = maxBuffers;
try
{
for (int i = 0; i < maxBuffers; i++)
{
ArraySegment<byte> buffer = buffers[i];
handles[i] = GCHandle.Alloc(buffer.Array, GCHandleType.Pinned);
iovecs[i].Base = &((byte*)handles[i].AddrOfPinnedObject())[buffer.Offset];
int space = buffer.Count;
toReceive += space;
if (toReceive >= available)
{
// Trim the last iovec so the total requested equals `available`
// and skip the remaining segments entirely.
iovecs[i].Count = (UIntPtr)(space - (toReceive - available));
toReceive = available;
iovCount = i + 1;
break;
}
iovecs[i].Count = (UIntPtr)space;
}
// Make the call.
fixed (byte* sockAddr = socketAddress)
fixed (Interop.Sys.IOVector* iov = iovecs)
{
var messageHeader = new Interop.Sys.MessageHeader {
SocketAddress = sockAddr,
SocketAddressLen = sockAddrLen,
IOVectors = iov,
IOVectorCount = iovCount
};
errno = Interop.Sys.ReceiveMessage(socket, &messageHeader, flags, &received);
receivedFlags = messageHeader.Flags;
sockAddrLen = messageHeader.SocketAddressLen;
}
}
finally
{
// Free GC handles.
for (int i = 0; i < iovCount; i++)
{
if (handles[i].IsAllocated)
{
handles[i].Free();
}
}
}
if (errno != Interop.Error.SUCCESS)
{
return -1;
}
socketAddressLen = sockAddrLen;
return checked((int)received);
}
// Receives a single-buffer message plus ancillary control data (for IP packet
// information) via recvmsg. The control buffer is stack-allocated and sized
// natively for the requested address families. Returns bytes received or -1
// with errno set; ipPacketInformation is populated even on failure paths where
// the header was filled.
private static unsafe int ReceiveMessageFrom(SafeCloseSocket socket, SocketFlags flags, byte[] buffer, int offset, int count, byte[] socketAddress, ref int socketAddressLen, bool isIPv4, bool isIPv6, out SocketFlags receivedFlags, out IPPacketInformation ipPacketInformation, out Interop.Error errno)
{
Debug.Assert(socketAddress != null, "Expected non-null socketAddress");
int cmsgBufferLen = Interop.Sys.GetControlMessageBufferSize(isIPv4, isIPv6);
var cmsgBuffer = stackalloc byte[cmsgBufferLen];
int sockAddrLen = socketAddressLen;
Interop.Sys.MessageHeader messageHeader;
long received;
fixed (byte* rawSocketAddress = socketAddress)
fixed (byte* b = buffer)
{
var sockAddr = (byte*)rawSocketAddress;
var iov = new Interop.Sys.IOVector {
Base = &b[offset],
Count = (UIntPtr)count
};
messageHeader = new Interop.Sys.MessageHeader {
SocketAddress = sockAddr,
SocketAddressLen = sockAddrLen,
IOVectors = &iov,
IOVectorCount = 1,
ControlBuffer = cmsgBuffer,
ControlBufferLen = cmsgBufferLen
};
errno = Interop.Sys.ReceiveMessage(socket, &messageHeader, flags, &received);
receivedFlags = messageHeader.Flags;
sockAddrLen = messageHeader.SocketAddressLen;
}
// Extract packet info from the control data before checking for errors.
ipPacketInformation = GetIPPacketInformation(&messageHeader, isIPv4, isIPv6);
if (errno != Interop.Error.SUCCESS)
{
return -1;
}
socketAddressLen = sockAddrLen;
return checked((int)received);
}
// Attempts to complete a pending accept. Returns true when the operation has
// finished (successfully, with a fatal error, or because the socket was
// disposed); returns false when the accept would block (EAGAIN/EWOULDBLOCK)
// and should be retried later.
public static unsafe bool TryCompleteAccept(SafeCloseSocket socket, byte[] socketAddress, ref int socketAddressLen, out int acceptedFd, out SocketError errorCode)
{
int fd;
Interop.Error errno;
int sockAddrLen = socketAddressLen;
fixed (byte* rawSocketAddress = socketAddress)
{
try
{
errno = Interop.Sys.Accept(socket, rawSocketAddress, &sockAddrLen, &fd);
}
catch (ObjectDisposedException)
{
// The socket was closed, or is closing.
errorCode = SocketError.OperationAborted;
acceptedFd = -1;
return true;
}
}
if (errno == Interop.Error.SUCCESS)
{
Debug.Assert(fd != -1, "Expected fd != -1");
socketAddressLen = sockAddrLen;
errorCode = SocketError.Success;
acceptedFd = fd;
return true;
}
acceptedFd = -1;
if (errno != Interop.Error.EAGAIN && errno != Interop.Error.EWOULDBLOCK)
{
// Fatal error: surface it and consider the operation complete.
errorCode = GetSocketErrorForErrorCode(errno);
return true;
}
// Would block: not complete yet.
errorCode = SocketError.Success;
return false;
}
/// <summary>
/// Initiates a connect on a non-blocking socket. Returns true when the connect
/// finished immediately (success or failure, reported via <paramref name="errorCode"/>),
/// or false when the connect is in progress (EINPROGRESS) and the caller must wait
/// for writability.
/// </summary>
public static unsafe bool TryStartConnect(SafeCloseSocket socket, byte[] socketAddress, int socketAddressLen, out SocketError errorCode)
{
    Debug.Assert(socketAddress != null, "Expected non-null socketAddress");
    Debug.Assert(socketAddressLen > 0, $"Unexpected socketAddressLen: {socketAddressLen}");

    Interop.Error rc;
    fixed (byte* pAddress = socketAddress)
    {
        rc = Interop.Sys.Connect(socket, pAddress, socketAddressLen);
    }

    if (rc == Interop.Error.EINPROGRESS)
    {
        // Connect started but has not completed yet.
        errorCode = SocketError.Success;
        return false;
    }

    errorCode = rc == Interop.Error.SUCCESS
        ? SocketError.Success
        : GetSocketErrorForErrorCode(rc);
    return true;
}
/// <summary>
/// Checks whether a previously started connect has completed by reading the
/// socket's pending error (SO_ERROR). Returns true when the connect is done
/// (success or failure), false when it is still in progress.
/// </summary>
public static unsafe bool TryCompleteConnect(SafeCloseSocket socket, int socketAddressLen, out SocketError errorCode)
{
    Interop.Error pendingError;
    Interop.Error rc;
    try
    {
        rc = Interop.Sys.GetSocketErrorOption(socket, &pendingError);
    }
    catch (ObjectDisposedException)
    {
        // The socket was closed, or is closing.
        errorCode = SocketError.OperationAborted;
        return true;
    }

    if (rc != Interop.Error.SUCCESS)
    {
        // The only expected failure here is a bad descriptor.
        Debug.Assert(rc == Interop.Error.EBADF, $"Unexpected err: {rc}");
        errorCode = SocketError.SocketError;
        return true;
    }

    if (pendingError == Interop.Error.EINPROGRESS)
    {
        // Still connecting — not done, not an error.
        errorCode = SocketError.Success;
        return false;
    }

    errorCode = pendingError == Interop.Error.SUCCESS
        ? SocketError.Success
        : GetSocketErrorForErrorCode(pendingError);
    return true;
}
/// <summary>Single-buffer receive: forwards to the common implementation with no buffer list.</summary>
public static bool TryCompleteReceiveFrom(SafeCloseSocket socket, byte[] buffer, int offset, int count, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesReceived, out SocketFlags receivedFlags, out SocketError errorCode)
{
    return TryCompleteReceiveFrom(
        socket, buffer, null, offset, count, flags,
        socketAddress, ref socketAddressLen,
        out bytesReceived, out receivedFlags, out errorCode);
}
/// <summary>Buffer-list receive: forwards to the common implementation with no single buffer.</summary>
public static bool TryCompleteReceiveFrom(SafeCloseSocket socket, IList<ArraySegment<byte>> buffers, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesReceived, out SocketFlags receivedFlags, out SocketError errorCode)
{
    return TryCompleteReceiveFrom(
        socket, null, buffers, 0, 0, flags,
        socketAddress, ref socketAddressLen,
        out bytesReceived, out receivedFlags, out errorCode);
}
// Common receive implementation for both single-buffer and buffer-list calls.
// Returns true when the receive completed (data received or fatal error), false
// when it would block (EAGAIN/EWOULDBLOCK) and should be retried on readiness.
public static unsafe bool TryCompleteReceiveFrom(SafeCloseSocket socket, byte[] buffer, IList<ArraySegment<byte>> buffers, int offset, int count, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, out int bytesReceived, out SocketFlags receivedFlags, out SocketError errorCode)
{
try
{
Interop.Error errno;
int received;
// Exactly one of buffer/buffers is non-null, selected by the forwarding overloads.
if (buffer != null)
{
received = Receive(socket, flags, buffer, offset, count, socketAddress, ref socketAddressLen, out receivedFlags, out errno);
}
else
{
received = Receive(socket, flags, buffers, socketAddress, ref socketAddressLen, out receivedFlags, out errno);
}
if (received != -1)
{
bytesReceived = received;
errorCode = SocketError.Success;
return true;
}
bytesReceived = 0;
if (errno != Interop.Error.EAGAIN && errno != Interop.Error.EWOULDBLOCK)
{
// Fatal failure: map errno and report the operation as complete.
errorCode = GetSocketErrorForErrorCode(errno);
return true;
}
// Would block: caller should retry later.
errorCode = SocketError.Success;
return false;
}
catch (ObjectDisposedException)
{
// The socket was closed, or is closing.
bytesReceived = 0;
receivedFlags = 0;
errorCode = SocketError.OperationAborted;
return true;
}
}
// Non-blocking ReceiveMessageFrom attempt: like TryCompleteReceiveFrom but also
// surfaces IPPacketInformation from the message's control data. Returns true when
// complete (success or fatal error), false when the call would block.
public static unsafe bool TryCompleteReceiveMessageFrom(SafeCloseSocket socket, byte[] buffer, int offset, int count, SocketFlags flags, byte[] socketAddress, ref int socketAddressLen, bool isIPv4, bool isIPv6, out int bytesReceived, out SocketFlags receivedFlags, out IPPacketInformation ipPacketInformation, out SocketError errorCode)
{
try
{
Interop.Error errno;
int received = ReceiveMessageFrom(socket, flags, buffer, offset, count, socketAddress, ref socketAddressLen, isIPv4, isIPv6, out receivedFlags, out ipPacketInformation, out errno);
if (received != -1)
{
bytesReceived = received;
errorCode = SocketError.Success;
return true;
}
bytesReceived = 0;
if (errno != Interop.Error.EAGAIN && errno != Interop.Error.EWOULDBLOCK)
{
// Fatal failure: map errno and consider the operation done.
errorCode = GetSocketErrorForErrorCode(errno);
return true;
}
// Would block: retry when the socket becomes readable.
errorCode = SocketError.Success;
return false;
}
catch (ObjectDisposedException)
{
// The socket was closed, or is closing.
bytesReceived = 0;
receivedFlags = 0;
ipPacketInformation = default(IPPacketInformation);
errorCode = SocketError.OperationAborted;
return true;
}
}
/// <summary>Single-buffer send: forwards to the common implementation with no buffer list.</summary>
public static bool TryCompleteSendTo(SafeCloseSocket socket, byte[] buffer, ref int offset, ref int count, SocketFlags flags, byte[] socketAddress, int socketAddressLen, ref int bytesSent, out SocketError errorCode)
{
    // bufferIndex is unused for the single-buffer path but required by the core signature.
    int unusedBufferIndex = 0;
    return TryCompleteSendTo(
        socket, buffer, null, ref unusedBufferIndex, ref offset, ref count,
        flags, socketAddress, socketAddressLen, ref bytesSent, out errorCode);
}
/// <summary>Buffer-list send: forwards to the common implementation with no single buffer.</summary>
public static bool TryCompleteSendTo(SafeCloseSocket socket, IList<ArraySegment<byte>> buffers, ref int bufferIndex, ref int offset, SocketFlags flags, byte[] socketAddress, int socketAddressLen, ref int bytesSent, out SocketError errorCode)
{
    // count is unused for the buffer-list path but required by the core signature.
    int unusedCount = 0;
    return TryCompleteSendTo(
        socket, null, buffers, ref bufferIndex, ref offset, ref unusedCount,
        flags, socketAddress, socketAddressLen, ref bytesSent, out errorCode);
}
// Common send implementation. Loops issuing sends until either all data is sent,
// the send would block, or a fatal error occurs. bytesSent accumulates across
// partial sends; offset/count (or bufferIndex/offset) track remaining data.
// Returns true when complete, false when it would block and should be retried.
public static bool TryCompleteSendTo(SafeCloseSocket socket, byte[] buffer, IList<ArraySegment<byte>> buffers, ref int bufferIndex, ref int offset, ref int count, SocketFlags flags, byte[] socketAddress, int socketAddressLen, ref int bytesSent, out SocketError errorCode)
{
for (;;)
{
int sent;
Interop.Error errno;
try
{
if (buffer != null)
{
sent = Send(socket, flags, buffer, ref offset, ref count, socketAddress, socketAddressLen, out errno);
}
else
{
sent = Send(socket, flags, buffers, ref bufferIndex, ref offset, socketAddress, socketAddressLen, out errno);
}
}
catch (ObjectDisposedException)
{
// The socket was closed, or is closing.
errorCode = SocketError.OperationAborted;
return true;
}
if (sent == -1)
{
if (errno != Interop.Error.EAGAIN && errno != Interop.Error.EWOULDBLOCK)
{
// Fatal failure: report the mapped error.
errorCode = GetSocketErrorForErrorCode(errno);
return true;
}
// Would block: caller retries with the ref-updated position when writable.
errorCode = SocketError.Success;
return false;
}
bytesSent += sent;
// Complete when the send made no progress (sent == 0) or all data was consumed:
// count reaches 0 for the single-buffer path, bufferIndex passes the last
// segment for the buffer-list path.
bool isComplete = sent == 0 ||
(buffer != null && count == 0) ||
(buffers != null && bufferIndex == buffers.Count);
if (isComplete)
{
errorCode = SocketError.Success;
return true;
}
}
}
/// <summary>
/// Records the desired blocking mode. Blocking behavior is emulated in managed
/// code via the IsNonBlocking flag, so this always succeeds.
/// </summary>
public static SocketError SetBlocking(SafeCloseSocket handle, bool shouldBlock, out bool willBlock)
{
    willBlock = shouldBlock;
    handle.IsNonBlocking = !shouldBlock;
    return SocketError.Success;
}
/// <summary>Retrieves the socket's local address into <paramref name="buffer"/>; nameLen is updated with the written length.</summary>
public static unsafe SocketError GetSockName(SafeCloseSocket handle, byte[] buffer, ref int nameLen)
{
    int length = nameLen;
    Interop.Error rc;
    fixed (byte* pBuffer = buffer)
    {
        rc = Interop.Sys.GetSockName(handle, pBuffer, &length);
    }
    nameLen = length;

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Queries the number of bytes available to read on the socket.</summary>
public static unsafe SocketError GetAvailable(SafeCloseSocket handle, out int available)
{
    int bytes = 0;
    Interop.Error rc = Interop.Sys.GetBytesAvailable(handle, &bytes);
    available = bytes;

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Retrieves the connected peer's address into <paramref name="buffer"/>; nameLen is updated with the written length.</summary>
public static unsafe SocketError GetPeerName(SafeCloseSocket handle, byte[] buffer, ref int nameLen)
{
    int length = nameLen;
    Interop.Error rc;
    fixed (byte* pBuffer = buffer)
    {
        rc = Interop.Sys.GetPeerName(handle, pBuffer, &length);
    }
    nameLen = length;

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Binds the socket to the address serialized in <paramref name="buffer"/>.</summary>
public static unsafe SocketError Bind(SafeCloseSocket handle, byte[] buffer, int nameLen)
{
    Interop.Error rc;
    fixed (byte* pAddress = buffer)
    {
        rc = Interop.Sys.Bind(handle, pAddress, nameLen);
    }

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Places the socket in the listening state with the given backlog.</summary>
public static SocketError Listen(SafeCloseSocket handle, int backlog)
{
    Interop.Error rc = Interop.Sys.Listen(handle, backlog);
    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>
/// Accepts a pending connection. SafeCloseSocket owns the accept so the new
/// descriptor is wrapped in a safe handle atomically.
/// </summary>
public static SocketError Accept(SafeCloseSocket handle, byte[] buffer, ref int nameLen, out SafeCloseSocket socket)
{
    return SafeCloseSocket.Accept(handle, buffer, ref nameLen, out socket);
}
/// <summary>
/// Connects the socket. Emulated-blocking handles go through the async context
/// (infinite timeout); non-blocking handles attempt an immediate connect and
/// report WouldBlock while the connect is in progress.
/// </summary>
public static SocketError Connect(SafeCloseSocket handle, byte[] socketAddress, int socketAddressLen)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.Connect(socketAddress, socketAddressLen, -1);
    }

    handle.AsyncContext.CheckForPriorConnectFailure();

    SocketError result;
    if (!TryStartConnect(handle, socketAddress, socketAddressLen, out result))
    {
        // Connect started but did not finish synchronously.
        return SocketError.WouldBlock;
    }

    handle.AsyncContext.RegisterConnectResult(result);
    return result;
}
/// <summary>Winsock-style disconnect/reuse is not available on this platform.</summary>
public static SocketError Disconnect(Socket socket, SafeCloseSocket handle, bool reuseSocket)
{
    throw new PlatformNotSupportedException();
}
/// <summary>Sends a list of buffers; emulated-blocking handles wait via the async context.</summary>
public static SocketError Send(SafeCloseSocket handle, IList<ArraySegment<byte>> buffers, SocketFlags socketFlags, out int bytesTransferred)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.Send(buffers, socketFlags, handle.SendTimeout, out bytesTransferred);
    }

    bytesTransferred = 0;
    int segmentIndex = 0;
    int segmentOffset = 0;
    SocketError status;
    bool done = TryCompleteSendTo(handle, buffers, ref segmentIndex, ref segmentOffset, socketFlags, null, 0, ref bytesTransferred, out status);
    return done ? status : SocketError.WouldBlock;
}
/// <summary>Sends a byte range; emulated-blocking handles wait via the async context.</summary>
public static SocketError Send(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, out int bytesTransferred)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.Send(buffer, offset, count, socketFlags, handle.SendTimeout, out bytesTransferred);
    }

    bytesTransferred = 0;
    SocketError status;
    if (TryCompleteSendTo(handle, buffer, ref offset, ref count, socketFlags, null, 0, ref bytesTransferred, out status))
    {
        return status;
    }
    return SocketError.WouldBlock;
}
/// <summary>Sends a byte range to a specific address; emulated-blocking handles wait via the async context.</summary>
public static SocketError SendTo(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, byte[] socketAddress, int socketAddressLen, out int bytesTransferred)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.SendTo(buffer, offset, count, socketFlags, socketAddress, socketAddressLen, handle.SendTimeout, out bytesTransferred);
    }

    bytesTransferred = 0;
    SocketError status;
    if (TryCompleteSendTo(handle, buffer, ref offset, ref count, socketFlags, socketAddress, socketAddressLen, ref bytesTransferred, out status))
    {
        return status;
    }
    return SocketError.WouldBlock;
}
/// <summary>Receives into a list of buffers; emulated-blocking handles wait via the async context.</summary>
public static SocketError Receive(SafeCloseSocket handle, IList<ArraySegment<byte>> buffers, ref SocketFlags socketFlags, out int bytesTransferred)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.Receive(buffers, ref socketFlags, handle.ReceiveTimeout, out bytesTransferred);
    }

    // No peer address is captured on a connected receive.
    int addressLen = 0;
    SocketError status;
    bool done = TryCompleteReceiveFrom(handle, buffers, socketFlags, null, ref addressLen, out bytesTransferred, out socketFlags, out status);
    return done ? status : SocketError.WouldBlock;
}
/// <summary>Receives into a byte range; emulated-blocking handles wait via the async context.</summary>
public static SocketError Receive(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, out int bytesTransferred)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.Receive(buffer, offset, count, ref socketFlags, handle.ReceiveTimeout, out bytesTransferred);
    }

    // No peer address is captured on a connected receive.
    int addressLen = 0;
    SocketError status;
    bool done = TryCompleteReceiveFrom(handle, buffer, offset, count, socketFlags, null, ref addressLen, out bytesTransferred, out socketFlags, out status);
    return done ? status : SocketError.WouldBlock;
}
/// <summary>
/// Receives a message plus packet information, capturing the sender's address.
/// The incoming socketAddress object is updated in place and returned as
/// receiveAddress.
/// </summary>
public static SocketError ReceiveMessageFrom(Socket socket, SafeCloseSocket handle, byte[] buffer, int offset, int count, ref SocketFlags socketFlags, Internals.SocketAddress socketAddress, out Internals.SocketAddress receiveAddress, out IPPacketInformation ipPacketInformation, out int bytesTransferred)
{
    byte[] addressBuffer = socketAddress.Buffer;
    int addressLen = socketAddress.Size;

    bool isIPv4, isIPv6;
    Socket.GetIPProtocolInformation(socket.AddressFamily, socketAddress, out isIPv4, out isIPv6);

    SocketError status;
    if (!handle.IsNonBlocking)
    {
        status = handle.AsyncContext.ReceiveMessageFrom(buffer, offset, count, ref socketFlags, addressBuffer, ref addressLen, isIPv4, isIPv6, handle.ReceiveTimeout, out ipPacketInformation, out bytesTransferred);
    }
    else if (!TryCompleteReceiveMessageFrom(handle, buffer, offset, count, socketFlags, addressBuffer, ref addressLen, isIPv4, isIPv6, out bytesTransferred, out socketFlags, out ipPacketInformation, out status))
    {
        status = SocketError.WouldBlock;
    }

    socketAddress.InternalSize = addressLen;
    receiveAddress = socketAddress;
    return status;
}
/// <summary>Receives a datagram and its sender address; emulated-blocking handles wait via the async context.</summary>
public static SocketError ReceiveFrom(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, byte[] socketAddress, ref int socketAddressLen, out int bytesTransferred)
{
    if (!handle.IsNonBlocking)
    {
        return handle.AsyncContext.ReceiveFrom(buffer, offset, count, ref socketFlags, socketAddress, ref socketAddressLen, handle.ReceiveTimeout, out bytesTransferred);
    }

    SocketError status;
    bool done = TryCompleteReceiveFrom(handle, buffer, offset, count, socketFlags, socketAddress, ref socketAddressLen, out bytesTransferred, out socketFlags, out status);
    return done ? status : SocketError.WouldBlock;
}
/// <summary>Windows-specific socket IOCTLs have no equivalent on this platform.</summary>
public static SocketError WindowsIoctl(SafeCloseSocket handle, int ioControlCode, byte[] optionInValue, byte[] optionOutValue, out int optionLength)
{
    throw new PlatformNotSupportedException();
}
// Sets an integer-valued socket option. Send/receive timeouts and the IPv4
// multicast interface get special handling; everything else goes straight to
// the native setsockopt wrapper.
public static unsafe SocketError SetSockOpt(SafeCloseSocket handle, SocketOptionLevel optionLevel, SocketOptionName optionName, int optionValue)
{
Interop.Error err;
if (optionLevel == SocketOptionLevel.Socket)
{
if (optionName == SocketOptionName.ReceiveTimeout)
{
// Timeouts are also tracked managed-side; 0 means infinite (-1).
handle.ReceiveTimeout = optionValue == 0 ? -1 : optionValue;
err = Interop.Sys.SetReceiveTimeout(handle, optionValue);
return err == Interop.Error.SUCCESS ? SocketError.Success : GetSocketErrorForErrorCode(err);
}
else if (optionName == SocketOptionName.SendTimeout)
{
handle.SendTimeout = optionValue == 0 ? -1 : optionValue;
err = Interop.Sys.SetSendTimeout(handle, optionValue);
return err == Interop.Error.SUCCESS ? SocketError.Success : GetSocketErrorForErrorCode(err);
}
}
else if (optionLevel == SocketOptionLevel.IP)
{
if (optionName == SocketOptionName.MulticastInterface)
{
// if the value of the IP_MULTICAST_IF is an address in the 0.x.x.x block
// the value is interpreted as an interface index
int interfaceIndex = IPAddress.NetworkToHostOrder(optionValue);
if ((interfaceIndex & 0xff000000) == 0)
{
var opt = new Interop.Sys.IPv4MulticastOption {
MulticastAddress = 0,
LocalAddress = 0,
InterfaceIndex = interfaceIndex
};
err = Interop.Sys.SetIPv4MulticastOption(handle, Interop.Sys.MulticastOption.MULTICAST_IF, &opt);
return err == Interop.Error.SUCCESS ? SocketError.Success : GetSocketErrorForErrorCode(err);
}
}
}
// Generic path: pass the int by address as a 4-byte option payload.
err = Interop.Sys.SetSockOpt(handle, optionLevel, optionName, (byte*)&optionValue, sizeof(int));
return err == Interop.Error.SUCCESS ? SocketError.Success : GetSocketErrorForErrorCode(err);
}
/// <summary>Sets a raw byte-array socket option; a null or empty value is passed as a zero-length payload.</summary>
public static unsafe SocketError SetSockOpt(SafeCloseSocket handle, SocketOptionLevel optionLevel, SocketOptionName optionName, byte[] optionValue)
{
    Interop.Error rc;
    if (optionValue != null && optionValue.Length != 0)
    {
        fixed (byte* pValue = optionValue)
        {
            rc = Interop.Sys.SetSockOpt(handle, optionLevel, optionName, pValue, optionValue.Length);
        }
    }
    else
    {
        rc = Interop.Sys.SetSockOpt(handle, optionLevel, optionName, null, 0);
    }

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Adds or drops IPv4 multicast group membership for the socket.</summary>
public static unsafe SocketError SetMulticastOption(SafeCloseSocket handle, SocketOptionName optionName, MulticastOption optionValue)
{
    Debug.Assert(optionName == SocketOptionName.AddMembership || optionName == SocketOptionName.DropMembership, $"Unexpected optionName: {optionName}");

    Interop.Sys.MulticastOption op = optionName == SocketOptionName.AddMembership
        ? Interop.Sys.MulticastOption.MULTICAST_ADD
        : Interop.Sys.MulticastOption.MULTICAST_DROP;

    // A missing local address means "any local interface".
    IPAddress local = optionValue.LocalAddress ?? IPAddress.Any;
    var nativeOpt = new Interop.Sys.IPv4MulticastOption
    {
        MulticastAddress = unchecked((uint)optionValue.Group.GetAddress()),
        LocalAddress = unchecked((uint)local.GetAddress()),
        InterfaceIndex = optionValue.InterfaceIndex
    };

    Interop.Error rc = Interop.Sys.SetIPv4MulticastOption(handle, op, &nativeOpt);
    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Adds or drops IPv6 multicast group membership for the socket.</summary>
public static unsafe SocketError SetIPv6MulticastOption(SafeCloseSocket handle, SocketOptionName optionName, IPv6MulticastOption optionValue)
{
    Debug.Assert(optionName == SocketOptionName.AddMembership || optionName == SocketOptionName.DropMembership, $"Unexpected optionName={optionName}");

    Interop.Sys.MulticastOption op = optionName == SocketOptionName.AddMembership
        ? Interop.Sys.MulticastOption.MULTICAST_ADD
        : Interop.Sys.MulticastOption.MULTICAST_DROP;

    var nativeOpt = new Interop.Sys.IPv6MulticastOption
    {
        Address = optionValue.Group.GetNativeIPAddress(),
        InterfaceIndex = (int)optionValue.InterfaceIndex
    };

    Interop.Error rc = Interop.Sys.SetIPv6MulticastOption(handle, op, &nativeOpt);
    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Configures SO_LINGER from the managed LingerOption.</summary>
public static unsafe SocketError SetLingerOption(SafeCloseSocket handle, LingerOption optionValue)
{
    var nativeOpt = new Interop.Sys.LingerOption
    {
        OnOff = optionValue.Enabled ? 1 : 0,
        Seconds = optionValue.LingerTime
    };

    Interop.Error rc = Interop.Sys.SetLingerOption(handle, &nativeOpt);
    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>Enables IPv4 packet-information delivery on a dual-mode socket where supported.</summary>
public static void SetReceivingDualModeIPv4PacketInformation(Socket socket)
{
    // NOTE: some platforms (e.g. OS X) do not support receiving IPv4 packet information for packets received
    // on dual-mode sockets. On these platforms, this call is a no-op.
    if (!SupportsDualModeIPv4PacketInfo)
    {
        return;
    }
    socket.SetSocketOption(SocketOptionLevel.IP, SocketOptionName.PacketInformation, true);
}
/// <summary>
/// Reads an integer-valued socket option. Send/receive timeouts come from the
/// managed-side cache (-1/infinite is surfaced as 0); all other options query
/// the native layer.
/// </summary>
public static unsafe SocketError GetSockOpt(SafeCloseSocket handle, SocketOptionLevel optionLevel, SocketOptionName optionName, out int optionValue)
{
    if (optionLevel == SocketOptionLevel.Socket)
    {
        if (optionName == SocketOptionName.ReceiveTimeout)
        {
            int cached = handle.ReceiveTimeout;
            optionValue = cached == -1 ? 0 : cached;
            return SocketError.Success;
        }
        if (optionName == SocketOptionName.SendTimeout)
        {
            int cached = handle.SendTimeout;
            optionValue = cached == -1 ? 0 : cached;
            return SocketError.Success;
        }
    }

    int value = 0;
    int length = sizeof(int);
    Interop.Error rc = Interop.Sys.GetSockOpt(handle, optionLevel, optionName, (byte*)&value, &length);
    optionValue = value;

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }
    return SocketError.Success;
}
/// <summary>
/// Reads a raw byte-array socket option. On success optionLength is updated with
/// the length written by the native call; a null/empty buffer issues a
/// zero-length query.
/// </summary>
public static unsafe SocketError GetSockOpt(SafeCloseSocket handle, SocketOptionLevel optionLevel, SocketOptionName optionName, byte[] optionValue, ref int optionLength)
{
    int length = optionLength;
    Interop.Error rc;
    if (optionValue != null && optionValue.Length != 0)
    {
        fixed (byte* pValue = optionValue)
        {
            rc = Interop.Sys.GetSockOpt(handle, optionLevel, optionName, pValue, &length);
        }
    }
    else
    {
        length = 0;
        rc = Interop.Sys.GetSockOpt(handle, optionLevel, optionName, null, &length);
    }

    if (rc != Interop.Error.SUCCESS)
    {
        return GetSocketErrorForErrorCode(rc);
    }

    optionLength = length;
    return SocketError.Success;
}
/// <summary>Reads the socket's IPv4 multicast membership option into a managed MulticastOption.</summary>
public static unsafe SocketError GetMulticastOption(SafeCloseSocket handle, SocketOptionName optionName, out MulticastOption optionValue)
{
    Debug.Assert(optionName == SocketOptionName.AddMembership || optionName == SocketOptionName.DropMembership, $"Unexpected optionName={optionName}");

    Interop.Sys.MulticastOption op = optionName == SocketOptionName.AddMembership
        ? Interop.Sys.MulticastOption.MULTICAST_ADD
        : Interop.Sys.MulticastOption.MULTICAST_DROP;

    Interop.Sys.IPv4MulticastOption nativeOpt;
    Interop.Error rc = Interop.Sys.GetIPv4MulticastOption(handle, op, &nativeOpt);
    if (rc != Interop.Error.SUCCESS)
    {
        optionValue = default(MulticastOption);
        return GetSocketErrorForErrorCode(rc);
    }

    var group = new IPAddress((long)nativeOpt.MulticastAddress);
    var local = new IPAddress((long)nativeOpt.LocalAddress);
    optionValue = new MulticastOption(group, local)
    {
        InterfaceIndex = nativeOpt.InterfaceIndex
    };
    return SocketError.Success;
}
/// <summary>Reads the socket's IPv6 multicast membership option into a managed IPv6MulticastOption.</summary>
public static unsafe SocketError GetIPv6MulticastOption(SafeCloseSocket handle, SocketOptionName optionName, out IPv6MulticastOption optionValue)
{
    Debug.Assert(optionName == SocketOptionName.AddMembership || optionName == SocketOptionName.DropMembership, $"Unexpected optionName={optionName}");

    Interop.Sys.MulticastOption op = optionName == SocketOptionName.AddMembership
        ? Interop.Sys.MulticastOption.MULTICAST_ADD
        : Interop.Sys.MulticastOption.MULTICAST_DROP;

    Interop.Sys.IPv6MulticastOption nativeOpt;
    Interop.Error rc = Interop.Sys.GetIPv6MulticastOption(handle, op, &nativeOpt);
    if (rc != Interop.Error.SUCCESS)
    {
        optionValue = default(IPv6MulticastOption);
        return GetSocketErrorForErrorCode(rc);
    }

    optionValue = new IPv6MulticastOption(nativeOpt.Address.GetIPAddress(), nativeOpt.InterfaceIndex);
    return SocketError.Success;
}
/// <summary>Reads SO_LINGER into a managed LingerOption.</summary>
public static unsafe SocketError GetLingerOption(SafeCloseSocket handle, out LingerOption optionValue)
{
    var nativeOpt = new Interop.Sys.LingerOption();
    Interop.Error rc = Interop.Sys.GetLingerOption(handle, &nativeOpt);
    if (rc == Interop.Error.SUCCESS)
    {
        optionValue = new LingerOption(nativeOpt.OnOff != 0, nativeOpt.Seconds);
        return SocketError.Success;
    }

    optionValue = default(LingerOption);
    return GetSocketErrorForErrorCode(rc);
}
// Implements Socket.Poll over the native poll() wrapper: maps the SelectMode to
// poll events, waits up to the given number of microseconds (-1 = infinite), and
// reports via status whether the requested condition was signaled.
public static unsafe SocketError Poll(SafeCloseSocket handle, int microseconds, SelectMode mode, out bool status)
{
Interop.Sys.PollEvents inEvent = Interop.Sys.PollEvents.POLLNONE;
switch (mode)
{
case SelectMode.SelectRead: inEvent = Interop.Sys.PollEvents.POLLIN; break;
case SelectMode.SelectWrite: inEvent = Interop.Sys.PollEvents.POLLOUT; break;
case SelectMode.SelectError: inEvent = Interop.Sys.PollEvents.POLLPRI; break;
}
// poll() takes milliseconds; preserve -1 as "wait forever".
int milliseconds = microseconds == -1 ? -1 : microseconds / 1000;
Interop.Sys.PollEvents outEvents;
Interop.Error err = Interop.Sys.Poll(handle, inEvent, milliseconds, out outEvents);
if (err != Interop.Error.SUCCESS)
{
status = false;
return GetSocketErrorForErrorCode(err);
}
// Readability also includes hang-up; error also includes POLLPRI, mirroring
// the event sets requested above.
switch (mode)
{
case SelectMode.SelectRead: status = (outEvents & (Interop.Sys.PollEvents.POLLIN | Interop.Sys.PollEvents.POLLHUP)) != 0; break;
case SelectMode.SelectWrite: status = (outEvents & Interop.Sys.PollEvents.POLLOUT) != 0; break;
case SelectMode.SelectError: status = (outEvents & (Interop.Sys.PollEvents.POLLERR | Interop.Sys.PollEvents.POLLPRI)) != 0; break;
default: status = false; break;
}
return SocketError.Success;
}
// Implements Socket.Select over poll(). Builds one poll-entry array covering all
// three lists, choosing stack allocation for small counts and heap otherwise.
public static unsafe SocketError Select(IList checkRead, IList checkWrite, IList checkError, int microseconds)
{
int checkReadInitialCount = checkRead != null ? checkRead.Count : 0;
int checkWriteInitialCount = checkWrite != null ? checkWrite.Count : 0;
int checkErrorInitialCount = checkError != null ? checkError.Count : 0;
int count = checked(checkReadInitialCount + checkWriteInitialCount + checkErrorInitialCount);
Debug.Assert(count > 0, $"Expected at least one entry.");
// Rather than using the select syscall, we use poll. While this has a mismatch in API from Select and
// requires some translation, it avoids the significant limitation of select only working with file descriptors
// less than FD_SETSIZE, and thus failing arbitrarily depending on the file descriptor value assigned
// by the system. Since poll then expects an array of entries, we try to allocate the array on the stack,
// only falling back to allocating it on the heap if it's deemed too big.
const int StackThreshold = 80; // arbitrary limit to avoid too much space on stack
if (count < StackThreshold)
{
Interop.Sys.PollEvent* eventsOnStack = stackalloc Interop.Sys.PollEvent[count];
return SelectViaPoll(
checkRead, checkReadInitialCount,
checkWrite, checkWriteInitialCount,
checkError, checkErrorInitialCount,
eventsOnStack, count, microseconds);
}
else
{
// Too many entries for the stack: allocate on the heap and pin for the native call.
var eventsOnHeap = new Interop.Sys.PollEvent[count];
fixed (Interop.Sys.PollEvent* eventsOnHeapPtr = eventsOnHeap)
{
return SelectViaPoll(
checkRead, checkReadInitialCount,
checkWrite, checkWriteInitialCount,
checkError, checkErrorInitialCount,
eventsOnHeapPtr, count, microseconds);
}
}
}
// Core of the poll-based Select: fills the events array from the three lists
// (read entries first, then write, then error), performs the poll, and filters
// each list down to only the sockets whose events fired.
private static unsafe SocketError SelectViaPoll(
IList checkRead, int checkReadInitialCount,
IList checkWrite, int checkWriteInitialCount,
IList checkError, int checkErrorInitialCount,
Interop.Sys.PollEvent* events, int eventsLength,
int microseconds)
{
// Add each of the list's contents to the events array
Debug.Assert(eventsLength == checkReadInitialCount + checkWriteInitialCount + checkErrorInitialCount, "Invalid eventsLength");
int offset = 0;
AddToPollArray(events, eventsLength, checkRead, ref offset, Interop.Sys.PollEvents.POLLIN | Interop.Sys.PollEvents.POLLHUP);
AddToPollArray(events, eventsLength, checkWrite, ref offset, Interop.Sys.PollEvents.POLLOUT);
AddToPollArray(events, eventsLength, checkError, ref offset, Interop.Sys.PollEvents.POLLPRI);
Debug.Assert(offset == eventsLength, $"Invalid adds. offset={offset}, eventsLength={eventsLength}.");
// Do the poll
uint triggered = 0;
int milliseconds = microseconds == -1 ? -1 : microseconds / 1000;
Interop.Error err = Interop.Sys.Poll(events, (uint)eventsLength, milliseconds, &triggered);
if (err != Interop.Error.SUCCESS)
{
return GetSocketErrorForErrorCode(err);
}
// Remove from the lists any entries which weren't set
if (triggered == 0)
{
// Timeout with nothing signaled: every list empties.
checkRead?.Clear();
checkWrite?.Clear();
checkError?.Clear();
}
else
{
// Each list's entries occupy a contiguous slice of the array in insertion
// order; pass the (inclusive) end offset of each slice for reverse filtering.
FilterPollList(checkRead, events, checkReadInitialCount - 1, Interop.Sys.PollEvents.POLLIN | Interop.Sys.PollEvents.POLLHUP);
FilterPollList(checkWrite, events, checkWriteInitialCount + checkReadInitialCount - 1, Interop.Sys.PollEvents.POLLOUT);
FilterPollList(checkError, events, checkErrorInitialCount + checkWriteInitialCount + checkReadInitialCount - 1, Interop.Sys.PollEvents.POLLERR | Interop.Sys.PollEvents.POLLPRI);
}
return SocketError.Success;
}
/// <summary>
/// Appends one poll entry (file descriptor + requested events) to <paramref name="arr"/>
/// for each Socket in <paramref name="socketList"/>, advancing <paramref name="arrOffset"/>.
/// Throws <see cref="ArgumentException"/> if an element is not a Socket, and
/// <see cref="ArgumentOutOfRangeException"/> if the list grows past the array.
/// </summary>
private static unsafe void AddToPollArray(Interop.Sys.PollEvent* arr, int arrLength, IList socketList, ref int arrOffset, Interop.Sys.PollEvents events)
{
    if (socketList == null)
        return;

    int listCount = socketList.Count;
    for (int i = 0; i < listCount; i++)
    {
        if (arrOffset >= arrLength)
        {
            Debug.Fail("IList.Count must have been faulty, returning a negative value and/or returning a different value across calls.");
            throw new ArgumentOutOfRangeException(nameof(socketList));
        }

        object item = socketList[i];
        Socket socket = item as Socket;
        if (socket == null)
        {
            // BUGFIX: the message previously formatted socket?.GetType() after socket
            // was known to be null, so it always reported "null"; report the actual
            // element's type so the caller can see what was in the list.
            throw new ArgumentException(SR.Format(SR.net_sockets_select, item?.GetType().FullName ?? "null", typeof(Socket).FullName));
        }

        int fd = (int)socket.SafeHandle.DangerousGetHandle();
        arr[arrOffset++] = new Interop.Sys.PollEvent { Events = events, FileDescriptor = fd };
    }
}
// Removes from socketList every entry whose corresponding poll entry (walking the
// array backwards from arrEndOffset) did not report any of desiredEvents. Iterating
// from the end keeps RemoveAt cheap in the common cases (see note below).
private static unsafe void FilterPollList(IList socketList, Interop.Sys.PollEvent* arr, int arrEndOffset, Interop.Sys.PollEvents desiredEvents)
{
if (socketList == null)
return;
// The Select API requires leaving in the input lists only those sockets that were ready. As such, we need to loop
// through each poll event, and for each that wasn't ready, remove the corresponding Socket from its list. Technically
// this is O(n^2), due to removing from the list requiring shifting down all elements after it. However, this doesn't
// happen with the most common cases. If very few sockets were ready, then as we iterate from the end of the list, each
// removal will typically be O(1) rather than O(n). If most sockets were ready, then we only need to remove a few, in
// which case we're only doing a small number of O(n) shifts. It's only for the intermediate case, where a non-trivial
// number of sockets are ready and a non-trivial number of sockets are not ready that we end up paying the most. We could
// avoid these costs by, for example, allocating a side list that we fill with the sockets that should remain, clearing
// the original list, and then populating the original list with the contents of the side list. That of course has its
// own costs, and so for now we do the "simple" thing. This can be changed in the future as needed.
for (int i = socketList.Count - 1; i >= 0; --i, --arrEndOffset)
{
if (arrEndOffset < 0)
{
Debug.Fail("IList.Count must have been faulty, returning a negative value and/or returning a different value across calls.");
throw new ArgumentOutOfRangeException(nameof(arrEndOffset));
}
if ((arr[arrEndOffset].TriggeredEvents & desiredEvents) == 0)
{
socketList.RemoveAt(i);
}
}
}
/// <summary>Shuts down one or both directions of the socket.</summary>
public static SocketError Shutdown(SafeCloseSocket handle, bool isConnected, bool isDisconnected, SocketShutdown how)
{
    Interop.Error rc = Interop.Sys.Shutdown(handle, how);

    // If shutdown returns ENOTCONN and we think that this socket has ever been connected,
    // ignore the error. This can happen for TCP connections if the underlying connection
    // has reached the CLOSE state. Ignoring the error matches Winsock behavior.
    bool ignorableNotConnected = rc == Interop.Error.ENOTCONN && (isConnected || isDisconnected);
    if (rc == Interop.Error.SUCCESS || ignorableNotConnected)
    {
        return SocketError.Success;
    }
    return GetSocketErrorForErrorCode(rc);
}
/// <summary>Starts an asynchronous connect; completion is reported via the async result's callback.</summary>
public static SocketError ConnectAsync(Socket socket, SafeCloseSocket handle, byte[] socketAddress, int socketAddressLen, ConnectOverlappedAsyncResult asyncResult)
{
    var completion = asyncResult.CompletionCallback;
    return handle.AsyncContext.ConnectAsync(socketAddress, socketAddressLen, completion);
}
/// <summary>Starts an asynchronous send of a byte range.</summary>
public static SocketError SendAsync(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, OverlappedAsyncResult asyncResult)
{
    var completion = asyncResult.CompletionCallback;
    return handle.AsyncContext.SendAsync(buffer, offset, count, socketFlags, completion);
}
/// <summary>Starts an asynchronous send of a buffer list.</summary>
public static SocketError SendAsync(SafeCloseSocket handle, IList<ArraySegment<byte>> buffers, SocketFlags socketFlags, OverlappedAsyncResult asyncResult)
{
    var completion = asyncResult.CompletionCallback;
    return handle.AsyncContext.SendAsync(buffers, socketFlags, completion);
}
/// <summary>Starts an asynchronous send to a specific address.</summary>
public static SocketError SendToAsync(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, Internals.SocketAddress socketAddress, OverlappedAsyncResult asyncResult)
{
    // Keep the address object reachable from the async result for completion processing.
    asyncResult.SocketAddress = socketAddress;
    byte[] addressBuffer = socketAddress.Buffer;
    return handle.AsyncContext.SendToAsync(buffer, offset, count, socketFlags, addressBuffer, socketAddress.Size, asyncResult.CompletionCallback);
}
/// <summary>Starts an asynchronous receive into a byte range.</summary>
public static SocketError ReceiveAsync(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, OverlappedAsyncResult asyncResult)
{
    var completion = asyncResult.CompletionCallback;
    return handle.AsyncContext.ReceiveAsync(buffer, offset, count, socketFlags, completion);
}
/// <summary>Starts an asynchronous receive into a buffer list.</summary>
public static SocketError ReceiveAsync(SafeCloseSocket handle, IList<ArraySegment<byte>> buffers, SocketFlags socketFlags, OverlappedAsyncResult asyncResult)
{
    var completion = asyncResult.CompletionCallback;
    return handle.AsyncContext.ReceiveAsync(buffers, socketFlags, completion);
}
/// <summary>Starts an asynchronous receive that also captures the sender's address.</summary>
public static SocketError ReceiveFromAsync(SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, Internals.SocketAddress socketAddress, OverlappedAsyncResult asyncResult)
{
    // Keep the address object reachable from the async result for completion processing.
    asyncResult.SocketAddress = socketAddress;
    byte[] addressBuffer = socketAddress.Buffer;
    return handle.AsyncContext.ReceiveFromAsync(buffer, offset, count, socketFlags, addressBuffer, socketAddress.InternalSize, asyncResult.CompletionCallback);
}
/// <summary>
/// Starts an asynchronous message receive, capturing the sender's address and
/// passing along which IP protocol version(s) the address family implies.
/// </summary>
public static SocketError ReceiveMessageFromAsync(Socket socket, SafeCloseSocket handle, byte[] buffer, int offset, int count, SocketFlags socketFlags, Internals.SocketAddress socketAddress, ReceiveMessageOverlappedAsyncResult asyncResult)
{
    asyncResult.SocketAddress = socketAddress;

    // NOTE(review): the address family is taken from asyncResult.AsyncObject rather than the
    // 'socket' parameter — presumably the same instance; confirm with callers before changing.
    bool isIPv4;
    bool isIPv6;
    Socket.GetIPProtocolInformation(((Socket)asyncResult.AsyncObject).AddressFamily, socketAddress, out isIPv4, out isIPv6);

    var context = handle.AsyncContext;
    return context.ReceiveMessageFromAsync(buffer, offset, count, socketFlags, socketAddress.Buffer, socketAddress.InternalSize, isIPv4, isIPv6, asyncResult.CompletionCallback);
}
/// <summary>
/// Starts an asynchronous accept. A buffer for the peer's address is allocated
/// here and handed to the async context.
/// </summary>
public static SocketError AcceptAsync(Socket socket, SafeCloseSocket handle, SafeCloseSocket acceptHandle, int receiveSize, int socketAddressSize, AcceptOverlappedAsyncResult asyncResult)
{
    // No pre-created accept socket is expected on this code path.
    Debug.Assert(acceptHandle == null, $"Unexpected acceptHandle: {acceptHandle}");

    // Will receive the connecting peer's address when the accept completes.
    var addressBuffer = new byte[socketAddressSize];
    return handle.AsyncContext.AcceptAsync(addressBuffer, socketAddressSize, asyncResult.CompletionCallback);
}
}
}
| |
// created on 4/3/2003 at 19:45
// Npgsql.NpgsqlBinaryRow.cs
//
// Author:
// Francisco Jr. (fxjrlists@yahoo.com.br)
//
// Copyright (C) 2002 The Npgsql Development Team
// npgsql-general@gborg.postgresql.org
// http://gborg.postgresql.org/project/npgsql/projdisplay.php
//
// Permission to use, copy, modify, and distribute this software and its
// documentation for any purpose, without fee, and without a written
// agreement is hereby granted, provided that the above copyright notice
// and this paragraph and the following two paragraphs appear in all copies.
//
// IN NO EVENT SHALL THE NPGSQL DEVELOPMENT TEAM BE LIABLE TO ANY PARTY
// FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
// INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
// DOCUMENTATION, EVEN IF THE NPGSQL DEVELOPMENT TEAM HAS BEEN ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
// THE NPGSQL DEVELOPMENT TEAM SPECIFICALLY DISCLAIMS ANY WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
// ON AN "AS IS" BASIS, AND THE NPGSQL DEVELOPMENT TEAM HAS NO OBLIGATIONS
// TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
using System;
using System.IO;
using System.Reflection;
using System.Resources;
using System.Text;
using Revenj.DatabasePersistence.Postgres.NpgsqlTypes;
namespace Revenj.DatabasePersistence.Postgres.Npgsql
{
/// <summary>
/// This is the abstract base class for NpgsqlAsciiRow and NpgsqlBinaryRow.
/// </summary>
internal abstract class NpgsqlRow : IStreamOwner
{
    // Field value at the given ordinal; DBNull.Value represents SQL NULL (see IsDBNull below).
    public abstract object this[int index] { get; }
    // Number of fields (columns) in this row.
    public abstract int NumFields { get; }
    // True when the field at the given ordinal is SQL NULL.
    public abstract bool IsDBNull(int index);
    // Releases whatever the concrete row holds (e.g. the underlying stream reader).
    public abstract void Dispose();
    // IDataRecord-style partial reads of binary and character fields.
    public abstract long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length);
    public abstract long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length);
}
/// <summary>
/// A row whose fields have all been materialized into memory, allowing
/// random (non-sequential, repeatable) access.
/// </summary>
internal sealed class CachingRow : NpgsqlRow
{
    // Eagerly captured field values, indexed by ordinal.
    private readonly object[] _values;
    private readonly int _fieldCount;

    /// <summary>
    /// Drains every field of <paramref name="fo"/> into an array, then disposes it.
    /// </summary>
    public CachingRow(ForwardsOnlyRow fo)
    {
        _fieldCount = fo.NumFields;
        _values = new object[_fieldCount];
        for (int ordinal = 0; ordinal < _fieldCount; ordinal++)
        {
            _values[ordinal] = fo[ordinal];
        }
        fo.Dispose();
    }

    public override object this[Int32 index]
    {
        get
        {
            bool outOfRange = index < 0 || index >= NumFields;
            if (outOfRange)
            {
                throw new IndexOutOfRangeException("this[] index value");
            }
            return _values[index];
        }
    }

    public override int NumFields
    {
        get { return _fieldCount; }
    }

    public override bool IsDBNull(int index)
    {
        return DBNull.Value == this[index];
    }

    /// <summary>
    /// Copies up to <paramref name="length"/> bytes from the cached field.
    /// With a null destination buffer, returns the number of bytes remaining
    /// from <paramref name="fieldOffset"/> instead (IDataRecord convention).
    /// </summary>
    public override long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length)
    {
        byte[] data = (byte[])this[i];
        if (buffer == null)
        {
            return data.Length - fieldOffset;
        }

        long copyCount = Math.Min(length, data.Length - fieldOffset);
        if (copyCount < 0)
        {
            copyCount = 0;
        }
        Array.Copy(data, fieldOffset, buffer, bufferoffset, copyCount);
        return copyCount;
    }

    /// <summary>
    /// Copies up to <paramref name="length"/> characters from the cached string field.
    /// With a null destination buffer, returns the number of characters remaining
    /// from <paramref name="fieldoffset"/> instead (IDataRecord convention).
    /// </summary>
    public override long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length)
    {
        string text = (string)this[i];
        if (buffer == null)
        {
            return text.Length - fieldoffset;
        }

        long copyCount = Math.Min(length, text.Length - fieldoffset);
        if (copyCount < 0)
        {
            copyCount = 0;
        }
        Array.Copy(text.ToCharArray(), fieldoffset, buffer, bufferoffset, copyCount);
        return copyCount;
    }

    public override void Dispose()
    {
        // Nothing to release: all data is already cached and the source row was
        // disposed in the constructor.
    }
}
/// <summary>
/// A row over a streaming <see cref="RowReader"/>. Fields may only be read in
/// increasing ordinal order (CommandBehavior.SequentialAccess semantics).
/// </summary>
internal sealed class ForwardsOnlyRow : NpgsqlRow
{
    // Ordinal of the last field read or skipped; -1 before any field access.
    private int _lastIndex = -1;
    private readonly RowReader _reader;
    public ForwardsOnlyRow(RowReader reader)
    {
        _reader = reader;
    }
    // Advances the reader to just before 'index', skipping intervening fields.
    // Enforces sequential access: moving backwards throws; re-reading the current
    // ordinal is allowed only when allowCurrent is true AND no field stream is active.
    private void SetIndex(int index, bool allowCurrent)
    {
        if (index < 0 || index >= NumFields)
        {
            throw new IndexOutOfRangeException();
        }
        // When streaming a field, even the current ordinal may not be re-entered,
        // hence the stricter "index <= _lastIndex" comparison in that case.
        if ((!allowCurrent || _reader.CurrentlyStreaming) ? index <= _lastIndex : index < _lastIndex)
        {
            throw new InvalidOperationException(
                string.Format("Invalid attempt to read from column ordinal '{0}'. With CommandBehavior.SequentialAccess, you may only read from column ordinal '{1}' or greater.", index, _lastIndex + 1));
        }
        _reader.Skip(index - _lastIndex - 1);
        _lastIndex = index;
    }
    public override object this[int index]
    {
        get
        {
            // allowCurrent is false: each field may be read at most once.
            SetIndex(index, false);
            return _reader.GetNext();
        }
    }
    // Streams bytes out of a bytea field. Unlike CachingRow, a null buffer is not
    // supported because the remaining length is unknown without consuming the stream.
    public override long GetBytes(int i, long fieldOffset, byte[] buffer, int bufferoffset, int length)
    {
        if (buffer == null)
        {
            throw new NotSupportedException();
        }
        if (!_reader.CanGetByteStream(i))
        {
            throw new InvalidCastException();
        }
        // allowCurrent is true so successive partial reads of the same field work.
        SetIndex(i, true);
        _reader.SkipBytesTo(fieldOffset);
        return _reader.Read(buffer, bufferoffset, length);
    }
    // Streams characters out of a string field; same constraints as GetBytes.
    public override long GetChars(int i, long fieldoffset, char[] buffer, int bufferoffset, int length)
    {
        if (buffer == null)
        {
            throw new NotSupportedException();
        }
        if (!_reader.CanGetCharStream(i))
        {
            throw new InvalidCastException();
        }
        SetIndex(i, true);
        _reader.SkipCharsTo(fieldoffset);
        return _reader.Read(buffer, bufferoffset, length);
    }
    public override int NumFields
    {
        get { return _reader.NumFields; }
    }
    public override bool IsDBNull(int index)
    {
        // Position on the field *before* 'index' so the reader can peek at the next
        // field's null flag without consuming it.
        // NOTE(review): when _lastIndex == -1 (nothing read yet) no skip happens, so
        // this peeks at field 0 regardless of 'index' — presumably callers only probe
        // sequentially; confirm before relying on random-ordinal calls here.
        if (_lastIndex > -1)
        {
            SetIndex(index - 1, true);
        }
        return _reader.IsNextDBNull;
    }
    // Rewinds the sequential-access bookkeeping (the underlying reader is not rewound).
    public void Reset()
    {
        _lastIndex = -1;
    }
    public override void Dispose()
    {
        _reader.Dispose();
    }
}
/// <summary>
/// Reads a row, field by field, allowing a DataRow to be built appropriately.
/// </summary>
internal abstract class RowReader : IStreamOwner
{
    /// <summary>
    /// Reads part of a field, as needed (for <see cref="System.Data.IDataRecord.GetChars()"/>
    /// and <see cref="System.Data.IDataRecord.GetBytes()"/>
    /// </summary>
    protected abstract class Streamer : IStreamOwner
    {
        protected readonly Stream _stream;
        // Bytes of the current field still unread on the wire; decremented by subclasses.
        protected int _remainingBytes;
        // Units (bytes or chars, depending on the subclass) already consumed from this field.
        private int _alreadyRead = 0;
        protected Streamer(Stream stream, int remainingBytes)
        {
            _stream = stream;
            _remainingBytes = remainingBytes;
        }
        public int AlreadyRead
        {
            get { return _alreadyRead; }
            protected set { _alreadyRead = value; }
        }
        // Disposal discards the rest of the field so the stream is positioned
        // at the start of the next one.
        public void Dispose()
        {
            PGUtil.EatStreamBytes(_stream, _remainingBytes);
        }
    }
    /// <summary>
    /// Adds further functionality to stream that is dependant upon the type of data read.
    /// </summary>
    protected abstract class Streamer<T> : Streamer
    {
        protected Streamer(Stream stream, int remainingBytes)
            : base(stream, remainingBytes)
        {
        }
        // Raw read/skip primitives implemented per element type; both return
        // the number of elements actually consumed.
        public abstract int DoRead(T[] output, int outputIdx, int length);
        public abstract int DoSkip(int length);
        public int Read(T[] output, int outputIdx, int length)
        {
            int ret = DoRead(output, outputIdx, length);
            AlreadyRead += ret;
            return ret;
        }
        private void Skip(int length)
        {
            AlreadyRead += DoSkip(length);
        }
        // Seeks forward to an absolute offset within the field. Seeking backwards
        // is impossible on a forward-only stream, hence the exception.
        public void SkipTo(long position)
        {
            if (position < AlreadyRead)
            {
                throw new InvalidOperationException();
            }
            Skip((int)position - AlreadyRead);
        }
    }
    /// <summary>
    /// Completes the implementation of Streamer for char data.
    /// </summary>
    protected sealed class CharStreamer : Streamer<char>
    {
        public CharStreamer(Stream stream, int remainingBytes)
            : base(stream, remainingBytes)
        {
        }
        public override int DoRead(char[] output, int outputIdx, int length)
        {
            return PGUtil.ReadChars(_stream, output, length, ref _remainingBytes, outputIdx);
        }
        public override int DoSkip(int length)
        {
            return PGUtil.SkipChars(_stream, length, ref _remainingBytes);
        }
    }
    /// <summary>
    /// Completes the implementation of Streamer for byte data.
    /// </summary>
    protected sealed class ByteStreamer : Streamer<byte>
    {
        // Scratch buffer shared with the owning reader (used by the escaped-bytes decoding).
        private readonly byte[] buffer;
        public ByteStreamer(Stream stream, byte[] buffer, int remainingBytes)
            : base(stream, remainingBytes)
        {
            this.buffer = buffer;
        }
        public override int DoRead(byte[] output, int outputIdx, int length)
        {
            return PGUtil.ReadEscapedBytes(_stream, buffer, output, length, ref _remainingBytes, outputIdx);
        }
        public override int DoSkip(int length)
        {
            return PGUtil.SkipEscapedBytes(_stream, buffer, length, ref _remainingBytes);
        }
    }
    protected static readonly Encoding UTF8Encoding = Encoding.UTF8;
    // Column metadata for the row being read.
    protected readonly NpgsqlRowDescription _rowDesc;
    protected Stream _stream;
    // Active partial-field streamer, if any; at most one field streams at a time.
    private Streamer _streamer;
    // Ordinal of the field most recently consumed; -1 before the first GetNext/Skip.
    private int _currentField = -1;
    protected readonly byte[] buffer;
    protected readonly ByteBuffer bytes;
    public RowReader(NpgsqlRowDescription rowDesc, Stream stream, byte[] buffer, ByteBuffer bytes)
    {
        _rowDesc = rowDesc;
        _stream = stream;
        this.buffer = buffer;
        this.bytes = bytes;
    }
    // Replacing the streamer disposes the previous one, which flushes the rest
    // of its field off the wire so the stream stays in sync.
    protected Streamer CurrentStreamer
    {
        get { return _streamer; }
        set
        {
            if (_streamer != null)
            {
                _streamer.Dispose();
            }
            _streamer = value;
        }
    }
    public bool CurrentlyStreaming
    {
        get { return _streamer != null; }
    }
    public bool CanGetByteStream(int index)
    {
        //TODO: Add support for byte[] being read as a stream of bytes.
        return _rowDesc[index].TypeInfo.NpgsqlDbType == NpgsqlDbType.Bytea;
    }
    public bool CanGetCharStream(int index)
    {
        //TODO: Add support for arrays of string types?
        return _rowDesc[index].TypeInfo.Type.Equals(typeof(string));
    }
    // Lazily starts (or returns the in-progress) byte streamer for the next field.
    // NOTE: this getter advances _currentField as a side effect when it starts streaming.
    protected Streamer<byte> CurrentByteStreamer
    {
        get
        {
            if (CurrentStreamer == null)
            {
                if (!CanGetByteStream(_currentField + 1))
                {
                    throw new InvalidCastException();
                }
                ++_currentField;
                return (CurrentStreamer = new ByteStreamer(Stream, buffer, GetNextFieldCount())) as ByteStreamer;
            }
            else if (!(CurrentStreamer is Streamer<byte>))
            {
                // A char streamer is already active on this field.
                throw new InvalidOperationException();
            }
            else
            {
                return CurrentStreamer as ByteStreamer;
            }
        }
    }
    // Char-typed counterpart of CurrentByteStreamer; same lazy-start side effect.
    protected Streamer<char> CurrentCharStreamer
    {
        get
        {
            if (CurrentStreamer == null)
            {
                if (!CanGetCharStream(_currentField + 1))
                {
                    throw new InvalidCastException();
                }
                ++_currentField;
                return (CurrentStreamer = new CharStreamer(Stream, GetNextFieldCount())) as CharStreamer;
            }
            else if (!(CurrentStreamer is Streamer<char>))
            {
                throw new InvalidOperationException();
            }
            else
            {
                return CurrentStreamer as CharStreamer;
            }
        }
    }
    protected Stream Stream
    {
        get { return _stream; }
    }
    // Metadata for the field currently being consumed.
    protected NpgsqlRowDescription.FieldData FieldData
    {
        get { return _rowDesc[_currentField]; }
    }
    public int NumFields
    {
        get { return _rowDesc.NumFields; }
    }
    protected int CurrentField
    {
        get { return _currentField; }
    }
    // Subclasses decode one field value from the wire (ASCII vs binary formats).
    protected abstract object ReadNext();
    // Consumes and returns the next field; throws when the row is exhausted.
    public object GetNext()
    {
        if (++_currentField == _rowDesc.NumFields)
        {
            throw new IndexOutOfRangeException();
        }
        return ReadNext();
    }
    // Peeks whether the upcoming field is SQL NULL without consuming it.
    public abstract bool IsNextDBNull { get; }
    protected abstract void SkipOne();
    // Discards 'count' fields from the wire.
    public void Skip(int count)
    {
        if (count > 0)
        {
            if (_currentField + count >= _rowDesc.NumFields)
            {
                throw new IndexOutOfRangeException();
            }
            while (count-- > 0)
            {
                ++_currentField;
                SkipOne();
            }
        }
    }
    // Length prefix (on-wire byte count) of the next field.
    protected abstract int GetNextFieldCount();
    public int Read(byte[] output, int outputIdx, int length)
    {
        return CurrentByteStreamer.Read(output, outputIdx, length);
    }
    public void SkipBytesTo(long position)
    {
        CurrentByteStreamer.SkipTo(position);
    }
    public int Read(char[] output, int outputIdx, int length)
    {
        return CurrentCharStreamer.Read(output, outputIdx, length);
    }
    public void SkipCharsTo(long position)
    {
        CurrentCharStreamer.SkipTo(position);
    }
    // Flushes any active field stream, then discards the rest of the row so the
    // underlying stream is positioned at the next protocol message.
    public virtual void Dispose()
    {
        CurrentStreamer = null;
        Skip(_rowDesc.NumFields - _currentField - 1);
        _currentField = -1;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
/// <summary>
/// System.Collections.Generic.ICollection.IsReadOnly
/// </summary>
public class ICollectionIsReadOnly
{
    /// <summary>
    /// Test entry point. Returns 100 on success (test-harness convention), 0 on failure.
    /// </summary>
    public static int Main(string[] args)
    {
        ICollectionIsReadOnly testObj = new ICollectionIsReadOnly();

        TestLibrary.TestFramework.BeginTestCase("Testing for Property: System.Collections.Generic.ICollection.IsReadOnly");

        if (testObj.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    /// <summary>
    /// Runs every positive scenario; results are aggregated so all tests execute
    /// even after an earlier failure.
    /// </summary>
    public bool RunTests()
    {
        bool retVal = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;

        return retVal;
    }

    #region Positive tests
    /// <summary>
    /// IsReadOnly of List&lt;int&gt; (through ICollection&lt;T&gt;) must be false.
    /// </summary>
    public bool PosTest1()
    {
        bool retVal = true;

        const string c_TEST_DESC = "PosTest1: Verify the IsReadOnly property of List<T> which implemented the IsReadOnly property in ICollection<T> is false...";
        const string c_TEST_ID = "P001";

        List<int> list = new List<int>();

        TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
        try
        {
            if (((ICollection<int>)list).IsReadOnly)
            {
                string errorDesc = "the IsReadOnly property of List<T> should be false";
                TestLibrary.TestFramework.LogError("001" + " TestId-" + c_TEST_ID, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            // Typo fixed: "Unecpected" -> "Unexpected".
            TestLibrary.TestFramework.LogError("002", "Unexpected exception occurs :" + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>
    /// Same as PosTest1, but with a reference element type (List&lt;String&gt;).
    /// </summary>
    public bool PosTest2()
    {
        bool retVal = true;

        const string c_TEST_DESC = "PosTest2: Verify the IsReadOnly property of List<T> which implemented the IsReadOnly property in ICollection<T> and Type is reference Type is false...";
        const string c_TEST_ID = "P002";

        List<String> list = new List<String>();

        TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
        try
        {
            if (((ICollection<String>)list).IsReadOnly)
            {
                string errorDesc = "the IsReadOnly property of List<T> should be false";
                TestLibrary.TestFramework.LogError("003" + " TestId-" + c_TEST_ID, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            // Typo fixed: "Unecpected" -> "Unexpected".
            TestLibrary.TestFramework.LogError("004", "Unexpected exception occurs :" + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>
    /// A custom ICollection&lt;T&gt; whose read-only flag is set must report IsReadOnly == true.
    /// </summary>
    public bool PosTest3()
    {
        bool retVal = true;

        // Typo fixed in the description: "custome" -> "custom".
        const string c_TEST_DESC = "PosTest3: Using custom class which implemented the IsReadOnly property in ICollection<T>...";
        const string c_TEST_ID = "P003";

        MyCollection<int> myC = new MyCollection<int>();
        myC.isReadOnly = true;

        TestLibrary.TestFramework.BeginScenario(c_TEST_DESC);
        try
        {
            if (!((ICollection<int>)myC).IsReadOnly)
            {
                string errorDesc = "the IsReadOnly property of MyCollection<int> should be true";
                TestLibrary.TestFramework.LogError("005" + " TestId-" + c_TEST_ID, errorDesc);
                retVal = false;
            }
        }
        catch (Exception e)
        {
            // Typo fixed: "Unecpected" -> "Unexpected".
            TestLibrary.TestFramework.LogError("006", "Unexpected exception occurs :" + e);
            retVal = false;
        }

        return retVal;
    }
    #endregion

    #region Help Class
    /// <summary>
    /// Minimal ICollection&lt;T&gt; implementation with a publicly settable
    /// read-only flag, used to exercise IsReadOnly on a custom collection.
    /// </summary>
    public class MyCollection<T> : ICollection<T>
    {
        public T[] _items;
        protected int length;
        public bool isReadOnly = false;

        public MyCollection()
        {
            _items = new T[10];
            length = 0;
        }

        #region ICollection<T> Members
        /// <summary>
        /// Appends an item; throws NotSupportedException when read-only.
        /// </summary>
        public void Add(T item)
        {
            if (isReadOnly)
            {
                throw new NotSupportedException();
            }
            // Robustness fix: the original wrote past the fixed 10-slot array on the
            // 11th Add. Grow the backing store instead.
            if (length == _items.Length)
            {
                Array.Resize(ref _items, _items.Length * 2);
            }
            _items[length] = item;
            length++;
        }

        public void Clear()
        {
            throw new Exception("The method or operation is not implemented.");
        }

        public bool Contains(T item)
        {
            throw new Exception("The method or operation is not implemented.");
        }

        public void CopyTo(T[] array, int arrayIndex)
        {
            throw new Exception("The method or operation is not implemented.");
        }

        public int Count
        {
            get { return length; }
        }

        public bool IsReadOnly
        {
            get { return isReadOnly; }
        }

        /// <summary>
        /// Removes the first occurrence of an item; throws NotSupportedException when read-only.
        /// </summary>
        public bool Remove(T item)
        {
            if (isReadOnly)
            {
                throw new NotSupportedException();
            }
            int index = Array.IndexOf(_items, item, 0, length);
            if (index < 0)
            {
                return false;
            }
            else
            {
                if ((uint)index >= (uint)length)
                {
                    // (stray duplicate semicolon removed)
                    throw new ArgumentOutOfRangeException();
                }
                length--;
                if (index < length)
                {
                    // Close the gap left by the removed element.
                    Array.Copy(_items, index + 1, _items, index, length - index);
                }
                _items[length] = default(T);
                return true;
            }
        }
        #endregion

        #region IEnumerable<T> Members
        public IEnumerator<T> GetEnumerator()
        {
            throw new Exception("The method or operation is not implemented.");
        }
        #endregion

        #region IEnumerable Members
        System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator()
        {
            throw new Exception("The method or operation is not implemented.");
        }
        #endregion
    }
    #endregion
}
| |
using System.Collections.ObjectModel;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace Vulnerator.Model.Entity
{
/// <summary>
/// Entity (EF-style property bag) backing the RMF "Step One Questionnaire".
/// Validation is attribute-driven ([Required], [StringLength]); navigation
/// collections are initialized in the constructor.
/// </summary>
public class StepOneQuestionnaire : INotifyPropertyChanged
{
    // NOTE(review): PropertyChanged is declared for INotifyPropertyChanged but is
    // never raised anywhere in this class (all properties are auto-properties), so
    // bindings will not observe changes — confirm whether notification is expected.
    public event PropertyChangedEventHandler PropertyChanged;
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
    public StepOneQuestionnaire()
    {
        // Pre-create navigation collections so consumers can Add() without null checks.
        Connectivities = new ObservableCollection<Connectivity>();
        ExternalSecurityServices = new ObservableCollection<ExternalSecurityService>();
        EncryptionTechniques = new ObservableCollection<EncryptionTechnique>();
        NetworkConnectionRules = new ObservableCollection<NetworkConnectionRule>();
        UserCategories = new ObservableCollection<UserCategory>();
        AuthorizationConditions = new ObservableCollection<AuthorizationCondition>();
        AuthorizationToConnectOrInterim_ATC_PendingItems = new ObservableCollection<AuthorizationToConnectOrInterim_ATC_PendingItem>();
        DeploymentLocations = new ObservableCollection<Location>();
    }
    // Primary key; DatabaseGeneratedOption.None means the application supplies the ID.
    [Key]
    [DatabaseGenerated(DatabaseGeneratedOption.None)]
    public long StepOneQuestionnaire_ID { get; set; }
    [Required]
    [StringLength(25)]
    public string LogicalAccess { get; set; }
    [Required]
    [StringLength(25)]
    public string PhysicalAccess { get; set; }
    [Required]
    [StringLength(25)]
    public string AV_Scan { get; set; }
    [Required]
    [StringLength(25)]
    public string DODIN_ConnectionPeriodicity { get; set; }
    [Required]
    [StringLength(25)]
    public string RegistrationType { get; set; }
    [Required]
    [StringLength(100)]
    public string SystemType { get; set; }
    // NOTE(review): boolean-like flags are stored as short strings throughout
    // (StringLength(5), presumably "Yes"/"No" or "True"/"False") — confirm against the schema.
    [Required]
    [StringLength(5)]
    public string IsNationalSecuritySystem { get; set; }
    [Required]
    [StringLength(5)]
    public string HasPublicFacingPresence { get; set; }
    [Required]
    [StringLength(2000)]
    public string SystemDescription { get; set; }
    [Required]
    [StringLength(2000)]
    public string MissionDescription { get; set; }
    [Required]
    [StringLength(2000)]
    public string CONOPS_Statement { get; set; }
    public long? DITPR_DON_Number { get; set; }
    [Required]
    [StringLength(200)]
    public string DOD_IT_RegistrationNumber { get; set; }
    [StringLength(100)]
    public string DVS_Site { get; set; }
    [Required]
    [StringLength(200)]
    public string PPSM_RegistrationNumber { get; set; }
    [Required]
    [StringLength(2000)]
    public string SystemAuthorizationBoundary { get; set; }
    [Required]
    [StringLength(2000)]
    public string HardwareSoftwareFirmware { get; set; }
    [Required]
    [StringLength(2000)]
    public string SystemEnterpriseArchitecture { get; set; }
    [Required]
    [StringLength(2000)]
    public string InformationFlowsAndPaths { get; set; }
    [Required]
    [StringLength(25)]
    public string SystemLocation { get; set; }
    [Required]
    [StringLength(5)]
    public string IsTypeAuthorization { get; set; }
    // Foreign key + navigation for the baseline Location.
    public long? BaselineLocation_ID { get; set; }
    public virtual Location BaselineLocation { get; set; }
    [Required]
    [StringLength(500)]
    public string InstallationNameOrOwningOrganization { get; set; }
    [Required]
    [StringLength(50)]
    public string SecurityPlanApprovalStatus { get; set; }
    public DateTime? SecurityPlanApprovalDate { get; set; }
    [Required]
    [StringLength(25)]
    public string AuthorizationStatus { get; set; }
    [Required]
    [StringLength(5)]
    public string HasAuthorizationDocumentation { get; set; }
    public DateTime? AssessmentCompletionDate { get; set; }
    public DateTime? AuthorizationDate { get; set; }
    public DateTime? AuthorizationTerminationDate { get; set; }
    [Required]
    [StringLength(25)]
    public string RMF_Activity { get; set; }
    [Required]
    [StringLength(2000)]
    public string AuthorizationTermsAndConditions { get; set; }
    [Required]
    [StringLength(5)]
    public string IsSecurityReviewCompleted{ get; set; }
    public DateTime? SecurityReviewDate { get; set; }
    [Required]
    [StringLength(5)]
    public string IsContingencyPlanRequired{ get; set; }
    [StringLength(5)]
    public string IsContingencyPlanTested{ get; set; }
    public DateTime? ContingencyPlanTestDate { get; set; }
    [Required]
    [StringLength(5)]
    public string IsPIA_Required{ get; set; }
    public DateTime? PIA_Date { get; set; }
    [Required]
    [StringLength(5)]
    public string IsPrivacyActNoticeRequired{ get; set; }
    [Required]
    [StringLength(5)]
    public string Is_eAuthenticationRiskAssessmentRequired{ get; set; }
    public DateTime? eAuthenticationRiskAssessmentDate { get; set; }
    [Required]
    [StringLength(5)]
    public string IsReportableToFISMA{ get; set; }
    [Required]
    [StringLength(5)]
    public string IsReportableToERS{ get; set; }
    [Required]
    [StringLength(25)]
    public string MissionCriticality { get; set; }
    [Required]
    [StringLength(25)]
    public string GoverningMissionArea { get; set; }
    [Required]
    [StringLength(25)]
    public string DOD_Component { get; set; }
    [Required]
    [StringLength(25)]
    public string ACQ_Category { get; set; }
    [Required]
    [StringLength(25)]
    public string ACQ_Phase { get; set; }
    [Required]
    [StringLength(25)]
    public string SoftwareCategory { get; set; }
    [Required]
    [StringLength(50)]
    public string SystemOwnershipAndControl { get; set; }
    [StringLength(2000)]
    public string OtherInformation { get; set; }
    // NOTE(review): [Required] on a nullable DateTime? means the value must be present
    // at validation time even though the CLR type allows null — confirm this is intended.
    [Required]
    public DateTime? AuthorizationToConnectOrInterim_ATC_GrantedDate { get; set; }
    [Required]
    public DateTime? AuthorizationToConnectOrInterim_ATC_ExpirationDate { get; set; }
    [StringLength(25)]
    public string AuthorizationToConnectOrInterim_ATC_CND_ServiceProvider { get; set; }
    [Required]
    [StringLength(50)]
    public string PrimaryNIST_ControlSet { get; set; }
    // Navigation collections (setters kept public for EF materialization; CA2227 suppressed).
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<Connectivity> Connectivities { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<ExternalSecurityService> ExternalSecurityServices { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<EncryptionTechnique> EncryptionTechniques { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<NetworkConnectionRule> NetworkConnectionRules { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<UserCategory> UserCategories { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<AuthorizationCondition> AuthorizationConditions { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<AuthorizationToConnectOrInterim_ATC_PendingItem> AuthorizationToConnectOrInterim_ATC_PendingItems { get; set; }
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
    public virtual ICollection<Location> DeploymentLocations { get; set; }
    public virtual Group Group { get; set; }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
**
**
** Purpose: Exposes routines for enumerating through a
** directory.
**
** April 11,2000
**
===========================================================*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Security;
using System.Security.Permissions;
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System.Text;
using System.Runtime.InteropServices;
using System.Globalization;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
using System.Threading;
#if FEATURE_MACL
using System.Security.AccessControl;
#endif
namespace System.IO {
[ComVisible(true)]
public static class Directory {
/// <summary>
/// Returns the parent directory of <paramref name="path"/>, or null when the
/// normalized path is a root (has no parent).
/// </summary>
public static DirectoryInfo GetParent(String path)
{
    if (path == null)
        throw new ArgumentNullException("path");

    if (path.Length == 0)
        throw new ArgumentException(Environment.GetResourceString("Argument_PathEmpty"), "path");
    Contract.EndContractBlock();

    // Normalize first so relative segments are resolved before taking the directory part.
    String fullPath = Path.GetFullPathInternal(path);
    String parent = Path.GetDirectoryName(fullPath);
    return parent == null ? null : new DirectoryInfo(parent);
}
/// <summary>
/// Creates the directory (and any missing ancestors) described by <paramref name="path"/>,
/// performing host security checks for public callers.
/// </summary>
[System.Security.SecuritySafeCritical]
public static DirectoryInfo CreateDirectory(String path)
{
    // Validate arguments before any path processing.
    if (path == null)
        throw new ArgumentNullException("path");
    if (path.Length == 0)
        throw new ArgumentException(Environment.GetResourceString("Argument_PathEmpty"));
    Contract.EndContractBlock();

    // checkHost: true — public entry points go through the host (AppDomainManager) checks.
    return InternalCreateDirectoryHelper(path, true);
}
/// <summary>
/// Creates the directory like <see cref="CreateDirectory(String)"/> but skips
/// the host security checks — for trusted internal callers only.
/// </summary>
[System.Security.SecurityCritical]
internal static DirectoryInfo UnsafeCreateDirectory(String path)
{
    // Same argument validation as the public overload.
    if (path == null)
        throw new ArgumentNullException("path");
    if (path.Length == 0)
        throw new ArgumentException(Environment.GetResourceString("Argument_PathEmpty"));
    Contract.EndContractBlock();

    // checkHost: false — callers on this path have already been vetted.
    return InternalCreateDirectoryHelper(path, false);
}
// Shared worker for CreateDirectory/UnsafeCreateDirectory: normalizes the path,
// performs a read demand on the target, then delegates the actual creation.
[System.Security.SecurityCritical]
internal static DirectoryInfo InternalCreateDirectoryHelper(String path, bool checkHost)
{
    Contract.Requires(path != null);
    Contract.Requires(path.Length != 0);
    String fullPath = Path.GetFullPathInternal(path);
    // You need read access to the directory to be returned back and write access to all the directories
    // that you need to create. If we fail any security checks we will not create any directories at all.
    // We attempt to create directories only after all the security checks have passed. This is avoid doing
    // a demand at every level.
    String demandDir = GetDemandDir(fullPath, true);
#if FEATURE_CORECLR
    if (checkHost)
    {
        FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Read, path, demandDir);
        state.EnsureState(); // do the check on the AppDomainManager to make sure this is allowed
    }
#else
    FileIOPermission.QuickDemand(FileIOPermissionAccess.Read, demandDir, false, false);
#endif
    InternalCreateDirectory(fullPath, path, null, checkHost);
    // 'false' suppresses re-demanding permissions the caller already satisfied.
    return new DirectoryInfo(fullPath, false);
}
#if FEATURE_MACL
// ACL-aware overload (FEATURE_MACL builds only): creates the directory tree and
// applies the supplied DirectorySecurity to newly created directories.
[System.Security.SecuritySafeCritical] // auto-generated
public static DirectoryInfo CreateDirectory(String path, DirectorySecurity directorySecurity) {
    if (path==null)
        throw new ArgumentNullException("path");
    if (path.Length == 0)
        throw new ArgumentException(Environment.GetResourceString("Argument_PathEmpty"));
    Contract.EndContractBlock();
    String fullPath = Path.GetFullPathInternal(path);
    // You need read access to the directory to be returned back and write access to all the directories
    // that you need to create. If we fail any security checks we will not create any directories at all.
    // We attempt to create directories only after all the security checks have passed. This is avoid doing
    // a demand at every level.
    String demandDir = GetDemandDir(fullPath, true);
    FileIOPermission.QuickDemand(FileIOPermissionAccess.Read, demandDir, false, false );
    // Three-argument overload defaults checkHost to false on this path.
    InternalCreateDirectory(fullPath, path, directorySecurity);
    return new DirectoryInfo(fullPath, false);
}
#endif // FEATURE_MACL
// Input to this method should already be fullpath. This method will ensure that we append
// the trailing slash only when appropriate and when thisDirOnly is specified append a "."
// at the end of the path to indicate that the demand is only for the fullpath and not
// everything underneath it.
// Input to this method should already be fullpath. Builds the path used for a
// security demand: with thisDirOnly, the path is terminated with "\." so the
// demand covers only this directory; otherwise a trailing separator is ensured
// so the demand covers the directory and everything beneath it.
internal static String GetDemandDir(string fullPath, bool thisDirOnly)
{
    // Does the path already end in a primary or alternate separator?
    bool endsInSeparator = fullPath.EndsWith(Path.DirectorySeparatorChar)
        || fullPath.EndsWith(Path.AltDirectorySeparatorChar);

    if (thisDirOnly)
    {
        return endsInSeparator
            ? fullPath + "."
            : fullPath + Path.DirectorySeparatorCharAsString + ".";
    }

    return endsInSeparator
        ? fullPath
        : fullPath + Path.DirectorySeparatorCharAsString;
}
/// <summary>
/// Convenience overload that creates the directory tree without host checks.
/// </summary>
internal static void InternalCreateDirectory(String fullPath, String path, Object dirSecurityObj)
{
    InternalCreateDirectory(fullPath, path, dirSecurityObj, checkHost: false);
}
[System.Security.SecuritySafeCritical]
internal unsafe static void InternalCreateDirectory(String fullPath, String path, Object dirSecurityObj, bool checkHost)
{
#if FEATURE_MACL
DirectorySecurity dirSecurity = (DirectorySecurity)dirSecurityObj;
#endif // FEATURE_MACL
int length = fullPath.Length;
// We need to trim the trailing slash or the code will try to create 2 directories of the same name.
if (length >= 2 && Path.IsDirectorySeparator(fullPath[length - 1]))
length--;
int lengthRoot = Path.GetRootLength(fullPath);
// For UNC paths that are only // or ///
if (length == 2 && Path.IsDirectorySeparator(fullPath[1]))
throw new IOException(Environment.GetResourceString("IO.IO_CannotCreateDirectory", path));
// We can save a bunch of work if the directory we want to create already exists. This also
// saves us in the case where sub paths are inaccessible (due to ERROR_ACCESS_DENIED) but the
// final path is accessable and the directory already exists. For example, consider trying
// to create c:\Foo\Bar\Baz, where everything already exists but ACLS prevent access to c:\Foo
// and c:\Foo\Bar. In that case, this code will think it needs to create c:\Foo, and c:\Foo\Bar
// and fail to due so, causing an exception to be thrown. This is not what we want.
if (InternalExists(fullPath)) {
return;
}
List<string> stackDir = new List<string>();
// Attempt to figure out which directories don't exist, and only
// create the ones we need. Note that InternalExists may fail due
// to Win32 ACL's preventing us from seeing a directory, and this
// isn't threadsafe.
bool somepathexists = false;
if (length > lengthRoot) { // Special case root (fullpath = X:\\)
int i = length-1;
while (i >= lengthRoot && !somepathexists) {
String dir = fullPath.Substring(0, i+1);
if (!InternalExists(dir)) // Create only the ones missing
stackDir.Add(dir);
else
somepathexists = true;
while (i > lengthRoot && fullPath[i] != Path.DirectorySeparatorChar && fullPath[i] != Path.AltDirectorySeparatorChar) i--;
i--;
}
}
int count = stackDir.Count;
if (stackDir.Count != 0
#if FEATURE_CAS_POLICY
// All demands in full trust domains are no-ops, so skip
//
// The full path went through validity checks by being passed through FileIOPermissions already.
// As a sub string of the full path can't fail the checks if the full path passes.
&& !CodeAccessSecurityEngine.QuickCheckForAllDemands()
#endif
)
{
String[] securityList = new String[stackDir.Count];
stackDir.CopyTo(securityList, 0);
for (int j = 0 ; j < securityList.Length; j++)
securityList[j] += "\\."; // leaf will never have a slash at the end
// Security check for all directories not present only.
#if FEATURE_MACL
AccessControlActions control = (dirSecurity == null) ? AccessControlActions.None : AccessControlActions.Change;
FileIOPermission.QuickDemand(FileIOPermissionAccess.Write, control, securityList, false, false);
#else
#if FEATURE_CORECLR
if (checkHost)
{
foreach (String demandPath in securityList)
{
FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Write, String.Empty, demandPath);
state.EnsureState();
}
}
#else
FileIOPermission.QuickDemand(FileIOPermissionAccess.Write, securityList, false, false);
#endif
#endif //FEATURE_MACL
}
// If we were passed a DirectorySecurity, convert it to a security
// descriptor and set it in he call to CreateDirectory.
Win32Native.SECURITY_ATTRIBUTES secAttrs = null;
#if FEATURE_MACL
if (dirSecurity != null) {
secAttrs = new Win32Native.SECURITY_ATTRIBUTES();
secAttrs.nLength = (int)Marshal.SizeOf(secAttrs);
// For ACL's, get the security descriptor from the FileSecurity.
byte[] sd = dirSecurity.GetSecurityDescriptorBinaryForm();
byte * bytesOnStack = stackalloc byte[sd.Length];
Buffer.Memcpy(bytesOnStack, 0, sd, 0, sd.Length);
secAttrs.pSecurityDescriptor = bytesOnStack;
}
#endif
bool r = true;
int firstError = 0;
String errorString = path;
// If all the security checks succeeded create all the directories
while (stackDir.Count > 0) {
String name = stackDir[stackDir.Count - 1];
stackDir.RemoveAt(stackDir.Count - 1);
if (PathInternal.IsDirectoryTooLong(name))
throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
r = Win32Native.CreateDirectory(name, secAttrs);
if (!r && (firstError == 0)) {
int currentError = Marshal.GetLastWin32Error();
// While we tried to avoid creating directories that don't
// exist above, there are at least two cases that will
// cause us to see ERROR_ALREADY_EXISTS here. InternalExists
// can fail because we didn't have permission to the
// directory. Secondly, another thread or process could
// create the directory between the time we check and the
// time we try using the directory. Thirdly, it could
// fail because the target does exist, but is a file.
if (currentError != Win32Native.ERROR_ALREADY_EXISTS)
firstError = currentError;
else {
// If there's a file in this directory's place, or if we have ERROR_ACCESS_DENIED when checking if the directory already exists throw.
if (File.InternalExists(name) || (!InternalExists(name, out currentError) && currentError == Win32Native.ERROR_ACCESS_DENIED)) {
firstError = currentError;
// Give the user a nice error message, but don't leak path information.
try {
#if FEATURE_CORECLR
if (checkHost)
{
FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.PathDiscovery, String.Empty, GetDemandDir(name, true));
state.EnsureState();
}
#else
FileIOPermission.QuickDemand(FileIOPermissionAccess.PathDiscovery, GetDemandDir(name, true));
#endif // FEATURE_CORECLR
errorString = name;
}
catch(SecurityException) {}
}
}
}
}
// We need this check to mask OS differences
// Handle CreateDirectory("X:\\foo") when X: doesn't exist. Similarly for n/w paths.
if ((count == 0) && !somepathexists) {
String root = InternalGetDirectoryRoot(fullPath);
if (!InternalExists(root)) {
// Extract the root from the passed in path again for security.
__Error.WinIOError(Win32Native.ERROR_PATH_NOT_FOUND, InternalGetDirectoryRoot(path));
}
return;
}
// Only throw an exception if creating the exact directory we
// wanted failed to work correctly.
if (!r && (firstError != 0)) {
__Error.WinIOError(firstError, errorString);
}
}
// Tests whether the given path refers to an existing directory on disk.
// The caller's application must have Read permission to the directory's
// contents; all failures are reported as "does not exist".
[System.Security.SecuritySafeCritical] // auto-generated
public static bool Exists(String path)
{
    // Full security/host checks are requested (checkHost: true).
    bool directoryExists = InternalExistsHelper(path, true);
    return directoryExists;
}
[System.Security.SecurityCritical]
internal static bool UnsafeExists(String path)
{
    // Trusted callers skip the host/security checks (checkHost: false).
    bool directoryExists = InternalExistsHelper(path, false);
    return directoryExists;
}
// Shared worker for Exists/UnsafeExists. Returns true only when the path
// names an existing directory AND every validation/permission step succeeds;
// any expected failure (bad path, security denial, I/O error) maps to false
// rather than an exception, matching the Exists() contract.
[System.Security.SecurityCritical]
internal static bool InternalExistsHelper(String path, bool checkHost) {
    try
    {
        // Null/empty paths are simply "not there" — no exception.
        if (path == null)
            return false;
        if (path.Length == 0)
            return false;
        // Get fully qualified file name ending in \* for security check
        String fullPath = Path.GetFullPathInternal(path);
        String demandPath = GetDemandDir(fullPath, true);
#if FEATURE_CORECLR
        if (checkHost)
        {
            FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Read, path, demandPath);
            state.EnsureState();
        }
#else
        FileIOPermission.QuickDemand(FileIOPermissionAccess.Read, demandPath, false, false);
#endif
        return InternalExists(fullPath);
    }
    // Each of these is an expected way for an invalid/inaccessible path to
    // fail; all are deliberately swallowed so Exists() reports false.
    catch (ArgumentException) { }
    catch (NotSupportedException) { } // Security can throw this on ":"
    catch (SecurityException) { }
    catch (IOException) { }
    catch (UnauthorizedAccessException)
    {
        Contract.Assert(false, "Ignore this assert and send a repro to Microsoft. This assert was tracking purposes only.");
    }
    return false;
}
// Determines whether the path names an existing directory on disk,
// without performing any security checks. Discards the Win32 error code.
[System.Security.SecurityCritical] // auto-generated
internal static bool InternalExists(String path) {
    int ignoredError = Win32Native.ERROR_SUCCESS;
    bool directoryExists = InternalExists(path, out ignoredError);
    return directoryExists;
}
// Determines whether the path names an existing directory on disk,
// without security checks; reports the raw Win32 error via lastError.
[System.Security.SecurityCritical] // auto-generated
internal static bool InternalExists(String path, out int lastError) {
    Win32Native.WIN32_FILE_ATTRIBUTE_DATA attributes = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
    lastError = File.FillAttributeInfo(path, ref attributes, false, true);
    if (lastError != 0)
        return false;
    if (attributes.fileAttributes == -1)
        return false;
    // It exists and is a directory (not a file) only if the directory bit is set.
    return (attributes.fileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY) != 0;
}
#if !FEATURE_CORECLR
// Sets the creation time of the directory; local time is normalized to UTC.
public static void SetCreationTime(String path,DateTime creationTime)
{
    DateTime utcCreationTime = creationTime.ToUniversalTime();
    SetCreationTimeUtc(path, utcCreationTime);
}
// Sets the creation time (UTC) by opening a handle to the directory and
// stamping only the creation slot of SetFileTime.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe static void SetCreationTimeUtc(String path,DateTime creationTimeUtc)
{
    using (SafeFileHandle directoryHandle = Directory.OpenHandle(path)) {
        Win32Native.FILE_TIME creation = new Win32Native.FILE_TIME(creationTimeUtc.ToFileTimeUtc());
        if (!Win32Native.SetFileTime(directoryHandle, &creation, null, null))
        {
            // Surface the Win32 failure as the conventional IOException family.
            __Error.WinIOError(Marshal.GetLastWin32Error(), path);
        }
    }
}
#endif // !FEATURE_CORECLR
// Returns the creation time of the directory in local time.
// Directories share File's timestamp plumbing.
public static DateTime GetCreationTime(String path)
{
    DateTime creation = File.GetCreationTime(path);
    return creation;
}
// Returns the creation time of the directory in UTC.
public static DateTime GetCreationTimeUtc(String path)
{
    DateTime creationUtc = File.GetCreationTimeUtc(path);
    return creationUtc;
}
#if !FEATURE_CORECLR
// Sets the last-write time of the directory; local time is normalized to UTC.
public static void SetLastWriteTime(String path,DateTime lastWriteTime)
{
    DateTime utcLastWriteTime = lastWriteTime.ToUniversalTime();
    SetLastWriteTimeUtc(path, utcLastWriteTime);
}
// Sets the last-write time (UTC) by opening a handle to the directory and
// stamping only the last-write slot of SetFileTime.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe static void SetLastWriteTimeUtc(String path,DateTime lastWriteTimeUtc)
{
    using (SafeFileHandle directoryHandle = Directory.OpenHandle(path)) {
        Win32Native.FILE_TIME lastWrite = new Win32Native.FILE_TIME(lastWriteTimeUtc.ToFileTimeUtc());
        if (!Win32Native.SetFileTime(directoryHandle, null, null, &lastWrite))
        {
            // Surface the Win32 failure as the conventional IOException family.
            __Error.WinIOError(Marshal.GetLastWin32Error(), path);
        }
    }
}
#endif // !FEATURE_CORECLR
// Returns the last-write time of the directory in local time.
public static DateTime GetLastWriteTime(String path)
{
    DateTime lastWrite = File.GetLastWriteTime(path);
    return lastWrite;
}
// Returns the last-write time of the directory in UTC.
public static DateTime GetLastWriteTimeUtc(String path)
{
    DateTime lastWriteUtc = File.GetLastWriteTimeUtc(path);
    return lastWriteUtc;
}
#if !FEATURE_CORECLR
// Sets the last-access time of the directory; local time is normalized to UTC.
public static void SetLastAccessTime(String path,DateTime lastAccessTime)
{
    DateTime utcLastAccessTime = lastAccessTime.ToUniversalTime();
    SetLastAccessTimeUtc(path, utcLastAccessTime);
}
// Sets the last-access time (UTC) by opening a handle to the directory and
// stamping only the last-access slot of SetFileTime.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe static void SetLastAccessTimeUtc(String path,DateTime lastAccessTimeUtc)
{
    using (SafeFileHandle directoryHandle = Directory.OpenHandle(path)) {
        Win32Native.FILE_TIME lastAccess = new Win32Native.FILE_TIME(lastAccessTimeUtc.ToFileTimeUtc());
        if (!Win32Native.SetFileTime(directoryHandle, null, &lastAccess, null))
        {
            // Surface the Win32 failure as the conventional IOException family.
            __Error.WinIOError(Marshal.GetLastWin32Error(), path);
        }
    }
}
#endif // !FEATURE_CORECLR
// Returns the last-access time of the directory in local time.
public static DateTime GetLastAccessTime(String path)
{
    DateTime lastAccess = File.GetLastAccessTime(path);
    return lastAccess;
}
// Returns the last-access time of the directory in UTC.
public static DateTime GetLastAccessTimeUtc(String path)
{
    DateTime lastAccessUtc = File.GetLastAccessTimeUtc(path);
    return lastAccessUtc;
}
#if FEATURE_MACL
// Reads the access, owner and group sections of the directory's security
// descriptor (audit rules require extra privilege and are omitted by default).
public static DirectorySecurity GetAccessControl(String path)
{
    AccessControlSections defaultSections =
        AccessControlSections.Access | AccessControlSections.Owner | AccessControlSections.Group;
    return new DirectorySecurity(path, defaultSections);
}
// Reads exactly the caller-selected sections of the security descriptor.
public static DirectorySecurity GetAccessControl(String path, AccessControlSections includeSections)
{
    DirectorySecurity security = new DirectorySecurity(path, includeSections);
    return security;
}
// Persists the supplied DirectorySecurity onto the directory.
[System.Security.SecuritySafeCritical] // auto-generated
public static void SetAccessControl(String path, DirectorySecurity directorySecurity)
{
    if (directorySecurity == null)
    {
        throw new ArgumentNullException("directorySecurity");
    }
    Contract.EndContractBlock();
    // Normalize to a fully qualified path before writing the ACL.
    String fullPath = Path.GetFullPathInternal(path);
    directorySecurity.Persist(fullPath);
}
#endif
// Returns the names of all files directly contained in the given directory.
public static String[] GetFiles(String path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    // "*" matches everything; only the top directory is searched.
    return InternalGetFiles(path, "*", SearchOption.TopDirectoryOnly);
}
// Returns the names of files in the directory that match the given
// search pattern (e.g. "*.txt"); only the top directory is searched.
public static String[] GetFiles(String path, String searchPattern)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetFiles(path, searchPattern, SearchOption.TopDirectoryOnly);
}
// Returns the names of files matching the search pattern (e.g. "*.txt"),
// searching either the top directory only or all subdirectories.
public static String[] GetFiles(String path, String searchPattern, SearchOption searchOption)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
    {
        throw new ArgumentOutOfRangeException("searchOption", Environment.GetResourceString("ArgumentOutOfRange_Enum"));
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetFiles(path, searchPattern, searchOption);
}
// Returns an array of files in the directory matching the given search
// pattern (e.g. "*.txt") and search option. Arguments are pre-validated by
// the public GetFiles overloads.
private static String[] InternalGetFiles(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    // Added for consistency with InternalGetDirectories: the result is never null.
    Contract.Ensures(Contract.Result<String[]>() != null);
    // includeFiles: true, includeDirs: false, checkHost: true (full security checks).
    return InternalGetFileDirectoryNames(path, path, searchPattern, true, false, searchOption, true);
}
// Trusted-caller variant of InternalGetFiles: skips host security checks
// (checkHost: false). Arguments are pre-validated by the caller.
[System.Security.SecurityCritical]
internal static String[] UnsafeGetFiles(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    // Added for consistency with UnsafeGetDirectories: the result is never null.
    Contract.Ensures(Contract.Result<String[]>() != null);
    return InternalGetFileDirectoryNames(path, path, searchPattern, true, false, searchOption, false);
}
// Returns the names of all subdirectories directly contained in the directory.
public static String[] GetDirectories(String path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    // "*" matches everything; only the top directory is searched.
    return InternalGetDirectories(path, "*", SearchOption.TopDirectoryOnly);
}
// Returns the names of subdirectories matching the given search pattern;
// only the top directory is searched.
public static String[] GetDirectories(String path, String searchPattern)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetDirectories(path, searchPattern, SearchOption.TopDirectoryOnly);
}
// Returns the names of subdirectories matching the search pattern, searching
// either the top directory only or all subdirectories.
public static String[] GetDirectories(String path, String searchPattern, SearchOption searchOption)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
    {
        throw new ArgumentOutOfRangeException("searchOption", Environment.GetResourceString("ArgumentOutOfRange_Enum"));
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetDirectories(path, searchPattern, searchOption);
}
// Worker for the public GetDirectories overloads; arguments are pre-validated.
private static String[] InternalGetDirectories(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    Contract.Ensures(Contract.Result<String[]>() != null);
    // includeFiles: false, includeDirs: true, checkHost: true (full security checks).
    return InternalGetFileDirectoryNames(path, path, searchPattern, false, true, searchOption, true);
}
// Trusted-caller variant of InternalGetDirectories: skips host security
// checks (checkHost: false).
[System.Security.SecurityCritical]
internal static String[] UnsafeGetDirectories(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    Contract.Ensures(Contract.Result<String[]>() != null);
    return InternalGetFileDirectoryNames(path, path, searchPattern, false, true, searchOption, false);
}
// Returns the names of all files AND subdirectories directly contained
// in the given directory.
public static String[] GetFileSystemEntries(String path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetFileSystemEntries(path, "*", SearchOption.TopDirectoryOnly);
}
// Returns the names of files and subdirectories matching the given search
// pattern (e.g. "*.txt"); ".." is disallowed as part of the pattern.
public static String[] GetFileSystemEntries(String path, String searchPattern)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetFileSystemEntries(path, searchPattern, SearchOption.TopDirectoryOnly);
}
// Returns the names of files and subdirectories matching the search pattern,
// searching either the top directory only or all subdirectories.
public static String[] GetFileSystemEntries(String path, String searchPattern, SearchOption searchOption)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
    {
        throw new ArgumentOutOfRangeException("searchOption", Environment.GetResourceString("ArgumentOutOfRange_Enum"));
    }
    Contract.Ensures(Contract.Result<String[]>() != null);
    Contract.EndContractBlock();
    return InternalGetFileSystemEntries(path, searchPattern, searchOption);
}
// Worker for the public GetFileSystemEntries overloads; arguments are
// pre-validated by the callers.
private static String[] InternalGetFileSystemEntries(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    // Added for consistency with InternalGetDirectories: the result is never null.
    Contract.Ensures(Contract.Result<String[]>() != null);
    // includeFiles and includeDirs are both true: entries of either kind match.
    return InternalGetFileDirectoryNames(path, path, searchPattern, true, true, searchOption, true);
}
// Private class that holds search data that is passed around
// in the heap based stack recursion
internal sealed class SearchData
{
    // fullPath/userPath must be non-empty and searchOption must be one of the
    // two defined values; callers guarantee this (contracts below).
    public SearchData(String fullPath, String userPath, SearchOption searchOption)
    {
        Contract.Requires(fullPath != null && fullPath.Length > 0);
        Contract.Requires(userPath != null && userPath.Length > 0);
        Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
        this.fullPath = fullPath;
        this.userPath = userPath;
        this.searchOption = searchOption;
    }
    public readonly string fullPath; // Fully qualified search path excluding the search criteria in the end (ex, c:\temp\bar\foo)
    public readonly string userPath; // User specified path (ex, bar\foo)
    public readonly SearchOption searchOption; // Top-directory-only or recursive search
}
// Returns fully qualified user paths of the directories/files matching the
// search parameters. For a recursive search every subdirectory is visited and
// matched against the criteria. Path-discovery permission is demanded for the
// parent folders of every returned entry (duplicate demands are avoided by
// the underlying iterator).
internal static String[] InternalGetFileDirectoryNames(String path, String userPathOriginal, String searchPattern, bool includeFiles, bool includeDirs, SearchOption searchOption, bool checkHost)
{
    Contract.Requires(path != null);
    Contract.Requires(userPathOriginal != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    IEnumerable<String> matches = FileSystemEnumerableFactory.CreateFileNameIterator(
        path, userPathOriginal, searchPattern,
        includeFiles, includeDirs, searchOption, checkHost);
    // Materialize the lazy iterator into an array for the array-returning APIs.
    List<String> results = new List<String>(matches);
    return results.ToArray();
}
// Lazily enumerates all subdirectories directly contained in the directory.
public static IEnumerable<String> EnumerateDirectories(String path)
{
    if (path == null)
        throw new ArgumentNullException("path");
    // Added for consistency with EnumerateFiles/EnumerateFileSystemEntries:
    // the returned sequence is never null.
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateDirectories(path, "*", SearchOption.TopDirectoryOnly);
}
// Lazily enumerates subdirectories matching the search pattern;
// only the top directory is searched.
public static IEnumerable<String> EnumerateDirectories(String path, String searchPattern)
{
    if (path == null)
        throw new ArgumentNullException("path");
    if (searchPattern == null)
        throw new ArgumentNullException("searchPattern");
    // Added for consistency with the EnumerateFiles overloads: never null.
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateDirectories(path, searchPattern, SearchOption.TopDirectoryOnly);
}
// Lazily enumerates subdirectories matching the search pattern, searching
// either the top directory only or all subdirectories.
public static IEnumerable<String> EnumerateDirectories(String path, String searchPattern, SearchOption searchOption)
{
    if (path == null)
        throw new ArgumentNullException("path");
    if (searchPattern == null)
        throw new ArgumentNullException("searchPattern");
    if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
        throw new ArgumentOutOfRangeException("searchOption", Environment.GetResourceString("ArgumentOutOfRange_Enum"));
    // Added for consistency with the EnumerateFiles overloads: never null.
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateDirectories(path, searchPattern, searchOption);
}
// Worker for the public EnumerateDirectories overloads; arguments pre-validated.
private static IEnumerable<String> InternalEnumerateDirectories(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    // Added for consistency with InternalEnumerateFiles: never null.
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    // includeFiles: false, includeDirs: true.
    return EnumerateFileSystemNames(path, searchPattern, searchOption, false, true);
}
// Lazily enumerates all files directly contained in the directory.
public static IEnumerable<String> EnumerateFiles(String path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateFiles(path, "*", SearchOption.TopDirectoryOnly);
}
// Lazily enumerates files matching the search pattern; top directory only.
public static IEnumerable<String> EnumerateFiles(String path, String searchPattern)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateFiles(path, searchPattern, SearchOption.TopDirectoryOnly);
}
// Lazily enumerates files matching the search pattern, searching either the
// top directory only or all subdirectories.
public static IEnumerable<String> EnumerateFiles(String path, String searchPattern, SearchOption searchOption)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
    {
        throw new ArgumentOutOfRangeException("searchOption", Environment.GetResourceString("ArgumentOutOfRange_Enum"));
    }
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateFiles(path, searchPattern, searchOption);
}
// Worker for the public EnumerateFiles overloads; arguments pre-validated.
private static IEnumerable<String> InternalEnumerateFiles(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    // includeFiles: true, includeDirs: false.
    return EnumerateFileSystemNames(path, searchPattern, searchOption, true, false);
}
// Lazily enumerates all files and subdirectories directly in the directory.
public static IEnumerable<String> EnumerateFileSystemEntries(String path)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateFileSystemEntries(path, "*", SearchOption.TopDirectoryOnly);
}
// Lazily enumerates files and subdirectories matching the search pattern;
// top directory only.
public static IEnumerable<String> EnumerateFileSystemEntries(String path, String searchPattern)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateFileSystemEntries(path, searchPattern, SearchOption.TopDirectoryOnly);
}
// Lazily enumerates files and subdirectories matching the search pattern,
// searching either the top directory only or all subdirectories.
public static IEnumerable<String> EnumerateFileSystemEntries(String path, String searchPattern, SearchOption searchOption)
{
    if (path == null)
    {
        throw new ArgumentNullException("path");
    }
    if (searchPattern == null)
    {
        throw new ArgumentNullException("searchPattern");
    }
    if ((searchOption != SearchOption.TopDirectoryOnly) && (searchOption != SearchOption.AllDirectories))
    {
        throw new ArgumentOutOfRangeException("searchOption", Environment.GetResourceString("ArgumentOutOfRange_Enum"));
    }
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    Contract.EndContractBlock();
    return InternalEnumerateFileSystemEntries(path, searchPattern, searchOption);
}
// Worker for the public EnumerateFileSystemEntries overloads.
private static IEnumerable<String> InternalEnumerateFileSystemEntries(String path, String searchPattern, SearchOption searchOption)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    // includeFiles and includeDirs are both true: entries of either kind match.
    return EnumerateFileSystemNames(path, searchPattern, searchOption, true, true);
}
// Common tail for all Enumerate* workers: builds the lazy file-name iterator
// with full security checks (checkHost: true) and the same path used as both
// the search path and the user-visible path.
private static IEnumerable<String> EnumerateFileSystemNames(String path, String searchPattern, SearchOption searchOption,
                                                            bool includeFiles, bool includeDirs)
{
    Contract.Requires(path != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    Contract.Ensures(Contract.Result<IEnumerable<String>>() != null);
    IEnumerable<String> iterator = FileSystemEnumerableFactory.CreateFileNameIterator(
        path, path, searchPattern, includeFiles, includeDirs, searchOption, true);
    return iterator;
}
// Retrieves the names of the logical drives on this machine in the
// form "C:\". The caller's application must have System Info permission.
[System.Security.SecuritySafeCritical] // auto-generated
public static String[] GetLogicalDrives()
{
    Contract.Ensures(Contract.Result<String[]>() != null);
#pragma warning disable 618
    new SecurityPermission(SecurityPermissionFlag.UnmanagedCode).Demand();
#pragma warning restore 618
    int drives = Win32Native.GetLogicalDrives();
    if (drives == 0)
        __Error.WinIOError();
    // First pass over the bitmask: count set bits to size the result array.
    uint driveBits = (uint)drives;
    int driveCount = 0;
    while (driveBits != 0) {
        if ((driveBits & 1u) != 0)
            driveCount++;
        driveBits >>= 1;
    }
    String[] result = new String[driveCount];
    char[] rootChars = new char[] { 'A', ':', '\\' };
    // Second pass: emit "X:\" for every set bit, advancing the drive letter
    // for each bit position whether it is set or not.
    driveBits = (uint)drives;
    int resultIndex = 0;
    while (driveBits != 0) {
        if ((driveBits & 1u) != 0) {
            result[resultIndex++] = new String(rootChars);
        }
        driveBits >>= 1;
        rootChars[0]++;
    }
    return result;
}
// Returns the root portion ("C:\", "\\server\share\") of the fully
// qualified form of the given path, after demanding path-discovery
// permission on that root.
[System.Security.SecuritySafeCritical]
public static String GetDirectoryRoot(String path) {
    if (path==null)
        throw new ArgumentNullException("path");
    Contract.EndContractBlock();
    // Normalize first so the root is computed from the absolute path.
    String fullPath = Path.GetFullPathInternal(path);
    String root = fullPath.Substring(0, Path.GetRootLength(fullPath));
    String demandPath = GetDemandDir(root, true);
#if FEATURE_CORECLR
    FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.PathDiscovery, path, demandPath);
    state.EnsureState();
#else
    FileIOPermission.QuickDemand(FileIOPermissionAccess.PathDiscovery, demandPath, false, false);
#endif
    return root;
}
// Returns the root portion of the given path without any security checks;
// a null path yields null (unlike the public GetDirectoryRoot, which throws).
internal static String InternalGetDirectoryRoot(String path) {
    if (path == null)
        return null;
    int rootLength = Path.GetRootLength(path);
    return path.Substring(0, rootLength);
}
/*===============================CurrentDirectory===============================
**Action: Provides a getter and setter for the current directory. The original
** current directory is the one from which the process was started.
**Returns: The current directory (from the getter). Void from the setter.
**Arguments: The directory to switch to (setter).
**Exceptions:
==============================================================================*/
[System.Security.SecuritySafeCritical]
public static String GetCurrentDirectory()
{
    // Full host/security checks are requested (checkHost: true).
    String currentDirectory = InternalGetCurrentDirectory(true);
    return currentDirectory;
}
// Trusted-caller variant of GetCurrentDirectory: skips the host/security
// checks (checkHost: false).
[System.Security.SecurityCritical]
internal static String UnsafeGetCurrentDirectory()
{
    String currentDirectory = InternalGetCurrentDirectory(false);
    return currentDirectory;
}
// Shared worker for GetCurrentDirectory/UnsafeGetCurrentDirectory: fetches
// the process current directory and, when checkHost is true, demands
// path-discovery permission on it before returning.
[System.Security.SecuritySafeCritical]
private static string InternalGetCurrentDirectory(bool checkHost)
{
    string currentDirectory = (
#if FEATURE_PATHCOMPAT
        // Legacy path handling is opt-in via AppContext switch.
        AppContextSwitches.UseLegacyPathHandling ? LegacyGetCurrentDirectory() :
#endif
        NewGetCurrentDirectory());
    string demandPath = GetDemandDir(currentDirectory, true);
#if FEATURE_CORECLR
    if (checkHost)
    {
        FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.PathDiscovery, String.Empty, demandPath);
        state.EnsureState();
    }
#else
    FileIOPermission.QuickDemand(FileIOPermissionAccess.PathDiscovery, demandPath, false, false);
#endif
    return currentDirectory;
}
#if FEATURE_PATHCOMPAT
// Pre-longpath implementation of GetCurrentDirectory: bounded by MaxPath and
// expands 8.3 short names via GetLongPathName.
[System.Security.SecurityCritical]
private static String LegacyGetCurrentDirectory()
{
    StringBuilder sb = StringBuilderCache.Acquire(Path.MaxPath + 1);
    if (Win32Native.GetCurrentDirectory(sb.Capacity, sb) == 0)
        __Error.WinIOError();
    String currentDirectory = sb.ToString();
    // Note that if we have somehow put our command prompt into short
    // file name mode (ie, by running edlin or a DOS grep, etc), then
    // this will return a short file name.
    if (currentDirectory.IndexOf('~') >= 0) {
        int r = Win32Native.GetLongPathName(currentDirectory, sb, sb.Capacity);
        if (r == 0 || r >= Path.MaxPath) {
            int errorCode = Marshal.GetLastWin32Error();
            if (r >= Path.MaxPath)
                errorCode = Win32Native.ERROR_FILENAME_EXCED_RANGE;
            // A handful of errors are tolerated: the short name is kept as-is.
            if (errorCode != Win32Native.ERROR_FILE_NOT_FOUND &&
                errorCode != Win32Native.ERROR_PATH_NOT_FOUND &&
                errorCode != Win32Native.ERROR_INVALID_FUNCTION && // by design - enough said.
                errorCode != Win32Native.ERROR_ACCESS_DENIED)
                __Error.WinIOError(errorCode, String.Empty);
        }
        currentDirectory = sb.ToString();
    }
    StringBuilderCache.Release(sb);
    // NOTE(review): demandPath is never read. Presumably GetDemandDir is kept
    // for its validation side effects (e.g. throwing on an overlong path) —
    // confirm before removing this seemingly dead assignment.
    String demandPath = GetDemandDir(currentDirectory, true);
    return currentDirectory;
}
#endif // FEATURE_PATHCOMPAT
// Long-path-aware implementation of GetCurrentDirectory: grows the buffer to
// whatever size the OS reports instead of capping at MaxPath.
[System.Security.SecurityCritical]
private static string NewGetCurrentDirectory()
{
    using (StringBuffer buffer = new StringBuffer(PathInternal.MaxShortPath))
    {
        uint result = 0;
        while ((result = Win32Native.GetCurrentDirectoryW(buffer.CharCapacity, buffer.GetHandle())) > buffer.CharCapacity)
        {
            // Reported size is greater than the buffer size. Increase the capacity.
            // The size returned includes the null only if more space is needed (this case).
            buffer.EnsureCharCapacity(result);
        }
        if (result == 0)
            __Error.WinIOError();
        buffer.Length = result;
#if !PLATFORM_UNIX
        // '~' suggests an 8.3 short name; expand it to the long form.
        if (buffer.Contains('~'))
            return LongPathHelper.GetLongPathName(buffer);
#endif
        return buffer.ToString();
    }
}
#if FEATURE_CORECLR
[System.Security.SecurityCritical] // auto-generated
#else
[System.Security.SecuritySafeCritical]
#endif
// Sets the process-wide current directory. This affects the whole process
// (and other appdomains in it), hence the unmanaged-code demand below.
public static void SetCurrentDirectory(String path)
{
    if (path==null)
        // FIX: the exception previously reported parameter name "value"
        // (a leftover from a property-era implementation); this method's
        // parameter is named "path".
        throw new ArgumentNullException("path");
    if (path.Length==0)
        throw new ArgumentException(Environment.GetResourceString("Argument_PathEmpty"));
    Contract.EndContractBlock();
    if (path.Length >= Path.MaxPath)
        throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
    // This will have some large effects on the rest of the runtime
    // and other appdomains in this process. Demand unmanaged code.
#pragma warning disable 618
    new SecurityPermission(SecurityPermissionFlag.UnmanagedCode).Demand();
#pragma warning restore 618
    String fulldestDirName = Path.GetFullPathInternal(path);
    if (!Win32Native.SetCurrentDirectory(fulldestDirName)) {
        // If path doesn't exist, this sets last error to 2 (File
        // not Found). LEGACY: This may potentially have worked correctly
        // on Win9x, maybe.
        int errorCode = Marshal.GetLastWin32Error();
        if (errorCode == Win32Native.ERROR_FILE_NOT_FOUND)
            errorCode = Win32Native.ERROR_PATH_NOT_FOUND;
        __Error.WinIOError(errorCode, fulldestDirName);
    }
}
// Moves (renames) a directory; full security checks are performed.
[System.Security.SecuritySafeCritical]
public static void Move(String sourceDirName,String destDirName) {
    InternalMove(sourceDirName, destDirName, checkHost: true);
}
// Trusted-caller variant of Move: skips host security checks.
[System.Security.SecurityCritical]
internal static void UnsafeMove(String sourceDirName,String destDirName) {
    InternalMove(sourceDirName, destDirName, checkHost: false);
}
// Shared worker for Move/UnsafeMove: validates both paths, demands the
// appropriate permissions on their normalized forms, then performs the
// rename via MoveFile. Source and destination must differ and share a root.
[System.Security.SecurityCritical]
private static void InternalMove(String sourceDirName,String destDirName,bool checkHost) {
    if (sourceDirName==null)
        throw new ArgumentNullException("sourceDirName");
    if (sourceDirName.Length==0)
        throw new ArgumentException(Environment.GetResourceString("Argument_EmptyFileName"), "sourceDirName");
    if (destDirName==null)
        throw new ArgumentNullException("destDirName");
    if (destDirName.Length==0)
        throw new ArgumentException(Environment.GetResourceString("Argument_EmptyFileName"), "destDirName");
    Contract.EndContractBlock();
    String fullsourceDirName = Path.GetFullPathInternal(sourceDirName);
    String sourcePath = GetDemandDir(fullsourceDirName, false);
    if (PathInternal.IsDirectoryTooLong(sourcePath))
        throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
    String fulldestDirName = Path.GetFullPathInternal(destDirName);
    String destPath = GetDemandDir(fulldestDirName, false);
    if (PathInternal.IsDirectoryTooLong(destPath))
        throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
#if FEATURE_CORECLR
    if (checkHost) {
        FileSecurityState sourceState = new FileSecurityState(FileSecurityStateAccess.Write | FileSecurityStateAccess.Read, sourceDirName, sourcePath);
        FileSecurityState destState = new FileSecurityState(FileSecurityStateAccess.Write, destDirName, destPath);
        sourceState.EnsureState();
        destState.EnsureState();
    }
#else
    FileIOPermission.QuickDemand(FileIOPermissionAccess.Write | FileIOPermissionAccess.Read, sourcePath, false, false);
    FileIOPermission.QuickDemand(FileIOPermissionAccess.Write, destPath, false, false);
#endif
    if (String.Compare(sourcePath, destPath, StringComparison.OrdinalIgnoreCase) == 0)
        throw new IOException(Environment.GetResourceString("IO.IO_SourceDestMustBeDifferent"));
    String sourceRoot = Path.GetPathRoot(sourcePath);
    String destinationRoot = Path.GetPathRoot(destPath);
    if (String.Compare(sourceRoot, destinationRoot, StringComparison.OrdinalIgnoreCase) != 0)
        throw new IOException(Environment.GetResourceString("IO.IO_SourceDestMustHaveSameRoot"));
    // FIX: move using the normalized full paths on which all the security
    // demands and root checks above were performed. Passing the raw user
    // strings here allowed a current-directory change (or relative-path
    // quirks) between normalization and the move to act on different paths
    // than the ones that were validated.
    if (!Win32Native.MoveFile(fullsourceDirName, fulldestDirName))
    {
        int hr = Marshal.GetLastWin32Error();
        if (hr == Win32Native.ERROR_FILE_NOT_FOUND) // Source dir not found
        {
            hr = Win32Native.ERROR_PATH_NOT_FOUND;
            __Error.WinIOError(hr, fullsourceDirName);
        }
        // This check was originally put in for Win9x (unfortunately without special casing it to be for Win9x only). We can't change the NT codepath now for backcomp reasons.
        if (hr == Win32Native.ERROR_ACCESS_DENIED) // WinNT throws IOException. This check is for Win9x. We can't change it for backcomp.
            throw new IOException(Environment.GetResourceString("UnauthorizedAccess_IODenied_Path", sourceDirName), Win32Native.MakeHRFromErrorCode(hr));
        __Error.WinIOError(hr, String.Empty);
    }
}
[System.Security.SecuritySafeCritical]
public static void Delete(String path)
{
    // Non-recursive delete: normalise the caller-supplied path first, then
    // hand off to the core implementation (host checks enabled). The raw
    // "path" is kept for user-facing exception messages.
    var normalizedPath = Path.GetFullPathInternal(path);
    Delete(normalizedPath, path, false, true);
}
[System.Security.SecuritySafeCritical]
public static void Delete(String path, bool recursive)
{
    // Same as Delete(String), but the caller chooses whether the contents
    // are removed recursively. Host security checks remain enabled.
    var normalizedPath = Path.GetFullPathInternal(path);
    Delete(normalizedPath, path, recursive, true);
}
[System.Security.SecurityCritical]
internal static void UnsafeDelete(String path, bool recursive)
{
    // Trusted-caller variant: identical to Delete(String, bool) except the
    // host security check is skipped (checkHost: false).
    var normalizedPath = Path.GetFullPathInternal(path);
    Delete(normalizedPath, path, recursive, false);
}
// Called from DirectoryInfo as well. FullPath is fully qualified,
// while the user path is used for feedback in exceptions.
//
// Performs the security demand for the delete, resolves the target's
// attributes, and then delegates the actual removal to DeleteHelper.
// Reparse points (symbolic links, mount points) are never traversed:
// recursion is forced off so only the link itself is removed.
[System.Security.SecurityCritical] // auto-generated
internal static void Delete(String fullPath, String userPath, bool recursive, bool checkHost)
{
    String demandPath;
    // If not recursive, do permission check only on this directory
    // else check for the whole directory structure rooted below
    demandPath = GetDemandDir(fullPath, !recursive);
#if FEATURE_CORECLR
    if (checkHost)
    {
        FileSecurityState state = new FileSecurityState(FileSecurityStateAccess.Write, userPath, demandPath);
        state.EnsureState();
    }
#else
    // Make sure we have write permission to this directory
    new FileIOPermission(FileIOPermissionAccess.Write, new String[] { demandPath }, false, false ).Demand();
#endif
    // Do not recursively delete through reparse points. Perhaps in a
    // future version we will add a new flag to control this behavior,
    // but for now we're much safer if we err on the conservative side.
    // This applies to symbolic links and mount points.
    Win32Native.WIN32_FILE_ATTRIBUTE_DATA data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
    // FillAttributeInfo returns 0 on success; anything else is a Win32
    // error code which is surfaced through __Error.WinIOError below.
    int dataInitialised = File.FillAttributeInfo(fullPath, ref data, false, true);
    if (dataInitialised != 0) {
        // Ensure we throw a DirectoryNotFoundException.
        if (dataInitialised == Win32Native.ERROR_FILE_NOT_FOUND)
            dataInitialised = Win32Native.ERROR_PATH_NOT_FOUND;
        __Error.WinIOError(dataInitialised, fullPath);
    }
    // A reparse point is deleted in place, never followed.
    if (((FileAttributes)data.fileAttributes & FileAttributes.ReparsePoint) != 0)
        recursive = false;
    DeleteHelper(fullPath, userPath, recursive, true);
}
// Note that fullPath is fully qualified, while userPath may be
// relative. Use userPath for all exception messages to avoid leaking
// fully qualified path information.
//
// Recursively deletes the contents of fullPath (when recursive) and then
// the directory itself. Errors encountered while deleting children are
// remembered in "ex" so the walk continues as far as possible; the first
// captured exception is rethrown once the enumeration finishes.
// NOTE(review): "throw ex;" at the end rethrows a stored exception, so the
// original stack trace is replaced — unavoidable here since we are outside
// the catch block, but worth knowing when debugging.
[System.Security.SecurityCritical] // auto-generated
private static void DeleteHelper(String fullPath, String userPath, bool recursive, bool throwOnTopLevelDirectoryNotFound)
{
    bool r;
    int hr;
    Exception ex = null;
    // Do not recursively delete through reparse points. Perhaps in a
    // future version we will add a new flag to control this behavior,
    // but for now we're much safer if we err on the conservative side.
    // This applies to symbolic links and mount points.
    // Note the logic to check whether fullPath is a reparse point is
    // in Delete(String, String, bool), and will set "recursive" to false.
    // Note that Win32's DeleteFile and RemoveDirectory will just delete
    // the reparse point itself.
    if (recursive) {
        Win32Native.WIN32_FIND_DATA data = new Win32Native.WIN32_FIND_DATA();
        // Open a Find handle
        using (SafeFindHandle hnd = Win32Native.FindFirstFile(fullPath+Path.DirectorySeparatorCharAsString+"*", data)) {
            if (hnd.IsInvalid) {
                hr = Marshal.GetLastWin32Error();
                __Error.WinIOError(hr, fullPath);
            }
            do {
                bool isDir = (0!=(data.dwFileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY));
                if (isDir) {
                    // Skip ".", "..".
                    if (data.cFileName.Equals(".") || data.cFileName.Equals(".."))
                        continue;
                    // Recurse for all directories, unless they are
                    // reparse points. Do not follow mount points nor
                    // symbolic links, but do delete the reparse point
                    // itself.
                    bool shouldRecurse = (0 == (data.dwFileAttributes & (int) FileAttributes.ReparsePoint));
                    if (shouldRecurse) {
                        String newFullPath = Path.InternalCombine(fullPath, data.cFileName);
                        String newUserPath = Path.InternalCombine(userPath, data.cFileName);
                        try {
                            DeleteHelper(newFullPath, newUserPath, recursive, false);
                        }
                        catch(Exception e) {
                            // Remember only the first failure; keep walking.
                            if (ex == null) {
                                ex = e;
                            }
                        }
                    }
                    else {
                        // Check to see if this is a mount point, and
                        // unmount it.
                        if (data.dwReserved0 == Win32Native.IO_REPARSE_TAG_MOUNT_POINT) {
                            // Use full path plus a trailing '\'
                            String mountPoint = Path.InternalCombine(fullPath, data.cFileName + Path.DirectorySeparatorChar);
                            r = Win32Native.DeleteVolumeMountPoint(mountPoint);
                            if (!r) {
                                hr = Marshal.GetLastWin32Error();
                                // ERROR_PATH_NOT_FOUND is tolerated: the
                                // mount point may already be gone.
                                if (hr != Win32Native.ERROR_PATH_NOT_FOUND) {
                                    try {
                                        __Error.WinIOError(hr, data.cFileName);
                                    }
                                    catch(Exception e) {
                                        if (ex == null) {
                                            ex = e;
                                        }
                                    }
                                }
                            }
                        }
                        // RemoveDirectory on a symbolic link will
                        // remove the link itself.
                        String reparsePoint = Path.InternalCombine(fullPath, data.cFileName);
                        r = Win32Native.RemoveDirectory(reparsePoint);
                        if (!r) {
                            hr = Marshal.GetLastWin32Error();
                            if (hr != Win32Native.ERROR_PATH_NOT_FOUND) {
                                try {
                                    __Error.WinIOError(hr, data.cFileName);
                                }
                                catch(Exception e) {
                                    if (ex == null) {
                                        ex = e;
                                    }
                                }
                            }
                        }
                    }
                }
                else {
                    // Plain file: delete it; missing files are tolerated
                    // (another deleter may have raced us).
                    String fileName = Path.InternalCombine(fullPath, data.cFileName);
                    r = Win32Native.DeleteFile(fileName);
                    if (!r) {
                        hr = Marshal.GetLastWin32Error();
                        if (hr != Win32Native.ERROR_FILE_NOT_FOUND) {
                            try {
                                __Error.WinIOError(hr, data.cFileName);
                            }
                            catch (Exception e) {
                                if (ex == null) {
                                    ex = e;
                                }
                            }
                        }
                    }
                }
            } while (Win32Native.FindNextFile(hnd, data));
            // Make sure we quit with a sensible error.
            hr = Marshal.GetLastWin32Error();
        }
        // Surface the first child failure, if any, before touching the
        // directory itself.
        if (ex != null)
            throw ex;
        // ERROR_NO_MORE_FILES is the normal FindNextFile termination code.
        if (hr!=0 && hr!=Win32Native.ERROR_NO_MORE_FILES)
            __Error.WinIOError(hr, userPath);
    }
    r = Win32Native.RemoveDirectory(fullPath);
    if (!r) {
        hr = Marshal.GetLastWin32Error();
        if (hr == Win32Native.ERROR_FILE_NOT_FOUND) // A dubious error code.
            hr = Win32Native.ERROR_PATH_NOT_FOUND;
        // This check was originally put in for Win9x (unfortunately without special casing it to be for Win9x only). We can't change the NT codepath now for backcomp reasons.
        if (hr == Win32Native.ERROR_ACCESS_DENIED)
            throw new IOException(Environment.GetResourceString("UnauthorizedAccess_IODenied_Path", userPath));
        // don't throw the DirectoryNotFoundException since this is a subdir and there could be a race condition
        // between two Directory.Delete callers
        if (hr == Win32Native.ERROR_PATH_NOT_FOUND && !throwOnTopLevelDirectoryNotFound)
            return;
        __Error.WinIOError(hr, fullPath);
    }
}
#if !FEATURE_CORECLR
// WinNT only. Win9x this code will not work.
//
// Opens a writable SafeFileHandle to the directory at "path".
// FILE_FLAG_BACKUP_SEMANTICS is passed to CreateFile, which is what allows
// a directory (rather than a file) to be opened. Volume roots are refused.
// NOTE(review): the inner "#if !FEATURE_CORECLR" is redundant — the whole
// method is already inside the same conditional.
[System.Security.SecurityCritical] // auto-generated
private static SafeFileHandle OpenHandle(String path)
{
    String fullPath = Path.GetFullPathInternal(path);
    String root = Path.GetPathRoot(fullPath);
    // Refuse drive roots such as "C:\" — assumes root has at least two
    // characters here (drive-letter form); UNC roots fail the equality test.
    if (root == fullPath && root[1] == Path.VolumeSeparatorChar)
        throw new ArgumentException(Environment.GetResourceString("Arg_PathIsVolume"));
#if !FEATURE_CORECLR
    FileIOPermission.QuickDemand(FileIOPermissionAccess.Write, GetDemandDir(fullPath, true), false, false);
#endif
    SafeFileHandle handle = Win32Native.SafeCreateFile (
        fullPath,
        GENERIC_WRITE,
        (FileShare) (FILE_SHARE_WRITE|FILE_SHARE_DELETE),
        null,
        FileMode.Open,
        FILE_FLAG_BACKUP_SEMANTICS,
        IntPtr.Zero
    );
    if (handle.IsInvalid) {
        int hr = Marshal.GetLastWin32Error();
        __Error.WinIOError(hr, fullPath);
    }
    return handle;
}
#endif // !FEATURE_CORECLR
// Win32 constants mirrored from the Windows SDK, used by the native
// interop above (OpenHandle's CreateFile call in particular).
private const int FILE_ATTRIBUTE_DIRECTORY = 0x00000010;
private const int GENERIC_WRITE = unchecked((int)0x40000000); // CreateFile desired-access: write
private const int FILE_SHARE_WRITE = 0x00000002;  // CreateFile share mode
private const int FILE_SHARE_DELETE = 0x00000004; // CreateFile share mode
private const int OPEN_EXISTING = 0x00000003;     // CreateFile disposition (not referenced in visible code)
private const int FILE_FLAG_BACKUP_SEMANTICS = 0x02000000; // passed by OpenHandle when opening a directory
}
}
| |
using System;
using System.Diagnostics;
using System.Linq;
using Mono.Cecil;
using NUnit.Framework;
// ReSharper disable UnusedMember.Global
// ReSharper disable ConvertToAutoPropertyWhenPossible
// ReSharper disable UnusedVariable
[TestFixture]
public class HasEqualityCheckerTests
{
    // Cecil views of this very class's properties and fields: each test
    // reads the IL of one of the fixture property setters below and asks
    // HasEqualityChecker whether an equality guard is already present.
    Mono.Collections.Generic.Collection<PropertyDefinition> properties;
    Mono.Collections.Generic.Collection<FieldDefinition> fields;

    public HasEqualityCheckerTests()
    {
        // Load the compiled test assembly and locate this type's definition.
        var moduleDefinition = ModuleDefinition.ReadModule(GetType().Assembly.Location);
        var typeDefinition = moduleDefinition.Types.First(definition => definition.Name == "HasEqualityCheckerTests");
        properties = typeDefinition.Properties;
        fields = typeDefinition.Fields;
    }

    [Test]
    public void EqualityShortCutTest()
    {
        var instructions = GetInstructions("EqualityShortCut");
        var field = GetField("intField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualsNoFieldTest()
    {
        // Fix: the "field" local fetched here was never used — the checker is
        // deliberately invoked with a null field for this scenario.
        var instructions = GetInstructions("EqualsNoField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, null));
    }

    [Test]
    public void NoEqualsNoFieldTest()
    {
        // Fix: removed the unused "field" local (null is passed on purpose).
        var instructions = GetInstructions("NoEqualsNoField");
        Assert.IsFalse(HasEqualityChecker.AlreadyHasEquality(instructions, null));
    }

    [Test]
    public void EqualityShortCutInverseTest()
    {
        var instructions = GetInstructions("EqualityShortCutInverse");
        var field = GetField("intField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualityNestedTest()
    {
        var instructions = GetInstructions("EqualityNested");
        var field = GetField("intField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualityNestedInverseTest()
    {
        var instructions = GetInstructions("EqualityNestedInverse");
        var field = GetField("intField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualsShortCutTest()
    {
        var instructions = GetInstructions("EqualsShortCut");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualsShortCutInverseTest()
    {
        var instructions = GetInstructions("EqualsShortCutInverse");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualsNestedInverseTest()
    {
        var instructions = GetInstructions("EqualsNestedInverse");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void EqualsNestedTest()
    {
        var instructions = GetInstructions("EqualsNested");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void StringEqualsShortCutTest()
    {
        var instructions = GetInstructions("StringEqualsShortCut");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void StringEqualsShortCutInverseTest()
    {
        var instructions = GetInstructions("StringEqualsShortCutInverse");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void StringEqualsNestedTest()
    {
        var instructions = GetInstructions("StringEqualsNested");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void StringEqualsOrdinalTest()
    {
        var instructions = GetInstructions("StringEqualsOrdinal");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void StringEqualsNestedInverseTest()
    {
        var instructions = GetInstructions("StringEqualsNestedInverse");
        var field = GetField("stringField");
        Assert.IsTrue(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    [Test]
    public void NoEqualityTest()
    {
        var instructions = GetInstructions("NoEquality");
        var field = GetField("stringField");
        Assert.IsFalse(HasEqualityChecker.AlreadyHasEquality(instructions, field));
    }

    // Resolves one of the fixture properties below by name.
    PropertyDefinition GetInstructions(string equalityShortcut)
    {
        return properties.First(definition => definition.Name == equalityShortcut);
    }

    // Resolves one of the backing fields below by name.
    FieldDefinition GetField(string equalityShortcut)
    {
        return fields.First(x => x.Name == equalityShortcut);
    }

    // ---------------------------------------------------------------------
    // Fixture members. The property setters below are test DATA: their IL is
    // decompiled by the tests above, so their bodies must not be refactored.
    // ---------------------------------------------------------------------
    int intField;
    string stringField;

    public int EqualityShortCut
    {
        get { return intField; }
        set
        {
            if (value == intField)
            {
                return;
            }
            intField = value;
        }
    }

    public int EqualityShortCutInverse
    {
        get { return intField; }
        set
        {
            if (intField == value)
            {
                return;
            }
            intField = value;
        }
    }

    public int EqualityNested
    {
        get { return intField; }
        set
        {
            if (value != intField)
            {
                intField = value;
            }
        }
    }

    public int EqualityNestedInverse
    {
        get { return intField; }
        set
        {
            if (intField != value)
            {
                intField = value;
            }
        }
    }

    public string EqualsShortCut
    {
        get { return stringField; }
        set
        {
            if (Equals(value, stringField))
            {
                return;
            }
            stringField = value;
        }
    }

    public string EqualsShortCutInverse
    {
        get { return stringField; }
        set
        {
            if (Equals(stringField, value))
            {
                return;
            }
            stringField = value;
        }
    }

    public string EqualsNested
    {
        get { return stringField; }
        set
        {
            if (!Equals(value, stringField))
            {
                stringField = value;
            }
        }
    }

    public string EqualsNestedInverse
    {
        get { return stringField; }
        set
        {
            if (!Equals(stringField, value))
            {
                stringField = value;
            }
        }
    }

    public string NoEqualsNoField
    {
        get { return ""; }
        set { }
    }

    public string EqualsNoField
    {
        get { return ""; }
        set
        {
            if (EqualsNoField == value)
            {
                // ReSharper disable once RedundantJumpStatement
                return;
            }
            Debug.WriteLine(value);
        }
    }

    public string StringEqualsShortCut
    {
        get { return stringField; }
        set
        {
            if (string.Equals(value, stringField))
            {
                return;
            }
            stringField = value;
        }
    }

    public string StringEqualsShortCutInverse
    {
        get { return stringField; }
        set
        {
            if (string.Equals(stringField, value))
            {
                return;
            }
            stringField = value;
        }
    }

    public string StringEqualsNested
    {
        get { return stringField; }
        set
        {
            if (!string.Equals(value, stringField))
            {
                stringField = value;
            }
        }
    }

    public string StringEqualsNestedInverse
    {
        get { return stringField; }
        set
        {
            if (!string.Equals(stringField, value))
            {
                stringField = value;
            }
        }
    }

    public string StringEqualsOrdinal
    {
        get { return stringField; }
        set
        {
            if (!string.Equals(stringField, value, StringComparison.Ordinal))
            {
                stringField = value;
            }
        }
    }

    public string NoEquality
    {
        get { return stringField; }
        set
        {
            stringField = value;
        }
    }
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using osu.Framework.Extensions.TypeExtensions;
using osu.Framework.Screens;
using osu.Framework.Utils;
using osu.Game.Beatmaps;
using osu.Game.Beatmaps.ControlPoints;
using osu.Game.Replays;
using osu.Game.Rulesets.Judgements;
using osu.Game.Rulesets.Objects;
using osu.Game.Rulesets.Objects.Types;
using osu.Game.Rulesets.Osu.Objects;
using osu.Game.Rulesets.Osu.Replays;
using osu.Game.Rulesets.Replays;
using osu.Game.Rulesets.Scoring;
using osu.Game.Scoring;
using osu.Game.Screens.Play;
using osu.Game.Tests.Visual;
using osuTK;
namespace osu.Game.Rulesets.Osu.Tests
{
public class TestSceneOutOfOrderHits : RateAdjustedBeatmapTestScene
{
    private const double early_miss_window = 1000; // time after -1000 to -500 is considered a miss
    private const double late_miss_window = 500; // time after +500 is considered a miss

    /// <summary>
    /// Tests clicking a future circle before the first circle's start time, while the first circle HAS NOT been judged.
    /// </summary>
    [Test]
    public void TestClickSecondCircleBeforeFirstCircleTime()
    {
        const double time_first_circle = 1500;
        const double time_second_circle = 1600;
        Vector2 positionFirstCircle = Vector2.Zero;
        Vector2 positionSecondCircle = new Vector2(80);
        var hitObjects = new List<OsuHitObject>
        {
            new TestHitCircle
            {
                StartTime = time_first_circle,
                Position = positionFirstCircle
            },
            new TestHitCircle
            {
                StartTime = time_second_circle,
                Position = positionSecondCircle
            }
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_first_circle - 100, Position = positionSecondCircle, Actions = { OsuAction.LeftButton } }
        });
        addJudgementAssert(hitObjects[0], HitResult.Miss);
        addJudgementAssert(hitObjects[1], HitResult.Miss);
        addJudgementOffsetAssert(hitObjects[0], late_miss_window);
    }

    /// <summary>
    /// Tests clicking a future circle at the first circle's start time, while the first circle HAS NOT been judged.
    /// </summary>
    [Test]
    public void TestClickSecondCircleAtFirstCircleTime()
    {
        const double time_first_circle = 1500;
        const double time_second_circle = 1600;
        Vector2 positionFirstCircle = Vector2.Zero;
        Vector2 positionSecondCircle = new Vector2(80);
        var hitObjects = new List<OsuHitObject>
        {
            new TestHitCircle
            {
                StartTime = time_first_circle,
                Position = positionFirstCircle
            },
            new TestHitCircle
            {
                StartTime = time_second_circle,
                Position = positionSecondCircle
            }
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_first_circle, Position = positionSecondCircle, Actions = { OsuAction.LeftButton } }
        });
        addJudgementAssert(hitObjects[0], HitResult.Miss);
        addJudgementAssert(hitObjects[1], HitResult.Great);
        addJudgementOffsetAssert(hitObjects[0], 0);
    }

    /// <summary>
    /// Tests clicking a future circle after the first circle's start time, while the first circle HAS NOT been judged.
    /// </summary>
    [Test]
    public void TestClickSecondCircleAfterFirstCircleTime()
    {
        const double time_first_circle = 1500;
        const double time_second_circle = 1600;
        Vector2 positionFirstCircle = Vector2.Zero;
        Vector2 positionSecondCircle = new Vector2(80);
        var hitObjects = new List<OsuHitObject>
        {
            new TestHitCircle
            {
                StartTime = time_first_circle,
                Position = positionFirstCircle
            },
            new TestHitCircle
            {
                StartTime = time_second_circle,
                Position = positionSecondCircle
            }
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_first_circle + 100, Position = positionSecondCircle, Actions = { OsuAction.LeftButton } }
        });
        addJudgementAssert(hitObjects[0], HitResult.Miss);
        addJudgementAssert(hitObjects[1], HitResult.Great);
        addJudgementOffsetAssert(hitObjects[0], 100);
    }

    /// <summary>
    /// Tests clicking a future circle before the first circle's start time, while the first circle HAS been judged.
    /// </summary>
    [Test]
    public void TestClickSecondCircleBeforeFirstCircleTimeWithFirstCircleJudged()
    {
        const double time_first_circle = 1500;
        const double time_second_circle = 1600;
        Vector2 positionFirstCircle = Vector2.Zero;
        Vector2 positionSecondCircle = new Vector2(80);
        var hitObjects = new List<OsuHitObject>
        {
            new TestHitCircle
            {
                StartTime = time_first_circle,
                Position = positionFirstCircle
            },
            new TestHitCircle
            {
                StartTime = time_second_circle,
                Position = positionSecondCircle
            }
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_first_circle - 200, Position = positionFirstCircle, Actions = { OsuAction.LeftButton } },
            new OsuReplayFrame { Time = time_first_circle - 100, Position = positionSecondCircle, Actions = { OsuAction.RightButton } }
        });
        addJudgementAssert(hitObjects[0], HitResult.Great);
        addJudgementAssert(hitObjects[1], HitResult.Great);
        addJudgementOffsetAssert(hitObjects[0], -200); // time_first_circle - 200
        // Fix: this assertion previously duplicated hitObjects[0]; per its own
        // comment it is meant to check the SECOND circle, which is clicked at
        // time_first_circle - 100 = time_second_circle - 200.
        addJudgementOffsetAssert(hitObjects[1], -200); // (time_first_circle - 100) - time_second_circle
    }

    /// <summary>
    /// Tests clicking a future circle after a slider's start time, but hitting all slider ticks.
    /// </summary>
    [Test]
    public void TestMissSliderHeadAndHitAllSliderTicks()
    {
        const double time_slider = 1500;
        const double time_circle = 1510;
        Vector2 positionCircle = Vector2.Zero;
        Vector2 positionSlider = new Vector2(80);
        var hitObjects = new List<OsuHitObject>
        {
            new TestHitCircle
            {
                StartTime = time_circle,
                Position = positionCircle
            },
            new TestSlider
            {
                StartTime = time_slider,
                Position = positionSlider,
                Path = new SliderPath(PathType.Linear, new[]
                {
                    Vector2.Zero,
                    new Vector2(25, 0),
                })
            }
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_slider, Position = positionCircle, Actions = { OsuAction.LeftButton } },
            new OsuReplayFrame { Time = time_slider + 10, Position = positionSlider, Actions = { OsuAction.RightButton } }
        });
        addJudgementAssert(hitObjects[0], HitResult.Great);
        addJudgementAssert(hitObjects[1], HitResult.Great);
        addJudgementAssert("slider head", () => ((Slider)hitObjects[1]).HeadCircle, HitResult.Miss);
        addJudgementAssert("slider tick", () => ((Slider)hitObjects[1]).NestedHitObjects[1] as SliderTick, HitResult.Great);
    }

    /// <summary>
    /// Tests clicking hitting future slider ticks before a circle.
    /// </summary>
    [Test]
    public void TestHitSliderTicksBeforeCircle()
    {
        const double time_slider = 1500;
        const double time_circle = 1510;
        Vector2 positionCircle = Vector2.Zero;
        Vector2 positionSlider = new Vector2(80);
        var hitObjects = new List<OsuHitObject>
        {
            new TestHitCircle
            {
                StartTime = time_circle,
                Position = positionCircle
            },
            new TestSlider
            {
                StartTime = time_slider,
                Position = positionSlider,
                Path = new SliderPath(PathType.Linear, new[]
                {
                    Vector2.Zero,
                    new Vector2(25, 0),
                })
            }
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_slider, Position = positionSlider, Actions = { OsuAction.LeftButton } },
            new OsuReplayFrame { Time = time_circle + late_miss_window - 100, Position = positionCircle, Actions = { OsuAction.RightButton } },
            new OsuReplayFrame { Time = time_circle + late_miss_window - 90, Position = positionSlider, Actions = { OsuAction.LeftButton } },
        });
        addJudgementAssert(hitObjects[0], HitResult.Great);
        addJudgementAssert(hitObjects[1], HitResult.Great);
        addJudgementAssert("slider head", () => ((Slider)hitObjects[1]).HeadCircle, HitResult.Great);
        addJudgementAssert("slider tick", () => ((Slider)hitObjects[1]).NestedHitObjects[1] as SliderTick, HitResult.Great);
    }

    /// <summary>
    /// Tests clicking a future circle before a spinner.
    /// </summary>
    [Test]
    public void TestHitCircleBeforeSpinner()
    {
        const double time_spinner = 1500;
        const double time_circle = 1800;
        Vector2 positionCircle = Vector2.Zero;
        var hitObjects = new List<OsuHitObject>
        {
            new TestSpinner
            {
                StartTime = time_spinner,
                Position = new Vector2(256, 192),
                EndTime = time_spinner + 1000,
            },
            new TestHitCircle
            {
                StartTime = time_circle,
                Position = positionCircle
            },
        };
        performTest(hitObjects, new List<ReplayFrame>
        {
            new OsuReplayFrame { Time = time_spinner - 100, Position = positionCircle, Actions = { OsuAction.LeftButton } },
            new OsuReplayFrame { Time = time_spinner + 10, Position = new Vector2(236, 192), Actions = { OsuAction.RightButton } },
            new OsuReplayFrame { Time = time_spinner + 20, Position = new Vector2(256, 172), Actions = { OsuAction.RightButton } },
            new OsuReplayFrame { Time = time_spinner + 30, Position = new Vector2(276, 192), Actions = { OsuAction.RightButton } },
            new OsuReplayFrame { Time = time_spinner + 40, Position = new Vector2(256, 212), Actions = { OsuAction.RightButton } },
            new OsuReplayFrame { Time = time_spinner + 50, Position = new Vector2(236, 192), Actions = { OsuAction.RightButton } },
        });
        addJudgementAssert(hitObjects[0], HitResult.Great);
        addJudgementAssert(hitObjects[1], HitResult.Great);
    }

    // Asserts the final judgement type recorded for the given hitobject.
    private void addJudgementAssert(OsuHitObject hitObject, HitResult result)
    {
        AddAssert($"({hitObject.GetType().ReadableName()} @ {hitObject.StartTime}) judgement is {result}",
            () => judgementResults.Single(r => r.HitObject == hitObject).Type == result);
    }

    // Variant taking a lazily-resolved hitobject (e.g. a slider's nested objects).
    private void addJudgementAssert(string name, Func<OsuHitObject> hitObject, HitResult result)
    {
        AddAssert($"{name} judgement is {result}",
            () => judgementResults.Single(r => r.HitObject == hitObject()).Type == result);
    }

    // Asserts the judgement's time offset (hit time minus start time), with
    // a 100ms tolerance.
    private void addJudgementOffsetAssert(OsuHitObject hitObject, double offset)
    {
        AddAssert($"({hitObject.GetType().ReadableName()} @ {hitObject.StartTime}) judged at {offset}",
            () => Precision.AlmostEquals(judgementResults.Single(r => r.HitObject == hitObject).TimeOffset, offset, 100));
    }

    private ScoreAccessibleReplayPlayer currentPlayer;
    private List<JudgementResult> judgementResults;
    private bool allJudgedFired;

    // Loads a player with the given beatmap and replay, collects every
    // judgement produced, and waits until everything has been judged.
    private void performTest(List<OsuHitObject> hitObjects, List<ReplayFrame> frames)
    {
        AddStep("load player", () =>
        {
            Beatmap.Value = CreateWorkingBeatmap(new Beatmap<OsuHitObject>
            {
                HitObjects = hitObjects,
                BeatmapInfo =
                {
                    BaseDifficulty = new BeatmapDifficulty { SliderTickRate = 3 },
                    Ruleset = new OsuRuleset().RulesetInfo
                },
            });
            Beatmap.Value.Beatmap.ControlPointInfo.Add(0, new DifficultyControlPoint { SpeedMultiplier = 0.1f });
            var p = new ScoreAccessibleReplayPlayer(new Score { Replay = new Replay { Frames = frames } });
            p.OnLoadComplete += _ =>
            {
                p.ScoreProcessor.NewJudgement += result =>
                {
                    // Guard against results arriving from a previously loaded player.
                    if (currentPlayer == p) judgementResults.Add(result);
                };
                p.ScoreProcessor.AllJudged += () =>
                {
                    if (currentPlayer == p) allJudgedFired = true;
                };
            };
            LoadScreen(currentPlayer = p);
            allJudgedFired = false;
            judgementResults = new List<JudgementResult>();
        });
        AddUntilStep("Beatmap at 0", () => Beatmap.Value.Track.CurrentTime == 0);
        AddUntilStep("Wait until player is loaded", () => currentPlayer.IsCurrentScreen());
        AddUntilStep("Wait for all judged", () => allJudgedFired);
    }

    private class TestHitCircle : HitCircle
    {
        protected override HitWindows CreateHitWindows() => new TestHitWindows();
    }

    private class TestSlider : Slider
    {
        public TestSlider()
        {
            DefaultsApplied += () =>
            {
                HeadCircle.HitWindows = new TestHitWindows();
                TailCircle.HitWindows = new TestHitWindows();
            };
        }
    }

    private class TestSpinner : Spinner
    {
        protected override void ApplyDefaultsToSelf(ControlPointInfo controlPointInfo, BeatmapDifficulty difficulty)
        {
            base.ApplyDefaultsToSelf(controlPointInfo, difficulty);
            SpinsRequired = 1;
        }
    }

    private class TestHitWindows : HitWindows
    {
        private static readonly DifficultyRange[] ranges =
        {
            new DifficultyRange(HitResult.Great, 500, 500, 500),
            new DifficultyRange(HitResult.Miss, early_miss_window, early_miss_window, early_miss_window),
        };

        public override bool IsHitResultAllowed(HitResult result) => result == HitResult.Great || result == HitResult.Miss;

        protected override DifficultyRange[] GetRanges() => ranges;
    }

    private class ScoreAccessibleReplayPlayer : ReplayPlayer
    {
        public new ScoreProcessor ScoreProcessor => base.ScoreProcessor;

        protected override bool PauseOnFocusLost => false;

        public ScoreAccessibleReplayPlayer(Score score)
            : base(score, false, false)
        {
        }
    }
}
}
| |
#region License
// Copyright 2014-2014 Matthew Ducker
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
//
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using Obscur.Core.Cryptography.Entropy;
using Obscur.Core.Cryptography.Information;
using Obscur.Core.Cryptography.Information.EllipticCurve;
using Obscur.Core.Cryptography.Support;
using Obscur.Core.Cryptography.Support.Math;
using Obscur.Core.Cryptography.Support.Math.EllipticCurve;
using Obscur.Core.Cryptography.Support.Math.EllipticCurve.Multiplier;
using Obscur.Core.DTO;
namespace Obscur.Core.Cryptography.Signing.Primitives
{
/// <summary>
/// EC-DSA as described in X9.62 .
/// </summary>
/// <remarks>
/// Implementation is as recommended in X9.62 <see cref="http://www.x9.org/" />
/// (Accredited Standards Committee: American National Standard X9.62-2005,
/// Public Key Cryptography for the Financial Services Industry,
/// The Elliptic Curve Digital Signature Algorithm (ECDSA), November 16, 2005) .
/// </remarks>
public class ECDsaSigner : IDsa
{
protected static readonly ECMultiplier EcBasePointMultiplier = new FixedPointCombMultiplier();
private readonly ECKey _publicKey;
private readonly ECKey _privateKey;
private ECDomainParameters _ecDomain;
protected readonly IDsaKCalculator _kCalculator;
private readonly CsRng _random;
/// <summary>
/// Initialise for ECDSA signature generation.
/// </summary>
/// <param name="privateKey">
/// Private EC key used for signing (verification performed with corresponding public key).
/// </param>
/// <param name="random">
/// Supplier of random numbers (null for default is <see cref="StratCom.EntropySupplier"/>).
/// Not used if <paramref name="kCalculator"/> is deterministic.
/// </param>
/// <param name="kCalculator">Calculator utility for generating k value in signature generation.</param>
/// <exception cref="ArgumentNullException"><paramref name="privateKey"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="privateKey"/> is not a private key.</exception>
/// <seealso cref="HmacDsaKCalculator"/>
public ECDsaSigner(ECKey privateKey, CsRng random = null, IDsaKCalculator kCalculator = null)
{
    if (privateKey == null) {
        throw new ArgumentNullException("privateKey");
    }
    if (privateKey.PublicComponent) {
        throw new ArgumentException("EC private key required for signing.");
    }
    _privateKey = privateKey;
    _kCalculator = kCalculator ?? new RandomDsaKCalculator();
    if (_kCalculator.IsDeterministic == false) {
        // Random k source is only needed for non-deterministic calculators.
        _random = random ?? StratCom.EntropySupplier;
    }
    // Fix: unlike the other constructors, this one never resolved the EC
    // domain parameters, leaving _ecDomain null and causing a
    // NullReferenceException in GenerateSignature.
    SetupECDomain();
}
/// <summary>
/// Initialise for (either) ECDSA signature generation or verification.
/// </summary>
/// <param name="forSigning">
/// If <c>true</c>, the instance will be used for signing.
/// If <c>false</c>, it will be used for verification.
/// </param>
/// <param name="key">Individual EC key.</param>
/// <param name="random">
/// Supplier of random numbers (null for default is <see cref="StratCom.EntropySupplier"/>).
/// </param>
/// <param name="kCalculator">Calculator utility for generating k value in signature generation.</param>
/// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
/// <exception cref="ArgumentException"><paramref name="key"/> has the wrong component for the requested mode.</exception>
/// <seealso cref="HmacDsaKCalculator"/>
public ECDsaSigner(bool forSigning, ECKey key, CsRng random = null, IDsaKCalculator kCalculator = null)
{
    if (key == null) {
        throw new ArgumentNullException("key");
    }
    if (forSigning) {
        if (key.PublicComponent) {
            throw new ArgumentException("EC private key required for signing.");
        }
        _privateKey = key;
    } else {
        if (key.PublicComponent == false) {
            throw new ArgumentException("EC public key required for verification.");
        }
        // Fix: the verification key was validated but never stored, leaving
        // _publicKey null and breaking SetupECDomain() and VerifySignature().
        _publicKey = key;
    }
    _kCalculator = kCalculator ?? new RandomDsaKCalculator();
    if (forSigning && _kCalculator.IsDeterministic == false) {
        _random = random ?? StratCom.EntropySupplier;
    }
    SetupECDomain();
}
/// <summary>
/// Initialise for ECDSA signature generation and verification.
/// </summary>
/// <param name="publicKey">Public EC key (used for verifying) Null if not required.</param>
/// <param name="privateKey">Private EC key (used for signing). Null if not required.</param>
/// <param name="random">
/// Supplier of random numbers (null for default is <see cref="StratCom.EntropySupplier"/>).
/// </param>
/// <param name="kCalculator">Calculator utility for generating k value in signature generation.</param>
/// <exception cref="ArgumentNullException">Both keys are null — at least one is required.</exception>
/// <exception cref="ArgumentException">A supplied key has the wrong component type.</exception>
/// <seealso cref="HmacDsaKCalculator"/>
public ECDsaSigner(ECKey publicKey, ECKey privateKey, CsRng random = null, IDsaKCalculator kCalculator = null)
{
    // Fix: the original condition was inverted (it threw when BOTH keys were
    // supplied — the documented sign-and-verify use case — and accepted the
    // useless case of no keys at all). At least one key must be present.
    if (publicKey == null && privateKey == null) {
        throw new ArgumentNullException();
    }
    if (publicKey != null) {
        if (publicKey.PublicComponent == false) {
            throw new ArgumentException("Not a public EC key.", "publicKey");
        }
    }
    if (privateKey != null) {
        if (privateKey.PublicComponent) {
            throw new ArgumentException("Not a private EC key.", "privateKey");
        }
    }
    _publicKey = publicKey;
    _privateKey = privateKey;
    _kCalculator = kCalculator ?? new RandomDsaKCalculator();
    if (_kCalculator.IsDeterministic == false) {
        _random = random ?? StratCom.EntropySupplier;
    }
    SetupECDomain();
}
/// <summary>
/// Resolves the named-curve domain parameters for whichever key this
/// instance holds (public key preferred, private key otherwise) and caches
/// them in <c>_ecDomain</c>.
/// </summary>
private void SetupECDomain()
{
    ECKey curveSource = _publicKey ?? _privateKey;
    EcCurveInformation curveInfo;
    try {
        curveInfo = EcInformationStore.GetECCurveData(curveSource.CurveName);
    } catch (Exception e) {
        // Unknown or unsupported curve name.
        throw new ConfigurationInvalidException("Curve cannot be used in ECDSA.", e);
    }
    _ecDomain = curveInfo.GetParameters();
}
/// <inheritdoc />
/// <remarks>True only when a private key was supplied at construction.</remarks>
public bool SigningCapable
{
    get { return _privateKey != null; }
}

/// <inheritdoc />
/// <remarks>True only when a public key was supplied at construction.</remarks>
public bool VerificationCapable
{
    get { return _publicKey != null; }
}

/// <inheritdoc />
/// <remarks>Fixed identifier for this signer implementation.</remarks>
public string AlgorithmName
{
    get { return "ECDSA"; }
}
/// <inheritdoc />
/// <summary>
/// Generates an ECDSA signature (r, s) over <paramref name="message"/> using the
/// configured private key. Assumes the message has already been hashed by the
/// caller — TODO confirm with callers.
/// </summary>
/// <param name="message">Message (digest) bytes to sign.</param>
/// <param name="r">First component of the signature.</param>
/// <param name="s">Second component of the signature.</param>
public void GenerateSignature(byte[] message, out BigInteger r, out BigInteger s)
{
    // n = order of the base point of the curve's cyclic group.
    BigInteger n = _ecDomain.N;
    // e = message value truncated to the bit length of n.
    BigInteger e = CalculateE(n, message);
    // d = private scalar decoded from the stored key material.
    var d = new BigInteger(_privateKey.EncodedKey);
    if (_kCalculator.IsDeterministic) {
        // Deterministic calculators derive k from (n, d, message); no entropy needed.
        _kCalculator.Init(n, d, message);
    } else {
        _kCalculator.Init(n, _random);
    }
    // 5.3.2
    // Generate s
    // Loop until both r and s are non-zero, re-drawing k as needed.
    do {
        BigInteger k;
        // Generate r
        do {
            k = _kCalculator.NextK();
            // r is derived from the x-coordinate of k*G (normalized to affine form).
            ECPoint p = EcBasePointMultiplier.Multiply(_ecDomain.G, k).Normalize();
            // 5.3.3
            r = p.AffineXCoord.ToBigInteger().Mod(n);
        } while (r.SignValue == 0);
        // s = k^-1 * (e + d*r) mod n
        s = k.ModInverse(n).Multiply(e.Add(d.Multiply(r))).Mod(n);
    } while (s.SignValue == 0);
}
/// <inheritdoc />
/// <summary>
/// Verifies an ECDSA signature (r, s) over <paramref name="message"/> against the
/// configured public key.
/// </summary>
/// <param name="message">Message (digest) bytes the signature covers.</param>
/// <param name="r">First component of the signature.</param>
/// <param name="s">Second component of the signature.</param>
/// <returns>
/// <c>true</c> if the values <paramref name="r" /> and <paramref name="s" />
/// represent a valid ECDSA signature. Otherwise, <c>false</c>.
/// </returns>
public bool VerifySignature(
    byte[] message,
    BigInteger r,
    BigInteger s)
{
    BigInteger n = _ecDomain.N;
    // r and s should both in the range [1,n-1]
    if (r.SignValue < 1 || s.SignValue < 1
        || r.CompareTo(n) >= 0 || s.CompareTo(n) >= 0) {
        return false;
    }
    BigInteger e = CalculateE(n, message);
    // c = s^-1 mod n; u1 and u2 are the two scalar multipliers of the verification sum.
    BigInteger c = s.ModInverse(n);
    BigInteger u1 = e.Multiply(c).Mod(n);
    BigInteger u2 = r.Multiply(c).Mod(n);
    ECPoint G = _ecDomain.G;
    // Q = public point decoded from the stored key material.
    ECPoint Q = _ecDomain.Curve.DecodePoint(_publicKey.EncodedKey);
    // point = u1*G + u2*Q, computed with a combined multiply and normalized to affine form.
    ECPoint point = ECAlgorithms.SumOfTwoMultiplies(G, u1, Q, u2).Normalize();
    // The point at infinity can never yield a valid x-coordinate.
    if (point.IsInfinity) {
        return false;
    }
    // Signature is valid iff x(point) mod n equals r.
    BigInteger v = point.AffineXCoord.ToBigInteger().Mod(n);
    return v.Equals(r);
}
/// <summary>
/// Converts the message bytes into the integer e used by ECDSA, truncating it
/// to at most the bit length of the group order <paramref name="n"/>.
/// </summary>
/// <param name="n">Order of the base point.</param>
/// <param name="message">Message (digest) bytes, interpreted as an unsigned big-endian integer.</param>
/// <returns>The (possibly right-shifted) message value.</returns>
private static BigInteger CalculateE(
    BigInteger n,
    byte[] message)
{
    var e = new BigInteger(1, message);
    // Discard the low-order bits that exceed the bit length of n.
    int excessBits = message.Length * 8 - n.BitLength;
    return excessBits > 0 ? e.ShiftRight(excessBits) : e;
}
}
}
| |
/*
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License. See License.txt in the project root for license information.
*/
using System;
using System.Collections.Generic;
using Adxstudio.Xrm.Web.Mvc.Html;
using Adxstudio.Xrm.Web.UI.JsonConfiguration;
using Microsoft.Xrm.Sdk.Client;
namespace Adxstudio.Xrm.Web.UI.CrmEntityListView
{
	/// <summary>
	/// Settings needed to be able to retrieve a view and configure its display.
	/// </summary>
	public interface IViewConfiguration
	{
		/// <summary>
		/// Logical name of the entity associated with the view.
		/// </summary>
		string EntityName { get; set; }
		/// <summary>
		/// Logical name of the primary key attribute of the entity associated with the view.
		/// </summary>
		string PrimaryKeyName { get; set; }
		/// <summary>
		/// Name of a view (savedquery) record.
		/// </summary>
		string ViewName { get; set; }
		/// <summary>
		/// Unique identifier of a view (savedquery) record.
		/// </summary>
		Guid ViewId { get; set; }
		/// <summary>
		/// Unique identifier of the configuration.
		/// </summary>
		Guid Id { get; set; }
		/// <summary>
		/// Number of records per page.
		/// </summary>
		int PageSize { get; set; }
		/// <summary>
		/// Controls the visibility of the pager.
		/// </summary>
		bool? DataPagerEnabled { get; set; }
		/// <summary>
		/// XML that defines the layout of the view expressed in the LayoutXml language. See http://msdn.microsoft.com/en-us/library/gg334522.aspx
		/// </summary>
		string LayoutXml { get; set; }
		/// <summary>
		/// XML that defines the query expressed in the FetchXml language. See http://msdn.microsoft.com/en-us/library/gg309405.aspx
		/// </summary>
		string FetchXml { get; set; }
		/// <summary>
		/// Override the display name of the view. Default display name uses the ViewName.
		/// </summary>
		string ViewDisplayName { get; set; }
		/// <summary>
		/// Override the column display names and widths
		/// </summary>
		List<JsonConfiguration.ViewColumn> ColumnOverrides { get; set; }
		/// <summary>
		/// Indicates whether entity permission rules should be applied to the query.
		/// </summary>
		bool EnableEntityPermissions { get; set; }
		/// <summary>
		/// Configuration of the search control.
		/// </summary>
		ViewSearch Search { get; set; }
		/// <summary>
		/// Gets or sets the Query String parameter name for the filter.
		/// </summary>
		string FilterQueryStringParameterName { get; set; }
		/// <summary>
		/// Gets or sets the Query String parameter name for the sort expression.
		/// </summary>
		string SortQueryStringParameterName { get; set; }
		/// <summary>
		/// Gets or sets the Query String parameter name for the page number.
		/// </summary>
		string PageQueryStringParameterName { get; set; }
		/// <summary>
		/// Gets or sets the text to display when the list does not contain any records.
		/// </summary>
		string EmptyListText { get; set; }
		/// <summary>
		/// Gets or sets the text to display when rendering a filter dropdown to select 'my' records.
		/// </summary>
		string FilterByUserOptionLabel { get; set; }
		/// <summary>
		/// Attribute logical name on the target entity of the lookup field of type contact that is used to assign the current portal user's contact id to filter the query results.
		/// </summary>
		string FilterPortalUserAttributeName { get; set; }
		/// <summary>
		/// Attribute logical name on the target entity of the lookup field of type account that is used to assign the current portal user's contact's parent customer account id to filter the query results.
		/// </summary>
		string FilterAccountAttributeName { get; set; }
		/// <summary>
		/// Attribute logical name on the target entity of the lookup field of type adx_website that is used to assign the current website id to filter the query results.
		/// </summary>
		string FilterWebsiteAttributeName { get; set; }
		/// <summary>
		/// Link for details action.
		/// </summary>
		DetailsActionLink DetailsActionLink { get; set; }
		/// <summary>
		/// Link for insert action.
		/// </summary>
		InsertActionLink InsertActionLink { get; set; }
		/// <summary>
		/// Link for association action.
		/// </summary>
		AssociateActionLink AssociateActionLink { get; set; }
		/// <summary>
		/// Link for Edit Action
		/// </summary>
		EditActionLink EditActionLink { get; set; }
		/// <summary>
		/// Link for Delete Action
		/// </summary>
		DeleteActionLink DeleteActionLink { get; set; }
		/// <summary>
		/// Link for Disassociate Action
		/// </summary>
		DisassociateActionLink DisassociateActionLink { get; set; }
		/// <summary>
		/// Text displayed for the column header of the column containing row level action links.
		/// </summary>
		string ActionColumnHeaderText { get; set; }
		/// <summary>
		/// Width in pixels of the column containing the action links to the details view page. Default is 20 pixels.
		/// </summary>
		int ActionLinksColumnWidth { get; set; }
		/// <summary>
		/// Gets or sets the name of the portal configuration that the control binds to.
		/// </summary>
		string PortalName { get; set; }
		/// <summary>
		/// Gets or sets the language code
		/// </summary>
		int LanguageCode { get; set; }
		/// <summary>
		/// Configuration settings for a map view
		/// </summary>
		MapConfiguration MapSettings { get; set; }
		/// <summary>
		/// Configuration settings for a calendar view
		/// </summary>
		CalendarConfiguration CalendarSettings { get; set; }
		/// <summary>
		/// Configuration settings for the metadata filter control.
		/// </summary>
		FilterConfiguration FilterSettings { get; set; }
		/// <summary>
		/// Css class for the View
		/// </summary>
		string CssClass { get; set; }
		/// <summary>
		/// Css class for the Grid
		/// </summary>
		string GridCssClass { get; set; }
		/// <summary>
		/// Column Width setting: pixels or percentage
		/// </summary>
		EntityGridExtensions.GridColumnWidthStyle? GridColumnWidthStyle { get; set; }
		/// <summary>
		/// Message to display while loading
		/// </summary>
		string LoadingMessage { get; set; }
		/// <summary>
		/// Error message
		/// </summary>
		string ErrorMessage { get; set; }
		/// <summary>
		/// Access Denied Message
		/// </summary>
		string AccessDeniedMessage { get; set; }
		/// <summary>
		/// Actions that are applicable to the view or entire record set.
		/// </summary>
		List<ViewActionLink> ViewActionLinks { get; set; }
		/// <summary>
		/// Gets or sets the create related record action link.
		/// </summary>
		/// <value>
		/// The create related record action link.
		/// </value>
		List<ViewActionLink> CreateRelatedRecordActionLinks { get; set; }
		/// <summary>
		/// Actions that are applicable to a single record item.
		/// </summary>
		List<ViewActionLink> ItemActionLinks { get; set; }
		/// <summary>
		/// Gets or sets the logical name of the attribute used by the modal lookup.
		/// </summary>
		string ModalLookupAttributeLogicalName { get; set; }
		/// <summary>
		/// Gets or sets the logical name of the entity used by the modal lookup.
		/// </summary>
		string ModalLookupEntityLogicalName { get; set; }
		/// <summary>
		/// Gets or sets the unique identifier of the entity referenced by the modal lookup form.
		/// </summary>
		Guid? ModalLookupFormReferenceEntityId { get; set; }
		/// <summary>
		/// Gets or sets the logical name of the entity referenced by the modal lookup form.
		/// </summary>
		string ModalLookupFormReferenceEntityLogicalName { get; set; }
		/// <summary>
		/// Gets or sets the relationship name referenced by the modal lookup form.
		/// </summary>
		string ModalLookupFormReferenceRelationshipName { get; set; }
		/// <summary>
		/// Gets or sets the relationship role referenced by the modal lookup form.
		/// </summary>
		string ModalLookupFormReferenceRelationshipRole { get; set; }
		/// <summary>
		/// Gets the savedquery (view) for the corresponding view configuration properties.
		/// </summary>
		/// <param name="serviceContext">Organization service context used to query CRM.</param>
		/// <param name="languageCode">Optional language code; 0 uses the default.</param>
		/// <returns><see cref="SavedQueryView"/></returns>
		SavedQueryView GetSavedQueryView(OrganizationServiceContext serviceContext, int languageCode = 0);
		/// <summary>
		/// Gets the current view from a collection of <see cref="SavedQueryView"/> records.
		/// </summary>
		/// <param name="views">collection of <see cref="SavedQueryView"/> records.</param>
		/// <returns><see cref="SavedQueryView"/></returns>
		SavedQueryView GetSavedQueryView(IEnumerable<SavedQueryView> views);
	}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the MamResultadoExFisico class.
/// </summary>
[Serializable]
public partial class MamResultadoExFisicoCollection : ActiveList<MamResultadoExFisico, MamResultadoExFisicoCollection>
{
	public MamResultadoExFisicoCollection() {}

	/// <summary>
	/// Filters an existing collection based on the set criteria. This is an in-memory filter
	/// Thanks to developingchris for this!
	/// </summary>
	/// <returns>MamResultadoExFisicoCollection</returns>
	public MamResultadoExFisicoCollection Filter()
	{
		// Iterate backwards so removing an item does not shift the indices still to be visited.
		for (int i = this.Count - 1; i > -1; i--)
		{
			MamResultadoExFisico o = this[i];
			foreach (SubSonic.Where w in this.wheres)
			{
				bool remove = false;
				System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
				// ROBUSTNESS: guard against a criterion naming a non-existent property
				// (pi == null previously caused a NullReferenceException).
				if (pi != null && pi.CanRead)
				{
					object val = pi.GetValue(o, null);
					switch (w.Comparison)
					{
						case SubSonic.Comparison.Equals:
							// object.Equals tolerates a null property value, unlike val.Equals(...).
							if (!object.Equals(val, w.ParameterValue))
							{
								remove = true;
							}
							break;
					}
				}
				if (remove)
				{
					// Item failed a criterion; remove it and stop evaluating further criteria for it.
					this.Remove(o);
					break;
				}
			}
		}
		return this;
	}
}
/// <summary>
/// This is an ActiveRecord class which wraps the MAM_ResultadoExFisico table.
/// </summary>
[Serializable]
public partial class MamResultadoExFisico : ActiveRecord<MamResultadoExFisico>, IActiveRecord
{
	#region .ctors and Default Settings
	// Creates a new, unsaved record with in-memory defaults applied.
	public MamResultadoExFisico()
	{
		SetSQLProps();
		InitSetDefaults();
		MarkNew();
	}
	private void InitSetDefaults() { SetDefaults(); }
	// Creates a new, unsaved record, optionally forcing database-side default values.
	public MamResultadoExFisico(bool useDatabaseDefaults)
	{
		SetSQLProps();
		if(useDatabaseDefaults)
			ForceDefaults();
		MarkNew();
	}
	// Loads an existing record by its primary key value.
	public MamResultadoExFisico(object keyID)
	{
		SetSQLProps();
		InitSetDefaults();
		LoadByKey(keyID);
	}
	// Loads an existing record by matching a single column value.
	public MamResultadoExFisico(string columnName, object columnValue)
	{
		SetSQLProps();
		InitSetDefaults();
		LoadByParam(columnName,columnValue);
	}
	// Ensures the static table schema has been initialized before any data access.
	protected static void SetSQLProps() { GetTableSchema(); }
	#endregion
	#region Schema and Query Accessor
	public static Query CreateQuery() { return new Query(Schema); }
	public static TableSchema.Table Schema
	{
		get
		{
			// Lazily initialize the schema on first access.
			if (BaseSchema == null)
				SetSQLProps();
			return BaseSchema;
		}
	}
	// Builds the static schema description of the MAM_ResultadoExFisico table
	// (columns, keys, defaults) and registers it with the "sicProvider" provider.
	private static void GetTableSchema()
	{
		if(!IsSchemaInitialized)
		{
			//Schema declaration
			TableSchema.Table schema = new TableSchema.Table("MAM_ResultadoExFisico", TableType.Table, DataService.GetInstance("sicProvider"));
			schema.Columns = new TableSchema.TableColumnCollection();
			schema.SchemaName = @"dbo";
			//columns
			// idResultadoExFisico: int, identity, primary key.
			TableSchema.TableColumn colvarIdResultadoExFisico = new TableSchema.TableColumn(schema);
			colvarIdResultadoExFisico.ColumnName = "idResultadoExFisico";
			colvarIdResultadoExFisico.DataType = DbType.Int32;
			colvarIdResultadoExFisico.MaxLength = 0;
			colvarIdResultadoExFisico.AutoIncrement = true;
			colvarIdResultadoExFisico.IsNullable = false;
			colvarIdResultadoExFisico.IsPrimaryKey = true;
			colvarIdResultadoExFisico.IsForeignKey = false;
			colvarIdResultadoExFisico.IsReadOnly = false;
			colvarIdResultadoExFisico.DefaultSetting = @"";
			colvarIdResultadoExFisico.ForeignKeyTableName = "";
			schema.Columns.Add(colvarIdResultadoExFisico);
			// nombre: varchar(50), not null, defaults to empty string.
			TableSchema.TableColumn colvarNombre = new TableSchema.TableColumn(schema);
			colvarNombre.ColumnName = "nombre";
			colvarNombre.DataType = DbType.AnsiString;
			colvarNombre.MaxLength = 50;
			colvarNombre.AutoIncrement = false;
			colvarNombre.IsNullable = false;
			colvarNombre.IsPrimaryKey = false;
			colvarNombre.IsForeignKey = false;
			colvarNombre.IsReadOnly = false;
			colvarNombre.DefaultSetting = @"('')";
			colvarNombre.ForeignKeyTableName = "";
			schema.Columns.Add(colvarNombre);
			// activo: bit, not null, defaults to 1 (active).
			TableSchema.TableColumn colvarActivo = new TableSchema.TableColumn(schema);
			colvarActivo.ColumnName = "activo";
			colvarActivo.DataType = DbType.Boolean;
			colvarActivo.MaxLength = 0;
			colvarActivo.AutoIncrement = false;
			colvarActivo.IsNullable = false;
			colvarActivo.IsPrimaryKey = false;
			colvarActivo.IsForeignKey = false;
			colvarActivo.IsReadOnly = false;
			colvarActivo.DefaultSetting = @"((1))";
			colvarActivo.ForeignKeyTableName = "";
			schema.Columns.Add(colvarActivo);
			BaseSchema = schema;
			//add this schema to the provider
			//so we can query it later
			DataService.Providers["sicProvider"].AddSchema("MAM_ResultadoExFisico",schema);
		}
	}
	#endregion
	#region Props
	[XmlAttribute("IdResultadoExFisico")]
	[Bindable(true)]
	public int IdResultadoExFisico
	{
		get { return GetColumnValue<int>(Columns.IdResultadoExFisico); }
		set { SetColumnValue(Columns.IdResultadoExFisico, value); }
	}
	[XmlAttribute("Nombre")]
	[Bindable(true)]
	public string Nombre
	{
		get { return GetColumnValue<string>(Columns.Nombre); }
		set { SetColumnValue(Columns.Nombre, value); }
	}
	[XmlAttribute("Activo")]
	[Bindable(true)]
	public bool Activo
	{
		get { return GetColumnValue<bool>(Columns.Activo); }
		set { SetColumnValue(Columns.Activo, value); }
	}
	#endregion
	#region PrimaryKey Methods
	// Propagates a changed primary key to dependent child collections.
	protected override void SetPrimaryKey(object oValue)
	{
		base.SetPrimaryKey(oValue);
		SetPKValues();
	}
	// Lazily-loaded child MAM_ExamenFisico records keyed by this record's id.
	private DalSic.MamExamenFisicoCollection colMamExamenFisicoRecords;
	public DalSic.MamExamenFisicoCollection MamExamenFisicoRecords
	{
		get
		{
			if(colMamExamenFisicoRecords == null)
			{
				colMamExamenFisicoRecords = new DalSic.MamExamenFisicoCollection().Where(MamExamenFisico.Columns.IdResultadoExFisico, IdResultadoExFisico).Load();
				colMamExamenFisicoRecords.ListChanged += new ListChangedEventHandler(colMamExamenFisicoRecords_ListChanged);
			}
			return colMamExamenFisicoRecords;
		}
		set
		{
			colMamExamenFisicoRecords = value;
			colMamExamenFisicoRecords.ListChanged += new ListChangedEventHandler(colMamExamenFisicoRecords_ListChanged);
		}
	}
	// Keeps newly added child records pointing at this record's primary key.
	void colMamExamenFisicoRecords_ListChanged(object sender, ListChangedEventArgs e)
	{
		if (e.ListChangedType == ListChangedType.ItemAdded)
		{
			// Set foreign key value
			colMamExamenFisicoRecords[e.NewIndex].IdResultadoExFisico = IdResultadoExFisico;
		}
	}
	#endregion
	//no foreign key tables defined (0)
	//no ManyToMany tables defined (0)
	#region ObjectDataSource support
	/// <summary>
	/// Inserts a record, can be used with the Object Data Source
	/// </summary>
	public static void Insert(string varNombre,bool varActivo)
	{
		MamResultadoExFisico item = new MamResultadoExFisico();
		item.Nombre = varNombre;
		item.Activo = varActivo;
		// Attribute the save to the web user when running in a web context,
		// otherwise to the current thread principal.
		if (System.Web.HttpContext.Current != null)
			item.Save(System.Web.HttpContext.Current.User.Identity.Name);
		else
			item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
	}
	/// <summary>
	/// Updates a record, can be used with the Object Data Source
	/// </summary>
	public static void Update(int varIdResultadoExFisico,string varNombre,bool varActivo)
	{
		MamResultadoExFisico item = new MamResultadoExFisico();
		item.IdResultadoExFisico = varIdResultadoExFisico;
		item.Nombre = varNombre;
		item.Activo = varActivo;
		// Mark the entity as existing so Save issues an UPDATE rather than an INSERT.
		item.IsNew = false;
		if (System.Web.HttpContext.Current != null)
			item.Save(System.Web.HttpContext.Current.User.Identity.Name);
		else
			item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
	}
	#endregion
	#region Typed Columns
	public static TableSchema.TableColumn IdResultadoExFisicoColumn
	{
		get { return Schema.Columns[0]; }
	}
	public static TableSchema.TableColumn NombreColumn
	{
		get { return Schema.Columns[1]; }
	}
	public static TableSchema.TableColumn ActivoColumn
	{
		get { return Schema.Columns[2]; }
	}
	#endregion
	#region Columns Struct
	// Raw column-name constants for query construction.
	public struct Columns
	{
		public static string IdResultadoExFisico = @"idResultadoExFisico";
		public static string Nombre = @"nombre";
		public static string Activo = @"activo";
	}
	#endregion
	#region Update PK Collections
	// Re-stamps the foreign key on any loaded child records so they follow this record's key.
	public void SetPKValues()
	{
		if (colMamExamenFisicoRecords != null)
		{
			foreach (DalSic.MamExamenFisico item in colMamExamenFisicoRecords)
			{
				if (item.IdResultadoExFisico != IdResultadoExFisico)
				{
					item.IdResultadoExFisico = IdResultadoExFisico;
				}
			}
		}
	}
	#endregion
	#region Deep Save
	// Saves this record and then any loaded child records in one pass.
	public void DeepSave()
	{
		Save();
		if (colMamExamenFisicoRecords != null)
		{
			colMamExamenFisicoRecords.SaveAll();
		}
	}
	#endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.ErrorReporting;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.RenameTracking
{
internal sealed partial class RenameTrackingTaggerProvider
{
    /// <summary>
    /// Keeps track of the rename tracking state for a given text buffer by tracking its
    /// changes over time.
    /// </summary>
    private class StateMachine : ForegroundThreadAffinitizedObject
    {
        private readonly IInlineRenameService _inlineRenameService;
        private readonly IAsynchronousOperationListener _asyncListener;
        private readonly ITextBuffer _buffer;
        private readonly IDiagnosticAnalyzerService _diagnosticAnalyzerService;

        // Number of connected consumers; the state machine detaches from the buffer
        // when this drops back to zero (see Connect/Disconnect).
        private int _refCount;

        public TrackingSession TrackingSession { get; private set; }
        public ITextBuffer Buffer { get { return _buffer; } }

        public event Action TrackingSessionUpdated = delegate { };
        public event Action<ITrackingSpan> TrackingSessionCleared = delegate { };

        public StateMachine(
            ITextBuffer buffer,
            IInlineRenameService inlineRenameService,
            IAsynchronousOperationListener asyncListener,
            IDiagnosticAnalyzerService diagnosticAnalyzerService)
        {
            _buffer = buffer;
            _buffer.Changed += Buffer_Changed;
            _inlineRenameService = inlineRenameService;
            _asyncListener = asyncListener;
            _diagnosticAnalyzerService = diagnosticAnalyzerService;
        }

        // Foreground-only buffer change handler; decides whether to clear, continue,
        // or start a tracking session for each text change.
        private void Buffer_Changed(object sender, TextContentChangedEventArgs e)
        {
            AssertIsForeground();
            if (!_buffer.GetOption(InternalFeatureOnOffOptions.RenameTracking))
            {
                // When disabled, ignore all text buffer changes and do not trigger retagging
                return;
            }
            using (Logger.LogBlock(FunctionId.Rename_Tracking_BufferChanged, CancellationToken.None))
            {
                // When the buffer changes, several things might be happening:
                // 1. If a non-identifier character has been added or deleted, we stop tracking
                //    completely.
                // 2. Otherwise, if the changes are completely contained an existing session, then
                //    continue that session.
                // 3. Otherwise, we're starting a new tracking session. Find and track the span of
                //    the relevant word in the foreground, and use a task to figure out whether the
                //    original word was a renamable identifier or not.
                if (e.Changes.Count != 1 || ShouldClearTrackingSession(e.Changes.Single()))
                {
                    ClearTrackingSession();
                    return;
                }
                // The change is trackable. Figure out whether we should continue an existing
                // session
                var change = e.Changes.Single();
                if (this.TrackingSession == null)
                {
                    StartTrackingSession(e);
                    return;
                }
                // There's an existing session. Continue that session if the current change is
                // contained inside the tracking span.
                SnapshotSpan trackingSpanInNewSnapshot = this.TrackingSession.TrackingSpan.GetSpan(e.After);
                if (trackingSpanInNewSnapshot.Contains(change.NewSpan))
                {
                    // Continuing an existing tracking session. If there may have been a tag
                    // showing, then update the tags.
                    UpdateTrackingSessionIfRenamable();
                }
                else
                {
                    StartTrackingSession(e);
                }
            }
        }

        public void UpdateTrackingSessionIfRenamable()
        {
            AssertIsForeground();
            if (this.TrackingSession.IsDefinitelyRenamableIdentifier())
            {
                this.TrackingSession.CheckNewIdentifier(this, _buffer.CurrentSnapshot);
                TrackingSessionUpdated();
            }
        }

        // Returns true when the change contains any character that cannot be part of
        // an identifier, meaning tracking must stop.
        private bool ShouldClearTrackingSession(ITextChange change)
        {
            AssertIsForeground();
            ISyntaxFactsService syntaxFactsService;
            if (!TryGetSyntaxFactsService(out syntaxFactsService))
            {
                return true;
            }
            // The editor will replace virtual space with spaces and/or tabs when typing on a
            // previously blank line. Trim these characters from the start of change.NewText. If
            // the resulting change is empty (the user just typed a <space>), clear the session.
            var changedText = change.OldText + change.NewText.TrimStart(' ', '\t');
            if (changedText.IsEmpty())
            {
                return true;
            }
            return changedText.Any(c => !IsTrackableCharacter(syntaxFactsService, c));
        }

        // Replaces any current session with a new one anchored on the word that
        // surrounded the change in the pre-change snapshot.
        private void StartTrackingSession(TextContentChangedEventArgs eventArgs)
        {
            AssertIsForeground();
            ClearTrackingSession();
            if (_inlineRenameService.ActiveSession != null)
            {
                return;
            }
            // Synchronously find the tracking span in the old document.
            var change = eventArgs.Changes.Single();
            var beforeText = eventArgs.Before.AsText();
            ISyntaxFactsService syntaxFactsService;
            if (!TryGetSyntaxFactsService(out syntaxFactsService))
            {
                return;
            }
            // Expand left and right from the change until a non-identifier character is hit.
            int leftSidePosition = change.OldPosition;
            int rightSidePosition = change.OldPosition + change.OldText.Length;
            while (leftSidePosition > 0 && IsTrackableCharacter(syntaxFactsService, beforeText[leftSidePosition - 1]))
            {
                leftSidePosition--;
            }
            while (rightSidePosition < beforeText.Length && IsTrackableCharacter(syntaxFactsService, beforeText[rightSidePosition]))
            {
                rightSidePosition++;
            }
            var originalSpan = new Span(leftSidePosition, rightSidePosition - leftSidePosition);
            this.TrackingSession = new TrackingSession(this, new SnapshotSpan(eventArgs.Before, originalSpan), _asyncListener);
        }

        private bool IsTrackableCharacter(ISyntaxFactsService syntaxFactsService, char c)
        {
            // Allow identifier part characters at the beginning of strings (even if they are
            // not identifier start characters). If an intermediate name is not valid, the smart
            // tag will not be shown due to later checks. Also allow escape chars anywhere as
            // they might be in the middle of a complex edit.
            return syntaxFactsService.IsIdentifierPartCharacter(c) || syntaxFactsService.IsIdentifierEscapeCharacter(c);
        }

        // Cancels and forgets the current session, if any. Returns true if one existed.
        public bool ClearTrackingSession()
        {
            AssertIsForeground();
            if (this.TrackingSession != null)
            {
                // Disallow the existing TrackingSession from triggering IdentifierFound.
                var previousTrackingSession = this.TrackingSession;
                this.TrackingSession = null;
                previousTrackingSession.Cancel();
                // If there may have been a tag showing, then actually clear the tags.
                if (previousTrackingSession.IsDefinitelyRenamableIdentifier())
                {
                    TrackingSessionCleared(previousTrackingSession.TrackingSpan);
                }
                return true;
            }
            return false;
        }

        // Like ClearTrackingSession, but only clears sessions whose tag was (potentially)
        // visible, and additionally reanalyzes the document so the diagnostic goes away.
        public bool ClearVisibleTrackingSession()
        {
            AssertIsForeground();
            if (this.TrackingSession != null && this.TrackingSession.IsDefinitelyRenamableIdentifier())
            {
                var document = _buffer.CurrentSnapshot.GetOpenDocumentInCurrentContextWithChanges();
                if (document != null)
                {
                    // When rename tracking is dismissed via escape, we no longer wish to
                    // provide a diagnostic/codefix, but nothing has changed in the workspace
                    // to trigger the diagnostic system to reanalyze, so we trigger it
                    // manually.
                    _diagnosticAnalyzerService?.Reanalyze(
                        document.Project.Solution.Workspace,
                        documentIds: SpecializedCollections.SingletonEnumerable(document.Id));
                }
                // Disallow the existing TrackingSession from triggering IdentifierFound.
                var previousTrackingSession = this.TrackingSession;
                this.TrackingSession = null;
                previousTrackingSession.Cancel();
                TrackingSessionCleared(previousTrackingSession.TrackingSpan);
                return true;
            }
            return false;
        }

        public bool CanInvokeRename(out TrackingSession trackingSession, bool isSmartTagCheck = false, bool waitForResult = false, CancellationToken cancellationToken = default(CancellationToken))
        {
            // This needs to be able to run on a background thread for the diagnostic.
            trackingSession = this.TrackingSession;
            if (trackingSession == null)
            {
                return false;
            }
            ISyntaxFactsService syntaxFactsService;
            IRenameTrackingLanguageHeuristicsService languageHeuristicsService;
            return TryGetSyntaxFactsService(out syntaxFactsService) && TryGetLanguageHeuristicsService(out languageHeuristicsService) &&
                trackingSession.CanInvokeRename(syntaxFactsService, languageHeuristicsService, isSmartTagCheck, waitForResult, cancellationToken);
        }

        // Produces the rename-tracking diagnostic for the given tree, or an empty
        // sequence when no rename is invokable or the buffer text has diverged.
        internal async Task<IEnumerable<Diagnostic>> GetDiagnostic(SyntaxTree tree, DiagnosticDescriptor diagnosticDescriptor, CancellationToken cancellationToken)
        {
            try
            {
                // This can be called on a background thread. We are being asked whether a
                // lightbulb should be shown for the given document, but we only know about the
                // current state of the buffer. Compare the text to see if we should bail early.
                // Even if the text is the same, the buffer may change on the UI thread during this
                // method. If it does, we may give an incorrect response, but the diagnostics
                // engine will know that the document changed and not display the lightbulb anyway.
                if (Buffer.AsTextContainer().CurrentText != await tree.GetTextAsync(cancellationToken).ConfigureAwait(false))
                {
                    return SpecializedCollections.EmptyEnumerable<Diagnostic>();
                }
                TrackingSession trackingSession;
                if (CanInvokeRename(out trackingSession, waitForResult: true, cancellationToken: cancellationToken))
                {
                    SnapshotSpan snapshotSpan = trackingSession.TrackingSpan.GetSpan(Buffer.CurrentSnapshot);
                    var textSpan = snapshotSpan.Span.ToTextSpan();
                    // Stash the old and new names in the diagnostic's properties so the
                    // codefix can perform the rename.
                    var builder = ImmutableDictionary.CreateBuilder<string, string>();
                    builder.Add(RenameTrackingDiagnosticAnalyzer.RenameFromPropertyKey, trackingSession.OriginalName);
                    builder.Add(RenameTrackingDiagnosticAnalyzer.RenameToPropertyKey, snapshotSpan.GetText());
                    var properties = builder.ToImmutable();
                    var diagnostic = Diagnostic.Create(diagnosticDescriptor,
                        tree.GetLocation(textSpan),
                        properties);
                    return SpecializedCollections.SingletonEnumerable(diagnostic);
                }
                return SpecializedCollections.EmptyEnumerable<Diagnostic>();
            }
            catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
            {
                throw ExceptionUtilities.Unreachable;
            }
        }

        // Reinstates a previously cleared session (e.g. after an undo of the clear).
        public void RestoreTrackingSession(TrackingSession trackingSession)
        {
            AssertIsForeground();
            ClearTrackingSession();
            this.TrackingSession = trackingSession;
            TrackingSessionUpdated();
        }

        public void OnTrackingSessionUpdated(TrackingSession trackingSession)
        {
            AssertIsForeground();
            // Ignore updates from sessions that have already been replaced or cleared.
            if (this.TrackingSession == trackingSession)
            {
                TrackingSessionUpdated();
            }
        }

        private bool TryGetSyntaxFactsService(out ISyntaxFactsService syntaxFactsService)
        {
            // Can be called on a background thread
            syntaxFactsService = null;
            var document = _buffer.CurrentSnapshot.GetOpenDocumentInCurrentContextWithChanges();
            if (document != null)
            {
                syntaxFactsService = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
            }
            return syntaxFactsService != null;
        }

        private bool TryGetLanguageHeuristicsService(out IRenameTrackingLanguageHeuristicsService languageHeuristicsService)
        {
            // Can be called on a background thread
            languageHeuristicsService = null;
            var document = _buffer.CurrentSnapshot.GetOpenDocumentInCurrentContextWithChanges();
            if (document != null)
            {
                languageHeuristicsService = document.Project.LanguageServices.GetService<IRenameTrackingLanguageHeuristicsService>();
            }
            return languageHeuristicsService != null;
        }

        // Registers a consumer of this state machine (foreground only).
        public void Connect()
        {
            AssertIsForeground();
            _refCount++;
        }

        // Unregisters a consumer; when the last one disconnects, detach from the
        // buffer and remove the cached property so the machine can be collected.
        public void Disconnect()
        {
            AssertIsForeground();
            _refCount--;
            Contract.ThrowIfFalse(_refCount >= 0);
            if (_refCount == 0)
            {
                this.Buffer.Properties.RemoveProperty(typeof(StateMachine));
                this.Buffer.Changed -= Buffer_Changed;
            }
        }
    }
}
}
| |
//
// System.Runtime.Remoting.MetadataServices.MetaDataCodeGenerator
//
// Authors:
// Lluis Sanchez Gual (lluis@ximian.com)
//
// (C) 2003 Novell, Inc
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Collections;
using System.IO;
using System.Xml;
using System.Reflection;
using System.Runtime.Remoting;
using System.Runtime.Remoting.Metadata;
namespace System.Runtime.Remoting.MetadataServices
{
internal class MetaDataCodeGenerator
{
// The WSDL document currently being processed (loaded in GenerateCode).
XmlDocument doc;
// The code file currently receiving generated output.
CodeFile currentFile;
// Namespace manager with the wsdl/s/suds prefixes registered for XPath queries.
XmlNamespaceManager nsManager;
// Maps type qualified names to their suds:class/suds:interface/suds:struct elements.
Hashtable sudsTypes;
// Entry point: reads a WSDL document from inputStream and emits source code for the
// schema types and service ports it declares into outputDirectory, adding the path of
// the generated file to outCodeStreamList.
// NOTE(review): clientProxy, proxyUrl and proxyNamespace are not used in this method's
// visible body — presumably consumed elsewhere or not yet implemented; confirm.
public void GenerateCode (bool clientProxy, string outputDirectory, Stream inputStream,
	ArrayList outCodeStreamList, string proxyUrl, string proxyNamespace)
{
	doc = new XmlDocument ();
	doc.Load (inputStream);
	nsManager = new XmlNamespaceManager (doc.NameTable);
	nsManager.AddNamespace ("wsdl", MetaData.WsdlNamespace);
	nsManager.AddNamespace ("s", MetaData.SchemaNamespace);
	nsManager.AddNamespace ("suds", MetaData.SudsNamespace);
	// Default to the working directory when no output directory is given.
	if (outputDirectory == null) outputDirectory = Directory.GetCurrentDirectory();
	CodeFile mainFile = new CodeFile (outputDirectory);
	currentFile = mainFile;
	// Suds types: index every suds type element found under the bindings by its
	// qualified type name, for lookup during later generation phases.
	sudsTypes = new Hashtable ();
	XmlNodeList nodes = doc.DocumentElement.SelectNodes ("wsdl:binding/suds:class|wsdl:binding/suds:interface|wsdl:binding/suds:struct", nsManager);
	foreach (XmlElement node in nodes)
		sudsTypes [GetTypeQualifiedName (node, node.GetAttribute ("type"))] = node;
	// Data types
	nodes = doc.SelectNodes ("wsdl:definitions/wsdl:types/s:schema", nsManager);
	foreach (XmlElement schema in nodes)
		GenerateSchemaCode (schema);
	// Services
	nodes = doc.SelectNodes ("wsdl:definitions/wsdl:service/wsdl:port", nsManager);
	foreach (XmlElement port in nodes)
		GeneratePortCode (port);
	mainFile.Write ();
	if (mainFile.FileName != null)
		outCodeStreamList.Add (mainFile.FilePath);
}
void GeneratePortCode (XmlElement port)
{
XmlElement binding = GetBinding (port.GetAttribute ("binding"));
XmlElement type = null;
foreach (XmlNode node in binding)
if ((node is XmlElement) && ((XmlElement)node).NamespaceURI == MetaData.SudsNamespace)
{ type = (XmlElement) node; break; }
string rootType = type.GetAttribute ("rootType");
if (rootType == "Delegate")
GenerateServiceDelegateCode (port, binding, type);
else
GenerateServiceClassCode (port, binding, type);
}
void GenerateServiceDelegateCode (XmlElement port, XmlElement binding, XmlElement type)
{
string typeName = (type != null) ? type.GetAttribute ("type") : port.GetAttribute ("name");
string portName = GetNameFromQn (binding.GetAttribute ("type"));
string name, ns;
GetTypeQualifiedName (port, typeName, out name, out ns);
currentFile.SetCurrentNamespace (ns);
XmlElement oper = (XmlElement) binding.SelectSingleNode ("wsdl:operation[@name='Invoke']", nsManager);
if (oper == null) throw new InvalidOperationException ("Invalid delegate schema");
string parsDec;
string returnType;
GetParameters (oper, portName, "Invoke", out parsDec, out returnType);
currentFile.WriteLine ("public delegate " + returnType + " " + name + " (" + parsDec + ");");
currentFile.WriteLine ("");
}
void GenerateServiceClassCode (XmlElement port, XmlElement binding, XmlElement type)
{
string typeName = (type != null) ? type.GetAttribute ("type") : port.GetAttribute ("name");
string name, ns;
GetTypeQualifiedName (port, typeName, out name, out ns);
currentFile.SetCurrentNamespace (ns);
string cls = "public " + type.LocalName + " " + name;
string baset = type.GetAttribute ("extends");
if (baset != "") cls += ": " + GetTypeQualifiedName (port, baset);
// Interfaces
XmlNodeList interfaces = type.SelectNodes ("suds:implements",nsManager);
if (interfaces.Count == 0) interfaces = type.SelectNodes ("suds:extends",nsManager);
foreach (XmlElement interf in interfaces)
{
string iname = GetTypeQualifiedName (interf, interf.GetAttribute ("type"));
if (cls.IndexOf (':') == -1) cls += ": " + iname;
else cls += ", " + iname;
}
currentFile.WriteLine (cls);
currentFile.WriteLineInd ("{");
string portName = GetNameFromQn (binding.GetAttribute ("type"));
bool isInterface = type.LocalName == "interface";
string vis = isInterface? "":"public ";
ArrayList mets = GetMethods (portName, binding);
foreach (MethodData met in mets)
{
if (met.IsProperty)
{
string prop = vis + met.ReturnType + " ";
if (met.Signature != "") prop += "this [" + met.Signature + "]";
else prop += met.Name;
if (isInterface)
{
prop += " { ";
if (met.HasGet) prop += "get; ";
if (met.HasSet) prop += "set; ";
prop += "}";
currentFile.WriteLine (prop);
}
else
{
currentFile.WriteLine (prop);
currentFile.WriteLineInd ("{");
if (met.HasGet) currentFile.WriteLine ("get { throw new NotImplementedException (); }");
if (met.HasSet) currentFile.WriteLine ("set { throw new NotImplementedException (); }");
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
}
}
else
{
currentFile.WriteLine (vis + met.ReturnType + " " + met.Name + " (" + met.Signature + ")" + (isInterface?";":""));
if (!isInterface)
{
currentFile.WriteLineInd ("{");
currentFile.WriteLine ("throw new NotImplementedException ();");
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
}
}
}
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
}
class MethodData
{
public string ReturnType;
public string Signature;
public string Name;
public bool HasSet;
public bool HasGet;
public bool IsProperty { get { return HasGet || HasSet; } }
}
ArrayList GetMethods (string portName, XmlElement binding)
{
ArrayList mets = new ArrayList ();
XmlNodeList nodes = binding.SelectNodes ("wsdl:operation", nsManager);
foreach (XmlElement oper in nodes)
{
MethodData md = new MethodData ();
md.Name = oper.GetAttribute ("name");
GetParameters (oper, portName, md.Name, out md.Signature, out md.ReturnType);
if (md.Name.StartsWith ("set_") || md.Name.StartsWith ("get_"))
{
string tmp = ", " + md.Signature;
if (tmp.IndexOf (", out ") == -1 && tmp.IndexOf (", ref ") == -1)
{
bool isSet = md.Name[0]=='s';
md.Name = md.Name.Substring (4);
MethodData previousProp = null;
foreach (MethodData fmd in mets)
if (fmd.Name == md.Name && fmd.IsProperty)
previousProp = fmd;
if (previousProp != null) {
if (isSet) previousProp.HasSet = true;
else { previousProp.HasGet = true; previousProp.Signature = md.Signature; }
continue;
}
else {
if (isSet) { md.HasSet = true; md.Signature = ""; }
else md.HasGet = true;
}
}
}
mets.Add (md);
}
return mets;
}
void GetParameters (XmlElement oper, string portName, string operName, out string signature, out string returnType)
{
returnType = null;
XmlElement portType = (XmlElement) doc.SelectSingleNode ("wsdl:definitions/wsdl:portType[@name='" + portName + "']", nsManager);
XmlElement portOper = (XmlElement) portType.SelectSingleNode ("wsdl:operation[@name='" + operName + "']", nsManager);
string[] parNames = portOper.GetAttribute ("parameterOrder").Split (' ');
XmlElement inPortMsg = (XmlElement) portOper.SelectSingleNode ("wsdl:input", nsManager);
XmlElement inMsg = FindMessageFromPortMessage (inPortMsg);
XmlElement outPortMsg = (XmlElement) portOper.SelectSingleNode ("wsdl:output", nsManager);
XmlElement outMsg = FindMessageFromPortMessage (outPortMsg);
string[] parameters;
if (parNames [0] != "") parameters = new string [parNames.Length];
else parameters = new string [0];
foreach (XmlElement part in inMsg.SelectNodes ("wsdl:part",nsManager))
{
int i = Array.IndexOf (parNames, part.GetAttribute ("name"));
string type = GetTypeQualifiedName (part, part.GetAttribute ("type"));
parameters [i] = type + " " + parNames [i];
}
foreach (XmlElement part in outMsg.SelectNodes ("wsdl:part",nsManager))
{
string pn = part.GetAttribute ("name");
string type = GetTypeQualifiedName (part, part.GetAttribute ("type"));
if (pn == "return")
returnType = type;
else {
int i = Array.IndexOf (parNames, pn);
if (parameters [i] != null) parameters [i] = "ref " + parameters [i];
else parameters [i] = "out " + type + " " + pn;
}
}
signature = string.Join (", ", parameters);
if (returnType == null) returnType = "void";
}
XmlElement FindMessageFromPortMessage (XmlElement portMsg)
{
string msgName = portMsg.GetAttribute ("message");
msgName = GetNameFromQn (msgName);
return (XmlElement) doc.SelectSingleNode ("wsdl:definitions/wsdl:message[@name='" + msgName + "']", nsManager);
}
void GenerateSchemaCode (XmlElement schema)
{
string ns = schema.GetAttribute ("targetNamespace");
string clrNs = DecodeNamespace (ns);
currentFile.SetCurrentNamespace (clrNs);
foreach (XmlNode node in schema)
{
XmlElement elem = node as XmlElement;
if (elem == null) continue;
if (elem.LocalName == "complexType")
GenerateClassCode (ns, elem);
else if (elem.LocalName == "simpleType")
GenerateEnumCode (ns, elem);
}
}
void GenerateClassCode (string ns, XmlElement elem)
{
if (elem.SelectSingleNode ("s:complexContent/s:restriction", nsManager) != null) return;
string clrNs = DecodeNamespace (ns);
string typeName = GetTypeName (elem.GetAttribute ("name"), ns);
XmlElement sudsType = (XmlElement) sudsTypes [clrNs + "." + typeName];
string typetype = "class";
if (sudsType != null) typetype = sudsType.LocalName;
currentFile.WriteLine ("[Serializable, SoapType (XmlNamespace = @\"" + ns + "\", XmlTypeNamespace = @\"" + ns + "\")]");
string cls = "public " + typetype + " " + typeName;
string baseType = elem.GetAttribute ("base");
if (baseType != "") cls += ": " + GetTypeQualifiedName (elem, baseType);
bool isSerializable = (sudsType.GetAttribute ("rootType") == "ISerializable");
if (isSerializable)
{
if (cls.IndexOf (':') == -1) cls += ": ";
else cls += ", ";
cls += "System.Runtime.Serialization.ISerializable";
}
currentFile.WriteLine (cls);
currentFile.WriteLineInd ("{");
XmlNodeList elems = elem.GetElementsByTagName ("element", MetaData.SchemaNamespace);
foreach (XmlElement elemField in elems)
WriteField (elemField);
elems = elem.GetElementsByTagName ("attribute", MetaData.SchemaNamespace);
foreach (XmlElement elemField in elems)
WriteField (elemField);
if (isSerializable)
{
currentFile.WriteLine ("");
currentFile.WriteLine ("public " + typeName + " ()");
currentFile.WriteLineInd ("{");
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
currentFile.WriteLine ("public " + typeName + " (System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)");
currentFile.WriteLineInd ("{");
currentFile.WriteLine ("throw new NotImplementedException ();");
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
currentFile.WriteLine ("public void GetObjectData (System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context)");
currentFile.WriteLineInd ("{");
currentFile.WriteLine ("throw new NotImplementedException ();");
currentFile.WriteLineUni ("}");
}
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
}
void WriteField (XmlElement elemField)
{
bool isAttr = elemField.LocalName == "attribute";
string type = elemField.GetAttribute ("type");
if (isAttr)
currentFile.WriteLine ("[SoapField (UseAttribute = true)]");
else if (!IsPrimitive (elemField, type))
currentFile.WriteLine ("[SoapField (Embedded = true)]");
currentFile.WriteLine ("public " + GetTypeQualifiedName (elemField, type) + " " + elemField.GetAttribute ("name") + ";");
}
void GenerateEnumCode (string ns, XmlElement elem)
{
currentFile.WriteLine ("public enum " + GetTypeName (elem.GetAttribute ("name"), ns));
currentFile.WriteLineInd ("{");
XmlNodeList nodes = elem.SelectNodes ("s:restriction/s:enumeration/@value", nsManager);
foreach (XmlNode node in nodes)
currentFile.WriteLine (node.Value + ",");
currentFile.WriteLineUni ("}");
currentFile.WriteLine ("");
}
bool IsPrimitive (XmlNode node, string qname)
{
string name = GetTypeQualifiedName (node, qname);
return name.IndexOf ('.') == -1;
}
string GetTypeName (string localName, string ns)
{
return localName;
}
void GetTypeQualifiedName (XmlNode node, string qualifiedName, out string name, out string ns)
{
int i = qualifiedName.IndexOf (':');
if (i == -1)
{
name = qualifiedName;
ns = "";
return;
}
string prefix = qualifiedName.Substring (0,i);
name = qualifiedName.Substring (i+1);
ns = node.GetNamespaceOfPrefix (prefix);
string arrayType = GetArrayType (node, name, ns);
if (arrayType != null) {
name = arrayType;
ns = "";
}
else if (ns != MetaData.SchemaNamespace) {
ns = DecodeNamespace (ns);
}
else {
ns = "";
name = GetClrFromXsd (name);
}
}
string GetClrFromXsd (string type)
{
switch (type)
{
case "boolean": return "bool";
case "unsignedByte": return "byte";
case "char": return "char";
case "dateTime": return "DateTime";
case "decimal": return "decimal";
case "double": return "double";
case "short": return "short";
case "int": return "int";
case "long": return "long";
case "byte": return "sbyte";
case "float": return "float";
case "unsignedShort": return "ushort";
case "unsignedInt": return "uint";
case "unsignedLong": return "ulong";
case "string": return "string";
case "duration": return "TimeSpan";
case "anyType": return "object";
}
throw new InvalidOperationException ("Unknown schema type: " + type);
}
string GetTypeQualifiedName (XmlNode node, string qualifiedName)
{
string name, ns;
GetTypeQualifiedName (node, qualifiedName, out name, out ns);
if (ns != "") return ns + "." + name;
else return name;
}
string GetTypeNamespace (XmlNode node, string qualifiedName)
{
string name, ns;
GetTypeQualifiedName (node, qualifiedName, out name, out ns);
return ns;
}
string GetArrayType (XmlNode node, string name, string ns)
{
XmlNode anod = doc.SelectSingleNode ("wsdl:definitions/wsdl:types/s:schema[@targetNamespace='" + ns + "']/s:complexType[@name='" + name + "']/s:complexContent/s:restriction/s:attribute/@wsdl:arrayType", nsManager);
if (anod == null) return null;
string atype = anod.Value;
int i = atype.IndexOf ('[');
string itemType = GetTypeQualifiedName (node, atype.Substring (0,i));
return itemType + atype.Substring (i);
}
XmlElement GetBinding (string name)
{
int i = name.IndexOf (':');
name = name.Substring (i+1);
return doc.SelectSingleNode ("wsdl:definitions/wsdl:binding[@name='" + name + "']", nsManager) as XmlElement;
}
string DecodeNamespace (string xmlNamespace)
{
string tns, tasm;
if (!SoapServices.DecodeXmlNamespaceForClrTypeNamespace (xmlNamespace, out tns, out tasm))
tns = xmlNamespace;
return tns;
}
string GetLiteral (object ob)
{
if (ob == null) return "null";
if (ob is string) return "\"" + ob.ToString().Replace("\"","\"\"") + "\"";
if (ob is bool) return ((bool)ob) ? "true" : "false";
if (ob is XmlQualifiedName) {
XmlQualifiedName qn = (XmlQualifiedName)ob;
return "new XmlQualifiedName (" + GetLiteral(qn.Name) + "," + GetLiteral(qn.Namespace) + ")";
}
else return ob.ToString ();
}
string Params (params string[] pars)
{
string res = "";
foreach (string p in pars)
{
if (res != "") res += ", ";
res += p;
}
return res;
}
string GetNameFromQn (string qn)
{
int i = qn.IndexOf (':');
if (i == -1) return qn;
else return qn.Substring (i+1);
}
}
class CodeFile
{
public string FileName;
public string Directory;
public string FilePath;
Hashtable namespaces = new Hashtable ();
public StringWriter writer;
int indent;
string currentNamespace;
public CodeFile (string directory)
{
Directory = directory;
}
public void SetCurrentNamespace (string ns)
{
writer = namespaces [ns] as StringWriter;
if (writer == null)
{
indent = 0;
writer = new StringWriter ();
namespaces [ns] = writer;
WriteLine ("namespace " + ns);
WriteLineInd ("{");
}
indent = 1;
if (FileName == null)
FileName = ns + ".cs";
}
public void WriteLineInd (string code)
{
WriteLine (code);
indent++;
}
public void WriteLineUni (string code)
{
if (indent > 0) indent--;
WriteLine (code);
}
public void WriteLine (string code)
{
if (code != "") writer.Write (new String ('\t',indent));
writer.WriteLine (code);
}
public void Write ()
{
if (FileName == null) return;
FilePath = Path.Combine (Directory, FileName);
StreamWriter sw = new StreamWriter (FilePath);
sw.WriteLine ("using System;");
sw.WriteLine ("using System.Runtime.Remoting.Metadata;");
sw.WriteLine ();
foreach (StringWriter nsWriter in namespaces.Values)
{
sw.Write (nsWriter.ToString ());
sw.WriteLine ("}");
sw.WriteLine ();
}
sw.Close ();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
namespace RemoteStorageHelper
{
	/// <summary>
	/// Builds a SQL Server restore script from backup files held in remote
	/// storage (Azure blob storage or a file share): finds the latest full /
	/// differential / log chain, fetches the files, and emits the T-SQL.
	/// </summary>
	public class RestoreHelper
	{
		// Exactly one of these two helpers is non-null, selected by the item
		// class passed to the constructor.
		private readonly AzureStorageHelper m_azureHelper;
		private readonly FileStorageHelper m_fileHelper;
		private readonly ItemClass m_remoteItemClass;

		/// <summary>
		/// Initializes the helper for the given storage kind.
		/// </summary>
		/// <param name="itemClass">Blob selects Azure storage; any other value selects file storage.</param>
		public RestoreHelper(ItemClass itemClass)
		{
			m_remoteItemClass = itemClass;
			if (m_remoteItemClass == ItemClass.Blob)
			{
				m_azureHelper = new AzureStorageHelper();
			}
			else
			{
				m_fileHelper = new FileStorageHelper();
			}
		}

		/// <summary>
		/// Parses a list of blob items to obtain the latest backups
		/// </summary>
		/// <param name="files">The files as a list of blob items.</param>
		/// <param name="database">The database.</param>
		/// <returns>The restore chain: latest full backup, then the latest
		/// differential after it (if any), then all later transaction logs.
		/// Empty when no full backup exists for the database.</returns>
		private IEnumerable<RemoteItem> ParseLatestBackup(List<RemoteItem> files, string database)
		{
			var filesToFetch = new List<RemoteItem>();

			// Candidate backups for this database, ordered oldest-first. Blob items
			// carry the date in BackupDate; file items encode it in FakePath.
			var possibleFiles = m_remoteItemClass == ItemClass.Blob
				? files.FindAll(x => x.Name.StartsWith(database)).OrderBy(y => y.BackupDate).ToList()
				: files.FindAll(x => x.FakePath.StartsWith(database)).OrderBy(y => Common.ParseDate(y.FakePath)).ToList();

			if (ConfigHelper.GetConfigurationBoolean("StopAtEnabled"))
			{
				// When a STOPAT point is configured, drop every backup taken after it.
				// NOTE(review): the cut-off uses ParseDateFromFileName(Name) while the
				// non-blob branch above orders by ParseDate(FakePath) - confirm both
				// encode the same timestamp for file-class items.
				var stopAtDateTime = Common.ParseDate(ConfigHelper.GetConfigurationValue("StopAtDateTime"));
				if (stopAtDateTime != null)
				{
					// Index of the first backup past the STOPAT point, +1 so that the
					// backup containing the STOPAT moment itself is kept.
					var flag = -1;
					for (var i = 0; i < possibleFiles.Count; i++)
					{
						if (Common.ParseDateFromFileName(possibleFiles[i].Name) > stopAtDateTime)
						{
							flag = i + 1;
							break;
						}
					}
					if (flag > -1)
					{
						possibleFiles.RemoveRange(flag, possibleFiles.Count - flag);
					}
				}
			}

			// Find the latest full backup
			var latestFullBackup = possibleFiles.FindLast(x => x.BackupType == Backup.Full);
			if (latestFullBackup == null)
			{
				// No full backup means nothing can be restored at all.
				return filesToFetch;
			}
			filesToFetch.Add(latestFullBackup);

			// Look for latest differential after full backup
			var latestDifferentialBackup =
				possibleFiles.FindLast(x => x.BackupType == Backup.Differential && x.BackupDate > latestFullBackup.BackupDate);
			if (latestDifferentialBackup != null)
			{
				filesToFetch.Add(latestDifferentialBackup);
			}

			// Look for all logs after the full backup or differential backup
			var latestLogBackups =
				possibleFiles.FindAll(
					x =>
						x.BackupType == Backup.TransactionLog &&
						(x.BackupDate >
						(latestDifferentialBackup != null ? latestDifferentialBackup.BackupDate : latestFullBackup.BackupDate)));
			if (latestLogBackups.Count > 0)
			{
				filesToFetch.AddRange(latestLogBackups);
			}

			return filesToFetch;
		}

		/// <summary>
		/// Builds the restore script.
		/// </summary>
		/// <param name="files">The fetched files.</param>
		/// <returns>The complete T-SQL restore script: the full restore (with
		/// dynamically generated MOVE clauses), any differential/log restores,
		/// then RECOVERY, CHECKDB and MULTI_USER statements.</returns>
		private StringBuilder BuildRestoreScript(Dictionary<FileInfo, string> files)
		{
			var database = ConfigHelper.GetConfigurationValue("DatabaseRestoredName");
			var localDatabasePath = new DirectoryInfo(ConfigHelper.GetConfigurationValue("DatabaseRestoredPath"));
			var databaseFilePrefix = ConfigHelper.GetConfigurationValue("DatabaseRestoredFilePrefix");
			var stopAtEnabled = ConfigHelper.GetConfigurationBoolean("StopAtEnabled");
			var stopAtDateTime = Common.ParseDate(ConfigHelper.GetConfigurationValue("StopAtDateTime"));
			var template = new StringBuilder();
			var script = new StringBuilder();
			var nl = Environment.NewLine;
			script.AppendFormat("-- RemoteStorageRestore Script to restore {2}. Generated on {0:yyyy-MM-dd HH:mm:ss}{1}{1}", DateTime.Now, nl, database);
			const string batchSeparator = @"GO";
			script.AppendLine(@"USE [master];");
			script.AppendLine(batchSeparator);
			// Kick out existing connections before the restore.
			script.AppendFormat(@"IF DB_ID('[{0}]') IS NOT NULL ALTER DATABASE [{0}] SET SINGLE_USER WITH ROLLBACK IMMEDIATE;{1}", database, nl);
			script.AppendLine(batchSeparator);
			// The dictionary is ordered so the full backup comes first; the T-SQL
			// template below is generated only for it.
			var full = true;
			foreach (var fileInfo in files)
			{
				if (full)
				{
					var line = @"SET NOCOUNT ON;";
					template.AppendLine(line);
					// We want to run the template for the first file only (full backup)
					line = $@"DECLARE @fullBackup NVARCHAR(MAX) = N'{fileInfo.Key.FullName}';";
					template.AppendLine(line);
					line = $@"DECLARE @path NVARCHAR(255) = N'{localDatabasePath.FullName}\{databaseFilePrefix}';";
					template.AppendLine(line);
					// Create template restore script for moving files
					line =
						$@"DECLARE @template NVARCHAR(MAX) = N'RESTORE DATABASE [{database}] FROM DISK = N''{fileInfo.Key.FullName
							}'' WITH {{%%MOVE%%}} REPLACE, NOUNLOAD, NORECOVERY, STATS = 5;'";
					template.AppendLine(line);
					// Create the temp table to populate the backup file list
					line =
						@"CREATE TABLE #FileListInfo ([FileListInfoID] INT IDENTITY(1,1), [LogicalName] NVARCHAR(128), [PhysicalName] NVARCHAR(260), [Type] CHAR(1), [FileGroupName] NVARCHAR(128), [Size] NUMERIC(20, 0), [MaxSize] NUMERIC(20, 0), [FileId] INT, [CreateLSN] NUMERIC(25, 0), [DropLSN] NUMERIC(25, 0), [UniqueId] UNIQUEIDENTIFIER, [ReadOnlyLSN] NUMERIC(25, 0), [ReadWriteLSN] NUMERIC(25, 0), [BackupSizeInBytes] BIGINT, [SourceBlockSize] INT, [FilegroupId] INT, [LogGroupGUID] UNIQUEIDENTIFIER, [DifferentialBaseLSN] NUMERIC(25), [DifferentialBaseGUID] UNIQUEIDENTIFIER, [IsReadOnly] INT, [IsPresent] INT, [TDEThumbprint] NVARCHAR(128));";
					template.AppendLine(line);
					line = $@"DECLARE @header NVARCHAR(MAX) = 'RESTORE FILELISTONLY FROM DISK = ''{fileInfo.Key.FullName}''';";
					template.AppendLine(line);
					// Get the file header info for this backup
					// If restoring to SQL Server 2016, there is an extra column at the end
					line =
						@"BEGIN TRY INSERT INTO #FileListInfo EXEC (@header); END TRY BEGIN CATCH ALTER TABLE #FileListInfo ADD [SnapshotUrl] NVARCHAR(128); INSERT INTO #FileListInfo EXEC (@header); END CATCH";
					template.AppendLine(line);
					// Declare the variables to loop through the backup file list
					line = @"DECLARE @sql NVARCHAR(MAX) = N'', @i INT, @x INT = 1;";
					template.AppendLine(line);
					line = @"SELECT @i = MAX([FileListInfoID]) FROM #FileListInfo;";
					template.AppendLine(line);
					line =
						@"WHILE @x <= @i BEGIN SELECT @sql = @sql + N' MOVE N''' + [LogicalName] + N''' TO N''' + @path + [LogicalName] + CASE WHEN [Type] = N'D' AND [FileId] = 1 THEN N'.mdf' WHEN [Type] = N'D' AND [FileId] <> 1 THEN N'.ndf' ELSE N'.ldf' END + ''',' FROM #FileListInfo WHERE [FileListInfoID] = @x; SELECT @x = @x + 1; END;";
					template.AppendLine(line);
					line = @"SELECT @sql = REPLACE(@template, N'{%%MOVE%%}', @sql);";
					template.AppendLine(line);
					// Drop the temp table
					line = @"DROP TABLE[#FileListInfo];";
					template.AppendLine(line);
					line = @"SET NOCOUNT OFF;";
					template.AppendLine(line);
					// Generate the T-SQL
					line = @"EXEC sp_executesql @sql;";
					template.AppendLine(line);
					script.AppendLine(template.ToString());
					script.AppendLine(batchSeparator);
					full = false; // Now run the rest of the script
				}
				else
				{
					// Differential/log restores; fileInfo.Value supplies the RESTORE
					// verb (e.g. DATABASE vs LOG) for this file.
					script.AppendFormat(
						@"RESTORE {2} [{0}] FROM DISK = N'{1}' WITH FILE = 1, NOUNLOAD, REPLACE, NORECOVERY, {3}STATS = 10;{4}", database,
						fileInfo.Key.FullName, fileInfo.Value,
						stopAtEnabled ? $"STOPAT = '{stopAtDateTime?.ToString("yyyy-MM-dd HH:mm:ss")}', " : string.Empty, nl);
					script.AppendLine(batchSeparator);
				}
			}
			// Bring the database online and verify it.
			script.AppendFormat(@"RESTORE DATABASE [{0}] WITH RECOVERY;{1}", database, nl);
			script.AppendLine(batchSeparator);
			script.AppendFormat(@"DBCC CHECKDB ([{0}]) WITH ALL_ERRORMSGS, NO_INFOMSGS, DATA_PURITY;{1}", database, nl);
			script.AppendLine(batchSeparator);
			script.AppendFormat(@"ALTER DATABASE [{0}] SET MULTI_USER;{1}", database, nl);
			script.AppendLine(batchSeparator);
			return script;
		}

		/// <summary>
		/// Fetches the files from Remote Storage for the restore script.
		/// </summary>
		/// <param name="database">The database.</param>
		/// <param name="sortOrder">The sort order.</param>
		/// <returns>The generated restore script.</returns>
		public StringBuilder FetchFilesForRestoreScript(string database, ItemSortOrder sortOrder)
		{
			var remoteItems = m_remoteItemClass == ItemClass.Blob
				? m_azureHelper.GetBlobItems()
				: m_fileHelper.GetFileItems();

			Console.WriteLine("Number of items found in Remote Storage: {0}", remoteItems.Count);
			Console.WriteLine();
			Console.WriteLine("Starting file review ...");

			var filesToFetch = ParseLatestBackup(remoteItems, database);

			var rfh = new RemoteFetchHelper(m_remoteItemClass);

			// Actually fetch the files
			var files = rfh.FetchItemsFromRemoteStorage(filesToFetch, sortOrder);

			// Generate the script and return it
			return BuildRestoreScript(files);
		}

		/// <summary>
		/// Generates the restore script.
		/// </summary>
		/// <param name="database">The database.</param>
		/// <param name="dumpFileList">Dump list of all files on RemoteStorage</param>
		/// <param name="sortOrder">The sort order</param>
		/// <returns>The restore script, or - when <paramref name="dumpFileList"/>
		/// is true - just a newline-separated listing of every remote item name.</returns>
		public StringBuilder GenerateRestoreScript(string database, bool dumpFileList, ItemSortOrder sortOrder)
		{
			if (!dumpFileList)
			{
				return FetchFilesForRestoreScript(database, sortOrder);
			}

			var remoteItems = m_remoteItemClass == ItemClass.Blob
				? m_azureHelper.GetBlobItems()
				: m_fileHelper.GetFileItems();

			Console.WriteLine("Number of items found in Remote Storage: {0}", remoteItems.Count);
			Console.WriteLine();
			Console.WriteLine("Dumping list of Blob Items to disk ...");

			var fileList = new StringBuilder();

			foreach (var item in remoteItems)
			{
				fileList.AppendLine(item.Name);
			}

			return fileList;
		}
	}
}
| |
#region File Description
//-----------------------------------------------------------------------------
// ScreenManager.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#endregion
#region Using Statements
using System;
using System.Diagnostics;
using System.Collections.Generic;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input.Touch;
using System.IO;
using System.IO.IsolatedStorage;
#endregion
namespace UserInterfaceSample
{
/// <summary>
/// The screen manager is a component which manages one or more GameScreen
/// instances. It maintains a stack of screens, calls their Update and Draw
/// methods at the appropriate times, and automatically routes input to the
/// topmost active screen.
/// </summary>
public class ScreenManager : DrawableGameComponent
{
#region Fields
List<GameScreen> screens = new List<GameScreen>();
List<GameScreen> screensToUpdate = new List<GameScreen>();
InputState input = new InputState();
SpriteBatch spriteBatch;
SpriteFont font;
Texture2D blankTexture;
bool isInitialized;
bool traceEnabled;
#endregion
#region Properties
/// <summary>
/// A default SpriteBatch shared by all the screens. This saves
/// each screen having to bother creating their own local instance.
/// </summary>
public SpriteBatch SpriteBatch
{
get { return spriteBatch; }
}
/// <summary>
/// A default font shared by all the screens. This saves
/// each screen having to bother loading their own local copy.
/// </summary>
public SpriteFont Font
{
get { return font; }
}
/// <summary>
/// A blank white texture shared by all the screens, useful
/// for drawing solid lines and rectangles with SpriteBatch.
/// This saves each screen from having to bother loading
/// their own local copy.
/// </summary>
public Texture2D BlankTexture
{
get { return blankTexture; }
}
/// <summary>
/// If true, the manager prints out a list of all the screens
/// each time it is updated. This can be useful for making sure
/// everything is being added and removed at the right times.
/// </summary>
public bool TraceEnabled
{
get { return traceEnabled; }
set { traceEnabled = value; }
}
#endregion
#region Initialization
/// <summary>
/// Constructs a new screen manager component.
/// </summary>
public ScreenManager(Game game)
: base(game)
{
// we must set EnabledGestures before we can query for them, but
// we don't assume the game wants to read them.
TouchPanel.EnabledGestures = GestureType.None;
}
/// <summary>
/// Initializes the screen manager component.
/// </summary>
public override void Initialize()
{
base.Initialize();
isInitialized = true;
}
/// <summary>
/// Load your graphics content.
/// </summary>
protected override void LoadContent()
{
// Load content belonging to the screen manager.
ContentManager content = Game.Content;
spriteBatch = new SpriteBatch(GraphicsDevice);
font = content.Load<SpriteFont>("Font\\MenuTitle");
blankTexture = new Texture2D(GraphicsDevice, 1, 1, false, SurfaceFormat.Color);
blankTexture.SetData(new[] { Color.White });
// Tell each of the screens to load their content.
foreach (GameScreen screen in screens)
{
screen.LoadContent();
}
}
/// <summary>
/// Unload your graphics content.
/// </summary>
protected override void UnloadContent()
{
// Tell each of the screens to unload their content.
foreach (GameScreen screen in screens)
{
screen.UnloadContent();
}
}
#endregion
#region Update and Draw
/// <summary>
/// Allows each screen to run logic.
/// </summary>
public override void Update(GameTime gameTime)
{
// Read the keyboard and gamepad.
input.Update();
// Make a copy of the master screen list, to avoid confusion if
// the process of updating one screen adds or removes others.
screensToUpdate.Clear();
foreach (GameScreen screen in screens)
screensToUpdate.Add(screen);
bool otherScreenHasFocus = !Game.IsActive;
bool coveredByOtherScreen = false;
// Loop as long as there are screens waiting to be updated.
while (screensToUpdate.Count > 0)
{
// Pop the topmost screen off the waiting list.
GameScreen screen = screensToUpdate[screensToUpdate.Count - 1];
screensToUpdate.RemoveAt(screensToUpdate.Count - 1);
// Update the screen.
screen.Update(gameTime, otherScreenHasFocus, coveredByOtherScreen);
if (screen.ScreenState == ScreenState.TransitionOn ||
screen.ScreenState == ScreenState.Active)
{
// If this is the first active screen we came across,
// give it a chance to handle input.
if (!otherScreenHasFocus)
{
screen.HandleInput(input);
otherScreenHasFocus = true;
}
// If this is an active non-popup, inform any subsequent
// screens that they are covered by it.
if (!screen.IsPopup)
coveredByOtherScreen = true;
}
}
// Print debug trace?
if (traceEnabled)
TraceScreens();
}
/// <summary>
/// Prints a list of all the screens, for debugging.
/// </summary>
void TraceScreens()
{
List<string> screenNames = new List<string>();
foreach (GameScreen screen in screens)
screenNames.Add(screen.GetType().Name);
Debug.WriteLine(string.Join(", ", screenNames.ToArray()));
}
/// <summary>
/// Tells each screen to draw itself.
/// </summary>
public override void Draw(GameTime gameTime)
{
foreach (GameScreen screen in screens)
{
if (screen.ScreenState == ScreenState.Hidden)
continue;
screen.Draw(gameTime);
}
}
#endregion
#region Public Methods
/// <summary>
/// Adds a new screen to the screen manager.
/// </summary>
public void AddScreen(GameScreen screen, PlayerIndex? controllingPlayer)
{
screen.ControllingPlayer = controllingPlayer;
screen.ScreenManager = this;
screen.IsExiting = false;
// If we have a graphics device, tell the screen to load content.
if (isInitialized)
{
screen.LoadContent();
}
screens.Add(screen);
// update the TouchPanel to respond to gestures this screen is interested in
TouchPanel.EnabledGestures = screen.EnabledGestures;
}
/// <summary>
/// Removes a screen from the screen manager. You should normally
/// use GameScreen.ExitScreen instead of calling this directly, so
/// the screen can gradually transition off rather than just being
/// instantly removed.
/// </summary>
public void RemoveScreen(GameScreen screen)
{
// If we have a graphics device, tell the screen to unload content.
if (isInitialized)
{
screen.UnloadContent();
}
screens.Remove(screen);
screensToUpdate.Remove(screen);
// if there is a screen still in the manager, update TouchPanel
// to respond to gestures that screen is interested in.
if (screens.Count > 0)
{
TouchPanel.EnabledGestures = screens[screens.Count - 1].EnabledGestures;
}
}
/// <summary>
/// Expose an array holding all the screens. We return a copy rather
/// than the real master list, because screens should only ever be added
/// or removed using the AddScreen and RemoveScreen methods.
/// </summary>
public GameScreen[] GetScreens()
{
return screens.ToArray();
}
/// <summary>
/// Helper draws a translucent black fullscreen sprite, used for fading
/// screens in and out, and for darkening the background behind popups.
/// </summary>
public void FadeBackBufferToBlack(float alpha)
{
Viewport viewport = GraphicsDevice.Viewport;
spriteBatch.Begin();
spriteBatch.Draw(blankTexture,
new Rectangle(0, 0, viewport.Width, viewport.Height),
Color.Black * alpha);
spriteBatch.End();
}
/// <summary>
/// Informs the screen manager to serialize its state to disk.
/// Persists the list of serializable screen types to
/// "ScreenManager\ScreenList.dat" and then gives each serializable screen
/// its own "ScreenManager\ScreenN.dat" stream to write into.
/// Any previously saved state is deleted first.
/// </summary>
public void SerializeState()
{
    // open up isolated storage
    using (IsolatedStorageFile storage = IsolatedStorageFile.GetUserStoreForApplication())
    {
        // if our screen manager directory already exists, delete the contents
        // so stale ScreenN.dat files from a longer previous stack cannot survive
        if (storage.DirectoryExists("ScreenManager"))
        {
            DeleteState(storage);
        }
        // otherwise just create the directory
        else
        {
            storage.CreateDirectory("ScreenManager");
        }
        // create a file we'll use to store the list of screens in the stack
        using (IsolatedStorageFileStream stream = storage.CreateFile("ScreenManager\\ScreenList.dat"))
        {
            using (BinaryWriter writer = new BinaryWriter(stream))
            {
                // write out the full name of all the types in our stack so we can
                // recreate them if needed. non-serializable screens are skipped,
                // matching the per-screen loop below.
                foreach (GameScreen screen in screens)
                {
                    if (screen.IsSerializable)
                    {
                        writer.Write(screen.GetType().AssemblyQualifiedName);
                    }
                }
            }
        }
        // now we create a new file stream for each screen so it can save its state
        // if it needs to. we name each file "ScreenX.dat" where X is the index of
        // the screen in the stack, to ensure the files are uniquely named
        // (screenIndex counts only serializable screens, so the numbering is
        // dense and lines up with the entries written to ScreenList.dat)
        int screenIndex = 0;
        foreach (GameScreen screen in screens)
        {
            if (screen.IsSerializable)
            {
                string fileName = string.Format("ScreenManager\\Screen{0}.dat", screenIndex);
                // open up the stream and let the screen serialize whatever state it wants
                using (IsolatedStorageFileStream stream = storage.CreateFile(fileName))
                {
                    screen.Serialize(stream);
                }
                screenIndex++;
            }
        }
    }
}
/// <summary>
/// Attempts to rebuild the screen stack from state previously written by
/// SerializeState. Recreated screens are re-added via AddScreen and then
/// given their per-screen "ScreenN.dat" stream to deserialize from.
/// </summary>
/// <returns>true if saved state existed and was restored; false otherwise.</returns>
public bool DeserializeState()
{
    // open up isolated storage
    using (IsolatedStorageFile storage = IsolatedStorageFile.GetUserStoreForApplication())
    {
        // see if our saved state directory exists
        if (storage.DirectoryExists("ScreenManager"))
        {
            try
            {
                // see if we have a screen list
                if (storage.FileExists("ScreenManager\\ScreenList.dat"))
                {
                    // load the list of screen types
                    using (IsolatedStorageFileStream stream = storage.OpenFile("ScreenManager\\ScreenList.dat", FileMode.Open, FileAccess.Read))
                    {
                        using (BinaryReader reader = new BinaryReader(stream))
                        {
                            while (reader.BaseStream.Position < reader.BaseStream.Length)
                            {
                                // read a line from our file
                                string line = reader.ReadString();
                                // if it isn't blank, we can create a screen from it
                                // (Type.GetType expects the assembly-qualified name
                                // written by SerializeState)
                                if (!string.IsNullOrEmpty(line))
                                {
                                    Type screenType = Type.GetType(line);
                                    GameScreen screen = Activator.CreateInstance(screenType) as GameScreen;
                                    // NOTE(review): controlling player is hard-coded to
                                    // PlayerIndex.One — confirm this is intended for all screens
                                    AddScreen(screen, PlayerIndex.One);
                                }
                            }
                        }
                    }
                }
                // next we give each screen a chance to deserialize from the disk
                // (assumes the stack contained only the screens recreated above,
                // so index i matches the ScreenN.dat numbering from SerializeState)
                for (int i = 0; i < screens.Count; i++)
                {
                    string filename = string.Format("ScreenManager\\Screen{0}.dat", i);
                    using (IsolatedStorageFileStream stream = storage.OpenFile(filename, FileMode.Open, FileAccess.Read))
                    {
                        screens[i].Deserialize(stream);
                    }
                }
                return true;
            }
            catch (Exception)
            {
                // if an exception was thrown while reading, odds are we cannot recover
                // from the saved state, so we will delete it so the game can correctly
                // launch. (deliberate swallow: a corrupt save must not crash startup)
                DeleteState(storage);
            }
        }
    }
    return false;
}
/// <summary>
/// Deletes the saved state files from isolated storage.
/// </summary>
/// <summary>
/// Deletes the saved state files from isolated storage.
/// </summary>
/// <param name="storage">The user isolated-storage store to clean.</param>
private void DeleteState(IsolatedStorageFile storage)
{
    // remove every file under the ScreenManager directory
    foreach (string file in storage.GetFileNames("ScreenManager\\*"))
    {
        storage.DeleteFile(Path.Combine("ScreenManager", file));
    }
}
#endregion
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using NUnit.Framework;
using QuantConnect.Data;
using QuantConnect.Data.Auxiliary;
using QuantConnect.Data.Market;
using QuantConnect.Logging;
using QuantConnect.Securities;
using QuantConnect.Util;
namespace QuantConnect.Tests.Common.Data.Auxiliary
{
[TestFixture]
public class FactorFileTests
{
    [Test]
    public void ReadsFactorFileWithoutInfValues()
    {
        // locals renamed to conventional camelCase (were PermTick/Market/_symbol)
        var permTick = "AAPL";
        var market = "usa";
        var symbol = new Symbol(SecurityIdentifier.GenerateEquity(permTick, market), permTick);
        var factorFile = TestGlobals.FactorFileProvider.Get(symbol) as CorporateFactorProvider;
        Assert.AreEqual(41, factorFile.SortedFactorFileData.Count);
        Assert.AreEqual(new DateTime(1998, 01, 01), factorFile.FactorFileMinimumDate.Value);
    }
    [Test]
    public void ReadsFactorFileWithExponentialNotation()
    {
        // Source NEWL factor file at 2019-12-09
        var lines = new[]
        {
            "19980102,0.8116779,1e+07",
            "20051108,0.8116779,1e+07",
            "20060217,0.8416761,1e+07",
            "20060516,0.8644420,1e+07",
            "20060814,0.8747766,1e+07",
            "20061115,0.8901232,1e+07",
            "20070314,0.9082148,1e+07",
            "20070522,0.9166239,1e+07",
            "20070814,0.9306799,1e+07",
            "20071120,0.9534326,1e+07",
            "20080520,0.9830510,1e+07",
            "20100802,1.0000000,1e+07",
            "20131016,1.0000000,1.11111e+06",
            "20131205,1.0000000,75188",
            "20140305,1.0000000,25000",
            "20140514,1.0000000,2500",
            "20140714,1.0000000,50",
            "20501231,1.0000000,1"
        };
        var factorFile = PriceScalingExtensions.SafeRead("PermTick", lines, SecurityType.Equity);
        Assert.AreEqual(5, factorFile.Count());
        Assert.IsNotNull(factorFile.FactorFileMinimumDate);
        Assert.AreEqual(new DateTime(2013, 12, 04), factorFile.FactorFileMinimumDate.Value);
    }
    [Test]
    public void ReadsFactorFileWithInfValues()
    {
        var lines = new[]
        {
            "19980102,1.0000000,inf",
            "20151211,1.0000000,inf",
            "20160330,1.0000000,2500",
            "20160915,1.0000000,80",
            "20501231,1.0000000,1"
        };
        // removed unused local 'DateTime? factorFileMinimumDate' (leftover from
        // the CorporateFactorRow.Parse out-parameter pattern used elsewhere)
        var factorFile = PriceScalingExtensions.SafeRead("PermTick", lines, SecurityType.Equity);
        Assert.AreEqual(3, factorFile.Count());
        Assert.IsNotNull(factorFile.FactorFileMinimumDate);
        Assert.AreEqual(new DateTime(2016, 3, 29), factorFile.FactorFileMinimumDate.Value);
    }
    [Test]
    public void CorrectlyDeterminesTimePriceFactors()
    {
        var reference = DateTime.Today;
        const string symbol = "n/a";
        var file = GetTestFactorFile(symbol, reference);
        // time price factors should be the price factor * split factor
        Assert.AreEqual(1, file.GetPriceFactor(reference, DataNormalizationMode.Adjusted));
        Assert.AreEqual(1, file.GetPriceFactor(reference.AddDays(-6), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.9, file.GetPriceFactor(reference.AddDays(-7), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.9, file.GetPriceFactor(reference.AddDays(-13), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.8, file.GetPriceFactor(reference.AddDays(-14), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.8, file.GetPriceFactor(reference.AddDays(-20), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.8m * .5m, file.GetPriceFactor(reference.AddDays(-21), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.8m * .5m, file.GetPriceFactor(reference.AddDays(-22), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.8m * .5m, file.GetPriceFactor(reference.AddDays(-89), DataNormalizationMode.Adjusted));
        Assert.AreEqual(.8m * .25m, file.GetPriceFactor(reference.AddDays(-91), DataNormalizationMode.Adjusted));
    }
    [Test]
    public void HasDividendEventOnNextTradingDay()
    {
        var reference = DateTime.Today;
        const string symbol = "n/a";
        decimal priceFactorRatio;
        decimal referencePrice;
        var file = GetTestFactorFile(symbol, reference);
        // NOTE(review): this first assert calls HasSplitEventOnNextTradingDay in a
        // dividend-focused test — possibly a copy-paste; confirm intent before changing
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference, out priceFactorRatio, out referencePrice));
        Assert.IsFalse(file.HasDividendEventOnNextTradingDay(reference.AddDays(-6), out priceFactorRatio, out referencePrice));
        Assert.IsTrue(file.HasDividendEventOnNextTradingDay(reference.AddDays(-7), out priceFactorRatio, out referencePrice));
        Assert.AreEqual(.9m/1m, priceFactorRatio);
        Assert.IsFalse(file.HasDividendEventOnNextTradingDay(reference.AddDays(-8), out priceFactorRatio, out referencePrice));
        Assert.IsFalse(file.HasDividendEventOnNextTradingDay(reference.AddDays(-13), out priceFactorRatio, out referencePrice));
        Assert.IsTrue(file.HasDividendEventOnNextTradingDay(reference.AddDays(-14), out priceFactorRatio, out referencePrice));
        Assert.AreEqual(.8m / .9m, priceFactorRatio);
        Assert.IsFalse(file.HasDividendEventOnNextTradingDay(reference.AddDays(-15), out priceFactorRatio, out referencePrice));
        Assert.IsFalse(file.HasDividendEventOnNextTradingDay(reference.AddDays(-364), out priceFactorRatio, out referencePrice));
        Assert.IsTrue(file.HasDividendEventOnNextTradingDay(reference.AddDays(-365), out priceFactorRatio, out referencePrice));
        Assert.AreEqual(.7m / .8m, priceFactorRatio);
        Assert.IsFalse(file.HasDividendEventOnNextTradingDay(reference.AddDays(-366), out priceFactorRatio, out referencePrice));
        Assert.IsNull(file.FactorFileMinimumDate);
    }
    [Test]
    public void HasSplitEventOnNextTradingDay()
    {
        var reference = DateTime.Today;
        const string symbol = "n/a";
        decimal splitFactor;
        decimal referencePrice;
        var file = GetTestFactorFile(symbol, reference);
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference, out splitFactor, out referencePrice));
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference.AddDays(-20), out splitFactor, out referencePrice));
        Assert.IsTrue(file.HasSplitEventOnNextTradingDay(reference.AddDays(-21), out splitFactor, out referencePrice));
        Assert.AreEqual(.5, splitFactor);
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference.AddDays(-22), out splitFactor, out referencePrice));
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference.AddDays(-89), out splitFactor, out referencePrice));
        Assert.IsTrue(file.HasSplitEventOnNextTradingDay(reference.AddDays(-90), out splitFactor, out referencePrice));
        Assert.AreEqual(.5, splitFactor);
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference.AddDays(-91), out splitFactor, out referencePrice));
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference.AddDays(-364), out splitFactor, out referencePrice));
        Assert.IsTrue(file.HasSplitEventOnNextTradingDay(reference.AddDays(-365), out splitFactor, out referencePrice));
        Assert.AreEqual(.5, splitFactor);
        Assert.IsFalse(file.HasSplitEventOnNextTradingDay(reference.AddDays(-366), out splitFactor, out referencePrice));
        Assert.IsNull(file.FactorFileMinimumDate);
    }
    [Test]
    public void GeneratesCorrectSplitsAndDividends()
    {
        var reference = new DateTime(2018, 01, 01);
        var file = GetTestFactorFile("SPY", reference);
        var exchangeHours = MarketHoursDatabase.FromDataFolder().GetExchangeHours(QuantConnect.Market.USA, Symbols.SPY, SecurityType.Equity);
        var splitsAndDividends = file.GetSplitsAndDividends(Symbols.SPY, exchangeHours);
        var dividend = (Dividend)splitsAndDividends.Single(d => d.Time == reference.AddDays(-6));
        var distribution = Dividend.ComputeDistribution(100m, .9m / 1m, 2);
        Assert.AreEqual(distribution, dividend.Distribution);
        dividend = (Dividend) splitsAndDividends.Single(d => d.Time == reference.AddDays(-13));
        distribution = Math.Round(Dividend.ComputeDistribution(100m, .8m / .9m, 2), 2);
        Assert.AreEqual(distribution, dividend.Distribution);
        var split = (Split) splitsAndDividends.Single(d => d.Time == reference.AddDays(-20));
        var splitFactor = .5m;
        Assert.AreEqual(splitFactor, split.SplitFactor);
        split = (Split) splitsAndDividends.Single(d => d.Time == reference.AddDays(-89));
        splitFactor = .5m;
        Assert.AreEqual(splitFactor, split.SplitFactor);
        dividend = splitsAndDividends.OfType<Dividend>().Single(d => d.Time == reference.AddDays(-363));
        distribution = Dividend.ComputeDistribution(100m, .7m / .8m, 2);
        Assert.AreEqual(distribution, dividend.Distribution);
        split = splitsAndDividends.OfType<Split>().Single(d => d.Time == reference.AddDays(-363));
        splitFactor = .5m;
        Assert.AreEqual(splitFactor, split.SplitFactor);
    }
    [Test]
    public void GetsSplitsAndDividends()
    {
        var factorFile = GetFactorFile_AAPL2018_05_11();
        var exchangeHours = MarketHoursDatabase.FromDataFolder().GetExchangeHours(QuantConnect.Market.USA, Symbols.SPY, SecurityType.Equity);
        var splitsAndDividends = factorFile.GetSplitsAndDividends(Symbols.AAPL, exchangeHours).ToList();
        foreach (var sad in splitsAndDividends)
        {
            Log.Trace($"{sad.Time.Date:yyyy-MM-dd}: {sad}");
        }
        var splits = splitsAndDividends.OfType<Split>().ToList();
        var dividends = splitsAndDividends.OfType<Dividend>().ToList();
        var dividend = dividends.Single(d => d.Time == new DateTime(2018, 05, 11));
        Assert.AreEqual(0.73m, dividend.Distribution.RoundToSignificantDigits(6));
        var split = splits.Single(d => d.Time == new DateTime(2014, 06, 09));
        Assert.AreEqual((1/7m).RoundToSignificantDigits(6), split.SplitFactor);
    }
    [Test]
    public void AppliesDividend()
    {
        var factorFileBeforeDividend = GetFactorFile_AAPL2018_05_08();
        var factorFileAfterDividend = GetFactorFile_AAPL2018_05_11();
        var exchangeHours = MarketHoursDatabase.FromDataFolder().GetExchangeHours(QuantConnect.Market.USA, Symbols.SPY, SecurityType.Equity);
        var dividend = new Dividend(Symbols.AAPL, new DateTime(2018, 05, 11), 0.73m, 190.03m);
        var actual = factorFileBeforeDividend.Apply(new List<BaseData> {dividend}, exchangeHours);
        Assert.AreEqual(factorFileAfterDividend.Count(), actual.Count());
        Assert.True(actual.First().Date == new DateTime(1998, 01, 02),
            $"Factor file first row changed from 1998-01-02 to {actual.First().Date:yyyy-MM-dd} after applying new event");
        foreach (var item in factorFileAfterDividend.Reverse().Zip(actual.Reverse(), (a,e) => new{actual=a, expected=e}))
        {
            var expected = (CorporateFactorRow)item.expected;
            var actualRow = (CorporateFactorRow)item.actual;
            Log.Trace($"expected: {item.expected} actual: {item.actual} diff: {100* (1 - actualRow.PriceFactor/expected.PriceFactor):0.0000}%");
            Assert.AreEqual(item.expected.Date, item.actual.Date);
            Assert.AreEqual(expected.ReferencePrice, actualRow.ReferencePrice);
            Assert.AreEqual(expected.SplitFactor, actualRow.SplitFactor);
            // price factors are recomputed, so allow a small relative tolerance
            var delta = (double)expected.PriceFactor * 1e-5;
            Assert.AreEqual((double)expected.PriceFactor, (double)actualRow.PriceFactor, delta);
        }
    }
    [Test]
    public void AppliesSplit()
    {
        var factorFileBeforeSplit = GetFactorFile_LODE20191127();
        var factorFileAfterSplit = GetFactorFile_LODE20191129();
        var exchangeHours = MarketHoursDatabase.FromDataFolder().GetExchangeHours(QuantConnect.Market.USA, Symbols.SPY, SecurityType.Equity);
        var eventTime = new DateTime(2019, 11, 29);
        var split = new Split(Symbols.LODE, eventTime, 0.06m, 5, SplitType.SplitOccurred);
        var actual = factorFileBeforeSplit.Apply(new List<BaseData> { split }, exchangeHours);
        Assert.AreEqual(factorFileAfterSplit.Count(), actual.Count());
        Assert.True(actual.First().Date == new DateTime(1998, 01, 02),
            $"Factor file first row changed from 1998-01-02 to {actual.First().Date:yyyy-MM-dd} after applying new event");
        Assert.True(((CorporateFactorRow)actual.First()).SplitFactor == 25m, "Factor File split factor is not computed correctly");
        foreach (var item in actual.Reverse().Zip(factorFileAfterSplit.Reverse(), (a, e) => new { actual = a, expected = e }))
        {
            var expected = (CorporateFactorRow)item.expected;
            var actualRow = (CorporateFactorRow)item.actual;
            Log.Trace($"expected: {item.expected} actual: {item.actual} diff: {100 * (1 - actualRow.PriceFactor / expected.PriceFactor):0.0000}%");
            Assert.AreEqual(item.expected.Date, item.actual.Date);
            Assert.AreEqual(expected.ReferencePrice, actualRow.ReferencePrice);
            Assert.AreEqual(expected.SplitFactor, actualRow.SplitFactor);
            var delta = (double)expected.PriceFactor * 1e-5;
            Assert.AreEqual((double)expected.PriceFactor, (double)actualRow.PriceFactor, delta);
        }
    }
    [Test]
    public void CanHandleRepeatedEventsCorrectly()
    {
        var factorFileBeforeSplit = GetFactorFile_LODE20191127();
        var factorFileAfterSplit = GetFactorFile_LODE20191129();
        var exchangeHours = MarketHoursDatabase.FromDataFolder().GetExchangeHours(QuantConnect.Market.USA, Symbols.SPY, SecurityType.Equity);
        var eventTime = new DateTime(2019, 11, 29);
        var split = new Split(Symbols.LODE, eventTime, 0.06m, 5, SplitType.SplitOccurred);
        // the same event applied three times must only be counted once
        var events = new List<BaseData> { split, split, split };
        var actual = factorFileBeforeSplit.Apply(events, exchangeHours);
        Assert.AreEqual(factorFileAfterSplit.Count(), actual.Count());
        Assert.True(actual.First().Date == new DateTime(1998, 01, 02),
            $"Factor file first row changed from 1998-01-02 to {actual.First().Date:yyyy-MM-dd} after applying new event");
        Assert.True(((CorporateFactorRow)actual.First()).SplitFactor == 25m, "Factor File split factor is not computed correctly");
        foreach (var item in actual.Reverse().Zip(factorFileAfterSplit.Reverse(), (a, e) => new { actual = a, expected = e }))
        {
            var expectedRow = (CorporateFactorRow)item.expected;
            var actualRow = (CorporateFactorRow)item.actual;
            Log.Trace($"expected: {item.expected} actual: {item.actual} diff: {100 * (1 - actualRow.PriceFactor / expectedRow.PriceFactor):0.0000}%");
            Assert.AreEqual(item.expected.Date, item.actual.Date);
            Assert.AreEqual(expectedRow.ReferencePrice, actualRow.ReferencePrice);
            Assert.AreEqual(expectedRow.SplitFactor, actualRow.SplitFactor);
            var delta = (double)expectedRow.PriceFactor * 1e-5;
            Assert.AreEqual((double)expectedRow.PriceFactor, (double)actualRow.PriceFactor, delta);
        }
    }
    [Test]
    public void AppliesSplitAndDividendAtSameTime()
    {
        var reference = new DateTime(2018, 08, 01);
        var exchangeHours = MarketHoursDatabase.FromDataFolder().GetExchangeHours(QuantConnect.Market.USA, Symbols.SPY, SecurityType.Equity);
        var expected = GetTestFactorFile("AAPL", reference);
        // remove the last entry that contains a split and dividend at the same time
        var factorFile = new CorporateFactorProvider("AAPL", expected.SortedFactorFileData.Where(kvp => kvp.Value.Single().PriceFactor >= .8m).Select(kvp => kvp.Value.Single()));
        var actual = factorFile.Apply(new List<BaseData>
        {
            new Split(Symbols.AAPL, reference.AddDays(-364), 100m, 1 / 2m, SplitType.SplitOccurred),
            new Dividend(Symbols.AAPL, reference.AddDays(-364), 12.5m, 100m)
        }, exchangeHours);
        foreach (var item in actual.Reverse().Zip(expected.Reverse(), (a, e) => new {actual = a, expected = e}))
        {
            var expectedRow = (CorporateFactorRow)item.expected;
            var actualRow = (CorporateFactorRow)item.actual;
            Log.Trace($"expected: {item.expected} actual: {item.actual} diff: {100 * (1 - actualRow.PriceFactor / expectedRow.PriceFactor):0.0000}%");
            Assert.AreEqual(item.expected.Date, item.actual.Date);
            Assert.AreEqual(expectedRow.ReferencePrice, actualRow.ReferencePrice);
            Assert.AreEqual(expectedRow.SplitFactor, actualRow.SplitFactor);
            Assert.AreEqual(expectedRow.PriceFactor.RoundToSignificantDigits(4), actualRow.PriceFactor.RoundToSignificantDigits(4));
        }
    }
    [Test]
    public void ReadsOldFactorFileFormat()
    {
        var lines = new[]
        {
            "19980102,1.0000000,0.5",
            "20130828,1.0000000,0.5",
            "20501231,1.0000000,1"
        };
        var factorFile = PriceScalingExtensions.SafeRead("bno", lines, SecurityType.Equity) as CorporateFactorProvider;
        var firstRow = factorFile.SortedFactorFileData[new DateTime(1998, 01, 02)].Single();
        Assert.AreEqual(1m, firstRow.PriceFactor);
        Assert.AreEqual(0.5m, firstRow.SplitFactor);
        Assert.AreEqual(0m, firstRow.ReferencePrice);
        var secondRow = factorFile.SortedFactorFileData[new DateTime(2013, 08, 28)].Single();
        Assert.AreEqual(1m, secondRow.PriceFactor);
        Assert.AreEqual(0.5m, secondRow.SplitFactor);
        // bug fix: previously re-asserted firstRow.ReferencePrice instead of
        // checking the row under test (copy-paste error)
        Assert.AreEqual(0m, secondRow.ReferencePrice);
        var thirdRow = factorFile.SortedFactorFileData[Time.EndOfTime].Single();
        Assert.AreEqual(1m, thirdRow.PriceFactor);
        Assert.AreEqual(1m, thirdRow.SplitFactor);
        Assert.AreEqual(0m, thirdRow.ReferencePrice);
    }
    [Test]
    public void ResolvesCorrectMostRecentFactorChangeDate()
    {
        var lines = new[]
        {
            "19980102,1.0000000,0.5",
            "20130828,1.0000000,0.5",
            "20501231,1.0000000,1"
        };
        var factorFile = PriceScalingExtensions.SafeRead("bno", lines, SecurityType.Equity) as CorporateFactorProvider;
        // the 2050 sentinel row must be ignored when resolving the change date
        Assert.AreEqual(new DateTime(2013, 08, 28), factorFile.MostRecentFactorChange);
    }
    [Test]
    [TestCase("")]
    [TestCase("20501231,1.0000000,1")]
    public void EmptyFactorFileReturnsEmptyListForSplitsAndDividends(string contents)
    {
        var lines = contents.Split('\n').Where(l => !string.IsNullOrWhiteSpace(l));
        var factorFile = PriceScalingExtensions.SafeRead("bno", lines, SecurityType.Equity) as CorporateFactorProvider;
        Assert.IsEmpty(factorFile.GetSplitsAndDividends(Symbols.SPY, SecurityExchangeHours.AlwaysOpen(TimeZones.NewYork)));
    }
    /// <summary>
    /// Builds a small synthetic factor file relative to <paramref name="reference"/>:
    /// dividends at -7 and -14 days, splits at -21 and -90 days, and a combined
    /// split+dividend at -365 days.
    /// </summary>
    private static CorporateFactorProvider GetTestFactorFile(string symbol, DateTime reference)
    {
        var file = new CorporateFactorProvider(symbol, new List<CorporateFactorRow>
        {
            new CorporateFactorRow(reference, 1, 1),
            new CorporateFactorRow(reference.AddDays(-7), .9m, 1, 100m), // dividend
            new CorporateFactorRow(reference.AddDays(-14), .8m, 1, 100m), // dividend
            new CorporateFactorRow(reference.AddDays(-21), .8m, .5m, 100m), // split
            new CorporateFactorRow(reference.AddDays(-90), .8m, .25m, 100m), // split
            new CorporateFactorRow(reference.AddDays(-365), .7m, .125m, 100m) // split+dividend
        });
        return file;
    }
    /// <summary>Fetches the factor file for a ticker from the test-global provider.</summary>
    private static IFactorProvider GetFactorFile(string permtick)
    {
        return TestGlobals.FactorFileProvider.Get(permtick);
    }
    private static CorporateFactorProvider GetFactorFile_LODE20191127()
    {
        const string factorFileContents = @"
19980102,1,5,8.5,qq
20171109,1,5,0.12,qq
20501231,1,1,0,qq
";
        DateTime? factorFileMinimumDate;
        var reader = new StreamReader(factorFileContents.ToStream());
        var enumerable = new StreamReaderEnumerable(reader).Where(line => line.Length > 0);
        var factorFileRows = CorporateFactorRow.Parse(enumerable, out factorFileMinimumDate);
        return new CorporateFactorProvider("lode", factorFileRows, factorFileMinimumDate);
    }
    private static CorporateFactorProvider GetFactorFile_LODE20191129()
    {
        const string factorFileContents = @"
19980102,1,25,8.5,qq
20171109,1,25,0.12,qq
20191127,1,5,0.06,qq
20501231,1,1,0,qq
";
        DateTime? factorFileMinimumDate;
        var reader = new StreamReader(factorFileContents.ToStream());
        var enumerable = new StreamReaderEnumerable(reader).Where(line => line.Length > 0);
        var factorFileRows = CorporateFactorRow.Parse(enumerable, out factorFileMinimumDate);
        return new CorporateFactorProvider("lode", factorFileRows, factorFileMinimumDate);
    }
    private static CorporateFactorProvider GetFactorFile_AAPL2018_05_11()
    {
        const string factorFileContents = @"
19980102,0.8893653,0.0357143,16.25
20000620,0.8893653,0.0357143,101
20050225,0.8893653,0.0714286,88.97
20120808,0.8893653,0.142857,619.85
20121106,0.8931837,0.142857,582.85
20130206,0.8972636,0.142857,457.285
20130508,0.9024937,0.142857,463.71
20130807,0.908469,0.142857,464.94
20131105,0.9144679,0.142857,525.58
20140205,0.9198056,0.142857,512.59
20140507,0.9253111,0.142857,592.34
20140606,0.9304792,0.142857,645.57
20140806,0.9304792,1,94.96
20141105,0.9351075,1,108.86
20150204,0.9391624,1,119.55
20150506,0.9428692,1,125.085
20150805,0.9468052,1,115.4
20151104,0.9510909,1,122.01
20160203,0.9551617,1,96.34
20160504,0.9603451,1,94.19
20160803,0.9661922,1,105.8
20161102,0.9714257,1,111.6
20170208,0.9764128,1,132.04
20170510,0.9806461,1,153.26
20170809,0.9846939,1,161.1
20171109,0.9885598,1,175.87
20180208,0.9921138,1,155.16
20180510,0.9961585,1,190.03
20501231,1,1,0
";
        DateTime? factorFileMinimumDate;
        var reader = new StreamReader(factorFileContents.ToStream());
        var enumerable = new StreamReaderEnumerable(reader).Where(line => line.Length > 0);
        var factorFileRows = CorporateFactorRow.Parse(enumerable, out factorFileMinimumDate);
        return new CorporateFactorProvider("aapl", factorFileRows, factorFileMinimumDate);
    }
    // AAPL experiences a 0.73 dividend distribution on 2018.05.11
    private static CorporateFactorProvider GetFactorFile_AAPL2018_05_08()
    {
        const string factorFileContents = @"
19980102,0.8927948,0.0357143,16.25
20000620,0.8927948,0.0357143,101
20050225,0.8927948,0.0714286,88.97
20120808,0.8927948,0.142857,619.85
20121106,0.8966279,0.142857,582.85
20130206,0.9007235,0.142857,457.285
20130508,0.9059737,0.142857,463.71
20130807,0.9119721,0.142857,464.94
20131105,0.9179942,0.142857,525.58
20140205,0.9233525,0.142857,512.59
20140507,0.9288793,0.142857,592.34
20140606,0.9340673,0.142857,645.57
20140806,0.9340673,1,94.96
20141105,0.9387135,1,108.86
20150204,0.942784,1,119.55
20150506,0.9465051,1,125.085
20150805,0.9504563,1,115.4
20151104,0.9547586,1,122.01
20160203,0.9588451,1,96.34
20160504,0.9640485,1,94.19
20160803,0.9699181,1,105.8
20161102,0.9751718,1,111.6
20170208,0.9801781,1,132.04
20170510,0.9844278,1,153.26
20170809,0.9884911,1,161.1
20171109,0.992372,1,175.87
20180208,0.9959397,1,155.16
20501231,1,1,0
";
        DateTime? factorFileMinimumDate;
        var reader = new StreamReader(factorFileContents.ToStream());
        var enumerable = new StreamReaderEnumerable(reader).Where(line => line.Length > 0);
        var factorFileRows = CorporateFactorRow.Parse(enumerable, out factorFileMinimumDate);
        return new CorporateFactorProvider("aapl", factorFileRows, factorFileMinimumDate);
    }
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="SessionStateSection.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web.Configuration {
using System;
using System.Xml;
using System.Configuration;
using System.Collections.Specialized;
using System.Collections;
using System.Globalization;
using System.IO;
using System.Text;
using System.ComponentModel;
using System.Web.SessionState;
using System.Diagnostics;
using System.Security.Permissions;
/* <!-- sessionState Attributes:
mode="[Off|InProc|StateServer|SQLServer|Custom]"
stateConnectionString="tcpip=server:port"
stateNetworkTimeout="timeout for network operations with State Server, in seconds"
sqlConnectionString="valid System.Data.SqlClient.SqlConnection string, minus Initial Catalog"
sqlCommandTimeout="timeout for SQL commands sent to SQL Server, in seconds"
sqlConnectionRetryInterval="the interval the SQL State provider will retry opening connections and executing SQL commands when fatal errors occur, in seconds"
customProvider="name of the custom provider"
cookieless="[true|false|UseCookies|UseUri|AutoDetect|UseDeviceProfile]"
cookieName="To override the default cookie name used for storing session ID"
allowCustomSqlDatabase="[true|false]" - If true, the user can specify the Initial Catalog value in sqlConnectionString
compressionEnabled="[true|false]"
timeout="timeout in minutes"
partitionResolverType="[fully qualified type of partition resolver]"
useHostingIdentity="[true|false]"
sessionIDManagerType="[fully qualified type of session ID Manager]"
Child nodes:
<providers> Custom store providers (class must inherit SessionStateStoreProviderBase)
<add Add a provider
name="string" Name to identify this provider instance by
type="string" Class that implements ISessionStateStore
provider-specific-configuration />
<remove Remove a provider
name="string" /> Name of provider to remove
<clear/> Remove all providers
</providers>
-->
<sessionState
mode="InProc"
stateConnectionString="tcpip=loopback:42424"
stateNetworkTimeout="10"
sqlConnectionString="data source=localhost;Integrated Security=SSPI"
sqlCommandTimeout="30"
customProvider=""
cookieless="false"
allowCustomSqlDatabase="false"
compressionEnabled="false"
regenerateExpiredSessionId="false"
timeout="20"
>
<providers>
</providers>
</sessionState>
*/
public sealed class SessionStateSection : ConfigurationSection {
private static readonly ConfigurationElementProperty s_elemProperty =
new ConfigurationElementProperty(new CallbackValidator(typeof(SessionStateSection), Validate));
private static ConfigurationPropertyCollection _properties;
private static readonly ConfigurationProperty _propMode =
new ConfigurationProperty("mode",
typeof(SessionStateMode),
SessionStateModule.MODE_DEFAULT,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propStateConnectionString =
new ConfigurationProperty("stateConnectionString",
typeof(string),
SessionStateModule.STATE_CONNECTION_STRING_DEFAULT,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propStateNetworkTimeout =
new ConfigurationProperty("stateNetworkTimeout",
typeof(TimeSpan),
#if FEATURE_PAL // FEATURE_PAL does not enable OutOfProcSessionStore
TimeSpan.FromSeconds(600),
#else // FEATURE_PAL
TimeSpan.FromSeconds((long)
OutOfProcSessionStateStore.STATE_NETWORK_TIMEOUT_DEFAULT),
#endif // FEATURE_PAL
StdValidatorsAndConverters.TimeSpanSecondsOrInfiniteConverter,
StdValidatorsAndConverters.PositiveTimeSpanValidator,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propSqlConnectionString =
new ConfigurationProperty("sqlConnectionString",
typeof(string),
#if FEATURE_PAL // FEATURE_PAL does not enable SessionStateModule
"data source=localhost;Integrated Security=SSPI",
#else // FEATURE_PAL
SessionStateModule.SQL_CONNECTION_STRING_DEFAULT,
#endif // FEATURE_PAL
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propSqlCommandTimeout =
new ConfigurationProperty("sqlCommandTimeout",
typeof(TimeSpan),
#if FEATURE_PAL // FEATURE_PAL does not enable SqlSessionStateStore
TimeSpan.FromSeconds(1800),
#else // FEATURE_PAL
TimeSpan.FromSeconds((long)
SqlSessionStateStore.SQL_COMMAND_TIMEOUT_DEFAULT),
#endif // FEATURE_PAL
StdValidatorsAndConverters.TimeSpanSecondsOrInfiniteConverter,
null,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propSqlConnectionRetryInterval =
new ConfigurationProperty("sqlConnectionRetryInterval",
typeof(TimeSpan),
TimeSpan.FromSeconds(0),
StdValidatorsAndConverters.TimeSpanSecondsOrInfiniteConverter,
null,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propCustomProvider =
new ConfigurationProperty("customProvider",
typeof(string),
String.Empty,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propCookieless =
new ConfigurationProperty("cookieless",
typeof(string),
SessionIDManager.COOKIEMODE_DEFAULT.ToString(),
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propCookieName =
new ConfigurationProperty("cookieName",
typeof(string),
SessionIDManager.SESSION_COOKIE_DEFAULT,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propTimeout =
new ConfigurationProperty("timeout",
typeof(TimeSpan),
TimeSpan.FromMinutes((long)SessionStateModule.TIMEOUT_DEFAULT),
StdValidatorsAndConverters.TimeSpanMinutesOrInfiniteConverter,
new TimeSpanValidator(TimeSpan.FromMinutes(1), TimeSpan.MaxValue),
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propAllowCustomSqlDatabase =
new ConfigurationProperty("allowCustomSqlDatabase",
typeof(bool),
false,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propCompressionEnabled =
new ConfigurationProperty("compressionEnabled",
typeof(bool),
false,
ConfigurationPropertyOptions.None);
// private static readonly ConfigurationProperty _propLockAttributes =
// new ConfigurationProperty("lockAttributes",
// typeof(string),
// "",
// ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propProviders =
new ConfigurationProperty("providers",
typeof(ProviderSettingsCollection),
null,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propRegenerateExpiredSessionId =
new ConfigurationProperty("regenerateExpiredSessionId",
typeof(bool),
true,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propPartitionResolverType =
new ConfigurationProperty("partitionResolverType",
typeof(string),
String.Empty,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propUseHostingIdentity =
new ConfigurationProperty("useHostingIdentity",
typeof(bool),
true,
ConfigurationPropertyOptions.None);
private static readonly ConfigurationProperty _propSessionIDManagerType =
new ConfigurationProperty("sessionIDManagerType",
typeof(string),
String.Empty,
ConfigurationPropertyOptions.None);
private HttpCookieMode cookielessCache = SessionIDManager.COOKIEMODE_DEFAULT;
private bool cookielessCached = false;
private bool regenerateExpiredSessionIdCache = false;
private bool regenerateExpiredSessionIdCached = false;
static SessionStateSection() {
// Property initialization
_properties = new ConfigurationPropertyCollection();
_properties.Add(_propMode);
_properties.Add(_propStateConnectionString);
_properties.Add(_propStateNetworkTimeout);
_properties.Add(_propSqlConnectionString);
_properties.Add(_propSqlCommandTimeout);
_properties.Add(_propSqlConnectionRetryInterval);
_properties.Add(_propCustomProvider);
_properties.Add(_propCookieless);
_properties.Add(_propCookieName);
_properties.Add(_propTimeout);
_properties.Add(_propAllowCustomSqlDatabase);
_properties.Add(_propCompressionEnabled);
// _properties.Add(_propLockAttributes);
_properties.Add(_propProviders);
_properties.Add(_propRegenerateExpiredSessionId);
_properties.Add(_propPartitionResolverType);
_properties.Add(_propUseHostingIdentity);
_properties.Add(_propSessionIDManagerType);
}
// Default constructor; all state is supplied by the configuration system.
public SessionStateSection() {
}
// Exposes the statically-built property collection to the base
// ConfigurationSection machinery.
protected override ConfigurationPropertyCollection Properties {
    get {
        return _properties;
    }
}
// Session-state storage mode (InProc, StateServer, SQLServer, Custom, Off).
[ConfigurationProperty("mode", DefaultValue = SessionStateModule.MODE_DEFAULT)]
public SessionStateMode Mode {
    get {
        return (SessionStateMode)base[_propMode];
    }
    set {
        base[_propMode] = value;
    }
}
// Connection string for the out-of-process state server (StateServer mode).
[ConfigurationProperty("stateConnectionString", DefaultValue = SessionStateModule.STATE_CONNECTION_STRING_DEFAULT)]
public string StateConnectionString {
    get {
        return (string)base[_propStateConnectionString];
    }
    set {
        base[_propStateConnectionString] = value;
    }
}
// Network timeout for state-server connections; serialized as whole
// seconds (or "Infinite") via the converter below. Defaults to 10s.
[ConfigurationProperty("stateNetworkTimeout", DefaultValue = "00:00:10")]
[TypeConverter(typeof(TimeSpanSecondsOrInfiniteConverter))]
public TimeSpan StateNetworkTimeout {
    get {
        return (TimeSpan)base[_propStateNetworkTimeout];
    }
    set {
        base[_propStateNetworkTimeout] = value;
    }
}
// Connection string for SQLServer mode session storage.
[ConfigurationProperty("sqlConnectionString", DefaultValue = SessionStateModule.SQL_CONNECTION_STRING_DEFAULT)]
public string SqlConnectionString {
    get {
        return (string)base[_propSqlConnectionString];
    }
    set {
        base[_propSqlConnectionString] = value;
    }
}
// Timeout for individual SQL commands in SQLServer mode; seconds or
// "Infinite". Defaults to 30s.
[ConfigurationProperty("sqlCommandTimeout", DefaultValue = "00:00:30")]
[TypeConverter(typeof(TimeSpanSecondsOrInfiniteConverter))]
public TimeSpan SqlCommandTimeout {
    get {
        return (TimeSpan)base[_propSqlCommandTimeout];
    }
    set {
        base[_propSqlCommandTimeout] = value;
    }
}
// Retry interval for SQL connection failures; 0 (the default) means no retry delay.
[ConfigurationProperty("sqlConnectionRetryInterval", DefaultValue = "00:00:00")]
[TypeConverter(typeof(TimeSpanSecondsOrInfiniteConverter))]
public TimeSpan SqlConnectionRetryInterval {
    get {
        return (TimeSpan)base[_propSqlConnectionRetryInterval];
    }
    set {
        base[_propSqlConnectionRetryInterval] = value;
    }
}
// Name of the provider (from Providers) used when Mode is Custom.
[ConfigurationProperty("customProvider", DefaultValue = "")]
public string CustomProvider {
    get {
        return (string)base[_propCustomProvider];
    }
    set {
        base[_propCustomProvider] = value;
    }
}
// Cookie usage mode. The raw attribute is stored as a string because the
// legacy values "true"/"false" must also be accepted; the parsed enum is
// cached on first read (see ConvertToCookieMode).
[ConfigurationProperty("cookieless")]
public HttpCookieMode Cookieless {
    get {
        if (cookielessCached == false) {
            cookielessCache = ConvertToCookieMode((string)base[_propCookieless]);
            cookielessCached = true;
        }
        return cookielessCache;
    }
    set {
        // Persist the enum name and keep the cache in sync.
        base[_propCookieless] = value.ToString();
        cookielessCache = value;
    }
}
// Name of the session cookie.
[ConfigurationProperty("cookieName", DefaultValue = SessionIDManager.SESSION_COOKIE_DEFAULT)]
public string CookieName {
    get {
        return (string)base[_propCookieName];
    }
    set {
        base[_propCookieName] = value;
    }
}
// Session timeout, serialized in whole minutes (or "Infinite");
// must be at least one minute. Defaults to 20 minutes. Cache-backed
// modes enforce an upper bound in Validate().
[ConfigurationProperty("timeout", DefaultValue = "00:20:00")]
[TypeConverter(typeof(TimeSpanMinutesOrInfiniteConverter))]
[TimeSpanValidator(MinValueString = "00:01:00", MaxValueString = TimeSpanValidatorAttribute.TimeSpanMaxValue)]
public TimeSpan Timeout {
    get {
        return (TimeSpan)base[_propTimeout];
    }
    set {
        base[_propTimeout] = value;
    }
}
// Whether the SQL connection string may specify a custom database.
[ConfigurationProperty("allowCustomSqlDatabase", DefaultValue = false)]
public bool AllowCustomSqlDatabase {
    get {
        return (bool)base[_propAllowCustomSqlDatabase];
    }
    set {
        base[_propAllowCustomSqlDatabase] = value;
    }
}
// Whether session-state payloads are compressed before being stored.
[ConfigurationProperty("compressionEnabled", DefaultValue = false)]
public bool CompressionEnabled{
    get {
        return (bool)base[_propCompressionEnabled];
    }
    set {
        base[_propCompressionEnabled] = value;
    }
}
// Whether an expired session id presented by a client is replaced with a
// fresh one. Uses the same lazy-cache pattern as Cookieless.
[ConfigurationProperty("regenerateExpiredSessionId", DefaultValue = true)]
public bool RegenerateExpiredSessionId {
    get {
        if (regenerateExpiredSessionIdCached == false) {
            regenerateExpiredSessionIdCache = (bool)base[_propRegenerateExpiredSessionId];
            regenerateExpiredSessionIdCached = true;
        }
        return regenerateExpiredSessionIdCache;
    }
    set {
        base[_propRegenerateExpiredSessionId] = value;
        regenerateExpiredSessionIdCache = value;
    }
}
// Excluded from compilation: lockAttributes support was never shipped
// (its descriptor is also commented out in the static constructor).
#if DONTCOMPILE
[ConfigurationProperty("lockAttributes", DefaultValue = "")]
public string LockAttributes {
    get {
        return (string)base[_propLockAttributes];
    }
    set {
        // base.LockedAttributes.SetFromList(value); // keep the internal list in [....]
        base[_propLockAttributes] = value;
    }
}
#endif
// Collection of custom session-state provider registrations (read-only;
// the collection itself is mutated, not replaced).
[ConfigurationProperty("providers")]
public ProviderSettingsCollection Providers {
    get {
        return (ProviderSettingsCollection)base[_propProviders];
    }
}
// Type name of an IPartitionResolver implementation; empty when unused.
[ConfigurationProperty("partitionResolverType", DefaultValue = "")]
public string PartitionResolverType {
    get {
        return (string)base[_propPartitionResolverType];
    }
    set {
        base[_propPartitionResolverType] = value;
    }
}
// Whether connections to the state store impersonate the hosting identity.
[ConfigurationProperty("useHostingIdentity", DefaultValue = true)]
public bool UseHostingIdentity {
    get {
        return (bool)base[_propUseHostingIdentity];
    }
    set {
        base[_propUseHostingIdentity] = value;
    }
}
// Type name of a custom session ID manager; empty means the default manager.
[ConfigurationProperty("sessionIDManagerType", DefaultValue = "")]
public string SessionIDManagerType {
    get {
        return (string)base[_propSessionIDManagerType];
    }
    set {
        base[_propSessionIDManagerType] = value;
    }
}
// Maps the raw "cookieless" attribute value to an HttpCookieMode.
// The historical boolean spellings are still accepted: "true" maps to
// UseUri and "false" to UseCookies. Otherwise the string must be the
// name of an HttpCookieMode member; any other value raises a
// ConfigurationErrorsException pointing at the offending attribute.
HttpCookieMode ConvertToCookieMode(string s) {
    if (s == "true") {
        return HttpCookieMode.UseUri;
    }
    if (s == "false") {
        return HttpCookieMode.UseCookies;
    }

    Type enumType = typeof(HttpCookieMode);
    if (!Enum.IsDefined(enumType, s)) {
        // Build the list of accepted values for the error message:
        // the legacy boolean spellings followed by the enum member names.
        // (The original code also guarded "names == null" here, but names
        // is initialized to a non-null literal, so that branch was dead.)
        string names = "true, false";
        foreach (string name in Enum.GetNames(enumType)) {
            names += ", " + name;
        }
        throw new ConfigurationErrorsException(
            SR.GetString(SR.Invalid_enum_attribute, "cookieless", names),
            ElementInformation.Properties["cookieless"].Source,
            ElementInformation.Properties["cookieless"].LineNumber);
    }
    return (HttpCookieMode)(int)Enum.Parse(enumType, s);
}
// Validates the "cookieless" attribute as soon as the section is
// deserialized: ConvertToCookieMode throws if the value is unrecognized;
// its return value is intentionally discarded here.
protected override void PostDeserialize() {
    ConvertToCookieMode((string)base[_propCookieless]);
}
// Supplies the element-level validator (s_elemProperty, declared outside
// this chunk) that routes section validation through Validate().
protected override ConfigurationElementProperty ElementProperty {
    get {
        return s_elemProperty;
    }
}
// Element validator callback. Rejects timeouts above the cache-based
// maximum for the in-memory modes (InProc and StateServer), which store
// sessions in a cache with a bounded expiry.
private static void Validate(object value) {
    if (value == null) {
        throw new ArgumentNullException("sessionState");
    }
    Debug.Assert(value is SessionStateSection);
    SessionStateSection elem = (SessionStateSection)value;
    if (elem.Timeout.TotalMinutes > SessionStateModule.MAX_CACHE_BASED_TIMEOUT_MINUTES &&
        (elem.Mode == SessionStateMode.InProc ||
        elem.Mode == SessionStateMode.StateServer)) {
        throw new ConfigurationErrorsException(
            SR.GetString(SR.Invalid_cache_based_session_timeout),
            elem.ElementInformation.Properties["timeout"].Source,
            elem.ElementInformation.Properties["timeout"].LineNumber);
    }
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using NUnit.Framework;
using Analyzer = Lucene.Net.Analysis.Analyzer;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using LowerCaseTokenizer = Lucene.Net.Analysis.LowerCaseTokenizer;
using Token = Lucene.Net.Analysis.Token;
using TokenStream = Lucene.Net.Analysis.TokenStream;
using StandardAnalyzer = Lucene.Net.Analysis.Standard.StandardAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Term = Lucene.Net.Index.Term;
using ParseException = Lucene.Net.QueryParsers.ParseException;
using QueryParser = Lucene.Net.QueryParsers.QueryParser;
using FilteredQuery = Lucene.Net.Search.FilteredQuery;
using Hits = Lucene.Net.Search.Hits;
using IndexSearcher = Lucene.Net.Search.IndexSearcher;
using MultiSearcher = Lucene.Net.Search.MultiSearcher;
using PhraseQuery = Lucene.Net.Search.PhraseQuery;
using Query = Lucene.Net.Search.Query;
using RangeFilter = Lucene.Net.Search.RangeFilter;
using Searcher = Lucene.Net.Search.Searcher;
using TermQuery = Lucene.Net.Search.TermQuery;
using SpanNearQuery = Lucene.Net.Search.Spans.SpanNearQuery;
using SpanQuery = Lucene.Net.Search.Spans.SpanQuery;
using SpanTermQuery = Lucene.Net.Search.Spans.SpanTermQuery;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using Formatter = Lucene.Net.Highlight.Formatter;
using Highlighter = Lucene.Net.Highlight.Highlighter;
using NullFragmenter = Lucene.Net.Highlight.NullFragmenter;
using QueryScorer = Lucene.Net.Highlight.QueryScorer;
using Scorer = Lucene.Net.Highlight.Scorer;
using SimpleFragmenter = Lucene.Net.Highlight.SimpleFragmenter;
using SimpleHTMLEncoder = Lucene.Net.Highlight.SimpleHTMLEncoder;
using SimpleHTMLFormatter = Lucene.Net.Highlight.SimpleHTMLFormatter;
using TextFragment = Lucene.Net.Highlight.TextFragment;
using TokenGroup = Lucene.Net.Highlight.TokenGroup;
using WeightedTerm = Lucene.Net.Highlight.WeightedTerm;
namespace Lucene.Net.Search.Highlight
{
/// <summary> JUnit Test for Highlighter class.</summary>
/// <author> mark@searcharea.co.uk
/// </author>
[TestFixture]
public class HighlighterTest : Formatter
{
// {{Aroush-2.0.0}} Fix me
/*
private class AnonymousClassScorer : Scorer
{
public AnonymousClassScorer(HighlighterTest enclosingInstance)
{
InitBlock(enclosingInstance);
}
private void InitBlock(HighlighterTest enclosingInstance)
{
this.enclosingInstance = enclosingInstance;
}
private HighlighterTest enclosingInstance;
public HighlighterTest Enclosing_Instance
{
get
{
return enclosingInstance;
}
}
public virtual void StartFragment(TextFragment newFragment)
{
}
public virtual float GetTokenScore(Token token)
{
return 0;
}
public virtual float GetFragmentScore()
{
return 1;
}
public override bool SkipTo(int target)
{
return false;
}
public override int Doc()
{
return -1;
}
public override Explanation Explain(int doc)
{
return null;
}
public override bool Next()
{
return false;
}
public override float Score()
{
return 0;
}
}
*/
// Hand-built token stream for the overlap tests: emits "hi", "hispeed",
// then "speed" at position increment 0 (overlapping "hispeed"), then
// "10" and "foo". Mimics Java's anonymous inner class.
private class AnonymousClassTokenStream : TokenStream
{
    private HighlighterTest enclosingInstance;
    internal System.Collections.IEnumerator iter;
    internal System.Collections.ArrayList lst;

    public AnonymousClassTokenStream(HighlighterTest enclosingInstance)
    {
        this.enclosingInstance = enclosingInstance;
        lst = new System.Collections.ArrayList();
        lst.Add(new Token("hi", 0, 2));
        lst.Add(new Token("hispeed", 0, 8));
        // "speed" overlaps the previous token, so it gets increment 0.
        Token overlapped = new Token("speed", 3, 8);
        overlapped.SetPositionIncrement(0);
        lst.Add(overlapped);
        lst.Add(new Token("10", 8, 10));
        lst.Add(new Token("foo", 11, 14));
        iter = lst.GetEnumerator();
    }

    public HighlighterTest Enclosing_Instance
    {
        get { return enclosingInstance; }
    }

    // Returns the next canned token, or null when exhausted.
    public override Token Next()
    {
        if (!iter.MoveNext())
            return null;
        return (Token) iter.Current;
    }
}
// Same token stream as AnonymousClassTokenStream, but the bigger token
// ("hispeed") comes first, with "hi" overlapping it at increment 0.
private class AnonymousClassTokenStream1 : TokenStream
{
    private HighlighterTest enclosingInstance;
    internal System.Collections.IEnumerator iter;
    internal System.Collections.ArrayList lst;

    public AnonymousClassTokenStream1(HighlighterTest enclosingInstance)
    {
        this.enclosingInstance = enclosingInstance;
        lst = new System.Collections.ArrayList();
        lst.Add(new Token("hispeed", 0, 8));
        // "hi" overlaps the preceding "hispeed", so it gets increment 0.
        Token overlapped = new Token("hi", 0, 2);
        overlapped.SetPositionIncrement(0);
        lst.Add(overlapped);
        lst.Add(new Token("speed", 3, 8));
        lst.Add(new Token("10", 8, 10));
        lst.Add(new Token("foo", 11, 14));
        iter = lst.GetEnumerator();
    }

    public HighlighterTest Enclosing_Instance
    {
        get { return enclosingInstance; }
    }

    // Returns the next canned token, or null when exhausted.
    public override Token Next()
    {
        if (!iter.MoveNext())
            return null;
        return (Token) iter.Current;
    }
}
// Fixture state shared by the tests; initialized by setup code outside
// this chunk (DoSearching populates query/hits/searcher).
private IndexReader reader;
private const System.String FIELD_NAME = "contents";
private Query query;
internal RAMDirectory ramDir;
public Searcher searcher = null;
public Hits hits = null;
// Incremented by this class's Formatter callback each time a query term
// is highlighted; most tests assert on this count.
internal int numHighlights = 0;
internal Analyzer analyzer = new StandardAnalyzer();
// Sample documents indexed for the searches.
internal System.String[] texts = new System.String[]{"Hello this is a piece of text that is very long and contains too much preamble and the meat is really here which says kennedy has been shot", "This piece of text refers to Kennedy at the beginning then has a longer piece of text that is very long in the middle and finally ends with another reference to Kennedy", "JFK has been shot", "John Kennedy has been shot", "This text has a typo in referring to Keneddy"};
/// <summary> Constructor for HighlightExtractorTest.</summary>
/// <param name="arg0">
/// </param>
//public HighlighterTest(System.String arg0)
//{
//}
// Smoke test: fragments each hit for "Kennedy" with a 40-char fragmenter
// and prints the result; asserts nothing.
[Test]
public virtual void TestSimpleHighlighter()
{
    DoSearching("Kennedy");
    Highlighter highlighter = new Highlighter(new QueryScorer(query));
    highlighter.SetTextFragmenter(new SimpleFragmenter(40));
    int maxNumFragmentsRequired = 2;
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
        System.Console.Out.WriteLine("\t" + result);
    }
    // Not sure we can assert anything here - just running to check we don't throw any exceptions
}
// A simple term query over the fixture docs should produce 4 highlights.
[Test]
public virtual void TestGetBestFragmentsSimpleQuery()
{
    DoSearching("Kennedy");
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 4, "Failed to find correct number of highlights " + numHighlights + " found");
}
// A fuzzy query ("Kinnedy~") also matches the typo doc, giving 5 highlights.
[Test]
public virtual void TestGetFuzzyFragments()
{
    DoSearching("Kinnedy~");
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Single-character wildcard query.
[Test]
public virtual void TestGetWildCardFragments()
{
    DoSearching("K?nnedy");
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 4, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Multi-character wildcard in the middle of the term; matches the typo doc too.
[Test]
public virtual void TestGetMidWildCardFragments()
{
    DoSearching("K*dy");
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Range query highlighting; forces the parser to build a RangeQuery
// (rewritable, thus highlightable) rather than a ConstantScore filter.
[Test]
public virtual void TestGetRangeFragments()
{
    System.String queryString = FIELD_NAME + ":[kannedy TO kznnedy]";
    // Need to explicitly set the QueryParser property to use RangeQuery rather than RangeFilters
    QueryParser parser = new QueryParser(FIELD_NAME, new StandardAnalyzer());
    parser.SetUseOldRangeQuery(true);
    query = parser.Parse(queryString);
    DoSearching(query);
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Phrase query; the highlighter marks each term of the phrase
// individually, hence 2 highlights.
[Test]
public virtual void TestGetBestFragmentsPhrase()
{
    DoSearching("\"John Kennedy\"");
    DoStandardHighlights();
    // Currently highlights "John" and "Kennedy" separately
    Assert.IsTrue(numHighlights == 2, "Failed to find correct number of highlights " + numHighlights + " found");
}
// SpanNearQuery; like phrases, each span term is highlighted separately.
[Test]
public virtual void TestGetBestFragmentsSpan()
{
    SpanQuery[] clauses = new SpanQuery[]{new SpanTermQuery(new Term("contents", "john")), new SpanTermQuery(new Term("contents", "kennedy"))};
    SpanNearQuery snq = new SpanNearQuery(clauses, 1, true);
    DoSearching(snq);
    DoStandardHighlights();
    // Currently highlights "John" and "Kennedy" separately
    Assert.IsTrue(numHighlights == 2, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Regression test for an off-by-one in offset handling: text following
// the match (including brackets/digits) must survive unchanged.
[Test]
public virtual void TestOffByOne()
{
    TermQuery query = new TermQuery(new Term("data", "help"));
    Highlighter hg = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(query));
    hg.SetTextFragmenter(new NullFragmenter());
    System.String match = null;
    match = hg.GetBestFragment(new StandardAnalyzer(), "data", "help me [54-65]");
    Assert.AreEqual("<B>help</B> me [54-65]", match);
}
// Highlighting must see through FilteredQuery to the wrapped span query.
[Test]
public virtual void TestGetBestFragmentsFilteredQuery()
{
    RangeFilter rf = new RangeFilter("contents", "john", "john", true, true);
    SpanQuery[] clauses = new SpanQuery[]{new SpanTermQuery(new Term("contents", "john")), new SpanTermQuery(new Term("contents", "kennedy"))};
    SpanNearQuery snq = new SpanNearQuery(clauses, 1, true);
    FilteredQuery fq = new FilteredQuery(snq, rf);
    DoSearching(fq);
    DoStandardHighlights();
    // Currently highlights "John" and "Kennedy" separately
    Assert.IsTrue(numHighlights == 2, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Same as above but with a PhraseQuery inside the FilteredQuery.
[Test]
public virtual void TestGetBestFragmentsFilteredPhraseQuery()
{
    RangeFilter rf = new RangeFilter("contents", "john", "john", true, true);
    PhraseQuery pq = new PhraseQuery();
    pq.Add(new Term("contents", "john"));
    pq.Add(new Term("contents", "kennedy"));
    FilteredQuery fq = new FilteredQuery(pq, rf);
    DoSearching(fq);
    DoStandardHighlights();
    // Currently highlights "John" and "Kennedy" separately
    Assert.IsTrue(numHighlights == 2, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Prefix (multi-term) query expansion.
[Test]
public virtual void TestGetBestFragmentsMultiTerm()
{
    DoSearching("John Kenn*");
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Boolean OR query: both clauses contribute highlights.
[Test]
public virtual void TestGetBestFragmentsWithOr()
{
    DoSearching("JFK OR Kennedy");
    DoStandardHighlights();
    Assert.IsTrue(numHighlights == 5, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Exercises all three GetBestFragment(s) overloads against the same
// search, resetting numHighlights between passes; each must yield 4.
[Test]
public virtual void TestGetBestSingleFragment()
{
    DoSearching("Kennedy");
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    highlighter.SetTextFragmenter(new SimpleFragmenter(40));
    // Pass 1: explicit TokenStream overload.
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String result = highlighter.GetBestFragment(tokenStream, text);
        System.Console.Out.WriteLine("\t" + result);
    }
    Assert.IsTrue(numHighlights == 4, "Failed to find correct number of highlights " + numHighlights + " found");
    numHighlights = 0;
    // Pass 2: analyzer/field convenience overload.
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        highlighter.GetBestFragment(analyzer, FIELD_NAME, text);
    }
    Assert.IsTrue(numHighlights == 4, "Failed to find correct number of highlights " + numHighlights + " found");
    numHighlights = 0;
    // Pass 3: multi-fragment overload.
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        highlighter.GetBestFragments(analyzer, FIELD_NAME, text, 10);
    }
    Assert.IsTrue(numHighlights == 4, "Failed to find correct number of highlights " + numHighlights + " found");
}
// Fragment selection must follow term weights: with "hello" weighted 10x
// the best tiny fragment is "Hello"; after boosting "kennedy" to 50 it wins.
[Test]
public virtual void TestGetBestSingleFragmentWithWeights()
{
    WeightedTerm[] wTerms = new WeightedTerm[2];
    wTerms[0] = new WeightedTerm(10f, "hello");
    wTerms[1] = new WeightedTerm(1f, "kennedy");
    Highlighter highlighter = new Highlighter(new QueryScorer(wTerms));
    TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(texts[0]));
    // Tiny fragment size forces a single-term fragment.
    highlighter.SetTextFragmenter(new SimpleFragmenter(2));
    System.String result = highlighter.GetBestFragment(tokenStream, texts[0]).Trim();
    Assert.IsTrue("<B>Hello</B>".Equals(result), "Failed to find best section using weighted terms. Found: [" + result + "]");
    // readjust weights
    wTerms[1].SetWeight(50f);
    tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(texts[0]));
    highlighter = new Highlighter(new QueryScorer(wTerms));
    highlighter.SetTextFragmenter(new SimpleFragmenter(2));
    result = highlighter.GetBestFragment(tokenStream, texts[0]).Trim();
    Assert.IsTrue("<B>kennedy</B>".Equals(result), "Failed to find best section using weighted terms. Found: " + result);
}
// Tests a "complex" analyzer that produces multiple overlapping tokens:
// the SynonymAnalyzer injects synonyms at the same position, and all of
// them must be highlighted.
[Test]
public virtual void TestOverlapAnalyzer()
{
    //UPGRADE_TODO: Class 'java.util.HashMap' was converted to 'System.Collections.Hashtable' which has a different behavior. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1073_javautilHashMap_3"'
    System.Collections.Hashtable synonyms = new System.Collections.Hashtable();
    synonyms["football"] = "soccer,footie";
    Analyzer analyzer = new SynonymAnalyzer(synonyms);
    System.String srchkey = "football";
    System.String s = "football-soccer in the euro 2004 footie competition";
    QueryParser parser = new QueryParser("bookid", analyzer);
    Query query = parser.Parse(srchkey);
    Highlighter highlighter = new Highlighter(new QueryScorer(query));
    TokenStream tokenStream = analyzer.TokenStream(null, new System.IO.StringReader(s));
    // Get 3 best fragments and separate with a "..."
    System.String result = highlighter.GetBestFragments(tokenStream, s, 3, "...");
    System.String expectedResult = "<B>football</B>-<B>soccer</B> in the euro 2004 <B>footie</B> competition";
    Assert.IsTrue(expectedResult.Equals(result), "overlapping analyzer should handle highlights OK");
}
// Default fragmenter highlighting; this fixture is the Formatter, so
// each highlighted term bumps numHighlights.
[Test]
public virtual void TestGetSimpleHighlight()
{
    DoSearching("Kennedy");
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String result = highlighter.GetBestFragment(tokenStream, text);
        System.Console.Out.WriteLine("\t" + result);
    }
    Assert.IsTrue(numHighlights == 4, "Failed to find correct number of highlights " + numHighlights + " found");
}
// GetBestTextFragments must agree with GetBestFragments: same count and
// the same rendered text for each fragment.
[Test]
public virtual void TestGetTextFragments()
{
    DoSearching("Kennedy");
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    highlighter.SetTextFragmenter(new SimpleFragmenter(20));
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String[] stringResults = highlighter.GetBestFragments(tokenStream, text, 10);
        // A TokenStream is single-use; build a fresh one for the second call.
        tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        TextFragment[] fragmentResults = highlighter.GetBestTextFragments(tokenStream, text, true, 10);
        Assert.IsTrue(fragmentResults.Length == stringResults.Length, "Failed to find correct number of text Fragments: " + fragmentResults.Length + " vs " + stringResults.Length);
        for (int j = 0; j < stringResults.Length; j++)
        {
            //UPGRADE_TODO: Method 'java.io.PrintStream.println' was converted to 'System.Console.Out.WriteLine' which has a different behavior. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1073_javaioPrintStreamprintln_javalangObject_3"'
            System.Console.Out.WriteLine(fragmentResults[j]);
            Assert.IsTrue(fragmentResults[j].ToString().Equals(stringResults[j]), "Failed to find same text Fragments: " + fragmentResults[j] + " found");
        }
    }
}
// With MaxDocBytesToAnalyze set to 30, the match for "meat" (which occurs
// later in texts[0]) must not be analyzed, so no highlights occur.
[Test]
public virtual void TestMaxSizeHighlight()
{
    DoSearching("meat");
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    highlighter.SetMaxDocBytesToAnalyze(30);
    TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(texts[0]));
    highlighter.GetBestFragment(tokenStream, texts[0]);
    Assert.IsTrue(numHighlights == 0, "Setting MaxDocBytesToAnalyze should have prevented " + "us from finding matches for this record: " + numHighlights + " found");
}
// The NullFragmenter returns the whole doc as one fragment, but the
// returned text must still be truncated at MaxDocBytesToAnalyze.
[Test]
public virtual void TestMaxSizeHighlightTruncates()
{
    System.String goodWord = "goodtoken";
    System.String[] stopWords = new System.String[]{"stoppedtoken"};
    TermQuery query = new TermQuery(new Term("data", goodWord));
    SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
    Highlighter hg = new Highlighter(fm, new QueryScorer(query));
    hg.SetTextFragmenter(new NullFragmenter());
    System.String match = null;
    // One good token followed by a large slug of stop words.
    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    sb.Append(goodWord);
    for (int i = 0; i < 10000; i++)
    {
        sb.Append(" ");
        sb.Append(stopWords[0]);
    }
    hg.SetMaxDocBytesToAnalyze(100);
    match = hg.GetBestFragment(new StandardAnalyzer(stopWords), "data", sb.ToString());
    Assert.IsTrue(match.Length < hg.GetMaxDocBytesToAnalyze(), "Matched text should be no more than 100 chars in length ");
    // add another tokenized word to the overall length - but set way beyond
    // the length of text under consideration (after a large slug of stop words + whitespace)
    sb.Append(" ");
    sb.Append(goodWord);
    match = hg.GetBestFragment(new StandardAnalyzer(stopWords), "data", sb.ToString());
    Assert.IsTrue(match.Length < hg.GetMaxDocBytesToAnalyze(), "Matched text should be no more than 100 chars in length ");
}
// Demonstrates that a multi-term query that has NOT been rewritten against
// a reader produces zero highlights (the scorer sees no primitive terms).
[Test]
public virtual void TestUnRewrittenQuery()
{
    // test to show how rewritten query can still be used
    searcher = new IndexSearcher(ramDir);
    Analyzer analyzer = new StandardAnalyzer();
    QueryParser parser = new QueryParser(FIELD_NAME, analyzer);
    Query query = parser.Parse("JF? or Kenned*");
    System.Console.Out.WriteLine("Searching with primitive query");
    // forget to set this and...
    // query=query.rewrite(reader);
    Hits hits = searcher.Search(query);
    // create an instance of the highlighter with the tags used to surround highlighted text
    // QueryHighlightExtractor highlighter = new QueryHighlightExtractor(this, query, new StandardAnalyzer());
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    highlighter.SetTextFragmenter(new SimpleFragmenter(40));
    int maxNumFragmentsRequired = 3;
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String highlightedText = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
        System.Console.Out.WriteLine(highlightedText);
    }
    // We expect to have zero highlights if the query is multi-terms and is not rewritten!
    Assert.IsTrue(numHighlights == 0, "Failed to find correct number of highlights " + numHighlights + " found");
}
// When the query matches nothing in the text, GetBestFragment returns null.
[Test]
public virtual void TestNoFragments()
{
    DoSearching("AnInvalidQueryWhichShouldYieldNoResults");
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    for (int i = 0; i < texts.Length; i++)
    {
        System.String text = texts[i];
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String result = highlighter.GetBestFragment(tokenStream, text);
        Assert.IsNull(result, "The highlight result should be null for text with no query terms");
    }
}
/// <summary> Demonstrates creation of an XHTML compliant doc using new encoding facilities.</summary>
/// <throws> Exception </throws>
// Currently disabled: the body is a direct Java port that still references
// un-ported types (AnonymousClassScorer, DocumentBuilderFactory), so the
// test fails fast until the port is completed.
[Test]
public virtual void TestEncoding()
{
    Assert.Fail("This test is failing because it has porting issues.");
    // {{Aroush-2.0.0}} Fix me
    /*
    System.String rawDocContent = "\"Smith & sons' prices < 3 and >4\" claims article";
    //run the highlighter on the raw content (scorer does not score any tokens for
    // highlighting but scores a single fragment for selection
    Highlighter highlighter = new Highlighter(this, new SimpleHTMLEncoder(), new AnonymousClassScorer(this));
    highlighter.SetTextFragmenter(new SimpleFragmenter(2000));
    TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(rawDocContent));
    System.String encodedSnippet = highlighter.GetBestFragments(tokenStream, rawDocContent, 1, "");
    //An ugly bit of XML creation:
    System.String xhtml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<!DOCTYPE html\n" + "PUBLIC \"//W3C//DTD XHTML 1.0 Transitional//EN\"\n" + "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">\n" + "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">\n" + "<head>\n" + "<title>My Test HTML Document</title>\n" + "</head>\n" + "<body>\n" + "<h2>" + encodedSnippet + "</h2>\n" + "</body>\n" + "</html>";
    //now an ugly built of XML parsing to test the snippet is encoded OK
    //UPGRADE_ISSUE: Class 'javax.xml.parsers.DocumentBuilderFactory' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaxxmlparsersDocumentBuilderFactory_3"'
    //UPGRADE_ISSUE: Method 'javax.xml.parsers.DocumentBuilderFactory.newInstance' was not converted. 'ms-help://MS.VSCC.2003/commoner/redir/redirect.htm?keyword="jlca1000_javaxxmlparsersDocumentBuilderFactory_3"'
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    System.Xml.XmlDocument db = new System.Xml.XmlDocument();
    System.Xml.XmlDocument tempDocument;
    tempDocument = (System.Xml.XmlDocument) db.Clone();
    tempDocument.Load(new System.IO.MemoryStream(System.Text.UTF8Encoding.UTF8.GetBytes(xhtml)));
    System.Xml.XmlDocument doc = tempDocument;
    System.Xml.XmlElement root = (System.Xml.XmlElement) doc.DocumentElement;
    System.Xml.XmlNodeList nodes = root.GetElementsByTagName("body");
    System.Xml.XmlElement body = (System.Xml.XmlElement) nodes.Item(0);
    nodes = body.GetElementsByTagName("h2");
    System.Xml.XmlElement h2 = (System.Xml.XmlElement) nodes.Item(0);
    System.String decodedSnippet = h2.FirstChild.Value;
    Assert.AreEqual(rawDocContent, decodedSnippet, "XHTML Encoding should have worked:");
    */
}
// Highlighting across a MultiSearcher: each sub-index query must be
// rewritten against its own reader and the rewrites combined before the
// highlighter can see the expanded terms.
// NOTE(review): reader1/reader2 and the IndexSearchers are never closed —
// harmless for RAMDirectory in a test, but worth confirming.
[Test]
public virtual void TestMultiSearcher()
{
    // setup index 1
    RAMDirectory ramDir1 = new RAMDirectory();
    IndexWriter writer1 = new IndexWriter(ramDir1, new StandardAnalyzer(), true);
    Document d = new Document();
    Field f = new Field(FIELD_NAME, "multiOne", Field.Store.YES, Field.Index.TOKENIZED);
    d.Add(f);
    writer1.AddDocument(d);
    writer1.Optimize();
    writer1.Close();
    IndexReader reader1 = IndexReader.Open(ramDir1);
    // setup index 2
    RAMDirectory ramDir2 = new RAMDirectory();
    IndexWriter writer2 = new IndexWriter(ramDir2, new StandardAnalyzer(), true);
    d = new Document();
    f = new Field(FIELD_NAME, "multiTwo", Field.Store.YES, Field.Index.TOKENIZED);
    d.Add(f);
    writer2.AddDocument(d);
    writer2.Optimize();
    writer2.Close();
    IndexReader reader2 = IndexReader.Open(ramDir2);
    IndexSearcher[] searchers = new IndexSearcher[2];
    searchers[0] = new IndexSearcher(ramDir1);
    searchers[1] = new IndexSearcher(ramDir2);
    MultiSearcher multiSearcher = new MultiSearcher(searchers);
    QueryParser parser = new QueryParser(FIELD_NAME, new StandardAnalyzer());
    query = parser.Parse("multi*");
    System.Console.Out.WriteLine("Searching for: " + query.ToString(FIELD_NAME));
    // at this point the multisearcher calls combine(query[])
    hits = multiSearcher.Search(query);
    // query = QueryParser.parse("multi*", FIELD_NAME, new StandardAnalyzer());
    // Rewrite the prefix query per-reader, then combine into one primitive query.
    Query[] expandedQueries = new Query[2];
    expandedQueries[0] = query.Rewrite(reader1);
    expandedQueries[1] = query.Rewrite(reader2);
    query = query.Combine(expandedQueries);
    // create an instance of the highlighter with the tags used to surround highlighted text
    Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
    for (int i = 0; i < hits.Length(); i++)
    {
        System.String text = hits.Doc(i).Get(FIELD_NAME);
        TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
        System.String highlightedText = highlighter.GetBestFragment(tokenStream, text);
        System.Console.Out.WriteLine(highlightedText);
    }
    Assert.IsTrue(numHighlights == 2, "Failed to find correct number of highlights " + numHighlights + " found");
}
// A field-scoped QueryScorer must ignore query terms from other fields
// ("category:people"), while the unscoped scorer highlights all terms.
[Test]
public virtual void TestFieldSpecificHighlighting()
{
    System.String docMainText = "fred is one of the people";
    QueryParser parser = new QueryParser(FIELD_NAME, analyzer);
    Query query = parser.Parse("fred category:people");
    // highlighting respects fieldnames used in query
    QueryScorer fieldSpecificScorer = new QueryScorer(query, "contents");
    Highlighter fieldSpecificHighlighter = new Highlighter(new SimpleHTMLFormatter(), fieldSpecificScorer);
    fieldSpecificHighlighter.SetTextFragmenter(new NullFragmenter());
    System.String result = fieldSpecificHighlighter.GetBestFragment(analyzer, FIELD_NAME, docMainText);
    Assert.AreEqual(result, "<B>fred</B> is one of the people", "Should match");
    // highlighting does not respect fieldnames used in query
    QueryScorer fieldInSpecificScorer = new QueryScorer(query);
    Highlighter fieldInSpecificHighlighter = new Highlighter(new SimpleHTMLFormatter(), fieldInSpecificScorer);
    fieldInSpecificHighlighter.SetTextFragmenter(new NullFragmenter());
    result = fieldInSpecificHighlighter.GetBestFragment(analyzer, FIELD_NAME, docMainText);
    Assert.AreEqual(result, "<B>fred</B> is one of the <B>people</B>", "Should match");
    reader.Close();
}
/// <summary>
/// Returns a hand-built token stream with overlapping tokens (smaller token
/// first — presumably simulating an analyzer output for "Hi-Speed10 foo";
/// see AnonymousClassTokenStream for the exact tokens. TODO confirm).
/// </summary>
protected internal virtual TokenStream GetTS2()
{
//String s = "Hi-Speed10 foo";
return new AnonymousClassTokenStream(this);
}
// same token-stream as above, but the bigger token comes first this time
/// <summary>
/// Variant of <see cref="GetTS2"/> where the larger overlapping token is
/// emitted before the smaller ones (see AnonymousClassTokenStream1).
/// </summary>
protected internal virtual TokenStream GetTS2a()
{
//String s = "Hi-Speed10 foo";
return new AnonymousClassTokenStream1(this);
}
/// <summary>
/// Verifies highlighting of overlapping tokens ("Hi-Speed10" analyzed into
/// overlapping sub-tokens) against the token streams from GetTS2 / GetTS2a.
/// Each query term must highlight exactly its own character span, regardless
/// of the order in which overlapping tokens are emitted.
/// </summary>
[Test]
public virtual void TestOverlapAnalyzer2()
{
System.String s = "Hi-Speed10 foo";
Query query; Highlighter highlighter; System.String result;
// smaller overlapping tokens emitted first (GetTS2)
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("foo");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2(), s, 3, "...");
Assert.AreEqual(result, "Hi-Speed10 <B>foo</B>");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("10");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2(), s, 3, "...");
Assert.AreEqual(result, "Hi-Speed<B>10</B> foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("hi");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2(), s, 3, "...");
Assert.AreEqual(result, "<B>Hi</B>-Speed10 foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("speed");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2(), s, 3, "...");
Assert.AreEqual(result, "Hi-<B>Speed</B>10 foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("hispeed");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2(), s, 3, "...");
Assert.AreEqual(result, "<B>Hi-Speed</B>10 foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("hi speed");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2(), s, 3, "...");
Assert.AreEqual(result, "<B>Hi-Speed</B>10 foo");
/////////////////// same tests, just put the bigger overlapping token first
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("foo");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2a(), s, 3, "...");
Assert.AreEqual(result, "Hi-Speed10 <B>foo</B>");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("10");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2a(), s, 3, "...");
Assert.AreEqual(result, "Hi-Speed<B>10</B> foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("hi");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2a(), s, 3, "...");
Assert.AreEqual(result, "<B>Hi</B>-Speed10 foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("speed");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2a(), s, 3, "...");
Assert.AreEqual(result, "Hi-<B>Speed</B>10 foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("hispeed");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2a(), s, 3, "...");
Assert.AreEqual(result, "<B>Hi-Speed</B>10 foo");
query = new QueryParser("text", new WhitespaceAnalyzer()).Parse("hi speed");
highlighter = new Highlighter(new QueryScorer(query));
result = highlighter.GetBestFragments(GetTS2a(), s, 3, "...");
Assert.AreEqual(result, "<B>Hi-Speed</B>10 foo");
}
/*
public void testBigramAnalyzer() throws IOException, ParseException
{
//test to ensure analyzers with none-consecutive start/end offsets
//dont double-highlight text
//setup index 1
RAMDirectory ramDir = new RAMDirectory();
Analyzer bigramAnalyzer=new CJKAnalyzer();
IndexWriter writer = new IndexWriter(ramDir,bigramAnalyzer , true);
Document d = new Document();
Field f = new Field(FIELD_NAME, "java abc def", true, true, true);
d.add(f);
writer.addDocument(d);
writer.close();
IndexReader reader = IndexReader.open(ramDir);
IndexSearcher searcher=new IndexSearcher(reader);
query = QueryParser.parse("abc", FIELD_NAME, bigramAnalyzer);
System.out.println("Searching for: " + query.toString(FIELD_NAME));
hits = searcher.search(query);
Highlighter highlighter =
new Highlighter(this,new QueryFragmentScorer(query));
for (int i = 0; i < hits.length(); i++)
{
String text = hits.doc(i).get(FIELD_NAME);
TokenStream tokenStream=bigramAnalyzer.tokenStream(FIELD_NAME,new StringReader(text));
String highlightedText = highlighter.getBestFragment(tokenStream,text);
System.out.println(highlightedText);
}
}*/
/// <summary>
/// Formatter callback: wraps scored terms in &lt;b&gt; tags and counts them
/// in <c>numHighlights</c> so the tests can assert on the highlight count.
/// Unscored text is returned unchanged.
/// </summary>
public virtual System.String HighlightTerm(System.String originalText, TokenGroup group)
{
    if (group.GetTotalScore() > 0)
    {
        numHighlights++; //update stats used in assertions
        return "<b>" + originalText + "</b>";
    }

    return originalText;
}
/// <summary>
/// Parses <paramref name="queryString"/> against FIELD_NAME with a
/// StandardAnalyzer and delegates to <see cref="DoSearching(Query)"/>.
/// </summary>
public virtual void DoSearching(System.String queryString)
{
QueryParser parser = new QueryParser(FIELD_NAME, new StandardAnalyzer());
query = parser.Parse(queryString);
DoSearching(query);
}
/// <summary>
/// Rewrites the query against the shared <c>reader</c> (required so
/// multi-term queries expand into highlightable terms) and stores the
/// search results in the <c>hits</c> field.
/// </summary>
public virtual void DoSearching(Query unReWrittenQuery)
{
searcher = new IndexSearcher(ramDir);
//for any multi-term queries to work (prefix, wildcard, range,fuzzy etc) you must use a rewritten query!
query = unReWrittenQuery.Rewrite(reader);
System.Console.Out.WriteLine("Searching for: " + query.ToString(FIELD_NAME));
hits = searcher.Search(query);
}
/// <summary>
/// Highlights FIELD_NAME for every hit of the last <c>DoSearching</c> call,
/// using 20-character fragments, at most 2 fragments per document, joined
/// with "...". Results are only printed; assertions are done via the
/// <c>numHighlights</c> counter updated by <c>HighlightTerm</c>.
/// </summary>
internal virtual void DoStandardHighlights()
{
Highlighter highlighter = new Highlighter(this, new QueryScorer(query));
highlighter.SetTextFragmenter(new SimpleFragmenter(20));
for (int i = 0; i < hits.Length(); i++)
{
System.String text = hits.Doc(i).Get(FIELD_NAME);
int maxNumFragmentsRequired = 2;
System.String fragmentSeparator = "...";
TokenStream tokenStream = analyzer.TokenStream(FIELD_NAME, new System.IO.StringReader(text));
System.String result = highlighter.GetBestFragments(tokenStream, text, maxNumFragmentsRequired, fragmentSeparator);
System.Console.Out.WriteLine("\t" + result);
}
}
/*
* @see TestCase#setUp()
*/
[SetUp]
protected virtual void SetUp()
{
// Build a fresh in-memory index over the shared test texts before each test.
ramDir = new RAMDirectory();
IndexWriter writer = new IndexWriter(ramDir, new StandardAnalyzer(), true);
for (int i = 0; i < texts.Length; i++)
{
AddDoc(writer, texts[i]);
}
writer.Optimize();
writer.Close();
reader = IndexReader.Open(ramDir);
// Reset the counter that HighlightTerm increments and tests assert on.
numHighlights = 0;
}
/// <summary>
/// Indexes one document whose FIELD_NAME field stores and tokenizes the
/// given text.
/// </summary>
private void AddDoc(IndexWriter writer, System.String text)
{
    Field contentField = new Field(FIELD_NAME, text, Field.Store.YES, Field.Index.TOKENIZED);
    Document doc = new Document();
    doc.Add(contentField);
    writer.AddDocument(doc);
}
/*
* @see TestCase#tearDown()
*/
[TearDown]
protected virtual void TearDown()
{
// Intentionally empty: all per-test state is rebuilt in SetUp, so no
// explicit cleanup is needed here.
//base.TearDown();
}
}
//===================================================================
//========== BEGIN TEST SUPPORTING CLASSES
//========== THESE LOOK LIKE, WITH SOME MORE EFFORT THESE COULD BE
//========== MADE MORE GENERALLY USEFUL.
// TODO - make synonyms all interchangeable with each other and produce
// a version that does hyponyms - the "is a specialised type of ...."
// so that car = audi, bmw and volkswagen but bmw != audi so different
// behaviour to synonyms
//===================================================================
/// <summary>
/// Analyzer that lower-case-tokenizes its input and injects synonyms from
/// the supplied dictionary via <see cref="SynonymTokenizer"/>.
/// </summary>
class SynonymAnalyzer : Analyzer
{
// Maps a term's text to a comma-separated string of synonyms.
private System.Collections.IDictionary synonyms;
public SynonymAnalyzer(System.Collections.IDictionary synonyms)
{
this.synonyms = synonyms;
}
/* (non-Javadoc)
* @see org.apache.lucene.analysis.Analyzer#tokenStream(java.lang.String, java.io.Reader)
*/
public override TokenStream TokenStream(System.String arg0, System.IO.TextReader arg1)
{
return new SynonymTokenizer(new LowerCaseTokenizer(arg1), synonyms);
}
}
/// <summary> Expands a token stream with synonyms (TODO - make the synonyms analyzed by choice of analyzer)</summary>
/// <author> MAHarwood
/// </author>
class SynonymTokenizer : TokenStream
{
    private TokenStream realStream;
    // The real token currently being expanded; null when no expansion is in progress.
    private Token currentRealToken = null;
    // Maps a term's text to a comma-separated string of synonyms.
    private System.Collections.IDictionary synonyms;
    // Tokenizer over the comma-separated synonym list of currentRealToken.
    internal Tokenizer st = null;

    public SynonymTokenizer(TokenStream realStream, System.Collections.IDictionary synonyms)
    {
        this.realStream = realStream;
        this.synonyms = synonyms;
    }

    /// <summary>
    /// Emits each real token followed by its synonyms (if any) as tokens with
    /// position increment 0 at the same character offsets.
    /// </summary>
    public override Token Next()
    {
        if (currentRealToken == null)
        {
            Token nextRealToken = realStream.Next();
            if (nextRealToken == null)
            {
                return null;
            }
            System.String expansions = (System.String) synonyms[nextRealToken.TermText()];
            if (expansions == null)
            {
                return nextRealToken;
            }
            st = new Tokenizer(expansions, ",");
            if (st.HasMoreTokens())
            {
                currentRealToken = nextRealToken;
            }
            // BUG FIX: this used to "return currentRealToken;", which is still
            // null when the expansion string yields no tokens (e.g. an empty
            // string), silently dropping the real token and ending the stream.
            // Always return the real token itself.
            return nextRealToken;
        }
        else
        {
            System.String nextExpandedValue = st.NextToken();
            Token expandedToken = new Token(nextExpandedValue, currentRealToken.StartOffset(), currentRealToken.EndOffset());
            // Synonyms occupy the same position as the original token.
            expandedToken.SetPositionIncrement(0);
            if (!st.HasMoreTokens())
            {
                // Expansion exhausted: resume pulling from the real stream.
                currentRealToken = null;
                st = null;
            }
            return expandedToken;
        }
    }
}
/// <summary>
/// Splits a string into tokens, in the style of java.util.StringTokenizer.
/// Also implements <see cref="System.Collections.IEnumerator"/>, although
/// non-standardly: <see cref="Current"/> advances the position (it returns
/// the next token) and <see cref="Reset"/> is a no-op.
/// </summary>
public class Tokenizer : System.Collections.IEnumerator
{
    /// Position over the string
    private long currentPos = 0;
    /// Include demiliters in the results.
    private bool includeDelims = false;
    /// Char representation of the String to tokenize.
    private char[] chars = null;
    //The tokenizer uses the default delimiter set: the space character, the tab character, the newline character, and the carriage-return character and the form-feed character
    private string delimiters = " \t\n\r\f";

    /// <summary>
    /// Initializes a new class instance with a specified string to process
    /// </summary>
    /// <param name="source">String to tokenize</param>
    public Tokenizer(System.String source)
    {
        this.chars = source.ToCharArray();
    }

    /// <summary>
    /// Initializes a new class instance with a specified string to process
    /// and the specified token delimiters to use
    /// </summary>
    /// <param name="source">String to tokenize</param>
    /// <param name="delimiters">String containing the delimiters</param>
    public Tokenizer(System.String source, System.String delimiters) : this(source)
    {
        this.delimiters = delimiters;
    }

    /// <summary>
    /// Initializes a new class instance with a specified string to process, the specified token
    /// delimiters to use, and whether the delimiters must be included in the results.
    /// </summary>
    /// <param name="source">String to tokenize</param>
    /// <param name="delimiters">String containing the delimiters</param>
    /// <param name="includeDelims">Determines if delimiters are included in the results.</param>
    public Tokenizer(System.String source, System.String delimiters, bool includeDelims) : this(source, delimiters)
    {
        this.includeDelims = includeDelims;
    }

    /// <summary>
    /// Returns the next token from the token list
    /// </summary>
    /// <returns>The string value of the token</returns>
    public System.String NextToken()
    {
        return NextToken(this.delimiters);
    }

    /// <summary>
    /// Returns the next token from the source string, using the provided
    /// token delimiters
    /// </summary>
    /// <param name="delimiters">String containing the delimiters to use</param>
    /// <returns>The string value of the token</returns>
    public System.String NextToken(System.String delimiters)
    {
        // Mirrors java.util.StringTokenizer: the supplied delimiter set
        // replaces the stored one for all subsequent calls as well.
        this.delimiters = delimiters;
        // End of input: no more tokens.
        if (this.currentPos == this.chars.Length)
            throw new System.ArgumentOutOfRangeException();
        // Positioned on a delimiter and delimiters are requested as tokens:
        // return the single delimiter character.
        else if ((System.Array.IndexOf(delimiters.ToCharArray(), chars[this.currentPos]) != -1)
            && this.includeDelims)
            return "" + this.chars[this.currentPos++];
        // Otherwise scan the next delimiter-free token.
        else
            return nextToken(delimiters.ToCharArray());
    }

    // Skips leading delimiters, then accumulates characters up to the next
    // delimiter (or end of input). Restores the position and throws
    // ArgumentOutOfRangeException when only delimiters remain.
    private System.String nextToken(char[] delimiters)
    {
        long pos = this.currentPos;
        //skip possible delimiters
        while (System.Array.IndexOf(delimiters, this.chars[currentPos]) != -1)
        {
            //The last one is a delimiter (i.e there is no more tokens)
            if (++this.currentPos == this.chars.Length)
            {
                this.currentPos = pos;
                throw new System.ArgumentOutOfRangeException();
            }
        }
        // Accumulate the token. A StringBuilder replaces the original
        // string concatenation, which was O(n^2) over long tokens.
        System.Text.StringBuilder token = new System.Text.StringBuilder();
        while (System.Array.IndexOf(delimiters, this.chars[this.currentPos]) == -1)
        {
            token.Append(this.chars[this.currentPos]);
            //the last one is not a delimiter
            if (++this.currentPos == this.chars.Length)
                break;
        }
        return token.ToString();
    }

    /// <summary>
    /// Determines if there are more tokens to return from the source string
    /// </summary>
    /// <returns>True or false, depending if there are more tokens</returns>
    public bool HasMoreTokens()
    {
        //keeping the current pos
        long pos = this.currentPos;
        try
        {
            // Probe by actually scanning the next token...
            this.NextToken();
        }
        catch (System.ArgumentOutOfRangeException)
        {
            return false;
        }
        finally
        {
            // ...then restore the position so the probe is side-effect free.
            this.currentPos = pos;
        }
        return true;
    }

    /// <summary>
    /// Remaining tokens count
    /// </summary>
    public int Count
    {
        get
        {
            // Counts by consuming all remaining tokens, then restores the
            // position, so the property is non-destructive.
            long pos = this.currentPos;
            int i = 0;
            try
            {
                while (true)
                {
                    this.NextToken();
                    i++;
                }
            }
            catch (System.ArgumentOutOfRangeException)
            {
                this.currentPos = pos;
                return i;
            }
        }
    }

    /// <summary>
    /// Performs the same action as NextToken. NOTE: unlike a standard
    /// IEnumerator, reading Current advances the position.
    /// </summary>
    public System.Object Current
    {
        get
        {
            return (Object)this.NextToken();
        }
    }

    /// <summary>
    /// Performs the same action as HasMoreTokens.
    /// </summary>
    /// <returns>True or false, depending if there are more tokens</returns>
    public bool MoveNext()
    {
        return this.HasMoreTokens();
    }

    /// <summary>
    /// Does nothing. (The enumerator cannot be rewound.)
    /// </summary>
    public void Reset()
    {
        ;
    }
}
}
| |
// ***********************************************************************
// Copyright (c) 2008-2015 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using NUnit.Compatibility;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal;
using NUnit.Framework.Internal.Builders;
namespace NUnit.Framework
{
/// <summary>
/// TestFixtureSourceAttribute indicates the source to be used to
/// provide test fixture instances for a test class.
/// </summary>
[AttributeUsage(AttributeTargets.Class, AllowMultiple = true, Inherited = false)]
public class TestFixtureSourceAttribute : NUnitAttribute, IFixtureBuilder
{
    private readonly NUnitTestFixtureBuilder _builder = new NUnitTestFixtureBuilder();

    /// <summary>
    /// Error message string is public so the tests can use it
    /// </summary>
    public const string MUST_BE_STATIC = "The sourceName specified on a TestCaseSourceAttribute must refer to a static field, property or method.";

    #region Constructors

    /// <summary>
    /// Construct with the name of the method, property or field that will provide data
    /// </summary>
    /// <param name="sourceName">The name of a static method, property or field that will provide data.</param>
    public TestFixtureSourceAttribute(string sourceName)
    {
        this.SourceName = sourceName;
    }

    /// <summary>
    /// Construct with a Type and name
    /// </summary>
    /// <param name="sourceType">The Type that will provide data</param>
    /// <param name="sourceName">The name of a static method, property or field that will provide data.</param>
    public TestFixtureSourceAttribute(Type sourceType, string sourceName)
    {
        this.SourceType = sourceType;
        this.SourceName = sourceName;
    }

    /// <summary>
    /// Construct with a Type
    /// </summary>
    /// <param name="sourceType">The type that will provide data</param>
    public TestFixtureSourceAttribute(Type sourceType)
    {
        this.SourceType = sourceType;
    }

    #endregion

    #region Properties

    /// <summary>
    /// The name of the method, property or field to be used as a source
    /// </summary>
    public string SourceName { get; private set; }

    /// <summary>
    /// A Type to be used as a source
    /// </summary>
    public Type SourceType { get; private set; }

    /// <summary>
    /// Gets or sets the category associated with every fixture created from
    /// this attribute. May be a single category or a comma-separated list.
    /// </summary>
    public string Category { get; set; }

    #endregion

    #region IFixtureBuilder Members

    /// <summary>
    /// Construct one or more TestFixtures from a given Type,
    /// using available parameter data.
    /// </summary>
    /// <param name="typeInfo">The TypeInfo for which fixtures are to be constructed.</param>
    /// <returns>One or more TestFixtures as TestSuite</returns>
    public IEnumerable<TestSuite> BuildFrom(ITypeInfo typeInfo)
    {
        // When no explicit SourceType is given, the fixture type itself is the source.
        Type sourceType = SourceType ?? typeInfo.Type;

        foreach (TestFixtureParameters parms in GetParametersFor(sourceType))
            yield return _builder.BuildFrom(typeInfo, parms);
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Returns a set of ITestFixtureData items for use as arguments
    /// to a parameterized test fixture.
    /// </summary>
    /// <param name="sourceType">The type for which data is needed.</param>
    /// <returns></returns>
    public IEnumerable<ITestFixtureData> GetParametersFor(Type sourceType)
    {
        List<ITestFixtureData> data = new List<ITestFixtureData>();

        try
        {
            IEnumerable source = GetTestFixtureSource(sourceType);

            if (source != null)
            {
                foreach (object item in source)
                {
                    // Source items may already be ITestFixtureData; otherwise
                    // wrap them (an object[] as the argument list, any other
                    // object as a single argument).
                    var parms = item as ITestFixtureData;

                    if (parms == null)
                    {
                        object[] args = item as object[];
                        if (args == null)
                        {
                            args = new object[] { item };
                        }

                        parms = new TestFixtureParameters(args);
                    }

                    if (this.Category != null)
                        foreach (string cat in this.Category.Split(new char[] { ',' }))
                            parms.Properties.Add(PropertyNames.Category, cat);

                    data.Add(parms);
                }
            }
        }
        catch (Exception ex)
        {
            // Any error while reading the source becomes a single non-runnable
            // fixture carrying the exception, rather than crashing discovery.
            data.Clear();
            data.Add(new TestFixtureParameters(ex));
        }

        return data;
    }

    private IEnumerable GetTestFixtureSource(Type sourceType)
    {
        // Handle Type implementing IEnumerable separately
        if (SourceName == null)
            return Reflect.Construct(sourceType) as IEnumerable;

        MemberInfo[] members = sourceType.GetMember(SourceName,
            BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static | BindingFlags.Instance | BindingFlags.FlattenHierarchy);

        if (members.Length == 1)
        {
            MemberInfo member = members[0];

            // The named member must be static; instance members produce a
            // non-runnable fixture explaining the problem.
            var field = member as FieldInfo;
            if (field != null)
                return field.IsStatic
                    ? (IEnumerable)field.GetValue(null)
                    : SourceMustBeStaticError();

            var property = member as PropertyInfo;
            if (property != null)
                return property.GetGetMethod(true).IsStatic
                    ? (IEnumerable)property.GetValue(null, null)
                    : SourceMustBeStaticError();

            var m = member as MethodInfo;
            if (m != null)
                return m.IsStatic
                    ? (IEnumerable)m.Invoke(null, null)
                    : SourceMustBeStaticError();
        }

        return null;
    }

    private static IEnumerable SourceMustBeStaticError()
    {
        var parms = new TestFixtureParameters();
        parms.RunState = RunState.NotRunnable;
        parms.Properties.Set(PropertyNames.SkipReason, MUST_BE_STATIC);
        return new TestFixtureParameters[] { parms };
    }

    #endregion
}
}
| |
using Orleans.Serialization.Buffers;
using Orleans.Serialization.Cloning;
using Orleans.Serialization.GeneratedCodeHelpers;
using Orleans.Serialization.Serializers;
using Orleans.Serialization.WireProtocol;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Runtime.CompilerServices;
namespace Orleans.Serialization.Codecs
{
/// <summary>
/// Surrogate-based serializer for <see cref="ImmutableStack{T}"/>: the stack
/// is represented on the wire as the surrogate's list of values.
/// </summary>
[RegisterSerializer]
public sealed class ImmutableStackCodec<T> : GeneralizedReferenceTypeSurrogateCodec<ImmutableStack<T>, ImmutableStackSurrogate<T>>
{
    public ImmutableStackCodec(IValueSerializer<ImmutableStackSurrogate<T>> surrogateSerializer) : base(surrogateSerializer)
    {
    }

    public override ImmutableStack<T> ConvertFromSurrogate(ref ImmutableStackSurrogate<T> surrogate)
    {
        if (surrogate.Values is null)
        {
            return default;
        }

        return ImmutableStack.CreateRange(surrogate.Values);
    }

    public override void ConvertToSurrogate(ImmutableStack<T> value, ref ImmutableStackSurrogate<T> surrogate)
    {
        if (value is null)
        {
            surrogate = default;
        }
        else
        {
            surrogate = new ImmutableStackSurrogate<T>
            {
                Values = new List<T>(value)
            };
        }
    }
}
/// <summary>
/// Wire-format surrogate for <see cref="ImmutableStack{T}"/>: holds the
/// stack's values as a plain list.
/// </summary>
[GenerateSerializer]
public struct ImmutableStackSurrogate<T>
{
[Id(1)]
public List<T> Values { get; set; }
}
/// <summary>
/// Copier for <see cref="ImmutableStack{T}"/>. Returns the input unchanged
/// because the stack itself is immutable (note: this assumes the elements
/// need no deep copy, which holds only if <typeparamref name="T"/> is
/// shallow-copyable).
/// </summary>
[RegisterCopier]
public sealed class ImmutableStackCopier<T> : IDeepCopier<ImmutableStack<T>>
{
public ImmutableStack<T> DeepCopy(ImmutableStack<T> input, CopyContext _) => input;
}
/// <summary>
/// Codec for <see cref="Stack{T}"/>.
/// </summary>
/// <typeparam name="T">The element type.</typeparam>
[RegisterSerializer]
public sealed class StackCodec<T> : IFieldCodec<Stack<T>>
{
    private static readonly Type CodecElementType = typeof(T);

    private readonly IFieldCodec<T> _fieldCodec;

    public StackCodec(IFieldCodec<T> fieldCodec)
    {
        _fieldCodec = OrleansGeneratedCodeHelper.UnwrapService(this, fieldCodec);
    }

    /// <summary>
    /// Writes the stack as a tag-delimited field: field 0 is the element
    /// count, followed by the elements in enumeration order (top of the
    /// stack first).
    /// </summary>
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    public void WriteField<TBufferWriter>(ref Writer<TBufferWriter> writer, uint fieldIdDelta, Type expectedType, Stack<T> value) where TBufferWriter : IBufferWriter<byte>
    {
        if (ReferenceCodec.TryWriteReferenceField(ref writer, fieldIdDelta, expectedType, value))
        {
            return;
        }

        writer.WriteFieldHeader(fieldIdDelta, expectedType, value.GetType(), WireType.TagDelimited);

        Int32Codec.WriteField(ref writer, 0, Int32Codec.CodecFieldType, value.Count);
        uint innerFieldIdDelta = 1;
        foreach (var element in value)
        {
            _fieldCodec.WriteField(ref writer, innerFieldIdDelta, CodecElementType, element);
            innerFieldIdDelta = 0;
        }

        writer.WriteEndObject();
    }

    /// <summary>
    /// Reads a stack written by <see cref="WriteField{TBufferWriter}"/>.
    /// Elements arrive top-first, so they are buffered and pushed in reverse
    /// to reconstruct the original ordering.
    /// </summary>
    public Stack<T> ReadValue<TInput>(ref Reader<TInput> reader, Field field)
    {
        if (field.WireType == WireType.Reference)
        {
            return ReferenceCodec.ReadReference<Stack<T>, TInput>(ref reader, field);
        }

        if (field.WireType != WireType.TagDelimited)
        {
            ThrowUnsupportedWireTypeException(field);
        }

        var placeholderReferenceId = ReferenceCodec.CreateRecordPlaceholder(reader.Session);
        Stack<T> result = null;
        T[] buffer = null;
        uint fieldId = 0;
        var length = 0;
        var index = 0;
        while (true)
        {
            var header = reader.ReadFieldHeader();
            if (header.IsEndBaseOrEndObject)
            {
                break;
            }

            fieldId += header.FieldIdDelta;
            switch (fieldId)
            {
                case 0:
                    length = Int32Codec.ReadValue(ref reader, header);
                    // Sanity check: a large declared length must not exceed the input size.
                    if (length > 10240 && length > reader.Length)
                    {
                        ThrowInvalidSizeException(length);
                    }

                    result = new Stack<T>(length);
                    buffer = new T[length];
                    // Record the (still empty) stack up front so cyclic references resolve.
                    ReferenceCodec.RecordObject(reader.Session, result, placeholderReferenceId);
                    break;
                case 1:
                    if (result is null)
                    {
                        ThrowLengthFieldMissing();
                    }

                    if (index >= length)
                    {
                        ThrowIndexOutOfRangeException(length);
                    }

                    buffer[index] = _fieldCodec.ReadValue(ref reader, header);
                    ++index;
                    break;
                default:
                    reader.ConsumeUnknownField(header);
                    break;
            }
        }

        // BUG FIX: elements were previously pushed in read order, which put
        // the serialized top of the stack at the bottom and reversed the
        // stack on every round-trip. Push in reverse to preserve order.
        for (var i = index - 1; i >= 0; --i)
        {
            result.Push(buffer[i]);
        }

        return result;
    }

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void ThrowUnsupportedWireTypeException(Field field) => throw new UnsupportedWireTypeException(
        $"Only a {nameof(WireType)} value of {WireType.TagDelimited} is supported for {typeof(Stack<T>)} fields. {field}");

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void ThrowIndexOutOfRangeException(int length) => throw new IndexOutOfRangeException(
        $"Encountered too many elements in array of type {typeof(Stack<T>)} with declared length {length}.");

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void ThrowInvalidSizeException(int length) => throw new IndexOutOfRangeException(
        $"Declared length of {typeof(Stack<T>)}, {length}, is greater than total length of input.");

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void ThrowLengthFieldMissing() => throw new RequiredFieldMissingException("Serialized array is missing its length field.");
}
/// <summary>
/// Deep copier for <see cref="Stack{T}"/>. Because a stack's enumerator
/// yields elements top-first while Push builds the stack bottom-up, elements
/// are materialized and pushed in reverse so the copy preserves order.
/// </summary>
[RegisterCopier]
public sealed class StackCopier<T> : IDeepCopier<Stack<T>>, IBaseCopier<Stack<T>>
{
    private readonly IDeepCopier<T> _copier;

    public StackCopier(IDeepCopier<T> valueCopier)
    {
        _copier = valueCopier;
    }

    public Stack<T> DeepCopy(Stack<T> input, CopyContext context)
    {
        if (context.TryGetCopy<Stack<T>>(input, out var result))
        {
            return result;
        }

        if (input.GetType() != typeof(Stack<T>))
        {
            // A derived type must be copied by its own copier.
            return context.Copy(input);
        }

        result = new Stack<T>(input.Count);
        // Record before copying elements so cyclic references resolve.
        context.RecordCopy(input, result);

        // BUG FIX: pushing while enumerating the input reversed the copy
        // (enumeration is top-first). Materialize and push in reverse.
        var items = new T[input.Count];
        input.CopyTo(items, 0);
        for (var i = items.Length - 1; i >= 0; --i)
        {
            result.Push(_copier.DeepCopy(items[i], context));
        }

        return result;
    }

    public void DeepCopy(Stack<T> input, Stack<T> output, CopyContext context)
    {
        // BUG FIX: same ordering issue as above — push in reverse enumeration
        // order so output ends up in the same order as input.
        var items = new T[input.Count];
        input.CopyTo(items, 0);
        for (var i = items.Length - 1; i >= 0; --i)
        {
            output.Push(_copier.DeepCopy(items[i], context));
        }
    }
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="SmartDateTests.cs" company="Marimer LLC">
// Copyright (c) Marimer LLC. All rights reserved.
// Website: https://cslanet.com
// </copyright>
// <summary>no summary</summary>
//-----------------------------------------------------------------------
using Csla;
using Csla.Serialization;
using System;
using UnitDriven;
#if !WINDOWS_PHONE
using Microsoft.VisualBasic;
#endif
using Csla.Serialization.Mobile;
using System.Threading;
#if NUNIT
using NUnit.Framework;
using TestClass = NUnit.Framework.TestFixtureAttribute;
using TestInitialize = NUnit.Framework.SetUpAttribute;
using TestCleanup = NUnit.Framework.TearDownAttribute;
using TestMethod = NUnit.Framework.TestAttribute;
using TestSetup = NUnit.Framework.SetUpAttribute;
using Microsoft.VisualBasic;
using Csla.Serialization.Mobile;
#elif MSTEST
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System.IO;
using Csla.TestHelpers;
#endif
namespace Csla.Test.SmartDate
{
[TestClass()]
public class SmartDateTests
{
// DI context shared by every test in this fixture; created once in ClassInitialize.
private static TestDIContext _testDIContext;
// Cultures captured in Setup so Cleanup can restore them after each test.
System.Globalization.CultureInfo CurrentCulture { get; set; }
System.Globalization.CultureInfo CurrentUICulture { get; set; }
[ClassInitialize]
public static void ClassInitialize(TestContext context)
{
_testDIContext = TestDIContextFactory.CreateDefaultContext();
}
// Forces "en-US" for every test so date parsing/formatting assertions are
// culture-independent; the originals are restored in Cleanup.
[TestInitialize]
public void Setup()
{
// store current cultures
CurrentCulture = System.Threading.Thread.CurrentThread.CurrentCulture;
CurrentUICulture = System.Threading.Thread.CurrentThread.CurrentUICulture;
// set to "en-US" for all tests
System.Threading.Thread.CurrentThread.CurrentCulture = System.Globalization.CultureInfo.GetCultureInfo("en-US");
System.Threading.Thread.CurrentThread.CurrentUICulture = System.Globalization.CultureInfo.GetCultureInfo("en-US");
}
// Restores the cultures captured in Setup so tests don't leak the forced
// "en-US" culture to the rest of the run.
[TestCleanup]
public void Cleanup()
{
// restore original cultures
System.Threading.Thread.CurrentThread.CurrentCulture = CurrentCulture;
System.Threading.Thread.CurrentThread.CurrentUICulture = CurrentUICulture;
}
#region Test Constructors
/// <summary>
/// Exercises every SmartDate constructor overload: date/bool/string forms,
/// empty handling (EmptyIsMin), and invalid-string rejection.
/// </summary>
[TestMethod()]
public void TestSmartDateConstructors()
{
    DateTime now = DateTime.Now;
    Csla.SmartDate d = new Csla.SmartDate(now);
    Assert.AreEqual(now, d.Date);

    d = new Csla.SmartDate(true);
    Assert.IsTrue(d.EmptyIsMin);
    d = new Csla.SmartDate(false);
    Assert.IsFalse(d.EmptyIsMin);

    d = new Csla.SmartDate("1/1/2005");
    Assert.AreEqual("1/1/2005", d.ToString());
    d = new Csla.SmartDate("Jan/1/2005");
    Assert.AreEqual("1/1/2005", d.ToString());
    d = new Csla.SmartDate("January-1-2005");
    Assert.AreEqual("1/1/2005", d.ToString());
    d = new Csla.SmartDate("1-1-2005");
    Assert.AreEqual("1/1/2005", d.ToString());
    d = new Csla.SmartDate("");
    Assert.AreEqual("", d.ToString());
    Assert.IsTrue(d.IsEmpty);

    d = new Csla.SmartDate("1/1/2005", true);
    Assert.AreEqual("1/1/2005", d.ToString());
    Assert.IsTrue(d.EmptyIsMin);
    d = new Csla.SmartDate("1/1/2005", false);
    Assert.AreEqual("1/1/2005", d.ToString());
    Assert.IsFalse(d.EmptyIsMin);
    d = new Csla.SmartDate("", true);
    Assert.AreEqual(DateTime.MinValue, d.Date);
    Assert.AreEqual("", d.ToString());
    d = new Csla.SmartDate("", false);
    Assert.AreEqual(DateTime.MaxValue, d.Date);
    Assert.AreEqual("", d.ToString());

    try
    {
        d = new Csla.SmartDate("Invalid Date", true);
        // BUG FIX: without this, the test silently passed when no exception
        // was thrown at all.
        Assert.Fail("Expected ArgumentException for invalid date string");
    }
    catch (Exception ex) { Assert.IsTrue(ex is ArgumentException); }
    try
    {
        d = new Csla.SmartDate("Invalid Date", false);
        Assert.Fail("Expected ArgumentException for invalid date string");
    }
    catch (Exception ex) { Assert.IsTrue(ex is ArgumentException); }

    d = new Csla.SmartDate(now, true);
    Assert.AreEqual(now, d.Date);
    Assert.IsTrue(d.EmptyIsMin);
    d = new Csla.SmartDate(now, false);
    Assert.AreEqual(now, d.Date);
    Assert.IsFalse(d.EmptyIsMin);

    // Null dates fall back to Min/Max depending on the empty-value policy.
    d = new Csla.SmartDate((DateTime?)null, true);
    Assert.AreEqual(DateTime.MinValue, d.Date);
    d = new Csla.SmartDate((DateTime?)null, false);
    Assert.AreEqual(DateTime.MaxValue, d.Date);
    d = new Csla.SmartDate((DateTime?)null, Csla.SmartDate.EmptyValue.MinDate);
    Assert.AreEqual(DateTime.MinValue, d.Date);
    d = new Csla.SmartDate((DateTime?)null, Csla.SmartDate.EmptyValue.MaxDate);
    Assert.AreEqual(DateTime.MaxValue, d.Date);
}
#endregion
#region Converters
/// <summary>
/// Exercises the static StringToDate / DateToString conversion helpers,
/// including the ".", "+", "-" shorthand values, empty-string handling and
/// format strings.
/// </summary>
[TestMethod]
public void TestConverters()
{
    DateTime d = Csla.SmartDate.StringToDate("1/1/2005");
    Assert.AreEqual("1/1/2005", d.ToShortDateString());
    d = Csla.SmartDate.StringToDate("january-1-2005");
    Assert.AreEqual("1/1/2005", d.ToShortDateString());
    // Shorthand: "." = today, "-" = yesterday, "+" = tomorrow.
    d = Csla.SmartDate.StringToDate(".");
    Assert.AreEqual(DateTime.Now.ToShortDateString(), d.ToShortDateString());
    d = Csla.SmartDate.StringToDate("-");
    Assert.AreEqual(DateTime.Now.AddDays(-1.0).ToShortDateString(), d.ToShortDateString());
    d = Csla.SmartDate.StringToDate("+");
    Assert.AreEqual(DateTime.Now.AddDays(1.0).ToShortDateString(), d.ToShortDateString());

    try
    {
        d = Csla.SmartDate.StringToDate("Invalid Date");
        // BUG FIX: without this, the test silently passed when no exception
        // was thrown at all.
        Assert.Fail("Expected ArgumentException for invalid date string");
    }
    catch (Exception ex)
    {
        Assert.IsTrue(ex is System.ArgumentException);
    }

    d = Csla.SmartDate.StringToDate("");
    Assert.AreEqual(DateTime.MinValue, d);
    d = Csla.SmartDate.StringToDate(null);
    Assert.AreEqual(DateTime.MinValue, d);
    d = Csla.SmartDate.StringToDate("", true);
    Assert.AreEqual(DateTime.MinValue, d);
    d = Csla.SmartDate.StringToDate("", false);
    Assert.AreEqual(DateTime.MaxValue, d);

    try
    {
        d = Csla.SmartDate.StringToDate("Invalid Date", true);
        Assert.Fail("Expected ArgumentException for invalid date string");
    }
    catch (Exception ex)
    {
        Assert.IsTrue(ex is ArgumentException);
    }
    try
    {
        d = Csla.SmartDate.StringToDate("Invalid Date", false);
        Assert.Fail("Expected ArgumentException for invalid date string");
    }
    catch (Exception ex)
    {
        Assert.IsTrue(ex is ArgumentException);
    }

    d = Csla.SmartDate.StringToDate(null, true);
    Assert.AreEqual(DateTime.MinValue, d);
    d = Csla.SmartDate.StringToDate(null, false);
    Assert.AreEqual(DateTime.MaxValue, d);

    d = new DateTime(2005, 1, 2);
    string date = Csla.SmartDate.DateToString(d, "dd/MM/yyyy");
    Assert.AreEqual("02/01/2005", date, "dd/MM/yyyy test");
    date = Csla.SmartDate.DateToString(d, "MM/dd/yy");
    Assert.AreEqual("01/02/05", date, "MM/dd/yy test");
    date = Csla.SmartDate.DateToString(d, "");
    Assert.AreEqual("1/2/2005 12:00:00 AM", date);
    date = Csla.SmartDate.DateToString(d, "d");
    Assert.AreEqual("1/2/2005", date);
    date = new Csla.SmartDate(d).ToString();
    Assert.AreEqual("1/2/2005", date);

    // Min/Max render as empty only when they represent the "empty" value.
    date = Csla.SmartDate.DateToString(DateTime.MinValue, "dd/MM/yyyy", true);
    Assert.AreEqual("", date, "MinValue w/ emptyIsMin=true");
    date = Csla.SmartDate.DateToString(DateTime.MinValue, "dd/MM/yyyy", false);
    Assert.AreEqual(DateTime.MinValue.ToString("dd/MM/yyyy"), date, "MinValue w/ emptyIsMin=false");
    date = Csla.SmartDate.DateToString(DateTime.MaxValue, "dd/MM/yyyy", true);
    Assert.AreEqual(DateTime.MaxValue.ToString("dd/MM/yyyy"), date, "MaxValue w/ emptyIsMin=true");
    date = Csla.SmartDate.DateToString(DateTime.MaxValue, "dd/MM/yyyy", false);
    Assert.AreEqual("", date, "MaxValue w/ emptyIsMin=false");
}
#endregion
#if !WINDOWS_PHONE
#region Add
[TestMethod()]
public void Add()
{
    // Adding a TimeSpan to a SmartDate must behave exactly like DateTime.AddDays.
    var baseDate = new Csla.SmartDate();
    baseDate.Date = new DateTime(2005, 1, 1);

    var thirtyDays = new TimeSpan(30, 0, 0, 0);
    var shifted = new Csla.SmartDate(baseDate.Add(thirtyDays));

    Assert.AreEqual(baseDate.Date.AddDays(30), shifted.Date, "Dates should be equal");
    // The overloaded + operator must agree with the Add method.
    Assert.AreEqual(shifted, baseDate + new TimeSpan(30, 0, 0, 0, 0), "Dates should be equal");
}
#endregion
#region Subtract
[TestMethod()]
public void Subtract()
{
    // Subtracting a TimeSpan must behave like DateTime.AddDays with a negative count.
    var baseDate = new Csla.SmartDate();
    baseDate.Date = new DateTime(2005, 1, 1);

    var thirtyDays = new TimeSpan(30, 0, 0, 0);
    var shifted = new Csla.SmartDate(baseDate.Subtract(thirtyDays));

    Assert.AreEqual(baseDate.Date.AddDays(-30), shifted.Date, "Dates should be equal");
    // Date-minus-date yields a TimeSpan; date-minus-span yields a date.
    Assert.AreEqual(30, ((TimeSpan)(baseDate - shifted)).Days, "Should be 30 days different");
    Assert.AreEqual(shifted, baseDate - new TimeSpan(30, 0, 0, 0, 0), "Should be equal");
}
#endregion
#endif
#region Comparison
[TestMethod()]
public void Comparison()
{
    // Verifies SmartDate equality/comparison semantics, including the special
    // treatment of "empty" dates: an empty SmartDate maps to DateTime.MinValue
    // when emptyIsMin=true and to DateTime.MaxValue when emptyIsMin=false,
    // yet two empty SmartDates compare equal regardless of that setting.
    Csla.SmartDate d2 = new Csla.SmartDate(true);
    Csla.SmartDate d3 = new Csla.SmartDate(false);
    Csla.SmartDate d4 = new Csla.SmartDate(Csla.SmartDate.EmptyValue.MinDate);
    Csla.SmartDate d5 = new Csla.SmartDate(Csla.SmartDate.EmptyValue.MaxDate);
    Assert.IsTrue(d2.Equals(d3), "Empty dates should be equal");
    Assert.IsTrue(Csla.SmartDate.Equals(d2, d3), "Empty dates should be equal (shared)");
    // NOTE(review): same call as two lines above despite the "(unary)" label —
    // looks like a copy/paste; confirm whether an operator check was intended.
    Assert.IsTrue(d2.Equals(d3), "Empty dates should be equal (unary)");
    // An empty SmartDate compares equal to the empty string.
    Assert.IsTrue(d2.Equals(""), "Should be equal to an empty string (d2)");
    Assert.IsTrue(d3.Equals(""), "Should be equal to an empty string (d3)");
    Assert.IsTrue(d2.Date.Equals(DateTime.MinValue), "Should be DateTime.MinValue");
    Assert.IsTrue(d3.Date.Equals(DateTime.MaxValue), "Should be DateTime.MaxValue");
    Assert.IsTrue(d4.Date.Equals(DateTime.MinValue), "Should be DateTime.MinValue (d4)");
    Assert.IsTrue(d5.Date.Equals(DateTime.MaxValue), "Should be DateTime.MaxValue (d5)");
    // Assigned (non-empty) dates: CompareTo follows chronological order.
    d2.Date = new DateTime(2005, 1, 1);
    d3 = new Csla.SmartDate(d2.Date, d2.EmptyIsMin);
    Assert.AreEqual(d2, d3, "Assigned dates should be equal");
    d3.Date = new DateTime(2005, 2, 2);
    Assert.AreEqual(1, d3.CompareTo(d2), "Should be greater than");
    Assert.AreEqual(-1, d2.CompareTo(d3), "Should be less than");
    Assert.IsFalse(d2.CompareTo(d3) == 0, "should not be equal");
    d3.Date = new DateTime(2005, 1, 1);
    Assert.IsFalse(1 == d2.CompareTo(d3), "should be equal");
    Assert.IsFalse(-1 == d2.CompareTo(d3), "should be equal");
    Assert.AreEqual(0, d2.CompareTo(d3), "should be equal");
    // A SmartDate compares equal to any string representation of its date,
    // and to the raw DateTime value — but not to a blank string once assigned.
    Assert.IsTrue(d3.Equals("1/1/2005"), "Should be equal to string date");
    Assert.IsTrue(d3.Equals(new DateTime(2005, 1, 1)), "should be equal to DateTime");
    Assert.IsTrue(d3.Equals(d2.Date.ToString()), "Should be equal to any date time string");
    Assert.IsTrue(d3.Equals(d2.Date.ToLongDateString()), "Should be equal to any date time string");
    Assert.IsTrue(d3.Equals(d2.Date.ToShortDateString()), "Should be equal to any date time string");
    Assert.IsFalse(d3.Equals(""), "Should not be equal to a blank string");
    //DateTime can be compared using all sorts of formats but the SmartDate cannot.
    //DateTime dt = DateTime.Now;
    //long ldt = dt.ToBinary();
    //Assert.IsTrue(dt.Equals(ldt), "Should be equal");
    //Should smart date also be converted into these various types?
}
#endregion
#region Empty
[TestMethod()]
public void Empty()
{
    // An empty SmartDate absorbs arithmetic: Add/Subtract on an empty date
    // return another empty date and Text remains the empty string.
    Csla.SmartDate d2 = new Csla.SmartDate();
    Csla.SmartDate d3;
    d3 = new Csla.SmartDate(d2.Add(new TimeSpan(30, 0, 0, 0)));
    Assert.AreEqual(d2, d3, "Dates should be equal");
    Assert.AreEqual("", d2.Text, "Text should be empty");
    d3 = new Csla.SmartDate(d2.Subtract(new TimeSpan(30, 0, 0, 0)));
    Assert.AreEqual(d2, d3, "Dates should be equal");
    Assert.AreEqual("", d2.Text, "Text should be empty");
    // Two empty dates compare equal in every equality form.
    d3 = new Csla.SmartDate();
    Assert.AreEqual(0, d2.CompareTo(d3), "d2 and d3 should be the same");
    Assert.IsTrue(d2.Equals(d3), "d2 and d3 should be the same");
    Assert.IsTrue(Csla.SmartDate.Equals(d2, d3), "d2 and d3 should be the same");
    // Once one side has a real date, comparison orders empty (MinValue) first
    // and equality fails in both directions.
    d3.Date = DateTime.Now;
    Assert.AreEqual(-1, d2.CompareTo(d3), "d2 and d3 should not be the same");
    Assert.AreEqual(1, d3.CompareTo(d2), "d2 and d3 should not be the same");
    Assert.IsFalse(d2.Equals(d3), "d2 and d3 should not be the same");
    Assert.IsFalse(Csla.SmartDate.Equals(d2, d3), "d2 and d3 should not be the same");
    Assert.IsFalse(d3.Equals(d2), "d2 and d3 should not be the same");
    Assert.IsFalse(Csla.SmartDate.Equals(d3, d2), "d2 and d3 should not be the same");
}
[TestMethod]
public void MaxDateMaxValue()
{
    // A SmartDate constructed as empty-means-max must report DateTime.MaxValue.
    var maxEmpty = new Csla.SmartDate(Csla.SmartDate.EmptyValue.MaxDate);
    Assert.AreEqual(DateTime.MaxValue, maxEmpty.Date);
}
#endregion
#region Comparison Operators
[TestMethod()]
public void ComparisonOperators()
{
    // Exercises every overloaded comparison operator (<, >, ==, <=, >=, !=)
    // across the three orderings: d1 < d2, d1 == d2, d1 > d2.
    Csla.SmartDate d1 = new Csla.SmartDate();
    Csla.SmartDate d2 = new Csla.SmartDate();
    // Less-than.
    d1.Date = new DateTime(2005, 1, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 < d2, "d1 should be less than d2");
    d1.Date = new DateTime(2005, 2, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsFalse(d1 < d2, "d1 should be equal to d2");
    d1.Date = new DateTime(2005, 3, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsFalse(d1 < d2, "d1 should be greater than d2");
    // Greater-than.
    d1.Date = new DateTime(2005, 3, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 > d2, "d1 should be greater than d2");
    d1.Date = new DateTime(2005, 2, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsFalse(d1 > d2, "d1 should be equal to d2");
    d1.Date = new DateTime(2005, 1, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsFalse(d1 > d2, "d1 should be less than d2");
    // Equality.
    d1.Date = new DateTime(2005, 2, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 == d2, "d1 should be equal to d2");
    d1.Date = new DateTime(2005, 1, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsFalse(d1 == d2, "d1 should not be equal to d2");
    //#warning Smart date does not overload the <= or >= operators!
    //Assert.Fail("Missing <= and >= operators");
    // Less-than-or-equal.
    d1.Date = new DateTime(2005, 1, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 <= d2, "d1 should be less than or equal to d2");
    d1.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 <= d2, "d1 should be less than or equal to d2");
    d1.Date = new DateTime(2005, 3, 1);
    Assert.IsFalse(d1 <= d2, "d1 should be greater than to d2");
    // Greater-than-or-equal.
    d1.Date = new DateTime(2005, 3, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 >= d2, "d1 should be greater than or equal to d2");
    d1.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 >= d2, "d1 should be greater than or equal to d2");
    d1.Date = new DateTime(2005, 1, 1);
    Assert.IsFalse(d1 >= d2, "d1 should be less than to d2");
    // Inequality.
    d1.Date = new DateTime(2005, 1, 1);
    d2.Date = new DateTime(2005, 2, 1);
    Assert.IsTrue(d1 != d2, "d1 should not be equal to d2");
    d1.Date = new DateTime(2005, 2, 1);
    Assert.IsFalse(d1 != d2, "d1 should be equal to d2");
    d1.Date = new DateTime(2005, 3, 1);
    Assert.IsTrue(d1 != d2, "d1 should be greater than d2");
}
[TestMethod]
public void TryParseTest()
{
    var sd = new Csla.SmartDate();

    // Garbage input must be rejected.
    bool parsedGarbage = Csla.SmartDate.TryParse("blah", ref sd);
    if (parsedGarbage)
        Assert.AreEqual(true, false, "TryParse should have failed");

    // "t" is the SmartDate shorthand for today's date.
    bool parsedToday = Csla.SmartDate.TryParse("t", ref sd);
    if (parsedToday)
        Assert.AreEqual(DateTime.Now.Date, sd.Date.Date, "Date should have been now");
    else
        Assert.AreEqual(true, false, "TryParse should have succeeded");
}
#endregion
#region Serialization
[TestMethod()]
public void SerializationTest()
{
    // Round-trips several SmartDate values through MobileFormatter and asserts
    // that each deserialized clone equals the original.
    ApplicationContext applicationContext = _testDIContext.CreateTestApplicationContext();

    // BUG FIX: the original reused a single MemoryStream across all three
    // round-trips without resetting it — each new Serialize call appended
    // after the previous payload, while Seek(0) re-read the FIRST payload.
    // A fresh stream per round-trip makes each serialization independent.
    Csla.SmartDate RoundTrip(Csla.SmartDate value)
    {
        var formatter = new MobileFormatter(applicationContext);
        using (var stream = new MemoryStream())
        {
            formatter.Serialize(stream, value);
            stream.Seek(0, SeekOrigin.Begin);
            return (Csla.SmartDate)formatter.Deserialize(stream);
        }
    }

    // Empty date.
    var d2 = new Csla.SmartDate();
    Assert.AreEqual(d2, RoundTrip(d2), "Dates should have ben the same");

    // Concrete date with emptyIsMin=false.
    d2 = new Csla.SmartDate(DateTime.Now, false);
    Assert.AreEqual(d2, RoundTrip(d2), "Dates should have ben the same");

    // Custom format string must survive the round-trip as part of equality.
    d2 = new Csla.SmartDate(DateTime.Now.AddDays(10), false);
    d2.FormatString = "YYYY/DD/MM";
    Assert.AreEqual(d2, RoundTrip(d2), "Dates should have ben the same");

    //cslalighttest.Serialization.PersonWIthSmartDateField person;
    //person = cslalighttest.Serialization.PersonWIthSmartDateField.GetPersonWIthSmartDateField("Sergey", 2000);
    //Assert.AreEqual(person.Birthdate, person.Clone().Birthdate, "Dates should have ben the same");
    //
    //Csla.SmartDate expected = person.Birthdate;
    //person.BeginEdit();
    //person.Birthdate = new Csla.SmartDate(expected.Date.AddDays(10)); // to guarantee it's a different value
    //person.CancelEdit();
    //Csla.SmartDate actual = person.Birthdate;
    //Assert.AreEqual(expected, actual);
}
#endregion
[TestMethod]
public void DefaultFormat()
{
    IDataPortal<SDtest> dataPortal = _testDIContext.CreateDataPortal<SDtest>();
    TestResults.Reinitialise();

    // A freshly created object starts with an empty (unset) SmartDate.
    var testObject = SDtest.NewSDTest(dataPortal);
    Assert.AreEqual("", testObject.TextDate, "Should be empty");

    // Assigning a "g"-formatted value must round-trip through the property
    // unchanged, since "g" is the registered default format.
    var stamp = DateTime.Now;
    var expected = string.Format("{0:g}", stamp);
    testObject.TextDate = expected;
    Assert.AreEqual(expected, testObject.TextDate, "Should be today");
}
[TestMethod]
public void CustomParserReturnsDateTime()
{
    // Install a custom parser that only understands the literal "test";
    // all other inputs fall through (null) to the built-in parser.
    Csla.SmartDate.CustomParser = (s) =>
    {
        if (s == "test") return DateTime.Now;
        return null;
    };
    try
    {
        // uses custom parser
        var date = new Csla.SmartDate("test");
        Assert.AreEqual(DateTime.Now.Date, date.Date.Date);

        // uses buildin parser
        var date2 = new Csla.SmartDate("t");
        // BUG FIX: the original re-asserted date.Date.Date here, so date2 —
        // the built-in-parser result this branch exists to verify — was
        // never actually checked.
        Assert.AreEqual(DateTime.Now.Date, date2.Date.Date);
    }
    finally
    {
        // Reset the static hook so it cannot leak into other tests.
        Csla.SmartDate.CustomParser = null;
    }
}
}
/// <summary>
/// Minimal business object used by the SmartDate tests: one SmartDate
/// property exposed as text (default format "g") and one exposed directly.
/// </summary>
[Serializable]
public class SDtest : BusinessBase<SDtest>
{
    public static PropertyInfo<Csla.SmartDate> TextDateProperty =
      RegisterProperty<Csla.SmartDate>(c => c.TextDate, null, new Csla.SmartDate { FormatString = "g" });

    /// <summary>SmartDate surfaced as a formatted string.</summary>
    public string TextDate
    {
        get => GetPropertyConvert<Csla.SmartDate, string>(TextDateProperty);
        set => SetPropertyConvert<Csla.SmartDate, string>(TextDateProperty, value);
    }

    public static PropertyInfo<Csla.SmartDate> MyDateProperty = RegisterProperty<Csla.SmartDate>(c => c.MyDate);

    /// <summary>SmartDate surfaced with its native type.</summary>
    public Csla.SmartDate MyDate
    {
        get => GetProperty(MyDateProperty);
        set => SetProperty(MyDateProperty, value);
    }

    /// <summary>Creates a new instance through the data portal.</summary>
    public static SDtest NewSDTest(IDataPortal<SDtest> dataPortal)
    {
        return dataPortal.Create();
    }

    [Create]
    private void Create()
    {
        // No initialization required on create.
    }
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Linq;
using System.Reactive;
using System.Reactive.Linq;
using System.Reflection;
using Avalonia.Controls;
using Avalonia.Data;
using Avalonia.Markup.Data;
using Avalonia.VisualTree;
namespace Avalonia.Markup.Xaml.Data
{
/// <summary>
/// A XAML binding.
/// </summary>
public class Binding : IBinding
{
    /// <summary>
    /// Initializes a new instance of the <see cref="Binding"/> class.
    /// </summary>
    public Binding()
    {
        // UnsetValue (not null) so Initiate can tell "no fallback configured"
        // apart from "fallback explicitly set to null".
        FallbackValue = AvaloniaProperty.UnsetValue;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Binding"/> class.
    /// </summary>
    /// <param name="path">The binding path.</param>
    /// <param name="mode">The binding mode.</param>
    public Binding(string path, BindingMode mode = BindingMode.Default)
        : this()
    {
        Path = path;
        Mode = mode;
    }

    /// <summary>
    /// Gets or sets the <see cref="IValueConverter"/> to use.
    /// </summary>
    public IValueConverter Converter { get; set; }

    /// <summary>
    /// Gets or sets a parameter to pass to <see cref="Converter"/>.
    /// </summary>
    public object ConverterParameter { get; set; }

    /// <summary>
    /// Gets or sets the name of the element to use as the binding source.
    /// </summary>
    public string ElementName { get; set; }

    /// <summary>
    /// Gets or sets the value to use when the binding is unable to produce a value.
    /// </summary>
    public object FallbackValue { get; set; }

    /// <summary>
    /// Gets or sets the binding mode.
    /// </summary>
    public BindingMode Mode { get; set; }

    /// <summary>
    /// Gets or sets the binding path.
    /// </summary>
    public string Path { get; set; } = "";

    /// <summary>
    /// Gets or sets the binding priority.
    /// </summary>
    public BindingPriority Priority { get; set; }

    /// <summary>
    /// Gets or sets the relative source for the binding.
    /// </summary>
    public RelativeSource RelativeSource { get; set; }

    /// <summary>
    /// Gets or sets the source for the binding.
    /// </summary>
    public object Source { get; set; }

    // Fallback anchor used when Initiate is called without an explicit anchor;
    // weak so the binding does not keep the anchor object alive.
    internal WeakReference DefaultAnchor { get; set; }

    /// <inheritdoc/>
    public InstancedBinding Initiate(
        IAvaloniaObject target,
        AvaloniaProperty targetProperty,
        object anchor = null,
        bool enableDataValidation = false)
    {
        Contract.Requires<ArgumentNullException>(target != null);

        anchor = anchor ?? DefaultAnchor?.Target;
        // Data validation is only meaningful for local-value bindings.
        enableDataValidation = enableDataValidation && Priority == BindingPriority.LocalValue;

        ExpressionObserver observer;

        // Source selection precedence: ElementName, then Source, then
        // RelativeSource (DataContext being the default when none is set).
        if (ElementName != null)
        {
            observer = CreateElementObserver(
                (target as IControl) ?? (anchor as IControl),
                ElementName,
                Path,
                enableDataValidation);
        }
        else if (Source != null)
        {
            observer = CreateSourceObserver(Source, Path, enableDataValidation);
        }
        else if (RelativeSource == null || RelativeSource.Mode == RelativeSourceMode.DataContext)
        {
            observer = CreateDataContexObserver(
                target,
                Path,
                targetProperty == Control.DataContextProperty,
                anchor,
                enableDataValidation);
        }
        else if (RelativeSource.Mode == RelativeSourceMode.Self)
        {
            observer = CreateSourceObserver(target, Path, enableDataValidation);
        }
        else if (RelativeSource.Mode == RelativeSourceMode.TemplatedParent)
        {
            observer = CreateTemplatedParentObserver(target, Path, enableDataValidation);
        }
        else if (RelativeSource.Mode == RelativeSourceMode.FindAncestor)
        {
            if (RelativeSource.Tree == TreeType.Visual && RelativeSource.AncestorType == null)
            {
                throw new InvalidOperationException("AncestorType must be set for RelativeSourceMode.FindAncestor when searching the visual tree.");
            }

            observer = CreateFindAncestorObserver(
                (target as IControl) ?? (anchor as IControl),
                RelativeSource,
                Path,
                enableDataValidation);
        }
        else
        {
            throw new NotSupportedException();
        }

        var fallback = FallbackValue;

        // If we're binding to DataContext and our fallback is UnsetValue then override
        // the fallback value to null, as broken bindings to DataContext must reset the
        // DataContext in order to not propagate incorrect DataContexts to child controls.
        // See Avalonia.Markup.Xaml.UnitTests.Data.DataContext_Binding_Should_Produce_Correct_Results.
        if (targetProperty == Control.DataContextProperty && fallback == AvaloniaProperty.UnsetValue)
        {
            fallback = null;
        }

        var subject = new BindingExpression(
            observer,
            targetProperty?.PropertyType ?? typeof(object),
            fallback,
            Converter ?? DefaultValueConverter.Instance,
            ConverterParameter,
            Priority);

        return new InstancedBinding(subject, Mode, Priority);
    }

    /// <summary>
    /// Creates an observer bound to the target's (or anchor's) DataContext.
    /// When the target property IS DataContext, observes the parent's
    /// DataContext instead to avoid a self-referential binding.
    /// </summary>
    private ExpressionObserver CreateDataContexObserver(
        IAvaloniaObject target,
        string path,
        bool targetIsDataContext,
        object anchor,
        bool enableDataValidation)
    {
        Contract.Requires<ArgumentNullException>(target != null);

        if (!(target is IControl))
        {
            // Non-control targets fall back to the anchor for a DataContext.
            target = anchor as IControl;

            if (target == null)
            {
                throw new InvalidOperationException("Cannot find a DataContext to bind to.");
            }
        }

        if (!targetIsDataContext)
        {
            // Skip(1): ignore the initial value, re-evaluate on later changes only.
            var update = target.GetObservable(Control.DataContextProperty)
                .Skip(1)
                .Select(_ => Unit.Default);
            var result = new ExpressionObserver(
                () => target.GetValue(Control.DataContextProperty),
                path,
                update,
                enableDataValidation);

            return result;
        }
        else
        {
            return new ExpressionObserver(
                GetParentDataContext(target),
                path,
                enableDataValidation);
        }
    }

    /// <summary>
    /// Creates an observer that tracks a named element (ElementName binding).
    /// </summary>
    private ExpressionObserver CreateElementObserver(
        IControl target,
        string elementName,
        string path,
        bool enableDataValidation)
    {
        Contract.Requires<ArgumentNullException>(target != null);

        // e.g. "#myButton.Content" — used for diagnostics.
        var description = $"#{elementName}.{path}";
        var result = new ExpressionObserver(
            ControlLocator.Track(target, elementName),
            path,
            enableDataValidation,
            description);
        return result;
    }

    /// <summary>
    /// Creates an observer for a RelativeSource FindAncestor binding.
    /// </summary>
    private ExpressionObserver CreateFindAncestorObserver(
        IControl target,
        RelativeSource relativeSource,
        string path,
        bool enableDataValidation)
    {
        Contract.Requires<ArgumentNullException>(target != null);

        // AncestorLevel is 1-based in markup; ControlLocator is 0-based.
        return new ExpressionObserver(
            ControlLocator.Track(target, relativeSource.Tree, relativeSource.AncestorLevel - 1, relativeSource.AncestorType),
            path,
            enableDataValidation);
    }

    /// <summary>
    /// Creates an observer rooted at an explicit source object (Source or Self).
    /// </summary>
    private ExpressionObserver CreateSourceObserver(
        object source,
        string path,
        bool enableDataValidation)
    {
        Contract.Requires<ArgumentNullException>(source != null);

        return new ExpressionObserver(source, path, enableDataValidation);
    }

    /// <summary>
    /// Creates an observer rooted at the target's TemplatedParent, re-evaluated
    /// whenever the templated parent changes.
    /// </summary>
    private ExpressionObserver CreateTemplatedParentObserver(
        IAvaloniaObject target,
        string path,
        bool enableDataValidation)
    {
        Contract.Requires<ArgumentNullException>(target != null);

        var update = target.GetObservable(Control.TemplatedParentProperty)
            .Skip(1)
            .Select(_ => Unit.Default);

        var result = new ExpressionObserver(
            () => target.GetValue(Control.TemplatedParentProperty),
            path,
            update,
            enableDataValidation);

        return result;
    }

    private IObservable<object> GetParentDataContext(IAvaloniaObject target)
    {
        // The DataContext is based on the visual parent and not the logical parent: this may
        // seem unintuitive considering the fact that property inheritance works on the logical
        // tree, but consider a ContentControl with a ContentPresenter. The ContentControl's
        // Content property is bound to a value which becomes the ContentPresenter's
        // DataContext - it is from this that the child hosted by the ContentPresenter needs to
        // inherit its DataContext.
        return target.GetObservable(Visual.VisualParentProperty)
            .Select(x =>
            {
                return (x as IAvaloniaObject)?.GetObservable(Control.DataContextProperty) ??
                       Observable.Return((object)null);
            }).Switch();
    }

    // NOTE(review): not referenced anywhere in this class — possibly consumed
    // via reflection by the XAML loader; confirm before removing.
    private class PathInfo
    {
        public string Path { get; set; }
        public string ElementName { get; set; }
        public RelativeSource RelativeSource { get; set; }
    }
}
}
| |
#if UNITY_IPHONE
#define __NOGEN__
#endif
namespace LuaInterface
{
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Reflection;
using System.Security;
using System.Runtime.InteropServices;
using System.Threading;
using System.Text;
using UnityEngine;
public class LuaState : IDisposable
{
// Native lua_State* handle; IntPtr.Zero when creation failed or state closed.
public IntPtr L;

// Delegates are kept in managed fields so the GC cannot collect them while
// the native Lua runtime still holds function pointers to them.
internal LuaCSFunction tracebackFunction;
internal ObjectTranslator translator;

internal LuaCSFunction panicCallback;

// Overrides
internal LuaCSFunction printFunction;
internal LuaCSFunction loadfileFunction;
internal LuaCSFunction loaderFunction;
internal LuaCSFunction dofileFunction;
// Creates and fully wires up a new Lua state: opens the standard libraries,
// registers the luanet bridge table, installs the managed print/loadfile/
// dofile/loader overrides, and runs the luanet bootstrap script.
// NOTE: the exact order of the stack operations below is load-bearing.
public LuaState()
{
    // Create State
    L = LuaDLL.luaL_newstate();
    if (L == IntPtr.Zero)
    {
        Debug.LogError("Failed to create lua state!");
        return;
    }

    // Create LuaInterface library
    LuaDLL.luaL_openlibs(L);
    // Marker in the registry so native code can detect a LuaInterface state.
    LuaDLL.lua_pushstring(L, "LUAINTERFACE LOADED");
    LuaDLL.lua_pushboolean(L, true);
    LuaDLL.lua_settable(L, (int) LuaIndexes.LUA_REGISTRYINDEX);
    LuaDLL.lua_newtable(L);
    LuaDLL.lua_setglobal(L, "luanet");
    // Temporarily swap the globals table with luanet so the translator
    // registers its entries inside luanet rather than the real globals.
    LuaDLL.lua_pushvalue(L, (int)LuaIndexes.LUA_GLOBALSINDEX);
    LuaDLL.lua_getglobal(L, "luanet");
    LuaDLL.lua_pushstring(L, "getmetatable");
    LuaDLL.lua_getglobal(L, "getmetatable");
    LuaDLL.lua_settable(L, -3);

    // Set luanet as global for object translator
    LuaDLL.lua_replace(L, (int)LuaIndexes.LUA_GLOBALSINDEX);
    translator = new ObjectTranslator(this,L);
    // Restore the original globals table.
    LuaDLL.lua_replace(L, (int)LuaIndexes.LUA_GLOBALSINDEX);

    // Pinned handle lets native callbacks recover the translator instance.
    GCHandle handle = GCHandle.Alloc( translator, GCHandleType.Pinned );
    ObjectTranslator.PushTranslator(L, handle);

    tracebackFunction = new LuaCSFunction(LuaStatic.traceback);

    // We need to keep this in a managed reference so the delegate doesn't get garbage collected
    panicCallback = new LuaCSFunction(LuaStatic.panic);
    LuaDLL.lua_atpanic(L, panicCallback);

    // Replace the stock print/loadfile/dofile with Unity-aware versions.
    printFunction = new LuaCSFunction(LuaStatic.print);
    LuaDLL.lua_pushstdcallcfunction(L, printFunction);
    LuaDLL.lua_setfield(L, LuaIndexes.LUA_GLOBALSINDEX, "print");

    loadfileFunction = new LuaCSFunction(LuaStatic.loadfile);
    LuaDLL.lua_pushstdcallcfunction(L, loadfileFunction);
    LuaDLL.lua_setfield(L, LuaIndexes.LUA_GLOBALSINDEX, "loadfile");

    dofileFunction = new LuaCSFunction(LuaStatic.dofile);
    LuaDLL.lua_pushstdcallcfunction(L, dofileFunction);
    LuaDLL.lua_setfield(L, LuaIndexes.LUA_GLOBALSINDEX, "dofile");

    // Insert our loader FIRST
    loaderFunction = new LuaCSFunction(LuaStatic.loader);
    LuaDLL.lua_pushstdcallcfunction(L, loaderFunction);
    int loaderFunc = LuaDLL.lua_gettop( L );
    LuaDLL.lua_getfield( L, LuaIndexes.LUA_GLOBALSINDEX, "package" );
    LuaDLL.lua_getfield( L, -1, "loaders" );
    int loaderTable = LuaDLL.lua_gettop( L );

    // Shift table elements right
    for( int e = LuaDLL.luaL_getn( L, loaderTable ) + 1; e > 1; e-- )
    {
        LuaDLL.lua_rawgeti( L, loaderTable, e-1 );
        LuaDLL.lua_rawseti( L, loaderTable, e );
    }
    LuaDLL.lua_pushvalue( L, loaderFunc );
    LuaDLL.lua_rawseti( L, loaderTable, 1 );

    LuaDLL.luaopen_pb( L );

    // Clear the stack, then run the luanet bootstrap chunk.
    LuaDLL.lua_settop( L, 0 );

    //DoString(LuaStatic.init_luanet);
    DoString( System.Text.Encoding.ASCII.GetBytes( LuaStatic.init_luanet ) );
}
/// <summary>
/// Closes the native Lua state, releasing all Lua-side resources.
/// Safe to call more than once.
/// </summary>
public void Close()
{
    if (L != IntPtr.Zero)
    {
        LuaDLL.lua_close(L);
        // BUG FIX: zero the handle so a second Close()/Dispose() call cannot
        // pass a stale pointer to lua_close (double-free / crash).
        L = IntPtr.Zero;
    }
}
/// <summary>
/// Assuming we have a Lua error string sitting on the stack, throw a C# exception out to the user's app
/// </summary>
/// <exception cref="LuaScriptException">Thrown if the script caused an exception</exception>
/// <summary>
/// Assuming we have a Lua error string sitting on the stack, throw a C# exception out to the user's app
/// </summary>
/// <param name="oldTop">Stack top to restore before throwing, keeping the Lua stack balanced.</param>
/// <exception cref="LuaScriptException">Thrown if the script caused an exception</exception>
internal void ThrowExceptionFromError(int oldTop)
{
    // Capture the error value BEFORE truncating the stack.
    object err = translator.getObject(L, -1);
    LuaDLL.lua_settop(L, oldTop);

    // A pre-wrapped exception - just rethrow it (stack trace of InnerException will be preserved)
    LuaScriptException luaEx = err as LuaScriptException;
    if (luaEx != null) throw luaEx;

    // A non-wrapped Lua error (best interpreted as a string) - wrap it and throw it
    if (err == null) err = "Unknown Lua Error";
    throw new LuaScriptException(err.ToString(), "");
}
/// <summary>
/// Convert C# exceptions into Lua errors
/// </summary>
/// <returns>num of things on stack</returns>
/// <param name="e">null for no pending exception</param>
/// <summary>
/// Convert C# exceptions into Lua errors
/// </summary>
/// <returns>num of things on stack</returns>
/// <param name="e">null for no pending exception</param>
internal int SetPendingException(Exception e)
{
    // Nothing to report: leave the Lua stack untouched.
    if (e == null)
        return 0;

    // Record the CLR exception as a pending Lua error and push a nil
    // placeholder so the caller has exactly one value on the stack.
    translator.throwError(L, e);
    LuaDLL.lua_pushnil(L);
    return 1;
}
/// <summary>
///
/// </summary>
/// <param name="chunk"></param>
/// <param name="name"></param>
/// <returns></returns>
/// <summary>
/// Compiles a Lua chunk into a callable function without executing it.
/// </summary>
/// <param name="chunk">Raw bytes of the Lua source.</param>
/// <param name="name">Chunk name used in error messages.</param>
/// <param name="env">Optional environment table to bind, or null.</param>
/// <returns>The compiled function.</returns>
public LuaFunction LoadString(byte[] chunk, string name, LuaTable env)
{
    int stackTop = LuaDLL.lua_gettop(L);

    // Compile; a nonzero result leaves an error message on the stack.
    if (LuaDLL.luaL_loadbuffer(L, chunk, chunk.Length, name) != 0)
        ThrowExceptionFromError(stackTop);

    // Bind a custom environment to the freshly compiled function if given.
    if (env != null)
    {
        env.push(L);
        LuaDLL.lua_setfenv(L, -2);
    }

    LuaFunction compiled = translator.getFunction(L, -1);
    translator.popValues(L, stackTop);
    return compiled;
}
/// <summary>
/// Compiles a Lua chunk with the default environment (convenience overload).
/// </summary>
/// <param name="chunk">Raw bytes of the Lua source.</param>
/// <param name="name">Chunk name used in error messages.</param>
/// <returns>The compiled function.</returns>
public LuaFunction LoadString(byte[] chunk, string name)
{
    return LoadString(chunk, name, null);
}
/// <summary>
///
/// </summary>
/// <param name="fileName"></param>
/// <returns></returns>
/// <summary>
/// Loads and compiles a Lua script from Unity's Resources folder
/// without executing it.
/// </summary>
/// <param name="fileName">Resource path of the script (no extension).</param>
/// <returns>The compiled function.</returns>
public LuaFunction LoadFile(string fileName)
{
    int oldTop = LuaDLL.lua_gettop(L);

    // Load with Unity3D resources
    TextAsset file = (TextAsset)Resources.Load(fileName);
    if( file == null )
    {
        // NOTE(review): no error value has been pushed here, so
        // ThrowExceptionFromError reads whatever is at the stack top —
        // likely producing "Unknown Lua Error"; confirm intended.
        ThrowExceptionFromError(oldTop);
    }

    if( LuaDLL.luaL_loadbuffer(L, file.bytes, file.bytes.Length, fileName) != 0 )
    {
        ThrowExceptionFromError(oldTop);
    }

    LuaFunction result = translator.getFunction(L, -1);
    translator.popValues(L, oldTop);

    return result;
}
/*
* Excutes a Lua chunk and returns all the chunk's return
* values in an array
*/
/*
 * Excutes a Lua chunk and returns all the chunk's return
 * values in an array
 */
// Convenience overload: default chunk name "chunk", default environment.
public object[] DoString(byte[] chunk)
{
    return DoString(chunk,"chunk", null);
}
/// <summary>
/// Executes a Lua chnk and returns all the chunk's return values in an array.
/// </summary>
/// <param name="chunk">Chunk to execute</param>
/// <param name="chunkName">Name to associate with the chunk</param>
/// <returns></returns>
/// <summary>
/// Compiles and immediately executes a Lua chunk, returning every value
/// the chunk produced.
/// </summary>
/// <param name="chunk">Chunk to execute</param>
/// <param name="chunkName">Name to associate with the chunk</param>
/// <param name="env">Optional environment table to bind, or null.</param>
/// <returns>All return values of the chunk.</returns>
public object[] DoString(byte[] chunk, string chunkName, LuaTable env)
{
    int stackTop = LuaDLL.lua_gettop(L);

    // Compile first; failure leaves the error message on the stack.
    if (LuaDLL.luaL_loadbuffer(L, chunk, chunk.Length, chunkName) != 0)
    {
        ThrowExceptionFromError(stackTop);
        return null; // unreachable: ThrowExceptionFromError always throws
    }

    // Bind an optional environment table to the compiled chunk.
    if (env != null)
    {
        env.push(L);
        //LuaDLL.lua_setfenv(L, -1);
        LuaDLL.lua_setfenv(L, -2);
    }

    // Execute; on success collect every return value above the saved top.
    if (LuaDLL.lua_pcall(L, 0, -1, 0) != 0)
        ThrowExceptionFromError(stackTop);

    return translator.popValues(L, stackTop);
}
// Convenience overload: executes a resource script with the default environment.
public object[] DoFile(string fileName)
{
    return DoFile(fileName, null);
}
/*
* Excutes a Lua file and returns all the chunk's return
* values in an array
*/
public object[] DoFile(string fileName, LuaTable env)
{
LuaDLL.lua_pushstdcallcfunction(L,tracebackFunction);
int oldTop=LuaDLL.lua_gettop(L);
// Load with Unity3D resources
TextAsset file = (TextAsset)Resources.Load(fileName);
if( file == null )
{
ThrowExceptionFromError(oldTop);
}
if( LuaDLL.luaL_loadbuffer(L, file.bytes, file.bytes.Length, fileName) == 0 )
{
if (env != null)
{
env.push(L);
//LuaDLL.lua_setfenv(L, -1);
LuaDLL.lua_setfenv(L, -2);
}
if (LuaDLL.lua_pcall(L, 0, -1, -2) == 0)
{
object[] results = translator.popValues(L, oldTop);
LuaDLL.lua_pop(L, 1);
return results;
}
else
{
ThrowExceptionFromError(oldTop);
}
}
else
{
ThrowExceptionFromError(oldTop);
}
return null; // Never reached - keeps compiler happy
}
/*
* Indexer for global variables from the LuaInterpreter
* Supports navigation of tables by using . operator
*/
/*
 * Indexer for global variables from the LuaInterpreter
 * Supports navigation of tables by using . operator
 */
// e.g. state["a.b.c"] reads/writes field c of table b inside global a.
public object this[string fullPath]
{
    get
    {
        object returnValue=null;
        int oldTop=LuaDLL.lua_gettop(L);
        string[] path=fullPath.Split(new char[] { '.' });
        // Fetch the root global, then walk any remaining path segments.
        LuaDLL.lua_getglobal(L,path[0]);
        returnValue=translator.getObject(L,-1);
        if(path.Length>1)
        {
            string[] remainingPath=new string[path.Length-1];
            Array.Copy(path,1,remainingPath,0,path.Length-1);
            returnValue=getObject(remainingPath);
        }
        // Restore the stack regardless of how deep the walk went.
        LuaDLL.lua_settop(L,oldTop);
        return returnValue;
    }
    set
    {
        int oldTop=LuaDLL.lua_gettop(L);
        string[] path=fullPath.Split(new char[] { '.' });
        if(path.Length==1)
        {
            // Simple global assignment.
            translator.push(L,value);
            LuaDLL.lua_setglobal(L,fullPath);
        }
        else
        {
            // Navigate to the innermost table, then set its field.
            LuaDLL.lua_getglobal(L,path[0]);
            string[] remainingPath=new string[path.Length-1];
            Array.Copy(path,1,remainingPath,0,path.Length-1);
            setObject(remainingPath,value);
        }
        LuaDLL.lua_settop(L,oldTop);

        // Globals auto-complete
        if (value == null)
        {
            // Remove now obsolete entries
            globals.Remove(fullPath);
        }
        else
        {
            // Add new entries
            if (!globals.Contains(fullPath))
                registerGlobal(fullPath, value.GetType(), 0);
        }
    }
}
#region Globals auto-complete
// Names registered for auto-completion; kept unsorted until first read.
private readonly List<string> globals = new List<string>();
// True while `globals` is sorted; cleared by registerGlobal on every change.
private bool globalsSorted;
/// <summary>
/// An alphabetically sorted list of all globals (objects, methods, etc.) externally added to this Lua instance
/// </summary>
/// <remarks>Members of globals are also listed. The formatting is optimized for text input auto-completion.</remarks>
/// <summary>
/// An alphabetically sorted list of all globals (objects, methods, etc.) externally added to this Lua instance
/// </summary>
/// <remarks>Members of globals are also listed. The formatting is optimized for text input auto-completion.</remarks>
public IEnumerable<string> Globals
{
    get
    {
        // Sort lazily: only re-sort when the list changed since last read.
        if (!globalsSorted)
        {
            globalsSorted = true;
            globals.Sort();
        }

        return globals;
    }
}
/// <summary>
/// Adds an entry to <see cref="globals"/> (recursivley handles 2 levels of members)
/// </summary>
/// <param name="path">The index accessor path ot the entry</param>
/// <param name="type">The type of the entry</param>
/// <param name="recursionCounter">How deep have we gone with recursion?</param>
/// <summary>
/// Adds an entry to <see cref="globals"/> (recursivley handles 2 levels of members)
/// </summary>
/// <param name="path">The index accessor path ot the entry</param>
/// <param name="type">The type of the entry</param>
/// <param name="recursionCounter">How deep have we gone with recursion?</param>
private void registerGlobal(string path, Type type, int recursionCounter)
{
    // If the type is a global method, list it directly
    if (type == typeof(LuaCSFunction))
    {
        // Format for easy method invocation
        globals.Add(path + "(");
    }
    // If the type is a class or an interface and recursion hasn't been running too long, list the members
    else if ((type.IsClass || type.IsInterface) && type != typeof(string) && recursionCounter < 2)
    {
        #region Methods
        foreach (MethodInfo method in type.GetMethods(BindingFlags.Public | BindingFlags.Instance))
        {
            if (
                // Check that the LuaHideAttribute and LuaGlobalAttribute were not applied
                (method.GetCustomAttributes(typeof(LuaHideAttribute), false).Length == 0) &&
                (method.GetCustomAttributes(typeof(LuaGlobalAttribute), false).Length == 0) &&
                // Exclude some generic .NET methods that wouldn't be very usefull in Lua
                method.Name != "GetType" && method.Name != "GetHashCode" && method.Name != "Equals" &&
                method.Name != "ToString" && method.Name != "Clone" && method.Name != "Dispose" &&
                method.Name != "GetEnumerator" && method.Name != "CopyTo" &&
                // Property/event accessors are surfaced via their member name instead.
                !method.Name.StartsWith("get_", StringComparison.Ordinal) &&
                !method.Name.StartsWith("set_", StringComparison.Ordinal) &&
                !method.Name.StartsWith("add_", StringComparison.Ordinal) &&
                !method.Name.StartsWith("remove_", StringComparison.Ordinal))
            {
                // Format for easy method invocation
                string command = path + ":" + method.Name + "(";
                if (method.GetParameters().Length == 0) command += ")";
                globals.Add(command);
            }
        }
        #endregion

        #region Fields
        foreach (FieldInfo field in type.GetFields(BindingFlags.Public | BindingFlags.Instance))
        {
            if (
                // Check that the LuaHideAttribute and LuaGlobalAttribute were not applied
                (field.GetCustomAttributes(typeof(LuaHideAttribute), false).Length == 0) &&
                (field.GetCustomAttributes(typeof(LuaGlobalAttribute), false).Length == 0))
            {
                // Go into recursion for members
                registerGlobal(path + "." + field.Name, field.FieldType, recursionCounter + 1);
            }
        }
        #endregion

        #region Properties
        foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance))
        {
            if (
                // Check that the LuaHideAttribute and LuaGlobalAttribute were not applied
                (property.GetCustomAttributes(typeof(LuaHideAttribute), false).Length == 0) &&
                (property.GetCustomAttributes(typeof(LuaGlobalAttribute), false).Length == 0)
                // Exclude some generic .NET properties that wouldn't be very usefull in Lua
                && property.Name != "Item")
            {
                // Go into recursion for members
                registerGlobal(path + "." + property.Name, property.PropertyType, recursionCounter + 1);
            }
        }
        #endregion
    }
    // Otherwise simply add the element to the list
    else globals.Add(path);

    // List will need to be sorted on next access
    globalsSorted = false;
}
#endregion
/*
* Navigates a table in the top of the stack, returning
* the value of the specified field
*/
/*
 * Navigates a table in the top of the stack, returning
 * the value of the specified field
 */
// Walks one key per path segment; stops early if a segment resolves to nil.
internal object getObject(string[] remainingPath)
{
    object current = null;
    foreach (string key in remainingPath)
    {
        LuaDLL.lua_pushstring(L, key);
        LuaDLL.lua_gettable(L, -2);
        current = translator.getObject(L, -1);
        if (current == null)
            break;
    }
    return current;
}
/*
* Gets a numeric global variable
*/
/*
 * Gets a numeric global variable
 */
public double GetNumber(string fullPath)
{
    // The indexer resolves dotted paths into nested tables.
    object value = this[fullPath];
    return (double)value;
}
/*
* Gets a string global variable
*/
/*
 * Gets a string global variable
 */
public string GetString(string fullPath)
{
    // The indexer resolves dotted paths into nested tables.
    object value = this[fullPath];
    return (string)value;
}
/*
* Gets a table global variable
*/
/*
 * Gets a table global variable
 */
public LuaTable GetTable(string fullPath)
{
    // The indexer resolves dotted paths into nested tables.
    object value = this[fullPath];
    return (LuaTable)value;
}
#if ! __NOGEN__
/*
 * Gets a table global variable as an object implementing
 * the interfaceType interface
 */
// NOTE(review): throwError is invoked unconditionally before the return,
// so the CodeGeneration call appears unreachable in practice — confirm
// whether this feature was deliberately disabled.
public object GetTable(Type interfaceType, string fullPath)
{
    translator.throwError(L,"Tables as interfaces not implemnented");
    return CodeGeneration.Instance.GetClassInstance(interfaceType,GetTable(fullPath));
}
#endif
/*
* Gets a function global variable
*/
/*
 * Gets a function global variable
 */
public LuaFunction GetFunction(string fullPath)
{
    // A registered C# callback needs wrapping; a Lua function casts directly.
    object value = this[fullPath];
    LuaCSFunction csFunction = value as LuaCSFunction;
    if (csFunction != null)
        return new LuaFunction(csFunction, this);
    return (LuaFunction)value;
}
/*
* Gets a function global variable as a delegate of
* type delegateType
*/
/*
 * Gets a function global variable as a delegate of
 * type delegateType
 */
// Requires runtime code generation; unavailable on AOT platforms (__NOGEN__).
public Delegate GetFunction(Type delegateType,string fullPath)
{
#if __NOGEN__
    translator.throwError(L,"function delegates not implemnented");
    return null;
#else
    return CodeGeneration.Instance.GetDelegate(delegateType,GetFunction(fullPath));
#endif
}
/*
* Calls the object as a function with the provided arguments,
* returning the function's returned values inside an array
*/
/*
 * Calls the object as a function with the provided arguments,
 * returning the function's returned values inside an array
 */
// Convenience overload: no return-type coercion.
internal object[] callFunction(object function,object[] args)
{
    return callFunction(function, args, null);
}
/*
* Calls the object as a function with the provided arguments and
* casting returned values to the types in returnTypes before returning
* them in an array
*/
/*
 * Calls the object as a function with the provided arguments and
 * casting returned values to the types in returnTypes before returning
 * them in an array
 */
// args may be null (call with no arguments); returnTypes may be null
// (no coercion of the returned values).
internal object[] callFunction(object function,object[] args,Type[] returnTypes)
{
    int oldTop = LuaDLL.lua_gettop(L);

    // BUG FIX: the original read args.Length before its null check two lines
    // later, throwing NullReferenceException whenever args was null.
    int nArgs = (args != null) ? args.Length : 0;

    // Reserve room for the function, its arguments and some scratch space.
    if (!LuaDLL.lua_checkstack(L, nArgs + 6))
        throw new LuaException("Lua stack overflow");

    translator.push(L, function);

    if (args != null)
    {
        for (int i = 0; i < args.Length; i++)
        {
            translator.push(L, args[i]);
        }
    }

    int error = LuaDLL.lua_pcall(L, nArgs, -1, 0);
    if (error != 0)
        ThrowExceptionFromError(oldTop);

    // Collect all values pushed above the saved top, optionally coerced.
    if (returnTypes != null)
        return translator.popValues(L, oldTop, returnTypes);
    else
        return translator.popValues(L, oldTop);
}
/*
 * Navigates the table on top of the Lua stack to set the value of
 * one of its (possibly nested) fields.
 */
internal void setObject(string[] remainingPath, object val)
{
    // Walk down to the table that owns the final field; each
    // lua_gettable pushes the next level onto the stack (the caller
    // restores the stack top afterwards).
    for(int i=0; i<remainingPath.Length-1;i++)
    {
        LuaDLL.lua_pushstring(L,remainingPath[i]);
        LuaDLL.lua_gettable(L,-2);
    }
    // table[lastName] = val; lua_settable pops both key and value.
    LuaDLL.lua_pushstring(L,remainingPath[remainingPath.Length-1]);
    translator.push(L,val);
    LuaDLL.lua_settable(L,-3);
}
/*
 * Creates a new table as a global variable or as a field
 * inside an existing table.
 */
public void NewTable(string fullPath)
{
    string[] path=fullPath.Split(new char[] { '.' });
    int oldTop=LuaDLL.lua_gettop(L);
    if(path.Length==1)
    {
        // Simple case: _G[fullPath] = {}
        LuaDLL.lua_newtable(L);
        LuaDLL.lua_setglobal(L,fullPath);
    }
    else
    {
        // Walk from the global named by the first segment down to the
        // table that should own the new field.
        LuaDLL.lua_getglobal(L,path[0]);
        for(int i=1; i<path.Length-1;i++)
        {
            LuaDLL.lua_pushstring(L,path[i]);
            LuaDLL.lua_gettable(L,-2);
        }
        // owner[lastSegment] = {}
        LuaDLL.lua_pushstring(L,path[path.Length-1]);
        LuaDLL.lua_newtable(L);
        LuaDLL.lua_settable(L,-3);
    }
    // Leave the stack exactly as we found it.
    LuaDLL.lua_settop(L,oldTop);
}
/*
 * Creates a new anonymous table and returns a managed wrapper for it.
 */
public LuaTable NewTable()
{
    // Push a fresh table, wrap it, then restore the stack.
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_newtable(L);
    LuaTable table = (LuaTable)translator.getObject(L, -1);
    LuaDLL.lua_settop(L, stackTop);
    return table;
}
/*
 * Copies the key/value pairs of the given Lua table into a
 * ListDictionary, preserving Lua's iteration order.
 */
public ListDictionary GetTableDict(LuaTable table)
{
    ListDictionary dict = new ListDictionary();
    int oldTop = LuaDLL.lua_gettop(L);
    translator.push(L, table);
    // Standard lua_next traversal: nil primes the first key.
    LuaDLL.lua_pushnil(L);
    while (LuaDLL.lua_next(L, -2) != 0)
    {
        // Stack here: ... table key value -- record the pair ...
        dict[translator.getObject(L, -2)] = translator.getObject(L, -1);
        // ... then pop only the value, keeping the key for lua_next.
        LuaDLL.lua_settop(L, -2);
    }
    LuaDLL.lua_settop(L, oldTop);
    return dict;
}
/*
 * Lets go of a previously allocated reference to a table, function
 * or userdata.
 */
internal void dispose(int reference)
{
    // Guard against use after the Lua state has been torn down
    // (fix submitted by Qingrui Li).
    if (L == IntPtr.Zero)
        return;
    LuaDLL.lua_unref(L, reference);
}
/*
 * Gets a field of the table corresponding to the provided reference
 * using rawget (metatables are not consulted).
 */
internal object rawGetObject(int reference,string field)
{
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_getref(L, reference);
    LuaDLL.lua_pushstring(L, field);
    LuaDLL.lua_rawget(L, -2);
    object result = translator.getObject(L, -1);
    LuaDLL.lua_settop(L, stackTop);
    return result;
}
/*
 * Gets a field of the table or userdata corresponding to the
 * provided reference.
 */
internal object getObject(int reference,string field)
{
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_getref(L, reference);
    // Split a dotted path and walk it with the array-based overload.
    string[] path = field.Split(new char[] { '.' });
    object result = getObject(path);
    LuaDLL.lua_settop(L, stackTop);
    return result;
}
/*
 * Gets a (non-string, e.g. numeric) field of the table or userdata
 * corresponding to the provided reference.
 */
internal object getObject(int reference,object field)
{
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_getref(L, reference);
    translator.push(L, field);
    LuaDLL.lua_gettable(L, -2);
    object result = translator.getObject(L, -1);
    LuaDLL.lua_settop(L, stackTop);
    return result;
}
/*
 * Sets a field of the table or userdata corresponding to the
 * provided reference to the provided value.
 */
internal void setObject(int reference, string field, object val)
{
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_getref(L, reference);
    // Dotted paths are resolved by the array-based overload.
    string[] path = field.Split(new char[] { '.' });
    setObject(path, val);
    LuaDLL.lua_settop(L, stackTop);
}
/*
 * Sets a (non-string, e.g. numeric) field of the table or userdata
 * corresponding to the provided reference to the provided value.
 */
internal void setObject(int reference, object field, object val)
{
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_getref(L, reference);
    translator.push(L, field);
    translator.push(L, val);
    LuaDLL.lua_settable(L, -3);
    LuaDLL.lua_settop(L, stackTop);
}
/*
 * Registers an object's method as a Lua function (global or table
 * field).  The method may have any signature.
 * (CP: fix for struct constructors by Alexander Kappner,
 * http://luaforge.net/forum/forum.php?thread_id=2859&forum_id=145)
 */
public LuaFunction RegisterFunction(string path, object target, MethodBase function /*MethodInfo function*/)
{
    // We leave nothing on the stack when we are done.
    int oldTop = LuaDLL.lua_gettop(L);
    // Wrap the CLR method so Lua can call it, publish it under path,
    // then hand back a LuaFunction handle to the registered value.
    LuaMethodWrapper wrapper=new LuaMethodWrapper(translator,target,function.DeclaringType,function);
    translator.push(L,new LuaCSFunction(wrapper.call));
    this[path]=translator.getObject(L,-1);
    LuaFunction f = GetFunction(path);
    LuaDLL.lua_settop(L, oldTop);
    return f;
}
/*
 * Wraps an object's method as a LuaFunction without registering it
 * under any name.  The method may have any signature.
 * (CP: fix for struct constructors by Alexander Kappner,
 * http://luaforge.net/forum/forum.php?thread_id=2859&forum_id=145)
 */
public LuaFunction CreateFunction(object target, MethodBase function /*MethodInfo function*/)
{
    // We leave nothing on the stack when we are done.
    int stackTop = LuaDLL.lua_gettop(L);
    LuaMethodWrapper wrapper = new LuaMethodWrapper(translator, target, function.DeclaringType, function);
    translator.push(L, new LuaCSFunction(wrapper.call));
    object pushed = translator.getObject(L, -1);
    LuaFunction result;
    if (pushed is LuaCSFunction)
        result = new LuaFunction((LuaCSFunction)pushed, this);
    else
        result = (LuaFunction)pushed;
    LuaDLL.lua_settop(L, stackTop);
    return result;
}
/*
 * Compares the two values referenced by ref1 and ref2 for equality.
 */
internal bool compareRef(int ref1, int ref2)
{
    int stackTop = LuaDLL.lua_gettop(L);
    LuaDLL.lua_getref(L, ref1);
    LuaDLL.lua_getref(L, ref2);
    bool areEqual = LuaDLL.lua_equal(L, -1, -2) != 0;
    LuaDLL.lua_settop(L, stackTop);
    return areEqual;
}
/*
 * Pushes a C# callback onto the Lua stack as a callable function.
 */
internal void pushCSFunction(LuaCSFunction function)
{
    // The translator knows how to push a CLR delegate as a Lua closure.
    translator.pushFunction(L, function);
}
#region IDisposable Members
/*
 * Releases the interpreter's managed resources.
 */
public void Dispose()
{
    Dispose(true);
    // NOTE(review): forcing a full collection on every Dispose is a GC
    // anti-pattern, and GC.SuppressFinalize(this) is never called.
    // Kept as-is because existing callers may rely on finalizers for
    // Lua references running at this point — confirm before changing.
    System.GC.Collect();
    System.GC.WaitForPendingFinalizers();
}
/*
 * Releases managed state.  Virtual so that LuaThread (below) can
 * override it without tearing down the translator it shares with its
 * parent state.
 */
public virtual void Dispose(bool dispose)
{
    if( dispose )
    {
        if (translator != null)
        {
            translator.pendingEvents.Dispose();
            // Null out so a double-Dispose is harmless.
            translator = null;
        }
    }
}
#endregion
}
/*
 * Wraps a Lua coroutine created from a parent LuaState.  The wrapper
 * shares the parent's translator and callback hooks and anchors the
 * coroutine in the Lua registry so it is not garbage-collected.
 */
public class LuaThread : LuaState
{
    // Tracks if thread is running or not (set by Start, cleared on first Resume)
    private bool start = false;
    // Keeps reference of thread in registry to prevent GC
    private int threadRef;
    // Hold on to parent for later (registry ops go through parent.L)
    private LuaState parent;
    // Function the coroutine will run
    private LuaFunction func;
    /*
     * Creates a coroutine on parentState that will run threadFunc.
     * NOTE(review): "this.translator.interpreter = this" repoints the
     * SHARED translator at this thread, which also affects the parent
     * state — confirm this is intended when multiple threads exist.
     */
    public LuaThread( LuaState parentState, LuaFunction threadFunc )
    {
        // Copy from parent
        this.tracebackFunction = parentState.tracebackFunction;
        this.translator = parentState.translator;
        this.translator.interpreter = this;
        this.panicCallback = parentState.panicCallback;
        this.printFunction = parentState.printFunction;
        this.loadfileFunction = parentState.loadfileFunction;
        this.loaderFunction = parentState.loaderFunction;
        this.dofileFunction = parentState.dofileFunction;
        // Assign to store
        func = threadFunc;
        parent = parentState;
        // Create Thread
        L = LuaDLL.lua_newthread( parent.L );
        // Store thread in registry so Lua's GC cannot collect it while
        // this wrapper is alive.
        threadRef = LuaDLL.luaL_ref( parent.L, LuaIndexes.LUA_REGISTRYINDEX );
    }
    #region IDisposable Members
    /*
     * Releases the registry anchor.  Deliberately does not call
     * base.Dispose(bool): the translator belongs to the parent state.
     */
    public override void Dispose(bool dispose)
    {
        if( dispose )
        {
            LuaDLL.luaL_unref( parent.L, LuaIndexes.LUA_REGISTRYINDEX, threadRef );
        }
    }
    #endregion
    /*
     * Marks the thread as started; the function is actually pushed and
     * run on the next Resume call.
     */
    public void Start()
    {
        if(IsInactive() && !start)
        {
            start = true;
        }
    }
    /*
     * Resumes the coroutine with no arguments and no environment.
     */
    public int Resume()
    {
        return Resume(null, null);
    }
    /*
     * Starts or resumes the coroutine, optionally passing arguments
     * and setting the function's environment table.  Returns the
     * lua_resume status (0 = finished, LUA_YIELD = suspended).
     */
    public int Resume(object[] args, LuaTable env)
    {
        int result = 0;
        int oldTop = LuaDLL.lua_gettop(L);
        // If thread isn't started, it needs to be (re)started: push the
        // function (and its environment, if any) before resuming.
        if( start )
        {
            start = false;
            func.push( L );
            if (env != null)
            {
                env.push(L);
                LuaDLL.lua_setfenv(L, -2);
            }
            result = resume(args, oldTop);
        }
        // If thread is suspended at a yield, just resume it
        else if( IsSuspended() )
        {
            result = resume(args, oldTop);
        }
        return result;
    }
    /*
     * Pushes args (if any) and calls lua_resume; converts any error
     * status (> LUA_YIELD) into a managed exception.
     */
    private int resume(object[] args, int oldTop)
    {
        int nArgs=0;
        // Push args
        if(args!=null)
        {
            nArgs=args.Length;
            for(int i=0;i<args.Length;i++)
            {
                translator.push(L,args[i]);
            }
        }
        // Call func
        int r = 0;
        r = LuaDLL.lua_resume( L, nArgs );
        if( r > (int)LuaThreadStatus.LUA_YIELD )
        {
            // Error: the error message is on top of the thread's stack.
            int top = LuaDLL.lua_gettop(L);
            ThrowExceptionFromError(top);
        }
        return r;
    }
    // True after Start() until the first Resume actually runs the function.
    public bool IsStarted()
    {
        return start;
    }
    // True while the coroutine is stopped at a yield.
    public bool IsSuspended()
    {
        int status = LuaDLL.lua_status( L );
        return (status == (int)LuaThreadStatus.LUA_YIELD);
    }
    // True once the coroutine finished with an error status.
    public bool IsDead()
    {
        int status = LuaDLL.lua_status( L );
        return (status > (int)LuaThreadStatus.LUA_YIELD);
    }
    // True when the coroutine has not been resumed yet (status 0).
    public bool IsInactive()
    {
        int status = LuaDLL.lua_status( L );
        return (status == 0);
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System.Runtime.Serialization;
using System.Management.Automation.Internal;
using System.Security.Permissions;
namespace System.Management.Automation.Remoting
{
/// <summary>
/// This enum defines the error message ids used by the resource manager to get
/// localized messages.
///
/// Related error ids are organized in a pre-defined range of values.
/// </summary>
// NOTE(review): these numeric values presumably appear in serialized
// error records / logs — verify before renumbering any member.  A few
// member names below contain misspellings ("CallBackErrr"); renaming
// them would break source compatibility, so they are left as-is.
internal enum PSRemotingErrorId : uint
{
    // OS related 1-9
    DefaultRemotingExceptionMessage = 0,
    OutOfMemory = 1,
    // Pipeline related range: 10-99
    PipelineIdsDoNotMatch = 10,
    PipelineNotFoundOnServer = 11,
    PipelineStopped = 12,
    // Runspace, Host, UI and RawUI related range: 200-299
    RunspaceAlreadyExists = 200,
    RunspaceIdsDoNotMatch = 201,
    RemoteRunspaceOpenFailed = 202,
    RunspaceCannotBeFound = 203,
    ResponsePromptIdCannotBeFound = 204,
    RemoteHostCallFailed = 205,
    RemoteHostMethodNotImplemented = 206,
    RemoteHostDataEncodingNotSupported = 207,
    RemoteHostDataDecodingNotSupported = 208,
    NestedPipelineNotSupported = 209,
    RelativeUriForRunspacePathNotSupported = 210,
    RemoteHostDecodingFailed = 211,
    MustBeAdminToOverrideThreadOptions = 212,
    RemoteHostPromptForCredentialModifiedCaption = 213,
    RemoteHostPromptForCredentialModifiedMessage = 214,
    RemoteHostReadLineAsSecureStringPrompt = 215,
    RemoteHostGetBufferContents = 216,
    RemoteHostPromptSecureStringPrompt = 217,
    WinPERemotingNotSupported = 218,
    // reserved range: 300-399
    // Encoding/Decoding and fragmentation related range: 400-499
    ReceivedUnsupportedRemoteHostCall = 400,
    ReceivedUnsupportedAction = 401,
    ReceivedUnsupportedDataType = 402,
    MissingDestination = 403,
    MissingTarget = 404,
    MissingRunspaceId = 405,
    MissingDataType = 406,
    MissingCallId = 407,
    MissingMethodName = 408,
    MissingIsStartFragment = 409,
    MissingProperty = 410,
    ObjectIdsNotMatching = 411,
    FragmentIdsNotInSequence = 412,
    ObjectIsTooBig = 413,
    MissingIsEndFragment = 414,
    DeserializedObjectIsNull = 415,
    BlobLengthNotInRange = 416,
    DecodingErrorForErrorRecord = 417,
    DecodingErrorForPipelineStateInfo = 418,
    DecodingErrorForRunspaceStateInfo = 419,
    ReceivedUnsupportedRemotingTargetInterfaceType = 420,
    UnknownTargetClass = 421,
    MissingTargetClass = 422,
    DecodingErrorForRunspacePoolStateInfo = 423,
    DecodingErrorForMinRunspaces = 424,
    DecodingErrorForMaxRunspaces = 425,
    DecodingErrorForPowerShellStateInfo = 426,
    DecodingErrorForThreadOptions = 427,
    CantCastPropertyToExpectedType = 428,
    CantCastRemotingDataToPSObject = 429,
    CantCastCommandToPSObject = 430,
    CantCastParameterToPSObject = 431,
    ObjectIdCannotBeLessThanZero = 432,
    NotEnoughHeaderForRemoteDataObject = 433,
    // reserved range: 500-599
    // Remote Session related range: 600-699
    RemotingDestinationNotForMe = 600,
    ClientNegotiationTimeout = 601,
    ClientNegotiationFailed = 602,
    ServerRequestedToCloseSession = 603,
    ServerNegotiationFailed = 604,
    ServerNegotiationTimeout = 605,
    ClientRequestedToCloseSession = 606,
    FatalErrorCausingClose = 607,
    ClientKeyExchangeFailed = 608,
    ServerKeyExchangeFailed = 609,
    ClientNotFoundCapabilityProperties = 610,
    ServerNotFoundCapabilityProperties = 611,
    // reserved range: 700-799
    // Transport related range: 800-899
    ConnectFailed = 801,
    CloseIsCalled = 802,
    ForceClosed = 803,
    CloseFailed = 804,
    CloseCompleted = 805,
    UnsupportedWaitHandleType = 806,
    ReceivedDataStreamIsNotStdout = 807,
    StdInIsNotOpen = 808,
    NativeWriteFileFailed = 809,
    NativeReadFileFailed = 810,
    InvalidSchemeValue = 811,
    ClientReceiveFailed = 812,
    ClientSendFailed = 813,
    CommandHandleIsNull = 814,
    StdInCannotBeSetToNoWait = 815,
    PortIsOutOfRange = 816,
    ServerProcessExited = 817,
    CannotGetStdInHandle = 818,
    CannotGetStdOutHandle = 819,
    CannotGetStdErrHandle = 820,
    CannotSetStdInHandle = 821,
    CannotSetStdOutHandle = 822,
    CannotSetStdErrHandle = 823,
    InvalidConfigurationName = 824,
    ConnectSkipCheckFailed = 825,
    // Error codes added to support new WSMan Fan-In Model API
    CreateSessionFailed = 851,
    CreateExFailed = 853,
    ConnectExCallBackError = 854,
    SendExFailed = 855,
    SendExCallBackError = 856,
    ReceiveExFailed = 857,
    ReceiveExCallBackError = 858,
    RunShellCommandExFailed = 859,
    RunShellCommandExCallBackError = 860,
    CommandSendExFailed = 861,
    CommandSendExCallBackError = 862,
    CommandReceiveExFailed = 863,
    CommandReceiveExCallBackError = 864,
    CloseExCallBackError = 866,
    // END: Error codes added to support new WSMan Fan-In Model API
    // BEGIN: Error IDs introduced for URI redirection
    RedirectedURINotWellFormatted = 867,
    URIEndPointNotResolved = 868,
    // END: Error IDs introduced for URI redirection
    // BEGIN: Error IDs introduced for Quota Management
    ReceivedObjectSizeExceededMaximumClient = 869,
    ReceivedDataSizeExceededMaximumClient = 870,
    ReceivedObjectSizeExceededMaximumServer = 871,
    ReceivedDataSizeExceededMaximumServer = 872,
    // END: Error IDs introduced for Quota Management
    // BEGIN: Error IDs introduced for startup script
    StartupScriptThrewTerminatingError = 873,
    // END: Error IDs introduced for startup script
    TroubleShootingHelpTopic = 874,
    // BEGIN: Error IDs introduced for disconnect/reconnect
    DisconnectShellExFailed = 875,
    DisconnectShellExCallBackErrr = 876,
    ReconnectShellExFailed = 877,
    ReconnectShellExCallBackErrr = 878,
    // END: Error IDs introduced for disconnect/reconnect
    // Cmdlets related range: 900-999
    RemoteRunspaceInfoHasDuplicates = 900,
    RemoteRunspaceInfoLimitExceeded = 901,
    RemoteRunspaceOpenUnknownState = 902,
    UriSpecifiedNotValid = 903,
    RemoteRunspaceClosed = 904,
    RemoteRunspaceNotAvailableForSpecifiedComputer = 905,
    RemoteRunspaceNotAvailableForSpecifiedRunspaceId = 906,
    StopPSJobWhatIfTarget = 907,
    InvalidJobStateGeneral = 909,
    JobWithSpecifiedNameNotFound = 910,
    JobWithSpecifiedInstanceIdNotFound = 911,
    JobWithSpecifiedSessionIdNotFound = 912,
    JobWithSpecifiedNameNotCompleted = 913,
    JobWithSpecifiedSessionIdNotCompleted = 914,
    JobWithSpecifiedInstanceIdNotCompleted = 915,
    RemovePSJobWhatIfTarget = 916,
    ComputerNameParamNotSupported = 917,
    RunspaceParamNotSupported = 918,
    RemoteRunspaceNotAvailableForSpecifiedName = 919,
    RemoteRunspaceNotAvailableForSpecifiedSessionId = 920,
    ItemNotFoundInRepository = 921,
    CannotRemoveJob = 922,
    NewRunspaceAmbiguousAuthentication = 923,
    WildCardErrorFilePathParameter = 924,
    FilePathNotFromFileSystemProvider = 925,
    FilePathShouldPS1Extension = 926,
    PSSessionConfigurationName = 927,
    PSSessionAppName = 928,
    // Custom Shell commands
    CSCDoubleParameterOutOfRange = 929,
    URIRedirectionReported = 930,
    NoMoreInputWrites = 931,
    InvalidComputerName = 932,
    ProxyAmbiguousAuthentication = 933,
    ProxyCredentialWithoutAccess = 934,
    // Start-PSSession related error codes.
    PushedRunspaceMustBeOpen = 951,
    HostDoesNotSupportPushRunspace = 952,
    RemoteRunspaceHasMultipleMatchesForSpecifiedRunspaceId = 953,
    RemoteRunspaceHasMultipleMatchesForSpecifiedSessionId = 954,
    RemoteRunspaceHasMultipleMatchesForSpecifiedName = 955,
    RemoteRunspaceDoesNotSupportPushRunspace = 956,
    HostInNestedPrompt = 957,
    RemoteHostDoesNotSupportPushRunspace = 958,
    InvalidVMId = 959,
    InvalidVMNameNoVM = 960,
    InvalidVMNameMultipleVM = 961,
    HyperVModuleNotAvailable = 962,
    InvalidUsername = 963,
    InvalidCredential = 964,
    VMSessionConnectFailed = 965,
    InvalidContainerId = 966,
    CannotCreateProcessInContainer = 967,
    CannotTerminateProcessInContainer = 968,
    ContainersFeatureNotEnabled = 969,
    RemoteSessionHyperVSocketServerConstructorFailure = 970,
    ContainerSessionConnectFailed = 973,
    RemoteSessionHyperVSocketClientConstructorSetSocketOptionFailure = 974,
    InvalidVMState = 975,
    // Invoke-Command related error codes.
    InvalidVMIdNotSingle = 981,
    InvalidVMNameNotSingle = 982,
    // SessionState Description related messages
    WsmanMaxRedirectionCountVariableDescription = 1001,
    PSDefaultSessionOptionDescription = 1002,
    PSSenderInfoDescription = 1004,
    // IPC for Background jobs related errors: 2000
    IPCUnknownNodeType = 2001,
    IPCInsufficientDataforElement = 2002,
    IPCWrongAttributeCountForDataElement = 2003,
    IPCOnlyTextExpectedInDataElement = 2004,
    IPCWrongAttributeCountForElement = 2005,
    IPCUnknownElementReceived = 2006,
    IPCSupportsOnlyDefaultAuth = 2007,
    IPCWowComponentNotPresent = 2008,
    IPCServerProcessReportedError = 2100,
    IPCServerProcessExited = 2101,
    IPCErrorProcessingServerData = 2102,
    IPCUnknownCommandGuid = 2103,
    IPCNoSignalForSession = 2104,
    IPCSignalTimedOut = 2105,
    IPCCloseTimedOut = 2106,
    IPCExceptionLaunchingProcess = 2107,
}
/// <summary>
/// Defines the resource base used by remoting errors and provides a
/// convenience helper for producing localized message strings.
/// </summary>
internal static class PSRemotingErrorInvariants
{
    /// <summary>
    /// Convenience method that formats a localized resource string.
    /// </summary>
    /// <param name="resourceString">The string from the resource file.</param>
    /// <param name="args">Optional formatting arguments for the resource string.</param>
    /// <returns>The formatted localized string.</returns>
    internal static string FormatResourceString(string resourceString, params object[] args)
    {
        // Delegate the actual formatting to the shared string utility.
        return StringUtil.Format(resourceString, args);
    }
}
/// <summary>
/// This exception is used by remoting code to indicate a data structure handler related error.
/// </summary>
[Serializable]
public class PSRemotingDataStructureException : RuntimeException
{
    #region Constructors
    /// <summary>
    /// Default constructor.
    /// </summary>
    public PSRemotingDataStructureException()
        : base(PSRemotingErrorInvariants.FormatResourceString(RemotingErrorIdStrings.DefaultRemotingExceptionMessage, typeof(PSRemotingDataStructureException).FullName))
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes a localized string as the error message.
    /// </summary>
    /// <param name="message">
    /// A localized string as an error message.
    /// </param>
    public PSRemotingDataStructureException(string message)
        : base(message)
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes a localized string as the error message, and an inner exception.
    /// </summary>
    /// <param name="message">
    /// A localized string as an error message.
    /// </param>
    /// <param name="innerException">
    /// Inner exception.
    /// </param>
    public PSRemotingDataStructureException(string message, Exception innerException)
        : base(message, innerException)
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes an error id and optional parameters.
    /// </summary>
    /// <param name="resourceString">
    /// The resource string in the base resource file.
    /// </param>
    /// <param name="args">
    /// Optional parameters required to format the resource string.
    /// </param>
    internal PSRemotingDataStructureException(string resourceString, params object[] args)
        : base(PSRemotingErrorInvariants.FormatResourceString(resourceString, args))
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes an inner exception and an error id.
    /// </summary>
    /// <param name="innerException">
    /// Inner exception.
    /// </param>
    /// <param name="resourceString">
    /// The resource string in the base resource file.
    /// </param>
    /// <param name="args">
    /// Optional parameters required to format the resource string.
    /// </param>
    internal PSRemotingDataStructureException(Exception innerException, string resourceString, params object[] args)
        : base(PSRemotingErrorInvariants.FormatResourceString(resourceString, args), innerException)
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor is required by serialization.
    /// </summary>
    /// <param name="info"></param>
    /// <param name="context"></param>
    protected PSRemotingDataStructureException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
    }
    #endregion Constructors
    /// <summary>
    /// Set the default ErrorRecord: category ResourceUnavailable and the
    /// exception's own full type name as the error id.
    /// </summary>
    private void SetDefaultErrorRecord()
    {
        SetErrorCategory(ErrorCategory.ResourceUnavailable);
        SetErrorId(typeof(PSRemotingDataStructureException).FullName);
    }
}
/// <summary>
/// This exception is used by remoting code to indicate an error condition in network operations.
/// </summary>
[Serializable]
public class PSRemotingTransportException : RuntimeException
{
    // Error code from the native transport API call (serialized).
    private int _errorCode;
    // Message reported by the native transport layer (serialized).
    private string _transportMessage;
    #region Constructors
    /// <summary>
    /// This is the default constructor.
    /// </summary>
    public PSRemotingTransportException()
        : base(PSRemotingErrorInvariants.FormatResourceString(RemotingErrorIdStrings.DefaultRemotingExceptionMessage, typeof(PSRemotingTransportException).FullName))
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes a localized error message.
    /// </summary>
    /// <param name="message">
    /// A localized error message.
    /// </param>
    public PSRemotingTransportException(string message)
        : base(message)
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes a localized message and an inner exception.
    /// </summary>
    /// <param name="message">
    /// Localized error message.
    /// </param>
    /// <param name="innerException">
    /// Inner exception.
    /// </param>
    public PSRemotingTransportException(string message, Exception innerException)
        : base(message, innerException)
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes an error id and optional parameters.
    /// </summary>
    /// <param name="errorId">
    /// The error id in the base resource file.
    /// </param>
    /// <param name="resourceString">
    /// The resource string in the base resource file.
    /// </param>
    /// <param name="args">
    /// Optional parameters required to format the resource string.
    /// </param>
    internal PSRemotingTransportException(PSRemotingErrorId errorId, string resourceString, params object[] args)
        : base(PSRemotingErrorInvariants.FormatResourceString(resourceString, args))
    {
        SetDefaultErrorRecord();
        _errorCode = (int)errorId;
    }
    /// <summary>
    /// This constructor takes an inner exception and an error id.
    /// </summary>
    /// <param name="innerException">
    /// Inner exception.
    /// </param>
    /// <param name="resourceString">
    /// The resource string in the base resource file.
    /// </param>
    /// <param name="args">
    /// Optional parameters required to format the resource string.
    /// </param>
    internal PSRemotingTransportException(Exception innerException, string resourceString, params object[] args)
        : base(PSRemotingErrorInvariants.FormatResourceString(resourceString, args), innerException)
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor is required by serialization.
    /// </summary>
    /// <param name="info"></param>
    /// <param name="context"></param>
    /// <exception cref="ArgumentNullException">
    /// 1. info is null.
    /// </exception>
    protected PSRemotingTransportException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
        if (info == null)
        {
            throw new PSArgumentNullException("info");
        }
        _errorCode = info.GetInt32("ErrorCode");
        _transportMessage = info.GetString("TransportMessage");
    }
    #endregion Constructors
    /// <summary>
    /// Serializes the exception data.
    /// </summary>
    /// <param name="info">Serialization information.</param>
    /// <param name="context">Streaming context.</param>
    [SecurityPermissionAttribute(SecurityAction.Demand, SerializationFormatter = true)]
    public override void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        if (info == null)
        {
            throw new PSArgumentNullException("info");
        }
        base.GetObjectData(info, context);
        // If there are simple fields, serialize them with info.AddValue
        info.AddValue("ErrorCode", _errorCode);
        info.AddValue("TransportMessage", _transportMessage);
    }
    /// <summary>
    /// Set the default ErrorRecord.
    /// </summary>
    protected void SetDefaultErrorRecord()
    {
        SetErrorCategory(ErrorCategory.ResourceUnavailable);
        // NOTE(review): this uses PSRemotingDataStructureException's type
        // name as the error id even though we are in
        // PSRemotingTransportException — it looks like a copy/paste slip,
        // but changing it would alter the FullyQualifiedErrorId that
        // scripts may match on; confirm before fixing.
        SetErrorId(typeof(PSRemotingDataStructureException).FullName);
    }
    /// <summary>
    /// The error code from native library API call.
    /// </summary>
    public int ErrorCode
    {
        get
        {
            return _errorCode;
        }
        set
        {
            _errorCode = value;
        }
    }
    /// <summary>
    /// This is the message from the native transport layer.
    /// </summary>
    public string TransportMessage
    {
        get
        {
            return _transportMessage;
        }
        set
        {
            _transportMessage = value;
        }
    }
}
/// <summary>
/// This exception is used by PowerShell's remoting infrastructure to notify a URI redirection
/// exception.
/// </summary>
[Serializable]
public class PSRemotingTransportRedirectException : PSRemotingTransportException
{
    #region Constructor
    /// <summary>
    /// This is the default constructor.
    /// </summary>
    public PSRemotingTransportRedirectException()
        : base(PSRemotingErrorInvariants.FormatResourceString(RemotingErrorIdStrings.DefaultRemotingExceptionMessage,
            typeof(PSRemotingTransportRedirectException).FullName))
    {
        SetDefaultErrorRecord();
    }
    /// <summary>
    /// This constructor takes a localized error message.
    /// </summary>
    /// <param name="message">
    /// A localized error message.
    /// </param>
    public PSRemotingTransportRedirectException(string message)
        : base(message)
    {
    }
    /// <summary>
    /// This constructor takes a localized message and an inner exception.
    /// </summary>
    /// <param name="message">
    /// Localized error message.
    /// </param>
    /// <param name="innerException">
    /// Inner exception.
    /// </param>
    public PSRemotingTransportRedirectException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
    /// <summary>
    /// This constructor takes an inner exception and an error id.
    /// </summary>
    /// <param name="innerException">
    /// Inner exception.
    /// </param>
    /// <param name="resourceString">
    /// The resource string in the base resource file.
    /// </param>
    /// <param name="args">
    /// Optional parameters required to format the resource string.
    /// </param>
    internal PSRemotingTransportRedirectException(Exception innerException, string resourceString, params object[] args)
        : base(innerException, resourceString, args)
    {
    }
    /// <summary>
    /// This constructor is required by serialization.
    /// </summary>
    /// <param name="info"></param>
    /// <param name="context"></param>
    /// <exception cref="ArgumentNullException">
    /// 1. info is null.
    /// </exception>
    protected PSRemotingTransportRedirectException(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
        if (info == null)
        {
            throw new PSArgumentNullException("info");
        }
        RedirectLocation = info.GetString("RedirectLocation");
    }
    /// <summary>
    /// This constructor takes a redirect URI, error id and optional parameters.
    /// </summary>
    /// <param name="redirectLocation">
    /// String specifying a redirect location.
    /// </param>
    /// <param name="errorId">
    /// The error id in the base resource file.
    /// </param>
    /// <param name="resourceString">
    /// The resource string in the base resource file.
    /// </param>
    /// <param name="args">
    /// Optional parameters required to format the resource string.
    /// </param>
    internal PSRemotingTransportRedirectException(string redirectLocation, PSRemotingErrorId errorId, string resourceString, params object[] args)
        : base(errorId, resourceString, args)
    {
        RedirectLocation = redirectLocation;
    }
    #endregion
    #region Public overrides
    /// <summary>
    /// Serializes the exception data.
    /// </summary>
    /// <param name="info">Serialization information.</param>
    /// <param name="context">Streaming context.</param>
    [SecurityPermissionAttribute(SecurityAction.Demand, SerializationFormatter = true)]
    public override void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        if (info == null)
        {
            throw new PSArgumentNullException("info");
        }
        base.GetObjectData(info, context);
        // If there are simple fields, serialize them with info.AddValue
        info.AddValue("RedirectLocation", RedirectLocation);
    }
    #endregion
    #region Properties
    /// <summary>
    /// String specifying a redirect location.
    /// </summary>
    public string RedirectLocation { get; }
    #endregion
}
/// <summary>
/// This exception is used by PowerShell Direct errors.
/// </summary>
[Serializable]
public class PSDirectException : RuntimeException
{
    #region Constructor
    /// <summary>
    /// This constructor takes a localized string as the error message.
    /// </summary>
    /// <param name="message">
    /// A localized string as an error message.
    /// </param>
    public PSDirectException(string message)
        : base(message)
    {
    }
    #endregion Constructor
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.