context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
using UnityEngine;
using System.Collections.Generic;
/// <summary>
/// Manager that instantiates and recycles the pasta objects
/// </summary>
public class ROTD_PastaManager : MonoBehaviour
{
    /// <summary>
    /// Pool of every pasta instance created at startup; instances are recycled, never destroyed.
    /// </summary>
    private List<ROTD_Pasta> _pool = new List<ROTD_Pasta>();
    /// <summary>
    /// How many pizza-type pastas are currently alive.
    /// </summary>
    private int _alivePizzaCount;
    /// <summary>
    /// Score currently awarded for killing a single pasta.
    /// </summary>
    private int _scoreValue;
    /// <summary>
    /// Reference to the game manager
    /// </summary>
    public ROTD_GameManager gameManager;
    /// <summary>
    /// The maximum number of live pizzas active at any time
    /// </summary>
    public int maxLivePizzas;
    /// <summary>
    /// Prefab of the pizza animation
    /// </summary>
    public GameObject pizzaPrefab;
    /// <summary>
    /// The location of where the pasta should be spawned from the oven
    /// </summary>
    public Vector3 ovenSpawnPoint;
    /// <summary>
    /// A random offset from the oven spawn point so that overlapping doesn't get too bad
    /// </summary>
    public Vector2 ovenSpawnPointRandomOffset;
    /// <summary>
    /// An array of spawn points around the kitchen
    /// </summary>
    public Vector3 [] foregroundSpawnPoints;
    /// <summary>
    /// Offset along the Y axis for the score FX above each pasta kill
    /// </summary>
    public float scoreYOffset;
    /// <summary>
    /// The amount of score value to increase after each kill
    /// </summary>
    public int pastaScoreIncrement;
    /// <summary>
    /// The current score value of a pasta kill.
    /// NOTE: reading this property mutates state — each read bumps the score by
    /// <see cref="pastaScoreIncrement"/> before returning it (capped below 10 million,
    /// which is too large for the score animation).
    /// </summary>
    public int CurrentPastaScore
    {
        get
        {
            _scoreValue = Mathf.Clamp(_scoreValue + pastaScoreIncrement, 0, 9999999);
            return _scoreValue;
        }
    }
    /// <summary>
    /// Called once at the start of the scene; pre-instantiates the whole pizza pool.
    /// </summary>
    void Start()
    {
        for (int i = 0; i < maxLivePizzas; i++)
        {
            // Instantiate a pooled pizza and parent it under this manager.
            GameObject clone = (GameObject)Instantiate(pizzaPrefab, Vector3.zero, Quaternion.identity);
            clone.transform.parent = this.transform;
            // Hook the pasta component up to the game manager and pool it.
            ROTD_Pasta pooled = clone.GetComponent<ROTD_Pasta>();
            pooled.Initialize(gameManager);
            _pool.Add(pooled);
        }
    }
    /// <summary>
    /// Called every frame from the game manager: ticks live pastas, then recycles
    /// dead ones up to the current difficulty / pool limits.
    /// </summary>
    public void FrameUpdate ()
    {
        // First pass: tick every living pasta and count live pizzas.
        _alivePizzaCount = 0;
        foreach (ROTD_Pasta pooled in _pool)
        {
            if (pooled.State == ROTD_Pasta.STATE.Dead)
                continue;
            pooled.FrameUpdate();
            if (pooled.pastaType == ROTD_Pasta.TYPE.Pizza)
                _alivePizzaCount++;
        }
        // Second pass: respawn dead pizzas while under both the hard pool cap and
        // the game manager's current difficulty allowance.
        foreach (ROTD_Pasta pooled in _pool)
        {
            if (pooled.State != ROTD_Pasta.STATE.Dead)
                continue;
            if (pooled.pastaType != ROTD_Pasta.TYPE.Pizza)
                continue;
            int allowed = Mathf.Min(maxLivePizzas, gameManager.GetCurrentPastaTypeCount(ROTD_Pasta.TYPE.Pizza));
            if (_alivePizzaCount < allowed)
            {
                // Recycle this dead instance instead of instantiating a new one.
                pooled.ReSpawn();
                _alivePizzaCount++;
            }
        }
    }
    /// <summary>
    /// Deactivates a pasta so that it can be reused
    /// </summary>
    /// <param name="pasta">Pasta object to "kill"</param>
    public void KillPasta(ROTD_Pasta pasta)
    {
        pasta.State = ROTD_Pasta.STATE.Dead;
    }
    /// <summary>
    /// Gets a random spawn position: 1-in-3 chance (always for the very first pasta)
    /// it comes out of the oven, otherwise a random kitchen foreground point.
    /// </summary>
    /// <param name="fromOven">Whether the pasta should come from the oven</param>
    /// <returns>Spawn Position</returns>
    public Vector3 GetRandomRespawnPosition(out bool fromOven)
    {
        fromOven = UnityEngine.Random.Range(0, 3) == 0 || gameManager.TotalPastaKills == 0;
        if (!fromOven)
        {
            // Pick one of the preset kitchen spawn points.
            return foregroundSpawnPoints[UnityEngine.Random.Range(0, foregroundSpawnPoints.Length)];
        }
        // Oven spawn: open the door and jitter the point so pastas don't stack.
        gameManager.room.OpenOven();
        Vector3 point = ovenSpawnPoint;
        point.x += UnityEngine.Random.Range(0, ovenSpawnPointRandomOffset.x);
        point.y -= UnityEngine.Random.Range(0, ovenSpawnPointRandomOffset.y);
        // Z mirrors Y — presumably used for draw-order sorting; TODO confirm.
        point.z = point.y;
        return point;
    }
    /// <summary>
    /// Resets the pastas back to their starting states
    /// </summary>
    public void ResetToStart()
    {
        foreach (ROTD_Pasta pooled in _pool)
        {
            pooled.State = ROTD_Pasta.STATE.Dead;
        }
        // Base per-kill score at the start of a round.
        _scoreValue = 2;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Impl.Datastream
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
using Apache.Ignite.Core.Datastream;
using Apache.Ignite.Core.Impl.Binary;
using Apache.Ignite.Core.Impl.Common;
/// <summary>
/// Data streamer internal interface to get rid of generics.
/// </summary>
/// <summary>
/// Data streamer internal interface to get rid of generics: lets native-code
/// callbacks reach a streamer instance without knowing its TK/TV arguments.
/// Implemented by <c>DataStreamerImpl&lt;TK, TV&gt;</c>.
/// </summary>
internal interface IDataStreamer
{
    /// <summary>
    /// Callback invoked on topology size change.
    /// </summary>
    /// <param name="topVer">New topology version.</param>
    /// <param name="topSize">New topology size.</param>
    void TopologyChange(long topVer, int topSize);
}
/// <summary>
/// Data streamer implementation.
/// </summary>
internal class DataStreamerImpl<TK, TV> : PlatformDisposableTargetAdapter, IDataStreamer, IDataStreamer<TK, TV>
{
#pragma warning disable 0420
/** Policy: continue. */
internal const int PlcContinue = 0;
/** Policy: close. */
internal const int PlcClose = 1;
/** Policy: cancel and close. */
internal const int PlcCancelClose = 2;
/** Policy: flush. */
internal const int PlcFlush = 3;
/** Operation: update. */
private const int OpUpdate = 1;
/** Operation: set receiver. */
private const int OpReceiver = 2;
/** */
private const int OpAllowOverwrite = 3;
/** */
private const int OpSetAllowOverwrite = 4;
/** */
private const int OpSkipStore = 5;
/** */
private const int OpSetSkipStore = 6;
/** */
private const int OpPerNodeBufferSize = 7;
/** */
private const int OpSetPerNodeBufferSize = 8;
/** */
private const int OpPerNodeParallelOps = 9;
/** */
private const int OpSetPerNodeParallelOps = 10;
/** */
private const int OpListenTopology = 11;
/** */
private const int OpGetTimeout = 12;
/** */
private const int OpSetTimeout = 13;
/** Cache name. */
private readonly string _cacheName;
/** Lock. */
private readonly ReaderWriterLockSlim _rwLock = new ReaderWriterLockSlim();
/** Closed event. */
private readonly ManualResetEventSlim _closedEvt = new ManualResetEventSlim(false);
/** Close future. */
private readonly Future<object> _closeFut = new Future<object>();
/** GC handle to this streamer. */
private readonly long _hnd;
/** Topology version. */
private long _topVer;
/** Topology size. */
private int _topSize = 1;
/** Buffer send size. */
private volatile int _bufSndSize;
/** Current data streamer batch. */
private volatile DataStreamerBatch<TK, TV> _batch;
/** Flusher. */
private readonly Flusher<TK, TV> _flusher;
/** Receiver. */
private volatile IStreamReceiver<TK, TV> _rcv;
/** Receiver handle. */
private long _rcvHnd;
/** Receiver binary mode. */
private readonly bool _keepBinary;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="target">Target.</param>
/// <param name="marsh">Marshaller.</param>
/// <param name="cacheName">Cache name.</param>
/// <param name="keepBinary">Binary flag.</param>
public DataStreamerImpl(IPlatformTargetInternal target, Marshaller marsh, string cacheName, bool keepBinary)
    : base(target)
{
    _cacheName = cacheName;
    _keepBinary = keepBinary;
    // Create empty batch.
    _batch = new DataStreamerBatch<TK, TV>();
    // Allocate GC handle so that this data streamer could be easily dereferenced from native code.
    // A WeakReference (not 'this') is registered so the handle does not keep the streamer alive.
    WeakReference thisRef = new WeakReference(this);
    _hnd = marsh.Ignite.HandleRegistry.Allocate(thisRef);
    // Start topology listening. This call will ensure that buffer size member is updated.
    DoOutInOp(OpListenTopology, _hnd);
    // Membar to ensure fields initialization before leaving constructor.
    Thread.MemoryBarrier();
    // Start flusher after everything else is initialized.
    // The flusher also only holds the weak reference, so it cannot block collection either.
    _flusher = new Flusher<TK, TV>(thisRef);
    _flusher.RunThread();
}
/** <inheritDoc /> */
public string CacheName
{
get { return _cacheName; }
}
/** <inheritDoc /> */
public bool AllowOverwrite
{
get
{
_rwLock.EnterReadLock();
try
{
ThrowIfDisposed();
return DoOutInOp(OpAllowOverwrite) == True;
}
finally
{
_rwLock.ExitReadLock();
}
}
set
{
_rwLock.EnterWriteLock();
try
{
ThrowIfDisposed();
DoOutInOp(OpSetAllowOverwrite, value ? True : False);
}
finally
{
_rwLock.ExitWriteLock();
}
}
}
/** <inheritDoc /> */
public bool SkipStore
{
get
{
_rwLock.EnterReadLock();
try
{
ThrowIfDisposed();
return DoOutInOp(OpSkipStore) == True;
}
finally
{
_rwLock.ExitReadLock();
}
}
set
{
_rwLock.EnterWriteLock();
try
{
ThrowIfDisposed();
DoOutInOp(OpSetSkipStore, value ? True : False);
}
finally
{
_rwLock.ExitWriteLock();
}
}
}
/** <inheritDoc /> */
public int PerNodeBufferSize
{
    get
    {
        _rwLock.EnterReadLock();
        try
        {
            ThrowIfDisposed();
            return (int) DoOutInOp(OpPerNodeBufferSize);
        }
        finally
        {
            _rwLock.ExitReadLock();
        }
    }
    set
    {
        _rwLock.EnterWriteLock();
        try
        {
            ThrowIfDisposed();
            DoOutInOp(OpSetPerNodeBufferSize, value);
            // Keep the local send threshold in sync: a batch is flushed once it
            // holds roughly one buffer's worth of entries per known node.
            _bufSndSize = _topSize * value;
        }
        finally
        {
            _rwLock.ExitWriteLock();
        }
    }
}
/** <inheritDoc /> */
public int PerNodeParallelOperations
{
get
{
_rwLock.EnterReadLock();
try
{
ThrowIfDisposed();
return (int) DoOutInOp(OpPerNodeParallelOps);
}
finally
{
_rwLock.ExitReadLock();
}
}
set
{
_rwLock.EnterWriteLock();
try
{
ThrowIfDisposed();
DoOutInOp(OpSetPerNodeParallelOps, value);
}
finally
{
_rwLock.ExitWriteLock();
}
}
}
/** <inheritDoc /> */
public long AutoFlushFrequency
{
get
{
_rwLock.EnterReadLock();
try
{
ThrowIfDisposed();
return _flusher.Frequency;
}
finally
{
_rwLock.ExitReadLock();
}
}
set
{
_rwLock.EnterWriteLock();
try
{
ThrowIfDisposed();
_flusher.Frequency = value;
}
finally
{
_rwLock.ExitWriteLock();
}
}
}
/** <inheritDoc /> */
public Task Task
{
get
{
return _closeFut.Task;
}
}
/** <inheritDoc /> */
public IStreamReceiver<TK, TV> Receiver
{
get
{
ThrowIfDisposed();
return _rcv;
}
set
{
IgniteArgumentCheck.NotNull(value, "value");
var handleRegistry = Marshaller.Ignite.HandleRegistry;
_rwLock.EnterWriteLock();
try
{
ThrowIfDisposed();
if (_rcv == value)
return;
var rcvHolder = new StreamReceiverHolder(value,
(rec, grid, cache, stream, keepBinary) =>
StreamReceiverHolder.InvokeReceiver((IStreamReceiver<TK, TV>) rec, grid, cache, stream,
keepBinary));
var rcvHnd0 = handleRegistry.Allocate(rcvHolder);
try
{
DoOutOp(OpReceiver, w =>
{
w.WriteLong(rcvHnd0);
w.WriteObject(rcvHolder);
});
}
catch (Exception)
{
handleRegistry.Release(rcvHnd0);
throw;
}
if (_rcv != null)
handleRegistry.Release(_rcvHnd);
_rcv = value;
_rcvHnd = rcvHnd0;
}
finally
{
_rwLock.ExitWriteLock();
}
}
}
/** <inheritDoc /> */
public Task AddData(TK key, TV val)
{
    ThrowIfDisposed();
    // Only the key is validated here; the value is passed through as-is.
    IgniteArgumentCheck.NotNull(key, "key");
    return Add0(new DataStreamerEntry<TK, TV>(key, val), 1);
}
/** <inheritDoc /> */
public Task AddData(KeyValuePair<TK, TV> pair)
{
    ThrowIfDisposed();
    return Add0(new DataStreamerEntry<TK, TV>(pair.Key, pair.Value), 1);
}
/** <inheritDoc /> */
public Task AddData(ICollection<KeyValuePair<TK, TV>> entries)
{
    ThrowIfDisposed();
    IgniteArgumentCheck.NotNull(entries, "entries");
    // The whole collection counts as entries.Count items towards the flush threshold.
    return Add0(entries, entries.Count);
}
/** <inheritDoc /> */
public Task RemoveData(TK key)
{
    ThrowIfDisposed();
    IgniteArgumentCheck.NotNull(key, "key");
    // Removals travel through the same batching pipeline as adds.
    return Add0(new DataStreamerRemoveEntry<TK>(key), 1);
}
/** <inheritDoc /> */
public void TryFlush()
{
    ThrowIfDisposed();
    // Volatile read: _batch becomes null permanently once the streamer starts closing.
    DataStreamerBatch<TK, TV> batch0 = _batch;
    if (batch0 != null)
        // Fire-and-forget flush: do not wait for completion.
        Flush0(batch0, false, PlcFlush);
}
/** <inheritDoc /> */
public void Flush()
{
    ThrowIfDisposed();
    DataStreamerBatch<TK, TV> batch0 = _batch;
    if (batch0 != null)
        // Blocking flush: waits for the batch's futures to complete.
        Flush0(batch0, true, PlcFlush);
    else
    {
        // Batch is null, i.e. data streamer is closing. Wait for close to complete.
        _closedEvt.Wait();
    }
}
/** <inheritDoc /> */
public void Close(bool cancel)
{
    // Stop the background auto-flusher first so it cannot race with the close sequence.
    _flusher.Stop();
    while (true)
    {
        DataStreamerBatch<TK, TV> batch0 = _batch;
        if (batch0 == null)
        {
            // Wait for concurrent close to finish.
            _closedEvt.Wait();
            return;
        }
        // With a close policy, Flush0 CAS-es _batch to null; only the thread that
        // wins that CAS (returns true) performs the cleanup below. Losers loop
        // and fall into the null-batch branch above.
        if (Flush0(batch0, true, cancel ? PlcCancelClose : PlcClose))
        {
            _closeFut.OnDone(null, null);
            _rwLock.EnterWriteLock();
            try
            {
                base.Dispose(true);
                if (_rcv != null)
                    Marshaller.Ignite.HandleRegistry.Release(_rcvHnd);
                _closedEvt.Set();
            }
            finally
            {
                _rwLock.ExitWriteLock();
            }
            // Release the streamer's own GC handle outside the lock.
            Marshaller.Ignite.HandleRegistry.Release(_hnd);
            break;
        }
    }
}
/** <inheritDoc /> */
public IDataStreamer<TK1, TV1> WithKeepBinary<TK1, TV1>()
{
    if (!_keepBinary)
    {
        // Not yet in binary mode: obtain a binary-mode streamer for the same cache.
        return Marshaller.Ignite.GetDataStreamer<TK1, TV1>(_cacheName, true);
    }
    // Already in binary mode: the requested generic arguments must match this instance.
    var sameStreamer = this as IDataStreamer<TK1, TV1>;
    if (sameStreamer == null)
        throw new InvalidOperationException(
            "Can't change type of binary streamer. WithKeepBinary has been called on an instance of " +
            "binary streamer with incompatible generic arguments.");
    return sameStreamer;
}
/** <inheritDoc /> */
public TimeSpan Timeout
{
get
{
_rwLock.EnterReadLock();
try
{
ThrowIfDisposed();
return BinaryUtils.LongToTimeSpan(DoOutInOp(OpGetTimeout));
}
finally
{
_rwLock.ExitReadLock();
}
}
set
{
_rwLock.EnterWriteLock();
try
{
ThrowIfDisposed();
DoOutInOp(OpSetTimeout, (long) value.TotalMilliseconds);
}
finally
{
_rwLock.ExitWriteLock();
}
}
}
/** <inheritDoc /> */
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes")]
protected override void Dispose(bool disposing)
{
    if (disposing)
        Close(false); // Normal dispose: do not cancel
    else
    {
        // Finalizer: just close Java streamer
        try
        {
            if (_batch != null)
                _batch.Send(this, PlcCancelClose);
        }
        // ReSharper disable once EmptyGeneralCatchClause
        catch (Exception)
        {
            // Finalizers should never throw
        }
        Marshaller.Ignite.HandleRegistry.Release(_hnd, true);
        // NOTE(review): _rcvHnd stays 0 when no receiver was ever assigned —
        // confirm HandleRegistry.Release tolerates an unallocated handle id.
        Marshaller.Ignite.HandleRegistry.Release(_rcvHnd, true);
    }
    base.Dispose(false);
}
/** <inheritDoc /> */
~DataStreamerImpl()
{
    // Finalizer path: release native resources without touching managed state.
    Dispose(false);
}
/** <inheritDoc /> */
public void TopologyChange(long topVer, int topSize)
{
    _rwLock.EnterWriteLock();
    try
    {
        ThrowIfDisposed();
        // Ignore stale or duplicate notifications: only move forward in topology history.
        if (_topVer < topVer)
        {
            _topVer = topVer;
            _topSize = topSize > 0 ? topSize : 1; // Do not set to 0 to avoid 0 buffer size.
            // Recompute the flush threshold: per-node buffer size times node count.
            _bufSndSize = (int) (_topSize * DoOutInOp(OpPerNodeBufferSize));
        }
    }
    finally
    {
        _rwLock.ExitWriteLock();
    }
}
/// <summary>
/// Internal add/remove routine.
/// </summary>
/// <param name="val">Value.</param>
/// <param name="cnt">Items count.</param>
/// <returns>Future.</returns>
private Task Add0(object val, int cnt)
{
    int bufSndSize0 = _bufSndSize;
    Debug.Assert(bufSndSize0 > 0);
    while (true)
    {
        var batch0 = _batch;
        // _batch is set to null permanently once the streamer is closed.
        if (batch0 == null)
            throw new InvalidOperationException("Data streamer is stopped.");
        int size = batch0.Add(val, cnt);
        if (size == -1)
        {
            // Batch is blocked, perform CAS.
            // Install a fresh batch chained to the old one and retry; losing the
            // CAS is fine — some other thread already installed a new batch.
            Interlocked.CompareExchange(ref _batch,
                new DataStreamerBatch<TK, TV>(batch0), batch0);
            continue;
        }
        if (size >= bufSndSize0)
            // Batch is too big, schedule flush.
            Flush0(batch0, false, PlcContinue);
        return batch0.Task;
    }
}
/// <summary>
/// Internal flush routine.
/// </summary>
/// <param name="curBatch">Batch to flush (expected to be the current batch).</param>
/// <param name="wait">Whether to wait for flush to complete.</param>
/// <param name="plc">Flush policy: PlcContinue, PlcFlush, PlcClose or PlcCancelClose.</param>
/// <returns>Whether this call was able to CAS previous batch</returns>
private bool Flush0(DataStreamerBatch<TK, TV> curBatch, bool wait, int plc)
{
    // 1. Try setting new current batch to help further adders.
    //    For close policies the replacement is null, which permanently stops the streamer.
    bool res = Interlocked.CompareExchange(ref _batch,
        (plc == PlcContinue || plc == PlcFlush) ?
        new DataStreamerBatch<TK, TV>(curBatch) : null, curBatch) == curBatch;
    // 2. Perform actual send.
    curBatch.Send(this, plc);
    if (wait)
        // 3. Wait for all futures to finish.
        curBatch.AwaitCompletion();
    return res;
}
/// <summary>
/// Performs a streamer update operation under the read lock.
/// </summary>
/// <param name="action">Writer action that serializes the update payload.</param>
internal void Update(Action<BinaryWriter> action)
{
    _rwLock.EnterReadLock();
    try
    {
        ThrowIfDisposed();
        DoOutOp(OpUpdate, action);
    }
    finally
    {
        _rwLock.ExitReadLock();
    }
}
/// <summary>
/// Flusher: background worker that periodically calls TryFlush on the owning
/// streamer. It holds only a weak reference to the streamer, so it never keeps
/// the streamer alive; it exits once the target is collected or Stop is called.
/// </summary>
private class Flusher<TK1, TV1>
{
    /** State: running. */
    private const int StateRunning = 0;
    /** State: stopping. */
    private const int StateStopping = 1;
    /** State: stopped. */
    private const int StateStopped = 2;
    /** Data streamer. */
    [SuppressMessage("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields",
        Justification = "Incorrect warning")]
    private readonly WeakReference _ldrRef;
    /** Finish flag. */
    [SuppressMessage("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields",
        Justification = "Incorrect warning")]
    private int _state;
    /** Flush frequency. */
    [SuppressMessage("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields",
        Justification = "Incorrect warning")]
    private long _freq;
    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="ldrRef">Data streamer weak reference.</param>
    public Flusher(WeakReference ldrRef)
    {
        _ldrRef = ldrRef;
        // The lock doubles as a memory barrier so the worker thread sees the state.
        lock (this)
        {
            _state = StateRunning;
        }
    }
    /// <summary>
    /// Main flusher routine.
    /// </summary>
    private void Run()
    {
        bool force = false;
        long curFreq = 0;
        try
        {
            while (true)
            {
                // Flush when periodic flushing is enabled (curFreq > 0), or once
                // ('force') right after the frequency has been changed.
                if (curFreq > 0 || force)
                {
                    var ldr = _ldrRef.Target as DataStreamerImpl<TK1, TV1>;
                    // Streamer has been garbage-collected: nothing left to flush.
                    if (ldr == null)
                        return;
                    ldr.TryFlush();
                    force = false;
                }
                lock (this)
                {
                    // Stop immediately.
                    if (_state == StateStopping)
                        return;
                    if (curFreq == _freq)
                    {
                        // Frequency is unchanged
                        if (curFreq == 0)
                            // Just wait for a second and re-try.
                            Monitor.Wait(this, 1000);
                        else
                        {
                            // Calculate remaining time.
                            DateTime now = DateTime.Now;
                            long ticks;
                            try
                            {
                                ticks = now.AddMilliseconds(curFreq).Ticks - now.Ticks;
                                if (ticks > int.MaxValue)
                                    ticks = int.MaxValue;
                            }
                            catch (ArgumentOutOfRangeException)
                            {
                                // Handle possible overflow.
                                ticks = int.MaxValue;
                            }
                            Monitor.Wait(this, TimeSpan.FromTicks(ticks));
                        }
                    }
                    else
                    {
                        // Frequency changed under us: flush once immediately,
                        // unless periodic flushing was previously disabled (0).
                        if (curFreq != 0)
                            force = true;
                        curFreq = _freq;
                    }
                }
            }
        }
        finally
        {
            // Let streamer know about stop.
            lock (this)
            {
                _state = StateStopped;
                Monitor.PulseAll(this);
            }
        }
    }
    /// <summary>
    /// Flush frequency in milliseconds; 0 disables periodic flushing.
    /// </summary>
    public long Frequency
    {
        get
        {
            // Interlocked read for 64-bit atomicity on 32-bit platforms.
            return Interlocked.Read(ref _freq);
        }
        set
        {
            lock (this)
            {
                if (_freq != value)
                {
                    _freq = value;
                    // Wake the worker so it picks up the new frequency right away.
                    Monitor.PulseAll(this);
                }
            }
        }
    }
    /// <summary>
    /// Stop flusher. Blocks until the worker thread acknowledges the stop.
    /// </summary>
    public void Stop()
    {
        lock (this)
        {
            if (_state == StateRunning)
            {
                _state = StateStopping;
                Monitor.PulseAll(this);
            }
            while (_state != StateStopped)
                Monitor.Wait(this);
        }
    }
    /// <summary>
    /// Runs the flusher thread.
    /// </summary>
    public void RunThread()
    {
        TaskRunner.Run(Run);
    }
}
#pragma warning restore 0420
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
namespace System.Linq.Tests.LegacyTests
{
/// <summary>
/// Extension helpers for checking which argument caused an ArgumentException.
/// </summary>
public static class Extension
{
    /// <summary>
    /// Verify whether the ArgumentException is thrown due to the expected argument input being invalid.
    /// </summary>
    /// <param name="ex">Caught exception.</param>
    /// <param name="expected">Expected parameter name.</param>
    /// <returns>True when the exception's parameter name matches.</returns>
    public static bool CompareParamName(this ArgumentException ex, string expected)
    {
#if SILVERLIGHT
#if SLRESOURCES
        // Silverlight with resources: the name is only available at the end of the message.
        return ex.Message.Substring(ex.Message.LastIndexOf(": ") + 2) == expected;
#else
        // Silverlight without resources: no way to check, accept anything.
        return true;
#endif
#else
        return string.Equals(ex.ParamName, expected, StringComparison.Ordinal);
#endif
    }
    /// <summary>
    /// Verify whether the ArgumentNullException is thrown due to the expected argument input being null.
    /// </summary>
    /// <param name="ex">Caught exception.</param>
    /// <param name="expected">Expected parameter name.</param>
    /// <returns>True when the exception's parameter name matches.</returns>
    public static bool CompareParamName(this ArgumentNullException ex, string expected)
    {
#if SILVERLIGHT
#if SLRESOURCES
        // Silverlight with resources: the name is only available at the end of the message.
        return ex.Message.Substring(ex.Message.LastIndexOf(": ") + 2) == expected;
#else
        // Silverlight without resources: no way to check, accept anything.
        return true;
#endif
#else
        return string.Equals(ex.ParamName, expected, StringComparison.Ordinal);
#endif
    }
}
public static class Verification
{
    /// <summary>
    /// Compare two strings using the current culture.
    /// </summary>
    /// <returns>0 when equal, otherwise a nonzero culture-sensitive ordering value.</returns>
    public static int Allequal(string expected, string actual)
    {
        return string.Compare(expected, actual, StringComparison.CurrentCulture);
    }
    /// <summary>
    /// Compare two IEnumerable to see whether they contain the same data (orderless and use the default EqualityComparer)
    /// </summary>
    /// <typeparam name="T"></typeparam>
    /// <param name="expected"></param>
    /// <param name="actual"></param>
    /// <returns>return 0 if the two sets are equal, otherwise return 1</returns>
    /// <remarks>The order of the elements doesn't really matter when PLINQ team runs these tests.
    /// The following verification functions will be used when PLINQ team runs our tests.
    /// These verification functions ignore the order of the elements.</remarks>
    public static int Allequal<T>(IEnumerable<T> expected, IEnumerable<T> actual)
    {
        return AllequalComparer(expected, actual, EqualityComparer<T>.Default);
    }
    /// <summary>
    /// Compare two IEnumerable to see whether they contain the same data (orderless and use the specific IEqualityComparer)
    /// </summary>
    /// <typeparam name="T"></typeparam>
    /// <param name="expected"></param>
    /// <param name="actual"></param>
    /// <param name="comparer"></param>
    /// <returns>return 0 if the two sets are equal, otherwise return 1</returns>
    /// <remarks>The order of the elements doesn't really matter when PLINQ team runs these tests.
    /// The following verification functions will be used when PLINQ team runs our tests.
    /// These verification functions ignore the order of the elements.
    /// NOTE(review): the check is asymmetric — each actual element removes at most one
    /// matching expected element, and actual elements with no match are silently ignored;
    /// only leftover expected elements cause a failure. Confirm this is intended.</remarks>
    public static int AllequalComparer<T>(IEnumerable<T> expected, IEnumerable<T> actual, IEqualityComparer<T> comparer)
    {
        if ((expected == null) && (actual == null)) return 0;
        // Exactly one side is null: report the mismatch and fail.
        if ((expected == null) || (actual == null))
        {
            Console.WriteLine("expected : {0}", expected == null ? "null" : expected.Count().ToString());
            Console.WriteLine("actual: {0}", actual == null ? "null" : actual.Count().ToString());
            return 1;
        }
        try
        {
            // Multiset match: cross off one expected element per matching actual element.
            List<T> contents = new List<T>(expected);
            foreach (T e in actual)
            {
                for (int i = 0; i < contents.Count; i++)
                {
                    if (comparer.Equals(contents[i], e))
                    {
                        contents.RemoveAt(i);
                        break;
                    }
                }
            }
            // Equal iff every expected element was crossed off.
            return contents.Count == 0 ? 0 : 1;
        }
        catch (AggregateException ae)
        {
            // When all inner exceptions share a single type, rethrow that one
            // so type-based exception checks in callers still work.
            var innerExceptions = ae.Flatten().InnerExceptions;
            if (innerExceptions.Where(ex => ex != null).Select(ex => ex.GetType()).Distinct().Count() == 1)
            {
                throw innerExceptions.First();
            }
            else
            {
                Console.WriteLine(ae);
            }
            return 1;
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
    }
    /// <summary>
    /// Helper function to verify that all elements in dictionary are matched using the default EqualityComparer
    /// This verification function MatchAll is used by the GroupBy operator
    /// </summary>
    /// <typeparam name="K"></typeparam>
    /// <typeparam name="E"></typeparam>
    /// <param name="key"></param>
    /// <param name="element"></param>
    /// <param name="result"></param>
    /// <returns>0 if matching, otherwise 1</returns>
    public static int MatchAll<K, E>(IEnumerable<K> key, IEnumerable<E> element, IEnumerable<System.Linq.IGrouping<K, E>> result)
    {
        return MatchAll(key, element, result, EqualityComparer<K>.Default);
    }
    /// <summary>
    /// Helper function to verify that all elements in dictionary are matched using the specific IEqualityComparer
    /// This verification function MatchAll is used by the GroupBy operator
    /// </summary>
    /// <typeparam name="K"></typeparam>
    /// <typeparam name="E"></typeparam>
    /// <param name="key"></param>
    /// <param name="element"></param>
    /// <param name="result"></param>
    /// <param name="keyComparer">Comparer used to match group keys.</param>
    /// <returns>0 if matching, otherwise 1</returns>
    /// <remarks>NOTE(review): groups remaining in the expected dictionary after the
    /// result loop (i.e. expected groups absent from 'result') are never checked,
    /// so a result missing whole groups still returns 0 — confirm intended.</remarks>
    public static int MatchAll<K, E>(IEnumerable<K> key, IEnumerable<E> element, IEnumerable<System.Linq.IGrouping<K, E>> result, IEqualityComparer<K> keyComparer)
    {
        if ((result == null) && (element == null)) return 0;
        try
        {
            // Build the expected grouping: parallel iteration over keys and elements.
            // Null keys cannot go into the dictionary, so they get a dedicated list.
            Dictionary<K, List<E>> dict = new Dictionary<K, List<E>>(keyComparer);
            List<E> groupingForNullKeys = new List<E>();
            using (IEnumerator<E> e1 = element.GetEnumerator())
            using (IEnumerator<K> k1 = key.GetEnumerator())
            {
                while (e1.MoveNext() && k1.MoveNext())
                {
                    K mkey1 = k1.Current;
                    if (mkey1 == null)
                    {
                        groupingForNullKeys.Add(e1.Current);
                    }
                    else
                    {
                        List<E> list;
                        if (!dict.TryGetValue(mkey1, out list))
                        {
                            list = new List<E>();
                            dict.Add(mkey1, list);
                        }
                        list.Add(e1.Current);
                    }
                }
            }
            // Walk the actual groupings: every element of every group must be
            // present (with multiplicity) in the corresponding expected list.
            foreach (System.Linq.IGrouping<K, E> r1 in result)
            {
                K mkey2 = r1.Key;
                List<E> list;
                if (mkey2 == null)
                {
                    list = groupingForNullKeys;
                }
                else
                {
                    // Unknown group key in the result: fail.
                    if (!dict.TryGetValue(mkey2, out list)) return 1;
                    dict.Remove(mkey2);
                }
                foreach (E e1 in r1)
                {
                    if (!list.Contains(e1)) return 1;
                    list.Remove(e1);
                }
            }
            return 0;
        }
        catch (AggregateException ae)
        {
            // When all inner exceptions share a single type, rethrow that one
            // so type-based exception checks in callers still work.
            var innerExceptions = ae.Flatten().InnerExceptions;
            if (innerExceptions.Where(ex => ex != null).Select(ex => ex.GetType()).Distinct().Count() == 1)
            {
                throw innerExceptions.First();
            }
            else
            {
                Console.WriteLine(ae);
            }
            return 1;
        }
        catch (Exception e)
        {
            Console.WriteLine(e);
            throw;
        }
    }
}
/// <summary>
/// Some helpers for quick generation/examination of inputs during PLINQ testing
/// </summary>
public static class Functions
{
    /// <summary>Returns true when <paramref name="num"/> is even.</summary>
    public static bool IsEven(int num)
    {
        return num % 2 == 0;
    }
    /// <summary>Returns true when the string is null or empty.</summary>
    public static bool IsEmpty(string str)
    {
        return String.IsNullOrEmpty(str);
    }
    /// <summary>Index-taking variant of IsEven; the index parameter is ignored.</summary>
    public static bool IsEven_Index(int num, int index)
    {
        return num % 2 == 0;
    }
    /// <summary>Yields the same number <paramref name="count"/> times.</summary>
    public static IEnumerable<int> NumRange(int num, long count)
    {
        for (long produced = 0; produced < count; produced++)
            yield return num;
    }
    /// <summary>Yields <paramref name="count"/> consecutive integers starting at <paramref name="start"/>.</summary>
    public static IEnumerable<int> NumList(int start, int count)
    {
        for (int offset = 0; offset < count; offset++)
            yield return start + offset;
    }
    /// <summary>Yields <paramref name="num"/> null values.</summary>
    public static IEnumerable<int?> NullSeq(long num)
    {
        for (long produced = 0; produced < num; produced++)
            yield return null;
    }
    /// <summary>Endlessly yields the constant 2.</summary>
    public static IEnumerable<int> InfiniteNum()
    {
        while (true)
            yield return 2;
    }
}
}
| |
using Autofac;
using AutoMapper;
using Miningcore.Blockchain.Bitcoin.Configuration;
using Miningcore.Blockchain.Bitcoin.DaemonResponses;
using Miningcore.Configuration;
using Miningcore.Extensions;
using Miningcore.JsonRpc;
using Miningcore.Messaging;
using Miningcore.Mining;
using Miningcore.Payments;
using Miningcore.Persistence;
using Miningcore.Persistence.Model;
using Miningcore.Persistence.Repositories;
using Miningcore.Time;
using Miningcore.Util;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Block = Miningcore.Persistence.Model.Block;
using Contract = Miningcore.Contracts.Contract;
namespace Miningcore.Blockchain.Bitcoin;
[CoinFamily(CoinFamily.Bitcoin)]
public class BitcoinPayoutHandler : PayoutHandlerBase,
IPayoutHandler
{
public BitcoinPayoutHandler(
    IComponentContext ctx,
    IConnectionFactory cf,
    IMapper mapper,
    IShareRepository shareRepo,
    IBlockRepository blockRepo,
    IBalanceRepository balanceRepo,
    IPaymentRepository paymentRepo,
    IMasterClock clock,
    IMessageBus messageBus) :
    base(cf, mapper, shareRepo, blockRepo, balanceRepo, paymentRepo, clock, messageBus)
{
    // Fail fast on missing dependencies.
    Contract.RequiresNonNull(ctx, nameof(ctx));
    Contract.RequiresNonNull(balanceRepo, nameof(balanceRepo));
    Contract.RequiresNonNull(paymentRepo, nameof(paymentRepo));
    // Keep the container around: ConfigureAsync resolves serializer settings from it.
    this.ctx = ctx;
}
protected readonly IComponentContext ctx;
protected RpcClient rpcClient;
protected BitcoinDaemonEndpointConfigExtra extraPoolConfig;
protected BitcoinPoolPaymentProcessingConfigExtra extraPoolPaymentProcessingConfig;
protected override string LogCategory => "Bitcoin Payout Handler";
#region IPayoutHandler
// Binds the handler to a pool: stores config, reads optional extension config
// blobs, creates a pool-scoped logger and the RPC client used for payouts.
public virtual Task ConfigureAsync(ClusterConfig clusterConfig, PoolConfig poolConfig, CancellationToken ct)
{
    Contract.RequiresNonNull(poolConfig, nameof(poolConfig));
    this.poolConfig = poolConfig;
    this.clusterConfig = clusterConfig;
    // Optional extension sections; call sites use ?. so these may be null.
    extraPoolConfig = poolConfig.Extra.SafeExtensionDataAs<BitcoinDaemonEndpointConfigExtra>();
    extraPoolPaymentProcessingConfig = poolConfig.PaymentProcessing.Extra.SafeExtensionDataAs<BitcoinPoolPaymentProcessingConfigExtra>();
    logger = LogUtil.GetPoolScopedLogger(typeof(BitcoinPayoutHandler), poolConfig);
    var jsonSerializerSettings = ctx.Resolve<JsonSerializerSettings>();
    // Payout RPC talks to the first configured daemon endpoint only.
    rpcClient = new RpcClient(poolConfig.Daemons.First(), jsonSerializerSettings, messageBus, poolConfig.Id);
    return Task.FromResult(true);
}
// Classifies candidate blocks by querying the daemon for each block's coinbase
// transaction: orphaned, still immature (with confirmation progress), or confirmed.
// Only blocks whose status/progress changed are returned.
public virtual async Task<Block[]> ClassifyBlocksAsync(IMiningPool pool, Block[] blocks, CancellationToken ct)
{
    Contract.RequiresNonNull(poolConfig, nameof(poolConfig));
    Contract.RequiresNonNull(blocks, nameof(blocks));
    var coin = poolConfig.Template.As<CoinTemplate>();
    // Query the daemon in pages of 100 to keep RPC batch sizes bounded.
    var pageSize = 100;
    var pageCount = (int) Math.Ceiling(blocks.Length / (double) pageSize);
    var result = new List<Block>();
    int minConfirmations;
    // Confirmation threshold: explicit pool override wins, then the coin template, then the Bitcoin default.
    if(coin is BitcoinTemplate bitcoinTemplate)
        minConfirmations = extraPoolConfig?.MinimumConfirmations ?? bitcoinTemplate.CoinbaseMinConfimations ?? BitcoinConstants.CoinbaseMinConfimations;
    else
        minConfirmations = extraPoolConfig?.MinimumConfirmations ?? BitcoinConstants.CoinbaseMinConfimations;
    for(var i = 0; i < pageCount; i++)
    {
        // get a page full of blocks
        var page = blocks
            .Skip(i * pageSize)
            .Take(pageSize)
            .ToArray();
        // build command batch (block.TransactionConfirmationData is the hash of the blocks coinbase transaction)
        var batch = page.Select(block => new RpcRequest(BitcoinCommands.GetTransaction,
            new[] { block.TransactionConfirmationData })).ToArray();
        // execute batch
        var results = await rpcClient.ExecuteBatchAsync(logger, ct, batch);
        for(var j = 0; j < results.Length; j++)
        {
            // Responses come back in request order, so index j maps back to page[j].
            var cmdResult = results[j];
            var transactionInfo = cmdResult.Response?.ToObject<Transaction>();
            var block = page[j];
            // check error
            if(cmdResult.Error != null)
            {
                // Code -5 interpreted as "orphaned"
                if(cmdResult.Error.Code == -5)
                {
                    block.Status = BlockStatus.Orphaned;
                    block.Reward = 0;
                    result.Add(block);
                    logger.Info(() => $"[{LogCategory}] Block {block.BlockHeight} classified as orphaned due to daemon error {cmdResult.Error.Code}");
                    messageBus.NotifyBlockUnlocked(poolConfig.Id, block, coin);
                }
                else
                    // Any other daemon error leaves the block unclassified for this round.
                    logger.Warn(() => $"[{LogCategory}] Daemon reports error '{cmdResult.Error.Message}' (Code {cmdResult.Error.Code}) for transaction {page[j].TransactionConfirmationData}");
            }
            // missing transaction details are interpreted as "orphaned"
            else if(transactionInfo?.Details == null || transactionInfo.Details.Length == 0)
            {
                block.Status = BlockStatus.Orphaned;
                block.Reward = 0;
                result.Add(block);
                logger.Info(() => $"[{LogCategory}] Block {block.BlockHeight} classified as orphaned due to missing tx details");
            }
            else
            {
                // Classify by the wallet's category for the coinbase transaction.
                switch(transactionInfo.Details[0].Category)
                {
                    case "immature":
                        // update progress
                        block.ConfirmationProgress = Math.Min(1.0d, (double) transactionInfo.Confirmations / minConfirmations);
                        block.Reward = transactionInfo.Amount; // update actual block-reward from coinbase-tx
                        result.Add(block);
                        messageBus.NotifyBlockConfirmationProgress(poolConfig.Id, block, coin);
                        break;
                    case "generate":
                        // matured and spendable coinbase transaction
                        block.Status = BlockStatus.Confirmed;
                        block.ConfirmationProgress = 1;
                        block.Reward = transactionInfo.Amount; // update actual block-reward from coinbase-tx
                        result.Add(block);
                        logger.Info(() => $"[{LogCategory}] Unlocked block {block.BlockHeight} worth {FormatAmount(block.Reward)}");
                        messageBus.NotifyBlockUnlocked(poolConfig.Id, block, coin);
                        break;
                    default:
                        // Any other category (e.g. spent elsewhere) is treated as orphaned.
                        logger.Info(() => $"[{LogCategory}] Block {block.BlockHeight} classified as orphaned. Category: {transactionInfo.Details[0].Category}");
                        block.Status = BlockStatus.Orphaned;
                        block.Reward = 0;
                        result.Add(block);
                        messageBus.NotifyBlockUnlocked(poolConfig.Id, block, coin);
                        break;
                }
            }
        }
    }
    return result.ToArray();
}
/// <summary>
/// Computes the mining effort for a block as the ratio of the share difficulty
/// accumulated by the pool to the network difficulty at block time.
/// </summary>
/// <param name="pool">Pool the block belongs to (unused; part of the handler interface).</param>
/// <param name="block">Block whose <c>Effort</c> property is updated in place.</param>
/// <param name="accumulatedBlockShareDiff">Sum of share difficulties submitted for this block.</param>
/// <param name="ct">Cancellation token (unused; the computation is synchronous).</param>
public virtual Task CalculateBlockEffortAsync(IMiningPool pool, Block block, double accumulatedBlockShareDiff, CancellationToken ct)
{
    // effort = accumulated share difficulty relative to network difficulty
    // NOTE(review): assumes block.NetworkDifficulty is non-zero - a zero value
    // would store Infinity/NaN in block.Effort; confirm upstream guarantees this
    block.Effort = accumulatedBlockShareDiff / block.NetworkDifficulty;

    // Task.CompletedTask avoids the Task<bool> allocation of Task.FromResult(true)
    return Task.CompletedTask;
}
/// <summary>
/// Pays out pending miner balances via the daemon's "sendmany" RPC.
/// If the wallet is locked and a wallet password is configured, unlocks the
/// wallet, retries the transfer once, and re-locks it on success.
/// </summary>
/// <param name="pool">Pool the balances belong to (unused; part of the handler interface).</param>
/// <param name="balances">Balances to pay; entries with non-positive amounts are skipped.</param>
/// <param name="ct">Cancellation token forwarded to all RPC calls.</param>
public virtual async Task PayoutAsync(IMiningPool pool, Balance[] balances, CancellationToken ct)
{
    Contract.RequiresNonNull(balances, nameof(balances));

    // build args: address -> amount map, skipping non-positive balances
    // NOTE(review): amounts are rounded to 4 decimal places - confirm this
    // matches the payout precision expected for this coin family
    var amounts = balances
        .Where(x => x.Amount > 0)
        .ToDictionary(x => x.Address, x => Math.Round(x.Amount, 4));

    if(amounts.Count == 0)
        return;

    logger.Info(() => $"[{LogCategory}] Paying {FormatAmount(balances.Sum(x => x.Amount))} to {balances.Length} addresses");

    object[] args;

    if(extraPoolPaymentProcessingConfig?.MinersPayTxFees == true)
    {
        var identifier = !string.IsNullOrEmpty(clusterConfig.PaymentProcessing?.CoinbaseString) ?
            clusterConfig.PaymentProcessing.CoinbaseString.Trim() : "Miningcore";
        var comment = $"{identifier} Payment";
        var subtractFeesFrom = amounts.Keys.ToArray();

        if(!poolConfig.Template.As<BitcoinTemplate>().HasMasterNodes)
        {
            args = new object[]
            {
                string.Empty, // default account
                amounts, // addresses and associated amounts
                1, // only spend funds covered by this many confirmations
                comment, // tx comment
                subtractFeesFrom, // distribute transaction fee equally over all recipients,
                // workaround for https://bitcoin.stackexchange.com/questions/102508/bitcoin-cli-sendtoaddress-error-fallbackfee-is-disabled-wait-a-few-blocks-or-en
                // using bitcoin regtest
                //true,
                //null,
                //"unset",
                //"1"
            };
        }
        else
        {
            // masternode coins (Dash family) use an extended sendmany signature
            args = new object[]
            {
                string.Empty, // default account
                amounts, // addresses and associated amounts
                1, // only spend funds covered by this many confirmations
                false, // Whether to add confirmations to transactions locked via InstantSend
                comment, // tx comment
                subtractFeesFrom, // distribute transaction fee equally over all recipients
                false, // use_is: Send this transaction as InstantSend
                false, // Use anonymized funds only
            };
        }
    }

    else
    {
        args = new object[]
        {
            string.Empty, // default account
            amounts, // addresses and associated amounts
        };
    }

    var didUnlockWallet = false;

    // send command; we jump back here once after unlocking a locked wallet
    tryTransfer:
    var result = await rpcClient.ExecuteAsync<string>(logger, BitcoinCommands.SendMany, ct, args);

    if(result.Error == null)
    {
        if(didUnlockWallet)
        {
            // re-lock the wallet we unlocked for this transfer
            logger.Info(() => $"[{LogCategory}] Locking wallet");
            await rpcClient.ExecuteAsync<JToken>(logger, BitcoinCommands.WalletLock, ct);
        }

        // check result
        var txId = result.Response;

        if(string.IsNullOrEmpty(txId))
            logger.Error(() => $"[{LogCategory}] {BitcoinCommands.SendMany} did not return a transaction id!");
        else
            logger.Info(() => $"[{LogCategory}] Payment transaction id: {txId}");

        await PersistPaymentsAsync(balances, txId);

        NotifyPayoutSuccess(poolConfig.Id, balances, new[] { txId }, null);
    }

    else
    {
        if(result.Error.Code == (int) BitcoinRPCErrorCode.RPC_WALLET_UNLOCK_NEEDED && !didUnlockWallet)
        {
            if(!string.IsNullOrEmpty(extraPoolPaymentProcessingConfig?.WalletPassword))
            {
                logger.Info(() => $"[{LogCategory}] Unlocking wallet");

                var unlockResult = await rpcClient.ExecuteAsync<JToken>(logger, BitcoinCommands.WalletPassphrase, ct, new[]
                {
                    (object) extraPoolPaymentProcessingConfig.WalletPassword,
                    (object) 5 // unlock for N seconds
                });

                if(unlockResult.Error == null)
                {
                    didUnlockWallet = true;
                    goto tryTransfer;
                }

                else
                    // FIX: report the walletpassphrase error, not the preceding sendmany error
                    logger.Error(() => $"[{LogCategory}] {BitcoinCommands.WalletPassphrase} returned error: {unlockResult.Error.Message} code {unlockResult.Error.Code}");
            }

            else
                logger.Error(() => $"[{LogCategory}] Wallet is locked but walletPassword was not configured. Unable to send funds.");
        }

        else
        {
            logger.Error(() => $"[{LogCategory}] {BitcoinCommands.SendMany} returned error: {result.Error.Message} code {result.Error.Code}");

            NotifyPayoutFailure(poolConfig.Id, balances, $"{BitcoinCommands.SendMany} returned error: {result.Error.Message} code {result.Error.Code}", null);
        }
    }
}
#endregion // IPayoutHandler
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
//
// <OWNER>[....]</OWNER>
/*============================================================
**
** Class: RWLock
**
**
** Purpose: Defines the lock that implements
** single-writer/multiple-reader semantics
**
**
===========================================================*/
#if FEATURE_RWLOCK
namespace System.Threading {
using System.Threading;
using System.Security.Permissions;
using System.Runtime.Remoting;
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
[HostProtection(Synchronization=true, ExternalThreading=true)]
[ComVisible(true)]
public sealed class ReaderWriterLock: CriticalFinalizerObject
{
/*
* Managed facade over the runtime's native reader/writer lock: every
* operation below forwards to an fcall (MethodImplOptions.InternalCall).
* The managed layer only validates arguments.
*/
/*
* Constructor
*/
[System.Security.SecuritySafeCritical] // auto-generated
public ReaderWriterLock()
{
PrivateInitialize();
}
/*
* Destructor
*/
[System.Security.SecuritySafeCritical] // auto-generated
~ReaderWriterLock()
{
PrivateDestruct();
}
/*
* Property that returns TRUE if the reader lock is held
* by the current thread
*/
public bool IsReaderLockHeld {
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
get {
return(PrivateGetIsReaderLockHeld());
}
}
/*
* Property that returns TRUE if the writer lock is held
* by the current thread
*/
public bool IsWriterLockHeld {
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
get {
return(PrivateGetIsWriterLockHeld());
}
}
/*
* Property that returns the current writer sequence number.
* The caller should be a reader or writer for getting
* meaningful results
*/
public int WriterSeqNum {
[System.Security.SecuritySafeCritical] // auto-generated
get {
return(PrivateGetWriterSeqNum());
}
}
/*
* Acquires reader lock. The thread will block if a different
* thread has writer lock.
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void AcquireReaderLockInternal(int millisecondsTimeout);
[System.Security.SecuritySafeCritical] // auto-generated
public void AcquireReaderLock(int millisecondsTimeout)
{
AcquireReaderLockInternal(millisecondsTimeout);
}
[System.Security.SecuritySafeCritical] // auto-generated
public void AcquireReaderLock(TimeSpan timeout)
{
// -1 ms is permitted by the range check; any other negative value or a
// value exceeding Int32.MaxValue ms is rejected
long tm = (long)timeout.TotalMilliseconds;
if (tm < -1 || tm > (long) Int32.MaxValue)
throw new ArgumentOutOfRangeException("timeout", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegOrNegative1"));
AcquireReaderLockInternal((int)tm);
}
/*
* Acquires writer lock. The thread will block if a different
* thread has reader lock. It will deadlock if this thread
* has reader lock. Use UpgradeToWriterLock when you are not
* sure if the thread has reader lock
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void AcquireWriterLockInternal(int millisecondsTimeout);
[System.Security.SecuritySafeCritical] // auto-generated
public void AcquireWriterLock(int millisecondsTimeout)
{
AcquireWriterLockInternal(millisecondsTimeout);
}
[System.Security.SecuritySafeCritical] // auto-generated
public void AcquireWriterLock(TimeSpan timeout)
{
// same timeout validation as AcquireReaderLock(TimeSpan)
long tm = (long)timeout.TotalMilliseconds;
if (tm < -1 || tm > (long) Int32.MaxValue)
throw new ArgumentOutOfRangeException("timeout", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegOrNegative1"));
AcquireWriterLockInternal((int)tm);
}
/*
* Releases reader lock.
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private extern void ReleaseReaderLockInternal();
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
public void ReleaseReaderLock()
{
ReleaseReaderLockInternal();
}
/*
* Releases writer lock.
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private extern void ReleaseWriterLockInternal();
[System.Security.SecuritySafeCritical] // auto-generated
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
public void ReleaseWriterLock()
{
ReleaseWriterLockInternal();
}
/*
* Upgrades the thread to a writer. If the thread is a
* reader, it is possible that the reader lock was
* released before writer lock was acquired.
*/
[System.Security.SecuritySafeCritical] // auto-generated
public LockCookie UpgradeToWriterLock(int millisecondsTimeout)
{
LockCookie result = new LockCookie ();
FCallUpgradeToWriterLock (ref result, millisecondsTimeout);
return result;
}
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void FCallUpgradeToWriterLock(ref LockCookie result, int millisecondsTimeout);
public LockCookie UpgradeToWriterLock(TimeSpan timeout)
{
// same timeout validation as AcquireReaderLock(TimeSpan)
long tm = (long)timeout.TotalMilliseconds;
if (tm < -1 || tm > (long) Int32.MaxValue)
throw new ArgumentOutOfRangeException("timeout", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegOrNegative1"));
return UpgradeToWriterLock((int)tm);
}
/*
* Restores the lock status of the thread to the one it was
* in when it called UpgradeToWriterLock.
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void DowngradeFromWriterLockInternal(ref LockCookie lockCookie);
[System.Security.SecuritySafeCritical] // auto-generated
public void DowngradeFromWriterLock(ref LockCookie lockCookie)
{
DowngradeFromWriterLockInternal(ref lockCookie);
}
/*
* Releases the lock irrespective of the number of times the thread
* acquired the lock
*/
[System.Security.SecuritySafeCritical] // auto-generated
public LockCookie ReleaseLock()
{
LockCookie result = new LockCookie ();
FCallReleaseLock (ref result);
return result;
}
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void FCallReleaseLock(ref LockCookie result);
/*
* Restores the lock status of the thread to the one it was
* in when it called ReleaseLock.
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void RestoreLockInternal(ref LockCookie lockCookie);
[System.Security.SecuritySafeCritical] // auto-generated
public void RestoreLock(ref LockCookie lockCookie)
{
RestoreLockInternal(ref lockCookie);
}
/*
* Internal helper that returns TRUE if the reader lock is held
* by the current thread
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private extern bool PrivateGetIsReaderLockHeld();
/*
* Internal helper that returns TRUE if the writer lock is held
* by the current thread
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private extern bool PrivateGetIsWriterLockHeld();
/*
* Internal helper that returns the current writer sequence
* number. The caller should be a reader or writer for getting
* meaningful results
*/
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern int PrivateGetWriterSeqNum();
/*
* Returns true if there were intermediate writes since the
* sequence number was obtained. The caller should be
* a reader or writer for getting meaningful results
*/
[System.Security.SecuritySafeCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern bool AnyWritersSince(int seqNum);
// Initialize state kept inside the lock
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void PrivateInitialize();
// Destruct resource associated with the lock
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private extern void PrivateDestruct();
// State
// NOTE(review): the pragma below states these fields are not used from
// managed code - they are presumably accessed by the native lock
// implementation, so their order and types likely must match the VM-side
// layout. Confirm before reordering, renaming, or removing any of them.
#pragma warning disable 169
#pragma warning disable 414 // These fields are not used from managed.
private IntPtr _hWriterEvent;
private IntPtr _hReaderEvent;
private IntPtr _hObjectHandle;
private int _dwState = 0;
private int _dwULockID = 0;
private int _dwLLockID = 0;
private int _dwWriterID = 0;
private int _dwWriterSeqNum = 0;
private short _wWriterLevel;
#if RWLOCK_STATISTICS
// WARNING: You must explicitly #define RWLOCK_STATISTICS when you
// build in both the VM and BCL directories if you want this.
private int _dwReaderEntryCount = 0;
private int _dwReaderContentionCount = 0;
private int _dwWriterEntryCount = 0;
private int _dwWriterContentionCount = 0;
private int _dwEventsReleasedCount = 0;
#endif // RWLOCK_STATISTICS
#pragma warning restore 414
#pragma warning restore 169
}
}
#endif //FEATURE_RWLOCK
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
namespace _4PosBackOffice.NET
{
// Designer-generated half of frmPricelistFilter, a WinForms dialog migrated
// from VB6. It declares the form's controls, the VB6 "WithEvents"-style
// wrapper properties that (re)wire event handlers when a control instance is
// replaced, and InitializeComponent. Runtime logic lives in the other half
// of the partial class.
[Microsoft.VisualBasic.CompilerServices.DesignerGenerated()]
partial class frmPricelistFilter
{
#region "Windows Form Designer generated code "
[System.Diagnostics.DebuggerNonUserCode()]
public frmPricelistFilter() : base()
{
// form-level event hookups (migrated from VB6 Handles clauses)
FormClosed += frmPricelistFilter_FormClosed;
KeyPress += frmPricelistFilter_KeyPress;
Resize += frmPricelistFilter_Resize;
Load += frmPricelistFilter_Load;
//This call is required by the Windows Form Designer.
InitializeComponent();
}
//Form overrides dispose to clean up the component list.
[System.Diagnostics.DebuggerNonUserCode()]
protected override void Dispose(bool Disposing)
{
if (Disposing) {
if ((components != null)) {
components.Dispose();
}
}
base.Dispose(Disposing);
}
//Required by the Windows Form Designer
private System.ComponentModel.IContainer components;
public System.Windows.Forms.ToolTip ToolTip1;
// VB6 "WithEvents" migration pattern: each property below unsubscribes the
// handler from the old control instance and subscribes it on the new one,
// so event wiring survives re-assignment of the control.
private System.Windows.Forms.Button withEventsField_cmdPrint;
public System.Windows.Forms.Button cmdPrint {
get { return withEventsField_cmdPrint; }
set {
if (withEventsField_cmdPrint != null) {
withEventsField_cmdPrint.Click -= cmdPrint_Click;
}
withEventsField_cmdPrint = value;
if (withEventsField_cmdPrint != null) {
withEventsField_cmdPrint.Click += cmdPrint_Click;
}
}
}
private System.Windows.Forms.CheckBox withEventsField_chkChannel;
public System.Windows.Forms.CheckBox chkChannel {
get { return withEventsField_chkChannel; }
set {
if (withEventsField_chkChannel != null) {
withEventsField_chkChannel.CheckStateChanged -= chkChannel_CheckStateChanged;
}
withEventsField_chkChannel = value;
if (withEventsField_chkChannel != null) {
withEventsField_chkChannel.CheckStateChanged += chkChannel_CheckStateChanged;
}
}
}
public System.Windows.Forms.ComboBox cmbDelivery;
public System.Windows.Forms.ComboBox cmbCOD;
public System.Windows.Forms.CheckBox _chkFields_12;
private System.Windows.Forms.TextBox withEventsField__txtFields_0;
public System.Windows.Forms.TextBox _txtFields_0 {
get { return withEventsField__txtFields_0; }
set {
if (withEventsField__txtFields_0 != null) {
withEventsField__txtFields_0.Enter -= txtFields_Enter;
}
withEventsField__txtFields_0 = value;
if (withEventsField__txtFields_0 != null) {
withEventsField__txtFields_0.Enter += txtFields_Enter;
}
}
}
private System.Windows.Forms.Button withEventsField_cmdAllocate;
public System.Windows.Forms.Button cmdAllocate {
get { return withEventsField_cmdAllocate; }
set {
if (withEventsField_cmdAllocate != null) {
withEventsField_cmdAllocate.Click -= cmdAllocate_Click;
}
withEventsField_cmdAllocate = value;
if (withEventsField_cmdAllocate != null) {
withEventsField_cmdAllocate.Click += cmdAllocate_Click;
}
}
}
private System.Windows.Forms.Button withEventsField_cmdCancel;
public System.Windows.Forms.Button cmdCancel {
get { return withEventsField_cmdCancel; }
set {
if (withEventsField_cmdCancel != null) {
withEventsField_cmdCancel.Click -= cmdCancel_Click;
}
withEventsField_cmdCancel = value;
if (withEventsField_cmdCancel != null) {
withEventsField_cmdCancel.Click += cmdCancel_Click;
}
}
}
private System.Windows.Forms.Button withEventsField_cmdClose;
public System.Windows.Forms.Button cmdClose {
get { return withEventsField_cmdClose; }
set {
if (withEventsField_cmdClose != null) {
withEventsField_cmdClose.Click -= cmdClose_Click;
}
withEventsField_cmdClose = value;
if (withEventsField_cmdClose != null) {
withEventsField_cmdClose.Click += cmdClose_Click;
}
}
}
public System.Windows.Forms.Panel picButtons;
public System.Windows.Forms.Label _lblLabels_0;
public System.Windows.Forms.Label _lblLabels_38;
public Microsoft.VisualBasic.PowerPacks.RectangleShape _Shape1_2;
public System.Windows.Forms.Label _lbl_5;
//Public WithEvents chkFields As Microsoft.VisualBasic.Compatibility.VB6.CheckBoxArray
//Public WithEvents lbl As Microsoft.VisualBasic.Compatibility.VB6.LabelArray
//Public WithEvents lblLabels As Microsoft.VisualBasic.Compatibility.VB6.LabelArray
//Public WithEvents txtFields As Microsoft.VisualBasic.Compatibility.VB6.TextBoxArray
public RectangleShapeArray Shape1;
public Microsoft.VisualBasic.PowerPacks.ShapeContainer ShapeContainer1;
//NOTE: The following procedure is required by the Windows Form Designer
//It can be modified using the Windows Form Designer.
//Do not modify it using the code editor.
[System.Diagnostics.DebuggerStepThrough()]
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
this.ToolTip1 = new System.Windows.Forms.ToolTip(this.components);
this.ShapeContainer1 = new Microsoft.VisualBasic.PowerPacks.ShapeContainer();
this._Shape1_2 = new Microsoft.VisualBasic.PowerPacks.RectangleShape();
this.cmdPrint = new System.Windows.Forms.Button();
this.chkChannel = new System.Windows.Forms.CheckBox();
this.cmbDelivery = new System.Windows.Forms.ComboBox();
this.cmbCOD = new System.Windows.Forms.ComboBox();
this._chkFields_12 = new System.Windows.Forms.CheckBox();
this._txtFields_0 = new System.Windows.Forms.TextBox();
this.picButtons = new System.Windows.Forms.Panel();
this.cmdAllocate = new System.Windows.Forms.Button();
this.cmdCancel = new System.Windows.Forms.Button();
this.cmdClose = new System.Windows.Forms.Button();
this._lblLabels_0 = new System.Windows.Forms.Label();
this._lblLabels_38 = new System.Windows.Forms.Label();
this._lbl_5 = new System.Windows.Forms.Label();
this.picButtons.SuspendLayout();
this.SuspendLayout();
//
//ShapeContainer1
//
this.ShapeContainer1.Location = new System.Drawing.Point(0, 0);
this.ShapeContainer1.Margin = new System.Windows.Forms.Padding(0);
this.ShapeContainer1.Name = "ShapeContainer1";
this.ShapeContainer1.Shapes.AddRange(new Microsoft.VisualBasic.PowerPacks.Shape[] { this._Shape1_2 });
this.ShapeContainer1.Size = new System.Drawing.Size(406, 131);
this.ShapeContainer1.TabIndex = 13;
this.ShapeContainer1.TabStop = false;
//
//_Shape1_2
//
this._Shape1_2.BackColor = System.Drawing.Color.FromArgb(Convert.ToInt32(Convert.ToByte(192)), Convert.ToInt32(Convert.ToByte(192)), Convert.ToInt32(Convert.ToByte(255)));
this._Shape1_2.BackStyle = Microsoft.VisualBasic.PowerPacks.BackStyle.Opaque;
this._Shape1_2.BorderColor = System.Drawing.SystemColors.WindowText;
this._Shape1_2.FillColor = System.Drawing.Color.Black;
this._Shape1_2.Location = new System.Drawing.Point(15, 60);
this._Shape1_2.Name = "_Shape1_2";
this._Shape1_2.Size = new System.Drawing.Size(379, 64);
//
//cmdPrint
//
this.cmdPrint.BackColor = System.Drawing.SystemColors.Control;
this.cmdPrint.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdPrint.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdPrint.Location = new System.Drawing.Point(320, 240);
this.cmdPrint.Name = "cmdPrint";
this.cmdPrint.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdPrint.Size = new System.Drawing.Size(76, 28);
this.cmdPrint.TabIndex = 12;
this.cmdPrint.Text = "&Print";
this.cmdPrint.UseVisualStyleBackColor = false;
//
//chkChannel
//
this.chkChannel.BackColor = System.Drawing.Color.FromArgb(Convert.ToInt32(Convert.ToByte(192)), Convert.ToInt32(Convert.ToByte(192)), Convert.ToInt32(Convert.ToByte(255)));
this.chkChannel.CheckAlign = System.Drawing.ContentAlignment.MiddleRight;
this.chkChannel.Cursor = System.Windows.Forms.Cursors.Default;
this.chkChannel.ForeColor = System.Drawing.SystemColors.ControlText;
this.chkChannel.Location = new System.Drawing.Point(218, 199);
this.chkChannel.Name = "chkChannel";
this.chkChannel.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.chkChannel.Size = new System.Drawing.Size(136, 13);
this.chkChannel.TabIndex = 5;
this.chkChannel.Text = "Delivery Channel Name:";
this.chkChannel.UseVisualStyleBackColor = false;
//
//cmbDelivery
//
this.cmbDelivery.BackColor = System.Drawing.SystemColors.Window;
this.cmbDelivery.Cursor = System.Windows.Forms.Cursors.Default;
this.cmbDelivery.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cmbDelivery.ForeColor = System.Drawing.SystemColors.WindowText;
this.cmbDelivery.Location = new System.Drawing.Point(218, 214);
this.cmbDelivery.Name = "cmbDelivery";
this.cmbDelivery.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmbDelivery.Size = new System.Drawing.Size(178, 21);
this.cmbDelivery.TabIndex = 6;
//
//cmbCOD
//
this.cmbCOD.BackColor = System.Drawing.SystemColors.Window;
this.cmbCOD.Cursor = System.Windows.Forms.Cursors.Default;
this.cmbCOD.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cmbCOD.ForeColor = System.Drawing.SystemColors.WindowText;
this.cmbCOD.Location = new System.Drawing.Point(29, 214);
this.cmbCOD.Name = "cmbCOD";
this.cmbCOD.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmbCOD.Size = new System.Drawing.Size(178, 21);
this.cmbCOD.TabIndex = 4;
//
//_chkFields_12
//
this._chkFields_12.BackColor = System.Drawing.Color.FromArgb(Convert.ToInt32(Convert.ToByte(192)), Convert.ToInt32(Convert.ToByte(192)), Convert.ToInt32(Convert.ToByte(255)));
this._chkFields_12.CheckAlign = System.Drawing.ContentAlignment.MiddleRight;
this._chkFields_12.Cursor = System.Windows.Forms.Cursors.Default;
this._chkFields_12.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this._chkFields_12.ForeColor = System.Drawing.SystemColors.WindowText;
this._chkFields_12.Location = new System.Drawing.Point(320, 96);
this._chkFields_12.Name = "_chkFields_12";
this._chkFields_12.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._chkFields_12.Size = new System.Drawing.Size(64, 19);
this._chkFields_12.TabIndex = 7;
this._chkFields_12.Text = "Disabled:";
this._chkFields_12.UseVisualStyleBackColor = false;
//
//_txtFields_0
//
this._txtFields_0.AcceptsReturn = true;
this._txtFields_0.BackColor = System.Drawing.SystemColors.Window;
this._txtFields_0.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this._txtFields_0.Cursor = System.Windows.Forms.Cursors.IBeam;
this._txtFields_0.ForeColor = System.Drawing.SystemColors.WindowText;
this._txtFields_0.Location = new System.Drawing.Point(140, 66);
this._txtFields_0.MaxLength = 0;
this._txtFields_0.Name = "_txtFields_0";
this._txtFields_0.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._txtFields_0.Size = new System.Drawing.Size(247, 19);
this._txtFields_0.TabIndex = 2;
//
//picButtons
//
this.picButtons.BackColor = System.Drawing.Color.Blue;
this.picButtons.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
this.picButtons.Controls.Add(this.cmdAllocate);
this.picButtons.Controls.Add(this.cmdCancel);
this.picButtons.Controls.Add(this.cmdClose);
this.picButtons.Cursor = System.Windows.Forms.Cursors.Default;
this.picButtons.Dock = System.Windows.Forms.DockStyle.Top;
this.picButtons.ForeColor = System.Drawing.SystemColors.ControlText;
this.picButtons.Location = new System.Drawing.Point(0, 0);
this.picButtons.Name = "picButtons";
this.picButtons.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.picButtons.Size = new System.Drawing.Size(406, 39);
this.picButtons.TabIndex = 10;
//
//cmdAllocate
//
this.cmdAllocate.BackColor = System.Drawing.SystemColors.Control;
this.cmdAllocate.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdAllocate.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdAllocate.Location = new System.Drawing.Point(192, 3);
this.cmdAllocate.Name = "cmdAllocate";
this.cmdAllocate.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdAllocate.Size = new System.Drawing.Size(119, 28);
this.cmdAllocate.TabIndex = 11;
this.cmdAllocate.Text = "&Allocate Stock Items";
this.cmdAllocate.UseVisualStyleBackColor = false;
//
//cmdCancel
//
this.cmdCancel.BackColor = System.Drawing.SystemColors.Control;
this.cmdCancel.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdCancel.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdCancel.Location = new System.Drawing.Point(5, 3);
this.cmdCancel.Name = "cmdCancel";
this.cmdCancel.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdCancel.Size = new System.Drawing.Size(73, 29);
this.cmdCancel.TabIndex = 9;
this.cmdCancel.TabStop = false;
this.cmdCancel.Text = "&Undo";
this.cmdCancel.UseVisualStyleBackColor = false;
//
//cmdClose
//
this.cmdClose.BackColor = System.Drawing.SystemColors.Control;
this.cmdClose.Cursor = System.Windows.Forms.Cursors.Default;
this.cmdClose.ForeColor = System.Drawing.SystemColors.ControlText;
this.cmdClose.Location = new System.Drawing.Point(324, 3);
this.cmdClose.Name = "cmdClose";
this.cmdClose.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.cmdClose.Size = new System.Drawing.Size(73, 29);
this.cmdClose.TabIndex = 8;
this.cmdClose.TabStop = false;
this.cmdClose.Text = "E&xit";
this.cmdClose.UseVisualStyleBackColor = false;
//
//_lblLabels_0
//
this._lblLabels_0.AutoSize = true;
this._lblLabels_0.BackColor = System.Drawing.Color.Transparent;
this._lblLabels_0.Cursor = System.Windows.Forms.Cursors.Default;
this._lblLabels_0.ForeColor = System.Drawing.SystemColors.ControlText;
this._lblLabels_0.Location = new System.Drawing.Point(28, 199);
this._lblLabels_0.Name = "_lblLabels_0";
this._lblLabels_0.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._lblLabels_0.Size = new System.Drawing.Size(106, 13);
this._lblLabels_0.TabIndex = 3;
this._lblLabels_0.Text = "COD Channel Name:";
this._lblLabels_0.TextAlign = System.Drawing.ContentAlignment.TopRight;
//
//_lblLabels_38
//
this._lblLabels_38.AutoSize = true;
this._lblLabels_38.BackColor = System.Drawing.Color.Transparent;
this._lblLabels_38.Cursor = System.Windows.Forms.Cursors.Default;
this._lblLabels_38.ForeColor = System.Drawing.SystemColors.ControlText;
this._lblLabels_38.Location = new System.Drawing.Point(24, 69);
this._lblLabels_38.Name = "_lblLabels_38";
this._lblLabels_38.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._lblLabels_38.Size = new System.Drawing.Size(116, 13);
this._lblLabels_38.TabIndex = 1;
this._lblLabels_38.Text = "Price List Group Name:";
this._lblLabels_38.TextAlign = System.Drawing.ContentAlignment.TopRight;
//
//_lbl_5
//
this._lbl_5.AutoSize = true;
this._lbl_5.BackColor = System.Drawing.Color.Transparent;
this._lbl_5.Cursor = System.Windows.Forms.Cursors.Default;
this._lbl_5.ForeColor = System.Drawing.SystemColors.ControlText;
this._lbl_5.Location = new System.Drawing.Point(15, 45);
this._lbl_5.Name = "_lbl_5";
this._lbl_5.RightToLeft = System.Windows.Forms.RightToLeft.No;
this._lbl_5.Size = new System.Drawing.Size(56, 13);
this._lbl_5.TabIndex = 0;
this._lbl_5.Text = "&1. General";
//
//frmPricelistFilter
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6f, 13f);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.BackColor = System.Drawing.Color.FromArgb(Convert.ToInt32(Convert.ToByte(224)), Convert.ToInt32(Convert.ToByte(224)), Convert.ToInt32(Convert.ToByte(224)));
this.ClientSize = new System.Drawing.Size(406, 131);
this.ControlBox = false;
this.Controls.Add(this.cmdPrint);
this.Controls.Add(this.chkChannel);
this.Controls.Add(this.cmbDelivery);
this.Controls.Add(this.cmbCOD);
this.Controls.Add(this._chkFields_12);
this.Controls.Add(this._txtFields_0);
this.Controls.Add(this.picButtons);
this.Controls.Add(this._lblLabels_0);
this.Controls.Add(this._lblLabels_38);
this.Controls.Add(this._lbl_5);
this.Controls.Add(this.ShapeContainer1);
this.Cursor = System.Windows.Forms.Cursors.Default;
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
this.KeyPreview = true;
this.Location = new System.Drawing.Point(73, 22);
this.MaximizeBox = false;
this.MinimizeBox = false;
this.Name = "frmPricelistFilter";
this.RightToLeft = System.Windows.Forms.RightToLeft.No;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
this.Text = "Edit Price List Group";
this.picButtons.ResumeLayout(false);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
}
}
| |
/*
Copyright (c) 2004-2006 Tomas Matousek, Ladislav Prosek.
The use and distribution terms for this software are contained in the file named License.txt,
which can be found in the root of the Phalanger distribution. By using this software
in any fashion, you are agreeing to be bound by the terms of this license.
You must not remove this notice from this software.
*/
using System;
using System.IO;
using System.Runtime.Serialization;
using PHP.Core;
using PHP.Core.Reflection;
using System.Collections.Generic;
using System.Diagnostics;
namespace PHP.Library
{
#region Serializer
/// <summary>
/// A base class for serializers, i.e. a named formatters.
/// </summary>
public abstract class Serializer : MarshalByRefObject
{
#region ClassContextHolder
/// <summary>
/// Common base class of <c>ObjectWriter</c> and <c>ObjectReader</c> containing the cached class context functionality.
/// </summary>
/// <remarks>
/// Class context is needed when invoking <c>__sleep</c> and <c>__wakeup</c> magic methods.
/// </remarks>
/// <summary>
/// Common base class of <c>ObjectWriter</c> and <c>ObjectReader</c> providing a lazily cached class context.
/// </summary>
/// <remarks>
/// The class context is needed when the <c>__sleep</c> and <c>__wakeup</c> magic methods are invoked.
/// </remarks>
internal abstract class ClassContextHolder
{
    /// <summary>
    /// Initializes the <c>ClassContextHolder</c> with a known <see cref="DTypeDesc"/>.
    /// Pass UnknownTypeDesc.Singleton to specify an unknown caller; the context is then determined on first use.
    /// </summary>
    /// <param name="caller">The class context, or an unknown type desc.</param>
    public ClassContextHolder(DTypeDesc caller)
    {
        bool callerIsUnknown = (caller != null && caller.IsUnknown);
        if (!callerIsUnknown)
        {
            ClassContext = caller;
        }
    }

    /// <summary>
    /// Copies info from an already used <c>ClassContextHolder</c>. The cached context is reused
    /// iff it was already initialized in the source holder.
    /// </summary>
    /// <param name="holder">Existing class context holder with a potentially already obtained class context.</param>
    internal ClassContextHolder(ClassContextHolder/*!*/holder)
    {
        Debug.Assert(holder != null);
        _classContext = holder._classContext;
        classContextIsValid = holder.classContextIsValid;
    }

    /// <summary>
    /// Gets or sets the current class context. See <see cref="_classContext"/>.
    /// </summary>
    protected DTypeDesc ClassContext
    {
        get
        {
            if (!classContextIsValid)
            {
                // resolve lazily from the PHP stack trace and cache via the setter
                ClassContext = PhpStackTrace.GetClassContext();
            }
            return _classContext;
        }
        set
        {
            _classContext = value;
            classContextIsValid = true;
        }
    }

    /// <summary>
    /// Holds the current class context (a type derived from <see cref="DObject"/> in whose
    /// scope the calling code is executing). Initialized lazily.
    /// </summary>
    private DTypeDesc _classContext;

    /// <summary>
    /// True once <see cref="_classContext"/> holds a valid value.
    /// </summary>
    private bool classContextIsValid;
}
#endregion
/// <summary>
/// Gets a name of the serializer. Shouldn't return a <B>null</B> reference.
/// </summary>
protected abstract string GetName();
/// <summary>
/// Creates a formatter. Shouldn't return a <B>null</B> reference.
/// </summary>
/// <param name="caller">DTypeDesc of the class context or UnknownTypeDesc if class context is not known yet and will be determined lazily.</param>
protected abstract IFormatter CreateFormatter(DTypeDesc caller);
/// <summary>
/// Gets tring representation of the serializer.
/// </summary>
/// <returns>The name of the serializer.</returns>
public override string ToString()
{
return Name;
}
/// <summary>
/// Gets the serializer name (always non-null).
/// </summary>
public string Name
{
get
{
string result = GetName();
if (result == null)
throw new InvalidMethodImplementationException(GetType().FullName + ".GetName");
return result;
}
}
/// <summary>
/// Creates a formatter (always non-null).
/// </summary>
/// <param name="caller">DTypeDesc of the class context or UnknownTypeDesc if class context is not known yet and will be determined lazily.</param>
/// <returns>New IFormatter class instance.</returns>
private IFormatter GetFormatter(DTypeDesc caller)
{
IFormatter result = CreateFormatter(caller);
if (result == null)
throw new InvalidMethodImplementationException(GetType().FullName + "CreateFormatter");
return result;
}
/// <summary>
/// Serializes a graph of connected objects to a byte array using a given formatter.
/// </summary>
/// <param name="variable">The variable to serialize.</param>
/// <param name="caller">DTypeDesc of the caller's class context if it is known or UnknownTypeDesc if it should be determined lazily.</param>
/// <returns>
/// The serialized representation of the <paramref name="variable"/> or a <B>null</B> reference on error.
/// </returns>
/// <exception cref="PhpException">Serialization failed (Notice).</exception>
public PhpBytes Serialize(object variable, DTypeDesc caller)
{
MemoryStream stream = new MemoryStream();
try
{
try
{
// serialize the variable into the memory stream
GetFormatter(caller).Serialize(stream, variable);
}
catch (System.Reflection.TargetInvocationException e)
{
throw e.InnerException;
}
}
catch (SerializationException e)
{
PhpException.Throw(PhpError.Notice, LibResources.GetString("serialization_failed", e.Message));
return null;
}
// extract the serialized data
return new PhpBytes(stream.ToArray());
}
/// <summary>
/// Deserializes a graph of connected object from a byte array using a given formatter.
/// </summary>
/// <param name="bytes">The byte array to deserialize the graph from.</param>
/// <param name="caller">DTypeDesc of the caller's class context if it is known or UnknownTypeDesc if it should be determined lazily.</param>
/// <returns>
/// The deserialized object graph or an instance of <see cref="PhpReference"/> containing <B>false</B> on error.
/// </returns>
/// <exception cref="PhpException">Deserialization failed (Notice).</exception>
public PhpReference Deserialize(PhpBytes bytes, DTypeDesc caller)
{
MemoryStream stream = new MemoryStream(bytes.ReadonlyData);
object result = null;
try
{
try
{
// deserialize the data
result = GetFormatter(caller).Deserialize(stream);
}
catch (System.Reflection.TargetInvocationException e)
{
throw e.InnerException;
}
}
catch (SerializationException e)
{
PhpException.Throw(PhpError.Notice, LibResources.GetString("deserialization_failed",
e.Message, stream.Position, stream.Length));
return new PhpReference(false);
}
return PhpVariable.MakeReference(result);
}
}
#endregion
#region SingletonSerializer
/// <summary>
/// Represents a serializer that always hands out one shared formatter instance.
/// </summary>
/// <threadsafety static="true"/>
public sealed class SingletonSerializer : Serializer
{
    /// <summary>The serializer name; never a <B>null</B> reference.</summary>
    private readonly string/*!*/ name;

    /// <summary>The single shared formatter; never a <B>null</B> reference.</summary>
    private readonly IFormatter/*!*/ formatter;

    /// <summary>
    /// Creates a new instance of the serializer.
    /// </summary>
    /// <param name="name">The name.</param>
    /// <param name="formatter">The formatter.</param>
    /// <exception cref="ArgumentNullException"><paramref name="name"/> or <paramref name="formatter"/> are <B>null</B> references.</exception>
    public SingletonSerializer(string/*!*/ name, IFormatter/*!*/ formatter)
    {
        if (name == null) throw new ArgumentNullException("name");
        if (formatter == null) throw new ArgumentNullException("formatter");

        this.name = name;
        this.formatter = formatter;
    }

    /// <summary>
    /// Returns the serializer name.
    /// </summary>
    protected override string/*!*/ GetName()
    {
        return this.name;
    }

    /// <summary>
    /// Returns the shared formatter instance; the class context is ignored.
    /// </summary>
    protected override IFormatter/*!*/ CreateFormatter(DTypeDesc caller)
    {
        return this.formatter;
    }
}
#endregion
#region ContextualSerializer
/// <summary>
/// Represents a serializer whose formatter is produced on demand by a factory delegate,
/// utilizing the <see cref="SerializationContext"/>.
/// </summary>
/// <threadsafety static="true"/>
public sealed class ContextualSerializer : Serializer
{
    public delegate IFormatter/*!*/ FormatterFactory(DTypeDesc caller);

    /// <summary>The serializer name; never a <B>null</B> reference.</summary>
    private readonly string/*!*/ name;

    /// <summary>The factory supplying formatter instances; never a <B>null</B> reference.</summary>
    private readonly FormatterFactory/*!*/ formatterFactory;

    /// <summary>
    /// Creates a new instance of the serializer.
    /// </summary>
    /// <param name="name">The name.</param>
    /// <param name="formatterFactory">The factory that supplies fresh instances of the formatter.</param>
    /// <exception cref="ArgumentNullException"><paramref name="name"/> or <paramref name="formatterFactory"/> are <B>null</B> references.</exception>
    public ContextualSerializer(string/*!*/ name, FormatterFactory/*!*/ formatterFactory)
    {
        if (name == null) throw new ArgumentNullException("name");
        if (formatterFactory == null) throw new ArgumentNullException("formatterFactory");

        this.name = name;
        this.formatterFactory = formatterFactory;
    }

    /// <summary>
    /// Returns the serializer name.
    /// </summary>
    protected override string/*!*/ GetName()
    {
        return this.name;
    }

    /// <summary>
    /// Asks the factory for a formatter bound to the given class context.
    /// </summary>
    protected override IFormatter/*!*/ CreateFormatter(DTypeDesc caller)
    {
        return this.formatterFactory(caller);
    }
}
#endregion
#region PhpSerializer
/// <summary>
/// The standard PHP serializer ("php").
/// </summary>
public sealed class PhpSerializer : Serializer
{
    /// <summary>
    /// A singleton instance.
    /// </summary>
    public static readonly PhpSerializer Default = new PhpSerializer();

    /// <summary>Hidden constructor; use <see cref="Default"/>.</summary>
    private PhpSerializer() { }

    /// <summary>
    /// Returns the name.
    /// </summary>
    protected override string GetName()
    {
        return "php";
    }

    /// <summary>
    /// Returns a fresh PHP formatter using the current page encoding set in the global configuration.
    /// </summary>
    protected override IFormatter CreateFormatter(DTypeDesc caller)
    {
        return new PhpFormatter(Configuration.Application.Globalization.PageEncoding, caller);
    }
}
#endregion
#region PhpJsonSerializer
/// <summary>
/// The JSON serializer ("JSON"), parameterized by encode/decode options.
/// </summary>
public sealed class PhpJsonSerializer : Serializer
{
    /// <summary>Options applied when encoding values to JSON.</summary>
    private readonly JsonFormatter.EncodeOptions encodeOptions;

    /// <summary>Options applied when decoding JSON text.</summary>
    private readonly JsonFormatter.DecodeOptions decodeOptions;

    /// <summary>
    /// A singleton instance with default parameters.
    /// </summary>
    public static readonly PhpJsonSerializer Default = new PhpJsonSerializer(new JsonFormatter.EncodeOptions(), new JsonFormatter.DecodeOptions());

    /// <summary>
    /// Initializes a parameterized serializer.
    /// </summary>
    /// <param name="encodeOptions">Options used for encoding.</param>
    /// <param name="decodeOptions">Options used for decoding.</param>
    internal PhpJsonSerializer(JsonFormatter.EncodeOptions encodeOptions, JsonFormatter.DecodeOptions decodeOptions)
    {
        this.encodeOptions = encodeOptions;
        this.decodeOptions = decodeOptions;
    }

    /// <summary>
    /// Returns the name.
    /// </summary>
    protected override string GetName()
    {
        return "JSON";
    }

    /// <summary>
    /// Returns a fresh JSON formatter using the current page encoding set in the global configuration.
    /// </summary>
    protected override IFormatter CreateFormatter(DTypeDesc caller)
    {
        return new JsonFormatter(Configuration.Application.Globalization.PageEncoding, encodeOptions, decodeOptions, caller);
    }
}
#endregion
//#region PhalangerSerializer
//public sealed class PhalangerSerializer : Serializer
//{
// private PhalangerSerializer() { }
// /// <summary>
// /// A singleton instance.
// /// </summary>
// public static readonly PhalangerSerializer Default = new PhalangerSerializer();
// /// <summary>
// /// Returns the name.
// /// </summary>
// protected override string GetName()
// {
// return "phalanger";
// }
// /// <summary>
// /// Returns the formatter using the current page encoding set in the global configuration.
// /// </summary>
// protected override IFormatter CreateFormatter(DTypeDesc caller)
// {
// return new PhalangerFormatter(Configuration.Application.Globalization.PageEncoding, caller);
// }
//}
//#endregion
#region Serializers
/// <summary>
/// Maintains serializers. Libraries can register their own serializers here.
/// </summary>
public static class Serializers
{
    /// <summary>
    /// Registered handlers, keyed by serializer name.
    /// </summary>
    private static Dictionary<string, Serializer> serializers = new Dictionary<string, Serializer>();

    /// <summary>
    /// Guards all access to <see cref="serializers"/>.
    /// </summary>
    private static readonly object serializersLock = new object();

    /// <summary>
    /// Registers a new serializer. Serializers are usually registered by libraries.
    /// </summary>
    /// <param name="serializer">The serializer.</param>
    /// <returns>Whether the serializer has been successfully registered. Two serializers with the same names can't be registered.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="serializer"/> is a <B>null</B> reference.</exception>
    public static bool RegisterSerializer(Serializer serializer)
    {
        if (serializer == null) throw new ArgumentNullException("serializer");

        // Name invokes the user-supplied GetName() (and can throw); evaluate it exactly once
        // instead of once per dictionary operation.
        string name = serializer.Name;

        lock (serializersLock)
        {
            if (serializers.ContainsKey(name))
                return false;
            serializers.Add(name, serializer);
        }
        return true;
    }

    /// <summary>
    /// Gets a serializer by specified name.
    /// </summary>
    /// <param name="name">The name of the serializer.</param>
    /// <returns>The serializer or <B>null</B> reference if such serializer has not been registered.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="name"/> is a <B>null</B> reference.</exception>
    public static Serializer GetSerializer(string name)
    {
        if (name == null) throw new ArgumentNullException("name");

        lock (serializersLock)
        {
            // single lookup via TryGetValue instead of ContainsKey + indexer (and no redundant cast)
            Serializer result;
            if (serializers.TryGetValue(name, out result))
                return result;
            return null;
        }
    }
}
#endregion
}
| |
using System;
using System.Text;
using System.Collections.Generic;
using csfmt.Lexers;
using csfmt.Parsers;
namespace csfmt
{
/// <summary>
/// Entry point of the csfmt command line tool: parses arguments and formats C# source files.
/// </summary>
class CSharpFormatterMain
{
    /// <summary>
    /// Ad-hoc manual smoke test: lexes and formats a small hard-coded snippet and prints the result.
    /// Invoked manually from Main during development (call is commented out).
    /// </summary>
    public static void F()
    {
        var ts = Lexer.LexerString(@"namespace A{public class B{public static void Main(){yield return(!x);}}}");
        var psr = new Parser(ts);
        var result = psr.Evalute();
        if (result.Success)
        {
            IO.PrintNormal(result.Output);
        }
        else
        {
            IO.PrintRed(result.Output);
        }
    }

    /// <summary>
    /// Command line entry point. With no arguments (or --help) prints the README-style help text;
    /// otherwise parses options, collects .cs files (recursing into directories), formats each one
    /// and either overwrites it or prints the result to standard output.
    /// </summary>
    /// <param name="args">Command line arguments.</param>
    public static void Main(String[] args)
    {
        //F();
        //return;
        var wait = false;
        try
        {
            var defaultFI = new FormatterInfo();
            var showHelp = false;
            if (0 == args.Length)
            {
                showHelp = true;
            }
            else if (1 == args.Length)
            {
                switch (args[0])
                {
                    case @"--help":
                        showHelp = true;
                        break;
                }
            }
            if (showHelp)
            {
                const String urlAppveyorSvg = @"https://ci.appveyor.com/api/projects/status/n000mdm0bj1pxd35/branch/master?svg=true";
                const String urlAppveyor = @"https://ci.appveyor.com/project/rbtnn/csharp-formatter/branch/master";
                const String urlTravisSvg = @"https://travis-ci.org/rbtnn/csharp-formatter.svg?branch=master";
                const String urlTravis = @"https://travis-ci.org/rbtnn/csharp-formatter";
                IO.PrintNormal(@"");
                IO.PrintYellow(@"# CSharp-Formatter");
                // FIX: the format string was @"[]({1})", which never referenced argument {0}
                // (the badge SVG) and rendered an empty markdown link instead of a CI badge.
                IO.PrintHide(String.Format(@"[![]({0})]({1})", urlAppveyorSvg, urlAppveyor));
                IO.PrintHide(String.Format(@"[![]({0})]({1})", urlTravisSvg, urlTravis));
                IO.PrintNormal(@"");
                IO.PrintNormal(@"This is a C# formatter tool without Visual Studio using .NET Framework's csc.exe. ");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintGreen(@"## BUILD");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"__Windows__ ");
                IO.PrintNormal(@"");
                IO.PrintDarkGray(@"> MSBuild.exe msbuild.xml");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"__Mac__ ");
                IO.PrintNormal(@"");
                IO.PrintDarkGray(@"> xbuild msbuild.xml");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintGreen(@"## USAGE");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"__Windows__ ");
                IO.PrintNormal(@"");
                IO.PrintDarkGray(@"> .\bin\csfmt.exe {options} {.cs file or directory}");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"__Mac__ ");
                IO.PrintNormal(@"");
                IO.PrintDarkGray(@"> mono ./bin/csfmt.exe {options} {.cs file or directory}");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintGreen(@"## OPTIONS");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --tab");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Use tabs when indenting. ");
                IO.PrintNormal(@"(default: __" + (defaultFI.UseTab ? @"on" : @"off") + @"__) ");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --indent {count}");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Use {count} spaces per indent level. ");
                IO.PrintNormal(@"(default: __" + defaultFI.Indent + @"__) ");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --encoding {encoding}");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Read .cs file by {encoding}. {encoding} is one of following encodings. ");
                IO.PrintNormal(@"(default: __utf-8n__) ");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"    cp932");
                IO.PrintNormal(@"    shift_jis");
                IO.PrintNormal(@"    utf-16");
                IO.PrintNormal(@"    iso-2022-jp");
                IO.PrintNormal(@"    euc-jp");
                IO.PrintNormal(@"    utf-7");
                IO.PrintNormal(@"    utf-8");
                IO.PrintNormal(@"    utf-8n");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --overwrite");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Do not print reformatted sources to standard output. ");
                IO.PrintNormal(@"Overwrite .cs file By LF with csfmt's version. ");
                IO.PrintNormal(@"(default: __" + (defaultFI.Overwrite ? @"on" : @"off") + @"__) ");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --crlf");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Overwrite .cs file by CRLF. --overwrite only. ");
                IO.PrintNormal(@"(default: __" + (defaultFI.Crlf ? @"on" : @"off") + @"__) ");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --debug");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Enable Debug mode. ");
                IO.PrintNormal(@"(default: __" + (defaultFI.DebugMode ? @"on" : @"off") + @"__) ");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintRed(@"### --wait");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Show `Press any key to continue.`. ");
                IO.PrintNormal(@"(default: __off__) ");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"");
                IO.PrintGreen(@"## LICENSE");
                IO.PrintNormal(@"");
                IO.PrintNormal(@"Distributed under MIT License. See LICENSE. ");
                IO.PrintNormal(@"");
            }
            else
            {
                var fis = new List<FormatterInfo>(){};
                Encoding enc = defaultFI.Enc;
                var indent = defaultFI.Indent;
                var useTab = defaultFI.UseTab;
                var overwrite = defaultFI.Overwrite;
                var crlf = defaultFI.Crlf;
                var sbkp = defaultFI.SpaceBetweenKeywordAndParan;
                var debugMode = defaultFI.DebugMode;
                // Name of the option (e.g. "--indent") whose value is expected in the NEXT argument.
                var optionName = @"";
                foreach (var arg in args)
                {
                    switch (arg)
                    {
                        case @"--overwrite":
                            overwrite = true;
                            break;
                        case @"--indent":
                            optionName = arg;
                            break;
                        case @"--tab":
                            useTab = true;
                            break;
                        case @"--crlf":
                            crlf = true;
                            break;
                        case @"--debug":
                            debugMode = true;
                            break;
                        case @"--wait":
                            wait = true;
                            break;
                        case @"--encoding":
                            optionName = arg;
                            break;
                        default:
                            // Either a value for a pending option, or a file/directory path.
                            switch (optionName)
                            {
                                case @"":
                                    if (IO.ExistsFile(arg))
                                    {
                                        fis.Add(new FormatterInfo(arg, enc, indent, useTab, overwrite, crlf, sbkp, debugMode));
                                    }
                                    else if (IO.ExistsDirectory(arg))
                                    {
                                        foreach (var f in IO.GetFilesRec(arg))
                                        {
                                            fis.Add(new FormatterInfo(f, enc, indent, useTab, overwrite, crlf, sbkp, debugMode));
                                        }
                                    }
                                    else
                                    {
                                        throw new Exception(String.Format(@"No such file or directory: '{0}'", arg));
                                    }
                                    break;
                                case @"--indent":
                                    var temp = 0;
                                    if (Int32.TryParse(arg, out temp))
                                    {
                                        indent = temp;
                                    }
                                    else
                                    {
                                        throw new Exception(String.Format(@"Invalid argument: '{0}'", arg));
                                    }
                                    break;
                                case @"--encoding":
                                    switch (arg)
                                    {
                                        case @"cp932":
                                        case @"shift_jis":
                                            enc = Encoding.GetEncoding(932);
                                            break;
                                        case @"utf-16":
                                            enc = Encoding.GetEncoding(1200);
                                            break;
                                        case @"iso-2022-jp":
                                            enc = Encoding.GetEncoding(50222);
                                            break;
                                        case @"euc-jp":
                                            enc = Encoding.GetEncoding(51932);
                                            break;
                                        case @"utf-7":
                                            enc = Encoding.GetEncoding(65000);
                                            break;
                                        case @"utf-8":
                                            // with byte order mark
                                            enc = new UTF8Encoding(true);
                                            break;
                                        case @"utf-8n":
                                            // without byte order mark
                                            enc = new UTF8Encoding(false);
                                            break;
                                        default:
                                            throw new Exception(String.Format(@"Invalid encoding: '{0}'", arg));
                                    }
                                    break;
                            }
                            optionName = @"";
                            break;
                    }
                }
                foreach (var fi in fis)
                {
                    var ts = Lexer.LexerFile(fi);
                    var psr = new Parser(ts, fi);
                    var result = psr.Evalute();
                    if (result.Success)
                    {
                        if (fi.Overwrite)
                        {
                            if (fi.Crlf)
                            {
                                IO.WriteFileCRLF(fi.Path, result.Output, fi.Enc);
                            }
                            else
                            {
                                IO.WriteFileLF(fi.Path, result.Output, fi.Enc);
                            }
                        }
                        else
                        {
                            IO.PrintNormal(result.Output);
                        }
                    }
                    else
                    {
                        IO.PrintRed(result.Output);
                    }
                }
            }
        }
        catch (Exception ex)
        {
            IO.PrintRed(String.Format(@"{0}", ex.ToString()));
        }
        if (wait)
        {
            IO.PrintYellow(@"Press any key to continue.");
            IO.ReadKey();
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using Nop.Core;
using Nop.Core.Domain.Customers;
using Nop.Core.Domain.Orders;
namespace Nop.Services.Customers
{
/// <summary>
/// Customer service interface
/// </summary>
public partial interface ICustomerService
{
    #region Customers

    /// <summary>
    /// Gets all customers
    /// </summary>
    /// <param name="createdFromUtc">Created date from (UTC); null to load all records</param>
    /// <param name="createdToUtc">Created date to (UTC); null to load all records</param>
    /// <param name="affiliateId">Affiliate identifier</param>
    /// <param name="vendorId">Vendor identifier</param>
    /// <param name="customerRoleIds">A list of customer role identifiers to filter by (at least one match); pass null or empty list in order to load all customers</param>
    /// <param name="email">Email; null to load all customers</param>
    /// <param name="username">Username; null to load all customers</param>
    /// <param name="firstName">First name; null to load all customers</param>
    /// <param name="lastName">Last name; null to load all customers</param>
    /// <param name="dayOfBirth">Day of birth; 0 to load all customers</param>
    /// <param name="monthOfBirth">Month of birth; 0 to load all customers</param>
    /// <param name="company">Company; null to load all customers</param>
    /// <param name="phone">Phone; null to load all customers</param>
    /// <param name="zipPostalCode">Zip / postal code; null to load all customers</param>
    /// <param name="ipAddress">IP address; null to load all customers</param>
    /// <param name="loadOnlyWithShoppingCart">Value indicating whether to load customers only with shopping cart</param>
    /// <param name="sct">Value indicating what shopping cart type to filter; used when 'loadOnlyWithShoppingCart' param is 'true'</param>
    /// <param name="pageIndex">Page index</param>
    /// <param name="pageSize">Page size</param>
    /// <returns>Customers</returns>
    IPagedList<Customer> GetAllCustomers(DateTime? createdFromUtc = null,
        DateTime? createdToUtc = null, int affiliateId = 0, int vendorId = 0,
        int[] customerRoleIds = null, string email = null, string username = null,
        string firstName = null, string lastName = null,
        int dayOfBirth = 0, int monthOfBirth = 0,
        string company = null, string phone = null, string zipPostalCode = null,
        string ipAddress = null, bool loadOnlyWithShoppingCart = false, ShoppingCartType? sct = null,
        int pageIndex = 0, int pageSize = int.MaxValue);

    /// <summary>
    /// Gets online customers
    /// </summary>
    /// <param name="lastActivityFromUtc">Customer last activity date (from)</param>
    /// <param name="customerRoleIds">A list of customer role identifiers to filter by (at least one match); pass null or empty list in order to load all customers</param>
    /// <param name="pageIndex">Page index</param>
    /// <param name="pageSize">Page size</param>
    /// <returns>Customers</returns>
    IPagedList<Customer> GetOnlineCustomers(DateTime lastActivityFromUtc,
        int[] customerRoleIds, int pageIndex = 0, int pageSize = int.MaxValue);

    /// <summary>
    /// Delete a customer
    /// </summary>
    /// <param name="customer">Customer</param>
    void DeleteCustomer(Customer customer);

    /// <summary>
    /// Gets a customer
    /// </summary>
    /// <param name="customerId">Customer identifier</param>
    /// <returns>A customer</returns>
    Customer GetCustomerById(int customerId);

    /// <summary>
    /// Get customers by identifiers
    /// </summary>
    /// <param name="customerIds">Customer identifiers</param>
    /// <returns>Customers</returns>
    IList<Customer> GetCustomersByIds(int[] customerIds);

    /// <summary>
    /// Gets a customer by GUID
    /// </summary>
    /// <param name="customerGuid">Customer GUID</param>
    /// <returns>A customer</returns>
    Customer GetCustomerByGuid(Guid customerGuid);

    /// <summary>
    /// Get customer by email
    /// </summary>
    /// <param name="email">Email</param>
    /// <returns>Customer</returns>
    Customer GetCustomerByEmail(string email);

    /// <summary>
    /// Get customer by system name
    /// </summary>
    /// <param name="systemName">System name</param>
    /// <returns>Customer</returns>
    Customer GetCustomerBySystemName(string systemName);

    /// <summary>
    /// Get customer by username
    /// </summary>
    /// <param name="username">Username</param>
    /// <returns>Customer</returns>
    Customer GetCustomerByUsername(string username);

    /// <summary>
    /// Insert a guest customer
    /// </summary>
    /// <returns>Customer</returns>
    Customer InsertGuestCustomer();

    /// <summary>
    /// Insert a customer
    /// </summary>
    /// <param name="customer">Customer</param>
    void InsertCustomer(Customer customer);

    /// <summary>
    /// Updates the customer
    /// </summary>
    /// <param name="customer">Customer</param>
    void UpdateCustomer(Customer customer);

    /// <summary>
    /// Reset data required for checkout
    /// </summary>
    /// <param name="customer">Customer</param>
    /// <param name="storeId">Store identifier</param>
    /// <param name="clearCouponCodes">A value indicating whether to clear coupon code</param>
    /// <param name="clearCheckoutAttributes">A value indicating whether to clear selected checkout attributes</param>
    /// <param name="clearRewardPoints">A value indicating whether to clear "Use reward points" flag</param>
    /// <param name="clearShippingMethod">A value indicating whether to clear selected shipping method</param>
    /// <param name="clearPaymentMethod">A value indicating whether to clear selected payment method</param>
    void ResetCheckoutData(Customer customer, int storeId,
        bool clearCouponCodes = false, bool clearCheckoutAttributes = false,
        bool clearRewardPoints = true, bool clearShippingMethod = true,
        bool clearPaymentMethod = true);

    /// <summary>
    /// Delete guest customer records
    /// </summary>
    /// <param name="createdFromUtc">Created date from (UTC); null to load all records</param>
    /// <param name="createdToUtc">Created date to (UTC); null to load all records</param>
    /// <param name="onlyWithoutShoppingCart">A value indicating whether to delete customers only without shopping cart</param>
    /// <returns>Number of deleted customers</returns>
    int DeleteGuestCustomers(DateTime? createdFromUtc, DateTime? createdToUtc, bool onlyWithoutShoppingCart);

    #endregion

    #region Customer roles

    /// <summary>
    /// Delete a customer role
    /// </summary>
    /// <param name="customerRole">Customer role</param>
    void DeleteCustomerRole(CustomerRole customerRole);

    /// <summary>
    /// Gets a customer role
    /// </summary>
    /// <param name="customerRoleId">Customer role identifier</param>
    /// <returns>Customer role</returns>
    CustomerRole GetCustomerRoleById(int customerRoleId);

    /// <summary>
    /// Gets a customer role
    /// </summary>
    /// <param name="systemName">Customer role system name</param>
    /// <returns>Customer role</returns>
    CustomerRole GetCustomerRoleBySystemName(string systemName);

    /// <summary>
    /// Gets all customer roles
    /// </summary>
    /// <param name="showHidden">A value indicating whether to show hidden records</param>
    /// <returns>Customer roles</returns>
    IList<CustomerRole> GetAllCustomerRoles(bool showHidden = false);

    /// <summary>
    /// Inserts a customer role
    /// </summary>
    /// <param name="customerRole">Customer role</param>
    void InsertCustomerRole(CustomerRole customerRole);

    /// <summary>
    /// Updates the customer role
    /// </summary>
    /// <param name="customerRole">Customer role</param>
    void UpdateCustomerRole(CustomerRole customerRole);

    #endregion

    #region Customer passwords

    /// <summary>
    /// Gets customer passwords
    /// </summary>
    /// <param name="customerId">Customer identifier; pass null to load all records</param>
    /// <param name="passwordFormat">Password format; pass null to load all records</param>
    /// <param name="passwordsToReturn">Number of returning passwords; pass null to load all records</param>
    /// <returns>List of customer passwords</returns>
    IList<CustomerPassword> GetCustomerPasswords(int? customerId = null,
        PasswordFormat? passwordFormat = null, int? passwordsToReturn = null);

    /// <summary>
    /// Get current customer password
    /// </summary>
    /// <param name="customerId">Customer identifier</param>
    /// <returns>Customer password</returns>
    CustomerPassword GetCurrentPassword(int customerId);

    /// <summary>
    /// Insert a customer password
    /// </summary>
    /// <param name="customerPassword">Customer password</param>
    void InsertCustomerPassword(CustomerPassword customerPassword);

    /// <summary>
    /// Update a customer password
    /// </summary>
    /// <param name="customerPassword">Customer password</param>
    void UpdateCustomerPassword(CustomerPassword customerPassword);

    #endregion
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using DiscUtils.Streams;
namespace DiscUtils.Diagnostics
{
internal sealed class ValidatingFileSystemWrapperStream<Tfs, Tc> : SparseStream
where Tfs : DiscFileSystem, IDiagnosticTraceable
where Tc : DiscFileSystemChecker
{
private ValidatingFileSystem<Tfs, Tc> _fileSystem;
private ValidatingFileSystem<Tfs, Tc>.StreamOpenFn _openFn;
private long _replayHandle;
private static long _nextReplayHandle;
private long _shadowPosition;
private bool _disposed;
public ValidatingFileSystemWrapperStream(ValidatingFileSystem<Tfs, Tc> fileSystem, ValidatingFileSystem<Tfs, Tc>.StreamOpenFn openFn)
{
_fileSystem = fileSystem;
_openFn = openFn;
_replayHandle = Interlocked.Increment(ref _nextReplayHandle);
}
protected override void Dispose(bool disposing)
{
if (disposing && !_disposed && !_fileSystem.InLockdown)
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
GetNativeStream(fs, context, pos).Dispose();
_disposed = true;
ForgetNativeStream(context);
return 0;
};
_fileSystem.PerformActivity(fn);
}
// Don't call base.Dispose because it calls close
base.Dispose(disposing);
}
public override bool CanRead
{
get
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).CanRead;
};
return (bool)_fileSystem.PerformActivity(fn);
}
}
public override bool CanSeek
{
get
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).CanSeek;
};
return (bool)_fileSystem.PerformActivity(fn);
}
}
public override bool CanWrite
{
get
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).CanWrite;
};
return (bool)_fileSystem.PerformActivity(fn);
}
}
public override void Flush()
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
GetNativeStream(fs, context, pos).Flush();
return 0;
};
_fileSystem.PerformActivity(fn);
}
public override long Length
{
get
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).Length;
};
return (long)_fileSystem.PerformActivity(fn);
}
}
public override long Position
{
get
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).Position;
};
return (long)_fileSystem.PerformActivity(fn);
}
set
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
GetNativeStream(fs, context, pos).Position = value;
return 0;
};
_fileSystem.PerformActivity(fn);
_shadowPosition = value;
}
}
public override IEnumerable<StreamExtent> Extents
{
get
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).Extents;
};
return (IEnumerable<StreamExtent>)_fileSystem.PerformActivity(fn);
}
}
public override int Read(byte[] buffer, int offset, int count)
{
long pos = _shadowPosition;
// Avoid stomping on buffers we know nothing about by ditching the writes into gash buffer.
byte[] tempBuffer = new byte[buffer.Length];
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).Read(tempBuffer, offset, count);
};
int numRead = (int)_fileSystem.PerformActivity(fn);
Array.Copy(tempBuffer, buffer, numRead);
_shadowPosition += numRead;
return numRead;
}
public override long Seek(long offset, SeekOrigin origin)
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
return GetNativeStream(fs, context, pos).Seek(offset, origin);
};
_shadowPosition = (long)_fileSystem.PerformActivity(fn);
return _shadowPosition;
}
public override void SetLength(long value)
{
long pos = _shadowPosition;
Activity<Tfs> fn = delegate(Tfs fs, Dictionary<string, object> context)
{
GetNativeStream(fs, context, pos).SetLength(value);
return 0;
};
_fileSystem.PerformActivity(fn);
}
/// <summary>
/// Writes to the wrapped native stream via the activity mechanism and advances
/// the shadow position.
/// </summary>
/// <param name="buffer">Source buffer.</param>
/// <param name="offset">Offset within <paramref name="buffer"/> of the first byte to write.</param>
/// <param name="count">Number of bytes to write.</param>
public override void Write(byte[] buffer, int offset, int count)
{
    long shadowPos = _shadowPosition;

    // Take a copy of the buffer - otherwise who knows what we're messing with.
    byte[] snapshot = new byte[buffer.Length];
    Array.Copy(buffer, snapshot, buffer.Length);

    Activity<Tfs> writeActivity = (fs, context) =>
    {
        GetNativeStream(fs, context, shadowPos).Write(snapshot, offset, count);
        return 0;
    };
    _fileSystem.PerformActivity(writeActivity);

    _shadowPosition += count;
}
/// <summary>
/// Stores the native stream in the activity context so subsequent replayed
/// operations on this wrapper can find it.
/// </summary>
/// <param name="context">The activity context to store the stream in.</param>
/// <param name="s">The native stream instance.</param>
internal void SetNativeStream(Dictionary<string, object> context, Stream s)
{
    context["WrapStream#" + _replayHandle + "_Stream"] = s;
}
/// <summary>
/// Retrieves the native stream from the activity context, re-opening it if it
/// is absent (which happens when a replay does not include the original open),
/// and positions it at the supplied shadow position.
/// </summary>
/// <param name="fs">The file system instance the activity runs against.</param>
/// <param name="context">The activity context that may cache the stream.</param>
/// <param name="shadowPosition">The position the stream should be at.</param>
/// <returns>The native stream, correctly positioned.</returns>
private SparseStream GetNativeStream(Tfs fs, Dictionary<string, object> context, long shadowPosition)
{
    string streamKey = "WrapStream#" + _replayHandle + "_Stream";

    object cached;
    SparseStream nativeStream;
    if (context.TryGetValue(streamKey, out cached))
    {
        nativeStream = (SparseStream)cached;
    }
    else
    {
        // The native stream isn't in the context. This means we're replaying
        // but the stream open isn't part of the sequence being replayed. We
        // do our best to re-create it...
        nativeStream = _openFn(fs);
        context[streamKey] = nativeStream;
    }

    if (nativeStream.Position != shadowPosition)
    {
        nativeStream.Position = shadowPosition;
    }

    return nativeStream;
}
/// <summary>
/// Removes this wrapper's cached native stream from the activity context,
/// typically when the stream is closed.
/// </summary>
/// <param name="context">The activity context to purge.</param>
private void ForgetNativeStream(Dictionary<string, object> context)
{
    context.Remove("WrapStream#" + _replayHandle + "_Stream");
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Debug = System.Diagnostics.Debug;
using Interlocked = System.Threading.Interlocked;
namespace System.Xml.Linq
{
/// <summary>
/// This is a thread-safe hash table which maps string keys to values of type TValue. It is assumed that the string key is embedded in the hashed value
/// and can be extracted via a call to ExtractKeyDelegate (in order to save space and allow cleanup of key if value is released due to a WeakReference
/// TValue releasing its target).
/// </summary>
/// <remarks>
/// All methods on this class are thread-safe.
///
/// When the hash table fills up, it is necessary to resize it and rehash all contents. Because this can be expensive,
/// a lock is taken, and one thread is responsible for the resize. Other threads which need to add values must wait
/// for the resize to be complete.
///
/// Thread-Safety Notes
/// ===================
///
/// 1. Because performance and scalability are such a concern with the global name table, I have avoided the use of
/// BIFALOs (Big Fat Locks). Instead, I use CompareExchange, Interlocked.Increment, memory barriers, atomic state objects,
/// etc. to avoid locks. Any changes to code which accesses these variables should be carefully reviewed and tested,
/// as it can be *very* tricky. In particular, if you don't understand the CLR memory model or if you don't know
/// what a memory barrier is, DON'T attempt to modify this code. A good discussion of these topics can be found at
/// <![CDATA[http://discuss.develop.com/archives/wa.exe?A2=ind0203B&L=DOTNET&P=R375]]>.
///
/// 2. Because I am not sure if the CLR spec has changed since versions 1.0/1.1, I am assuming the weak memory model that
/// is described in the ECMA spec, in which normal writes can be reordered. This means I must introduce more memory
/// barriers than otherwise would be necessary.
///
/// 3. There are several thread-safety concepts and patterns I utilize in this code:
/// a. Publishing -- There are a small number of places where state is exposed, or published, to multiple threads.
/// These places are marked with the comment "PUBLISH", and are key locations to consider when
/// reviewing the code for thread-safety.
///
/// b. Immutable objects -- Immutable objects initialize their fields once in their constructor and then never modify
/// them again. As long as care is taken to ensure that initial field values are visible to
/// other threads before publishing the immutable object itself, immutable objects are
/// completely thread-safe.
///
/// c. Atomic state objects -- Locks typically are taken when several pieces of state must be updated atomically. In
/// other words, there is a window in which state is inconsistent, and that window must
/// be protected from view by locking. However, if a new object is created each time state
/// changes (or state changes substantially), then during creation the new object is only
/// visible to a single thread. Once construction is complete, an assignment (guaranteed
/// atomic) can replace the old state object with the new state object, thus publishing a
/// consistent view to all threads.
///
/// d. Retry -- When several threads contend over shared state which only one is allowed to possess, it is possible
/// to avoid locking by repeatedly attempting to acquire the shared state. The CompareExchange method
/// is useful for atomically ensuring that only one thread succeeds, and other threads are notified that
/// they must retry.
///
/// 4. All variables which can be written by multiple threads are marked "SHARED STATE".
/// </remarks>
internal sealed class XHashtable<TValue>
{
    private XHashtableState _state; // SHARED STATE: Contains all XHashtable state, so it can be atomically swapped when resizes occur

    private const int StartingHash = (5381 << 16) + 5381; // Starting hash code value for string keys to be hashed

    /// <summary>
    /// Prototype of function which is called to extract a string key value from a hashed value.
    /// Returns null if the hashed value is invalid (e.g. value has been released due to a WeakReference TValue being cleaned up).
    /// </summary>
    public delegate string ExtractKeyDelegate(TValue value);

    /// <summary>
    /// Construct a new XHashtable with the specified starting capacity.
    /// </summary>
    public XHashtable(ExtractKeyDelegate extractKey, int capacity)
    {
        _state = new XHashtableState(extractKey, capacity);
    }

    /// <summary>
    /// Get an existing value from the hash table. Return false if no such value exists.
    /// </summary>
    public bool TryGetValue(string key, int index, int count, out TValue value)
    {
        return _state.TryGetValue(key, index, count, out value);
    }

    /// <summary>
    /// Add a value to the hash table, hashed based on a string key embedded in it. Return the added value (may be a different object than "value").
    /// </summary>
    public TValue Add(TValue value)
    {
        TValue newValue;

        // Loop until value is in hash table
        while (true)
        {
            // Add new value
            // XHashtableState.TryAdd returns false if hash table is not big enough
            if (_state.TryAdd(value, out newValue))
                return newValue;

            // PUBLISH (state)
            // Hash table was not big enough, so resize it.
            // We only want one thread to perform a resize, as it is an expensive operation
            // First thread will perform resize; waiting threads will call Resize(), but should immediately
            // return since there will almost always be space in the hash table resized by the first thread.
            lock (this)
            {
                XHashtableState newState = _state.Resize();

                // Use memory barrier to ensure that the resized XHashtableState object is fully constructed before it is assigned.
                // NOTE: Interlocked.MemoryBarrier() is called through the "Interlocked" alias declared at the top of this
                // file; the bare Thread type is not in scope (System.Threading is never imported), so the previous
                // Thread.MemoryBarrier() call did not compile.
#if !SILVERLIGHT
                Interlocked.MemoryBarrier();
#else // SILVERLIGHT
                // The MemoryBarrier method usage is probably incorrect and should be removed.
                // Replacing with Interlocked.CompareExchange for now (with no effect)
                // which will do a very similar thing to MemoryBarrier (it's just slower)
                System.Threading.Interlocked.CompareExchange<XHashtableState>(ref _state, null, null);
#endif // SILVERLIGHT

                _state = newState;
            }
        }
    }

    /// <summary>
    /// This class contains all the hash table state. Rather than creating a bucket object, buckets are structs
    /// packed into an array. Buckets with the same truncated hash code are linked into lists, so that collisions
    /// can be disambiguated.
    /// </summary>
    /// <remarks>
    /// Note that the "buckets" and "entries" arrays are never themselves written by multiple threads. Instead, the
    /// *contents* of the array are written by multiple threads. Resizing the hash table does not modify these variables,
    /// or even modify the contents of these variables. Instead, resizing makes an entirely new XHashtableState object
    /// in which all entries are rehashed. This strategy allows reader threads to continue finding values in the "old"
    /// XHashtableState, while writer threads (those that need to add a new value to the table) are blocked waiting for
    /// the resize to complete.
    /// </remarks>
    private sealed class XHashtableState
    {
        private int[] _buckets; // Buckets contain indexes into entries array (bucket values are SHARED STATE)
        private Entry[] _entries; // Entries contain linked lists of buckets (next pointers are SHARED STATE)
        private int _numEntries; // SHARED STATE: Current number of entries (including orphaned entries)
        private ExtractKeyDelegate _extractKey; // Delegate called in order to extract string key embedded in hashed TValue

        private const int EndOfList = 0; // End of linked list marker
        private const int FullList = -1; // Indicates entries should not be added to end of linked list

        /// <summary>
        /// Construct a new XHashtableState object with the specified capacity.
        /// </summary>
        public XHashtableState(ExtractKeyDelegate extractKey, int capacity)
        {
            Debug.Assert((capacity & (capacity - 1)) == 0, "capacity must be a power of 2");
            Debug.Assert(extractKey != null, "extractKey may not be null");

            // Initialize hash table data structures, with specified maximum capacity
            _buckets = new int[capacity];
            _entries = new Entry[capacity];

            // Save delegate
            _extractKey = extractKey;
        }

        /// <summary>
        /// If this table is not full, then just return "this". Otherwise, create and return a new table with
        /// additional capacity, and rehash all values in the table.
        /// </summary>
        public XHashtableState Resize()
        {
            // No need to resize if there are open entries
            if (_numEntries < _buckets.Length)
                return this;

            int newSize = 0;

            // Determine capacity of resized hash table by first counting number of valid, non-orphaned entries
            // As this count proceeds, close all linked lists so that no additional entries can be added to them
            for (int bucketIdx = 0; bucketIdx < _buckets.Length; bucketIdx++)
            {
                int entryIdx = _buckets[bucketIdx];

                if (entryIdx == EndOfList)
                {
                    // Replace EndOfList with FullList, so that any threads still attempting to add will be forced to resize
                    entryIdx = Interlocked.CompareExchange(ref _buckets[bucketIdx], FullList, EndOfList);
                }

                // Loop until we've guaranteed that the list has been counted and closed to further adds
                while (entryIdx > EndOfList)
                {
                    // Count each valid entry
                    if (_extractKey(_entries[entryIdx].Value) != null)
                        newSize++;

                    if (_entries[entryIdx].Next == EndOfList)
                    {
                        // Replace EndOfList with FullList, so that any threads still attempting to add will be forced to resize
                        entryIdx = Interlocked.CompareExchange(ref _entries[entryIdx].Next, FullList, EndOfList);
                    }
                    else
                    {
                        // Move to next entry in the list
                        entryIdx = _entries[entryIdx].Next;
                    }
                }
                Debug.Assert(entryIdx == EndOfList, "Resize() should only be called by one thread");
            }

            // Double number of valid entries; if result is less than current capacity, then use current capacity
            if (newSize < _buckets.Length / 2)
            {
                newSize = _buckets.Length;
            }
            else
            {
                newSize = _buckets.Length * 2;

                if (newSize < 0)
                    throw new OverflowException();
            }

            // Create new hash table with additional capacity
            XHashtableState newHashtable = new XHashtableState(_extractKey, newSize);

            // Rehash names (TryAdd will always succeed, since we won't fill the new table)
            // Do not simply walk over entries and add them to table, as that would add orphaned
            // entries. Instead, walk the linked lists and add each name.
            for (int bucketIdx = 0; bucketIdx < _buckets.Length; bucketIdx++)
            {
                int entryIdx = _buckets[bucketIdx];
                TValue newValue;

                while (entryIdx > EndOfList)
                {
                    newHashtable.TryAdd(_entries[entryIdx].Value, out newValue);

                    entryIdx = _entries[entryIdx].Next;
                }
                Debug.Assert(entryIdx == FullList, "Linked list should have been closed when it was counted");
            }

            return newHashtable;
        }

        /// <summary>
        /// Attempt to find "key" in the table. If the key exists, return the associated value in "value" and
        /// return true. Otherwise return false.
        /// </summary>
        public bool TryGetValue(string key, int index, int count, out TValue value)
        {
            int hashCode = ComputeHashCode(key, index, count);
            int entryIndex = 0;

            // If a matching entry is found, return its value
            if (FindEntry(hashCode, key, index, count, ref entryIndex))
            {
                value = _entries[entryIndex].Value;
                return true;
            }

            // No matching entry found, so return false
            value = default(TValue);
            return false;
        }

        /// <summary>
        /// Attempt to add "value" to the table, hashed by an embedded string key. If a value having the same key already exists,
        /// then return the existing value in "newValue". Otherwise, return the newly added value in "newValue".
        ///
        /// If the hash table is full, return false. Otherwise, return true.
        /// </summary>
        public bool TryAdd(TValue value, out TValue newValue)
        {
            int newEntry, entryIndex;
            string key;
            int hashCode;

            // Assume "value" will be added and returned as "newValue"
            newValue = value;

            // Extract the key from the value. If it's null, then value is invalid and does not need to be added to table.
            key = _extractKey(value);
            if (key == null)
                return true;

            // Compute hash code over entire length of key
            hashCode = ComputeHashCode(key, 0, key.Length);

            // Assume value is not yet in the hash table, and prepare to add it (if table is full, return false).
            // Use the entry index returned from Increment, which will never be zero, as zero conflicts with EndOfList.
            // Although this means that the first entry will never be used, it avoids the need to initialize all
            // starting buckets to the EndOfList value.
            newEntry = Interlocked.Increment(ref _numEntries);
            if (newEntry < 0 || newEntry >= _buckets.Length)
                return false;

            _entries[newEntry].Value = value;
            _entries[newEntry].HashCode = hashCode;

            // Ensure that all writes to the entry can't be reordered past this barrier (or other threads might see new entry
            // in list before entry has been initialized!).
            // NOTE: called via the "Interlocked" alias; the bare Thread type is not in scope in this file.
#if !SILVERLIGHT
            Interlocked.MemoryBarrier();
#else // SILVERLIGHT
            // The MemoryBarrier method usage is probably incorrect and should be removed.
            // Replacing with Interlocked.CompareExchange for now (with no effect)
            // which will do a very similar thing to MemoryBarrier (it's just slower)
            System.Threading.Interlocked.CompareExchange<Entry[]>(ref _entries, null, null);
#endif // SILVERLIGHT

            // Loop until a matching entry is found, a new entry is added, or linked list is found to be full
            entryIndex = 0;
            while (!FindEntry(hashCode, key, 0, key.Length, ref entryIndex))
            {
                // PUBLISH (buckets slot)
                // No matching entry found, so add the new entry to the end of the list ("entryIndex" is index of last entry)
                if (entryIndex == 0)
                    entryIndex = Interlocked.CompareExchange(ref _buckets[hashCode & (_buckets.Length - 1)], newEntry, EndOfList);
                else
                    entryIndex = Interlocked.CompareExchange(ref _entries[entryIndex].Next, newEntry, EndOfList);

                // Return true only if the CompareExchange succeeded (happens when replaced value is EndOfList).
                // Return false if the linked list turned out to be full because another thread is currently resizing
                // the hash table. In this case, entries[newEntry] is orphaned (not part of any linked list) and the
                // Add needs to be performed on the new hash table. Otherwise, keep looping, looking for new end of list.
                if (entryIndex <= EndOfList)
                    return entryIndex == EndOfList;
            }

            // Another thread already added the value while this thread was trying to add, so return that instance instead.
            // Note that entries[newEntry] will be orphaned (not part of any linked list) in this case
            newValue = _entries[entryIndex].Value;

            return true;
        }

        /// <summary>
        /// Searches a linked list of entries, beginning at "entryIndex". If "entryIndex" is 0, then search starts at a hash bucket instead.
        /// Each entry in the list is matched against the (hashCode, key, index, count) key. If a matching entry is found, then its
        /// entry index is returned in "entryIndex" and true is returned. If no matching entry is found, then the index of the last entry
        /// in the list (or 0 if list is empty) is returned in "entryIndex" and false is returned.
        /// </summary>
        /// <remarks>
        /// This method has the side effect of removing invalid entries from the list as it is traversed.
        /// </remarks>
        private bool FindEntry(int hashCode, string key, int index, int count, ref int entryIndex)
        {
            int previousIndex = entryIndex;
            int currentIndex;

            // Set initial value of currentIndex to index of the next entry following entryIndex
            if (previousIndex == 0)
                currentIndex = _buckets[hashCode & (_buckets.Length - 1)];
            else
                currentIndex = previousIndex;

            // Loop while not at end of list
            while (currentIndex > EndOfList)
            {
                // Check for matching hash code, then matching key
                if (_entries[currentIndex].HashCode == hashCode)
                {
                    string keyCompare = _extractKey(_entries[currentIndex].Value);

                    // If the key is invalid, then attempt to remove the current entry from the linked list.
                    // This is thread-safe in the case where the Next field points to another entry, since once a Next field points
                    // to another entry, it will never be modified to be EndOfList or FullList.
                    if (keyCompare == null)
                    {
                        if (_entries[currentIndex].Next > EndOfList)
                        {
                            // PUBLISH (buckets slot or entries slot)
                            // Entry is invalid, so modify previous entry to point to its next entry
                            _entries[currentIndex].Value = default(TValue);
                            currentIndex = _entries[currentIndex].Next;

                            if (previousIndex == 0)
                                _buckets[hashCode & (_buckets.Length - 1)] = currentIndex;
                            else
                                _entries[previousIndex].Next = currentIndex;

                            continue;
                        }
                    }
                    else
                    {
                        // Valid key, so compare keys
                        if (count == keyCompare.Length && string.CompareOrdinal(key, index, keyCompare, 0, count) == 0)
                        {
                            // Found match, so return true and matching entry in list
                            entryIndex = currentIndex;
                            return true;
                        }
                    }
                }

                // Move to next entry
                previousIndex = currentIndex;
                currentIndex = _entries[currentIndex].Next;
            }

            // Return false and last entry in list
            entryIndex = previousIndex;
            return false;
        }

        /// <summary>
        /// Compute hash code for a string key (index, count substring of "key"). The algorithm used is the same on used in NameTable.cs in System.Xml.
        /// </summary>
        private static int ComputeHashCode(string key, int index, int count)
        {
            int hashCode = StartingHash;
            int end = index + count;
            Debug.Assert(key != null, "key should have been checked previously for null");

            // Hash the key
            for (int i = index; i < end; i++)
                unchecked
                {
                    hashCode += (hashCode << 7) ^ key[i];
                }

            // Mix up hash code a bit more and clear the sign bit. This code was taken from NameTable.cs in System.Xml.
            hashCode -= hashCode >> 17;
            hashCode -= hashCode >> 11;
            hashCode -= hashCode >> 5;
            return hashCode & 0x7FFFFFFF;
        }

        /// <summary>
        /// Hash table entry. The "Value" and "HashCode" fields are filled during initialization, and are never changed. The "Next"
        /// field is updated when a new entry is chained to this one, and therefore care must be taken to ensure that updates to
        /// this field are thread-safe.
        /// </summary>
        private struct Entry
        {
            public TValue Value; // Hashed value
            public int HashCode; // Hash code of string key (equal to extractKey(Value).GetHashCode())
            public int Next; // SHARED STATE: Points to next entry in linked list
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Aspose.Cells.Common.CloudHelper;
using Aspose.Cells.Common.Config;
using Aspose.Cells.Common.Controllers;
using Aspose.Cells.Common.Models;
using Aspose.Cells.Common.Services;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json.Linq;
namespace Aspose.Cells.Conversion.Controllers
{
public class ChartCloudApiController : CellsCloudApiControllerBase
{
    private const string AppName = "Chart";

    public ChartCloudApiController(IStorageService storage, IConfiguration configuration, ILogger<ChartCloudApiController> logger) : base(storage, configuration, logger)
    {
    }

    /// <summary>
    /// Packages previously rendered chart images for download: a single PDF
    /// containing all charts when <c>outputType</c> is "PDF", otherwise a ZIP
    /// archive with one converted image per chart.
    /// </summary>
    /// <param name="obj">Request body with a "charts" array and an "outputType" string.</param>
    /// <returns>A response carrying the stored output file's name and folder, or a 500 status on bad input.</returns>
    [HttpPost]
    [ActionName("Download")]
    public async Task<Response> Download([FromBody] JObject obj)
    {
        // "charts" may be absent from the payload; the null-conditional access avoids a
        // NullReferenceException here so the validation below can produce the 500 response.
        var charts = obj["charts"]?.ToObject<List<PreviewChart>>();
        var outputType = Convert.ToString(obj["outputType"]);
        if (charts == null || outputType == null)
        {
            return new Response
            {
                Status = "Save failed, please try again",
                StatusCode = 500
            };
        }
        string outfileName;
        var folderName = Guid.NewGuid().ToString();
        if (outputType is "PDF")
        {
            var index = 0;
            var workbook = new Workbook();
            foreach (var chart in charts)
            {
                // Stack each picture below the previous one; on the first pass there are
                // no pictures yet, so start at row 'index' (0).
                var upperLeftRow = workbook.Worksheets[0].Pictures.Count > 0 ? workbook.Worksheets[0].Pictures[index].LowerRightRow : index;
                var imgPath = $"{Configuration.ConvertFolder}/{chart.ImgFolderName}/{chart.ImgFileName}";
                var stream = await Storage.Download(imgPath);
                index = workbook.Worksheets[0].Pictures.Add(upperLeftRow, 0, stream);
            }
            outfileName = $"{Configuration.ConvertFolder}/{folderName}/charts.pdf";
            await using var memoryStream = new MemoryStream();
            workbook.Save(memoryStream, SaveFormat.Pdf);
            memoryStream.Seek(0, SeekOrigin.Begin);
            await Storage.Upload(outfileName, memoryStream, new AwsMetaInfo
            {
                OriginalFileName = Path.GetFileName(outfileName),
                Title = Path.GetFileName(outfileName)
            });
        }
        else
        {
            var streams = new Dictionary<string, Stream>();
            foreach (var chart in charts)
            {
                var imgPath = $"{Configuration.ConvertFolder}/{chart.ImgFolderName}/{chart.ImgFileName}";
                var stream = await Storage.Download(imgPath);
                var filename = $"{Path.GetFileNameWithoutExtension(imgPath)}.{outputType.ToLower()}";
                var ms = new MemoryStream();
                switch (outputType)
                {
                    case "PNG":
                    {
                        // Source images are already PNG; pass the bytes through untouched.
                        await stream.CopyToAsync(ms);
                        break;
                    }
                    case "JPG":
                    case "BMP":
                    case "TIFF":
                    {
                        using var bitmap = new Bitmap(stream);
                        bitmap.Save(ms, GetImageFormat(outputType));
                        break;
                    }
                    case "SVG":
                    case "XPS":
                    {
                        // GDI+ cannot emit SVG/XPS, so round-trip the image through a workbook.
                        var workbook = new Workbook();
                        workbook.Worksheets[0].Pictures.Add(0, 0, stream);
                        workbook.Save(ms, GetSaveFormat(outputType));
                        break;
                    }
                }
                if (ms.Length > 0)
                {
                    ms.Seek(0, SeekOrigin.Begin);
                    streams.Add(filename, ms);
                }
                else
                {
                    // No output was produced (unrecognized output type): release the buffer
                    // instead of leaking an undisposed stream.
                    await ms.DisposeAsync();
                }
                stream.Close();
            }
            await using var zipStream = new MemoryStream();
            using (var archive = new ZipArchive(zipStream, ZipArchiveMode.Create, true))
            {
                foreach (var (filename, stream) in streams)
                {
                    var entry = archive.CreateEntry(filename);
                    await using var entryStream = entry.Open();
                    await stream.CopyToAsync(entryStream);
                    stream.Close();
                }
            }
            outfileName = $"{Configuration.ConvertFolder}/{folderName}/charts.zip";
            zipStream.Seek(0, SeekOrigin.Begin);
            await Storage.Upload(outfileName, zipStream, new AwsMetaInfo
            {
                OriginalFileName = Path.GetFileName(outfileName),
                Title = Path.GetFileName(outfileName)
            });
        }
        if (outfileName.IsNullOrEmpty())
            return new Response
            {
                Status = "Save failed, please try again",
                StatusCode = 500
            };
        return new Response
        {
            StatusCode = 200,
            Status = "OK",
            FileName = Path.GetFileName(outfileName),
            FolderName = folderName
        };
    }

    /// <summary>
    /// Uploads the posted Excel file(s), exports every chart as a PNG via the
    /// Cells cloud service, stores the resulting images, and returns per-chart
    /// preview metadata.
    /// </summary>
    /// <param name="outputType">The output format the user ultimately wants (echoed back in the response).</param>
    /// <returns>Preview metadata on success; 404 when the file has no charts; 403/500 on failure.</returns>
    [HttpPost]
    [ActionName("PreChart")]
    public async Task<PreviewChartsResponse> PreChart(string outputType)
    {
        var sessionId = Guid.NewGuid().ToString();
        var action = $"Chart to {outputType}";
        try
        {
            // Bounded wait so a stuck upload fails the request instead of hanging it.
            var taskUpload = Task.Run(() => UploadFiles(sessionId, AppName));
            taskUpload.Wait(Configuration.MillisecondsTimeout);
            if (!taskUpload.IsCompleted)
            {
                Logger.LogError(
                    "{AppName} UploadFiles=>{SessionId}=>{Timeout}",
                    AppName,
                    sessionId,
                    Configuration.ProcessingTimeout);
                throw new TimeoutException(Configuration.ProcessingTimeout);
            }
            var docs = taskUpload.Result;
            if (docs == null)
                return (PreviewChartsResponse)PasswordProtectedResponse;
            if (docs.Count == 0 || docs.Count > MaximumUploadFiles)
                return (PreviewChartsResponse)MaximumFileLimitsResponse;
            var charts = new List<PreviewChart>();
            var stopWatch = new Stopwatch();
            stopWatch.Start();
            Logger.LogWarning(
                "Chart to {OutputType}=>{Filenames}=>Start",
                outputType.Trim().ToLower(),
                string.Join(",", docs.Select(t => t.Key))
            );
            CellsCloudClient cells = new CellsCloudClient();
            // Await rather than blocking on .Result: blocking inside an async action
            // risks thread-pool starvation and wraps failures in AggregateException.
            CellsCloudFilesResult cellsCloudFilesResult = await cells.Export(docs, "png", "chart");
            foreach (CellsCloudFileInfo cellsCloudFileInfo in cellsCloudFilesResult.Files)
            {
                var imgFilename = cellsCloudFileInfo.Filename;
                await using (var ms = new MemoryStream())
                {
                    var objectPath = $"{Configuration.ConvertFolder}/{sessionId}/{imgFilename}";
                    byte[] workbookData = System.Convert.FromBase64String(cellsCloudFileInfo.FileContent);
                    ms.Write(workbookData, 0, workbookData.Length);
                    await Storage.Upload(objectPath, ms, new AwsMetaInfo
                    {
                        OriginalFileName = imgFilename,
                        Title = imgFilename
                    });
                }
                var previewChart = new PreviewChart
                {
                    WorkbookHash = cellsCloudFileInfo.GetHashCode(),
                    SheetIndex = 0,
                    ChartHash = cellsCloudFileInfo.FileContent.GetHashCode(),
                    ChartName = Path.GetFileNameWithoutExtension(imgFilename),
                    ImgFolderName = sessionId,
                    ImgFileName = imgFilename
                };
                charts.Add(previewChart);
            }
            stopWatch.Stop();
            Logger.LogWarning(
                "Chart to {OutputType}=>{Filenames}=>cost seconds:{TotalSeconds}",
                outputType.Trim().ToLower(),
                string.Join(",", docs.Select(t => t.Key)),
                stopWatch.Elapsed.TotalSeconds
            );
            if (charts.Count > 0)
                return new PreviewChartsResponse
                {
                    StatusCode = 200,
                    Status = "OK",
                    FolderName = sessionId,
                    Charts = charts,
                    OutputType = outputType
                };
            return new PreviewChartsResponse
            {
                StatusCode = 404,
                Status = "There's no chart in the Excel file.",
                FolderName = sessionId,
                Text = "There's no chart in the Excel file."
            };
        }
        catch (Exception e)
        {
            var exception = e.InnerException ?? e;
            var statusCode = 500;
            if (exception is CellsException { Code: ExceptionType.IncorrectPassword })
            {
                statusCode = 403;
            }
            Logger.LogError(
                "Action = {Action} | Message = {Message} | OutputType = {OutputType} | SessionId = {SessionId}",
                action,
                exception.Message,
                outputType,
                sessionId
            );
            return new PreviewChartsResponse
            {
                StatusCode = statusCode,
                Status = exception.Message,
                FolderName = sessionId,
                Text = action
            };
        }
    }

    /// <summary>
    /// Maps an output type token to the workbook save format used for SVG/XPS export.
    /// Renamed from "_saveFormat" to follow C# PascalCase method naming.
    /// </summary>
    private static SaveFormat GetSaveFormat(string outputType)
    {
        return outputType switch
        {
            "SVG" => SaveFormat.SVG,
            "XPS" => SaveFormat.XPS,
            _ => SaveFormat.Unknown
        };
    }

    /// <summary>
    /// Maps an output type token to the GDI+ image format used when re-encoding bitmaps.
    /// Renamed from "_imageFormat" to follow C# PascalCase method naming.
    /// </summary>
    private static ImageFormat GetImageFormat(string outputType)
    {
        return outputType switch
        {
            "JPG" => ImageFormat.Jpeg,
            "PNG" => ImageFormat.Png,
            "BMP" => ImageFormat.Bmp,
            "TIFF" => ImageFormat.Tiff,
            _ => ImageFormat.Jpeg
        };
    }
}
}
| |
// Generated by SharpKit.QooxDoo.Generator
using System;
using System.Collections.Generic;
using SharpKit.Html;
using SharpKit.JavaScript;
namespace qx.ui.virtualx.core
{
/// <summary>
/// <para>EXPERIMENTAL!</para>
/// <para>The Pane provides a window of a larger virtual grid.</para>
/// <para>The actual rendering is performed by one or several layers (<see cref="ILayer"/>.
/// The pane computes, which cells of the virtual area is visible and instructs
/// the layers to render these cells.</para>
/// </summary>
[JsType(JsMode.Prototype, Name = "qx.ui.virtual.core.Pane", OmitOptionalParameters = true, Export = false)]
public partial class Pane : qx.ui.core.Widget
{
#region Events
/// <summary>
/// <para>Fired if a cell is clicked.</para>
/// </summary>
public event Action<qx.ui.virtualx.core.CellEvent> OnCellClick;
/// <summary>
/// <para>Fired if a cell is right-clicked.</para>
/// </summary>
public event Action<qx.ui.virtualx.core.CellEvent> OnCellContextmenu;
/// <summary>
/// <para>Fired if a cell is double-clicked.</para>
/// </summary>
public event Action<qx.ui.virtualx.core.CellEvent> OnCellDblclick;
/// <summary>
/// <para>Fired if the pane is scrolled horizontally.</para>
/// </summary>
public event Action<qx.eventx.type.Data> OnScrollX;
/// <summary>
/// <para>Fired if the pane is scrolled vertically.</para>
/// </summary>
public event Action<qx.eventx.type.Data> OnScrollY;
/// <summary>
/// <para>Fired on resize of either the container or the (virtual) content.</para>
/// </summary>
public event Action<qx.eventx.type.Event> OnUpdate;
#endregion Events
#region Properties
/// <summary>
/// <para>The item’s preferred height.</para>
/// <para>The computed height may differ from the given height due to
/// stretching. Also take a look at the related properties
/// <see cref="MinHeight"/> and <see cref="MaxHeight"/>.</para>
/// </summary>
/// <remarks>
/// Allow nulls: true
/// </remarks>
[JsProperty(Name = "height", NativeField = true)]
public double Height { get; set; }
/// <summary>
/// <para>The LayoutItem‘s preferred width.</para>
/// <para>The computed width may differ from the given width due to
/// stretching. Also take a look at the related properties
/// <see cref="MinWidth"/> and <see cref="MaxWidth"/>.</para>
/// </summary>
/// <remarks>
/// Allow nulls: true
/// </remarks>
[JsProperty(Name = "width", NativeField = true)]
public double Width { get; set; }
#endregion Properties
#region Methods
public Pane() { throw new NotImplementedException(); }
/// <param name="rowCount">The number of rows of the virtual grid.</param>
/// <param name="columnCount">The number of columns of the virtual grid.</param>
/// <param name="cellHeight">The default cell height.</param>
/// <param name="cellWidth">The default cell width.</param>
public Pane(double rowCount = 0, double columnCount = 0, double cellHeight = 10, double cellWidth = 10) { throw new NotImplementedException(); }
/// <summary>
/// <para>Add a layer to the layer container.</para>
/// </summary>
/// <param name="layer">The layer to add.</param>
[JsMethod(Name = "addLayer")]
public void AddLayer(ILayer layer) { throw new NotImplementedException(); }
/// <summary>
/// <para>Schedule a full update on all visible layers.</para>
/// </summary>
[JsMethod(Name = "fullUpdate")]
public void FullUpdate() { throw new NotImplementedException(); }
/// <summary>
/// <para>Get the grid cell at the given absolute document coordinates. This method
/// can be used to convert the mouse position returned by
/// <see cref="qx.event.type.Mouse.GetDocumentLeft"/> and
/// <see cref="qx.event.type.Mouse.GetDocumentLeft"/> into cell coordinates.</para>
/// </summary>
/// <param name="documentX">The x coordinate relative to the viewport origin.</param>
/// <param name="documentY">The y coordinate relative to the viewport origin.</param>
/// <returns>A map containing the row and column of the found cell. If the coordinate is outside of the pane’s bounds or there is no cell at the coordinate null is returned.</returns>
[JsMethod(Name = "getCellAtPosition")]
public object GetCellAtPosition(double documentX, double documentY) { throw new NotImplementedException(); }
/// <summary>
/// <para>Returns an array containing the layer container.</para>
/// </summary>
/// <returns>The layer container array.</returns>
[JsMethod(Name = "getChildren")]
public object GetChildren() { throw new NotImplementedException(); }
/// <summary>
/// <para>Get the axis object, which defines the column numbers and the column sizes.</para>
/// </summary>
/// <returns>The column configuration.</returns>
[JsMethod(Name = "getColumnConfig")]
public Axis GetColumnConfig() { throw new NotImplementedException(); }
/// <summary>
/// <para>Get a list of all layers.</para>
/// </summary>
/// <returns>List of the pane’s layers.</returns>
[JsMethod(Name = "getLayers")]
public ILayer GetLayers() { throw new NotImplementedException(); }
/// <summary>
/// <para>Get the axis object, which defines the row numbers and the row sizes.</para>
/// </summary>
/// <returns>The row configuration.</returns>
[JsMethod(Name = "getRowConfig")]
public Axis GetRowConfig() { throw new NotImplementedException(); }
/// <summary>
/// <para>The maximum horizontal scroll position.</para>
/// </summary>
/// <returns>Maximum horizontal scroll position.</returns>
[JsMethod(Name = "getScrollMaxX")]
public double GetScrollMaxX() { throw new NotImplementedException(); }
/// <summary>
/// <para>The maximum vertical scroll position.</para>
/// </summary>
/// <returns>Maximum vertical scroll position.</returns>
[JsMethod(Name = "getScrollMaxY")]
public double GetScrollMaxY() { throw new NotImplementedException(); }
/// <summary>
/// <para>The (virtual) size of the content.</para>
/// </summary>
/// <returns>Size of the content (keys: width and height).</returns>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "getScrollSize")]
public object GetScrollSize() { throw new NotImplementedException(); }
/// <summary>
/// <para>Returns the horizontal scroll offset.</para>
/// </summary>
/// <returns>The horizontal scroll offset.</returns>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "getScrollX")]
public double GetScrollX() { throw new NotImplementedException(); }
/// <summary>
/// <para>Returns the vertical scroll offset.</para>
/// </summary>
/// <returns>The vertical scroll offset.</returns>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "getScrollY")]
public double GetScrollY() { throw new NotImplementedException(); }
/// <summary>
/// <para>Get a list of all visible layers.</para>
/// </summary>
/// <returns>List of the pane’s visible layers.</returns>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>.
/// NOTE(review): the docs describe a list, but the declared return type is a single <c>ILayer</c> — confirm whether this should be an array/collection type.</remarks>
[JsMethod(Name = "getVisibleLayers")]
public ILayer GetVisibleLayers() { throw new NotImplementedException(); }
/// <summary>
/// <para>Whether a full update is scheduled.</para>
/// </summary>
/// <returns>Whether a full update is scheduled.</returns>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "isUpdatePending")]
public bool IsUpdatePending() { throw new NotImplementedException(); }
/// <summary>
/// <para>Increase the layers width beyond the needed width to improve
/// horizontal scrolling. The layers are only resized if invisible parts
/// left/right of the pane window are smaller than minLeft/minRight.</para>
/// </summary>
/// <param name="minLeft">Only prefetch if the invisible part left of the pane window is smaller than this (pixel) value.</param>
/// <param name="maxLeft">The amount of pixel the layers should reach left of the pane window.</param>
/// <param name="minRight">Only prefetch if the invisible part right of the pane window is smaller than this (pixel) value.</param>
/// <param name="maxRight">The amount of pixel the layers should reach right of the pane window.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "prefetchX")]
public void PrefetchX(double minLeft, double maxLeft, double minRight, double maxRight) { throw new NotImplementedException(); }
/// <summary>
/// <para>Increase the layers height beyond the needed height to improve
/// vertical scrolling. The layers are only resized if invisible parts
/// above/below the pane window are smaller than minAbove/minBelow.</para>
/// </summary>
/// <param name="minAbove">Only prefetch if the invisible part above the pane window is smaller than this (pixel) value.</param>
/// <param name="maxAbove">The amount of pixel the layers should reach above the pane window.</param>
/// <param name="minBelow">Only prefetch if the invisible part below the pane window is smaller than this (pixel) value.</param>
/// <param name="maxBelow">The amount of pixel the layers should reach below the pane window.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "prefetchY")]
public void PrefetchY(double minAbove, double maxAbove, double minBelow, double maxBelow) { throw new NotImplementedException(); }
/// <summary>
/// <para>Scrolls a grid cell into the visible area of the pane.</para>
/// </summary>
/// <param name="column">The cell’s column index.</param>
/// <param name="row">The cell’s row index.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "scrollCellIntoView")]
public void ScrollCellIntoView(double column, double row) { throw new NotImplementedException(); }
/// <summary>
/// <para>Scrolls a column into the visible area of the pane.</para>
/// </summary>
/// <param name="column">The column’s index.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "scrollColumnIntoView")]
public void ScrollColumnIntoView(double column) { throw new NotImplementedException(); }
/// <summary>
/// <para>Scrolls a row into the visible area of the pane.</para>
/// </summary>
/// <param name="row">The row’s index.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "scrollRowIntoView")]
public void ScrollRowIntoView(double row) { throw new NotImplementedException(); }
/// <summary>
/// <para>Scrolls the content to the given left (horizontal) coordinate.</para>
/// </summary>
/// <param name="value">The horizontal position to scroll to.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "setScrollX")]
public void SetScrollX(double value) { throw new NotImplementedException(); }
/// <summary>
/// <para>Scrolls the content to the given top (vertical) coordinate.</para>
/// </summary>
/// <param name="value">The vertical position to scroll to.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "setScrollY")]
public void SetScrollY(double value) { throw new NotImplementedException(); }
/// <summary>
/// <para>This method is called during the flush of the
/// <see cref="qx.ui.core.queue.Widget"/> (widget queue).</para>
/// </summary>
/// <param name="jobs">A map of jobs.</param>
/// <remarks>Binding stub — the C# body only throws <see cref="NotImplementedException"/>; presumably implemented by the JsMethod-mapped JavaScript member.</remarks>
[JsMethod(Name = "syncWidget")]
public void SyncWidget(object jobs) { throw new NotImplementedException(); }
#endregion Methods
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: Provides some static methods to aid with the implementation
** of a Formatter for Serialization.
**
**
============================================================*/
namespace System.Runtime.Serialization {
using System;
using System.Reflection;
using System.Collections;
using System.Collections.Generic;
using System.Security;
using System.Security.Permissions;
using System.Runtime.Remoting;
using System.Runtime.CompilerServices;
using System.Runtime.Versioning;
using System.Threading;
using System.IO;
using System.Text;
using System.Globalization;
using System.Diagnostics;
using System.Diagnostics.Contracts;
[System.Runtime.InteropServices.ComVisible(true)]
public static class FormatterServices {
// Gets a new instance of the object. The entire object is initialized to 0 and no
// constructors have been run. **THIS MEANS THAT THE OBJECT MAY NOT BE IN A STATE
// CONSISTENT WITH ITS INTERNAL REQUIREMENTS** This method should only be used for
// deserialization when the user intends to immediately populate all fields. This method
// will not create an uninitialized string because it is non-sensical to create an empty
// instance of an immutable type.
//
/// <summary>Allocates a zeroed instance of <paramref name="type"/> without running any constructor.</summary>
/// <exception cref="ArgumentNullException"><paramref name="type"/> is null.</exception>
/// <exception cref="SerializationException"><paramref name="type"/> is not a runtime-provided Type.</exception>
public static Object GetUninitializedObject(Type type) {
if ((object)type == null) {
throw new ArgumentNullException(nameof(type));
}
Contract.EndContractBlock();
// Only runtime types can be handed to the native allocator below.
if (!(type is RuntimeType)) {
throw new SerializationException(Environment.GetResourceString("Serialization_InvalidType", type.ToString()));
}
return nativeGetUninitializedObject((RuntimeType)type);
}
/// <summary>Same contract as <see cref="GetUninitializedObject"/>, but goes through the runtime's
/// "safe" allocation path; a <see cref="SecurityException"/> from that path is wrapped in a
/// <see cref="SerializationException"/>.</summary>
public static Object GetSafeUninitializedObject(Type type) {
if ((object)type == null) {
throw new ArgumentNullException(nameof(type));
}
Contract.EndContractBlock();
if (!(type is RuntimeType)) {
throw new SerializationException(Environment.GetResourceString("Serialization_InvalidType", type.ToString()));
}
try {
return nativeGetSafeUninitializedObject((RuntimeType)type);
}
catch(SecurityException e) {
throw new SerializationException(Environment.GetResourceString("Serialization_Security", type.FullName), e);
}
}
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private static extern Object nativeGetSafeUninitializedObject(RuntimeType type);
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private static extern Object nativeGetUninitializedObject(RuntimeType type);
// Binder used by the reflection-based field setters below.
private static Binder s_binder = Type.DefaultBinder;
// Writes value into the field described by fi on target, dispatching on whether the member
// is a regular runtime field (RtFieldInfo) or a serialization-only pseudo field
// (SerializationFieldInfo). Any other MemberInfo kind is rejected.
internal static void SerializationSetValue(MemberInfo fi, Object target, Object value)
{
Contract.Requires(fi != null);
RtFieldInfo rtField = fi as RtFieldInfo;
if (rtField != null)
{
rtField.CheckConsistency(target);
rtField.UnsafeSetValue(target, value, BindingFlags.Default, s_binder, null);
return;
}
SerializationFieldInfo serField = fi as SerializationFieldInfo;
if (serField != null)
{
serField.InternalSetValue(target, value, BindingFlags.Default, s_binder, null);
return;
}
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidFieldInfo"));
}
// Fill in the members of obj with the data contained in data.
// Returns obj itself (the populated object).
//
/// <summary>Populates the given field members of <paramref name="obj"/> with the corresponding
/// entries of <paramref name="data"/>; null entries are skipped (forward references or nulls).</summary>
/// <returns>The populated <paramref name="obj"/>.</returns>
public static Object PopulateObjectMembers(Object obj, MemberInfo[] members, Object[] data) {
if (obj==null) {
throw new ArgumentNullException(nameof(obj));
}
if (members==null) {
throw new ArgumentNullException(nameof(members));
}
if (data==null) {
throw new ArgumentNullException(nameof(data));
}
if (members.Length!=data.Length) {
throw new ArgumentException(Environment.GetResourceString("Argument_DataLengthDifferent"));
}
Contract.EndContractBlock();
MemberInfo mi;
BCLDebug.Trace("SER", "[PopulateObjectMembers]Enter.");
for (int i=0; i<members.Length; i++) {
mi = members[i];
if (mi==null) {
throw new ArgumentNullException(nameof(members), Environment.GetResourceString("ArgumentNull_NullMember", i));
}
//If we find an empty, it means that the value was never set during deserialization.
//This is either a forward reference or a null. In either case, this may break some of the
//invariants maintained by the setter, so we'll do nothing with it for right now.
if (data[i]!=null) {
if (mi.MemberType==MemberTypes.Field) {
SerializationSetValue(mi, obj, data[i]);
} else {
throw new SerializationException(Environment.GetResourceString("Serialization_UnknownMemberInfo"));
}
BCLDebug.Trace("SER", "[PopulateObjectMembers]\tType:", obj.GetType(), "\tMember:",
members[i].Name, " with member type: ", ((FieldInfo)members[i]).FieldType);
}
}
BCLDebug.Trace("SER", "[PopulateObjectMembers]Leave.");
return obj;
}
// Extracts the data from obj. members is the array of members which we wish to
// extract (must be FieldInfos or PropertyInfos). For each supplied member, extract the matching value and
// return it in a Object[] of the same size.
//
/// <summary>Reads the values of the given field members from <paramref name="obj"/>.</summary>
/// <returns>An array of member values, parallel to <paramref name="members"/>.</returns>
public static Object[] GetObjectData(Object obj, MemberInfo[] members) {
if (obj==null) {
throw new ArgumentNullException(nameof(obj));
}
if (members==null) {
throw new ArgumentNullException(nameof(members));
}
Contract.EndContractBlock();
int numberOfMembers = members.Length;
Object[] data = new Object[numberOfMembers];
MemberInfo mi;
for (int i=0; i<numberOfMembers; i++) {
mi=members[i];
if (mi==null) {
throw new ArgumentNullException(nameof(members), Environment.GetResourceString("ArgumentNull_NullMember", i));
}
if (mi.MemberType==MemberTypes.Field) {
Debug.Assert(mi is RuntimeFieldInfo || mi is SerializationFieldInfo,
"[FormatterServices.GetObjectData]mi is RuntimeFieldInfo || mi is SerializationFieldInfo.");
RtFieldInfo rfi = mi as RtFieldInfo;
if (rfi != null) {
rfi.CheckConsistency(obj);
data[i] = rfi.UnsafeGetValue(obj);
} else {
data[i] = ((SerializationFieldInfo)mi).InternalGetValue(obj);
}
} else {
throw new SerializationException(Environment.GetResourceString("Serialization_UnknownMemberInfo"));
}
}
return data;
}
/// <summary>Wraps <paramref name="innerSurrogate"/> in a <see cref="SurrogateForCyclicalReference"/> decorator.</summary>
[System.Runtime.InteropServices.ComVisible(false)]
public static ISerializationSurrogate GetSurrogateForCyclicalReference(ISerializationSurrogate innerSurrogate)
{
if (innerSurrogate == null)
throw new ArgumentNullException(nameof(innerSurrogate));
Contract.EndContractBlock();
return new SurrogateForCyclicalReference(innerSurrogate);
}
/*=============================GetTypeFromAssembly==============================
**Action:  Looks up a type by name in the given assembly. The lookup does not
**         throw on a miss and is case-sensitive (GetType(name, false, false)).
**Returns: The Type, or null if no type with that name exists in the assembly.
**Arguments: assem -- the assembly to search; name -- the type name.
**Exceptions: ArgumentNullException if assem is null.
==============================================================================*/
public static Type GetTypeFromAssembly(Assembly assem, String name) {
if (assem==null)
throw new ArgumentNullException(nameof(assem));
Contract.EndContractBlock();
return assem.GetType(name, false, false);
}
/*============================LoadAssemblyFromString============================
**Action: Loads an assembly from a given string. The current assembly loading story
** is quite confusing. If the assembly is in the fusion cache, we can load it
** using the stringized-name which we transmitted over the wire. If that fails,
** we try for a lookup of the assembly using the simple name which is the first
** part of the assembly name. If we can't find it that way, we'll return null
** as our failure result.
**Returns: The loaded assembly or null if it can't be found.
**Arguments: assemblyName -- The stringized assembly name.
**Exceptions: None
==============================================================================*/
internal static Assembly LoadAssemblyFromString(String assemblyName) {
//
// Try using the stringized assembly name to load from the fusion cache.
//
BCLDebug.Trace("SER", "[LoadAssemblyFromString]Looking for assembly: ", assemblyName);
Assembly found = Assembly.Load(assemblyName);
return found;
}
// Best-effort variant of LoadAssemblyFromString: any load failure is traced
// and swallowed, and null is returned instead.
internal static Assembly LoadAssemblyFromStringNoThrow(String assemblyName) {
try {
return LoadAssemblyFromString(assemblyName);
}
catch (Exception e){
BCLDebug.Trace("SER", "[LoadAssemblyFromString]", e.ToString());
}
return null;
}
// Returns the assembly name to serialize for the given type, honoring
// [TypeForwardedFrom]: if the attribute is present its original assembly name is
// used and hasTypeForwardedFrom is set to true; otherwise the type's current
// assembly full name is returned.
internal static string GetClrAssemblyName(Type type, out bool hasTypeForwardedFrom) {
if ((object)type == null) {
throw new ArgumentNullException(nameof(type));
}
object[] typeAttributes = type.GetCustomAttributes(typeof(TypeForwardedFromAttribute), false);
if (typeAttributes != null && typeAttributes.Length > 0) {
hasTypeForwardedFrom = true;
TypeForwardedFromAttribute typeForwardedFromAttribute = (TypeForwardedFromAttribute)typeAttributes[0];
return typeForwardedFromAttribute.AssemblyFullName;
}
else {
hasTypeForwardedFrom = false;
return type.Assembly.FullName;
}
}
// Builds the CLR full name for a type, recursing into array element types and
// generic arguments via the helpers below.
internal static string GetClrTypeFullName(Type type) {
if (type.IsArray) {
return GetClrTypeFullNameForArray(type);
}
else {
return GetClrTypeFullNameForNonArrayTypes(type);
}
}
// Appends the array suffix: "[]" for rank 1, "[,,…]" with rank-1 commas otherwise.
static string GetClrTypeFullNameForArray(Type type) {
int rank = type.GetArrayRank();
if (rank == 1)
{
return String.Format(CultureInfo.InvariantCulture, "{0}{1}", GetClrTypeFullName(type.GetElementType()), "[]");
}
else
{
StringBuilder builder = new StringBuilder(GetClrTypeFullName(type.GetElementType())).Append("[");
for (int commaIndex = 1; commaIndex < rank; commaIndex++)
{
builder.Append(",");
}
builder.Append("]");
return builder.ToString();
}
}
// For generic types, builds "Def`N[[Arg1, Asm1],[Arg2, Asm2]]" with each argument
// assembly-qualified; non-generic types return their plain FullName.
static string GetClrTypeFullNameForNonArrayTypes(Type type) {
if (!type.IsGenericType) {
return type.FullName;
}
Type[] genericArguments = type.GetGenericArguments();
StringBuilder builder = new StringBuilder(type.GetGenericTypeDefinition().FullName).Append("[");
bool hasTypeForwardedFrom;
foreach (Type genericArgument in genericArguments) {
builder.Append("[").Append(GetClrTypeFullName(genericArgument)).Append(", ");
builder.Append(GetClrAssemblyName(genericArgument, out hasTypeForwardedFrom)).Append("],");
}
//remove the last comma and close typename for generic with a close bracket
return builder.Remove(builder.Length - 1, 1).Append("]").ToString();
}
}
// Decorator surrogate that simply forwards both serialization calls to a wrapped
// inner surrogate. Instances are created by FormatterServices.GetSurrogateForCyclicalReference.
internal sealed class SurrogateForCyclicalReference : ISerializationSurrogate
{
private readonly ISerializationSurrogate _inner;
internal SurrogateForCyclicalReference(ISerializationSurrogate innerSurrogate)
{
if (innerSurrogate == null)
{
throw new ArgumentNullException(nameof(innerSurrogate));
}
_inner = innerSurrogate;
}
// Delegates serialization of obj to the wrapped surrogate.
public void GetObjectData(Object obj, SerializationInfo info, StreamingContext context)
=> _inner.GetObjectData(obj, info, context);
// Delegates deserialization of obj to the wrapped surrogate.
public Object SetObjectData(Object obj, SerializationInfo info, StreamingContext context, ISurrogateSelector selector)
=> _inner.SetObjectData(obj, info, context, selector);
}
}
| |
using System.Collections.Generic;
using System.Linq;
using hw.DebugFormatter;
using hw.Helper;
using JetBrains.Annotations;
using Reni.Basics;
using Reni.Runtime;
using Reni.Struct;
namespace Reni.Code
{
sealed class CSharpGenerator : DumpableObject, IVisitor
{
// Byte count reserved for temporaries; becomes the argument of the generated Data.Create call.
readonly int TemporaryByteCount;
// One generated C# statement per entry, stored without the trailing ";".
readonly List<string> DataCache = new();
// Current indentation depth of the generated code (see Indent/BackIndent).
int IndentLevel;
public CSharpGenerator(int temporaryByteCount) => TemporaryByteCount = temporaryByteCount;
// The complete generated statement sequence: a "var data = Data.Create(...)" prologue
// followed by every cached statement, each terminated with ";\n".
public string Data
{
get
{
var start = $"\nvar data = Data.Create({TemporaryByteCount})";
return DataCache
.Aggregate(start, (x, y) => x + ";\n" + y)
+ ";\n";
}
}
// Formats one statement via string.Format and appends it to the cache,
// prefixed with the current indentation.
[StringFormatMethod("pattern")]
void AddCode(string pattern, params object[] data)
{
var c = string.Format(pattern, data);
DataCache.Add(" ".Repeat(IndentLevel) + c);
}
// Emits a ".BitCast(dataSize).BitCast(size)" suffix, or "" when no cast is needed.
static string BitCast(Size size, Size dataSize)
{
if(size == dataSize)
return "";
return $".BitCast({dataSize.ToInt()}).BitCast({size.ToInt()})";
}
// Size of a runtime reference in bytes (delegated to DataHandler).
static int RefBytes => DataHandler.RefBytes;
// Emits a drop of beforeSize bytes; the two-argument form keeps afterSize bytes.
void IVisitor.Drop(Size beforeSize, Size afterSize)
{
if(afterSize.IsZero)
AddCode("data.Drop({0})", beforeSize.ByteCount);
else
AddCode("data.Drop({0}, {1})", beforeSize.ByteCount, afterSize.ByteCount);
}
// Pushes a sized bit-array literal.
void IVisitor.BitsArray(Size size, BitsConst data)
=> AddCode("data.SizedPush({0}{1})", size.ByteCount, data.ByteSequence());
void IVisitor.ReferencePlus(Size size)
=> AddCode("data.PointerPlus({0})", size.SaveByteCount);
void IVisitor.PrintNumber(Size leftSize, Size rightSize)
=> AddCode("data.Pull({0}).PrintNumber()", leftSize.SaveByteCount);
void IVisitor.PrintText(string dumpPrintText)
=> AddCode("Data.PrintText({0})", dumpPrintText.Quote());
// Push a pointer into the data stack / the current frame, respectively.
void IVisitor.TopRef(Size offset)
=> AddCode("data.Push(data.Pointer({0}))", offset.SaveByteCount);
void IVisitor.TopFrameRef(Size offset)
=> AddCode("data.Push(frame.Pointer({0}))", offset.SaveByteCount);
void IVisitor.Assign(Size targetSize)
=> AddCode("data.Assign({0})", targetSize.SaveByteCount);
// Pull targetSize bytes, narrow to the significant bits, then widen to size.
void IVisitor.BitCast(Size size, Size targetSize, Size significantSize)
=> AddCode
(
"data.Push(data.Pull({0}).BitCast({1}).BitCast({2}))",
targetSize.SaveByteCount,
significantSize.ToInt(),
size.ToInt()
);
void IVisitor.PrintText(Size leftSize, Size itemSize)
=> AddCode
(
"data.Pull({0}).PrintText({1})",
leftSize.SaveByteCount,
itemSize.SaveByteCount
);
// Self-recursion is emitted as a jump back to the "Start" label of the generated function.
void IVisitor.RecursiveCall() => AddCode("goto Start");
// A recursive-call candidate must have been resolved before code generation.
void IVisitor.RecursiveCallCandidate() { throw new UnexpectedRecursiveCallCandidate(); }
void IVisitor.ArrayGetter(Size elementSize, Size indexSize)
=>
AddCode
(
"data.ArrayGetter({0},{1})",
elementSize.SaveByteCount,
indexSize.SaveByteCount
);
void IVisitor.ArraySetter(Size elementSize, Size indexSize)
=>
AddCode
(
"data.ArraySetter({0},{1})",
elementSize.SaveByteCount,
indexSize.SaveByteCount
);
// Calls the generated function for functionId with argsAndRefsSize bytes of arguments.
void IVisitor.Call(Size size, FunctionId functionId, Size argsAndRefsSize)
=> AddCode
(
"data.Push({0}(data.Pull({1})))",
Generator.FunctionName(functionId),
argsAndRefsSize.SaveByteCount
);
// Copies dataSize bytes from the data stack / frame / a dereferenced pointer,
// bit-casting to size where necessary.
void IVisitor.TopData(Size offset, Size size, Size dataSize)
=> AddCode
(
"data.Push(data.Get({0}, {1}){2})",
dataSize.ByteCount,
offset.SaveByteCount,
BitCast(size, dataSize)
);
void IVisitor.TopFrameData(Size offset, Size size, Size dataSize)
=> AddCode
(
"data.Push(frame.Get({0}, {1}){2})",
dataSize.ByteCount,
offset.SaveByteCount,
BitCast(size, dataSize)
);
void IVisitor.DePointer(Size size, Size dataSize)
=> AddCode
(
"data.Push(data.Pull({0}).DePointer({1}){2})",
RefBytes,
dataSize.ByteCount,
BitCast(size, dataSize)
);
// Unary bit-array operation; the short form is used when result and argument sizes match.
void IVisitor.BitArrayPrefixOp(string operation, Size size, Size argSize)
{
var sizeBytes = size.SaveByteCount;
var argBytes = argSize.SaveByteCount;
if(sizeBytes == argBytes)
AddCode("data.{0}Prefix(bytes:{1})", operation, sizeBytes);
else
AddCode
(
"data.{0}Prefix(sizeBytes:{1}, argBytes:{2})",
operation,
sizeBytes,
argBytes
);
}
// Binary bit-array operation with explicit result/operand sizes.
void IVisitor.BitArrayBinaryOp(string opToken, Size size, Size leftSize, Size rightSize)
{
var sizeBytes = size.SaveByteCount;
var leftBytes = leftSize.SaveByteCount;
var rightBytes = rightSize.SaveByteCount;
AddCode
(
"data.{0}(sizeBytes:{1}, leftBytes:{2}, rightBytes:{3})",
opToken,
sizeBytes,
leftBytes,
rightBytes
);
}
// Emits an if/else over the boolean popped from the data stack.
// The "{{"/"}}" are doubled because AddCode routes patterns through string.Format.
void IVisitor.ThenElse(Size condSize, CodeBase thenCode, CodeBase elseCode)
{
AddCode("if({0})\n{{", PullBool(condSize.ByteCount));
Indent();
thenCode.Visit(this);
BackIndent();
AddCode("}}\nelse\n{{");
Indent();
elseCode.Visit(this);
BackIndent();
AddCode("}}");
}
// Expression that pops condSize bytes and converts them to a bool:
// a single byte is compared against 0, anything larger uses IsNotNull().
static string PullBool(int byteCount)
{
if(byteCount == 1)
return "data.Pull(1).GetBytes()[0] != 0";
return "data.Pull(" + byteCount + ").IsNotNull()";
}
void BackIndent() => IndentLevel--;
void Indent() => IndentLevel++;
// A fiber is its head followed by its items, visited in order.
void IVisitor.Fiber(FiberHead fiberHead, FiberItem[] fiberItems)
{
fiberHead.Visit(this);
foreach(var fiberItem in fiberItems)
fiberItem.Visit(this);
}
void IVisitor.List(CodeBase[] data)
{
foreach(var codeBase in data)
codeBase.Visit(this);
}
// Entry point: generates the C# statement sequence for codeBase.
// An UnexpectedContextReference is reported as an assertion failure; the
// (possibly partial) generated text is still returned.
internal static string GenerateCSharpStatements(CodeBase codeBase)
{
var generator = new CSharpGenerator(codeBase.TemporarySize.SaveByteCount);
try
{
codeBase.Visit(generator);
}
catch(UnexpectedContextReference e)
{
Tracer.AssertionFailed("", () => e.Message);
}
return generator.Data;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.DataLake.Analytics
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure.OData;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// StorageAccountsOperations operations.
/// </summary>
public partial interface IStorageAccountsOperations
{
/// <summary>
/// Gets the specified Azure Storage account linked to the given Data
/// Lake Analytics account.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which to retrieve
/// Azure storage account details.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure Storage account for which to retrieve the
/// details.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageAccountInfo>> GetWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the specified Data Lake Analytics account to remove an
/// Azure Storage account.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which to remove
/// the Azure Storage account.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure Storage account to remove
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the Data Lake Analytics account to replace Azure Storage
/// blob account details, such as the access key and/or suffix.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account to modify storage
/// accounts in
/// </param>
/// <param name='storageAccountName'>
/// The Azure Storage account to modify
/// </param>
/// <param name='parameters'>
/// The parameters containing the access key and suffix to update the
/// storage account with, if any. Passing nothing results in no
/// change.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> UpdateWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, UpdateStorageAccountParameters parameters = default(UpdateStorageAccountParameters), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates the specified Data Lake Analytics account to add an Azure
/// Storage account.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account to which to add the
/// Azure Storage account.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure Storage account to add
/// </param>
/// <param name='parameters'>
/// The parameters containing the access key and optional suffix for
/// the Azure Storage Account.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> AddWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, AddStorageAccountParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the specified Azure Storage container associated with the
/// given Data Lake Analytics and Azure Storage accounts.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account for which to retrieve
/// blob container.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure storage account from which to retrieve the
/// blob container.
/// </param>
/// <param name='containerName'>
/// The name of the Azure storage container to retrieve
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<StorageContainer>> GetStorageContainerWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, string containerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Lists the Azure Storage containers, if any, associated with the
/// specified Data Lake Analytics and Azure Storage account
/// combination. The response includes a link to the next page of
/// results, if any.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account for which to list
/// Azure Storage blob containers.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure storage account from which to list blob
/// containers.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<StorageContainer>>> ListStorageContainersWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the SAS token associated with the specified Data Lake
/// Analytics and Azure Storage account and container combination.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account from which an Azure
/// Storage account's SAS token is being requested.
/// </param>
/// <param name='storageAccountName'>
/// The name of the Azure storage account for which the SAS token is
/// being requested.
/// </param>
/// <param name='containerName'>
/// The name of the Azure storage container for which the SAS token is
/// being requested.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<SasTokenInfo>>> ListSasTokensWithHttpMessagesAsync(string resourceGroupName, string accountName, string storageAccountName, string containerName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the first page of Azure Storage accounts, if any, linked to
/// the specified Data Lake Analytics account. The response includes
/// a link to the next page, if any.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake
/// Analytics account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Analytics account for which to list
/// Azure Storage accounts.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to
/// just those requested, e.g.
/// Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// The Boolean value of true or false to request a count of the
/// matching resources included with the resources in the response,
/// e.g. Categories?$count=true. Optional.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
Task<AzureOperationResponse<IPage<StorageAccountInfo>>> ListByAccountWithHttpMessagesAsync(string resourceGroupName, string accountName, ODataQuery<StorageAccountInfo> odataQuery = default(ODataQuery<StorageAccountInfo>), string select = default(string), bool? count = default(bool?), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the next page of Azure Storage containers associated with the
/// specified Data Lake Analytics and Azure Storage account
/// combination, using the link returned by a previous list call. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
Task<AzureOperationResponse<IPage<StorageContainer>>> ListStorageContainersNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the next page of SAS token results for the specified Data
/// Lake Analytics and Azure Storage account and container
/// combination, using the link returned by a previous list call.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
Task<AzureOperationResponse<IPage<SasTokenInfo>>> ListSasTokensNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the next page of Azure Storage accounts, if any, linked to
/// the specified Data Lake Analytics account, using the link returned
/// by a previous list call. The response includes a link to the next
/// page, if any.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
Task<AzureOperationResponse<IPage<StorageAccountInfo>>> ListByAccountNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using Ploeh.Albedo;
using Ploeh.Albedo.Refraction;
using Ploeh.AutoFixture.Kernel;
namespace Ploeh.AutoFixture.Idioms
{
/// <summary>
/// Encapsulates a unit test that verifies that a member (property or field) is correctly
/// initialized by the constructor.
/// </summary>
public class ConstructorInitializedMemberAssertion : IdiomaticAssertion
{
    // Supplies the anonymous specimen values that are passed to the constructors under test.
    private readonly ISpecimenBuilder builder;
    // Compares the value read back from a member with the value supplied to the matching
    // constructor parameter.
    private readonly IEqualityComparer comparer;
    // Decides whether a constructor parameter and a member (property or field) refer to
    // the same piece of data.
    private readonly IEqualityComparer<IReflectionElement> parameterMemberMatcher;
    /// <summary>
    /// Initializes a new instance of the <see cref="ConstructorInitializedMemberAssertion"/> class.
    /// </summary>
    /// <param name="builder">
    /// A composer which can create instances required to implement the idiomatic unit test,
    /// such as the owner of the property, as well as the value to be assigned and read from
    /// the member.
    /// </param>
    /// <param name="comparer"> An <see cref="IEqualityComparer"/> instance, which is used
    /// to determine if each member has the same value which was passed to the matching
    /// constructor parameter.
    /// </param>
    /// <param name="parameterMemberMatcher">Provides a way to customize the way parameters
    /// are matched to members. The boolean value returned from
    /// <see cref="IEqualityComparer{T}.Equals(T,T)"/> indicates if the parameter and member
    /// are matched.
    /// </param>
    /// <remarks>
    /// <para>
    /// <paramref name="builder" /> will typically be a <see cref="Fixture" /> instance.
    /// </para>
    /// </remarks>
    /// <exception cref="ArgumentNullException">Any argument is <c>null</c>.</exception>
    public ConstructorInitializedMemberAssertion(
        ISpecimenBuilder builder,
        IEqualityComparer comparer,
        IEqualityComparer<IReflectionElement> parameterMemberMatcher)
    {
        if (builder == null)
        {
            throw new ArgumentNullException("builder");
        }
        if (comparer == null)
        {
            throw new ArgumentNullException("comparer");
        }
        if (parameterMemberMatcher == null)
        {
            throw new ArgumentNullException("parameterMemberMatcher");
        }
        this.builder = builder;
        this.comparer = comparer;
        this.parameterMemberMatcher = parameterMemberMatcher;
    }
    /// <summary>
    /// Initializes a new instance of the <see cref="ConstructorInitializedMemberAssertion"/> class
    /// using the default value comparer and the default parameter-to-member matcher.
    /// </summary>
    /// <param name="builder">
    /// A composer which can create instances required to implement the idiomatic unit test,
    /// such as the owner of the property, as well as the value to be assigned and read from
    /// the member.
    /// </param>
    /// <remarks>
    /// <para>
    /// <paramref name="builder" /> will typically be a <see cref="Fixture" /> instance.
    /// </para>
    /// </remarks>
    public ConstructorInitializedMemberAssertion(ISpecimenBuilder builder)
        : this(
            builder,
            EqualityComparer<object>.Default,
            new DefaultParameterMemberMatcher())
    {
    }
    /// <summary>
    /// Gets the builder supplied by the constructor.
    /// </summary>
    public ISpecimenBuilder Builder
    {
        get { return this.builder; }
    }
    /// <summary>
    /// Gets the comparer supplied to the constructor.
    /// </summary>
    /// <remarks>
    /// This comparer instance is used to determine if all of the values retrieved from
    /// the members are equal to their corresponding 'matched' constructor parameter.
    /// </remarks>
    public IEqualityComparer Comparer
    {
        get { return comparer; }
    }
    /// <summary>
    /// Gets the <see cref="IEqualityComparer{IReflectionElement}"/> instance which is
    /// used to determine if a constructor parameter matches a given member (property
    /// or field).
    /// </summary>
    /// <remarks>
    /// If the parameter and member are matched, the member is expected to be initialized
    /// from the value passed into the matching constructor parameter.
    /// </remarks>
    public IEqualityComparer<IReflectionElement> ParameterMemberMatcher
    {
        get { return parameterMemberMatcher; }
    }
    /// <summary>
    /// Verifies that all constructor arguments are properly exposed as either fields
    /// or properties.
    /// </summary>
    /// <param name="constructorInfo">The constructor.</param>
    /// <exception cref="ArgumentNullException"><paramref name="constructorInfo"/> is <c>null</c>.</exception>
    /// <exception cref="ConstructorInitializedMemberException">
    /// A constructor parameter has no matching public property or field.
    /// </exception>
    public override void Verify(ConstructorInfo constructorInfo)
    {
        if (constructorInfo == null)
            throw new ArgumentNullException("constructorInfo");
        var parameters = constructorInfo.GetParameters();
        if (parameters.Length == 0)
            return;
        var publicPropertiesAndFields = GetPublicPropertiesAndFields(constructorInfo.DeclaringType).ToArray();
        // Backwards-compatibility: when the caller relies on the default matcher, swap it
        // for one that compares names case-insensitively and accepts assignable (not just
        // identical) types, which mirrors the previous behaviour of this assertion.
        IEqualityComparer<IReflectionElement> matcher =
            this.parameterMemberMatcher is DefaultParameterMemberMatcher
                ? new DefaultParameterMemberMatcher(
                    new DefaultParameterMemberMatcher.NameIgnoreCaseAndTypeAssignableComparer())
                : this.parameterMemberMatcher;
        var firstParameterNotExposed = parameters.FirstOrDefault(
            p => !publicPropertiesAndFields.Any(m =>
                matcher.Equals(p.ToReflectionElement(), m.ToReflectionElement())));
        if (firstParameterNotExposed != null)
        {
            throw new ConstructorInitializedMemberException(constructorInfo, firstParameterNotExposed);
        }
    }
    /// <summary>
    /// Verifies that a property is correctly initialized by the constructor.
    /// </summary>
    /// <param name="propertyInfo">The property.</param>
    /// <remarks>
    /// <para>
    /// This method verifies that the <paramref name="propertyInfo" /> is correctly initialized with
    /// the value given to the same-named constructor parameter. It uses the <see cref="Builder" /> to
    /// supply values to the constructor(s) of the Type on which the field is implemented, and then
    /// reads from the field. The assertion passes if the value read from the property is the same as
    /// the value passed to the constructor. If more than one constructor has an argument with the
    /// same name and type, all constructors are checked. If any constructor (with a matching argument)
    /// does not initialise the property with the correct value, a
    /// <see cref="ConstructorInitializedMemberException" /> is thrown.
    /// </para>
    /// </remarks>
    /// <exception cref="WritablePropertyException">The verification fails.</exception>
    public override void Verify(PropertyInfo propertyInfo)
    {
        if (propertyInfo == null)
            throw new ArgumentNullException("propertyInfo");
        var matchingConstructors = GetConstructorsWithInitializerForMember(propertyInfo).ToArray();
        if (!matchingConstructors.Any())
        {
            // Read-only members must be set by some constructor; otherwise they can
            // never hold a non-default value.
            if (IsMemberThatRequiresConstructorInitialization(propertyInfo))
            {
                throw new ConstructorInitializedMemberException(propertyInfo, string.Format(CultureInfo.CurrentCulture,
                    "No constructors with an argument that matches the read-only property '{0}' were found", propertyInfo.Name));
            }
            // For writable properties or fields, having no constructor parameter that initializes
            // the member is perfectly fine.
            return;
        }
        // A single-value enum cannot distinguish "initialized" from "default"; bail out
        // explicitly rather than report a false positive.
        if (CouldLeadToFalsePositive(propertyInfo.PropertyType))
        {
            throw BuildExceptionDueToPotentialFalsePositive(propertyInfo);
        }
        var expectedAndActuals = matchingConstructors
            .Select(ctor => BuildSpecimenFromConstructor(ctor, propertyInfo));
        // Compare the value passed into the constructor with the value returned from the property
        if (expectedAndActuals.Any(s => !this.comparer.Equals(s.Expected, s.Actual)))
        {
            throw new ConstructorInitializedMemberException(propertyInfo);
        }
    }
    /// <summary>
    /// Verifies that a field is correctly initialized by the constructor.
    /// </summary>
    /// <param name="fieldInfo">The field.</param>
    /// <remarks>
    /// <para>
    /// This method verifies that <paramref name="fieldInfo" /> is correctly initialized with the
    /// value given to the same-named constructor parameter. It uses the <see cref="Builder" /> to
    /// supply values to the constructor(s) of the Type on which the field is implemented, and then
    /// reads from the field. The assertion passes if the value read from the field is the same as
    /// the value passed to the constructor. If more than one constructor has an argument with the
    /// same name and type, all constructors are checked. If any constructor does not initialise
    /// the field with the correct value, a <see cref="ConstructorInitializedMemberException" />
    /// is thrown.
    /// </para>
    /// </remarks>
    /// <exception cref="ConstructorInitializedMemberException">The verification fails.</exception>
    public override void Verify(FieldInfo fieldInfo)
    {
        if (fieldInfo == null)
            throw new ArgumentNullException("fieldInfo");
        var matchingConstructors = GetConstructorsWithInitializerForMember(fieldInfo).ToArray();
        if (!matchingConstructors.Any())
        {
            // Read-only (initonly) fields must be set by some constructor.
            if (IsMemberThatRequiresConstructorInitialization(fieldInfo))
            {
                throw new ConstructorInitializedMemberException(fieldInfo, string.Format(CultureInfo.CurrentCulture,
                    "No constructors with an argument that matches the read-only field '{0}' were found", fieldInfo.Name));
            }
            // For writable properties or fields, having no constructor parameter that initializes
            // the member is perfectly fine.
            return;
        }
        // A single-value enum cannot distinguish "initialized" from "default"; bail out
        // explicitly rather than report a false positive.
        if (CouldLeadToFalsePositive(fieldInfo.FieldType))
        {
            throw BuildExceptionDueToPotentialFalsePositive(fieldInfo);
        }
        var expectedAndActuals = matchingConstructors
            .Select(ctor => BuildSpecimenFromConstructor(ctor, fieldInfo));
        // Compare the value passed into the constructor with the value returned from the field
        if (expectedAndActuals.Any(s => !this.comparer.Equals(s.Expected, s.Actual)))
        {
            throw new ConstructorInitializedMemberException(fieldInfo);
        }
    }
    // Returns true when the member type makes a reliable verification impossible:
    // an enum whose only value is its default value always compares equal, whether
    // or not the constructor actually assigned it.
    private static bool CouldLeadToFalsePositive(Type type)
    {
        if (type.IsEnum)
        {
            //Check for a default value only enum
            var values = Enum.GetValues(type);
            return values.Length == 1 && values.GetValue(0).Equals(Activator.CreateInstance(type));
        }
        return false;
    }
    // Builds the exception thrown when CouldLeadToFalsePositive detects a
    // single-default-value enum member, preserving the member's declaring/reflected
    // type details in the message for diagnosis.
    private static ConstructorInitializedMemberException BuildExceptionDueToPotentialFalsePositive(MemberInfo propertyOrField)
    {
        var message = string.Format(CultureInfo.CurrentCulture, "Unable to properly detect a successful initialization due to {0} being of type enum having a single default value.{3}Declaring type: {1}{3}Reflected type: {2}{3}", propertyOrField.Name, propertyOrField.DeclaringType.AssemblyQualifiedName, propertyOrField.ReflectedType.AssemblyQualifiedName, Environment.NewLine);
        var field = propertyOrField as FieldInfo;
        var property = propertyOrField as PropertyInfo;
        if (field != null)
        {
            return new ConstructorInitializedMemberException(field, message);
        }
        else if (property != null)
        {
            return new ConstructorInitializedMemberException(property, message);
        }
        else
        {
            throw new ArgumentException("Must be a property or field", "propertyOrField");
        }
    }
    // Invokes the given constructor with builder-generated argument values and pairs
    // the value that was passed for the matching parameter (Expected) with the value
    // subsequently read from the member (Actual).
    private ExpectedAndActual BuildSpecimenFromConstructor(
        ConstructorInfo ci, MemberInfo propertyOrField)
    {
        var parametersAndValues = ci.GetParameters()
            .Select(pi =>
            {
                var value = this.builder.CreateAnonymous(pi);
                // Ensure enum isn't getting the default value, otherwise
                // we won't be able to determine whether initialization
                // occurred. (Single retry; single-default-value enums are
                // rejected earlier by CouldLeadToFalsePositive.)
                if (pi.ParameterType.IsEnum && value.Equals(Activator.CreateInstance(pi.ParameterType)))
                {
                    value = this.builder.CreateAnonymous(pi);
                }
                return new
                {
                    Parameter = pi,
                    Value = value
                };
            })
            .ToArray();
        // Get the value expected to be assigned to the matching member
        var expectedValueForMember = parametersAndValues
            .Single(p => IsMatchingParameterAndMember(p.Parameter, propertyOrField))
            .Value;
        // Construct an instance of the specimen class
        var specimen = ci.Invoke(parametersAndValues.Select(pv => pv.Value).ToArray());
        // Get the value from the specimen field/property
        object actual;
        if (propertyOrField is FieldInfo)
        {
            actual = (propertyOrField as FieldInfo).GetValue(specimen);
        }
        else if (propertyOrField is PropertyInfo)
        {
            // A write-only property cannot be read back; echo the expected value so
            // the comparison trivially succeeds for it.
            var propertyInfo = propertyOrField as PropertyInfo;
            actual = propertyInfo.CanRead
                ? propertyInfo.GetValue(specimen, null)
                : expectedValueForMember;
        }
        else
        {
            throw new ArgumentException("Must be a property or field", "propertyOrField");
        }
        return new ExpectedAndActual(expectedValueForMember, actual);
    }
    // Simple pair of the value passed to the constructor (Expected) and the value
    // read back from the member (Actual).
    private class ExpectedAndActual
    {
        public ExpectedAndActual(object expected, object actual)
        {
            this.Expected = expected;
            this.Actual = actual;
        }
        public object Expected { get; private set; }
        public object Actual { get; private set; }
    }
    // Finds every constructor on the member's reflected type that has a parameter
    // matching the given member.
    private IEnumerable<ConstructorInfo> GetConstructorsWithInitializerForMember(MemberInfo member)
    {
        return member.ReflectedType
            .GetConstructors()
            .Where(ci => IsConstructorWithMatchingArgument(ci, member));
    }
    // Applies the configured matcher to decide whether a parameter corresponds to a member.
    private bool IsMatchingParameterAndMember(ParameterInfo parameter, MemberInfo fieldOrProperty)
    {
        return this.parameterMemberMatcher.Equals(
            fieldOrProperty.ToReflectionElement(), parameter.ToReflectionElement());
    }
    // Returns true when at least one of the constructor's parameters matches the member.
    private bool IsConstructorWithMatchingArgument(ConstructorInfo ci, MemberInfo memberInfo)
    {
        return ci.GetParameters().Any(parameterElement =>
            IsMatchingParameterAndMember(parameterElement, memberInfo));
    }
    // Returns all public instance members of the type that are fields or properties.
    private static IEnumerable<MemberInfo> GetPublicPropertiesAndFields(Type t)
    {
        return t.GetMembers(BindingFlags.Instance | BindingFlags.Public)
            .Where(m => m.MemberType.HasFlag(MemberTypes.Field) || m.MemberType.HasFlag(MemberTypes.Property));
    }
    // Returns true for members that can only be assigned during construction:
    // properties without a public setter, and initonly (readonly) fields.
    private static bool IsMemberThatRequiresConstructorInitialization(MemberInfo member)
    {
        var memberAsPropertyInfo = member as PropertyInfo;
        if (memberAsPropertyInfo != null)
        {
            MethodInfo setterMethod = memberAsPropertyInfo.GetSetMethod();
            bool isReadOnly = memberAsPropertyInfo.CanRead &&
                (setterMethod == null || setterMethod.IsPrivate || setterMethod.IsFamilyOrAssembly || setterMethod.IsFamilyAndAssembly);
            return isReadOnly;
        }
        var memberAsFieldInfo = member as FieldInfo;
        if (memberAsFieldInfo != null)
        {
            bool isReadOnly = memberAsFieldInfo.Attributes.HasFlag(FieldAttributes.InitOnly);
            return isReadOnly;
        }
        return false;
    }
    // Default matcher: compares parameters and members by name and type via
    // ReflectionVisitorElementComparer over NameAndType elements.
    private class DefaultParameterMemberMatcher : ReflectionVisitorElementComparer<NameAndType>
    {
        // Treats names as equal ignoring case and types as equal when either is
        // assignable from the other.
        public class NameIgnoreCaseAndTypeAssignableComparer : IEqualityComparer<NameAndType>
        {
            public bool Equals(NameAndType x, NameAndType y)
            {
                if (x == null) throw new ArgumentNullException("x");
                if (y == null) throw new ArgumentNullException("y");
                return x.Name.Equals(y.Name, StringComparison.CurrentCultureIgnoreCase)
                    && (x.Type.IsAssignableFrom(y.Type) || y.Type.IsAssignableFrom(x.Type));
            }
            public int GetHashCode(NameAndType obj)
            {
                // Forces methods like Distinct() to use the Equals method, because
                // the hashcodes will all be equal.
                return 0;
            }
        }
        public DefaultParameterMemberMatcher(
            IEqualityComparer<NameAndType> comparer)
            : base(new NameAndTypeCollectingVisitor(), comparer)
        {
        }
        public DefaultParameterMemberMatcher()
            : this(null)
        {
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Numerics.Tests
{
/// <summary>
/// Tests for BigInteger.DivRem, driven through the StackCalc RPN test harness
/// (the "bDivRem" opcode). Operands are logged via Print so any failure can be
/// reproduced from the test output.
/// </summary>
public class divremTest
{
    // Number of random iterations per scenario.
    private static int s_samples = 10;
    // Fixed seed so failing operand sequences are reproducible.
    private static Random s_random = new Random(100);

    /// <summary>DivRem of two large random BigIntegers.</summary>
    [Fact]
    public static void RunDivRem_TwoLargeBI()
    {
        for (int i = 0; i < s_samples; i++)
        {
            byte[] tempByteArray1 = GetRandomByteArray(s_random);
            byte[] tempByteArray2 = GetRandomByteArray(s_random);
            VerifyDivRemString(Print(tempByteArray1) + Print(tempByteArray2) + "bDivRem");
        }
    }

    /// <summary>DivRem of two small random BigIntegers.</summary>
    [Fact]
    public static void RunDivRem_TwoSmallBI()
    {
        for (int i = 0; i < s_samples; i++)
        {
            byte[] tempByteArray1 = GetRandomByteArray(s_random, 2);
            byte[] tempByteArray2 = GetRandomByteArray(s_random, 2);
            VerifyDivRemString(Print(tempByteArray1) + Print(tempByteArray2) + "bDivRem");
        }
    }

    /// <summary>DivRem mixing one large and one small BigInteger, in both orders.</summary>
    [Fact]
    public static void RunDivRem_OneSmallOneLargeBI()
    {
        for (int i = 0; i < s_samples; i++)
        {
            byte[] tempByteArray1 = GetRandomByteArray(s_random);
            byte[] tempByteArray2 = GetRandomByteArray(s_random, 2);
            VerifyDivRemString(Print(tempByteArray1) + Print(tempByteArray2) + "bDivRem");

            tempByteArray1 = GetRandomByteArray(s_random, 2);
            tempByteArray2 = GetRandomByteArray(s_random);
            VerifyDivRemString(Print(tempByteArray1) + Print(tempByteArray2) + "bDivRem");
        }
    }

    /// <summary>
    /// DivRem with a large BigInteger and zero: zero divided by n succeeds,
    /// n divided by zero throws DivideByZeroException.
    /// </summary>
    [Fact]
    public static void RunDivRem_OneLargeOne0BI()
    {
        for (int i = 0; i < s_samples; i++)
        {
            byte[] tempByteArray1 = GetRandomByteArray(s_random);
            byte[] tempByteArray2 = new byte[] { 0 };
            VerifyDivRemString(Print(tempByteArray1) + Print(tempByteArray2) + "bDivRem");

            Assert.Throws<DivideByZeroException>(() =>
            {
                VerifyDivRemString(Print(tempByteArray2) + Print(tempByteArray1) + "bDivRem");
            });
        }
    }

    /// <summary>
    /// DivRem with a small BigInteger and zero: zero divided by n succeeds,
    /// n divided by zero throws DivideByZeroException.
    /// </summary>
    [Fact]
    public static void RunDivRem_OneSmallOne0BI()
    {
        for (int i = 0; i < s_samples; i++)
        {
            byte[] tempByteArray1 = GetRandomByteArray(s_random, 2);
            byte[] tempByteArray2 = new byte[] { 0 };
            VerifyDivRemString(Print(tempByteArray1) + Print(tempByteArray2) + "bDivRem");

            Assert.Throws<DivideByZeroException>(() =>
            {
                VerifyDivRemString(Print(tempByteArray2) + Print(tempByteArray1) + "bDivRem");
            });
        }
    }

    /// <summary>Boundary cases around the 32-bit digit boundary.</summary>
    [Fact]
    public static void Boundary()
    {
        // Check interesting cases for boundary conditions
        // You'll either be shifting a 0 or 1 across the boundary

        // 32 bit boundary  n2=0
        VerifyDivRemString(Math.Pow(2, 32) + " 2 bDivRem");

        // 32 bit boundary  n1=0 n2=1
        VerifyDivRemString(Math.Pow(2, 33) + " 2 bDivRem");
    }

    /// <summary>
    /// Aggregate run covering every scenario above. Kept for compatibility with
    /// existing test selections; the body previously duplicated each scenario
    /// verbatim, so delegating to the individual tests is behaviorally identical.
    /// </summary>
    [Fact]
    public static void RunDivRemTests()
    {
        RunDivRem_TwoLargeBI();
        RunDivRem_TwoSmallBI();
        RunDivRem_OneSmallOneLargeBI();
        RunDivRem_OneLargeOne0BI();
        RunDivRem_OneSmallOne0BI();
        Boundary();
    }

    // Runs the RPN expression through both the BigInteger implementation and the
    // reference implementation, asserting they agree after every operation.
    private static void VerifyDivRemString(string opstring)
    {
        try
        {
            StackCalc sc = new StackCalc(opstring);
            while (sc.DoNextOperation())
            {
                Assert.Equal(sc.snCalc.Peek().ToString(), sc.myCalc.Peek().ToString());
                sc.VerifyOutParameter();
            }
        }
        catch (Exception e) when (!(e is DivideByZeroException))
        {
            // Log the original parameters, so we can reproduce any failure given the log
            throw new Exception($"VerifyDivRemString failed: {opstring} {e.ToString()}", e);
        }
    }

    // Random non-zero operand of random length (1..99 bytes).
    private static byte[] GetRandomByteArray(Random random)
    {
        return GetRandomByteArray(random, random.Next(1, 100));
    }

    // Random non-zero operand of the given length.
    private static byte[] GetRandomByteArray(Random random, int size)
    {
        return MyBigIntImp.GetNonZeroRandomByteArray(random, size);
    }

    // Formats an operand byte array for the StackCalc opstring.
    private static string Print(byte[] bytes)
    {
        return MyBigIntImp.Print(bytes);
    }
}
}
| |
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.Tokens;
using Microsoft.SharePoint.Client;
using System;
using System.Net;
using System.Security.Principal;
using System.Web;
using System.Web.Configuration;
namespace BusinessApps.HelpDesk
{
/// <summary>
/// Encapsulates all the information from SharePoint.
/// </summary>
public abstract class SharePointContext
{
    public const string SPHostUrlKey = "SPHostUrl";
    public const string SPAppWebUrlKey = "SPAppWebUrl";
    public const string SPLanguageKey = "SPLanguage";
    public const string SPClientTagKey = "SPClientTag";
    public const string SPProductNumberKey = "SPProductNumber";

    // Safety margin applied when judging token expiry, so a token close to
    // expiring is treated as already expired.
    protected static readonly TimeSpan AccessTokenLifetimeTolerance = TimeSpan.FromMinutes(5.0);

    private readonly Uri hostUrl;
    private readonly Uri appWebUrl;
    private readonly string language;
    private readonly string clientTag;
    private readonly string productNumber;

    // Cached access tokens, each paired with its UTC expiry time.
    // <AccessTokenString, UtcExpiresOn>
    protected Tuple<string, DateTime> userAccessTokenForSPHost;
    protected Tuple<string, DateTime> userAccessTokenForSPAppWeb;
    protected Tuple<string, DateTime> appOnlyAccessTokenForSPHost;
    protected Tuple<string, DateTime> appOnlyAccessTokenForSPAppWeb;

    /// <summary>
    /// Gets the SharePoint host url from QueryString of the specified HTTP request.
    /// </summary>
    /// <param name="httpRequest">The specified HTTP request.</param>
    /// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain a valid SharePoint host url.</returns>
    public static Uri GetSPHostUrl(HttpRequestBase httpRequest)
    {
        if (httpRequest == null)
        {
            throw new ArgumentNullException("httpRequest");
        }

        string rawValue = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SPHostUrlKey]);

        Uri parsedUrl;
        if (!Uri.TryCreate(rawValue, UriKind.Absolute, out parsedUrl))
        {
            return null;
        }

        // Only http/https urls are acceptable as a SharePoint host.
        bool hasWebScheme = parsedUrl.Scheme == Uri.UriSchemeHttp || parsedUrl.Scheme == Uri.UriSchemeHttps;
        return hasWebScheme ? parsedUrl : null;
    }

    /// <summary>
    /// Gets the SharePoint host url from QueryString of the specified HTTP request.
    /// </summary>
    /// <param name="httpRequest">The specified HTTP request.</param>
    /// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain a valid SharePoint host url.</returns>
    public static Uri GetSPHostUrl(HttpRequest httpRequest)
    {
        return GetSPHostUrl(new HttpRequestWrapper(httpRequest));
    }

    /// <summary>
    /// The SharePoint host url.
    /// </summary>
    public Uri SPHostUrl
    {
        get { return this.hostUrl; }
    }

    /// <summary>
    /// The SharePoint app web url.
    /// </summary>
    public Uri SPAppWebUrl
    {
        get { return this.appWebUrl; }
    }

    /// <summary>
    /// The SharePoint language.
    /// </summary>
    public string SPLanguage
    {
        get { return this.language; }
    }

    /// <summary>
    /// The SharePoint client tag.
    /// </summary>
    public string SPClientTag
    {
        get { return this.clientTag; }
    }

    /// <summary>
    /// The SharePoint product number.
    /// </summary>
    public string SPProductNumber
    {
        get { return this.productNumber; }
    }

    /// <summary>
    /// The user access token for the SharePoint host.
    /// </summary>
    public abstract string UserAccessTokenForSPHost { get; }

    /// <summary>
    /// The user access token for the SharePoint app web.
    /// </summary>
    public abstract string UserAccessTokenForSPAppWeb { get; }

    /// <summary>
    /// The app only access token for the SharePoint host.
    /// </summary>
    public abstract string AppOnlyAccessTokenForSPHost { get; }

    /// <summary>
    /// The app only access token for the SharePoint app web.
    /// </summary>
    public abstract string AppOnlyAccessTokenForSPAppWeb { get; }

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="spHostUrl">The SharePoint host url.</param>
    /// <param name="spAppWebUrl">The SharePoint app web url. May be <c>null</c>.</param>
    /// <param name="spLanguage">The SharePoint language.</param>
    /// <param name="spClientTag">The SharePoint client tag.</param>
    /// <param name="spProductNumber">The SharePoint product number.</param>
    protected SharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber)
    {
        if (spHostUrl == null)
        {
            throw new ArgumentNullException("spHostUrl");
        }

        if (string.IsNullOrEmpty(spLanguage))
        {
            throw new ArgumentNullException("spLanguage");
        }

        if (string.IsNullOrEmpty(spClientTag))
        {
            throw new ArgumentNullException("spClientTag");
        }

        if (string.IsNullOrEmpty(spProductNumber))
        {
            throw new ArgumentNullException("spProductNumber");
        }

        this.hostUrl = spHostUrl;
        this.appWebUrl = spAppWebUrl;
        this.language = spLanguage;
        this.clientTag = spClientTag;
        this.productNumber = spProductNumber;
    }

    /// <summary>
    /// Creates a user ClientContext for the SharePoint host.
    /// </summary>
    /// <returns>A ClientContext instance.</returns>
    public ClientContext CreateUserClientContextForSPHost()
    {
        return CreateClientContext(this.SPHostUrl, this.UserAccessTokenForSPHost);
    }

    /// <summary>
    /// Creates a user ClientContext for the SharePoint app web.
    /// </summary>
    /// <returns>A ClientContext instance.</returns>
    public ClientContext CreateUserClientContextForSPAppWeb()
    {
        return CreateClientContext(this.SPAppWebUrl, this.UserAccessTokenForSPAppWeb);
    }

    /// <summary>
    /// Creates app only ClientContext for the SharePoint host.
    /// </summary>
    /// <returns>A ClientContext instance.</returns>
    public ClientContext CreateAppOnlyClientContextForSPHost()
    {
        return CreateClientContext(this.SPHostUrl, this.AppOnlyAccessTokenForSPHost);
    }

    /// <summary>
    /// Creates an app only ClientContext for the SharePoint app web.
    /// </summary>
    /// <returns>A ClientContext instance.</returns>
    public ClientContext CreateAppOnlyClientContextForSPAppWeb()
    {
        return CreateClientContext(this.SPAppWebUrl, this.AppOnlyAccessTokenForSPAppWeb);
    }

    /// <summary>
    /// Gets the database connection string from SharePoint for autohosted app.
    /// This method is deprecated because the autohosted option is no longer available.
    /// </summary>
    [ObsoleteAttribute("This method is deprecated because the autohosted option is no longer available.", true)]
    public string GetDatabaseConnectionString()
    {
        throw new NotSupportedException("This method is deprecated because the autohosted option is no longer available.");
    }

    /// <summary>
    /// Determines if the specified access token is valid.
    /// It considers an access token as not valid if it is null, or it has expired.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <returns>True if the access token is valid.</returns>
    protected static bool IsAccessTokenValid(Tuple<string, DateTime> accessToken)
    {
        if (accessToken == null)
        {
            return false;
        }

        return !string.IsNullOrEmpty(accessToken.Item1) && accessToken.Item2 > DateTime.UtcNow;
    }

    /// <summary>
    /// Creates a ClientContext with the specified SharePoint site url and the access token.
    /// </summary>
    /// <param name="spSiteUrl">The site url.</param>
    /// <param name="accessToken">The access token.</param>
    /// <returns>A ClientContext instance, or <c>null</c> when either argument is missing.</returns>
    private static ClientContext CreateClientContext(Uri spSiteUrl, string accessToken)
    {
        if (spSiteUrl == null || string.IsNullOrEmpty(accessToken))
        {
            return null;
        }

        return TokenHelper.GetClientContextWithAccessToken(spSiteUrl.AbsoluteUri, accessToken);
    }
}
/// <summary>
/// Redirection status reported by SharePointContextProvider.CheckRedirectionStatus.
/// </summary>
public enum RedirectionStatus
{
    /// <summary>A SharePoint context is available; no redirect is required.</summary>
    Ok,
    /// <summary>The caller should redirect the user to SharePoint to authenticate.</summary>
    ShouldRedirect,
    /// <summary>Redirection is not possible (e.g. no SPHostUrl in the request or a redirect was already attempted).</summary>
    CanNotRedirect
}
/// <summary>
/// Provides SharePointContext instances.
/// Concrete subclasses decide how a context is created, validated, cached and persisted.
/// </summary>
public abstract class SharePointContextProvider
{
    // Process-wide current provider; chosen by the static constructor and
    // replaceable via Register().
    private static SharePointContextProvider current;
    /// <summary>
    /// The current SharePointContextProvider instance.
    /// </summary>
    public static SharePointContextProvider Current
    {
        get { return SharePointContextProvider.current; }
    }
    /// <summary>
    /// Initializes the default SharePointContextProvider instance.
    /// Low-trust (ACS) apps get the ACS provider; high-trust (S2S) apps get the high-trust provider.
    /// </summary>
    static SharePointContextProvider()
    {
        if (!TokenHelper.IsHighTrustApp())
        {
            SharePointContextProvider.current = new SharePointAcsContextProvider();
        }
        else
        {
            SharePointContextProvider.current = new SharePointHighTrustContextProvider();
        }
    }
    /// <summary>
    /// Registers the specified SharePointContextProvider instance as current.
    /// It should be called by Application_Start() in Global.asax.
    /// </summary>
    /// <param name="provider">The SharePointContextProvider to be set as current.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="provider"/> is null.</exception>
    public static void Register(SharePointContextProvider provider)
    {
        if (provider == null)
        {
            throw new ArgumentNullException("provider");
        }
        SharePointContextProvider.current = provider;
    }
    /// <summary>
    /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
    /// <returns>Redirection status.</returns>
    public static RedirectionStatus CheckRedirectionStatus(HttpContextBase httpContext, out Uri redirectUrl)
    {
        if (httpContext == null)
        {
            throw new ArgumentNullException("httpContext");
        }
        redirectUrl = null;
        bool contextTokenExpired = false;
        try
        {
            // A resolvable context means the user is already authenticated.
            if (SharePointContextProvider.Current.GetSharePointContext(httpContext) != null)
            {
                return RedirectionStatus.Ok;
            }
        }
        catch (SecurityTokenExpiredException)
        {
            // Remember the expiry: an expired token justifies one more redirect
            // even if we have redirected before (see the loop guard below).
            contextTokenExpired = true;
        }
        const string SPHasRedirectedToSharePointKey = "SPHasRedirectedToSharePoint";
        // Loop guard: if we already bounced through SharePoint once and the token
        // did not merely expire, another redirect would just loop forever.
        if (!string.IsNullOrEmpty(httpContext.Request.QueryString[SPHasRedirectedToSharePointKey]) && !contextTokenExpired)
        {
            return RedirectionStatus.CanNotRedirect;
        }
        Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        if (spHostUrl == null)
        {
            // Without SPHostUrl we do not know which SharePoint site to redirect to.
            return RedirectionStatus.CanNotRedirect;
        }
        if (StringComparer.OrdinalIgnoreCase.Equals(httpContext.Request.HttpMethod, "POST"))
        {
            // Redirecting a POST would drop the form body.
            return RedirectionStatus.CanNotRedirect;
        }
        Uri requestUrl = httpContext.Request.Url;
        var queryNameValueCollection = HttpUtility.ParseQueryString(requestUrl.Query);
        // Removes the values that are included in {StandardTokens}, as {StandardTokens} will be inserted at the beginning of the query string.
        queryNameValueCollection.Remove(SharePointContext.SPHostUrlKey);
        queryNameValueCollection.Remove(SharePointContext.SPAppWebUrlKey);
        queryNameValueCollection.Remove(SharePointContext.SPLanguageKey);
        queryNameValueCollection.Remove(SharePointContext.SPClientTagKey);
        queryNameValueCollection.Remove(SharePointContext.SPProductNumberKey);
        // Adds SPHasRedirectedToSharePoint=1 so the loop guard above triggers next time.
        queryNameValueCollection.Add(SPHasRedirectedToSharePointKey, "1");
        UriBuilder returnUrlBuilder = new UriBuilder(requestUrl);
        returnUrlBuilder.Query = queryNameValueCollection.ToString();
        // Inserts StandardTokens placeholder right after '?'; SharePoint expands it
        // into the SPHostUrl/SPAppWebUrl/... parameters on the way back.
        const string StandardTokens = "{StandardTokens}";
        string returnUrlString = returnUrlBuilder.Uri.AbsoluteUri;
        returnUrlString = returnUrlString.Insert(returnUrlString.IndexOf("?") + 1, StandardTokens + "&");
        // Constructs redirect url.
        string redirectUrlString = TokenHelper.GetAppContextTokenRequestUrl(spHostUrl.AbsoluteUri, Uri.EscapeDataString(returnUrlString));
        redirectUrl = new Uri(redirectUrlString, UriKind.Absolute);
        return RedirectionStatus.ShouldRedirect;
    }
    /// <summary>
    /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
    /// Convenience overload wrapping the raw HttpContext.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
    /// <returns>Redirection status.</returns>
    public static RedirectionStatus CheckRedirectionStatus(HttpContext httpContext, out Uri redirectUrl)
    {
        return CheckRedirectionStatus(new HttpContextWrapper(httpContext), out redirectUrl);
    }
    /// <summary>
    /// Creates a SharePointContext instance with the specified HTTP request.
    /// All five standard SharePoint query parameters except SPAppWebUrl are required;
    /// a missing or malformed SPAppWebUrl is tolerated (treated as "no app web").
    /// </summary>
    /// <param name="httpRequest">The HTTP request.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
    public SharePointContext CreateSharePointContext(HttpRequestBase httpRequest)
    {
        if (httpRequest == null)
        {
            throw new ArgumentNullException("httpRequest");
        }
        // SPHostUrl
        Uri spHostUrl = SharePointContext.GetSPHostUrl(httpRequest);
        if (spHostUrl == null)
        {
            return null;
        }
        // SPAppWebUrl — optional; anything that is not an absolute http(s) url is discarded.
        string spAppWebUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SharePointContext.SPAppWebUrlKey]);
        Uri spAppWebUrl;
        if (!Uri.TryCreate(spAppWebUrlString, UriKind.Absolute, out spAppWebUrl) ||
            !(spAppWebUrl.Scheme == Uri.UriSchemeHttp || spAppWebUrl.Scheme == Uri.UriSchemeHttps))
        {
            spAppWebUrl = null;
        }
        // SPLanguage
        string spLanguage = httpRequest.QueryString[SharePointContext.SPLanguageKey];
        if (string.IsNullOrEmpty(spLanguage))
        {
            return null;
        }
        // SPClientTag
        string spClientTag = httpRequest.QueryString[SharePointContext.SPClientTagKey];
        if (string.IsNullOrEmpty(spClientTag))
        {
            return null;
        }
        // SPProductNumber
        string spProductNumber = httpRequest.QueryString[SharePointContext.SPProductNumberKey];
        if (string.IsNullOrEmpty(spProductNumber))
        {
            return null;
        }
        // Delegate the mode-specific construction (ACS vs high-trust) to the subclass.
        return CreateSharePointContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, httpRequest);
    }
    /// <summary>
    /// Creates a SharePointContext instance with the specified HTTP request.
    /// Convenience overload wrapping the raw HttpRequest.
    /// </summary>
    /// <param name="httpRequest">The HTTP request.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
    public SharePointContext CreateSharePointContext(HttpRequest httpRequest)
    {
        return CreateSharePointContext(new HttpRequestWrapper(httpRequest));
    }
    /// <summary>
    /// Gets a SharePointContext instance associated with the specified HTTP context.
    /// Loads a cached context first; when it is missing or no longer valid for this
    /// request, a fresh context is created and saved.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
    public SharePointContext GetSharePointContext(HttpContextBase httpContext)
    {
        if (httpContext == null)
        {
            throw new ArgumentNullException("httpContext");
        }
        Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        if (spHostUrl == null)
        {
            return null;
        }
        SharePointContext spContext = LoadSharePointContext(httpContext);
        if (spContext == null || !ValidateSharePointContext(spContext, httpContext))
        {
            spContext = CreateSharePointContext(httpContext.Request);
            if (spContext != null)
            {
                SaveSharePointContext(spContext, httpContext);
            }
        }
        return spContext;
    }
    /// <summary>
    /// Gets a SharePointContext instance associated with the specified HTTP context.
    /// Convenience overload wrapping the raw HttpContext.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
    public SharePointContext GetSharePointContext(HttpContext httpContext)
    {
        return GetSharePointContext(new HttpContextWrapper(httpContext));
    }
    /// <summary>
    /// Creates a SharePointContext instance.
    /// </summary>
    /// <param name="spHostUrl">The SharePoint host url.</param>
    /// <param name="spAppWebUrl">The SharePoint app web url.</param>
    /// <param name="spLanguage">The SharePoint language.</param>
    /// <param name="spClientTag">The SharePoint client tag.</param>
    /// <param name="spProductNumber">The SharePoint product number.</param>
    /// <param name="httpRequest">The HTTP request.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
    protected abstract SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest);
    /// <summary>
    /// Validates if the given SharePointContext can be used with the specified HTTP context.
    /// </summary>
    /// <param name="spContext">The SharePointContext.</param>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>True if the given SharePointContext can be used with the specified HTTP context.</returns>
    protected abstract bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
    /// <summary>
    /// Loads the SharePointContext instance associated with the specified HTTP context.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if not found.</returns>
    protected abstract SharePointContext LoadSharePointContext(HttpContextBase httpContext);
    /// <summary>
    /// Saves the specified SharePointContext instance associated with the specified HTTP context.
    /// <c>null</c> is accepted for clearing the SharePointContext instance associated with the HTTP context.
    /// </summary>
    /// <param name="spContext">The SharePointContext instance to be saved, or <c>null</c>.</param>
    /// <param name="httpContext">The HTTP context.</param>
    protected abstract void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
}
#region ACS
/// <summary>
/// Encapsulates all the information from SharePoint in ACS mode.
/// </summary>
public class SharePointAcsContext : SharePointContext
{
    // Raw context token string as received from SharePoint, and its parsed form.
    private readonly string contextToken;
    private readonly SharePointContextToken contextTokenObj;
    /// <summary>
    /// The context token. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string ContextToken
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextToken : null; }
    }
    /// <summary>
    /// The context token's "CacheKey" claim. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string CacheKey
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.CacheKey : null; }
    }
    /// <summary>
    /// The context token's "refreshtoken" claim. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string RefreshToken
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.RefreshToken : null; }
    }
    // Each access-token property below caches its value in a ref field
    // (declared on the SharePointContext base class, not visible here) and
    // renews it through TokenHelper only when the cached value is missing or expired.
    public override string UserAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPHostUrl.Authority));
        }
    }
    public override string UserAccessTokenForSPAppWeb
    {
        get
        {
            // No app web for this installation; there is nothing to issue a token for.
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPAppWebUrl.Authority));
        }
    }
    public override string AppOnlyAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPHostUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPHostUrl)));
        }
    }
    public override string AppOnlyAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPAppWebUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPAppWebUrl)));
        }
    }
    /// <summary>
    /// Creates an ACS-mode context. Both the raw context token string and its
    /// parsed representation are required.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when either token argument is missing.</exception>
    public SharePointAcsContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, string contextToken, SharePointContextToken contextTokenObj)
        : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
    {
        if (string.IsNullOrEmpty(contextToken))
        {
            throw new ArgumentNullException("contextToken");
        }
        if (contextTokenObj == null)
        {
            throw new ArgumentNullException("contextTokenObj");
        }
        this.contextToken = contextToken;
        this.contextTokenObj = contextTokenObj;
    }
    /// <summary>
    /// Ensures the access token is valid and returns it.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    /// <returns>The access token string, or <c>null</c> when no valid token could be obtained.</returns>
    private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
    {
        RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);
        return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
    }
    /// <summary>
    /// Renews the access token if it is not valid.
    /// </summary>
    /// <param name="accessToken">The access token to renew.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
    {
        if (IsAccessTokenValid(accessToken))
        {
            return;
        }
        try
        {
            OAuth2AccessTokenResponse oAuth2AccessTokenResponse = tokenRenewalHandler();
            DateTime expiresOn = oAuth2AccessTokenResponse.ExpiresOn;
            // NOTE(review): AccessTokenLifetimeTolerance appears to be declared on the
            // SharePointContext base class (not visible in this chunk) — confirm.
            if ((expiresOn - oAuth2AccessTokenResponse.NotBefore) > AccessTokenLifetimeTolerance)
            {
                // Make the access token get renewed a bit earlier than the time when it expires
                // so that the calls to SharePoint with it will have enough time to complete successfully.
                expiresOn -= AccessTokenLifetimeTolerance;
            }
            accessToken = Tuple.Create(oAuth2AccessTokenResponse.AccessToken, expiresOn);
        }
        catch (WebException)
        {
            // Deliberate best-effort: a failed renewal leaves the cached token unchanged,
            // so callers observe a null token instead of an exception.
        }
    }
}
/// <summary>
/// Default provider for SharePointAcsContext.
/// Persists the context in session state and mirrors the token cache key in a cookie.
/// </summary>
public class SharePointAcsContextProvider : SharePointContextProvider
{
    private const string SPContextKey = "SPContext";
    private const string SPCacheKeyKey = "SPCacheKey";
    /// <summary>
    /// Builds an ACS context from the context token carried by the request.
    /// Returns <c>null</c> when the token is absent or fails validation.
    /// </summary>
    protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
    {
        string rawContextToken = TokenHelper.GetContextTokenFromRequest(httpRequest);
        if (string.IsNullOrEmpty(rawContextToken))
        {
            return null;
        }
        SharePointContextToken parsedToken;
        try
        {
            parsedToken = TokenHelper.ReadAndValidateContextToken(rawContextToken, httpRequest.Url.Authority);
        }
        catch (WebException)
        {
            // Could not reach ACS to validate; treat as "no context".
            return null;
        }
        catch (AudienceUriValidationFailedException)
        {
            // Token was issued for a different audience; reject it.
            return null;
        }
        return new SharePointAcsContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, rawContextToken, parsedToken);
    }
    /// <summary>
    /// A cached ACS context is reusable only when it targets the same host,
    /// its token is still live, the cache-key cookie matches, and any context
    /// token on the current request agrees with the cached one.
    /// </summary>
    protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        SharePointAcsContext acsContext = spContext as SharePointAcsContext;
        if (acsContext == null)
        {
            return false;
        }
        Uri requestHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        if (requestHostUrl != acsContext.SPHostUrl)
        {
            return false;
        }
        string cachedKey = acsContext.CacheKey;
        if (string.IsNullOrEmpty(cachedKey))
        {
            return false;
        }
        HttpCookie cacheKeyCookie = httpContext.Request.Cookies[SPCacheKeyKey];
        if (cacheKeyCookie == null || cacheKeyCookie.Value != cachedKey)
        {
            return false;
        }
        if (string.IsNullOrEmpty(acsContext.ContextToken))
        {
            return false;
        }
        string incomingToken = TokenHelper.GetContextTokenFromRequest(httpContext.Request);
        return string.IsNullOrEmpty(incomingToken) || incomingToken == acsContext.ContextToken;
    }
    /// <summary>
    /// Loads the ACS context previously stored in session state, if any.
    /// </summary>
    protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
    {
        return httpContext.Session[SPContextKey] as SharePointAcsContext;
    }
    /// <summary>
    /// Stores the context in session state and, for a real ACS context, writes the
    /// cache-key cookie (secure + http-only) used by validation on later requests.
    /// A non-ACS context clears the session entry.
    /// </summary>
    protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        SharePointAcsContext acsContext = spContext as SharePointAcsContext;
        if (acsContext != null)
        {
            HttpCookie cacheKeyCookie = new HttpCookie(SPCacheKeyKey);
            cacheKeyCookie.Value = acsContext.CacheKey;
            cacheKeyCookie.Secure = true;
            cacheKeyCookie.HttpOnly = true;
            httpContext.Response.AppendCookie(cacheKeyCookie);
        }
        httpContext.Session[SPContextKey] = acsContext;
    }
}
#endregion ACS
#region HighTrust
/// <summary>
/// Encapsulates all the information from SharePoint in HighTrust mode.
/// </summary>
public class SharePointHighTrustContext : SharePointContext
{
    private readonly WindowsIdentity logonUserIdentity;
    /// <summary>
    /// The Windows identity for the current user.
    /// </summary>
    public WindowsIdentity LogonUserIdentity
    {
        get { return this.logonUserIdentity; }
    }
    // Each access-token property below caches its value in a ref field (declared on
    // the SharePointContext base class, not visible here) and renews it only when
    // missing or expired. Passing a null identity yields an app-only token.
    public override string UserAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, this.LogonUserIdentity));
        }
    }
    public override string UserAccessTokenForSPAppWeb
    {
        get
        {
            // No app web for this installation; nothing to issue a token for.
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, this.LogonUserIdentity));
        }
    }
    public override string AppOnlyAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, null));
        }
    }
    public override string AppOnlyAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, null));
        }
    }
    /// <summary>
    /// Creates a high-trust context bound to the current user's Windows identity.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="logonUserIdentity"/> is null.</exception>
    public SharePointHighTrustContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, WindowsIdentity logonUserIdentity)
        : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
    {
        if (logonUserIdentity == null)
        {
            throw new ArgumentNullException("logonUserIdentity");
        }
        this.logonUserIdentity = logonUserIdentity;
    }
    /// <summary>
    /// Ensures the access token is valid and returns it.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    /// <returns>The access token string, or <c>null</c> when no valid token could be obtained.</returns>
    private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
    {
        RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);
        return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
    }
    /// <summary>
    /// Renews the access token if it is not valid.
    /// </summary>
    /// <param name="accessToken">The access token to renew.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
    {
        if (IsAccessTokenValid(accessToken))
        {
            return;
        }
        // High-trust tokens are minted locally, so the expiration is computed here
        // from the configured lifetime rather than taken from a server response.
        DateTime expiresOn = DateTime.UtcNow.Add(TokenHelper.HighTrustAccessTokenLifetime);
        if (TokenHelper.HighTrustAccessTokenLifetime > AccessTokenLifetimeTolerance)
        {
            // Make the access token get renewed a bit earlier than the time when it expires
            // so that the calls to SharePoint with it will have enough time to complete successfully.
            expiresOn -= AccessTokenLifetimeTolerance;
        }
        accessToken = Tuple.Create(tokenRenewalHandler(), expiresOn);
    }
}
/// <summary>
/// Default provider for SharePointHighTrustContext.
/// Persists the context in session state and binds it to the caller's Windows identity.
/// </summary>
public class SharePointHighTrustContextProvider : SharePointContextProvider
{
    private const string SPContextKey = "SPContext";
    /// <summary>
    /// Builds a high-trust context from the request's Windows identity.
    /// Returns <c>null</c> for anonymous, guest, or SID-less identities.
    /// </summary>
    protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
    {
        WindowsIdentity identity = httpRequest.LogonUserIdentity;
        bool usableIdentity = identity != null
            && identity.IsAuthenticated
            && !identity.IsGuest
            && identity.User != null;
        if (!usableIdentity)
        {
            return null;
        }
        return new SharePointHighTrustContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, identity);
    }
    /// <summary>
    /// A cached high-trust context is reusable only when the request targets the
    /// same host and is made by the same authenticated, non-guest Windows user.
    /// </summary>
    protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        SharePointHighTrustContext highTrustContext = spContext as SharePointHighTrustContext;
        if (highTrustContext == null)
        {
            return false;
        }
        WindowsIdentity identity = httpContext.Request.LogonUserIdentity;
        if (identity == null || !identity.IsAuthenticated || identity.IsGuest)
        {
            return false;
        }
        Uri requestHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        return requestHostUrl == highTrustContext.SPHostUrl
            && identity.User == highTrustContext.LogonUserIdentity.User;
    }
    /// <summary>
    /// Loads the high-trust context previously stored in session state, if any.
    /// </summary>
    protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
    {
        return httpContext.Session[SPContextKey] as SharePointHighTrustContext;
    }
    /// <summary>
    /// Stores the context in session state; a non-high-trust context clears the entry.
    /// </summary>
    protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        httpContext.Session[SPContextKey] = spContext as SharePointHighTrustContext;
    }
}
#endregion HighTrust
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Runtime.Serialization;
using System.Text.RegularExpressions;
using Funq;
using NUnit.Framework;
using ServiceStack.ServiceClient.Web;
using ServiceStack.FluentValidation;
using ServiceStack.Service;
using ServiceStack.ServiceHost;
using ServiceStack.ServiceInterface;
using ServiceStack.ServiceInterface.ServiceModel;
using ServiceStack.ServiceInterface.Validation;
using ServiceStack.Text;
using ServiceStack.WebHost.Endpoints;
using ServiceStack.WebHost.Endpoints.Support;
using ServiceStack.WebHost.Endpoints.Tests;
using ServiceStack.WebHost.Endpoints.Tests.Support;
using ServiceStack.WebHost.Endpoints.Tests.Support.Host;
namespace ServiceStack.WebHost.IntegrationTests.Services
{
// Request/response DTO for the /customers REST endpoints below.
// Two routes are registered so the resource is reachable with or without an Id.
[RestService("/customers")]
[RestService("/customers/{Id}")]
public class Customers
{
    public int Id { get; set; }
    public string FirstName { get; set; }
    public string LastName { get; set; }
    public string Company { get; set; }
    public decimal Discount { get; set; }
    public string Address { get; set; }
    public string Postcode { get; set; }
    // When true, CustomersValidator requires Discount to be non-zero on POST/PUT.
    public bool HasDiscount { get; set; }
}
// Abstraction over address validation so the rule can be swapped or mocked
// through the IoC container.
public interface IAddressValidator
{
    bool ValidAddress(string address);
}
// Default implementation: an address is valid when it is non-null and its
// length falls within an inclusive range.
public class AddressValidator : IAddressValidator
{
    // Inclusive bounds for an acceptable address length.
    private const int MinLength = 20;
    private const int MaxLength = 250;

    public bool ValidAddress(string address)
    {
        if (address == null)
        {
            return false;
        }
        int length = address.Length;
        return length >= MinLength && length <= MaxLength;
    }
}
// Validation rules for the Customers DTO.
// Id is always required; the remaining rules live in a rule set that only runs
// for POST and PUT requests (selected from the HTTP verb at validation time).
public class CustomersValidator : AbstractValidator<Customers>
{
    // Injected by the IoC container (registered in the app host's Configure);
    // used by the Address rule below.
    public IAddressValidator AddressValidator { get; set; }
    public CustomersValidator()
    {
        RuleFor(x => x.Id).NotEqual(default(int));
        RuleSet(ApplyTo.Post | ApplyTo.Put, () => {
            RuleFor(x => x.LastName).NotEmpty().WithErrorCode("ShouldNotBeEmpty");
            RuleFor(x => x.FirstName).NotEmpty().WithMessage("Please specify a first name");
            RuleFor(x => x.Company).NotNull();
            // Discount only matters when the customer claims to have one.
            RuleFor(x => x.Discount).NotEqual(0).When(x => x.HasDiscount);
            RuleFor(x => x.Address).Must(x => AddressValidator.ValidAddress(x));
            RuleFor(x => x.Postcode).Must(BeAValidPostcode).WithMessage("Please specify a valid postcode");
        });
    }
    // Compiled once and reused; matches US ZIP (12345) and ZIP+4 (12345-6789) forms.
    static readonly Regex UsPostCodeRegEx = new Regex(@"^\d{5}(-\d{4})?$", RegexOptions.Compiled);
    private bool BeAValidPostcode(string postcode)
    {
        return !string.IsNullOrEmpty(postcode) && UsPostCodeRegEx.IsMatch(postcode);
    }
}
// Response DTO: echoes the request in Result and carries validation errors
// in the conventional ResponseStatus property.
public class CustomersResponse
{
    public Customers Result { get; set; }
    public ResponseStatus ResponseStatus { get; set; }
}
// Echo service: every verb returns the (already validated) request back to the
// caller. Validation runs in the request pipeline before these handlers execute.
public class CustomerService : RestServiceBase<Customers>
{
    // Shared by all four verb handlers; wraps the request in the response DTO.
    private static CustomersResponse Echo(Customers request)
    {
        return new CustomersResponse { Result = request };
    }
    public override object OnGet(Customers request)
    {
        return Echo(request);
    }
    public override object OnPost(Customers request)
    {
        return Echo(request);
    }
    public override object OnPut(Customers request)
    {
        return Echo(request);
    }
    public override object OnDelete(Customers request)
    {
        return Echo(request);
    }
}
// Integration tests for CustomersValidator, exercised both directly (in-process)
// and end-to-end through self-hosted HTTP service clients.
[TestFixture]
public class CustomerServiceValidationTests
{
    // Self-hosted endpoint used by the HTTP-based service clients.
    private const string ListeningOn = "http://localhost:82/";
    // Minimal app host that wires up the validation feature and the validators
    // under test.
    public class ValidationAppHostHttpListener
        : AppHostHttpListenerBase
    {
        public ValidationAppHostHttpListener()
            : base("Validation Tests", typeof(CustomerService).Assembly) { }
        public override void Configure(Container container)
        {
            Plugins.Add(new ValidationFeature());
            container.Register<IAddressValidator>(new AddressValidator());
            container.RegisterValidators(typeof(CustomersValidator).Assembly);
        }
    }
    ValidationAppHostHttpListener appHost;
    [TestFixtureSetUp]
    public void OnTestFixtureSetUp()
    {
        appHost = new ValidationAppHostHttpListener();
        appHost.Init();
        appHost.Start(ListeningOn);
    }
    [TestFixtureTearDown]
    public void OnTestFixtureTearDown()
    {
        appHost.Dispose();
    }
    // Runs CustomersValidator directly (no HTTP) with the rule set selected by
    // the given HTTP verb and returns the resulting field-level errors.
    private static List<ResponseError> GetValidationFieldErrors(string httpMethod, Customers request)
    {
        var validator = (IValidator)new CustomersValidator {
            AddressValidator = new AddressValidator()
        };
        var validationResult = validator.Validate(
            new ValidationContext(request, null, new MultiRuleSetValidatorSelector(httpMethod)));
        var responseStatus = ResponseStatusTranslator.Instance.Parse(validationResult.ToErrorResult());
        var errorFields = responseStatus.Errors;
        return errorFields ?? new List<ResponseError>();
    }
    // Expected failures for an empty Customers DTO on POST/PUT; the two arrays
    // are index-aligned (field name -> error code).
    private string[] ExpectedPostErrorFields = new[] {
        "Id",
        "LastName",
        "FirstName",
        "Company",
        "Address",
        "Postcode",
    };
    private string[] ExpectedPostErrorCodes = new[] {
        "NotEqual",
        "ShouldNotBeEmpty",
        "NotEmpty",
        "NotNull",
        "Predicate",
        "Predicate",
    };
    Customers validRequest;
    [SetUp]
    public void SetUp()
    {
        // A request satisfying every rule, rebuilt before each test so tests
        // can mutate it freely.
        validRequest = new Customers {
            Id = 1,
            FirstName = "FirstName",
            LastName = "LastName",
            Address = "12345 Address St, New York",
            Company = "Company",
            Discount = 10,
            HasDiscount = true,
            Postcode = "11215",
        };
    }
    [Test]
    public void Validates_ValidRequest_request_on_Post()
    {
        var errorFields = GetValidationFieldErrors(HttpMethod.Post, validRequest);
        Assert.That(errorFields.Count, Is.EqualTo(0));
    }
    [Test]
    public void Validates_ValidRequest_request_on_Get()
    {
        var errorFields = GetValidationFieldErrors(HttpMethod.Get, validRequest);
        Assert.That(errorFields.Count, Is.EqualTo(0));
    }
    [Test]
    public void Validates_Conditional_Request_request_on_Post()
    {
        // Discount of 0 is only an error because HasDiscount is true (When clause).
        validRequest.Discount = 0;
        validRequest.HasDiscount = true;
        var errorFields = GetValidationFieldErrors(HttpMethod.Post, validRequest);
        Assert.That(errorFields.Count, Is.EqualTo(1));
        Assert.That(errorFields[0].FieldName, Is.EqualTo("Discount"));
    }
    [Test]
    public void Validates_empty_request_on_Post()
    {
        var request = new Customers();
        var errorFields = GetValidationFieldErrors(HttpMethod.Post, request);
        var fieldNames = errorFields.Select(x => x.FieldName).ToArray();
        var fieldErrorCodes = errorFields.Select(x => x.ErrorCode).ToArray();
        Assert.That(errorFields.Count, Is.EqualTo(ExpectedPostErrorFields.Length));
        Assert.That(fieldNames, Is.EquivalentTo(ExpectedPostErrorFields));
        Assert.That(fieldErrorCodes, Is.EquivalentTo(ExpectedPostErrorCodes));
    }
    [Test]
    public void Validates_empty_request_on_Put()
    {
        // PUT shares the POST rule set, so the expected errors are identical.
        var request = new Customers();
        var errorFields = GetValidationFieldErrors(HttpMethod.Put, request);
        var fieldNames = errorFields.Select(x => x.FieldName).ToArray();
        var fieldErrorCodes = errorFields.Select(x => x.ErrorCode).ToArray();
        Assert.That(errorFields.Count, Is.EqualTo(ExpectedPostErrorFields.Length));
        Assert.That(fieldNames, Is.EquivalentTo(ExpectedPostErrorFields));
        Assert.That(fieldErrorCodes, Is.EquivalentTo(ExpectedPostErrorCodes));
    }
    [Test]
    public void Validates_empty_request_on_Get()
    {
        // Outside POST/PUT only the unconditional Id rule applies.
        var request = new Customers();
        var errorFields = GetValidationFieldErrors(HttpMethod.Get, request);
        Assert.That(errorFields.Count, Is.EqualTo(1));
        Assert.That(errorFields[0].ErrorCode, Is.EqualTo("NotEqual"));
        Assert.That(errorFields[0].FieldName, Is.EqualTo("Id"));
    }
    [Test]
    public void Validates_empty_request_on_Delete()
    {
        var request = new Customers();
        var errorFields = GetValidationFieldErrors(HttpMethod.Delete, request);
        Assert.That(errorFields.Count, Is.EqualTo(1));
        Assert.That(errorFields[0].ErrorCode, Is.EqualTo("NotEqual"));
        Assert.That(errorFields[0].FieldName, Is.EqualTo("Id"));
    }
    // In-process client that bypasses HTTP entirely.
    protected static IServiceClient UnitTestServiceClient()
    {
        EndpointHandlerBase.ServiceManager = new ServiceManager(true, typeof(SecureService).Assembly);
        return new DirectServiceClient(EndpointHandlerBase.ServiceManager);
    }
    public static IEnumerable ServiceClients
    {
        get
        {
            // Workaround: this TestCaseSource gets evaluated for every test fixture
            // in the assembly, not just this one, so each client is wrapped in a
            // Func<> to defer construction until the owning test actually runs.
            return new Func<IServiceClient>[] {
                () => UnitTestServiceClient(),
                () => new JsonServiceClient(ListeningOn),
                () => new JsvServiceClient(ListeningOn),
                () => new XmlServiceClient(ListeningOn),
            };
        }
    }
    [Test, TestCaseSource(typeof(CustomerServiceValidationTests), "ServiceClients")]
    public void Post_empty_request_throws_validation_exception(Func<IServiceClient> factory)
    {
        try
        {
            var client = factory();
            var response = client.Send<CustomersResponse>(new Customers());
            Assert.Fail("Should throw Validation Exception");
        }
        catch (WebServiceException ex)
        {
            // The validation errors travel back inside the response DTO.
            var response = (CustomersResponse)ex.ResponseDto;
            var errorFields = response.ResponseStatus.Errors;
            var fieldNames = errorFields.Select(x => x.FieldName).ToArray();
            var fieldErrorCodes = errorFields.Select(x => x.ErrorCode).ToArray();
            Assert.That(ex.StatusCode, Is.EqualTo((int)HttpStatusCode.BadRequest));
            Assert.That(errorFields.Count, Is.EqualTo(ExpectedPostErrorFields.Length));
            Assert.That(fieldNames, Is.EquivalentTo(ExpectedPostErrorFields));
            Assert.That(fieldErrorCodes, Is.EquivalentTo(ExpectedPostErrorCodes));
        }
    }
    [Test, TestCaseSource(typeof(CustomerServiceValidationTests), "ServiceClients")]
    public void Get_empty_request_throws_validation_exception(Func<IServiceClient> factory)
    {
        try
        {
            var client = (IRestClient)factory();
            var response = client.Get<CustomersResponse>("Customers");
            Assert.Fail("Should throw Validation Exception");
        }
        catch (WebServiceException ex)
        {
            var response = (CustomersResponse)ex.ResponseDto;
            var errorFields = response.ResponseStatus.Errors;
            Assert.That(ex.StatusCode, Is.EqualTo((int)HttpStatusCode.BadRequest));
            Assert.That(errorFields.Count, Is.EqualTo(1));
            Assert.That(errorFields[0].ErrorCode, Is.EqualTo("NotEqual"));
            Assert.That(errorFields[0].FieldName, Is.EqualTo("Id"));
        }
    }
    [Test, TestCaseSource(typeof(CustomerServiceValidationTests), "ServiceClients")]
    public void Post_ValidRequest_succeeds(Func<IServiceClient> factory)
    {
        var client = factory();
        var response = client.Send<CustomersResponse>(validRequest);
        Assert.That(response.ResponseStatus, Is.Null);
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Automation
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for DscNodeConfigurationOperations.
/// </summary>
public static partial class DscNodeConfigurationOperationsExtensions
{
/// <summary>
/// Delete the Dsc node configurations by node configuration.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='nodeConfigurationName'>
/// The Dsc node configuration name.
/// </param>
public static void Delete(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string nodeConfigurationName)
{
    // Synchronous wrapper: blocks the calling thread until DeleteAsync completes.
    // Prefer DeleteAsync from asynchronous code paths.
    operations.DeleteAsync(resourceGroupName, automationAccountName, nodeConfigurationName).GetAwaiter().GetResult();
}
/// <summary>
/// Delete the Dsc node configurations by node configuration.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='nodeConfigurationName'>
/// The Dsc node configuration name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string nodeConfigurationName, CancellationToken cancellationToken = default(CancellationToken))
{
    // The delete call returns no body, so the HTTP wrapper is disposed immediately.
    var httpResponse = await operations.DeleteWithHttpMessagesAsync(resourceGroupName, automationAccountName, nodeConfigurationName, null, cancellationToken).ConfigureAwait(false);
    httpResponse.Dispose();
}
/// <summary>
/// Retrieve the Dsc node configurations by node configuration.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='nodeConfigurationName'>
/// The Dsc node configuration name.
/// </param>
public static DscNodeConfiguration Get(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string nodeConfigurationName)
{
return operations.GetAsync(resourceGroupName, automationAccountName, nodeConfigurationName).GetAwaiter().GetResult();
}
/// <summary>
/// Retrieve the Dsc node configurations by node configuration.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='nodeConfigurationName'>
/// The Dsc node configuration name.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DscNodeConfiguration> GetAsync(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string nodeConfigurationName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, automationAccountName, nodeConfigurationName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Create the node configuration identified by node configuration name.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='nodeConfigurationName'>
/// The create or update parameters for configuration.
/// </param>
/// <param name='parameters'>
/// The create or update parameters for configuration.
/// </param>
public static DscNodeConfiguration CreateOrUpdate(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string nodeConfigurationName, DscNodeConfigurationCreateOrUpdateParameters parameters)
{
return operations.CreateOrUpdateAsync(resourceGroupName, automationAccountName, nodeConfigurationName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Create the node configuration identified by node configuration name.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='nodeConfigurationName'>
/// The create or update parameters for configuration.
/// </param>
/// <param name='parameters'>
/// The create or update parameters for configuration.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DscNodeConfiguration> CreateOrUpdateAsync(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string nodeConfigurationName, DscNodeConfigurationCreateOrUpdateParameters parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, automationAccountName, nodeConfigurationName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Retrieve a list of dsc node configurations.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='filter'>
/// The filter to apply on the operation.
/// </param>
public static IPage<DscNodeConfiguration> ListByAutomationAccount(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string filter = default(string))
{
return operations.ListByAutomationAccountAsync(resourceGroupName, automationAccountName, filter).GetAwaiter().GetResult();
}
/// <summary>
/// Retrieve a list of dsc node configurations.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The resource group name.
/// </param>
/// <param name='automationAccountName'>
/// The automation account name.
/// </param>
/// <param name='filter'>
/// The filter to apply on the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DscNodeConfiguration>> ListByAutomationAccountAsync(this IDscNodeConfigurationOperations operations, string resourceGroupName, string automationAccountName, string filter = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByAutomationAccountWithHttpMessagesAsync(resourceGroupName, automationAccountName, filter, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Retrieve a list of dsc node configurations.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<DscNodeConfiguration> ListByAutomationAccountNext(this IDscNodeConfigurationOperations operations, string nextPageLink)
{
return operations.ListByAutomationAccountNextAsync(nextPageLink).GetAwaiter().GetResult();
}
/// <summary>
/// Retrieve a list of dsc node configurations.
/// <see href="http://aka.ms/azureautomationsdk/dscnodeconfigurations" />
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DscNodeConfiguration>> ListByAutomationAccountNextAsync(this IDscNodeConfigurationOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByAutomationAccountNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| |
using System;
using System.Net;
using System.Net.Sockets;
using System.Collections;
using System.Security.Cryptography;
using System.Diagnostics;
using DeOps.Implementation.Protocol;
using DeOps.Implementation.Protocol.Net;
using DeOps.Implementation.Dht;
using DeOps.Simulator;
namespace DeOps.Implementation.Transport
{
// Role of the remote peer in the proxy relationship; stays Unset until a proxy
// request is processed (SecondTimer closes connections still Unset after 10 seconds).
public enum ProxyType { Unset, Server, ClientBlocked, ClientNAT };
// Lifecycle of a TcpConnect: Connecting until the socket handshake completes,
// Connected while live, Closed after CleanClose/Disconnect.
public enum TcpState { Connecting, Connected, Closed };
public class TcpConnect : DhtSource
{
    public OpCore Core;
    public DhtNetwork Network;
    public TcpHandler TcpControl;

    // client info
    public IPAddress RemoteIP;

    // socket info
    public Socket TcpSocket = null;
    public TcpState State = TcpState.Connecting;
    public int Age;                 // seconds since creation, capped at 60 by SecondTimer
    public bool CheckedFirewall;
    public bool Outbound;           // true when we initiated this connection
    public string ByeMessage;       // reason given to CleanClose, if any

    // bandwidth (reset each second by SecondTimer)
    public int BytesReceivedinSec;
    public int BytesSentinSec;
    int SecondsDead;                // consecutive seconds without receiving any bytes

    // proxying
    public ProxyType Proxy;

    const int BUFF_SIZE = 16 * 1024;

    // sending: SendBuffer accumulates plaintext until whole cipher blocks can be
    // encrypted into FinalSendBuffer, which holds wire-ready bytes awaiting TrySend
    ICryptoTransform Encryptor;
    byte[] SendBuffer;
    int SendBuffSize;
    byte[] FinalSendBuffer;
    int FinalSendBuffSize;

    // receiving: RecvBuffer holds raw socket bytes, FinalRecvBuffer holds decrypted
    // bytes awaiting packet framing in ReceivePackets
    ICryptoTransform Decryptor;
    public byte[] RecvBuffer;
    public int RecvBuffSize;
    public byte[] FinalRecvBuffer;
    public int FinalRecvBuffSize;

    // bandwidth
    public BandwidthLog Bandwidth;

    /// <summary>
    /// Inbound constructor - the accepting TcpHandler attaches the socket afterwards.
    /// </summary>
    public TcpConnect(TcpHandler control)
    {
        TcpControl = control;
        Network = TcpControl.Network;
        Core = TcpControl.Core;

        Bandwidth = new BandwidthLog(Core.RecordBandwidthSeconds);
    }

    /// <summary>
    /// Outbound constructor - starts an asynchronous connect to the given address,
    /// or routes the connect through the simulator when one is active.
    /// </summary>
    public TcpConnect(TcpHandler control, DhtAddress address, ushort tcpPort)
    {
        Debug.Assert(address.UserID != 0);

        TcpControl = control;
        Network = TcpControl.Network;
        Core = TcpControl.Core;

        Bandwidth = new BandwidthLog(Core.RecordBandwidthSeconds);

        Outbound = true;

        RemoteIP = address.IP;
        TcpPort = tcpPort;
        UdpPort = address.UdpPort;
        UserID = address.UserID;

        try
        {
            IPEndPoint endpoint = new IPEndPoint(RemoteIP, TcpPort);

            if (Core.Sim != null)
            {
                Core.Sim.Internet.SendPacket(SimPacketType.TcpConnect, Network, null, endpoint, this);
                return;
            }

            TcpSocket = new Socket(address.IP.AddressFamily, SocketType.Stream, ProtocolType.Tcp);
            TcpSocket.BeginConnect((EndPoint)endpoint, new AsyncCallback(Socket_Connect), TcpSocket);
        }
        catch (Exception ex)
        {
            LogException("TcpSocket", ex.Message);
            Disconnect();
        }
    }

    /// <summary>
    /// Driven once a second: rolls bandwidth counters, enforces the proxy handshake
    /// timeout, replicates to new contacts, pings or closes dead connections, and
    /// flushes any buffered outbound bytes.
    /// </summary>
    public void SecondTimer()
    {
        if (State == TcpState.Closed)
            return;

        // update bandwidth
        SecondsDead = (BytesReceivedinSec > 0) ? 0 : SecondsDead + 1;
        BytesSentinSec = 0;
        BytesReceivedinSec = 0;

        Core.Context.Bandwidth.InPerSec += Bandwidth.InPerSec;
        Core.Context.Bandwidth.OutPerSec += Bandwidth.OutPerSec;

        Bandwidth.NextSecond();

        if (Age < 60)
            Age++;

        // if proxy not set after 10 secs disconnect
        if (Age > 10 && Proxy == ProxyType.Unset)
        {
            if (State == TcpState.Connecting)
                CleanClose("Timed out");
            else
                CleanClose("No proxy request");

            return;
        }

        // replicate
        if (Age == 15 && !Network.IsLookup)
        {
            Network.Store.Replicate(new DhtContact(this, RemoteIP));
        }

        // new global proxy
        if (Proxy == ProxyType.Server)
        {
            if (Age == 5)
            {
                // announce to rudp connections new proxy if blocked/nat, or using a global proxy
                if (Network.IsLookup)
                {
                    Core.Context.Cores.LockReading(delegate()
                    {
                        foreach (OpCore core in Core.Context.Cores)
                            if (core.Network.UseLookupProxies)
                                core.Network.RudpControl.AnnounceProxy(this);
                    });
                }
                else
                    Network.RudpControl.AnnounceProxy(this);
            }
            else if (Age == 15)
            {
                if (!Network.IsLookup)
                    Core.Locations.UpdateLocation();

                if (Network.UseLookupProxies)
                    Core.Locations.PublishGlobal();
            }
        }
        // new proxied host
        else if (Age == 15)
        {
            // proxied replicates to server naturally by adding server to routing table
            // server replicates to proxy here
            Network.Store.Replicate(new DhtContact(this, RemoteIP));
        }

        // send ping if dead for x secs
        if (SecondsDead > 30 && SecondsDead % 5 == 0)
        {
            SendPacket(new Ping());
        }
        else if (SecondsDead > 60)
        {
            CleanClose("Minute dead");
            return;
        }

        // flush send buffer
        TrySend();
    }

    /// <summary>
    /// Completion callback for the outbound BeginConnect.
    /// </summary>
    public void Socket_Connect(IAsyncResult asyncResult)
    {
        try
        {
            TcpSocket.EndConnect(asyncResult);

            OnConnect();
        }
        catch (Exception ex)
        {
            LogException("Socket_Connect", ex.Message);
            Disconnect();
        }
    }

    /// <summary>
    /// Runs once the TCP connection is established: allocates buffers, sets up
    /// encryption, and sends the initial Ping from the core thread.
    /// </summary>
    public void OnConnect()
    {
        Network.UpdateLog("Tcp", "Connected to " + ToString());

        SetConnected();

        if (Core.Sim == null || Core.Sim.Internet.TestEncryption)
            CreateEncryptor();

        Ping ping = new Ping();
        ping.Source = Network.GetLocalSource();
        ping.RemoteIP = RemoteIP;

        Core.RunInCoreAsync(delegate()
        {
            SendPacket(ping);

            // if we made connection to the node its not firewalled
            if (Outbound)
                Network.Routing.Add(new DhtContact(this, RemoteIP));
        });
    }

    /// <summary>
    /// Creates the outbound cipher keyed for the remote user and writes the IV
    /// as the first bytes on the wire.
    /// </summary>
    private void CreateEncryptor()
    {
        RijndaelManaged crypt = new RijndaelManaged();
        crypt.Key = Network.GetAugmentedKey(UserID);
        crypt.Padding = PaddingMode.None;

        if (UserID == Network.Local.UserID)
            Debug.Assert(Utilities.MemCompare(crypt.Key, Network.LocalAugmentedKey));

        Encryptor = crypt.CreateEncryptor();

        // the IV is sent in the clear ahead of any encrypted data
        crypt.IV.CopyTo(FinalSendBuffer, 0);
        FinalSendBuffSize = crypt.IV.Length;
    }

    /// <summary>
    /// Allocates the send/receive buffers, marks the connection live, and begins
    /// the asynchronous receive loop (skipped under simulation).
    /// </summary>
    public void SetConnected()
    {
        SendBuffer = new byte[BUFF_SIZE];
        RecvBuffer = new byte[BUFF_SIZE];
        FinalRecvBuffer = new byte[BUFF_SIZE];
        FinalSendBuffer = new byte[BUFF_SIZE];

        State = TcpState.Connected;

        if (Core.Sim != null)
            return;

        try
        {
            // FIX: size must be the space remaining after the offset - passing the
            // full buffer length throws ArgumentOutOfRangeException when RecvBuffSize > 0
            TcpSocket.BeginReceive(RecvBuffer, RecvBuffSize, RecvBuffer.Length - RecvBuffSize, SocketFlags.None, new AsyncCallback(Socket_Receive), TcpSocket);
        }
        catch (Exception ex)
        {
            LogException("SetConnected", ex.Message);
            Disconnect();
        }
    }

    /// <summary>
    /// Politely closes the connection, sending a Bye packet if currently connected.
    /// </summary>
    public void CleanClose(string reason)
    {
        CleanClose(reason, false);
    }

    /// <summary>
    /// Politely closes the connection; <paramref name="reconnect"/> is forwarded in
    /// the Bye packet so the remote side knows whether to re-establish.
    /// </summary>
    public void CleanClose(string reason, bool reconnect)
    {
        if (State == TcpState.Connecting)
            ByeMessage = reason;

        if (State == TcpState.Connected)
        {
            ByeMessage = reason;

            Bye bye = new Bye();
            bye.SenderID = Network.Local.UserID;
            bye.ContactList = Network.Routing.Find(UserID, 8);
            bye.Message = reason;
            bye.Reconnect = reconnect;

            SendPacket(bye);

            Network.UpdateLog("Tcp", "Closing connection to " + ToString() + " " + reason);
        }

        State = TcpState.Closed;
    }

    /// <summary>
    /// Hard-closes the socket (or notifies the simulator) and marks the state Closed.
    /// </summary>
    public void Disconnect()
    {
        if (State != TcpState.Closed)
        {
            try
            {
                if (Core.Sim == null)
                    TcpSocket.Close();
                else
                    Core.Sim.Internet.SendPacket(SimPacketType.TcpClose, Network, null, new IPEndPoint(RemoteIP, TcpPort), this);
            }
            catch (Exception ex)
            {
                LogException("Disconnect", ex.Message);
            }
        }

        State = TcpState.Closed;
    }

    /// <summary>
    /// Encodes, optionally encrypts, and queues a packet for transmission.
    /// Returns the encoded packet size on success, 0 otherwise.
    /// </summary>
    public int SendPacket(G2Packet packet)
    {
        if (Core.InvokeRequired)
            Debug.Assert(false);

        if (State != TcpState.Connected)
            return 0;

        // usually when an inbound connection (dont know remote userId) is determined to be a loopback, we close the connection
        // even before the userId is set, if the userId is not set then the encryptor cant be init'd to send the 'close' packet
        if (UserID == 0)
            return 0;

        if (Core.Sim == null || Core.Sim.Internet.TestEncryption)
            if (Encryptor == null)
                CreateEncryptor();

        try
        {
            if (packet is NetworkPacket)
            {
                ((NetworkPacket)packet).SourceID = Network.Local.UserID;
                ((NetworkPacket)packet).ClientID = Network.Local.ClientID;
            }

            byte[] encoded = packet.Encode(Network.Protocol);

            // FIX: remember the packet's size now - 'encoded' is reused below for the
            // padding packet, so returning encoded.Length at the end reported the
            // padding's length instead of the packet's
            int payloadSize = encoded.Length;

            PacketLogEntry logEntry = new PacketLogEntry(Core.TimeNow, TransportProtocol.Tcp, DirectionType.Out, new DhtAddress(RemoteIP, this), encoded);
            Network.LogPacket(logEntry);

            lock (FinalSendBuffer)
            {
                // fill up final buffer, keep encrypt buffer clear
                if (BUFF_SIZE - FinalSendBuffSize < encoded.Length + 128)
                    throw new Exception("SendBuff Full"); //crit check packet log

                // encrypt, turn off encryption during simulation
                if (Core.Sim == null || Core.Sim.Internet.TestEncryption)
                {
                    encoded.CopyTo(SendBuffer, SendBuffSize);
                    SendBuffSize += encoded.Length;

                    // pad the plaintext out to a whole cipher block
                    int remainder = SendBuffSize % Encryptor.InputBlockSize;
                    if (remainder > 0)
                    {
                        CryptPadding padding = new CryptPadding();

                        int fillerNeeded = Encryptor.InputBlockSize - remainder;
                        if (fillerNeeded > 2)
                            padding.Filler = new byte[fillerNeeded - 2];

                        encoded = padding.Encode(Network.Protocol);
                        encoded.CopyTo(SendBuffer, SendBuffSize);
                        SendBuffSize += encoded.Length;
                    }

                    int tryTransform = SendBuffSize - (SendBuffSize % Encryptor.InputBlockSize);
                    if (tryTransform == 0)
                        return 0;

                    int tranformed = Encryptor.TransformBlock(SendBuffer, 0, tryTransform, FinalSendBuffer, FinalSendBuffSize);
                    if (tranformed == 0)
                        return 0;

                    FinalSendBuffSize += tranformed;
                    SendBuffSize -= tranformed;
                    Buffer.BlockCopy(SendBuffer, tranformed, SendBuffer, 0, SendBuffSize);
                }
                else
                {
                    encoded.CopyTo(FinalSendBuffer, FinalSendBuffSize);
                    FinalSendBuffSize += encoded.Length;
                }
            }

            TrySend();

            // record bandwidth
            return payloadSize;
        }
        catch (Exception ex)
        {
            LogException("SendPacket", ex.Message);
        }

        return 0;
    }

    /// <summary>
    /// Attempts to push any queued wire-ready bytes out the socket (or simulator),
    /// realigning the buffer after a partial send.
    /// </summary>
    public void TrySend()
    {
        if (FinalSendBuffSize == 0)
            return;

        try
        {
            lock (FinalSendBuffer)
            {
                int bytesSent = 0;

                //Core.UpdateConsole("Begin Send " + SendBufferSize.ToString());

                if (Core.Sim == null)
                {
                    TcpSocket.Blocking = false;
                    bytesSent = TcpSocket.Send(FinalSendBuffer, FinalSendBuffSize, SocketFlags.None);
                }
                else
                {
                    bytesSent = Core.Sim.Internet.SendPacket(SimPacketType.Tcp, Network, Utilities.ExtractBytes(FinalSendBuffer, 0, FinalSendBuffSize), new IPEndPoint(RemoteIP, TcpPort), this);

                    if (bytesSent < 0) // simulator tcp disconnected
                    {
                        LogException("TrySend", "Disconnected");
                        Disconnect();
                        return;
                    }
                }

                if (bytesSent > 0)
                {
                    FinalSendBuffSize -= bytesSent;
                    BytesSentinSec += bytesSent;
                    Bandwidth.OutPerSec += bytesSent;

                    if (FinalSendBuffSize < 0)
                        throw new Exception("Tcp SendBuff size less than zero");

                    // realign send buffer
                    if (FinalSendBuffSize > 0)
                        lock (FinalSendBuffer)
                            Buffer.BlockCopy(FinalSendBuffer, bytesSent, FinalSendBuffer, 0, FinalSendBuffSize);
                }
            }
        }
        catch (Exception ex)
        {
            LogException("TrySend", ex.Message);
            Disconnect();
        }
    }

    /// <summary>
    /// Completion callback of BeginReceive: processes the received bytes and re-arms
    /// the next asynchronous receive while the connection stays open.
    /// </summary>
    void Socket_Receive(IAsyncResult asyncResult)
    {
        try
        {
            int recvLength = TcpSocket.EndReceive(asyncResult);

            //Core.UpdateConsole(recvLength.ToString() + " received");

            if (recvLength <= 0)
            {
                Disconnect();
                return;
            }

            OnReceive(recvLength);
        }
        catch (Exception ex)
        {
            LogException("Socket_Receive:1", ex.Message);
            Disconnect();
        }

        try
        {
            if (State == TcpState.Connected)
                // FIX: size must be the remaining space after the offset - the original
                // passed RecvBuffer.Length, which throws ArgumentOutOfRangeException
                // whenever RecvBuffSize > 0 (leftover partial cipher block), and the
                // exception handler then tears down an otherwise healthy connection
                TcpSocket.BeginReceive(RecvBuffer, RecvBuffSize, RecvBuffer.Length - RecvBuffSize, SocketFlags.None, new AsyncCallback(Socket_Receive), TcpSocket);
        }
        catch (Exception ex)
        {
            LogException("Socket_Receive:2", ex.Message);
            Disconnect();
        }
    }

    /// <summary>
    /// Accounts for newly received bytes, decrypts them (lazily creating the
    /// decryptor from the leading IV), and hands complete data to ReceivePackets.
    /// </summary>
    public void OnReceive(int length)
    {
        if (State != TcpState.Connected)
            return;

        if (length <= 0)
        {
            Disconnect();
            return;
        }

        Bandwidth.InPerSec += length;
        BytesReceivedinSec += length;
        RecvBuffSize += length;

        // transfer to final recv buffer
        if (Core.Sim == null || Core.Sim.Internet.TestEncryption)
        {
            //create decryptor
            if (Decryptor == null)
            {
                int ivlen = 16;
                if (RecvBuffSize < ivlen)
                    return;

                RijndaelManaged crypt = new RijndaelManaged();
                crypt.Key = Network.LocalAugmentedKey;
                crypt.IV = Utilities.ExtractBytes(RecvBuffer, 0, ivlen);
                crypt.Padding = PaddingMode.None;
                Decryptor = crypt.CreateDecryptor();

                RecvBuffSize -= ivlen;
                if (RecvBuffSize == 0)
                    return;

                Buffer.BlockCopy(RecvBuffer, ivlen, RecvBuffer, 0, RecvBuffSize);
            }

            // decrypt only whole cipher blocks; the remainder waits for more data
            int tryTransform = RecvBuffSize - (RecvBuffSize % Decryptor.InputBlockSize);
            if (tryTransform == 0)
                return;

            int transformed = Decryptor.TransformBlock(RecvBuffer, 0, tryTransform, FinalRecvBuffer, FinalRecvBuffSize);
            if (transformed == 0)
                return;

            FinalRecvBuffSize += transformed;
            RecvBuffSize -= transformed;
            Buffer.BlockCopy(RecvBuffer, transformed, RecvBuffer, 0, RecvBuffSize);
        }
        else
        {
            int copysize = RecvBuffSize;

            if (FinalRecvBuffSize + RecvBuffSize > FinalRecvBuffer.Length)
                copysize = FinalRecvBuffer.Length - FinalRecvBuffSize;

            Buffer.BlockCopy(RecvBuffer, 0, FinalRecvBuffer, FinalRecvBuffSize, copysize);
            FinalRecvBuffSize += copysize;

            RecvBuffSize -= copysize;
            if (RecvBuffSize > 0)
                Buffer.BlockCopy(RecvBuffer, copysize, RecvBuffer, 0, RecvBuffSize);
        }

        ReceivePackets();
    }

    G2ReceivedPacket LastPacket; //crit delete

    /// <summary>
    /// Frames complete G2 packets out of the decrypted buffer, dispatching each to
    /// the network layer, then realigns the buffer around any partial packet.
    /// </summary>
    void ReceivePackets()
    {
        int Start = 0;
        G2ReadResult streamStatus = G2ReadResult.PACKET_GOOD;

        while (streamStatus == G2ReadResult.PACKET_GOOD)
        {
            G2ReceivedPacket packet = new G2ReceivedPacket();
            packet.Root = new G2Header(FinalRecvBuffer);

            streamStatus = G2Protocol.ReadNextPacket(packet.Root, ref Start, ref FinalRecvBuffSize);

            if (streamStatus != G2ReadResult.PACKET_GOOD)
                break;

            packet.Tcp = this;
            packet.Source = new DhtContact(this, RemoteIP);

            // extract data from final recv buffer so it can be referenced without being overwritten by this thread
            byte[] extracted = Utilities.ExtractBytes(packet.Root.Data, packet.Root.PacketPos, packet.Root.PacketSize);
            packet.Root = new G2Header(extracted);
            G2Protocol.ReadPacket(packet.Root);

            LastPacket = packet;

            PacketLogEntry logEntry = new PacketLogEntry(Core.TimeNow, TransportProtocol.Tcp, DirectionType.In, packet.Source, packet.Root.Data);
            Network.LogPacket(logEntry);

            Network.IncomingPacket(packet);
        }

        // re-align buffer
        if (Start > 0 && FinalRecvBuffSize > 0)
        {
            Buffer.BlockCopy(FinalRecvBuffer, Start, FinalRecvBuffer, 0, FinalRecvBuffSize);
            //Network.UpdateConsole(PacketBytesReady.ToString() + " bytes moved to front of receive buffer");
        }
    }

    /// <summary>
    /// Writes an exception entry to the network log, tagged with this connection.
    /// </summary>
    void LogException(string where, string message)
    {
        Network.UpdateLog("Exception", "TcpConnect(" + ToString() + ")::" + where + ": " + message);
    }

    /// <summary>
    /// Builds a DhtContact describing the remote endpoint of this connection.
    /// </summary>
    public DhtContact GetContact()
    {
        return new DhtContact(UserID, ClientID, RemoteIP, TcpPort, UdpPort);
    }

    public override string ToString()
    {
        return RemoteIP.ToString() + ":" + TcpPort.ToString();
    }

    // the simulator uses tcpConnects in a dictionay, this prevents it from using the base hash and overlapping
    // tcpConnect instances
    object UniqueIdentifier = new object();

    public override int GetHashCode()
    {
        return UniqueIdentifier.GetHashCode();
    }
}
}
| |
using System;
using System.Text;
namespace SkipList
{
public class SkipList
{
    /* One element of the skip-list lattice: a (key, value) pair plus links to the
     * neighbour on the right and to this element's copies in the lists directly
     * above and below. */
    private class Node
    {
        public IComparable key;
        public object value;
        public Node right, up, down;

        public Node(IComparable key, object value)
        {
            this.key = key;
            this.value = value;
            right = up = down = null;
        }

        public Node() : this(null, null) { }
    }

    /* Left sentinel of each list, indexed by level. */
    private Node[] head;
    /* Single right sentinel shared by every list. */
    private Node tail;
    /* Upper bound on the number of lists stacked above the bottom one. */
    private int maxListsCount;
    /* Highest level currently in use; lists are grown lazily as coin flips demand. */
    private int currentListsCount;
    /* Chance that an inserted key is promoted one level higher (classically 0.5). */
    private double probability;
    /* Source of randomness for the coin flips. */
    private Random r = new Random();

    /* Builds an empty skip list with the given maximum number of stacked lists
     * and promotion probability. */
    public SkipList(int maxListsCount, double probability)
    {
        this.maxListsCount = maxListsCount;
        this.probability = probability;

        /* One left sentinel per level, all terminated by the shared right sentinel,
         * with the left sentinels chained vertically via up/down. */
        tail = new Node();
        head = new Node[maxListsCount + 1];
        for (int level = 0; level <= maxListsCount; level++)
        {
            head[level] = new Node();
            head[level].right = tail;
            if (level > 0)
            {
                head[level].down = head[level - 1];
                head[level - 1].up = head[level];
            }
        }

        currentListsCount = 0;
    }

    /* Default configuration: 11 stacked lists, fair coin. */
    public SkipList() : this(11, 0.5) { }

    /* Walks the structure top-down and records, for every level, the last node
     * whose key is strictly smaller than the given key (or the level's sentinel).
     * These are the nodes after which an insertion/removal takes place. */
    private Node[] FindPredecessors(IComparable key)
    {
        Node[] stops = new Node[maxListsCount + 1];

        Node cursor = head[currentListsCount];
        for (int level = currentListsCount; level >= 0; level--)
        {
            if (level < currentListsCount)
                cursor = cursor.down;

            while (cursor.right != tail && cursor.right.key.CompareTo(key) < 0)
                cursor = cursor.right;

            stops[level] = cursor;
        }

        return stops;
    }

    /* Inserts a (key, value) pair. If the key already exists, its value is
     * replaced; otherwise a random coin-flip level decides into how many of the
     * stacked lists the new node is threaded. */
    public void Insert(IComparable key, object value)
    {
        Node[] stops = FindPredecessors(key);

        /* Existing key: just overwrite the stored value on the bottom list,
         * which is the one Find reads from. */
        Node hit = stops[0].right;
        if (hit != tail && hit.key.Equals(key))
        {
            hit.value = value;
            return;
        }

        /* Flip coins to pick how many levels the new key occupies, growing the
         * set of active lists if necessary. */
        int newLevel = NewRandomLevel();
        if (newLevel > currentListsCount)
        {
            for (int level = currentListsCount + 1; level <= newLevel; level++)
                stops[level] = head[level];
            currentListsCount = newLevel;
        }

        /* Thread a fresh node into each level bottom-up, wiring the vertical
         * up/down links between consecutive copies. */
        Node below = null;
        for (int level = 0; level <= newLevel; level++)
        {
            Node fresh = new Node(key, value);
            fresh.right = stops[level].right;
            stops[level].right = fresh;

            if (below != null)
            {
                fresh.down = below;
                below.up = fresh;
            }
            below = fresh;
        }
    }

    /* Draws the level for a new key: repeatedly flip the coin, promoting one
     * level per success, capped by the configured maximum. */
    private int NewRandomLevel()
    {
        int level = 0;
        while (level < maxListsCount && FlipCoin())
            level++;
        return level;
    }

    /* Simulates one coin flip with the configured success probability. */
    private bool FlipCoin()
    {
        return r.NextDouble() < probability;
    }

    /* Removes a key (if present) from every list it appears in. */
    public void Remove(IComparable key)
    {
        Node[] stops = FindPredecessors(key);

        /* Only unlink when the bottom list actually holds the key. */
        Node hit = stops[0].right;
        if (hit == tail || !hit.key.Equals(key))
            return;

        for (int level = currentListsCount; level >= 0; level--)
            if (stops[level].right != tail && stops[level].right.key.Equals(key))
                stops[level].right = stops[level].right.right;
    }

    /* Looks a key up and returns its associated value, or null when absent. */
    public object Find(IComparable key)
    {
        /* Same top-down walk as FindPredecessors, but only the final (bottom-list)
         * position matters here. */
        Node cursor = head[currentListsCount];
        for (int level = currentListsCount; level >= 0; level--)
        {
            if (level < currentListsCount)
                cursor = cursor.down;

            while (cursor.right != tail && cursor.right.key.CompareTo(key) < 0)
                cursor = cursor.right;
        }

        Node candidate = cursor.right;
        return (candidate != tail && candidate.key.Equals(key)) ? candidate.value : null;
    }

    /* Renders every level of the structure, top list first - useful for debugging. */
    override public string ToString()
    {
        StringBuilder text = new StringBuilder();
        text.Append("current number of lists: " + currentListsCount);

        for (int level = currentListsCount; level >= 0; level--)
        {
            text.Append(Environment.NewLine);
            for (Node cursor = head[level]; cursor != null; cursor = cursor.right)
            {
                text.Append("[");
                text.Append(cursor.key != null ? cursor.key.ToString() : "N/A");
                text.Append(", ");
                text.Append(cursor.value != null ? cursor.value.ToString() : "N/A");
                text.Append("] ");
            }
        }

        text.Append(Environment.NewLine);
        text.Append("--------------");
        return text.ToString();
    }
}
}
| |
// File generated automatically by ReswPlus. https://github.com/rudyhuyn/ReswPlus
// The NuGet package PluralNet is necessary to support Pluralization.
using System;
using Windows.ApplicationModel.Resources;
using Windows.UI.Xaml.Markup;
using Windows.UI.Xaml.Data;
namespace JustRemember.Strings{
/// <summary>
/// Strongly typed accessors for the localized strings of the "Setting" resource map.
/// Auto-generated by ReswPlus — do not edit by hand; changes are lost on regeneration.
/// </summary>
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Huyn.ReswPlus", "0.1.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
public class Setting {
// Shared loader for the "Setting" resource map; view-independent so it is usable
// from non-UI code without a CoreWindow.
private static ResourceLoader _resourceLoader;
static Setting()
{
_resourceLoader = ResourceLoader.GetForViewIndependentUse("Setting");
}
#region storage_day
/// <summary>
/// Get the pluralized version of the string similar to: day
/// </summary>
public static string storage_day(double number)
{
return Huyn.PluralNet.ResourceLoaderExtension.GetPlural(_resourceLoader, "storage_day", (decimal)number);
}
#endregion
#region storage_hour
/// <summary>
/// Get the pluralized version of the string similar to: hour
/// </summary>
public static string storage_hour(double number)
{
return Huyn.PluralNet.ResourceLoaderExtension.GetPlural(_resourceLoader, "storage_hour", (decimal)number);
}
#endregion
#region storage_minute
/// <summary>
/// Get the pluralized version of the string similar to: minute
/// </summary>
public static string storage_minute(double number)
{
return Huyn.PluralNet.ResourceLoaderExtension.GetPlural(_resourceLoader, "storage_minute", (decimal)number);
}
#endregion
#region storage_month
/// <summary>
/// Get the pluralized version of the string similar to: month
/// </summary>
public static string storage_month(double number)
{
return Huyn.PluralNet.ResourceLoaderExtension.GetPlural(_resourceLoader, "storage_month", (decimal)number);
}
#endregion
/// <summary>
/// Looks up a localized string similar to: Changelog
/// </summary>
public static string Changelog_0 => _resourceLoader.GetString("Changelog_0");
/// <summary>
/// Looks up a localized string similar to: About
/// </summary>
public static string Config_about => _resourceLoader.GetString("Config_about");
/// <summary>
/// Looks up a localized string similar to: Contact:
/// </summary>
public static string Config_About_Contact => _resourceLoader.GetString("Config_About_Contact");
/// <summary>
/// Looks up a localized string similar to: About
/// </summary>
public static string Config_about_header => _resourceLoader.GetString("Config_about_header");
/// <summary>
/// Looks up a localized string similar to: Instruction
/// </summary>
public static string Config_About_Howto => _resourceLoader.GetString("Config_About_Howto");
/// <summary>
/// Looks up a localized string similar to: Developed by:
/// </summary>
public static string Config_about_made => _resourceLoader.GetString("Config_about_made");
/// <summary>
/// Looks up a localized string similar to: Version:
/// </summary>
public static string Config_About_Version => _resourceLoader.GetString("Config_About_Version");
/// <summary>
/// Looks up a localized string similar to: What's new
/// </summary>
public static string Config_About_WhatNew => _resourceLoader.GetString("Config_About_WhatNew");
/// <summary>
/// Looks up a localized string similar to: Enable display text auto scroll
/// </summary>
public static string Config_autoscroll => _resourceLoader.GetString("Config_autoscroll");
/// <summary>
/// Looks up a localized string similar to: Beta testing:
/// </summary>
public static string Config_beta_tester => _resourceLoader.GetString("Config_beta_tester");
/// <summary>
/// Looks up a localized string similar to: 1
/// </summary>
public static string Config_c1a => _resourceLoader.GetString("Config_c1a");
/// <summary>
/// Looks up a localized string similar to: first
/// </summary>
public static string Config_c1b => _resourceLoader.GetString("Config_c1b");
/// <summary>
/// Looks up a localized string similar to: 2
/// </summary>
public static string Config_c2a => _resourceLoader.GetString("Config_c2a");
/// <summary>
/// Looks up a localized string similar to: second
/// </summary>
public static string Config_c2b => _resourceLoader.GetString("Config_c2b");
/// <summary>
/// Looks up a localized string similar to: 3
/// </summary>
public static string Config_c3a => _resourceLoader.GetString("Config_c3a");
/// <summary>
/// Looks up a localized string similar to: third
/// </summary>
public static string Config_c3b => _resourceLoader.GetString("Config_c3b");
/// <summary>
/// Looks up a localized string similar to: Choose style
/// </summary>
public static string Config_choicelook => _resourceLoader.GetString("Config_choicelook");
/// <summary>
/// Looks up a localized string similar to: Number of choices
/// </summary>
public static string Config_choices => _resourceLoader.GetString("Config_choices");
/// <summary>
/// Looks up a localized string similar to: Bottom
/// </summary>
public static string Config_Choose_Bottom => _resourceLoader.GetString("Config_Choose_Bottom");
/// <summary>
/// Looks up a localized string similar to: Center
/// </summary>
public static string Config_Choose_Center => _resourceLoader.GetString("Config_Choose_Center");
/// <summary>
/// Looks up a localized string similar to: Write
/// </summary>
public static string Config_Choose_Write => _resourceLoader.GetString("Config_Choose_Write");
/// <summary>
/// Looks up a localized string similar to: Preview
/// </summary>
public static string Config_cpv => _resourceLoader.GetString("Config_cpv");
/// <summary>
/// Looks up a localized string similar to: Clear progress
/// </summary>
public static string Config_delall => _resourceLoader.GetString("Config_delall");
/// <summary>
/// Looks up a localized string similar to: After completion
/// </summary>
public static string Config_end => _resourceLoader.GetString("Config_end");
/// <summary>
/// Looks up a localized string similar to: Go to summary page
/// </summary>
public static string Config_end_a => _resourceLoader.GetString("Config_end_a");
/// <summary>
/// Looks up a localized string similar to: And...
/// </summary>
public static string Config_end_and => _resourceLoader.GetString("Config_end_and");
/// <summary>
/// Looks up a localized string similar to: Go back to main page
/// </summary>
public static string Config_end_c => _resourceLoader.GetString("Config_end_c");
/// <summary>
/// Looks up a localized string similar to: English translate by:
/// </summary>
public static string Config_eng_translate => _resourceLoader.GetString("Config_eng_translate");
/// <summary>
/// Looks up a localized string similar to: Extensions & Bundled memos
/// </summary>
public static string Config_extension => _resourceLoader.GetString("Config_extension");
/// <summary>
/// Looks up a localized string similar to: Uninstall
/// </summary>
public static string Config_Ext_Uninstall => _resourceLoader.GetString("Config_Ext_Uninstall");
/// <summary>
/// Looks up a localized string similar to: Display font size
/// </summary>
public static string Config_fontsize => _resourceLoader.GetString("Config_fontsize");
/// <summary>
/// Looks up a localized string similar to: Theme
/// </summary>
public static string Config_General_Theme => _resourceLoader.GetString("Config_General_Theme");
/// <summary>
/// Looks up a localized string similar to: Dark
/// </summary>
public static string Config_General_Theme_Dark => _resourceLoader.GetString("Config_General_Theme_Dark");
/// <summary>
/// Looks up a localized string similar to: Light
/// </summary>
public static string Config_General_Theme_Light => _resourceLoader.GetString("Config_General_Theme_Light");
/// <summary>
/// Looks up a localized string similar to: Show hint on start
/// </summary>
public static string Config_hint => _resourceLoader.GetString("Config_hint");
/// <summary>
/// Looks up a localized string similar to: This option apply to:
/// </summary>
public static string Config_Info_Apply => _resourceLoader.GetString("Config_Info_Apply");
/// <summary>
/// Looks up a localized string similar to: Dictionary
/// </summary>
public static string Config_Info_Apply_Dic => _resourceLoader.GetString("Config_Info_Apply_Dic");
/// <summary>
/// Looks up a localized string similar to: This option work with memo that begin with #MODE=VOLC
/// </summary>
public static string Config_Info_Apply_Dic_Desc => _resourceLoader.GetString("Config_Info_Apply_Dic_Desc");
/// <summary>
/// Looks up a localized string similar to: This option work with memo that begin with #MODE=EXAM
/// </summary>
public static string Config_Info_Apply_QA_Desc => _resourceLoader.GetString("Config_Info_Apply_QA_Desc");
/// <summary>
/// Looks up a localized string similar to: Language
/// </summary>
public static string Config_language => _resourceLoader.GetString("Config_language");
/// <summary>
/// Looks up a localized string similar to: Version, developer, contributor info
/// </summary>
public static string Config_Menu_About_Desc => _resourceLoader.GetString("Config_Menu_About_Desc");
/// <summary>
/// Looks up a localized string similar to: All change that has been made since initial release
/// </summary>
public static string Config_Menu_Changes_Desc => _resourceLoader.GetString("Config_Menu_Changes_Desc");
/// <summary>
/// Looks up a localized string similar to: General settings
/// </summary>
public static string Config_Menu_General => _resourceLoader.GetString("Config_Menu_General");
/// <summary>
/// Looks up a localized string similar to: Language, theme, selection mode
/// </summary>
public static string Config_Menu_General_Desc => _resourceLoader.GetString("Config_Menu_General_Desc");
/// <summary>
/// Looks up a localized string similar to: Change number of choices, time limit
/// </summary>
public static string Config_Menu_Session_Desc => _resourceLoader.GetString("Config_Menu_Session_Desc");
/// <summary>
/// Looks up a localized string similar to: Storage & reset
/// </summary>
public static string Config_Menu_SR => _resourceLoader.GetString("Config_Menu_SR");
/// <summary>
/// Looks up a localized string similar to: All stats that saved after finish memorize
/// </summary>
public static string Config_Menu_Stat_Desc => _resourceLoader.GetString("Config_Menu_Stat_Desc");
/// <summary>
/// Looks up a localized string similar to: Memo, sessions configs size and reset setting
/// </summary>
public static string Config_Menu_Storage_Desc => _resourceLoader.GetString("Config_Menu_Storage_Desc");
/// <summary>
/// Looks up a localized string similar to: Randomize question & answer
/// </summary>
public static string Config_RandomQA => _resourceLoader.GetString("Config_RandomQA");
/// <summary>
/// Looks up a localized string similar to: No randomize
/// </summary>
public static string Config_Rand_a => _resourceLoader.GetString("Config_Rand_a");
/// <summary>
/// Looks up a localized string similar to: Randomize question order
/// </summary>
public static string Config_Rand_b => _resourceLoader.GetString("Config_Rand_b");
/// <summary>
/// Looks up a localized string similar to: Randomize question & answer order
/// </summary>
public static string Config_Rand_c => _resourceLoader.GetString("Config_Rand_c");
/// <summary>
/// Looks up a localized string similar to: This will reset/delete everything Press Yes to continue, Press No if you actually don't intent to
/// </summary>
public static string Config_ResetConfirm => _resourceLoader.GetString("Config_ResetConfirm");
/// <summary>
/// Looks up a localized string similar to: Restore default settings
/// </summary>
public static string Config_reset_a => _resourceLoader.GetString("Config_reset_a");
/// <summary>
/// Looks up a localized string similar to: Clear stats
/// </summary>
public static string Config_reset_b => _resourceLoader.GetString("Config_reset_b");
/// <summary>
/// Looks up a localized string similar to: Clear sessions
/// </summary>
public static string Config_reset_c => _resourceLoader.GetString("Config_reset_c");
/// <summary>
/// Looks up a localized string similar to: Clear all memos
/// </summary>
public static string Config_reset_d => _resourceLoader.GetString("Config_reset_d");
/// <summary>
/// Looks up a localized string similar to: Reset everything
/// </summary>
public static string Config_reset_e => _resourceLoader.GetString("Config_reset_e");
/// <summary>
/// Looks up a localized string similar to: Reset
/// </summary>
public static string Config_reset_header => _resourceLoader.GetString("Config_reset_header");
/// <summary>
/// Looks up a localized string similar to: Reverse dictionary tranlation
/// </summary>
public static string Config_Reverse_Dictionary => _resourceLoader.GetString("Config_Reverse_Dictionary");
/// <summary>
/// Looks up a localized string similar to: Save stats
/// </summary>
public static string Config_saveall => _resourceLoader.GetString("Config_saveall");
/// <summary>
/// Looks up a localized string similar to: Session settings
/// </summary>
public static string Config_session => _resourceLoader.GetString("Config_session");
/// <summary>
/// Looks up a localized string similar to: Show ads
/// </summary>
public static string Config_ShowAd => _resourceLoader.GetString("Config_ShowAd");
/// <summary>
/// Looks up a localized string similar to: Stats
/// </summary>
public static string Config_stat => _resourceLoader.GetString("Config_stat");
/// <summary>
/// Looks up a localized string similar to: Correct choice
/// </summary>
public static string Config_stat_corrected => _resourceLoader.GetString("Config_stat_corrected");
/// <summary>
/// Looks up a localized string similar to: {0} {1}
/// </summary>
public static string Config_stat_count_format => _resourceLoader.GetString("Config_stat_count_format");
/// <summary>
/// Looks up a localized string similar to: No stats saved yet
/// </summary>
public static string Config_stat_no => _resourceLoader.GetString("Config_stat_no");
/// <summary>
/// Looks up a localized string similar to: stats
/// </summary>
public static string Config_stat_prural => _resourceLoader.GetString("Config_stat_prural");
/// <summary>
/// Looks up a localized string similar to: Selected choice
/// </summary>
public static string Config_stat_selected => _resourceLoader.GetString("Config_stat_selected");
/// <summary>
/// Looks up a localized string similar to: Selected and correct choice
/// </summary>
public static string Config_stat_selectNcorrect => _resourceLoader.GetString("Config_stat_selectNcorrect");
/// <summary>
/// Looks up a localized string similar to: stat
/// </summary>
public static string Config_stat_single => _resourceLoader.GetString("Config_stat_single");
/// <summary>
/// Looks up a localized string similar to: Started on:
/// </summary>
public static string Config_stat_started => _resourceLoader.GetString("Config_stat_started");
/// <summary>
/// Looks up a localized string similar to: This session took
/// </summary>
public static string Config_stat_time_a => _resourceLoader.GetString("Config_stat_time_a");
/// <summary>
/// Looks up a localized string similar to: with
/// </summary>
public static string Config_stat_time_b => _resourceLoader.GetString("Config_stat_time_b");
/// <summary>
/// Looks up a localized string similar to: left before
/// </summary>
public static string Config_stat_time_c => _resourceLoader.GetString("Config_stat_time_c");
/// <summary>
/// Looks up a localized string similar to: limit
/// </summary>
public static string Config_stat_time_d => _resourceLoader.GetString("Config_stat_time_d");
/// <summary>
/// Looks up a localized string similar to: Storage & Reset
/// </summary>
public static string Config_StorageReset => _resourceLoader.GetString("Config_StorageReset");
/// <summary>
/// Looks up a localized string similar to: Memos Description file size
/// </summary>
public static string Config_Storage_Desc => _resourceLoader.GetString("Config_Storage_Desc");
/// <summary>
/// Looks up a localized string similar to: Last updated:
/// </summary>
public static string Config_Storage_LastUpdate => _resourceLoader.GetString("Config_Storage_LastUpdate");
/// <summary>
/// Looks up a localized string similar to: Local storage size
/// </summary>
public static string Config_Storage_Local => _resourceLoader.GetString("Config_Storage_Local");
/// <summary>
/// Looks up a localized string similar to: Memos file size
/// </summary>
public static string Config_Storage_Memos => _resourceLoader.GetString("Config_Storage_Memos");
/// <summary>
/// Looks up a localized string similar to: Roaming storage size
/// </summary>
public static string Config_Storage_Roaming => _resourceLoader.GetString("Config_Storage_Roaming");
/// <summary>
/// Looks up a localized string similar to: Saved sessions file size
/// </summary>
public static string Config_Storage_Session => _resourceLoader.GetString("Config_Storage_Session");
/// <summary>
/// Looks up a localized string similar to: Stats file size
/// </summary>
public static string Config_Storage_Stat => _resourceLoader.GetString("Config_Storage_Stat");
/// <summary>
/// Looks up a localized string similar to: Total size
/// </summary>
public static string Config_Storage_Total => _resourceLoader.GetString("Config_Storage_Total");
/// <summary>
/// Looks up a localized string similar to: Submit
/// </summary>
public static string Config_submit => _resourceLoader.GetString("Config_submit");
/// <summary>
/// Looks up a localized string similar to: Use time limit
/// </summary>
public static string Config_timelimit_use => _resourceLoader.GetString("Config_timelimit_use");
/// <summary>
/// Looks up a localized string similar to: When incorrect
/// </summary>
public static string Config_when_wrong => _resourceLoader.GetString("Config_when_wrong");
/// <summary>
/// Looks up a localized string similar to: Skip to next set
/// </summary>
public static string Config_wrong_a => _resourceLoader.GetString("Config_wrong_a");
/// <summary>
/// Looks up a localized string similar to: Keep guessing
/// </summary>
public static string Config_wrong_b => _resourceLoader.GetString("Config_wrong_b");
/// <summary>
/// Looks up a localized string similar to: Restart match
/// </summary>
public static string Config_wrong_c => _resourceLoader.GetString("Config_wrong_c");
/// <summary>
/// Looks up a localized string similar to: Hide wrong choice with ???
/// </summary>
public static string Config_wrong_hide => _resourceLoader.GetString("Config_wrong_hide");
/// <summary>
/// Looks up a localized string similar to: ago
/// </summary>
public static string storage_ago => _resourceLoader.GetString("storage_ago");
/// <summary>
/// Looks up a localized string similar to: Never
/// </summary>
public static string storage_never => _resourceLoader.GetString("storage_never");
/// <summary>
/// Looks up a localized string similar to: Now
/// </summary>
public static string storage_now => _resourceLoader.GetString("storage_now");
/// <summary>
/// Looks up a localized string similar to: Press this to update
/// </summary>
public static string storage_press_to_update => _resourceLoader.GetString("storage_press_to_update");
/// <summary>
/// Looks up a localized string similar to: seconds
/// </summary>
public static string storage_seconds => _resourceLoader.GetString("storage_seconds");
/// <summary>
/// Looks up a localized string similar to: week
/// </summary>
public static string storage_week => _resourceLoader.GetString("storage_week");
/// <summary>
/// Looks up a localized string similar to: System
/// </summary>
public static string Theme_Default => _resourceLoader.GetString("Theme_Default");
}
/// <summary>
/// XAML markup extension exposing the "Setting" resource strings directly in markup
/// via a strongly typed <see cref="KeyEnum"/> key, with an optional value converter.
/// Auto-generated by ReswPlus — do not edit by hand; changes are lost on regeneration.
/// </summary>
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Huyn.ReswPlus", "0.1.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[MarkupExtensionReturnType(ReturnType = typeof(string))]
public class SettingExtension: MarkupExtension
{
// Member names mirror the resource keys exactly; ProvideValue relies on this by
// using Key.ToString() as the lookup key.
public enum KeyEnum
{
__Undefined = 0,
Changelog_0,
Config_about,
Config_About_Contact,
Config_about_header,
Config_About_Howto,
Config_about_made,
Config_About_Version,
Config_About_WhatNew,
Config_autoscroll,
Config_beta_tester,
Config_c1a,
Config_c1b,
Config_c2a,
Config_c2b,
Config_c3a,
Config_c3b,
Config_choicelook,
Config_choices,
Config_Choose_Bottom,
Config_Choose_Center,
Config_Choose_Write,
Config_cpv,
Config_delall,
Config_end,
Config_end_a,
Config_end_and,
Config_end_c,
Config_eng_translate,
Config_extension,
Config_Ext_Uninstall,
Config_fontsize,
Config_General_Theme,
Config_General_Theme_Dark,
Config_General_Theme_Light,
Config_hint,
Config_Info_Apply,
Config_Info_Apply_Dic,
Config_Info_Apply_Dic_Desc,
Config_Info_Apply_QA_Desc,
Config_language,
Config_Menu_About_Desc,
Config_Menu_Changes_Desc,
Config_Menu_General,
Config_Menu_General_Desc,
Config_Menu_Session_Desc,
Config_Menu_SR,
Config_Menu_Stat_Desc,
Config_Menu_Storage_Desc,
Config_RandomQA,
Config_Rand_a,
Config_Rand_b,
Config_Rand_c,
Config_ResetConfirm,
Config_reset_a,
Config_reset_b,
Config_reset_c,
Config_reset_d,
Config_reset_e,
Config_reset_header,
Config_Reverse_Dictionary,
Config_saveall,
Config_session,
Config_ShowAd,
Config_stat,
Config_stat_corrected,
Config_stat_count_format,
Config_stat_no,
Config_stat_prural,
Config_stat_selected,
Config_stat_selectNcorrect,
Config_stat_single,
Config_stat_started,
Config_stat_time_a,
Config_stat_time_b,
Config_stat_time_c,
Config_stat_time_d,
Config_StorageReset,
Config_Storage_Desc,
Config_Storage_LastUpdate,
Config_Storage_Local,
Config_Storage_Memos,
Config_Storage_Roaming,
Config_Storage_Session,
Config_Storage_Stat,
Config_Storage_Total,
Config_submit,
Config_timelimit_use,
Config_when_wrong,
Config_wrong_a,
Config_wrong_b,
Config_wrong_c,
Config_wrong_hide,
storage_ago,
storage_never,
storage_now,
storage_press_to_update,
storage_seconds,
storage_week,
Theme_Default,
}
// Shared, view-independent loader for the "Setting" resource map.
private static ResourceLoader _resourceLoader;
static SettingExtension()
{
_resourceLoader = ResourceLoader.GetForViewIndependentUse("Setting");
}
// The resource key to resolve; __Undefined yields an empty string.
public KeyEnum Key { get; set;}
// Optional converter applied to the resolved string before it is returned.
public IValueConverter Converter { get; set;}
// Parameter forwarded to Converter.Convert.
public object ConverterParameter { get; set;}
/// <summary>
/// Resolves the string for <see cref="Key"/> and, if a <see cref="Converter"/> is set,
/// runs the result through it before returning.
/// </summary>
protected override object ProvideValue()
{
string res;
if(Key == KeyEnum.__Undefined)
{
// No key selected — fall back to an empty string instead of throwing.
res = "";
}
else
{
// Enum member names mirror the resource keys, so ToString() is the lookup key.
res = _resourceLoader.GetString(Key.ToString());
}
return Converter == null ? res : Converter.Convert(res, typeof(String), ConverterParameter, null);
}
}
} //JustRemember.Strings
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using RestSharp;
using IO.Swagger.Client;
using IO.Swagger.Model;
namespace IO.Swagger.Api {
public interface IInformationApi {
/// <summary>
/// Get a list of the valid conversions that can be made with the API. For each conversion the
/// available options for that specific type of conversion are also shown. These conversions can
/// be added to a Job through the specific endpoint or in the information given to create a new Job.
/// </summary>
/// <param name="Category">Category for the conversion.</param>
/// <param name="Target">Target for the conversion.</param>
/// <param name="Page">Pagination for list of elements.</param>
/// <returns>List&lt;Conversion&gt;</returns>
List<Conversion> ConversionsGet (string Category, string Target, double? Page);
/// <summary>
/// Async variant of <see cref="ConversionsGet"/>: get a list of the valid conversions that can
/// be made with the API, including the available options for each conversion type.
/// </summary>
/// <param name="Category">Category for the conversion.</param>
/// <param name="Target">Target for the conversion.</param>
/// <param name="Page">Pagination for list of elements.</param>
/// <returns>List&lt;Conversion&gt;</returns>
Task<List<Conversion>> ConversionsGetAsync (string Category, string Target, double? Page);
/// <summary>
/// Get a list of the valid statuses: every status a Job may have during processing, each with a description.
/// </summary>
/// <returns>List&lt;Status&gt;</returns>
List<Status> StatusesGet ();
/// <summary>
/// Async variant of <see cref="StatusesGet"/>.
/// </summary>
/// <returns>List&lt;Status&gt;</returns>
Task<List<Status>> StatusesGetAsync ();
}
/// <summary>
/// Represents a collection of functions to interact with the API endpoints.
/// Generated Swagger/OpenAPI client — synchronous and async calls delegate to <see cref="ApiClient"/>.
/// </summary>
public class InformationApi : IInformationApi {
/// <summary>
/// Initializes a new instance of the <see cref="InformationApi"/> class.
/// </summary>
/// <param name="apiClient">An instance of ApiClient (optional); when null the shared Configuration.apiClient is used.</param>
public InformationApi(ApiClient apiClient = null) {
if (apiClient == null) { // use the default one in Configuration
this.apiClient = Configuration.apiClient;
} else {
this.apiClient = apiClient;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="InformationApi"/> class
/// with its own <see cref="ApiClient"/> pointed at the given base path.
/// </summary>
/// <param name="basePath">Base URL of the API.</param>
public InformationApi(String basePath)
{
this.apiClient = new ApiClient(basePath);
}
/// <summary>
/// Sets the base path of the API client.
/// </summary>
/// <param name="basePath">The new base path.</param>
public void SetBasePath(String basePath) {
this.apiClient.basePath = basePath;
}
/// <summary>
/// Gets the base path of the API client.
/// </summary>
/// <param name="basePath">Ignored — kept only for signature compatibility (generated-code quirk).</param>
/// <value>The base path</value>
public String GetBasePath(String basePath) {
return this.apiClient.basePath;
}
/// <summary>
/// Gets or sets the API client.
/// </summary>
/// <value>The API client</value>
public ApiClient apiClient {get; set;}
/// <summary>
/// Get a list of the valid conversions that can be made with the API. For each conversion the
/// available options for that specific type of conversion are also shown. These conversions can
/// be added to a Job through the specific endpoint or in the information given to create a new Job.
/// </summary>
/// <param name="Category">Category for the conversion.</param>
/// <param name="Target">Target for the conversion.</param>
/// <param name="Page">Pagination for list of elements.</param>
/// <returns>List&lt;Conversion&gt;</returns>
/// <exception cref="ApiException">Thrown when the HTTP status code is 400 or greater.</exception>
public List<Conversion> ConversionsGet (string Category, string Target, double? Page) {
var path = "/conversions";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, String>();
String postBody = null;
if (Category != null) queryParams.Add("category", apiClient.ParameterToString(Category)); // query parameter
if (Target != null) queryParams.Add("target", apiClient.ParameterToString(Target)); // query parameter
if (Page != null) queryParams.Add("page", apiClient.ParameterToString(Page)); // query parameter
// authentication setting, if any
String[] authSettings = new String[] { };
// make the HTTP request
IRestResponse response = (IRestResponse) apiClient.CallApi(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400) {
throw new ApiException ((int)response.StatusCode, "Error calling ConversionsGet: " + response.Content, response.Content);
}
return (List<Conversion>) apiClient.Deserialize(response.Content, typeof(List<Conversion>));
}
/// <summary>
/// Async variant of <see cref="ConversionsGet"/>: get a list of the valid conversions that can
/// be made with the API, including the available options for each conversion type.
/// </summary>
/// <param name="Category">Category for the conversion.</param>
/// <param name="Target">Target for the conversion.</param>
/// <param name="Page">Pagination for list of elements.</param>
/// <returns>List&lt;Conversion&gt;</returns>
/// <exception cref="ApiException">Thrown when the HTTP status code is 400 or greater.</exception>
public async Task<List<Conversion>> ConversionsGetAsync (string Category, string Target, double? Page) {
var path = "/conversions";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, String>();
String postBody = null;
if (Category != null) queryParams.Add("category", apiClient.ParameterToString(Category)); // query parameter
if (Target != null) queryParams.Add("target", apiClient.ParameterToString(Target)); // query parameter
if (Page != null) queryParams.Add("page", apiClient.ParameterToString(Page)); // query parameter
// authentication setting, if any
String[] authSettings = new String[] { };
// make the HTTP request
IRestResponse response = (IRestResponse) await apiClient.CallApiAsync(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400) {
throw new ApiException ((int)response.StatusCode, "Error calling ConversionsGet: " + response.Content, response.Content);
}
return (List<Conversion>) apiClient.Deserialize(response.Content, typeof(List<Conversion>));
}
/// <summary>
/// Get a list of the valid statuses: every status a Job may have during processing, each with a description.
/// </summary>
/// <returns>List&lt;Status&gt;</returns>
/// <exception cref="ApiException">Thrown when the HTTP status code is 400 or greater.</exception>
public List<Status> StatusesGet () {
var path = "/statuses";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, String>();
String postBody = null;
// authentication setting, if any
String[] authSettings = new String[] { };
// make the HTTP request
IRestResponse response = (IRestResponse) apiClient.CallApi(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400) {
throw new ApiException ((int)response.StatusCode, "Error calling StatusesGet: " + response.Content, response.Content);
}
return (List<Status>) apiClient.Deserialize(response.Content, typeof(List<Status>));
}
/// <summary>
/// Async variant of <see cref="StatusesGet"/>.
/// </summary>
/// <returns>List&lt;Status&gt;</returns>
/// <exception cref="ApiException">Thrown when the HTTP status code is 400 or greater.</exception>
public async Task<List<Status>> StatusesGetAsync () {
var path = "/statuses";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, String>();
String postBody = null;
// authentication setting, if any
String[] authSettings = new String[] { };
// make the HTTP request
IRestResponse response = (IRestResponse) await apiClient.CallApiAsync(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400) {
throw new ApiException ((int)response.StatusCode, "Error calling StatusesGet: " + response.Content, response.Content);
}
return (List<Status>) apiClient.Deserialize(response.Content, typeof(List<Status>));
}
}
}
| |
/*
* REST API Documentation for the MOTI Hired Equipment Tracking System (HETS) Application
*
* The Hired Equipment Program is for owners/operators who have a dump truck, bulldozer, backhoe or other piece of equipment they want to hire out to the transportation ministry for day labour and emergency projects. The Hired Equipment Program distributes available work to local equipment owners. The program is based on seniority and is designed to deliver work to registered users fairly and efficiently through the development of local area call-out lists.
*
* OpenAPI spec version: v1
*
*
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using System.ComponentModel.DataAnnotations.Schema;
using System.ComponentModel.DataAnnotations;
using HETSAPI.Models;
namespace HETSAPI.Models
{
/// <summary>
/// The users associated with a given group that has been defined in the application.
/// </summary>
[MetaDataExtension (Description = "The users associated with a given group that has been defined in the application.")]
public partial class GroupMembership : AuditableEntity, IEquatable<GroupMembership>
{
    /// <summary>
    /// Default constructor, required by entity framework.
    /// </summary>
    public GroupMembership()
    {
        Id = 0;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="GroupMembership" /> class.
    /// </summary>
    /// <param name="Id">A system-generated unique identifier for a GroupMembership (required).</param>
    /// <param name="Active">A flag indicating the User is active in the group. Set false to remove the user from the designated group. (required).</param>
    /// <param name="Group">A foreign key reference to the system-generated unique identifier for a Group (required).</param>
    /// <param name="User">A foreign key reference to the system-generated unique identifier for a User (required).</param>
    public GroupMembership(int Id, bool Active, Group Group, User User)
    {
        this.Id = Id;
        this.Active = Active;
        this.Group = Group;
        this.User = User;
    }

    /// <summary>
    /// A system-generated unique identifier for a GroupMembership.
    /// </summary>
    [MetaDataExtension (Description = "A system-generated unique identifier for a GroupMembership")]
    public int Id { get; set; }

    /// <summary>
    /// A flag indicating the User is active in the group. Set false to remove the user from the designated group.
    /// </summary>
    [MetaDataExtension (Description = "A flag indicating the User is active in the group. Set false to remove the user from the designated group.")]
    public bool Active { get; set; }

    /// <summary>
    /// A foreign key reference to the system-generated unique identifier for a Group.
    /// </summary>
    [MetaDataExtension (Description = "A foreign key reference to the system-generated unique identifier for a Group")]
    public Group Group { get; set; }

    /// <summary>
    /// Foreign key for Group; excluded from JSON payloads.
    /// </summary>
    [ForeignKey("Group")]
    [JsonIgnore]
    [MetaDataExtension (Description = "A foreign key reference to the system-generated unique identifier for a Group")]
    public int? GroupId { get; set; }

    /// <summary>
    /// A foreign key reference to the system-generated unique identifier for a User.
    /// </summary>
    [MetaDataExtension (Description = "A foreign key reference to the system-generated unique identifier for a User")]
    public User User { get; set; }

    /// <summary>
    /// Foreign key for User; excluded from JSON payloads.
    /// </summary>
    [ForeignKey("User")]
    [JsonIgnore]
    [MetaDataExtension (Description = "A foreign key reference to the system-generated unique identifier for a User")]
    public int? UserId { get; set; }

    /// <summary>
    /// Returns the string presentation of the object.
    /// </summary>
    /// <returns>String presentation of the object</returns>
    public override string ToString()
    {
        return "class GroupMembership {\n"
             + "  Id: " + Id + "\n"
             + "  Active: " + Active + "\n"
             + "  Group: " + Group + "\n"
             + "  User: " + User + "\n"
             + "}\n";
    }

    /// <summary>
    /// Returns the JSON string presentation of the object.
    /// </summary>
    /// <returns>JSON string presentation of the object</returns>
    public string ToJson() => JsonConvert.SerializeObject(this, Formatting.Indented);

    /// <summary>
    /// Returns true if objects are equal.
    /// </summary>
    /// <param name="obj">Object to be compared</param>
    /// <returns>Boolean</returns>
    public override bool Equals(object obj)
    {
        if (ReferenceEquals(obj, null)) { return false; }
        if (ReferenceEquals(obj, this)) { return true; }
        // Exact-type comparison: a derived type never equals a GroupMembership.
        return obj.GetType() == GetType() && Equals((GroupMembership)obj);
    }

    /// <summary>
    /// Returns true if GroupMembership instances are equal. Compares the Id and
    /// Active values and delegates to Group/User equality for the navigation
    /// properties (two null references compare equal).
    /// </summary>
    /// <param name="other">Instance of GroupMembership to be compared</param>
    /// <returns>Boolean</returns>
    public bool Equals(GroupMembership other)
    {
        if (ReferenceEquals(other, null)) { return false; }
        if (ReferenceEquals(other, this)) { return true; }

        return Id == other.Id
            && Active == other.Active
            && (Group == other.Group || (Group != null && Group.Equals(other.Group)))
            && (User == other.User || (User != null && User.Equals(other.User)));
    }

    /// <summary>
    /// Gets a hash code built from the same members used by Equals.
    /// </summary>
    /// <returns>Hash code</returns>
    public override int GetHashCode()
    {
        // credit: http://stackoverflow.com/a/263416/677735
        unchecked // Overflow is fine, just wrap
        {
            int hash = 41;
            hash = hash * 59 + Id.GetHashCode();
            hash = hash * 59 + Active.GetHashCode();
            if (Group != null)
            {
                hash = hash * 59 + Group.GetHashCode();
            }
            if (User != null)
            {
                hash = hash * 59 + User.GetHashCode();
            }
            return hash;
        }
    }

    #region Operators

    /// <summary>
    /// Equality operator; null-safe, delegates to Equals.
    /// </summary>
    /// <param name="left"></param>
    /// <param name="right"></param>
    /// <returns></returns>
    public static bool operator ==(GroupMembership left, GroupMembership right)
    {
        return Equals(left, right);
    }

    /// <summary>
    /// Inequality operator; negation of ==.
    /// </summary>
    /// <param name="left"></param>
    /// <param name="right"></param>
    /// <returns></returns>
    public static bool operator !=(GroupMembership left, GroupMembership right)
    {
        return !Equals(left, right);
    }

    #endregion Operators
}
}
| |
//
// Window.cs
//
// Author:
// Lluis Sanchez <lluis@xamarin.com>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using Xwt.Backends;
namespace Xwt
{
[BackendType (typeof(IWindowBackend))]
public class Window: WindowFrame
{
    // Content widget currently hosted by the window; null until Content is assigned.
    Widget child;
    // Space reserved between the window frame and the content widget.
    WidgetSpacing padding;
    // Menu shown as the window's main menu, if any.
    Menu mainMenu;
    // False until the first AdjustSize pass completes; until then size/location
    // changes are accumulated in initialBounds instead of hitting the backend.
    bool shown;
    protected new class WindowBackendHost: WindowFrame.WindowBackendHost
    {
    }
    // Creates the backend host used to talk to the toolkit backend.
    protected override BackendHost CreateBackendHost ()
    {
        return new WindowBackendHost ();
    }
    /// <summary>
    /// Creates a window with the default padding of 12 units on every side.
    /// </summary>
    public Window ()
    {
        Padding = 12;
    }
    // Strongly typed access to the window's toolkit backend.
    IWindowBackend Backend {
        get { return (IWindowBackend) BackendHost.Backend; }
    }
    /// <summary>
    /// Placement policy applied the first time the window is shown
    /// (defaults to WindowLocation.CenterParent, see initialLocation below).
    /// </summary>
    public WindowLocation InitialLocation {
        get { return initialLocation; }
        set { initialLocation = value; }
    }
    /// <summary>
    /// Padding between the window border and its content; setting it pushes
    /// the new values to the backend immediately.
    /// </summary>
    public WidgetSpacing Padding {
        get { return padding; }
        set {
            padding = value;
            UpdatePadding ();
        }
    }
    /// <summary>Left component of Padding.</summary>
    public double PaddingLeft {
        get { return padding.Left; }
        set {
            padding.Left = value;
            UpdatePadding ();
        }
    }
    /// <summary>Right component of Padding.</summary>
    public double PaddingRight {
        get { return padding.Right; }
        set {
            padding.Right = value;
            UpdatePadding ();
        }
    }
    /// <summary>Top component of Padding.</summary>
    public double PaddingTop {
        get { return padding.Top; }
        set {
            padding.Top = value;
            UpdatePadding ();
        }
    }
    /// <summary>Bottom component of Padding.</summary>
    public double PaddingBottom {
        get { return padding.Bottom; }
        set {
            padding.Bottom = value;
            UpdatePadding ();
        }
    }
    // Pushes the current padding values to the backend.
    void UpdatePadding ()
    {
        Backend.SetPadding (padding.Left, padding.Top, padding.Right, padding.Bottom);
    }
    /// <summary>
    /// The window's main menu. Assigning forwards the menu's backend to the
    /// window backend (via GetSafeBackend).
    /// </summary>
    public Menu MainMenu {
        get {
            return mainMenu;
        }
        set {
            mainMenu = value;
            Backend.SetMainMenu ((IMenuBackend)BackendHost.ToolkitEngine.GetSafeBackend (mainMenu));
        }
    }
    /// <summary>
    /// The widget displayed as the window's content. The previous content (if
    /// any) is detached first, then the new widget is parented, handed to the
    /// backend, and a size negotiation is queued unless the engine negotiates
    /// sizes itself.
    /// NOTE(review): assigning null throws NullReferenceException at
    /// child.SetParentWindow(this) — confirm whether null should be allowed
    /// to clear the content.
    /// </summary>
    public Widget Content {
        get {
            return child;
        }
        set {
            if (child != null)
                child.SetParentWindow (null);
            this.child = value;
            child.SetParentWindow (this);
            Backend.SetChild ((IWidgetBackend)BackendHost.ToolkitEngine.GetSafeBackend (child));
            if (!BackendHost.EngineBackend.HandlesSizeNegotiation)
                Widget.QueueWindowSizeNegotiation (this);
        }
    }
    protected override void Dispose (bool disposing)
    {
        // NOTE(review): the content widget is disposed even when
        // disposing == false (finalizer path) — confirm this is intentional.
        if (Content != null)
            Content.Dispose ();
        base.Dispose (disposing);
    }
    protected override void OnReallocate ()
    {
        // Only drive the child's layout manually when the engine does not
        // negotiate sizes itself.
        if (child != null && !BackendHost.EngineBackend.HandlesSizeNegotiation) {
            child.Surface.Reallocate ();
        }
    }
    // Which pieces of the initial geometry were set explicitly before the
    // window was first shown.
    bool widthSet;
    bool heightSet;
    bool locationSet;
    // Geometry accumulated before the first show.
    Rectangle initialBounds;
    WindowLocation initialLocation = WindowLocation.CenterParent;
    internal override void SetBackendSize (double width, double height)
    {
        if (shown)
            base.SetBackendSize (width, height);
        else {
            // -1 means "not specified" for either dimension.
            if (width != -1) {
                initialBounds.Width = width;
                widthSet = true;
            }
            if (height != -1) {
                heightSet = true;
                initialBounds.Height = height;
            }
        }
    }
    internal override void SetBackendLocation (double x, double y)
    {
        if (shown || BackendHost.EngineBackend.HandlesSizeNegotiation)
            base.SetBackendLocation (x, y);
        if (!shown) {
            locationSet = true;
            initialBounds.Location = new Point (x, y);
        }
    }
    internal override Rectangle BackendBounds
    {
        get
        {
            // Before the first show (and without engine-side negotiation) report
            // the accumulated initial bounds rather than the backend's.
            return shown || BackendHost.EngineBackend.HandlesSizeNegotiation ? base.BackendBounds : initialBounds;
        }
        set
        {
            if (shown || BackendHost.EngineBackend.HandlesSizeNegotiation)
                base.BackendBounds = value;
            if (!shown) {
                widthSet = heightSet = locationSet = true;
                initialBounds = value;
            }
        }
    }
    internal void OnChildPlacementChanged (Widget child)
    {
        Backend.UpdateChildPlacement (child.GetBackend ());
        if (!BackendHost.EngineBackend.HandlesSizeNegotiation)
            Widget.QueueWindowSizeNegotiation (this);
    }
    internal override void AdjustSize ()
    {
        // Ask the backend for its minimum size and the size consumed by window
        // decorations (frame, title bar, menu...).
        Size mMinSize, mDecorationsSize;
        Backend.GetMetrics (out mMinSize, out mDecorationsSize);
        var size = shown ? Size : initialBounds.Size;
        // Constrain the child only along axes whose size is already fixed
        // (window shown, or an explicit width/height was provided).
        var wc = (shown || widthSet) ? SizeConstraint.WithSize (Math.Max (size.Width - padding.HorizontalSpacing - mDecorationsSize.Width, mMinSize.Width)) : SizeConstraint.Unconstrained;
        var hc = (shown || heightSet) ? SizeConstraint.WithSize (Math.Max (size.Height - padding.VerticalSpacing - mDecorationsSize.Height, mMinSize.Height)) : SizeConstraint.Unconstrained;
        // ws accumulates the preferred window size: decorations + child + padding.
        var ws = mDecorationsSize;
        if (child != null) {
            IWidgetSurface s = child.Surface;
            ws += s.GetPreferredSize (wc, hc, true);
        }
        ws.Width += padding.HorizontalSpacing;
        ws.Height += padding.VerticalSpacing;
        // Before the first show, dimensions not set explicitly default to the
        // preferred size.
        if (!shown) {
            if (!widthSet)
                size.Width = ws.Width;
            if (!heightSet)
                size.Height = ws.Height;
        }
        // Never fall below the backend's minimum size.
        if (ws.Width < mMinSize.Width)
            ws.Width = mMinSize.Width;
        if (ws.Height < mMinSize.Height)
            ws.Height = mMinSize.Height;
        // Grow the window if the preferred size exceeds the requested size.
        if (ws.Width > size.Width)
            size.Width = ws.Width;
        if (ws.Height > size.Height)
            size.Height = ws.Height;
        if (!shown) {
            shown = true;
            // No explicit location: center on the screen or on the transient parent.
            if (!locationSet && initialLocation != WindowLocation.Manual) {
                Point center;
                if (initialLocation == WindowLocation.CenterScreen || TransientFor == null)
                    center = Desktop.PrimaryScreen.VisibleBounds.Center;
                else
                    center = TransientFor.ScreenBounds.Center;
                initialBounds.X = Math.Round (center.X - size.Width / 2);
                initialBounds.Y = Math.Round (center.Y - size.Height / 2);
                locationSet = true;
            }
            if (size != Size) {
                if (locationSet)
                    Backend.Bounds = new Rectangle (initialBounds.X, initialBounds.Y, size.Width, size.Height);
                else
                    Backend.SetSize (size.Width, size.Height);
            } else if (locationSet && !shown)
                // NOTE(review): dead branch — "shown" was set to true a few
                // lines above, so "!shown" can never hold here and this
                // Backend.Move is never executed; confirm the intended condition.
                Backend.Move (initialBounds.X, initialBounds.Y);
        } else {
            if (size != Size)
                Backend.SetSize (size.Width, size.Height);
        }
        // Enforce the computed preferred size as the window's minimum.
        Backend.SetMinSize (new Size (ws.Width, ws.Height));
    }
}
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using Microsoft.AspNet.Razor.Generator;
using Microsoft.AspNet.Razor.Generator.Compiler;
using Microsoft.AspNet.Razor.Parser.SyntaxTree;
using Microsoft.AspNet.Razor.Text;
using Xunit;
namespace Microsoft.AspNet.Razor.Test.Generator
{
// Baseline tests for the C# Razor code generator. Each test renders a .cshtml
// test file and compares the generated code — and, for design-time tests, the
// emitted line-pragma mappings — against checked-in baseline files.
public class CSharpRazorCodeGeneratorTest : RazorCodeGeneratorTest<CSharpRazorCodeLanguage>
{
    // Extension of the Razor input test files.
    protected override string FileExtension
    {
        get { return "cshtml"; }
    }
    // Language moniker used to locate language-specific test resources.
    protected override string LanguageName
    {
        get { return "CS"; }
    }
    // Extension of the baseline (expected generated code) files.
    protected override string BaselineExtension
    {
        get { return "cs"; }
    }
    private const string TestPhysicalPath = @"C:\Bar.cshtml";
    private const string TestVirtualPath = "~/Foo/Bar.cshtml";
    // Constructor argument validation: className must be non-null and non-empty.
    [Fact]
    public void ConstructorRequiresNonNullClassName()
    {
        Assert.Throws<ArgumentException>("className", () => new CSharpRazorCodeGenerator(null, TestRootNamespaceName, TestPhysicalPath, CreateHost()));
    }
    [Fact]
    public void ConstructorRequiresNonEmptyClassName()
    {
        Assert.Throws<ArgumentException>("className", () => new CSharpRazorCodeGenerator(string.Empty, TestRootNamespaceName, TestPhysicalPath, CreateHost()));
    }
    // rootNamespaceName must be non-null, but empty is explicitly allowed.
    [Fact]
    public void ConstructorRequiresNonNullRootNamespaceName()
    {
        Assert.Throws<ArgumentNullException>("rootNamespaceName", () => new CSharpRazorCodeGenerator("Foo", null, TestPhysicalPath, CreateHost()));
    }
    [Fact]
    public void ConstructorAllowsEmptyRootNamespaceName()
    {
        new CSharpRazorCodeGenerator("Foo", String.Empty, TestPhysicalPath, CreateHost());
    }
    [Fact]
    public void ConstructorRequiresNonNullHost()
    {
        Assert.Throws<ArgumentNullException>("host", () => new CSharpRazorCodeGenerator("Foo", TestRootNamespaceName, TestPhysicalPath, null));
    }
    // Round-trips each named test file through the runtime code generator and
    // compares the output against its baseline of the same name.
    [Theory]
    [InlineData("NestedCodeBlocks")]
    [InlineData("CodeBlock")]
    [InlineData("ExplicitExpression")]
    [InlineData("MarkupInCodeBlock")]
    [InlineData("Blocks")]
    [InlineData("ImplicitExpression")]
    [InlineData("Imports")]
    [InlineData("ExpressionsInCode")]
    [InlineData("FunctionsBlock")]
    [InlineData("FunctionsBlock_Tabs")]
    [InlineData("Templates")]
    [InlineData("Sections")]
    [InlineData("RazorComments")]
    [InlineData("Helpers")]
    [InlineData("HelpersMissingCloseParen")]
    [InlineData("HelpersMissingOpenBrace")]
    [InlineData("HelpersMissingOpenParen")]
    [InlineData("NestedHelpers")]
    [InlineData("InlineBlocks")]
    [InlineData("LayoutDirective")]
    [InlineData("ConditionalAttributes")]
    [InlineData("ResolveUrl")]
    [InlineData("Await")]
    public void CSharpCodeGeneratorCorrectlyGeneratesRunTimeCode(string testType)
    {
        RunTest(testType);
    }
    // The numeric arguments to BuildLineMapping below are baseline data; see the
    // two BuildLineMapping helpers at the bottom of the class for the parameter
    // order (document index/line/offset -> generated index/line/offset + length).
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesMappingsForAwait()
    {
        RunTest("Await",
                "Await.DesignTime",
                designTimeMode: true,
                tabTest: TabTest.Tabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(12, 0, 12, 173, 9, 0, 76),
                    BuildLineMapping(192, 9, 39, 646, 31, 15, 11),
                    BuildLineMapping(247, 10, 38, 730, 36, 14, 11),
                    BuildLineMapping(304, 11, 39, 812, 41, 12, 14),
                    BuildLineMapping(371, 12, 46, 899, 47, 13, 1),
                    BuildLineMapping(376, 12, 51, 978, 53, 18, 11),
                    BuildLineMapping(391, 12, 66, 1066, 58, 18, 1),
                    BuildLineMapping(448, 13, 49, 1146, 64, 19, 5),
                    BuildLineMapping(578, 18, 42, 1225, 69, 15, 15),
                    BuildLineMapping(650, 19, 51, 1317, 74, 18, 19),
                    BuildLineMapping(716, 20, 41, 1412, 79, 17, 22),
                    BuildLineMapping(787, 21, 42, 1505, 84, 12, 39),
                    BuildLineMapping(884, 22, 51, 1619, 90, 15, 21),
                    BuildLineMapping(961, 23, 49, 1713, 96, 13, 1),
                    BuildLineMapping(966, 23, 54, 1792, 102, 18, 27),
                    BuildLineMapping(997, 23, 85, 1900, 107, 22, 1),
                    BuildLineMapping(1057, 24, 52, 1980, 113, 19, 19)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesMappingsForSimpleUnspacedIf()
    {
        RunTest("SimpleUnspacedIf",
                "SimpleUnspacedIf.DesignTime.Tabs",
                designTimeMode: true,
                tabTest: TabTest.Tabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(1, 0, 1, 532, 22, 0, 15),
                    BuildLineMapping(27, 2, 12, 623, 30, 6, 3)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesMappingsForRazorCommentsAtDesignTime()
    {
        RunTest("RazorComments", "RazorComments.DesignTime", designTimeMode: true, tabTest: TabTest.NoTabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(81, 3, 525, 22, 2, 6),
                    BuildLineMapping(122, 4, 39, 636, 29, 38, 22),
                    BuildLineMapping(173, 5, 49, 773, 36, 48, 58),
                    BuildLineMapping(238, 11, 899, 45, 2, 24),
                    BuildLineMapping(310, 12, 1036, 51, 45, 3),
                    BuildLineMapping(323, 14, 2, 1112, 56, 6, 1),
                    BuildLineMapping(328, 14, 1155, 58, 7, 1)
                });
    }
    // Tab vs. space variants of the opened-curly-if scenario share OpenedIf below.
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGenerateMappingForOpenedCurlyIf()
    {
        OpenedIf(withTabs: true);
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGenerateMappingForOpenedCurlyIfSpaces()
    {
        OpenedIf(withTabs: false);
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesImportStatementsAtDesignTime()
    {
        RunTest("Imports", "Imports.DesignTime", designTimeMode: true, tabTest: TabTest.NoTabs, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(1, 0, 1, 51, 3, 0, 15),
            BuildLineMapping(19, 1, 1, 132, 9, 0, 32),
            BuildLineMapping(54, 2, 1, 230, 15, 0, 12),
            BuildLineMapping(99, 4, 772, 39, 29, 21),
            BuildLineMapping(161, 5, 888, 44, 35, 20)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesFunctionsBlocksAtDesignTime()
    {
        RunTest("FunctionsBlock",
                "FunctionsBlock.DesignTime",
                designTimeMode: true,
                tabTest: TabTest.NoTabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(12, 0, 12, 191, 9, 0, 4),
                    BuildLineMapping(33, 4, 12, 259, 15, 0, 104),
                    BuildLineMapping(167, 11, 788, 37, 25, 11)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesFunctionsBlocksAtDesignTimeTabs()
    {
        RunTest("FunctionsBlock",
                "FunctionsBlock.DesignTime.Tabs",
                designTimeMode: true,
                tabTest: TabTest.Tabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(12, 0, 12, 191, 9, 0, 4),
                    BuildLineMapping(33, 4, 12, 259, 15, 0, 104),
                    BuildLineMapping(167, 11, 25, 776, 37, 13, 11)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesMinimalFunctionsBlocksAtDesignTimeTabs()
    {
        RunTest("FunctionsBlockMinimal",
                "FunctionsBlockMinimal.DesignTime.Tabs",
                designTimeMode: true,
                tabTest: TabTest.Tabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(16, 2, 12, 205, 9, 0, 55)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesHiddenSpansWithinCode()
    {
        RunTest("HiddenSpansInCode", designTimeMode: true, tabTest: TabTest.NoTabs, expectedDesignTimePragmas: new List<LineMapping>
        {
            BuildLineMapping(2, 0, 537, 22, 2, 6),
            BuildLineMapping(9, 1, 619, 29, 5, 5)
        });
    }
    // Design-time generation must still succeed in the presence of parse errors.
    [Fact]
    public void CSharpCodeGeneratorGeneratesCodeWithParserErrorsInDesignTimeMode()
    {
        RunTest("ParserError", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(2, 0, 519, 22, 2, 31)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesInheritsAtRuntime()
    {
        RunTest("Inherits", baselineName: "Inherits.Runtime");
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesInheritsAtDesigntime()
    {
        RunTest("Inherits", baselineName: "Inherits.Designtime", designTimeMode: true, tabTest: TabTest.NoTabs, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(20, 2, 321, 12, 10, 25),
            BuildLineMapping(1, 0, 1, 662, 27, 6, 5)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForUnfinishedExpressionsInCode()
    {
        RunTest("UnfinishedExpressionInCode", tabTest: TabTest.NoTabs, designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(2, 0, 564, 22, 2, 2),
            BuildLineMapping(5, 1, 1, 650, 28, 6, 9),
            BuildLineMapping(14, 1, 748, 33, 10, 2)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForUnfinishedExpressionsInCodeTabs()
    {
        RunTest("UnfinishedExpressionInCode",
                "UnfinishedExpressionInCode.Tabs",
                tabTest: TabTest.Tabs,
                designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(2, 0, 564, 22, 2, 2),
                    BuildLineMapping(5, 1, 1, 650, 28, 6, 9),
                    BuildLineMapping(14, 1, 10, 742, 33, 4, 2)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasMarkupAndExpressions()
    {
        RunTest("DesignTime",
                designTimeMode: true,
                tabTest: TabTest.NoTabs,
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(222, 16, 8, 209, 10, 0, 7),
                    BuildLineMapping(229, 16, 352, 16, 15, 26),
                    BuildLineMapping(265, 18, 461, 24, 18, 9),
                    BuildLineMapping(274, 20, 556, 33, 0, 1),
                    BuildLineMapping(20, 1, 13, 964, 52, 12, 36),
                    BuildLineMapping(74, 2, 1086, 59, 22, 1),
                    BuildLineMapping(79, 2, 1177, 64, 27, 15),
                    BuildLineMapping(113, 7, 2, 1262, 71, 6, 12),
                    BuildLineMapping(129, 8, 1, 1343, 76, 6, 4),
                    BuildLineMapping(142, 8, 1443, 78, 14, 3),
                    BuildLineMapping(153, 8, 1540, 85, 25, 1),
                    BuildLineMapping(204, 13, 5, 1725, 95, 6, 3)
                });
    }
    // Edge cases: expressions and code blocks started at end-of-file, and empty
    // expressions/blocks, must still produce a (zero-length) mapping.
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForImplicitExpressionStartedAtEOF()
    {
        RunTest("ImplicitExpressionAtEOF", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(19, 2, 1, 559, 22, 6, 0)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForExplicitExpressionStartedAtEOF()
    {
        RunTest("ExplicitExpressionAtEOF", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(20, 2, 2, 559, 22, 6, 0)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForCodeBlockStartedAtEOF()
    {
        RunTest("CodeBlockAtEOF", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(2, 0, 528, 22, 2, 0)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForEmptyImplicitExpression()
    {
        RunTest("EmptyImplicitExpression", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(19, 2, 1, 559, 22, 6, 0)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForEmptyImplicitExpressionInCode()
    {
        RunTest("EmptyImplicitExpressionInCode", tabTest: TabTest.NoTabs, designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(2, 0, 573, 22, 2, 6),
            BuildLineMapping(9, 1, 5, 668, 29, 6, 0),
            BuildLineMapping(9, 1, 755, 34, 5, 2)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForEmptyImplicitExpressionInCodeTabs()
    {
        RunTest("EmptyImplicitExpressionInCode",
                "EmptyImplicitExpressionInCode.Tabs",
                tabTest: TabTest.Tabs,
                designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(2, 0, 573, 22, 2, 6),
                    BuildLineMapping(9, 1, 5, 668, 29, 6, 0),
                    BuildLineMapping(9, 1, 5, 752, 34, 2, 2)
                });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForEmptyExplicitExpression()
    {
        RunTest("EmptyExplicitExpression", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(20, 2, 2, 559, 22, 6, 0)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorCorrectlyGeneratesDesignTimePragmasForEmptyCodeBlock()
    {
        RunTest("EmptyCodeBlock", designTimeMode: true, expectedDesignTimePragmas: new List<LineMapping>()
        {
            BuildLineMapping(20, 2, 528, 22, 2, 0)
        });
    }
    [Fact]
    public void CSharpCodeGeneratorDoesNotRenderLinePragmasIfGenerateLinePragmasIsSetToFalse()
    {
        RunTest("NoLinePragmas", generatePragmas: false);
    }
    [Fact]
    public void CSharpCodeGeneratorRendersHelpersBlockCorrectlyWhenInstanceHelperRequested()
    {
        RunTest("Helpers", baselineName: "Helpers.Instance", hostConfig: h => h.StaticHelpers = false);
    }
    // TODO: This should be re-added once instrumentation support has been added
    //[Fact]
    //public void CSharpCodeGeneratorCorrectlyInstrumentsRazorCodeWhenInstrumentationRequested()
    //{
    //    RunTest("Instrumented", hostConfig: host =>
    //    {
    //        host.EnableInstrumentation = true;
    //        host.InstrumentedSourceFilePath = String.Format("~/{0}.cshtml", host.DefaultClassName);
    //    });
    //}
    [Fact]
    public void CSharpCodeGeneratorGeneratesUrlsCorrectlyWithCommentsAndQuotes()
    {
        RunTest("HtmlCommentWithQuote_Single",
                tabTest: TabTest.NoTabs);
        RunTest("HtmlCommentWithQuote_Double",
                tabTest: TabTest.NoTabs);
    }
    // Shared driver for the opened-curly-if tests; the expected generated-code
    // offsets shift depending on whether indentation is tabs or spaces.
    private void OpenedIf(bool withTabs)
    {
        int tabOffsetForMapping = 7;
        // where the test is running with tabs, the offset into the CS buffer changes for the whitespace mapping
        // with spaces we get 7xspace -> offset of 8 (column = offset+1)
        // with tabs we get tab + 3 spaces -> offset of 4 chars + 1 = 5
        if (withTabs)
        {
            tabOffsetForMapping -= 3;
        }
        RunTest("OpenedIf",
                "OpenedIf.DesignTime" + (withTabs ? ".Tabs" : ""),
                designTimeMode: true,
                tabTest: withTabs ? TabTest.Tabs : TabTest.NoTabs,
                spans: new TestSpan[]
                {
                    new TestSpan(SpanKind.Markup, 0, 16),
                    new TestSpan(SpanKind.Transition, 16, 17),
                    new TestSpan(SpanKind.Code, 17, 31),
                    new TestSpan(SpanKind.Markup, 31, 38),
                    new TestSpan(SpanKind.Code, 38, 40),
                    new TestSpan(SpanKind.Markup, 40, 47),
                    new TestSpan(SpanKind.Code, 47, 47),
                },
                expectedDesignTimePragmas: new List<LineMapping>()
                {
                    BuildLineMapping(17, 2, 1, 508, 22, 0, 14),
                    BuildLineMapping(38, 3, 7, 582 + tabOffsetForMapping, 28, tabOffsetForMapping, 2),
                    // Multiply the tab offset absolute index by 2 to account for the first mapping
                    BuildLineMapping(47, 4, 7, 644 + tabOffsetForMapping * 2, 34, tabOffsetForMapping, 0)
                });
    }
    // Convenience overload: the document and generated character offsets are the
    // same, so only one offset value is passed.
    private static LineMapping BuildLineMapping(int documentAbsoluteIndex, int documentLineIndex, int generatedAbsoluteIndex, int generatedLineIndex, int characterOffsetIndex, int contentLength)
    {
        return BuildLineMapping(documentAbsoluteIndex, documentLineIndex, characterOffsetIndex, generatedAbsoluteIndex, generatedLineIndex, characterOffsetIndex, contentLength);
    }
    // Builds a LineMapping pairing a source-document location with the
    // corresponding location in the generated code, both spanning contentLength.
    private static LineMapping BuildLineMapping(int documentAbsoluteIndex, int documentLineIndex, int documentCharacterOffsetIndex, int generatedAbsoluteIndex, int generatedLineIndex, int generatedCharacterOffsetIndex, int contentLength)
    {
        return new LineMapping(
            documentLocation: new MappingLocation(new SourceLocation(documentAbsoluteIndex, documentLineIndex, documentCharacterOffsetIndex), contentLength),
            generatedLocation: new MappingLocation(new SourceLocation(generatedAbsoluteIndex, generatedLineIndex, generatedCharacterOffsetIndex), contentLength)
        );
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------
namespace System.Threading
{
/// <summary>API-surface stub (reference assembly: member bodies are placeholders) for the exception reported when a wait completes because a mutex was abandoned.</summary>
public partial class AbandonedMutexException : System.SystemException
{
    public AbandonedMutexException() { }
    public AbandonedMutexException(int location, System.Threading.WaitHandle handle) { }
    public AbandonedMutexException(string message) { }
    public AbandonedMutexException(string message, System.Exception inner) { }
    public AbandonedMutexException(string message, System.Exception inner, int location, System.Threading.WaitHandle handle) { }
    public AbandonedMutexException(string message, int location, System.Threading.WaitHandle handle) { }
    protected AbandonedMutexException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
    public System.Threading.Mutex Mutex { get { throw null; } }
    public int MutexIndex { get { throw null; } }
}
/// <summary>API-surface stub for the struct returned by <see cref="ExecutionContext.SuppressFlow"/>; flow suppression is reverted via Undo or Dispose.</summary>
public partial struct AsyncFlowControl : System.IDisposable
{
    public void Dispose() { }
    public override bool Equals(object obj) { throw null; }
    public bool Equals(System.Threading.AsyncFlowControl obj) { throw null; }
    public override int GetHashCode() { throw null; }
    public static bool operator ==(System.Threading.AsyncFlowControl a, System.Threading.AsyncFlowControl b) { throw null; }
    public static bool operator !=(System.Threading.AsyncFlowControl a, System.Threading.AsyncFlowControl b) { throw null; }
    public void Undo() { }
}
/// <summary>API-surface stub for ambient per-async-flow data; the optional constructor handler observes value changes.</summary>
public sealed partial class AsyncLocal<T>
{
    public AsyncLocal() { }
    [System.Security.SecurityCriticalAttribute]
    public AsyncLocal(System.Action<System.Threading.AsyncLocalValueChangedArgs<T>> valueChangedHandler) { }
    public T Value { get { throw null; } set { } }
}
/// <summary>API-surface stub describing a value change reported to an <see cref="AsyncLocal{T}"/> change handler.</summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct AsyncLocalValueChangedArgs<T>
{
    public T CurrentValue { get { throw null; } }
    public T PreviousValue { get { throw null; } }
    public bool ThreadContextChanged { get { throw null; } }
}
/// <summary>API-surface stub for the auto-resetting event wait handle (specialization of <see cref="EventWaitHandle"/>).</summary>
public sealed partial class AutoResetEvent : System.Threading.EventWaitHandle
{
    public AutoResetEvent(bool initialState) : base(default(bool), default(System.Threading.EventResetMode)) { }
}
/// <summary>API-surface stub for the Barrier synchronization primitive: participants signal-and-wait in phases, with an optional post-phase action.</summary>
public partial class Barrier : System.IDisposable
{
    public Barrier(int participantCount) { }
    public Barrier(int participantCount, System.Action<System.Threading.Barrier> postPhaseAction) { }
    public long CurrentPhaseNumber { get { throw null; } }
    public int ParticipantCount { get { throw null; } }
    public int ParticipantsRemaining { get { throw null; } }
    public long AddParticipant() { throw null; }
    public long AddParticipants(int participantCount) { throw null; }
    public void Dispose() { }
    protected virtual void Dispose(bool disposing) { }
    public void RemoveParticipant() { }
    public void RemoveParticipants(int participantCount) { }
    public void SignalAndWait() { }
    public bool SignalAndWait(int millisecondsTimeout) { throw null; }
    public bool SignalAndWait(int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { throw null; }
    public void SignalAndWait(System.Threading.CancellationToken cancellationToken) { }
    public bool SignalAndWait(System.TimeSpan timeout) { throw null; }
    public bool SignalAndWait(System.TimeSpan timeout, System.Threading.CancellationToken cancellationToken) { throw null; }
}
/// <summary>API-surface stub for the exception raised when a <see cref="Barrier"/> post-phase action fails.</summary>
public partial class BarrierPostPhaseException : System.Exception
{
    public BarrierPostPhaseException() { }
    public BarrierPostPhaseException(System.Exception innerException) { }
    public BarrierPostPhaseException(string message) { }
    public BarrierPostPhaseException(string message, System.Exception innerException) { }
    protected BarrierPostPhaseException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
/// <summary>API-surface stub: delegate for callbacks that receive a state object (used by <see cref="ExecutionContext.Run"/> below).</summary>
public delegate void ContextCallback(object state);
/// <summary>API-surface stub for the countdown event primitive: Signal decrements the count, Wait blocks, and IsSet/WaitHandle expose the signaled state.</summary>
public partial class CountdownEvent : System.IDisposable
{
    public CountdownEvent(int initialCount) { }
    public int CurrentCount { get { throw null; } }
    public int InitialCount { get { throw null; } }
    public bool IsSet { get { throw null; } }
    public System.Threading.WaitHandle WaitHandle { get { throw null; } }
    public void AddCount() { }
    public void AddCount(int signalCount) { }
    public void Dispose() { }
    protected virtual void Dispose(bool disposing) { }
    public void Reset() { }
    public void Reset(int count) { }
    public bool Signal() { throw null; }
    public bool Signal(int signalCount) { throw null; }
    public bool TryAddCount() { throw null; }
    public bool TryAddCount(int signalCount) { throw null; }
    public void Wait() { }
    public bool Wait(int millisecondsTimeout) { throw null; }
    public bool Wait(int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { throw null; }
    public void Wait(System.Threading.CancellationToken cancellationToken) { }
    public bool Wait(System.TimeSpan timeout) { throw null; }
    public bool Wait(System.TimeSpan timeout, System.Threading.CancellationToken cancellationToken) { throw null; }
}
/// <summary>API-surface stub: selects automatic or manual reset behavior for an <see cref="EventWaitHandle"/>.</summary>
public enum EventResetMode
{
    AutoReset = 0,
    ManualReset = 1,
}
/// <summary>API-surface stub for named/unnamed event wait handles, with Set/Reset and the OpenExisting/TryOpenExisting factory members.</summary>
public partial class EventWaitHandle : System.Threading.WaitHandle
{
    public EventWaitHandle(bool initialState, System.Threading.EventResetMode mode) { }
    [System.Security.SecurityCriticalAttribute]
    public EventWaitHandle(bool initialState, System.Threading.EventResetMode mode, string name) { }
    [System.Security.SecurityCriticalAttribute]
    public EventWaitHandle(bool initialState, System.Threading.EventResetMode mode, string name, out bool createdNew) { throw null; }
    [System.Security.SecurityCriticalAttribute]
    public static System.Threading.EventWaitHandle OpenExisting(string name) { throw null; }
    public bool Reset() { throw null; }
    public bool Set() { throw null; }
    [System.Security.SecurityCriticalAttribute]
    public static bool TryOpenExisting(string name, out System.Threading.EventWaitHandle result) { throw null; }
}
/// <summary>API-surface stub for ExecutionContext (captures/flows ambient execution state across threads); private constructor — instances come from Capture(). Bodies are placeholders — appears to be generated reference-assembly code.</summary>
public sealed partial class ExecutionContext : System.IDisposable, System.Runtime.Serialization.ISerializable
{
private ExecutionContext() { }
public static System.Threading.ExecutionContext Capture() { throw null; }
public System.Threading.ExecutionContext CreateCopy() { throw null; }
public void Dispose() { }
public void GetObjectData(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
public static bool IsFlowSuppressed() { throw null; }
public static void RestoreFlow() { }
public static void Run(System.Threading.ExecutionContext executionContext, System.Threading.ContextCallback callback, object state) { }
public static System.Threading.AsyncFlowControl SuppressFlow() { throw null; }
}
/// <summary>API-surface stub for HostExecutionContext (CLR-host-provided execution context); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class HostExecutionContext : System.IDisposable
{
public HostExecutionContext() { }
public HostExecutionContext(object state) { }
protected internal object State { get { throw null; } set { } }
public virtual System.Threading.HostExecutionContext CreateCopy() { throw null; }
public void Dispose() { }
public virtual void Dispose(bool disposing) { }
}
/// <summary>API-surface stub for HostExecutionContextManager (lets a CLR host capture/restore HostExecutionContext); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class HostExecutionContextManager
{
public HostExecutionContextManager() { }
public virtual System.Threading.HostExecutionContext Capture() { throw null; }
public virtual void Revert(object previousState) { }
public virtual object SetHostExecutionContext(System.Threading.HostExecutionContext hostExecutionContext) { throw null; }
}
/// <summary>API-surface stub for Interlocked (atomic add/exchange/compare-exchange operations); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public static partial class Interlocked
{
public static int Add(ref int location1, int value) { throw null; }
public static long Add(ref long location1, long value) { throw null; }
public static double CompareExchange(ref double location1, double value, double comparand) { throw null; }
public static int CompareExchange(ref int location1, int value, int comparand) { throw null; }
public static long CompareExchange(ref long location1, long value, long comparand) { throw null; }
public static System.IntPtr CompareExchange(ref System.IntPtr location1, System.IntPtr value, System.IntPtr comparand) { throw null; }
public static object CompareExchange(ref object location1, object value, object comparand) { throw null; }
public static float CompareExchange(ref float location1, float value, float comparand) { throw null; }
public static T CompareExchange<T>(ref T location1, T value, T comparand) where T : class { throw null; }
public static int Decrement(ref int location) { throw null; }
public static long Decrement(ref long location) { throw null; }
public static double Exchange(ref double location1, double value) { throw null; }
public static int Exchange(ref int location1, int value) { throw null; }
public static long Exchange(ref long location1, long value) { throw null; }
public static System.IntPtr Exchange(ref System.IntPtr location1, System.IntPtr value) { throw null; }
public static object Exchange(ref object location1, object value) { throw null; }
public static float Exchange(ref float location1, float value) { throw null; }
public static T Exchange<T>(ref T location1, T value) where T : class { throw null; }
public static int Increment(ref int location) { throw null; }
public static long Increment(ref long location) { throw null; }
public static void MemoryBarrier() { }
public static long Read(ref long location) { throw null; }
}
/// <summary>API-surface stub for LazyInitializer (thread-safe lazy initialization helpers); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public static partial class LazyInitializer
{
public static T EnsureInitialized<T>(ref T target) where T : class { throw null; }
public static T EnsureInitialized<T>(ref T target, ref bool initialized, ref object syncLock) { throw null; }
public static T EnsureInitialized<T>(ref T target, ref bool initialized, ref object syncLock, System.Func<T> valueFactory) { throw null; }
public static T EnsureInitialized<T>(ref T target, System.Func<T> valueFactory) where T : class { throw null; }
}
/// <summary>API-surface stub for LockCookie (value returned by ReaderWriterLock upgrade/release, used to restore lock state); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial struct LockCookie
{
public override bool Equals(object obj) { throw null; }
public bool Equals(System.Threading.LockCookie obj) { throw null; }
public override int GetHashCode() { throw null; }
public static bool operator ==(System.Threading.LockCookie a, System.Threading.LockCookie b) { throw null; }
public static bool operator !=(System.Threading.LockCookie a, System.Threading.LockCookie b) { throw null; }
}
/// <summary>API-surface stub for LockRecursionException (thrown on recursive lock entry that the lock's policy forbids); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class LockRecursionException : System.Exception
{
public LockRecursionException() { }
public LockRecursionException(string message) { }
public LockRecursionException(string message, System.Exception innerException) { }
protected LockRecursionException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
/// <summary>Specifies whether a lock (e.g. ReaderWriterLockSlim) may be entered recursively by the same thread.</summary>
public enum LockRecursionPolicy
{
NoRecursion = 0,
SupportsRecursion = 1,
}
/// <summary>API-surface stub for ManualResetEvent; the base-call arguments are defaults only because this is surface-only code — appears to be generated reference-assembly code.</summary>
public sealed partial class ManualResetEvent : System.Threading.EventWaitHandle
{
public ManualResetEvent(bool initialState) : base(default(bool), default(System.Threading.EventResetMode)) { }
}
/// <summary>API-surface stub for ManualResetEventSlim (lightweight in-process manual-reset event); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class ManualResetEventSlim : System.IDisposable
{
public ManualResetEventSlim() { }
public ManualResetEventSlim(bool initialState) { }
public ManualResetEventSlim(bool initialState, int spinCount) { }
public bool IsSet { get { throw null; } }
public int SpinCount { get { throw null; } }
public System.Threading.WaitHandle WaitHandle { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public void Reset() { }
public void Set() { }
public void Wait() { }
public bool Wait(int millisecondsTimeout) { throw null; }
public bool Wait(int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { throw null; }
public void Wait(System.Threading.CancellationToken cancellationToken) { }
public bool Wait(System.TimeSpan timeout) { throw null; }
public bool Wait(System.TimeSpan timeout, System.Threading.CancellationToken cancellationToken) { throw null; }
}
/// <summary>API-surface stub for Monitor (the primitive behind the C# "lock" statement); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public static partial class Monitor
{
public static void Enter(object obj) { }
public static void Enter(object obj, ref bool lockTaken) { }
public static void Exit(object obj) { }
public static bool IsEntered(object obj) { throw null; }
public static void Pulse(object obj) { }
public static void PulseAll(object obj) { }
public static bool TryEnter(object obj) { throw null; }
public static void TryEnter(object obj, ref bool lockTaken) { }
public static bool TryEnter(object obj, int millisecondsTimeout) { throw null; }
public static void TryEnter(object obj, int millisecondsTimeout, ref bool lockTaken) { }
public static bool TryEnter(object obj, System.TimeSpan timeout) { throw null; }
public static void TryEnter(object obj, System.TimeSpan timeout, ref bool lockTaken) { }
public static bool Wait(object obj) { throw null; }
public static bool Wait(object obj, int millisecondsTimeout) { throw null; }
public static bool Wait(object obj, int millisecondsTimeout, bool exitContext) { throw null; }
public static bool Wait(object obj, System.TimeSpan timeout) { throw null; }
public static bool Wait(object obj, System.TimeSpan timeout, bool exitContext) { throw null; }
}
/// <summary>API-surface stub for Mutex (OS mutual-exclusion primitive, optionally named/cross-process); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public sealed partial class Mutex : System.Threading.WaitHandle
{
public Mutex() { }
public Mutex(bool initiallyOwned) { }
[System.Security.SecurityCriticalAttribute]
public Mutex(bool initiallyOwned, string name) { }
[System.Security.SecurityCriticalAttribute]
public Mutex(bool initiallyOwned, string name, out bool createdNew) { throw null; }
[System.Security.SecurityCriticalAttribute]
public static System.Threading.Mutex OpenExisting(string name) { throw null; }
public void ReleaseMutex() { }
[System.Security.SecurityCriticalAttribute]
public static bool TryOpenExisting(string name, out System.Threading.Mutex result) { throw null; }
}
/// <summary>API-surface stub for the legacy ReaderWriterLock (multiple readers / single writer); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public sealed partial class ReaderWriterLock : System.Runtime.ConstrainedExecution.CriticalFinalizerObject
{
public ReaderWriterLock() { }
public bool IsReaderLockHeld { get { throw null; } }
public bool IsWriterLockHeld { get { throw null; } }
public int WriterSeqNum { get { throw null; } }
public void AcquireReaderLock(int millisecondsTimeout) { }
public void AcquireReaderLock(System.TimeSpan timeout) { }
public void AcquireWriterLock(int millisecondsTimeout) { }
public void AcquireWriterLock(System.TimeSpan timeout) { }
public bool AnyWritersSince(int seqNum) { throw null; }
public void DowngradeFromWriterLock(ref System.Threading.LockCookie lockCookie) { }
~ReaderWriterLock() { }
public System.Threading.LockCookie ReleaseLock() { throw null; }
public void ReleaseReaderLock() { }
public void ReleaseWriterLock() { }
public void RestoreLock(ref System.Threading.LockCookie lockCookie) { }
public System.Threading.LockCookie UpgradeToWriterLock(int millisecondsTimeout) { throw null; }
public System.Threading.LockCookie UpgradeToWriterLock(System.TimeSpan timeout) { throw null; }
}
/// <summary>API-surface stub for ReaderWriterLockSlim (lightweight reader/writer lock with upgradeable read mode); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class ReaderWriterLockSlim : System.IDisposable
{
public ReaderWriterLockSlim() { }
public ReaderWriterLockSlim(System.Threading.LockRecursionPolicy recursionPolicy) { }
public int CurrentReadCount { get { throw null; } }
public bool IsReadLockHeld { get { throw null; } }
public bool IsUpgradeableReadLockHeld { get { throw null; } }
public bool IsWriteLockHeld { get { throw null; } }
public System.Threading.LockRecursionPolicy RecursionPolicy { get { throw null; } }
public int RecursiveReadCount { get { throw null; } }
public int RecursiveUpgradeCount { get { throw null; } }
public int RecursiveWriteCount { get { throw null; } }
public int WaitingReadCount { get { throw null; } }
public int WaitingUpgradeCount { get { throw null; } }
public int WaitingWriteCount { get { throw null; } }
public void Dispose() { }
public void EnterReadLock() { }
public void EnterUpgradeableReadLock() { }
public void EnterWriteLock() { }
public void ExitReadLock() { }
public void ExitUpgradeableReadLock() { }
public void ExitWriteLock() { }
public bool TryEnterReadLock(int millisecondsTimeout) { throw null; }
public bool TryEnterReadLock(System.TimeSpan timeout) { throw null; }
public bool TryEnterUpgradeableReadLock(int millisecondsTimeout) { throw null; }
public bool TryEnterUpgradeableReadLock(System.TimeSpan timeout) { throw null; }
public bool TryEnterWriteLock(int millisecondsTimeout) { throw null; }
public bool TryEnterWriteLock(System.TimeSpan timeout) { throw null; }
}
/// <summary>API-surface stub for Semaphore (OS counting semaphore, optionally named/cross-process); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public sealed partial class Semaphore : System.Threading.WaitHandle
{
public Semaphore(int initialCount, int maximumCount) { }
public Semaphore(int initialCount, int maximumCount, string name) { }
public Semaphore(int initialCount, int maximumCount, string name, out bool createdNew) { throw null; }
public static System.Threading.Semaphore OpenExisting(string name) { throw null; }
public int Release() { throw null; }
public int Release(int releaseCount) { throw null; }
public static bool TryOpenExisting(string name, out System.Threading.Semaphore result) { throw null; }
}
/// <summary>API-surface stub for SemaphoreFullException (thrown when Release would exceed the semaphore's maximum count); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class SemaphoreFullException : System.SystemException
{
public SemaphoreFullException() { }
public SemaphoreFullException(string message) { }
public SemaphoreFullException(string message, System.Exception innerException) { }
protected SemaphoreFullException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
/// <summary>API-surface stub for SemaphoreSlim (lightweight in-process semaphore with async WaitAsync support); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class SemaphoreSlim : System.IDisposable
{
public SemaphoreSlim(int initialCount) { }
public SemaphoreSlim(int initialCount, int maxCount) { }
public System.Threading.WaitHandle AvailableWaitHandle { get { throw null; } }
public int CurrentCount { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
public int Release() { throw null; }
public int Release(int releaseCount) { throw null; }
public void Wait() { }
public bool Wait(int millisecondsTimeout) { throw null; }
public bool Wait(int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { throw null; }
public void Wait(System.Threading.CancellationToken cancellationToken) { }
public bool Wait(System.TimeSpan timeout) { throw null; }
public bool Wait(System.TimeSpan timeout, System.Threading.CancellationToken cancellationToken) { throw null; }
public System.Threading.Tasks.Task WaitAsync() { throw null; }
public System.Threading.Tasks.Task<bool> WaitAsync(int millisecondsTimeout) { throw null; }
public System.Threading.Tasks.Task<bool> WaitAsync(int millisecondsTimeout, System.Threading.CancellationToken cancellationToken) { throw null; }
public System.Threading.Tasks.Task WaitAsync(System.Threading.CancellationToken cancellationToken) { throw null; }
public System.Threading.Tasks.Task<bool> WaitAsync(System.TimeSpan timeout) { throw null; }
public System.Threading.Tasks.Task<bool> WaitAsync(System.TimeSpan timeout, System.Threading.CancellationToken cancellationToken) { throw null; }
}
/// <summary>Delegate signature for messages dispatched to a SynchronizationContext via Send or Post.</summary>
public delegate void SendOrPostCallback(object state);
/// <summary>API-surface stub for SpinLock (busy-wait mutual-exclusion struct); bodies are placeholders — appears to be generated reference-assembly code.</summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct SpinLock
{
public SpinLock(bool enableThreadOwnerTracking) { throw null; }
public bool IsHeld { get { throw null; } }
public bool IsHeldByCurrentThread { get { throw null; } }
public bool IsThreadOwnerTrackingEnabled { get { throw null; } }
public void Enter(ref bool lockTaken) { }
public void Exit() { }
public void Exit(bool useMemoryBarrier) { }
public void TryEnter(ref bool lockTaken) { }
public void TryEnter(int millisecondsTimeout, ref bool lockTaken) { }
public void TryEnter(System.TimeSpan timeout, ref bool lockTaken) { }
}
/// <summary>API-surface stub for SpinWait (adaptive spin-then-yield waiting helper); bodies are placeholders — appears to be generated reference-assembly code.</summary>
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public partial struct SpinWait
{
public int Count { get { throw null; } }
public bool NextSpinWillYield { get { throw null; } }
public void Reset() { }
public void SpinOnce() { }
public static void SpinUntil(System.Func<bool> condition) { }
public static bool SpinUntil(System.Func<bool> condition, int millisecondsTimeout) { throw null; }
public static bool SpinUntil(System.Func<bool> condition, System.TimeSpan timeout) { throw null; }
}
/// <summary>API-surface stub for SynchronizationContext (abstraction for marshaling work to a particular threading context); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class SynchronizationContext
{
public SynchronizationContext() { }
public static System.Threading.SynchronizationContext Current { get { throw null; } }
public virtual System.Threading.SynchronizationContext CreateCopy() { throw null; }
public bool IsWaitNotificationRequired() { throw null; }
public virtual void OperationCompleted() { }
public virtual void OperationStarted() { }
public virtual void Post(System.Threading.SendOrPostCallback d, object state) { }
public virtual void Send(System.Threading.SendOrPostCallback d, object state) { }
public static void SetSynchronizationContext(System.Threading.SynchronizationContext syncContext) { }
protected void SetWaitNotificationRequired() { }
[System.CLSCompliantAttribute(false)]
[System.Runtime.ConstrainedExecution.PrePrepareMethodAttribute]
public virtual int Wait(System.IntPtr[] waitHandles, bool waitAll, int millisecondsTimeout) { throw null; }
[System.CLSCompliantAttribute(false)]
[System.Runtime.ConstrainedExecution.PrePrepareMethodAttribute]
protected static int WaitHelper(System.IntPtr[] waitHandles, bool waitAll, int millisecondsTimeout) { throw null; }
}
/// <summary>API-surface stub for SynchronizationLockException (thrown when a lock-requiring operation is used without owning the lock); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class SynchronizationLockException : System.SystemException
{
public SynchronizationLockException() { }
public SynchronizationLockException(string message) { }
public SynchronizationLockException(string message, System.Exception innerException) { }
protected SynchronizationLockException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
/// <summary>API-surface stub for ThreadLocal&lt;T&gt; (per-thread lazily-initialized storage); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class ThreadLocal<T> : System.IDisposable
{
public ThreadLocal() { }
public ThreadLocal(bool trackAllValues) { }
public ThreadLocal(System.Func<T> valueFactory) { }
public ThreadLocal(System.Func<T> valueFactory, bool trackAllValues) { }
public bool IsValueCreated { get { throw null; } }
public T Value { get { throw null; } set { } }
public System.Collections.Generic.IList<T> Values { get { throw null; } }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
~ThreadLocal() { }
public override string ToString() { throw null; }
}
/// <summary>API-surface stub for Volatile (reads/writes with memory-ordering guarantees, one overload per primitive type); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public static partial class Volatile
{
public static bool Read(ref bool location) { throw null; }
public static byte Read(ref byte location) { throw null; }
public static double Read(ref double location) { throw null; }
public static short Read(ref short location) { throw null; }
public static int Read(ref int location) { throw null; }
public static long Read(ref long location) { throw null; }
public static System.IntPtr Read(ref System.IntPtr location) { throw null; }
[System.CLSCompliantAttribute(false)]
public static sbyte Read(ref sbyte location) { throw null; }
public static float Read(ref float location) { throw null; }
[System.CLSCompliantAttribute(false)]
public static ushort Read(ref ushort location) { throw null; }
[System.CLSCompliantAttribute(false)]
public static uint Read(ref uint location) { throw null; }
[System.CLSCompliantAttribute(false)]
public static ulong Read(ref ulong location) { throw null; }
[System.CLSCompliantAttribute(false)]
public static System.UIntPtr Read(ref System.UIntPtr location) { throw null; }
public static T Read<T>(ref T location) where T : class { throw null; }
public static void Write(ref bool location, bool value) { }
public static void Write(ref byte location, byte value) { }
public static void Write(ref double location, double value) { }
public static void Write(ref short location, short value) { }
public static void Write(ref int location, int value) { }
public static void Write(ref long location, long value) { }
public static void Write(ref System.IntPtr location, System.IntPtr value) { }
[System.CLSCompliantAttribute(false)]
public static void Write(ref sbyte location, sbyte value) { }
public static void Write(ref float location, float value) { }
[System.CLSCompliantAttribute(false)]
public static void Write(ref ushort location, ushort value) { }
[System.CLSCompliantAttribute(false)]
public static void Write(ref uint location, uint value) { }
[System.CLSCompliantAttribute(false)]
public static void Write(ref ulong location, ulong value) { }
[System.CLSCompliantAttribute(false)]
public static void Write(ref System.UIntPtr location, System.UIntPtr value) { }
public static void Write<T>(ref T location, T value) where T : class { }
}
/// <summary>API-surface stub for WaitHandleCannotBeOpenedException (thrown when a named synchronization object cannot be opened); bodies are placeholders — appears to be generated reference-assembly code.</summary>
public partial class WaitHandleCannotBeOpenedException : System.ApplicationException
{
public WaitHandleCannotBeOpenedException() { }
public WaitHandleCannotBeOpenedException(string message) { }
public WaitHandleCannotBeOpenedException(string message, System.Exception innerException) { }
protected WaitHandleCannotBeOpenedException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) { }
}
}
| |
namespace Edi.Apps.ViewModels
{
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Windows;
using System.Windows.Threading;
using Dialogs.FindReplace.ViewModel;
using ICSharpCode.AvalonEdit.Document;
using MsgBox;
using Documents.ViewModels.EdiDoc;
using System.Linq;
/// <summary>
/// Partial view model implementing the find/replace and goto-line dialog
/// workflows over the application's open documents (this.Files).
/// </summary>
public partial class ApplicationViewModel
{
// Backing field for the FindReplaceVm property.
private FindReplaceViewModel _mFindReplaceVm;
/// <summary>
/// Gets the view model that drives the find/replace dialog.
/// Created lazily the first time <see cref="ShowFindReplaceDialog"/> runs.
/// Raises a property-changed notification only when the value actually changes.
/// </summary>
public FindReplaceViewModel FindReplaceVm
{
get { return _mFindReplaceVm; }
protected set
{
if (_mFindReplaceVm != value)
{
_mFindReplaceVm = value;
RaisePropertyChanged(() => FindReplaceVm);
}
}
}
/// <summary>
/// Walks the list of open documents (forwards or backwards) starting from the
/// current editor, looking for the next document that contains a match for the
/// current search pattern. When a match is found, that document is activated
/// and the match selected (deferred via the dispatcher so bindings are in place).
/// </summary>
/// <param name="f">Find/replace view model carrying the search state.</param>
/// <param name="previous">When true, iterate the document list backwards.</param>
/// <returns>The editor containing the next match, or null if no match was found.</returns>
private IEditor GetNextEditor(FindReplaceViewModel f,
bool previous = false
)
{
// There is no next open document if there is none or only one open
if (this.Files.Count <= 1)
return f.GetCurrentEditor();
// There is no next open document If the user wants to search the current document only
if (f.SearchIn == Edi.Dialogs.FindReplace.SearchScope.CurrentDocument)
return f.GetCurrentEditor();
var l = new List<object>(this.Files.Cast<object>());
int idxStart = l.IndexOf(f.CurrentEditor);
int i = idxStart;
if (i >= 0)
{
Match m = null;
bool textSearchSuccess = false;
do
{
if (previous == true) // Get next/previous document
i = (i < 1 ? l.Count - 1 : i - 1);
else
i = (i >= l.Count - 1 ? 0 : i + 1);
//// i = (i + (previous ? l.Count - 1 : +1)) % l.Count;
// Search text in document
// (each candidate document is searched from its very beginning: offset 0, selection length 0)
if (l[i] is EdiViewModel)
{
EdiViewModel fTmp = l[i] as EdiViewModel;
Regex r;
m = this.FindNextMatchInText(0, 0, false, fTmp.Text, ref f, out r);
textSearchSuccess = m.Success;
}
}
while (i != idxStart && textSearchSuccess != true); // stop after one full round-trip over the document list
// Found a match so activate the corresponding document and select the text with scroll into view
if (textSearchSuccess == true && m != null)
{
var doc = l[i] as EdiViewModel;
if (doc != null)
this.ActiveDocument = doc;
// Ensure that no pending calls are in the dispatcher queue
// This makes sure that we are blocked until bindings are re-established
// Bindings are required to scroll a selection into view
Dispatcher.CurrentDispatcher.BeginInvoke(DispatcherPriority.SystemIdle, (Action)delegate
{
if (this.ActiveDocument != null && doc != null)
{
doc.TextEditorSelectionStart = m.Index;
doc.TextEditorSelectionLength = m.Length;
// Reset cursor position to make sure we search a document from its beginning
doc.TxtControl.SelectText(m.Index, m.Length);
f.CurrentEditor = l[i] as IEditor;
IEditor edi = f.GetCurrentEditor();
if (edi != null)
edi.Select(m.Index, m.Length);
}
});
return f.GetCurrentEditor();
}
}
return null;
}
/// <summary>
/// Find a match in a given piece of string.
/// Builds the regex from the find/replace view model (optionally with the search
/// direction temporarily inverted) and returns the first match at or after the
/// appropriate start offset.
/// </summary>
/// <param name="selectionStart">Start offset of the current editor selection.</param>
/// <param name="selectionLength">Length of the current editor selection.</param>
/// <param name="invertLeftRight">True to search against the configured direction.</param>
/// <param name="text">The document text to search in.</param>
/// <param name="f">Find/replace view model carrying the search options.</param>
/// <param name="r">Receives the regex that was used for the search.</param>
/// <returns>The resulting <see cref="Match"/> (check <c>Success</c>).</returns>
Match FindNextMatchInText(int selectionStart, // CE.SelectionStart
int selectionLength, // CE.SelectionLength
bool invertLeftRight, // InvertLeftRight
string text, // CE.Text
ref FindReplaceViewModel f,
out Regex r)
{
if (invertLeftRight)
{
// Temporarily flip the search direction so GetRegEx() builds a regex for the
// opposite direction, then restore the user's original setting.
f.SearchUp = !f.SearchUp;
r = f.GetRegEx();
f.SearchUp = !f.SearchUp;
}
else
r = f.GetRegEx();
// Right-to-left regexes search backwards from the selection start;
// forward searches start just past the current selection.
return r.Match(text, r.Options.HasFlag(RegexOptions.RightToLeft) ? selectionStart : selectionStart + selectionLength);
}
/// <summary>
/// Searches for the next match in the current editor and selects it.
/// On reaching the end of the document, either reports "no more items"
/// (current-document scope), moves on to the next open document
/// (all-documents scope), or wraps around to the start/end of the text.
/// </summary>
/// <param name="f">Find/replace view model carrying the search state.</param>
/// <param name="invertLeftRight">True to search against the configured direction.</param>
/// <returns>True if a match was found and selected; otherwise false.</returns>
private bool FindNext(FindReplaceViewModel f,
bool invertLeftRight = false)
{
IEditor ce = f.GetCurrentEditor();
if (ce == null)
return false;
Regex r;
Match m = FindNextMatchInText(ce.SelectionStart, ce.SelectionLength,
invertLeftRight, ce.Text, ref f, out r);
if (m.Success)
{
ce.Select(m.Index, m.Length);
return true;
}
else
{
if (f.SearchIn == Dialogs.FindReplace.SearchScope.CurrentDocument)
{
_MsgBox.Show(Util.Local.Strings.STR_MSG_FIND_NO_MORE_ITEMS_FOUND);
return false;
}
// we have reached the end of the document
// start again from the beginning/end,
object oldEditor = f.CurrentEditor;
do
{
if (f.SearchIn == Dialogs.FindReplace.SearchScope.AllDocuments)
{
// NOTE(review): the RightToLeft regex flag is passed as the 'previous' argument,
// i.e. right-to-left searches walk the document list backwards.
ce = GetNextEditor(f, r.Options.HasFlag(RegexOptions.RightToLeft));
if (ce == null)
return false;
f.CurrentEditor = ce;
return true;
}
// Wrap-around: restart from the end of the text (right-to-left) or the beginning.
m = r.Options.HasFlag(RegexOptions.RightToLeft) ? r.Match(ce.Text, ce.Text.Length - 1) : r.Match(ce.Text, 0);
if (m.Success)
{
ce.Select(m.Index, m.Length);
break;
}
else
{
_MsgBox.Show(Util.Local.Strings.STR_MSG_FIND_NO_MORE_ITEMS_FOUND2,
Util.Local.Strings.STR_MSG_FIND_Caption);
}
} while (f.CurrentEditor != oldEditor);
}
return false;
}
/// <summary>
/// Gets the current line in which the cursor is currently located
/// </summary>
/// <param name="f">The document view model to inspect.</param>
/// <returns>The line number of the cursor, or 0 if it could not be determined.</returns>
private static int GetCurrentEditorLine(EdiViewModel f)
{
int iCurrLine = 0;
try
{
int start, length;
bool IsRectangularSelection = false;
f.TxtControl.CurrentSelection(out start, out length, out IsRectangularSelection);
iCurrLine = f.Document.GetLineByOffset(start).LineNumber;
}
catch (Exception)
{
// ignored — fall back to line 0 if the selection/document state is unavailable
}
return iCurrLine;
}
/// <summary>
/// Shows the goto-line dialog for the active document and, if the user confirms,
/// selects and scrolls to the requested line. Does nothing if the active
/// document is not an <see cref="EdiViewModel"/>.
/// </summary>
private void ShowGotoLineDialog()
{
if (ActiveDocument is EdiViewModel)
{
EdiViewModel f = ActiveDocument as EdiViewModel;
Window dlg = null;
Dialogs.GotoLine.GotoLineViewModel dlgVm = null;
try
{
int iCurrLine = GetCurrentEditorLine(f);
dlgVm = new Dialogs.GotoLine.GotoLineViewModel(1, f.Document.LineCount, iCurrLine);
dlg = ViewSelector.GetDialogView(dlgVm, Application.Current.MainWindow);
dlg.Closing += dlgVm.OnClosing;
dlg.ShowDialog();
// Copy input if user OK'ed it. This could also be done by a method, equality operator, or copy constructor
if (dlgVm.WindowCloseResult == true)
{
DocumentLine line = f.Document.GetLineByNumber(dlgVm.LineNumber);
f.TxtControl.SelectText(line.Offset, 0); // Select text with length 0 and scroll to where
f.TxtControl.ScrollToLine(dlgVm.LineNumber); // we are supposed to be at
}
}
catch (Exception exc)
{
_MsgBox.Show(exc, Util.Local.Strings.STR_MSG_FIND_UNEXPECTED_ERROR,
MsgBoxButtons.OK, MsgBoxImage.Error);
}
finally
{
// Always unhook the Closing handler and close the dialog, even on error.
if (dlg != null)
{
dlg.Closing -= dlgVm.OnClosing;
dlg.Close();
}
}
}
}
/// <summary>
/// Shows the find (or find/replace) dialog for the active document, pre-filling
/// the search text with the editor's current selection when one exists.
/// Does nothing if the active document is not an <see cref="EdiViewModel"/>.
/// </summary>
/// <param name="showFind">True to show the Find view; false for Find/Replace.</param>
private void ShowFindReplaceDialog(bool showFind = true)
{
if (ActiveDocument is EdiViewModel)
{
EdiViewModel f = ActiveDocument as EdiViewModel;
Window dlg = null;
try
{
if (FindReplaceVm == null)
{
FindReplaceVm = new FindReplaceViewModel(_SettingsManager, _MsgBox);
}
FindReplaceVm.FindNext = FindNext;
// determine whether Find or Find/Replace is to be executed
FindReplaceVm.ShowAsFind = showFind;
if (f.TxtControl != null) // Search by default for currently selected text (if any)
{
string textToFind;
f.TxtControl.GetSelectedText(out textToFind);
if (textToFind.Length > 0)
FindReplaceVm.TextToFind = textToFind;
}
FindReplaceVm.CurrentEditor = f;
dlg = ViewSelector.GetDialogView(FindReplaceVm, Application.Current.MainWindow);
dlg.Closing += FindReplaceVm.OnClosing;
dlg.ShowDialog();
}
catch (Exception exc)
{
_MsgBox.Show(exc, Util.Local.Strings.STR_MSG_FIND_UNEXPECTED_ERROR,
MsgBoxButtons.OK, MsgBoxImage.Error);
}
finally
{
// Always unhook the Closing handler and close the dialog, even on error.
if (dlg != null)
{
dlg.Closing -= FindReplaceVm.OnClosing;
dlg.Close();
}
}
}
}
}
}
| |
namespace Economy.scripts.Messages
{
using System;
using System.Linq;
using EconConfig;
using Economy.scripts;
using Economy.scripts.EconStructures;
using ProtoBuf;
using Sandbox.Common.ObjectBuilders;
using Sandbox.Definitions;
using Sandbox.ModAPI;
using VRage;
using VRage.Game;
using VRage.Game.ObjectBuilders.Definitions;
using VRage.ModAPI;
using VRage.ObjectBuilders;
/// <summary>
/// this is to do the actual work of checking and moving the goods when a player is buying from something/someone
/// </summary>
[ProtoContract]
public class MessageBuy : MessageBase
{
#region properties
/// <summary>
/// person, NPC, offer or faction to submit an offer to buy from
/// </summary>
[ProtoMember(1)]
public string FromUserName;
/// <summary>
/// qty of item
/// </summary>
[ProtoMember(2)]
public decimal ItemQuantity;
/// <summary>
/// item name / id we are selling
/// </summary>
[ProtoMember(3)]
public string ItemTypeId;
[ProtoMember(4)]
public string ItemSubTypeName;
/// <summary>
/// unit price of item
/// </summary>
[ProtoMember(5)]
public decimal ItemPrice;
/// <summary>
/// Use the Current Sell price to buy it at. The Player
/// will not have access to this information without fetching it first. This saves us the trouble.
/// </summary>
[ProtoMember(6)]
public bool UseBankSellPrice;
/// <summary>
/// We are trading with a player or npc merchant zone.
/// </summary>
[ProtoMember(7)]
public bool BuyFromMerchant;
/// <summary>
/// The item is being put onto the market.
/// </summary>
[ProtoMember(8)]
public bool FindOnMarket;
#endregion
/// <summary>
/// Client-side helper: packages the buy-request parameters into a
/// <see cref="MessageBuy"/> and submits it to the server for processing.
/// NOTE(review): several parameter names differ from the fields they populate
/// (e.g. <paramref name="useBankBuyPrice"/> → <c>UseBankSellPrice</c>,
/// <paramref name="sellToMerchant"/> → <c>BuyFromMerchant</c>) — confirm intent with callers.
/// </summary>
public static void SendMessage(string toUserName, decimal itemQuantity, string itemTypeId, string itemSubTypeName, decimal itemPrice, bool useBankBuyPrice, bool sellToMerchant, bool offerToMarket)
{
    var request = new MessageBuy
    {
        FromUserName = toUserName,
        ItemQuantity = itemQuantity,
        ItemTypeId = itemTypeId,
        ItemSubTypeName = itemSubTypeName,
        ItemPrice = itemPrice,
        UseBankSellPrice = useBankBuyPrice,
        BuyFromMerchant = sellToMerchant,
        FindOnMarket = offerToMarket
    };
    ConnectionHelper.SendMessageToServer(request);
}
/// <summary>
/// Intentionally a no-op: buy requests are only ever handled server-side
/// (see <see cref="ProcessServer"/>).
/// </summary>
public override void ProcessClient()
{
// never processed on client
}
public override void ProcessServer()
{
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy started by Steam Id '{0}'.", SenderSteamId);
if (!EconomyScript.Instance.ServerConfig.EnableNpcTradezones && !EconomyScript.Instance.ServerConfig.EnablePlayerTradezones)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "All Trade zones are disabled.");
return;
}
// Get player steam ID
var buyingPlayer = MyAPIGateway.Players.FindPlayerBySteamId(SenderSteamId);
MyDefinitionBase definition = null;
MyObjectBuilderType result;
if (MyObjectBuilderType.TryParse(ItemTypeId, out result))
{
var id = new MyDefinitionId(result, ItemSubTypeName);
MyDefinitionManager.Static.TryGetDefinition(id, out definition);
}
if (definition == null)
{
// Someone hacking, and passing bad data?
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, the item you specified doesn't exist!");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- item doesn't exist.", SenderSteamId);
return;
}
if (definition.Id.TypeId == typeof (MyObjectBuilder_GasProperties))
{
// TODO: buy gasses!
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Cannot buy gasses currently.");
return;
}
// Do a floating point check on the item item. Tools and components cannot have decimals. They must be whole numbers.
if (definition.Id.TypeId != typeof(MyObjectBuilder_Ore) && definition.Id.TypeId != typeof(MyObjectBuilder_Ingot))
{
if (ItemQuantity != Math.Truncate(ItemQuantity))
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "You must provide a whole number for the quantity to buy that item.");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- invalid quantity.", SenderSteamId);
return;
}
//ItemQuantity = Math.Round(ItemQuantity, 0); // Or do we just round the number?
}
if (ItemQuantity <= 0)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "You must provide a valid quantity to buy.");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- invalid quantity.", SenderSteamId);
return;
}
// Who are we buying from?
BankAccountStruct accountToSell;
if (BuyFromMerchant)
accountToSell = AccountManager.FindAccount(EconomyConsts.NpcMerchantId);
else
accountToSell = AccountManager.FindAccount(FromUserName);
if (accountToSell == null)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, player does not exist or have an account!");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- no account.", SenderSteamId);
return;
}
if (MarketManager.IsItemBlacklistedOnServer(ItemTypeId, ItemSubTypeName))
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, the item you tried to buy is blacklisted on this server.");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- item blacklisted.", SenderSteamId);
return;
}
// Get the player's inventory, regardless of if they are in a ship, or a remote control cube.
var character = buyingPlayer.GetCharacter();
// TODO: do players in Cryochambers count as a valid trading partner? They should be alive, but the connected player may be offline.
// I think we'll have to do lower level checks to see if a physical player is Online.
if (character == null)
{
// Player has no body. Could mean they are dead.
// Either way, there is no inventory.
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "You are dead. You cannot trade while dead.");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- player is dead.", SenderSteamId);
return;
}
// TODO: is a null check adaqaute?, or do we need to check for IsDead?
// I don't think the chat console is accessible during respawn, only immediately after death.
// Is it valid to be able to trade when freshly dead?
//var identity = buyingPlayer.Identity();
//MyAPIGateway.Utilities.ShowMessage("CHECK", "Is Dead: {0}", identity.IsDead);
//if (identity.IsDead)
//{
// MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "You are dead. You cannot trade while dead.");
// return;
//}
var position = ((IMyEntity)character).WorldMatrix.Translation;
MarketItemStruct marketItem = null;
if (BuyFromMerchant || UseBankSellPrice)
{
var markets = MarketManager.FindMarketsFromLocation(position);
if (markets.Count == 0)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, your are not in range of any markets!");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- no market in range.", SenderSteamId);
return;
}
// TODO: find market with best Sell price that isn't blacklisted.
var market = markets.FirstOrDefault();
if (market == null)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, the market you are accessing does not exist!");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- no market found.", SenderSteamId);
return;
}
accountToSell = AccountManager.FindAccount(market.MarketId);
marketItem = market.MarketItems.FirstOrDefault(e => e.TypeId == ItemTypeId && e.SubtypeName == ItemSubTypeName);
if (marketItem == null)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, the items you are trying to buy doesn't have a market entry!");
// In reality, this shouldn't happen as all markets have their items synced up on start up of the mod.
return;
}
if (marketItem.IsBlacklisted)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, the item you tried to buy is blacklisted in this market.");
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- item is blacklisted in market.", SenderSteamId);
return;
}
// Verify that the items are in the player inventory.
// TODO: later check trade block, cockpit inventory, cockpit ship inventory, inventory of targeted cube.
if (UseBankSellPrice)
// The player is buying, but the *Market* will *sell* it to the player at this price.
// if we are not using price scaling OR the market we are trading with isn't owned by the NPC ID, dont change price. Otherwise scale.
if (!EconomyScript.Instance.ServerConfig.PriceScaling || accountToSell.SteamId != EconomyConsts.NpcMerchantId) ItemPrice = marketItem.SellPrice; else ItemPrice = EconDataManager.PriceAdjust(marketItem.SellPrice, marketItem.Quantity, PricingBias.Sell);
// If price scaling is on, adjust item price (or check player for subsidy pricing)
}
var accountToBuy = AccountManager.FindOrCreateAccount(SenderSteamId, SenderDisplayName, SenderLanguage);
var transactionAmount = ItemPrice * ItemQuantity;
// need fix negative amounts before checking if the player can afford it.
if (!buyingPlayer.IsAdmin())
transactionAmount = Math.Abs(transactionAmount);
// TODO: admin check on ability to afford it?
//[maybe later, our pay and reset commands let us steal money from npc anyway best to keep admin abuse features to minimum]
//[we could put an admin check on blacklist however, allow admins to spawn even blacklisted gear]
if (accountToBuy.BankBalance < transactionAmount && accountToBuy.SteamId != accountToSell.SteamId)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, you cannot afford {0} {1}!", transactionAmount, EconomyScript.Instance.ServerConfig.CurrencyName);
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- not enough money.", SenderSteamId);
return;
}
if (BuyFromMerchant) // and supply is not exhausted, or unlimited mode is not on.
//This is a quick fix, ideally it should do a partial buy of what is left and post a buy offer for remainder
{
// here we look up item price and transfer items and money as appropriate
if (marketItem.Quantity >= ItemQuantity
|| (!EconomyScript.Instance.ServerConfig.LimitedSupply && accountToBuy.SteamId != accountToSell.SteamId))
{
marketItem.Quantity -= ItemQuantity; // reduce Market content.
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy finalizing by Steam Id '{0}' -- adding to inventory.", SenderSteamId);
var remainingToCollect = MessageSell.AddToInventories(buyingPlayer, ItemQuantity, definition.Id);
//EconomyScript.Instance.Config.LimitedSupply
if (accountToBuy.SteamId != accountToSell.SteamId)
{
accountToSell.BankBalance += transactionAmount;
accountToSell.Date = DateTime.Now;
accountToBuy.BankBalance -= transactionAmount;
accountToBuy.Date = DateTime.Now;
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "You just purchased {1} '{2}' for {0} {3}", transactionAmount, ItemQuantity, definition.GetDisplayName(), EconomyScript.Instance.ServerConfig.CurrencyName);
MessageUpdateClient.SendAccountMessage(accountToSell);
MessageUpdateClient.SendAccountMessage(accountToBuy);
}
else
{
accountToBuy.Date = DateTime.Now;
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "You just arranged transfer of {0} '{1}' into your inventory.", ItemQuantity, definition.GetDisplayName());
}
if (remainingToCollect > 0)
{
MarketManager.CreateStockHeld(buyingPlayer.SteamUserId, ItemTypeId, ItemSubTypeName, remainingToCollect, ItemPrice);
// TODO: there should be a common command to collect items. Not use /sell.
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "There are {0} remaining to collect. Use '/collect'", remainingToCollect);
}
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy complete by Steam Id '{0}' -- items bought.", SenderSteamId);
}
else
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "There isn't '{0}' of {1} available to purchase! Only {2} available to buy!", ItemQuantity, definition.GetDisplayName(), marketItem.Quantity);
EconomyScript.Instance.ServerLogger.WriteVerbose("Action /Buy aborted by Steam Id '{0}' -- not enough stock.", SenderSteamId);
}
return;
}
else if (FindOnMarket)
{
// TODO: Here we find the best offer on the zone market
return;
}
else
{
// is it a player then?
if (accountToSell.SteamId == buyingPlayer.SteamUserId)
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, you cannot buy from yourself!");
return;
}
// check if selling player is online and in range?
var payingPlayer = MyAPIGateway.Players.FindPlayerBySteamId(accountToSell.SteamId);
if (EconomyScript.Instance.ServerConfig.LimitedRange && !Support.RangeCheck(buyingPlayer, payingPlayer))
{
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Sorry, you are not in range of that player!");
return;
}
if (payingPlayer == null)
{
// TODO: other player offline.
}
else
{
// TODO: other player is online.
}
}
// this is a fall through from the above conditions not yet complete.
MessageClientTextMessage.SendMessage(SenderSteamId, "BUY", "Not yet complete.");
}
}
}
| |
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
// Author(s):
//
// Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
[assembly: Elmah.Scc("$Id$")]
namespace Elmah
{
#region Imports
using System;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Collections.Generic;
using CultureInfo = System.Globalization.CultureInfo;
#endregion
/// <summary>
/// Renders an HTML page displaying a page of errors from the error log.
/// </summary>

internal sealed class ErrorLogPage : ErrorPageBase
{
    private int _pageIndex;     // zero-based index of the page being displayed
    private int _pageSize;      // number of errors shown per page
    private int _totalCount;    // total number of errors available in the log
    private List<ErrorLogEntry> _errorEntryList;    // entries loaded for the current page

    private const int _defaultPageSize = 15;
    private const int _maximumPageSize = 100;

    /// <summary>
    /// Reads the paging parameters from the query string (bounded to
    /// sane values), loads the corresponding errors from the log and
    /// sets the page title.
    /// </summary>
    protected override void OnLoad(EventArgs e)
    {
        //
        // Get the page index and size parameters within their bounds.
        // NOTE(review): Convert.ToInt32 throws FormatException for a
        // non-numeric query string value; a missing value converts to
        // zero, which falls back to the defaults below.
        //

        _pageSize = Convert.ToInt32(this.Request.QueryString["size"], CultureInfo.InvariantCulture);
        _pageSize = Math.Min(_maximumPageSize, Math.Max(0, _pageSize));

        if (_pageSize == 0)
        {
            _pageSize = _defaultPageSize;
        }

        _pageIndex = Convert.ToInt32(this.Request.QueryString["page"], CultureInfo.InvariantCulture);
        _pageIndex = Math.Max(1, _pageIndex) - 1;

        //
        // Read the error records.
        //

        _errorEntryList = new List<ErrorLogEntry>(_pageSize);
        _totalCount = this.ErrorLog.GetErrors(_pageIndex, _pageSize, _errorEntryList);

        //
        // Set the title of the page.
        //

        string hostName = Environment.TryGetMachineName(Context);

        this.PageTitle = string.Format(
            hostName.Length > 0
            ? "Error log for {0} on {2} (Page #{1})"
            : "Error log for {0} (Page #{1})",
            this.ApplicationName, (_pageIndex + 1).ToString("N0"), hostName);

        base.OnLoad(e);
    }

    /// <summary>
    /// Renders the head section: a link relating the RSS feed and, on
    /// the first page only, an auto-refresh meta tag.
    /// </summary>
    protected override void RenderHead(HtmlTextWriter writer)
    {
        if (writer == null)
            throw new ArgumentNullException("writer");

        base.RenderHead(writer);

        //
        // Write a <link> tag to relate the RSS feed.
        //

        writer.AddAttribute(HtmlTextWriterAttribute.Rel, HtmlLinkType.Alternate);
        writer.AddAttribute(HtmlTextWriterAttribute.Type, "application/rss+xml");
        writer.AddAttribute(HtmlTextWriterAttribute.Title, "RSS");
        writer.AddAttribute(HtmlTextWriterAttribute.Href, this.BasePageName + "/rss");
        writer.RenderBeginTag(HtmlTextWriterTag.Link);
        writer.RenderEndTag();
        writer.WriteLine();

        //
        // If on the first page, then enable auto-refresh every minute
        // by issuing the following markup:
        //
        //      <meta http-equiv="refresh" content="60">
        //

        if (_pageIndex == 0)
        {
            writer.AddAttribute("http-equiv", "refresh");
            writer.AddAttribute("content", "60");
            writer.RenderBeginTag(HtmlTextWriterTag.Meta);
            writer.RenderEndTag();
            writer.WriteLine();
        }
    }

    /// <summary>
    /// Renders the page body: title, speed bar, and either the error
    /// table with stats and navigation or a "no errors" message.
    /// </summary>
    protected override void RenderContents(HtmlTextWriter writer)
    {
        if (writer == null)
            throw new ArgumentNullException("writer");

        //
        // Write out the page title and speed bar in the body.
        //

        RenderTitle(writer);

        SpeedBar.Render(writer,
            SpeedBar.RssFeed.Format(BasePageName),
            SpeedBar.RssDigestFeed.Format(BasePageName),
            SpeedBar.DownloadLog.Format(BasePageName),
            SpeedBar.Help,
            SpeedBar.About.Format(BasePageName));

        if (_errorEntryList.Count != 0)
        {
            //
            // Write error number range displayed on this page and the
            // total available in the log, followed by stock
            // page sizes.
            //

            writer.RenderBeginTag(HtmlTextWriterTag.P);
            RenderStats(writer);
            RenderStockPageSizes(writer);
            writer.RenderEndTag(); // </p>
            writer.WriteLine();

            //
            // Write out the main table to display the errors.
            //

            RenderErrors(writer);

            //
            // Write out page navigation links.
            //

            RenderPageNavigators(writer);
        }
        else
        {
            //
            // No errors found in the log, so display a corresponding
            // message.
            //

            RenderNoErrors(writer);
        }

        base.RenderContents(writer);
    }

    /// <summary>
    /// Renders "next page" and "back to first page" links as
    /// applicable to the current page position.
    /// </summary>
    private void RenderPageNavigators(HtmlTextWriter writer)
    {
        Debug.Assert(writer != null);

        //
        // If not on the last page then render a link to the next page.
        //

        writer.RenderBeginTag(HtmlTextWriterTag.P);

        int nextPageIndex = _pageIndex + 1;
        bool moreErrors = nextPageIndex * _pageSize < _totalCount;

        if (moreErrors)
            RenderLinkToPage(writer, HtmlLinkType.Next, "Next errors", nextPageIndex);

        //
        // If not on the first page then render a link to the first page.
        //

        if (_pageIndex > 0 && _totalCount > 0)
        {
            if (moreErrors)
                writer.Write("; ");

            RenderLinkToPage(writer, HtmlLinkType.Start, "Back to first page", 0);
        }

        writer.RenderEndTag(); // </p>
        writer.WriteLine();
    }

    /// <summary>
    /// Renders a list of stock page-size links, each restarting the
    /// log display from the first page.
    /// </summary>
    private void RenderStockPageSizes(HtmlTextWriter writer)
    {
        Debug.Assert(writer != null);

        //
        // Write out a set of stock page size choices. Note that
        // selecting a stock page size re-starts the log
        // display from the first page to get the right paging.
        //

        writer.Write("Start with ");

        int[] stockSizes = new int[] { 10, 15, 20, 25, 30, 50, 100 };

        for (int stockSizeIndex = 0; stockSizeIndex < stockSizes.Length; stockSizeIndex++)
        {
            int stockSize = stockSizes[stockSizeIndex];

            if (stockSizeIndex > 0)
                writer.Write(stockSizeIndex + 1 < stockSizes.Length ? ", " : " or ");

            RenderLinkToPage(writer, HtmlLinkType.Start, stockSize.ToString(), 0, stockSize);
        }

        writer.Write(" errors per page.");
    }

    /// <summary>
    /// Renders the "Errors X to Y of total Z (page M of N)" summary.
    /// </summary>
    private void RenderStats(HtmlTextWriter writer)
    {
        Debug.Assert(writer != null);

        int firstErrorNumber = _pageIndex * _pageSize + 1;
        int lastErrorNumber = firstErrorNumber + _errorEntryList.Count - 1;
        int totalPages = (int) Math.Ceiling((double) _totalCount / _pageSize);

        writer.Write("Errors {0} to {1} of total {2} (page {3} of {4}). ",
            firstErrorNumber.ToString("N0"),
            lastErrorNumber.ToString("N0"),
            _totalCount.ToString("N0"),
            (_pageIndex + 1).ToString("N0"),
            totalPages.ToString("N0"));
    }

    /// <summary>
    /// Renders the page heading with the application name (shortened
    /// when it is an IIS metabase path) and the host name.
    /// </summary>
    private void RenderTitle(HtmlTextWriter writer)
    {
        Debug.Assert(writer != null);

        //
        // If the application name matches the APPL_MD_PATH then its
        // of the form /LM/W3SVC/.../<name>. In this case, use only the
        // <name> part to reduce the noise. The full application name is
        // still made available through a tooltip.
        //

        string simpleName = this.ApplicationName;

        if (string.Compare(simpleName, this.Request.ServerVariables["APPL_MD_PATH"],
            true, CultureInfo.InvariantCulture) == 0)
        {
            int lastSlashIndex = simpleName.LastIndexOf('/');

            if (lastSlashIndex > 0)
                simpleName = simpleName.Substring(lastSlashIndex + 1);
        }

        writer.AddAttribute(HtmlTextWriterAttribute.Id, "PageTitle");
        writer.RenderBeginTag(HtmlTextWriterTag.H1);
        writer.Write("Error Log for ");

        writer.AddAttribute(HtmlTextWriterAttribute.Id, "ApplicationName");
        writer.AddAttribute(HtmlTextWriterAttribute.Title, this.Server.HtmlEncode(this.ApplicationName));
        writer.RenderBeginTag(HtmlTextWriterTag.Span);
        Server.HtmlEncode(simpleName, writer);

        string hostName = Environment.TryGetMachineName(Context);

        if (hostName.Length > 0)
        {
            writer.Write(" on ");
            Server.HtmlEncode(hostName, writer);
        }

        writer.RenderEndTag(); // </span>

        writer.RenderEndTag(); // </h1>
        writer.WriteLine();
    }

    /// <summary>
    /// Renders the message shown when the current page holds no
    /// errors, with a link back to the first page when applicable.
    /// </summary>
    private void RenderNoErrors(HtmlTextWriter writer)
    {
        Debug.Assert(writer != null);

        writer.RenderBeginTag(HtmlTextWriterTag.P);

        writer.Write("No errors found. ");

        //
        // It is possible that there are no errors at the requested
        // page in the log (especially if it is not the first page).
        // However, if there are errors in the log then offer a link
        // back to the first page.
        //

        if (_pageIndex > 0 && _totalCount > 0)
        {
            RenderLinkToPage(writer, HtmlLinkType.Start, "Go to first page", 0);
            writer.Write(". ");
        }

        writer.RenderEndTag();
        writer.WriteLine();
    }

    /// <summary>
    /// Renders the main table, one row per error, with host, status
    /// code, type, message (with details link), user, date and time.
    /// </summary>
    private void RenderErrors(HtmlTextWriter writer)
    {
        Debug.Assert(writer != null);

        //
        // Create a table to display error information in each row.
        //

        Table table = new Table();
        table.ID = "ErrorLog";
        table.CellSpacing = 0;

        //
        // Create the table row for headings.
        //

        TableRow headRow = new TableRow();

        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "Host", "host-col"));
        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "Code", "code-col"));
        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "Type", "type-col"));
        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "Error", "error-col"));
        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "User", "user-col"));
        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "Date", "date-col"));
        headRow.Cells.Add(FormatCell(new TableHeaderCell(), "Time", "time-col"));

        table.Rows.Add(headRow);

        //
        // Generate a table body row for each error.
        //

        for (int errorIndex = 0; errorIndex < _errorEntryList.Count; errorIndex++)
        {
            // The list is already typed, so no cast is needed here.
            ErrorLogEntry errorEntry = _errorEntryList[errorIndex];
            Error error = errorEntry.Error;

            TableRow bodyRow = new TableRow();
            bodyRow.CssClass = errorIndex % 2 == 0 ? "even-row" : "odd-row";

            //
            // Format host and status code cells.
            //

            bodyRow.Cells.Add(FormatCell(new TableCell(), error.HostName, "host-col"));
            bodyRow.Cells.Add(FormatCell(new TableCell(), error.StatusCode.ToString(), "code-col", HttpWorkerRequest.GetStatusDescription(error.StatusCode) ?? string.Empty));
            bodyRow.Cells.Add(FormatCell(new TableCell(), ErrorDisplay.HumaneExceptionErrorType(error), "type-col", error.Type));

            //
            // Format the message cell, which contains the message
            // text and a details link pointing to the page where
            // all error details can be viewed.
            //

            TableCell messageCell = new TableCell();
            messageCell.CssClass = "error-col";

            Label messageLabel = new Label();
            messageLabel.Text = this.Server.HtmlEncode(error.Message);

            HyperLink detailsLink = new HyperLink();
            detailsLink.NavigateUrl = BasePageName + "/detail?id=" + HttpUtility.UrlEncode(errorEntry.Id);
            detailsLink.Text = "Details…";

            messageCell.Controls.Add(messageLabel);
            messageCell.Controls.Add(new LiteralControl(" "));
            messageCell.Controls.Add(detailsLink);

            bodyRow.Cells.Add(messageCell);

            //
            // Format the user, date and time cells.
            //

            bodyRow.Cells.Add(FormatCell(new TableCell(), error.User, "user-col"));
            bodyRow.Cells.Add(FormatCell(new TableCell(), error.Time.ToShortDateString(), "date-col",
                error.Time.ToLongDateString()));
            bodyRow.Cells.Add(FormatCell(new TableCell(), error.Time.ToShortTimeString(), "time-col",
                error.Time.ToLongTimeString()));

            //
            // Finally, add the row to the table.
            //

            table.Rows.Add(bodyRow);
        }

        table.RenderControl(writer);
    }

    /// <summary>
    /// Formats a table cell with the given contents and CSS class and
    /// no tooltip.
    /// </summary>
    private TableCell FormatCell(TableCell cell, string contents, string cssClassName)
    {
        return FormatCell(cell, contents, cssClassName, string.Empty);
    }

    /// <summary>
    /// Formats a table cell: HTML-encodes the contents, applies the
    /// CSS class, and wraps the text in a tooltipped label when a
    /// tooltip is supplied. Empty contents render as a non-breaking
    /// space so the cell border still draws.
    /// </summary>
    private TableCell FormatCell(TableCell cell, string contents, string cssClassName, string toolTip)
    {
        Debug.Assert(cell != null);
        Debug.AssertStringNotEmpty(cssClassName);

        cell.Wrap = false;
        cell.CssClass = cssClassName;

        if (contents.Length == 0)
        {
            cell.Text = "&nbsp;";
        }
        else
        {
            string encodedContents = this.Server.HtmlEncode(contents);

            if (toolTip.Length == 0)
            {
                cell.Text = encodedContents;
            }
            else
            {
                Label label = new Label();
                label.ToolTip = toolTip;
                label.Text = encodedContents;
                cell.Controls.Add(label);
            }
        }

        return cell;
    }

    /// <summary>
    /// Renders a link to the given page using the current page size.
    /// </summary>
    private void RenderLinkToPage(HtmlTextWriter writer, string type, string text, int pageIndex)
    {
        RenderLinkToPage(writer, type, text, pageIndex, _pageSize);
    }

    /// <summary>
    /// Renders an anchor pointing at this page with "page" and "size"
    /// query string parameters, optionally carrying a rel type.
    /// </summary>
    private void RenderLinkToPage(HtmlTextWriter writer, string type, string text, int pageIndex, int pageSize)
    {
        Debug.Assert(writer != null);
        Debug.Assert(text != null);
        Debug.Assert(pageIndex >= 0);
        Debug.Assert(pageSize >= 0);

        string href = string.Format("{0}?page={1}&size={2}",
            BasePageName,
            (pageIndex + 1).ToString(CultureInfo.InvariantCulture),
            pageSize.ToString(CultureInfo.InvariantCulture));

        writer.AddAttribute(HtmlTextWriterAttribute.Href, href);

        if (type != null && type.Length > 0)
            writer.AddAttribute(HtmlTextWriterAttribute.Rel, type);

        writer.RenderBeginTag(HtmlTextWriterTag.A);
        this.Server.HtmlEncode(text, writer);
        writer.RenderEndTag();
    }
}
}
| |
namespace WebBaseSystem.Web.Areas.HelpPage
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using WebBaseSystem.Web.Areas.HelpPage.ModelDescriptions;
using WebBaseSystem.Web.Areas.HelpPage.Models;
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Replaces the documentation provider used to build the help page.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="documentationProvider">The provider supplying API documentation.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
    Type serviceType = typeof(IDocumentationProvider);
    config.Services.Replace(serviceType, documentationProvider);
}
/// <summary>
/// Supplies the objects the formatters use to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sampleObjects">Sample object per type.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
    HelpPageSampleGenerator generator = config.GetHelpPageSampleGenerator();
    generator.SampleObjects = sampleObjects;
}
/// <summary>
/// Registers a fixed sample request for the given media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sample">The sample request object.</param>
/// <param name="mediaType">The media type the sample applies to.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // "*" matches the action regardless of its parameter names.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Registers a fixed sample request for the given media type and the action
/// overload identified by the supplied parameter names.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sample">The sample request object.</param>
/// <param name="mediaType">The media type the sample applies to.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">Parameter names identifying the action overload.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Registers a fixed sample response for the given media type and action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sample">The sample response object.</param>
/// <param name="mediaType">The media type the sample applies to.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    // "*" matches the action regardless of its parameter names.
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Registers a fixed sample response for the given media type and the action
/// overload identified by the supplied parameter names.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sample">The sample response object.</param>
/// <param name="mediaType">The media type the sample applies to.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">Parameter names identifying the action overload.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Registers a fixed sample used for all actions with the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sample">The sample object.</param>
/// <param name="mediaType">The media type the sample applies to.</param>
public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
{
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Registers a fixed sample used for all actions whose parameter or return
/// type matches <paramref name="type"/> under the specified media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="sample">The sample object.</param>
/// <param name="mediaType">The media type the sample applies to.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
    HelpPageSampleKey key = new HelpPageSampleKey(mediaType, type);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/>
/// passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action,
/// letting the help page produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="type">The actual request body type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" matches the action regardless of its parameter names.
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/>
/// passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in the action
/// overload identified by the supplied parameter names, letting the help page
/// produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="type">The actual request body type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">Parameter names identifying the action overload.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/>
/// returned by an action, letting the help page produce more accurate
/// response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="type">The actual response body type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" matches the action regardless of its parameter names.
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/>
/// returned by the action overload identified by the supplied parameter names,
/// letting the help page produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/> to configure.</param>
/// <param name="type">The actual response body type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">Parameter names identifying the action overload.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    HelpPageSampleKey key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Gets the help page sample generator, creating and caching one in the
/// configuration properties on first use.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
    object generator = config.Properties.GetOrAdd(
        typeof(HelpPageSampleGenerator),
        _ => new HelpPageSampleGenerator());
    return (HelpPageSampleGenerator)generator;
}
/// <summary>
/// Installs the given help page sample generator, replacing any generator
/// already cached in the configuration properties.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator to use.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
    config.Properties.AddOrUpdate(
        typeof(HelpPageSampleGenerator),
        key => sampleGenerator,
        (key, existing) => sampleGenerator);
}
/// <summary>
/// Gets the model description generator, creating and caching one in the
/// configuration properties on first use.
/// </summary>
/// <param name="config">The configuration.</param>
/// <returns>The <see cref="ModelDescriptionGenerator"/>.</returns>
public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
{
    object generator = config.Properties.GetOrAdd(
        typeof(ModelDescriptionGenerator),
        _ => InitializeModelDescriptionGenerator(config));
    return (ModelDescriptionGenerator)generator;
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model
/// is generated on the first call and cached in the configuration properties
/// for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// A <see cref="HelpPageApiModel"/>, or null when no API matches the ID.
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
    string cacheKey = ApiModelPrefix + apiDescriptionId;

    object cached;
    if (config.Properties.TryGetValue(cacheKey, out cached))
    {
        return (HelpPageApiModel)cached;
    }

    Collection<ApiDescription> descriptions = config.Services.GetApiExplorer().ApiDescriptions;
    ApiDescription match = descriptions.FirstOrDefault(
        api => string.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));

    if (match == null)
    {
        // Unknown ID: nothing is cached and nothing is generated.
        return null;
    }

    HelpPageApiModel generated = GenerateApiModel(match, config);
    config.Properties.TryAdd(cacheKey, generated);
    return generated;
}
/// <summary>
/// Builds the help page model for one API description by generating its URI
/// parameters, request/resource descriptions and samples.
/// </summary>
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
{
    ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
    HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();

    HelpPageApiModel apiModel = new HelpPageApiModel();
    apiModel.ApiDescription = apiDescription;

    GenerateUriParameters(apiModel, modelGenerator);
    GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
    GenerateResourceDescription(apiModel, modelGenerator);
    GenerateSamples(apiModel, sampleGenerator);

    return apiModel;
}
/// <summary>
/// Populates apiModel.UriParameters from the URI-bound parameters of the API
/// description. Complex types that are not bindable with a TypeConverter are
/// expanded into their individual properties; everything else is listed as a
/// single parameter.
/// </summary>
private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;
    foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
    {
        // Only parameters bound from the URI appear in this section.
        if (apiParameter.Source == ApiParameterSource.FromUri)
        {
            HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
            Type parameterType = null;
            ModelDescription typeDescription = null;
            ComplexTypeModelDescription complexTypeDescription = null;
            if (parameterDescriptor != null)
            {
                parameterType = parameterDescriptor.ParameterType;
                typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                complexTypeDescription = typeDescription as ComplexTypeModelDescription;
            }
            // Example:
            // [TypeConverter(typeof(PointConverter))]
            // public class Point
            // {
            //     public Point(int x, int y)
            //     {
            //         X = x;
            //         Y = y;
            //     }
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
            //
            // public class Point
            // {
            //     public int X { get; set; }
            //     public int Y { get; set; }
            // }
            // Regular complex class Point will have properties X and Y added to UriParameters collection.
            if (complexTypeDescription != null
                && !IsBindableWithTypeConverter(parameterType))
            {
                // Expand the complex type: each of its properties becomes
                // its own URI parameter.
                foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                {
                    apiModel.UriParameters.Add(uriParameter);
                }
            }
            else if (parameterDescriptor != null)
            {
                // Simple or converter-bindable type: one entry, annotated
                // with its required-ness and any declared default value.
                ParameterDescription uriParameter =
                    AddParameterDescription(apiModel, apiParameter, typeDescription);

                if (!parameterDescriptor.IsOptional)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                }

                object defaultValue = parameterDescriptor.DefaultValue;
                if (defaultValue != null)
                {
                    uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                }
            }
            else
            {
                // NOTE(review): this assert is trivially true in this branch
                // (we only get here when parameterDescriptor is null); the
                // message itself suggests it was flagged for review.
                Debug.Assert(parameterDescriptor == null, "The reason for the assert?!");

                // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                // when source is FromUri. Ignored in request model and among resource parameters but listed
                // as a simple string here.
                ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                AddParameterDescription(apiModel, apiParameter, modelDescription);
            }
        }
    }
}
/// <summary>
/// Determines whether a type can be bound from a URI string, i.e. whether its
/// registered <see cref="TypeDescriptor"/> converter accepts string input.
/// A null type is never bindable.
/// </summary>
private static bool IsBindableWithTypeConverter(Type parameterType)
{
    return parameterType != null
        && TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
}
/// <summary>
/// Creates a <see cref="ParameterDescription"/> from an API parameter, appends it to
/// the model's UriParameters collection and returns it so callers can annotate it.
/// </summary>
private static ParameterDescription AddParameterDescription(
    HelpPageApiModel apiModel,
    ApiParameterDescription apiParameter,
    ModelDescription typeDescription)
{
    ParameterDescription description = new ParameterDescription
    {
        Name = apiParameter.Name,
        Documentation = apiParameter.Documentation,
        TypeDescription = typeDescription,
    };

    apiModel.UriParameters.Add(description);
    return description;
}
/// <summary>
/// Fills in the request model description for the API: either from a FromBody
/// parameter (including its documentation), or — for actions taking a raw
/// <see cref="HttpRequestMessage"/> — from the type resolved by the sample generator.
/// </summary>
private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
{
    ApiDescription apiDescription = apiModel.ApiDescription;

    foreach (ApiParameterDescription parameter in apiDescription.ParameterDescriptions)
    {
        if (parameter.Source == ApiParameterSource.FromBody)
        {
            // Body parameter: describe its declared type and carry over its docs.
            Type bodyType = parameter.ParameterDescriptor.ParameterType;
            apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(bodyType);
            apiModel.RequestDocumentation = parameter.Documentation;
        }
        else if (parameter.ParameterDescriptor != null &&
            parameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
        {
            // Raw request message: ask the sample generator which payload type it represents.
            Type resolvedType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            if (resolvedType != null)
            {
                apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(resolvedType);
            }
        }
    }
}
/// <summary>
/// Describes the API's response resource, preferring the documented response type
/// over the declared one and skipping void (no-content) actions.
/// </summary>
private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
{
    ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
    Type responseType = response.ResponseType ?? response.DeclaredType;
    if (responseType == null || responseType == typeof(void))
    {
        return; // Nothing to describe for actions without a response body.
    }
    apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
}
/// <summary>
/// Generates request and response samples for the API. Sample-generation failures are
/// not fatal: any exception is unwrapped and recorded in the model's ErrorMessages.
/// </summary>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
{
    try
    {
        foreach (var requestSample in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
        {
            apiModel.SampleRequests.Add(requestSample.Key, requestSample.Value);
            LogInvalidSampleAsError(apiModel, requestSample.Value);
        }

        foreach (var responseSample in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
        {
            apiModel.SampleResponses.Add(responseSample.Key, responseSample.Value);
            LogInvalidSampleAsError(apiModel, responseSample.Value);
        }
    }
    catch (Exception e)
    {
        // Surface the failure on the help page rather than breaking model generation.
        apiModel.ErrorMessages.Add(
            string.Format(
                CultureInfo.CurrentCulture,
                "An exception has occurred while generating the sample. Exception message: {0}",
                HelpPageSampleGenerator.UnwrapException(e).Message));
    }
}
/// <summary>
/// Tries to locate the API's resource (request payload) parameter: either a FromBody
/// parameter or a raw <see cref="HttpRequestMessage"/> parameter whose payload type is
/// resolved via the sample generator.
/// </summary>
/// <returns>
/// true when both a parameter description and a concrete resource type were found;
/// otherwise false, with both out parameters set to null.
/// </returns>
private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
{
    resourceType = null;
    parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
        p => p.Source == ApiParameterSource.FromBody ||
            (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

    if (parameterDescription == null)
    {
        return false;
    }

    resourceType = parameterDescription.ParameterDescriptor.ParameterType;
    if (resourceType == typeof(HttpRequestMessage))
    {
        // The action takes the raw message; the actual payload type (if any) comes
        // from the configured sample generator.
        HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
        resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
    }

    if (resourceType == null)
    {
        // Unresolvable payload type: report "not found" consistently.
        parameterDescription = null;
        return false;
    }

    return true;
}
/// <summary>
/// Creates the shared <see cref="ModelDescriptionGenerator"/> and warms it up with a
/// model description for every API's request resource type, so complex types are
/// registered before any help page is rendered.
/// </summary>
private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
{
    ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
    foreach (ApiDescription api in config.Services.GetApiExplorer().ApiDescriptions)
    {
        ApiParameterDescription parameter;
        Type resourceType;
        if (TryGetResourceParameter(api, config, out parameter, out resourceType))
        {
            modelGenerator.GetOrCreateModelDescription(resourceType);
        }
    }
    return modelGenerator;
}
/// <summary>
/// Records the error message of an <see cref="InvalidSample"/> on the model;
/// valid samples (any other object) are ignored.
/// </summary>
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    InvalidSample invalid = sample as InvalidSample;
    if (invalid == null)
    {
        return;
    }
    apiModel.ErrorMessages.Add(invalid.ErrorMessage);
}
}
}
| |
using Parse;
using Parse.Core.Internal;
using NUnit.Framework;
using Moq;
using System;
using System.Runtime.CompilerServices;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace ParseTest {
[TestFixture]
// Unit tests for ParseUser, driven entirely through mocked controllers installed on the
// ParseCorePlugins singleton. NOTE: statement order inside tests matters — tests assign
// ParseCorePlugins.Instance and then re-register the subclasses against the new plugins.
public class UserTests {
  // Register the built-in user/session subclasses before each test.
  [SetUp]
  public void SetUp() {
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
  }
  // Reset the plugin singleton so mocks don't leak between tests.
  [TearDown]
  public void TearDown() {
    ParseCorePlugins.Instance = null;
  }
  // "username" is protected on ParseUser and cannot be removed; ordinary keys can.
  [Test]
  public void TestRemoveFields() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "username", "kevin" },
        { "name", "andrew" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    Assert.Throws<ArgumentException>(() => user.Remove("username"));
    Assert.DoesNotThrow(() => user.Remove("name"));
    Assert.False(user.ContainsKey("name"));
  }
  // SessionToken reads the "sessionToken" entry from server data.
  [Test]
  public void TestSessionTokenGetter() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "username", "kevin" },
        { "sessionToken", "se551onT0k3n" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    Assert.AreEqual("se551onT0k3n", user.SessionToken);
  }
  // Username round-trips through the property setter.
  [Test]
  public void TestUsernameGetterSetter() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "username", "kevin" },
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    Assert.AreEqual("kevin", user.Username);
    user.Username = "ilya";
    Assert.AreEqual("ilya", user.Username);
  }
  // Setting Password queues a pending field operation rather than mutating state directly.
  [Test]
  public void TestPasswordGetterSetter() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "username", "kevin" },
        { "password", "hurrah" },
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    Assert.AreEqual("hurrah", user.GetState()["password"]);
    user.Password = "david";
    Assert.NotNull(user.GetCurrentOperations()["password"]);
  }
  // Email round-trips through the property setter.
  [Test]
  public void TestEmailGetterSetter() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "email", "james@parse.com" },
        { "name", "andrew" },
        { "sessionToken", "se551onT0k3n" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    Assert.AreEqual("james@parse.com", user.Email);
    user.Email = "bryan@parse.com";
    Assert.AreEqual("bryan@parse.com", user.Email);
  }
  // GetAuthData exposes the server-provided "authData" map keyed by provider name.
  [Test]
  public void TestAuthDataGetter() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "email", "james@parse.com" },
        { "authData", new Dictionary<string, object>() {
          { "facebook", new Dictionary<string, object>() {
            { "sessionToken", "none" }
          }}
        }}
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    Assert.AreEqual(1, user.GetAuthData().Count);
    Assert.IsInstanceOf<IDictionary<string, object>>(user.GetAuthData()["facebook"]);
  }
  // ParseUser.Query yields a typed query over users.
  [Test]
  public void TestGetUserQuery() {
    Assert.IsInstanceOf<ParseQuery<ParseUser>>(ParseUser.Query);
  }
  // A user that IS the current user (per the mocked controller) is authenticated.
  [Test]
  public void TestIsAuthenticated() {
    IObjectState state = new MutableObjectState {
      ObjectId = "wagimanPutraPetir",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    mockCurrentUserController.Setup(obj => obj.GetAsync(It.IsAny<CancellationToken>()))
        .Returns(Task.FromResult(user));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      CurrentUserController = mockCurrentUserController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    Assert.True(user.IsAuthenticated);
  }
  // A different user than the current one is NOT authenticated, even with the same token.
  [Test]
  public void TestIsAuthenticatedWithOtherParseUser() {
    IObjectState state = new MutableObjectState {
      ObjectId = "wagimanPutraPetir",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    IObjectState state2 = new MutableObjectState {
      ObjectId = "wagimanPutraPetir2",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    ParseUser user2 = ParseObjectExtensions.FromState<ParseUser>(state2, "_User");
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    mockCurrentUserController.Setup(obj => obj.GetAsync(It.IsAny<CancellationToken>()))
        .Returns(Task.FromResult(user));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      CurrentUserController = mockCurrentUserController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    Assert.False(user2.IsAuthenticated);
  }
  // Signing up without username/password faults with InvalidOperationException.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestSignUpWithInvalidServerData() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    return user.SignUpAsync().ContinueWith(t => {
      Assert.True(t.IsFaulted);
      Assert.IsInstanceOf<InvalidOperationException>(t.Exception.InnerException);
    });
  }
  // Successful sign-up calls the controller once, strips the password and adopts the new ObjectId.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestSignUp() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" },
        { "username", "ihave" },
        { "password", "adream" }
      }
    };
    IObjectState newState = new MutableObjectState {
      ObjectId = "some0neTol4v4"
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockController = new Mock<IParseUserController>();
    mockController.Setup(obj => obj.SignUpAsync(It.IsAny<IObjectState>(),
        It.IsAny<IDictionary<string, IParseFieldOperation>>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      UserController = mockController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return user.SignUpAsync().ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockController.Verify(obj => obj.SignUpAsync(It.IsAny<IObjectState>(),
          It.IsAny<IDictionary<string, IParseFieldOperation>>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.False(user.IsDirty);
      Assert.AreEqual("ihave", user.Username);
      Assert.False(user.GetState().ContainsKey("password"));
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
    });
  }
  // LogInAsync delegates to the controller and returns a clean user built from its state.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestLogIn() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" },
        { "username", "ihave" },
        { "password", "adream" }
      }
    };
    IObjectState newState = new MutableObjectState {
      ObjectId = "some0neTol4v4"
    };
    var mockController = new Mock<IParseUserController>();
    mockController.Setup(obj => obj.LogInAsync("ihave",
        "adream",
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      UserController = mockController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return ParseUser.LogInAsync("ihave", "adream").ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockController.Verify(obj => obj.LogInAsync("ihave",
          "adream",
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      var user = t.Result;
      Assert.False(user.IsDirty);
      // The mocked state has no username, so the logged-in user's Username is null.
      Assert.Null(user.Username);
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
    });
  }
  // BecomeAsync fetches the user for a session token via GetUserAsync.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestBecome() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    var mockController = new Mock<IParseUserController>();
    mockController.Setup(obj => obj.GetUserAsync("llaKcolnu",
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(state));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      UserController = mockController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return ParseUser.BecomeAsync("llaKcolnu").ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockController.Verify(obj => obj.GetUserAsync("llaKcolnu",
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      var user = t.Result;
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
      Assert.AreEqual("llaKcolnu", user.SessionToken);
    });
  }
  // Logging out clears the current user and revokes revocable ("r:"-prefixed) session tokens.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestLogOut() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "r:llaKcolnu" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    mockCurrentUserController.Setup(obj => obj.GetAsync(It.IsAny<CancellationToken>()))
        .Returns(Task.FromResult(user));
    var mockSessionController = new Mock<IParseSessionController>();
    mockSessionController.Setup(c => c.IsRevocableSessionToken(It.IsAny<string>())).Returns(true);
    ParseCorePlugins.Instance = new ParseCorePlugins {
      CurrentUserController = mockCurrentUserController.Object,
      SessionController = mockSessionController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return ParseUser.LogOutAsync().ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockCurrentUserController.Verify(obj => obj.LogOutAsync(It.IsAny<CancellationToken>()), Times.Exactly(1));
      mockSessionController.Verify(obj => obj.RevokeAsync("r:llaKcolnu", It.IsAny<CancellationToken>()), Times.Exactly(1));
    });
  }
  // CurrentUser surfaces whatever the current-user controller returns.
  [Test]
  public void TestCurrentUser() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    mockCurrentUserController.Setup(obj => obj.GetAsync(It.IsAny<CancellationToken>()))
        .Returns(Task.FromResult(user));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      CurrentUserController = mockCurrentUserController.Object,
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    Assert.AreEqual(user, ParseUser.CurrentUser);
  }
  // With an unconfigured controller (returns default), CurrentUser is null.
  [Test]
  public void TestCurrentUserWithEmptyResult() {
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    ParseCorePlugins.Instance = new ParseCorePlugins {
      CurrentUserController = mockCurrentUserController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    Assert.Null(ParseUser.CurrentUser);
  }
  // Upgrading to a revocable session swaps in the "r:"-prefixed token from the controller.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestRevocableSession() {
    IObjectState state = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    IObjectState newState = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "r:llaKcolnu" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockSessionController = new Mock<IParseSessionController>();
    mockSessionController.Setup(obj => obj.UpgradeToRevocableSessionAsync("llaKcolnu",
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      SessionController = mockSessionController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return user.UpgradeToRevocableSessionAsync(CancellationToken.None).ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockSessionController.Verify(obj => obj.UpgradeToRevocableSessionAsync("llaKcolnu",
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.AreEqual("r:llaKcolnu", user.SessionToken);
    });
  }
  // RequestPasswordResetAsync forwards the email to the user controller exactly once.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestRequestPasswordReset() {
    var mockController = new Mock<IParseUserController>();
    ParseCorePlugins.Instance = new ParseCorePlugins {
      UserController = mockController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return ParseUser.RequestPasswordResetAsync("gogo@parse.com").ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockController.Verify(obj => obj.RequestPasswordResetAsync("gogo@parse.com",
          It.IsAny<CancellationToken>()), Times.Exactly(1));
    });
  }
  // Saving merges the controller result, keeps password out of state and clears dirtiness.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestUserSave() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" },
        { "username", "ihave" },
        { "password", "adream" }
      }
    };
    IObjectState newState = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "Alliance", "rekt" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockObjectController = new Mock<IParseObjectController>();
    mockObjectController.Setup(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
        It.IsAny<IDictionary<string, IParseFieldOperation>>(),
        It.IsAny<string>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      ObjectController = mockObjectController.Object,
      CurrentUserController = new Mock<IParseCurrentUserController>().Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    user["Alliance"] = "rekt";
    return user.SaveAsync().ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockObjectController.Verify(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
          It.IsAny<IDictionary<string, IParseFieldOperation>>(),
          It.IsAny<string>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.False(user.IsDirty);
      Assert.AreEqual("ihave", user.Username);
      Assert.False(user.GetState().ContainsKey("password"));
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
      Assert.AreEqual("rekt", user["Alliance"]);
    });
  }
  // Fetch refreshes server state but leaves unsaved local edits pending (user stays dirty).
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestUserFetch() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" },
        { "username", "ihave" },
        { "password", "adream" }
      }
    };
    IObjectState newState = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "Alliance", "rekt" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockObjectController = new Mock<IParseObjectController>();
    mockObjectController.Setup(obj => obj.FetchAsync(It.IsAny<IObjectState>(),
        It.IsAny<string>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      ObjectController = mockObjectController.Object,
      CurrentUserController = new Mock<IParseCurrentUserController>().Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    user["Alliance"] = "rekt";
    return user.FetchAsync().ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockObjectController.Verify(obj => obj.FetchAsync(It.IsAny<IObjectState>(),
          It.IsAny<string>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.True(user.IsDirty);
      Assert.AreEqual("ihave", user.Username);
      Assert.True(user.GetState().ContainsKey("password"));
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
      Assert.AreEqual("rekt", user["Alliance"]);
    });
  }
  // Linking adds provider auth data and persists via a single SaveAsync call.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestLink() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    IObjectState newState = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "garden", "ofWords" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockObjectController = new Mock<IParseObjectController>();
    mockObjectController.Setup(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
        It.IsAny<IDictionary<string, IParseFieldOperation>>(),
        It.IsAny<string>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      ObjectController = mockObjectController.Object,
      CurrentUserController = new Mock<IParseCurrentUserController>().Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return user.LinkWithAsync("parse", new Dictionary<string, object>(), CancellationToken.None).ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockObjectController.Verify(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
          It.IsAny<IDictionary<string, IParseFieldOperation>>(),
          It.IsAny<string>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.False(user.IsDirty);
      Assert.NotNull(user.GetAuthData());
      Assert.NotNull(user.GetAuthData()["parse"]);
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
      Assert.AreEqual("ofWords", user["garden"]);
    });
  }
  // Unlinking the CURRENT user removes the provider entry from authData entirely.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestUnlink() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" },
        { "authData", new Dictionary<string, object> {
          { "parse", new Dictionary<string, object>() }
        }}
      }
    };
    IObjectState newState = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "garden", "ofWords" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockObjectController = new Mock<IParseObjectController>();
    mockObjectController.Setup(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
        It.IsAny<IDictionary<string, IParseFieldOperation>>(),
        It.IsAny<string>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    mockCurrentUserController.Setup(obj => obj.IsCurrent(user)).Returns(true);
    ParseCorePlugins.Instance = new ParseCorePlugins {
      ObjectController = mockObjectController.Object,
      CurrentUserController = mockCurrentUserController.Object,
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return user.UnlinkFromAsync("parse", CancellationToken.None).ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockObjectController.Verify(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
          It.IsAny<IDictionary<string, IParseFieldOperation>>(),
          It.IsAny<string>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.False(user.IsDirty);
      Assert.NotNull(user.GetAuthData());
      Assert.False(user.GetAuthData().ContainsKey("parse"));
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
      Assert.AreEqual("ofWords", user["garden"]);
    });
  }
  // Unlinking a NON-current user only nulls the provider entry (key stays present).
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestUnlinkNonCurrentUser() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" },
        { "authData", new Dictionary<string, object> {
          { "parse", new Dictionary<string, object>() }
        }}
      }
    };
    IObjectState newState = new MutableObjectState {
      ServerData = new Dictionary<string, object>() {
        { "garden", "ofWords" }
      }
    };
    ParseUser user = ParseObjectExtensions.FromState<ParseUser>(state, "_User");
    var mockObjectController = new Mock<IParseObjectController>();
    mockObjectController.Setup(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
        It.IsAny<IDictionary<string, IParseFieldOperation>>(),
        It.IsAny<string>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(newState));
    var mockCurrentUserController = new Mock<IParseCurrentUserController>();
    mockCurrentUserController.Setup(obj => obj.IsCurrent(user)).Returns(false);
    ParseCorePlugins.Instance = new ParseCorePlugins {
      ObjectController = mockObjectController.Object,
      CurrentUserController = mockCurrentUserController.Object,
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return user.UnlinkFromAsync("parse", CancellationToken.None).ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockObjectController.Verify(obj => obj.SaveAsync(It.IsAny<IObjectState>(),
          It.IsAny<IDictionary<string, IParseFieldOperation>>(),
          It.IsAny<string>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      Assert.False(user.IsDirty);
      Assert.NotNull(user.GetAuthData());
      Assert.True(user.GetAuthData().ContainsKey("parse"));
      Assert.Null(user.GetAuthData()["parse"]);
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
      Assert.AreEqual("ofWords", user["garden"]);
    });
  }
  // Third-party login returns a user carrying the provider's auth data.
  [Test]
  [AsyncStateMachine(typeof(UserTests))]
  public Task TestLogInWith() {
    IObjectState state = new MutableObjectState {
      ObjectId = "some0neTol4v4",
      ServerData = new Dictionary<string, object>() {
        { "sessionToken", "llaKcolnu" }
      }
    };
    var mockController = new Mock<IParseUserController>();
    mockController.Setup(obj => obj.LogInAsync("parse",
        It.IsAny<IDictionary<string, object>>(),
        It.IsAny<CancellationToken>())).Returns(Task.FromResult(state));
    ParseCorePlugins.Instance = new ParseCorePlugins {
      UserController = mockController.Object
    };
    ParseObject.RegisterSubclass<ParseUser>();
    ParseObject.RegisterSubclass<ParseSession>();
    return ParseUserExtensions.LogInWithAsync("parse", new Dictionary<string, object>(), CancellationToken.None).ContinueWith(t => {
      Assert.False(t.IsFaulted);
      Assert.False(t.IsCanceled);
      mockController.Verify(obj => obj.LogInAsync("parse",
          It.IsAny<IDictionary<string, object>>(),
          It.IsAny<CancellationToken>()), Times.Exactly(1));
      var user = t.Result;
      Assert.NotNull(user.GetAuthData());
      Assert.NotNull(user.GetAuthData()["parse"]);
      Assert.AreEqual("some0neTol4v4", user.ObjectId);
    });
  }
  // Server-managed keys reject every mutation path; username/password remain writable.
  [Test]
  public void TestImmutableKeys() {
    ParseUser user = new ParseUser();
    string[] immutableKeys = new string[] {
      "sessionToken", "isNew"
    };
    foreach (var key in immutableKeys) {
      Assert.Throws<InvalidOperationException>(() =>
        user[key] = "1234567890"
      );
      Assert.Throws<InvalidOperationException>(() =>
        user.Add(key, "1234567890")
      );
      Assert.Throws<InvalidOperationException>(() =>
        user.AddRangeUniqueToList(key, new string[] { "1234567890" })
      );
      Assert.Throws<InvalidOperationException>(() =>
        user.Remove(key)
      );
      Assert.Throws<InvalidOperationException>(() =>
        user.RemoveAllFromList(key, new string[] { "1234567890" })
      );
    }
    // Other special keys should be good
    user["username"] = "username";
    user["password"] = "password";
  }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Headers;
using System.Text;
using Xunit;
namespace System.Net.Http.Tests
{
public class MediaTypeHeaderValueTest
{
[Fact]
public void Ctor_MediaTypeNull_Throw()
{
    // A null media type is rejected with an ArgumentException naming "mediaType".
    AssertExtensions.Throws<ArgumentException>("mediaType", () => { new MediaTypeHeaderValue(null); });
}
[Fact]
public void Ctor_MediaTypeEmpty_Throw()
{
    // An empty media type is rejected just like null.
    // null and empty should be treated the same. So we also throw for empty strings.
    AssertExtensions.Throws<ArgumentException>("mediaType", () => { new MediaTypeHeaderValue(string.Empty); });
}
[Fact]
public void Ctor_MediaTypeInvalidFormat_ThrowFormatException()
{
    // When adding values using strongly typed objects, no leading/trailing LWS (whitespace) are allowed.
    string[] invalidMediaTypes =
    {
        " text/plain ",
        "text / plain",
        "text/ plain",
        "text /plain",
        "text/plain ",
        " text/plain",
        "te xt/plain",
        "te=xt/plain",
        "te\u00E4xt/plain",
        "text/pl\u00E4in",
        "text",
        "\"text/plain\"",
        "text/plain; charset=utf-8; ",
        "text/plain;",
        "text/plain;charset=utf-8", // ctor takes only media-type name, no parameters
    };

    foreach (string invalidMediaType in invalidMediaTypes)
    {
        AssertFormatException(invalidMediaType);
    }
}
[Fact]
public void Ctor_MediaTypeValidFormat_SuccessfullyCreated()
{
    // A bare, well-formed media type parses with no parameters and no charset.
    var headerValue = new MediaTypeHeaderValue("text/plain");

    Assert.Equal("text/plain", headerValue.MediaType);
    Assert.Equal(0, headerValue.Parameters.Count);
    Assert.Null(headerValue.CharSet);
}
[Fact]
public void Parameters_AddNull_Throw()
{
    // The Parameters collection rejects null entries.
    var headerValue = new MediaTypeHeaderValue("text/plain");
    Assert.Throws<ArgumentNullException>(() => headerValue.Parameters.Add(null));
}
[Fact]
public void MediaType_SetAndGetMediaType_MatchExpectations()
{
    // MediaType is read/write and round-trips assigned values.
    var headerValue = new MediaTypeHeaderValue("text/plain");
    Assert.Equal("text/plain", headerValue.MediaType);

    headerValue.MediaType = "application/xml";
    Assert.Equal("application/xml", headerValue.MediaType);
}
[Fact]
public void CharSet_SetCharSetAndValidateObject_ParametersEntryForCharSetAdded()
{
    var headerValue = new MediaTypeHeaderValue("text/plain");

    // Setting CharSet materializes a "charset" entry in the Parameters collection.
    headerValue.CharSet = "mycharset";
    Assert.Equal("mycharset", headerValue.CharSet);
    Assert.Equal(1, headerValue.Parameters.Count);
    Assert.Equal("charset", headerValue.Parameters.First().Name);

    // Clearing CharSet removes that parameter again.
    headerValue.CharSet = null;
    Assert.Null(headerValue.CharSet);
    Assert.Equal(0, headerValue.Parameters.Count);
    headerValue.CharSet = null; // It's OK to set it again to null; no exception.
}
[Fact]
public void CharSet_AddCharSetParameterThenUseProperty_ParametersEntryIsOverwritten()
{
    MediaTypeHeaderValue mediaType = new MediaTypeHeaderValue("text/plain");
    // Note that uppercase letters are used. Comparison should happen case-insensitive.
    NameValueHeaderValue charset = new NameValueHeaderValue("CHARSET", "old_charset");
    mediaType.Parameters.Add(charset);
    Assert.Equal(1, mediaType.Parameters.Count);
    Assert.Equal("CHARSET", mediaType.Parameters.First().Name);
    // Setting CharSet reuses the existing (case-insensitively matched) parameter entry:
    // its value is replaced but its original name casing is preserved.
    mediaType.CharSet = "new_charset";
    Assert.Equal("new_charset", mediaType.CharSet);
    Assert.Equal(1, mediaType.Parameters.Count);
    Assert.Equal("CHARSET", mediaType.Parameters.First().Name);
    // Removing the parameter object directly clears CharSet as well.
    mediaType.Parameters.Remove(charset);
    Assert.Null(mediaType.CharSet);
}
        [Fact]
        public void ToString_UseDifferentMediaTypes_AllSerializedCorrectly()
        {
            // ToString must serialize the media type followed by "; name=value"
            // for each parameter, in parameter order.
            MediaTypeHeaderValue mediaType = new MediaTypeHeaderValue("text/plain");
            Assert.Equal("text/plain", mediaType.ToString());
            mediaType.CharSet = "utf-8";
            Assert.Equal("text/plain; charset=utf-8", mediaType.ToString());
            mediaType.Parameters.Add(new NameValueHeaderValue("custom", "\"custom value\""));
            Assert.Equal("text/plain; charset=utf-8; custom=\"custom value\"", mediaType.ToString());
            // Clearing the charset drops only that parameter from the output.
            mediaType.CharSet = null;
            Assert.Equal("text/plain; custom=\"custom value\"", mediaType.ToString());
        }
        [Fact]
        public void GetHashCode_UseMediaTypeWithAndWithoutParameters_SameOrDifferentHashCodes()
        {
            // Hash codes must differ when parameters differ, but must ignore the
            // casing of the media type, parameter names, and the charset value.
            MediaTypeHeaderValue mediaType1 = new MediaTypeHeaderValue("text/plain");
            MediaTypeHeaderValue mediaType2 = new MediaTypeHeaderValue("text/plain");
            mediaType2.CharSet = "utf-8";
            MediaTypeHeaderValue mediaType3 = new MediaTypeHeaderValue("text/plain");
            mediaType3.Parameters.Add(new NameValueHeaderValue("name", "value"));
            MediaTypeHeaderValue mediaType4 = new MediaTypeHeaderValue("TEXT/plain");
            MediaTypeHeaderValue mediaType5 = new MediaTypeHeaderValue("TEXT/plain");
            mediaType5.Parameters.Add(new NameValueHeaderValue("CHARSET", "UTF-8"));
            // Different parameter sets => different hashes.
            Assert.NotEqual(mediaType1.GetHashCode(), mediaType2.GetHashCode());
            Assert.NotEqual(mediaType1.GetHashCode(), mediaType3.GetHashCode());
            Assert.NotEqual(mediaType2.GetHashCode(), mediaType3.GetHashCode());
            // Case-insensitive equality => equal hashes.
            Assert.Equal(mediaType1.GetHashCode(), mediaType4.GetHashCode());
            Assert.Equal(mediaType2.GetHashCode(), mediaType5.GetHashCode());
        }
        [Fact]
        public void Equals_UseMediaTypeWithAndWithoutParameters_EqualOrNotEqualNoExceptions()
        {
            // Equals must compare the media type and the full parameter set,
            // case-insensitively, and never throw (including for null).
            MediaTypeHeaderValue mediaType1 = new MediaTypeHeaderValue("text/plain");
            MediaTypeHeaderValue mediaType2 = new MediaTypeHeaderValue("text/plain");
            mediaType2.CharSet = "utf-8";
            MediaTypeHeaderValue mediaType3 = new MediaTypeHeaderValue("text/plain");
            mediaType3.Parameters.Add(new NameValueHeaderValue("name", "value"));
            MediaTypeHeaderValue mediaType4 = new MediaTypeHeaderValue("TEXT/plain");
            MediaTypeHeaderValue mediaType5 = new MediaTypeHeaderValue("TEXT/plain");
            mediaType5.Parameters.Add(new NameValueHeaderValue("CHARSET", "UTF-8"));
            MediaTypeHeaderValue mediaType6 = new MediaTypeHeaderValue("TEXT/plain");
            mediaType6.Parameters.Add(new NameValueHeaderValue("CHARSET", "UTF-8"));
            mediaType6.Parameters.Add(new NameValueHeaderValue("custom", "value"));
            MediaTypeHeaderValue mediaType7 = new MediaTypeHeaderValue("text/other");
            Assert.False(mediaType1.Equals(mediaType2), "No params vs. charset.");
            Assert.False(mediaType2.Equals(mediaType1), "charset vs. no params.");
            Assert.False(mediaType1.Equals(null), "No params vs. <null>.");
            Assert.False(mediaType1.Equals(mediaType3), "No params vs. custom param.");
            Assert.False(mediaType2.Equals(mediaType3), "charset vs. custom param.");
            Assert.True(mediaType1.Equals(mediaType4), "Different casing.");
            Assert.True(mediaType2.Equals(mediaType5), "Different casing in charset.");
            Assert.False(mediaType5.Equals(mediaType6), "charset vs. custom param.");
            Assert.False(mediaType1.Equals(mediaType7), "text/plain vs. text/other.");
        }
        [Fact]
        public void Clone_Call_CloneFieldsMatchSourceFields()
        {
            // ICloneable.Clone must deep-copy the media type and every parameter,
            // verified with zero, one, and two parameters on the source.
            MediaTypeHeaderValue source = new MediaTypeHeaderValue("application/xml");
            MediaTypeHeaderValue clone = (MediaTypeHeaderValue)((ICloneable)source).Clone();
            Assert.Equal(source.MediaType, clone.MediaType);
            Assert.Equal(0, clone.Parameters.Count);
            source.CharSet = "utf-8";
            clone = (MediaTypeHeaderValue)((ICloneable)source).Clone();
            Assert.Equal(source.MediaType, clone.MediaType);
            Assert.Equal("utf-8", clone.CharSet);
            Assert.Equal(1, clone.Parameters.Count);
            source.Parameters.Add(new NameValueHeaderValue("custom", "customValue"));
            clone = (MediaTypeHeaderValue)((ICloneable)source).Clone();
            Assert.Equal(source.MediaType, clone.MediaType);
            Assert.Equal("utf-8", clone.CharSet);
            Assert.Equal(2, clone.Parameters.Count);
            // Parameter order must be preserved by the clone.
            Assert.Equal("custom", clone.Parameters.ElementAt(1).Name);
            Assert.Equal("customValue", clone.Parameters.ElementAt(1).Value);
        }
        [Fact]
        public void GetMediaTypeLength_DifferentValidScenarios_AllReturnNonZero()
        {
            // GetMediaTypeLength returns the number of characters consumed starting
            // at the given index, and produces the parsed value via the creator
            // callback. Parsing stops before a ',' that separates list members.
            MediaTypeHeaderValue result = null;
            // Consumes "text/plain " (incl. trailing space), stops at the comma.
            Assert.Equal(11, MediaTypeHeaderValue.GetMediaTypeLength("text/plain , other/charset", 0,
                DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal(0, result.Parameters.Count);
            Assert.Equal(10, MediaTypeHeaderValue.GetMediaTypeLength("text/plain", 0, DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal(0, result.Parameters.Count);
            Assert.Equal(30, MediaTypeHeaderValue.GetMediaTypeLength("text/plain; charset=iso-8859-1", 0,
                DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal("iso-8859-1", result.CharSet);
            Assert.Equal(1, result.Parameters.Count);
            // Non-zero start index: the leading space is skipped by the caller.
            Assert.Equal(38, MediaTypeHeaderValue.GetMediaTypeLength(" text/plain; custom=value;charset=utf-8",
                1, DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal("utf-8", result.CharSet);
            Assert.Equal(2, result.Parameters.Count);
            // A parameter without a value ("custom") is valid and parses as null.
            Assert.Equal(18, MediaTypeHeaderValue.GetMediaTypeLength(" text/plain; custom, next/mediatype",
                1, DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Null(result.CharSet);
            Assert.Equal(1, result.Parameters.Count);
            Assert.Equal("custom", result.Parameters.ElementAt(0).Name);
            Assert.Null(result.Parameters.ElementAt(0).Value);
            // Optional whitespace (incl. a CRLF line fold) around tokens is consumed.
            Assert.Equal(48, MediaTypeHeaderValue.GetMediaTypeLength(
                "text / plain ; custom =\r\n \"x\" ; charset = utf-8 , next/mediatype", 0, DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal("utf-8", result.CharSet);
            Assert.Equal(2, result.Parameters.Count);
            Assert.Equal("custom", result.Parameters.ElementAt(0).Name);
            Assert.Equal("\"x\"", result.Parameters.ElementAt(0).Value);
            Assert.Equal("charset", result.Parameters.ElementAt(1).Name);
            Assert.Equal("utf-8", result.Parameters.ElementAt(1).Value);
            // Same input with no whitespace at all.
            Assert.Equal(35, MediaTypeHeaderValue.GetMediaTypeLength(
                "text/plain;custom=\"x\";charset=utf-8,next/mediatype", 0, DummyCreator, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal("utf-8", result.CharSet);
            Assert.Equal(2, result.Parameters.Count);
            Assert.Equal("custom", result.Parameters.ElementAt(0).Name);
            Assert.Equal("\"x\"", result.Parameters.ElementAt(0).Value);
            Assert.Equal("charset", result.Parameters.ElementAt(1).Name);
            Assert.Equal("utf-8", result.Parameters.ElementAt(1).Value);
        }
        [Fact]
        public void GetMediaTypeLength_UseCustomCreator_CustomCreatorUsedToCreateMediaTypeInstance()
        {
            // The creator delegate controls the concrete type of the parsed result;
            // both the parameterless and the parameterized code paths must use it.
            MediaTypeHeaderValue result = null;

            // Path: media-type only
            Assert.Equal(10, MediaTypeHeaderValue.GetMediaTypeLength("text/plain", 0,
                () => { return new MediaTypeWithQualityHeaderValue(); }, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal(0, result.Parameters.Count);
            Assert.IsType<MediaTypeWithQualityHeaderValue>(result);

            // Path: media-type and parameters
            Assert.Equal(25, MediaTypeHeaderValue.GetMediaTypeLength("text/plain; charset=utf-8", 0,
                () => { return new MediaTypeWithQualityHeaderValue(); }, out result));
            Assert.Equal("text/plain", result.MediaType);
            Assert.Equal(1, result.Parameters.Count);
            Assert.Equal("utf-8", result.CharSet);
            Assert.IsType<MediaTypeWithQualityHeaderValue>(result);
        }
[Fact]
public void GetMediaTypeLength_DifferentInvalidScenarios_AllReturnZero()
{
MediaTypeHeaderValue result = null;
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength(" text/plain", 0, DummyCreator, out result));
Assert.Null(result);
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength("text/plain;", 0, DummyCreator, out result));
Assert.Null(result);
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength("text/plain;name=", 0, DummyCreator, out result));
Assert.Null(result);
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength("text/plain;name=value;", 0, DummyCreator, out result));
Assert.Null(result);
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength("text/plain;", 0, DummyCreator, out result));
Assert.Null(result);
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength(null, 0, DummyCreator, out result));
Assert.Null(result);
Assert.Equal(0, MediaTypeHeaderValue.GetMediaTypeLength(string.Empty, 0, DummyCreator, out result));
Assert.Null(result);
}
        [Fact]
        public void Parse_SetOfValidValueStrings_ParsedCorrectly()
        {
            // Parse must tolerate optional leading whitespace (incl. CRLF folding)
            // and whitespace around the '/' and '=' separators.
            MediaTypeHeaderValue expected = new MediaTypeHeaderValue("text/plain");
            CheckValidParse("\r\n text/plain ", expected);
            CheckValidParse("text/plain", expected);

            // We don't have to test all possible input strings, since most of the pieces are handled by other parsers.
            // The purpose of this test is to verify that these other parsers are combined correctly to build a
            // media-type parser.
            expected.CharSet = "utf-8";
            CheckValidParse("\r\n text / plain ; charset = utf-8 ", expected);
            CheckValidParse(" text/plain;charset=utf-8", expected);
        }
[Fact]
public void Parse_SetOfInvalidValueStrings_Throws()
{
CheckInvalidParse("");
CheckInvalidParse(" ");
CheckInvalidParse(null);
CheckInvalidParse("text/plain\u4F1A");
CheckInvalidParse("text/plain ,");
CheckInvalidParse("text/plain,");
CheckInvalidParse("text/plain; charset=utf-8 ,");
CheckInvalidParse("text/plain; charset=utf-8,");
CheckInvalidParse("textplain");
CheckInvalidParse("text/");
}
        [Fact]
        public void TryParse_SetOfValidValueStrings_ParsedCorrectly()
        {
            // TryParse must accept the same lenient whitespace handling as Parse.
            MediaTypeHeaderValue expected = new MediaTypeHeaderValue("text/plain");
            CheckValidTryParse("\r\n text/plain ", expected);
            CheckValidTryParse("text/plain", expected);

            // We don't have to test all possible input strings, since most of the pieces are handled by other parsers.
            // The purpose of this test is to verify that these other parsers are combined correctly to build a
            // media-type parser.
            expected.CharSet = "utf-8";
            CheckValidTryParse("\r\n text / plain ; charset = utf-8 ", expected);
            CheckValidTryParse(" text/plain;charset=utf-8", expected);
        }
[Fact]
public void TryParse_SetOfInvalidValueStrings_ReturnsFalse()
{
CheckInvalidTryParse("");
CheckInvalidTryParse(" ");
CheckInvalidTryParse(null);
CheckInvalidTryParse("text/plain\u4F1A");
CheckInvalidTryParse("text/plain ,");
CheckInvalidTryParse("text/plain,");
CheckInvalidTryParse("text/plain; charset=utf-8 ,");
CheckInvalidTryParse("text/plain; charset=utf-8,");
CheckInvalidTryParse("textplain");
CheckInvalidTryParse("text/");
}
#region Helper methods
private void CheckValidParse(string input, MediaTypeHeaderValue expectedResult)
{
MediaTypeHeaderValue result = MediaTypeHeaderValue.Parse(input);
Assert.Equal(expectedResult, result);
}
private void CheckInvalidParse(string input)
{
Assert.Throws<FormatException>(() => { MediaTypeHeaderValue.Parse(input); });
}
private void CheckValidTryParse(string input, MediaTypeHeaderValue expectedResult)
{
MediaTypeHeaderValue result = null;
Assert.True(MediaTypeHeaderValue.TryParse(input, out result));
Assert.Equal(expectedResult, result);
}
private void CheckInvalidTryParse(string input)
{
MediaTypeHeaderValue result = null;
Assert.False(MediaTypeHeaderValue.TryParse(input, out result));
Assert.Null(result);
}
private static void AssertFormatException(string mediaType)
{
Assert.Throws<FormatException>(() => { new MediaTypeHeaderValue(mediaType); });
}
        // Default creator delegate handed to GetMediaTypeLength by tests that do
        // not care about the concrete result type.
        private static MediaTypeHeaderValue DummyCreator()
        {
            return new MediaTypeHeaderValue();
        }
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
#region StyleCop Suppression - generated code
using System;
using System.ComponentModel;
using System.Windows;
using System.Windows.Automation.Peers;
using System.Windows.Controls;
using System.Windows.Input;
namespace Microsoft.Management.UI.Internal
{
/// <summary>
/// Interaction logic for ManagementList.
/// </summary>
[TemplatePart(Name="PART_ViewManager", Type=typeof(ListOrganizer))]
[TemplatePart(Name="PART_ViewSaver", Type=typeof(PickerBase))]
[Localizability(LocalizationCategory.None)]
partial class ManagementList
{
        //
        // Fields
        //

        // Backing fields for the control's named template parts; presumably
        // resolved from PART_ViewManager / PART_ViewSaver when the template is
        // applied (see the TemplatePart attributes on the class) — resolution
        // happens outside this generated file, so confirm in the hand-written half.
        private ListOrganizer viewManager;
        private PickerBase viewSaver;
        //
        // ViewsChanged RoutedEvent
        //
        /// <summary>
        /// Identifies the ViewsChanged RoutedEvent.
        /// </summary>
        public static readonly RoutedEvent ViewsChangedEvent = EventManager.RegisterRoutedEvent("ViewsChanged",RoutingStrategy.Bubble,typeof(RoutedEventHandler),typeof(ManagementList));

        /// <summary>
        /// Occurs when any of this instance's views change.
        /// </summary>
        public event RoutedEventHandler ViewsChanged
        {
            // Standard WPF routed-event accessor pattern: handlers are stored in
            // the element's handler store via AddHandler/RemoveHandler instead of
            // a private delegate field.
            add
            {
                AddHandler(ViewsChangedEvent,value);
            }
            remove
            {
                RemoveHandler(ViewsChangedEvent,value);
            }
        }
        //
        // ClearFilter routed command
        //
        // NOTE(generated): each routed command below follows the same generated
        // pattern — a static CanExecute/Executed handler casts the sender to
        // ManagementList and forwards to a protected virtual OnXxx method, which
        // in turn delegates to a partial method implemented in the hand-written
        // half of this partial class. Do not hand-edit; the generator will
        // overwrite changes.
        /// <summary>
        /// Informs the ManagementList that it should clear the filter that is applied.
        /// </summary>
        public static readonly RoutedCommand ClearFilterCommand = new RoutedCommand("ClearFilter",typeof(ManagementList));

        static private void ClearFilterCommand_CommandCanExecute(object sender, CanExecuteRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnClearFilterCanExecute( e );
        }

        static private void ClearFilterCommand_CommandExecuted(object sender, ExecutedRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnClearFilterExecuted( e );
        }

        /// <summary>
        /// Called to determine if ClearFilter can execute.
        /// </summary>
        protected virtual void OnClearFilterCanExecute(CanExecuteRoutedEventArgs e)
        {
            OnClearFilterCanExecuteImplementation(e);
        }

        partial void OnClearFilterCanExecuteImplementation(CanExecuteRoutedEventArgs e);

        /// <summary>
        /// Called when ClearFilter executes.
        /// </summary>
        /// <remarks>
        /// Informs the ManagementList that it should clear the filter that is applied.
        /// </remarks>
        protected virtual void OnClearFilterExecuted(ExecutedRoutedEventArgs e)
        {
            OnClearFilterExecutedImplementation(e);
        }

        partial void OnClearFilterExecutedImplementation(ExecutedRoutedEventArgs e);

        //
        // SaveView routed command
        //
        /// <summary>
        /// Informs the PickerBase that it should close the dropdown.
        /// </summary>
        public static readonly RoutedCommand SaveViewCommand = new RoutedCommand("SaveView",typeof(ManagementList));

        static private void SaveViewCommand_CommandCanExecute(object sender, CanExecuteRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnSaveViewCanExecute( e );
        }

        static private void SaveViewCommand_CommandExecuted(object sender, ExecutedRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnSaveViewExecuted( e );
        }

        /// <summary>
        /// Called to determine if SaveView can execute.
        /// </summary>
        protected virtual void OnSaveViewCanExecute(CanExecuteRoutedEventArgs e)
        {
            OnSaveViewCanExecuteImplementation(e);
        }

        partial void OnSaveViewCanExecuteImplementation(CanExecuteRoutedEventArgs e);

        /// <summary>
        /// Called when SaveView executes.
        /// </summary>
        /// <remarks>
        /// Informs the PickerBase that it should close the dropdown.
        /// </remarks>
        protected virtual void OnSaveViewExecuted(ExecutedRoutedEventArgs e)
        {
            OnSaveViewExecutedImplementation(e);
        }

        partial void OnSaveViewExecutedImplementation(ExecutedRoutedEventArgs e);

        //
        // StartFilter routed command
        //
        /// <summary>
        /// Informs the ManagementList that it should apply the filter.
        /// </summary>
        public static readonly RoutedCommand StartFilterCommand = new RoutedCommand("StartFilter",typeof(ManagementList));

        static private void StartFilterCommand_CommandCanExecute(object sender, CanExecuteRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnStartFilterCanExecute( e );
        }

        static private void StartFilterCommand_CommandExecuted(object sender, ExecutedRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnStartFilterExecuted( e );
        }

        /// <summary>
        /// Called to determine if StartFilter can execute.
        /// </summary>
        protected virtual void OnStartFilterCanExecute(CanExecuteRoutedEventArgs e)
        {
            OnStartFilterCanExecuteImplementation(e);
        }

        partial void OnStartFilterCanExecuteImplementation(CanExecuteRoutedEventArgs e);

        /// <summary>
        /// Called when StartFilter executes.
        /// </summary>
        /// <remarks>
        /// Informs the ManagementList that it should apply the filter.
        /// </remarks>
        protected virtual void OnStartFilterExecuted(ExecutedRoutedEventArgs e)
        {
            OnStartFilterExecutedImplementation(e);
        }

        partial void OnStartFilterExecutedImplementation(ExecutedRoutedEventArgs e);

        //
        // StopFilter routed command
        //
        /// <summary>
        /// Informs the ManagementList that it should stop filtering that is in progress.
        /// </summary>
        public static readonly RoutedCommand StopFilterCommand = new RoutedCommand("StopFilter",typeof(ManagementList));

        static private void StopFilterCommand_CommandCanExecute(object sender, CanExecuteRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnStopFilterCanExecute( e );
        }

        static private void StopFilterCommand_CommandExecuted(object sender, ExecutedRoutedEventArgs e)
        {
            ManagementList obj = (ManagementList) sender;
            obj.OnStopFilterExecuted( e );
        }

        /// <summary>
        /// Called to determine if StopFilter can execute.
        /// </summary>
        protected virtual void OnStopFilterCanExecute(CanExecuteRoutedEventArgs e)
        {
            OnStopFilterCanExecuteImplementation(e);
        }

        partial void OnStopFilterCanExecuteImplementation(CanExecuteRoutedEventArgs e);

        /// <summary>
        /// Called when StopFilter executes.
        /// </summary>
        /// <remarks>
        /// Informs the ManagementList that it should stop filtering that is in progress.
        /// </remarks>
        protected virtual void OnStopFilterExecuted(ExecutedRoutedEventArgs e)
        {
            OnStopFilterExecutedImplementation(e);
        }

        partial void OnStopFilterExecutedImplementation(ExecutedRoutedEventArgs e);
        //
        // AddFilterRulePicker dependency property
        //
        // NOTE(generated): each dependency property below follows the same
        // generated pattern — read-only properties register a private
        // DependencyPropertyKey and expose its DependencyProperty publicly; the
        // static change callback casts the DependencyObject to ManagementList and
        // forwards to a protected virtual OnXxxChanged method, which calls a
        // partial hook and then raises the matching CLR event.
        /// <summary>
        /// Identifies the AddFilterRulePicker dependency property key.
        /// </summary>
        private static readonly DependencyPropertyKey AddFilterRulePickerPropertyKey = DependencyProperty.RegisterReadOnly( "AddFilterRulePicker", typeof(AddFilterRulePicker), typeof(ManagementList), new PropertyMetadata( null, AddFilterRulePickerProperty_PropertyChanged) );

        /// <summary>
        /// Identifies the AddFilterRulePicker dependency property.
        /// </summary>
        public static readonly DependencyProperty AddFilterRulePickerProperty = AddFilterRulePickerPropertyKey.DependencyProperty;

        /// <summary>
        /// Gets the filter rule picker.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets the filter rule picker.")]
        [Localizability(LocalizationCategory.None)]
        public AddFilterRulePicker AddFilterRulePicker
        {
            get
            {
                return (AddFilterRulePicker) GetValue(AddFilterRulePickerProperty);
            }
            private set
            {
                // Read-only DP: writes must go through the private key.
                SetValue(AddFilterRulePickerPropertyKey,value);
            }
        }

        static private void AddFilterRulePickerProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnAddFilterRulePickerChanged( new PropertyChangedEventArgs<AddFilterRulePicker>((AddFilterRulePicker)e.OldValue, (AddFilterRulePicker)e.NewValue) );
        }

        /// <summary>
        /// Occurs when AddFilterRulePicker property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<AddFilterRulePicker>> AddFilterRulePickerChanged;

        /// <summary>
        /// Called when AddFilterRulePicker property changes.
        /// </summary>
        protected virtual void OnAddFilterRulePickerChanged(PropertyChangedEventArgs<AddFilterRulePicker> e)
        {
            OnAddFilterRulePickerChangedImplementation(e);
            RaisePropertyChangedEvent(AddFilterRulePickerChanged, e);
        }

        partial void OnAddFilterRulePickerChangedImplementation(PropertyChangedEventArgs<AddFilterRulePicker> e);

        //
        // CurrentView dependency property
        //
        /// <summary>
        /// Identifies the CurrentView dependency property key.
        /// </summary>
        private static readonly DependencyPropertyKey CurrentViewPropertyKey = DependencyProperty.RegisterReadOnly( "CurrentView", typeof(StateDescriptor<ManagementList>), typeof(ManagementList), new PropertyMetadata( null, CurrentViewProperty_PropertyChanged) );

        /// <summary>
        /// Identifies the CurrentView dependency property.
        /// </summary>
        public static readonly DependencyProperty CurrentViewProperty = CurrentViewPropertyKey.DependencyProperty;

        /// <summary>
        /// Gets or sets current view.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets current view.")]
        [Localizability(LocalizationCategory.None)]
        public StateDescriptor<ManagementList> CurrentView
        {
            get
            {
                return (StateDescriptor<ManagementList>) GetValue(CurrentViewProperty);
            }
            private set
            {
                SetValue(CurrentViewPropertyKey,value);
            }
        }

        static private void CurrentViewProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnCurrentViewChanged( new PropertyChangedEventArgs<StateDescriptor<ManagementList>>((StateDescriptor<ManagementList>)e.OldValue, (StateDescriptor<ManagementList>)e.NewValue) );
        }

        /// <summary>
        /// Occurs when CurrentView property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<StateDescriptor<ManagementList>>> CurrentViewChanged;

        /// <summary>
        /// Called when CurrentView property changes.
        /// </summary>
        protected virtual void OnCurrentViewChanged(PropertyChangedEventArgs<StateDescriptor<ManagementList>> e)
        {
            OnCurrentViewChangedImplementation(e);
            RaisePropertyChangedEvent(CurrentViewChanged, e);
        }

        partial void OnCurrentViewChangedImplementation(PropertyChangedEventArgs<StateDescriptor<ManagementList>> e);

        //
        // Evaluator dependency property
        //
        /// <summary>
        /// Identifies the Evaluator dependency property.
        /// </summary>
        public static readonly DependencyProperty EvaluatorProperty = DependencyProperty.Register( "Evaluator", typeof(ItemsControlFilterEvaluator), typeof(ManagementList), new PropertyMetadata( null, EvaluatorProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets the FilterEvaluator.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets the FilterEvaluator.")]
        [Localizability(LocalizationCategory.None)]
        public ItemsControlFilterEvaluator Evaluator
        {
            get
            {
                return (ItemsControlFilterEvaluator) GetValue(EvaluatorProperty);
            }
            set
            {
                SetValue(EvaluatorProperty,value);
            }
        }

        static private void EvaluatorProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnEvaluatorChanged( new PropertyChangedEventArgs<ItemsControlFilterEvaluator>((ItemsControlFilterEvaluator)e.OldValue, (ItemsControlFilterEvaluator)e.NewValue) );
        }

        /// <summary>
        /// Occurs when Evaluator property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<ItemsControlFilterEvaluator>> EvaluatorChanged;

        /// <summary>
        /// Called when Evaluator property changes.
        /// </summary>
        protected virtual void OnEvaluatorChanged(PropertyChangedEventArgs<ItemsControlFilterEvaluator> e)
        {
            OnEvaluatorChangedImplementation(e);
            RaisePropertyChangedEvent(EvaluatorChanged, e);
        }

        partial void OnEvaluatorChangedImplementation(PropertyChangedEventArgs<ItemsControlFilterEvaluator> e);

        //
        // FilterRulePanel dependency property
        //
        /// <summary>
        /// Identifies the FilterRulePanel dependency property key.
        /// </summary>
        private static readonly DependencyPropertyKey FilterRulePanelPropertyKey = DependencyProperty.RegisterReadOnly( "FilterRulePanel", typeof(FilterRulePanel), typeof(ManagementList), new PropertyMetadata( null, FilterRulePanelProperty_PropertyChanged) );

        /// <summary>
        /// Identifies the FilterRulePanel dependency property.
        /// </summary>
        public static readonly DependencyProperty FilterRulePanelProperty = FilterRulePanelPropertyKey.DependencyProperty;

        /// <summary>
        /// Gets the filter rule panel.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets the filter rule panel.")]
        [Localizability(LocalizationCategory.None)]
        public FilterRulePanel FilterRulePanel
        {
            get
            {
                return (FilterRulePanel) GetValue(FilterRulePanelProperty);
            }
            private set
            {
                SetValue(FilterRulePanelPropertyKey,value);
            }
        }

        static private void FilterRulePanelProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnFilterRulePanelChanged( new PropertyChangedEventArgs<FilterRulePanel>((FilterRulePanel)e.OldValue, (FilterRulePanel)e.NewValue) );
        }

        /// <summary>
        /// Occurs when FilterRulePanel property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<FilterRulePanel>> FilterRulePanelChanged;

        /// <summary>
        /// Called when FilterRulePanel property changes.
        /// </summary>
        protected virtual void OnFilterRulePanelChanged(PropertyChangedEventArgs<FilterRulePanel> e)
        {
            OnFilterRulePanelChangedImplementation(e);
            RaisePropertyChangedEvent(FilterRulePanelChanged, e);
        }

        partial void OnFilterRulePanelChangedImplementation(PropertyChangedEventArgs<FilterRulePanel> e);
        //
        // IsFilterShown dependency property
        //
        // NOTE(generated): the three boolean dependency properties below use
        // BooleanBoxes (cached boxed true/false instances) to avoid boxing a new
        // object on every SetValue call.
        /// <summary>
        /// Identifies the IsFilterShown dependency property.
        /// </summary>
        public static readonly DependencyProperty IsFilterShownProperty = DependencyProperty.Register( "IsFilterShown", typeof(bool), typeof(ManagementList), new PropertyMetadata( BooleanBoxes.TrueBox, IsFilterShownProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets a value indicating whether the filter is shown.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets a value indicating whether the filter is shown.")]
        [Localizability(LocalizationCategory.None)]
        public bool IsFilterShown
        {
            get
            {
                return (bool) GetValue(IsFilterShownProperty);
            }
            set
            {
                SetValue(IsFilterShownProperty,BooleanBoxes.Box(value));
            }
        }

        static private void IsFilterShownProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnIsFilterShownChanged( new PropertyChangedEventArgs<bool>((bool)e.OldValue, (bool)e.NewValue) );
        }

        /// <summary>
        /// Occurs when IsFilterShown property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<bool>> IsFilterShownChanged;

        /// <summary>
        /// Called when IsFilterShown property changes.
        /// </summary>
        protected virtual void OnIsFilterShownChanged(PropertyChangedEventArgs<bool> e)
        {
            OnIsFilterShownChangedImplementation(e);
            RaisePropertyChangedEvent(IsFilterShownChanged, e);
        }

        partial void OnIsFilterShownChangedImplementation(PropertyChangedEventArgs<bool> e);

        //
        // IsLoadingItems dependency property
        //
        /// <summary>
        /// Identifies the IsLoadingItems dependency property.
        /// </summary>
        public static readonly DependencyProperty IsLoadingItemsProperty = DependencyProperty.Register( "IsLoadingItems", typeof(bool), typeof(ManagementList), new PropertyMetadata( BooleanBoxes.FalseBox, IsLoadingItemsProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets a value indicating whether items are loading.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets a value indicating whether items are loading.")]
        [Localizability(LocalizationCategory.None)]
        public bool IsLoadingItems
        {
            get
            {
                return (bool) GetValue(IsLoadingItemsProperty);
            }
            set
            {
                SetValue(IsLoadingItemsProperty,BooleanBoxes.Box(value));
            }
        }

        static private void IsLoadingItemsProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnIsLoadingItemsChanged( new PropertyChangedEventArgs<bool>((bool)e.OldValue, (bool)e.NewValue) );
        }

        /// <summary>
        /// Occurs when IsLoadingItems property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<bool>> IsLoadingItemsChanged;

        /// <summary>
        /// Called when IsLoadingItems property changes.
        /// </summary>
        protected virtual void OnIsLoadingItemsChanged(PropertyChangedEventArgs<bool> e)
        {
            OnIsLoadingItemsChangedImplementation(e);
            RaisePropertyChangedEvent(IsLoadingItemsChanged, e);
        }

        partial void OnIsLoadingItemsChangedImplementation(PropertyChangedEventArgs<bool> e);

        //
        // IsSearchShown dependency property
        //
        /// <summary>
        /// Identifies the IsSearchShown dependency property.
        /// </summary>
        public static readonly DependencyProperty IsSearchShownProperty = DependencyProperty.Register( "IsSearchShown", typeof(bool), typeof(ManagementList), new PropertyMetadata( BooleanBoxes.TrueBox, IsSearchShownProperty_PropertyChanged) );

        /// <summary>
        /// Gets or sets a value indicating whether the search box is shown.
        /// </summary>
        [Bindable(true)]
        [Category("Common Properties")]
        [Description("Gets or sets a value indicating whether the search box is shown.")]
        [Localizability(LocalizationCategory.None)]
        public bool IsSearchShown
        {
            get
            {
                return (bool) GetValue(IsSearchShownProperty);
            }
            set
            {
                SetValue(IsSearchShownProperty,BooleanBoxes.Box(value));
            }
        }

        static private void IsSearchShownProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
        {
            ManagementList obj = (ManagementList) o;
            obj.OnIsSearchShownChanged( new PropertyChangedEventArgs<bool>((bool)e.OldValue, (bool)e.NewValue) );
        }

        /// <summary>
        /// Occurs when IsSearchShown property changes.
        /// </summary>
        public event EventHandler<PropertyChangedEventArgs<bool>> IsSearchShownChanged;

        /// <summary>
        /// Called when IsSearchShown property changes.
        /// </summary>
        protected virtual void OnIsSearchShownChanged(PropertyChangedEventArgs<bool> e)
        {
            OnIsSearchShownChangedImplementation(e);
            RaisePropertyChangedEvent(IsSearchShownChanged, e);
        }

        partial void OnIsSearchShownChangedImplementation(PropertyChangedEventArgs<bool> e);
//
// List dependency property
//
/// <summary>
/// Identifies the List dependency property key.
/// </summary>
private static readonly DependencyPropertyKey ListPropertyKey = DependencyProperty.RegisterReadOnly( "List", typeof(InnerList), typeof(ManagementList), new PropertyMetadata( null, ListProperty_PropertyChanged) );
/// <summary>
/// Identifies the List dependency property.
/// </summary>
public static readonly DependencyProperty ListProperty = ListPropertyKey.DependencyProperty;
/// <summary>
/// Gets the list.
/// </summary>
[Bindable(true)]
[Category("Common Properties")]
[Description("Gets the list.")]
[Localizability(LocalizationCategory.None)]
public InnerList List
{
get
{
return (InnerList) GetValue(ListProperty);
}
private set
{
SetValue(ListPropertyKey,value);
}
}
static private void ListProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
{
ManagementList obj = (ManagementList) o;
obj.OnListChanged( new PropertyChangedEventArgs<InnerList>((InnerList)e.OldValue, (InnerList)e.NewValue) );
}
/// <summary>
/// Occurs when List property changes.
/// </summary>
public event EventHandler<PropertyChangedEventArgs<InnerList>> ListChanged;
/// <summary>
/// Called when List property changes.
/// </summary>
protected virtual void OnListChanged(PropertyChangedEventArgs<InnerList> e)
{
OnListChangedImplementation(e);
RaisePropertyChangedEvent(ListChanged, e);
}
partial void OnListChangedImplementation(PropertyChangedEventArgs<InnerList> e);
//
// SearchBox dependency property
//
/// <summary>
/// Identifies the SearchBox dependency property key.
/// </summary>
private static readonly DependencyPropertyKey SearchBoxPropertyKey = DependencyProperty.RegisterReadOnly( "SearchBox", typeof(SearchBox), typeof(ManagementList), new PropertyMetadata( null, SearchBoxProperty_PropertyChanged) );
/// <summary>
/// Identifies the SearchBox dependency property.
/// </summary>
public static readonly DependencyProperty SearchBoxProperty = SearchBoxPropertyKey.DependencyProperty;
/// <summary>
/// Gets the search box.
/// </summary>
[Bindable(true)]
[Category("Common Properties")]
[Description("Gets the search box.")]
[Localizability(LocalizationCategory.None)]
public SearchBox SearchBox
{
    get
    {
        return (SearchBox) GetValue(SearchBoxProperty);
    }
    private set
    {
        // Read-only dependency property: writes must go through the private key.
        SetValue(SearchBoxPropertyKey,value);
    }
}
private static void SearchBoxProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
{
    // Forward the dependency-property change to the instance-level virtual notification.
    ManagementList owner = (ManagementList)o;
    owner.OnSearchBoxChanged(new PropertyChangedEventArgs<SearchBox>((SearchBox)e.OldValue, (SearchBox)e.NewValue));
}
/// <summary>
/// Occurs when SearchBox property changes.
/// </summary>
public event EventHandler<PropertyChangedEventArgs<SearchBox>> SearchBoxChanged;
/// <summary>
/// Called when SearchBox property changes.
/// </summary>
protected virtual void OnSearchBoxChanged(PropertyChangedEventArgs<SearchBox> e)
{
    // Partial-class hook runs first, then external subscribers are notified.
    OnSearchBoxChangedImplementation(e);
    RaisePropertyChangedEvent(SearchBoxChanged, e);
}
partial void OnSearchBoxChangedImplementation(PropertyChangedEventArgs<SearchBox> e);
//
// ViewManagerUserActionState dependency property
//
/// <summary>
/// Identifies the ViewManagerUserActionState dependency property.
/// </summary>
public static readonly DependencyProperty ViewManagerUserActionStateProperty = DependencyProperty.Register( "ViewManagerUserActionState", typeof(UserActionState), typeof(ManagementList), new PropertyMetadata( UserActionState.Enabled, ViewManagerUserActionStateProperty_PropertyChanged) );
/// <summary>
/// Gets or sets the user interaction state of the view manager.
/// </summary>
[Bindable(true)]
[Category("Common Properties")]
[Description("Gets or sets the user interaction state of the view manager.")]
[Localizability(LocalizationCategory.None)]
public UserActionState ViewManagerUserActionState
{
    get
    {
        return (UserActionState) GetValue(ViewManagerUserActionStateProperty);
    }
    set
    {
        SetValue(ViewManagerUserActionStateProperty,value);
    }
}
private static void ViewManagerUserActionStateProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
{
    // Forward the dependency-property change to the instance-level virtual notification.
    ManagementList owner = (ManagementList)o;
    owner.OnViewManagerUserActionStateChanged(new PropertyChangedEventArgs<UserActionState>((UserActionState)e.OldValue, (UserActionState)e.NewValue));
}
/// <summary>
/// Occurs when ViewManagerUserActionState property changes.
/// </summary>
public event EventHandler<PropertyChangedEventArgs<UserActionState>> ViewManagerUserActionStateChanged;
/// <summary>
/// Called when ViewManagerUserActionState property changes.
/// </summary>
protected virtual void OnViewManagerUserActionStateChanged(PropertyChangedEventArgs<UserActionState> e)
{
    // Partial-class hook runs first, then external subscribers are notified.
    OnViewManagerUserActionStateChangedImplementation(e);
    RaisePropertyChangedEvent(ViewManagerUserActionStateChanged, e);
}
partial void OnViewManagerUserActionStateChangedImplementation(PropertyChangedEventArgs<UserActionState> e);
//
// ViewSaverUserActionState dependency property
//
/// <summary>
/// Identifies the ViewSaverUserActionState dependency property.
/// </summary>
public static readonly DependencyProperty ViewSaverUserActionStateProperty = DependencyProperty.Register( "ViewSaverUserActionState", typeof(UserActionState), typeof(ManagementList), new PropertyMetadata( UserActionState.Enabled, ViewSaverUserActionStateProperty_PropertyChanged) );
/// <summary>
/// Gets or sets the user interaction state of the view saver.
/// </summary>
[Bindable(true)]
[Category("Common Properties")]
[Description("Gets or sets the user interaction state of the view saver.")]
[Localizability(LocalizationCategory.None)]
public UserActionState ViewSaverUserActionState
{
    get
    {
        return (UserActionState) GetValue(ViewSaverUserActionStateProperty);
    }
    set
    {
        SetValue(ViewSaverUserActionStateProperty,value);
    }
}
private static void ViewSaverUserActionStateProperty_PropertyChanged(DependencyObject o, DependencyPropertyChangedEventArgs e)
{
    // Forward the dependency-property change to the instance-level virtual notification.
    ManagementList owner = (ManagementList)o;
    owner.OnViewSaverUserActionStateChanged(new PropertyChangedEventArgs<UserActionState>((UserActionState)e.OldValue, (UserActionState)e.NewValue));
}
/// <summary>
/// Occurs when ViewSaverUserActionState property changes.
/// </summary>
public event EventHandler<PropertyChangedEventArgs<UserActionState>> ViewSaverUserActionStateChanged;
/// <summary>
/// Called when ViewSaverUserActionState property changes.
/// </summary>
protected virtual void OnViewSaverUserActionStateChanged(PropertyChangedEventArgs<UserActionState> e)
{
    // Partial-class hook runs first, then external subscribers are notified.
    OnViewSaverUserActionStateChangedImplementation(e);
    RaisePropertyChangedEvent(ViewSaverUserActionStateChanged, e);
}
partial void OnViewSaverUserActionStateChangedImplementation(PropertyChangedEventArgs<UserActionState> e);
/// <summary>
/// Called when a property changes.
/// </summary>
private void RaisePropertyChangedEvent<T>(EventHandler<PropertyChangedEventArgs<T>> eh, PropertyChangedEventArgs<T> e)
{
    // Guard clause: nothing to raise when there are no subscribers. The delegate
    // parameter is already a stable snapshot, so no copy-to-local is needed.
    if (eh == null)
    {
        return;
    }
    eh(this, e);
}
//
// OnApplyTemplate
//
/// <summary>
/// Called when ApplyTemplate is called.
/// </summary>
public override void OnApplyTemplate()
{
    // Partial-class hook before the base template is applied.
    PreOnApplyTemplate();
    base.OnApplyTemplate();
    // Resolve the named parts declared by the control template.
    this.viewManager = WpfHelp.GetTemplateChild<ListOrganizer>(this,"PART_ViewManager");
    this.viewSaver = WpfHelp.GetTemplateChild<PickerBase>(this,"PART_ViewSaver");
    // Partial-class hook after part resolution.
    PostOnApplyTemplate();
}
partial void PreOnApplyTemplate();
partial void PostOnApplyTemplate();
//
// Static constructor
//
/// <summary>
/// Called when the type is initialized.
/// </summary>
static ManagementList()
{
    // Use the style keyed on typeof(ManagementList) instead of the base class default.
    DefaultStyleKeyProperty.OverrideMetadata(typeof(ManagementList), new FrameworkPropertyMetadata(typeof(ManagementList)));
    // Class-level command bindings shared by all instances.
    CommandManager.RegisterClassCommandBinding( typeof(ManagementList), new CommandBinding( ManagementList.ClearFilterCommand, ClearFilterCommand_CommandExecuted, ClearFilterCommand_CommandCanExecute ));
    CommandManager.RegisterClassCommandBinding( typeof(ManagementList), new CommandBinding( ManagementList.SaveViewCommand, SaveViewCommand_CommandExecuted, SaveViewCommand_CommandCanExecute ));
    CommandManager.RegisterClassCommandBinding( typeof(ManagementList), new CommandBinding( ManagementList.StartFilterCommand, StartFilterCommand_CommandExecuted, StartFilterCommand_CommandCanExecute ));
    CommandManager.RegisterClassCommandBinding( typeof(ManagementList), new CommandBinding( ManagementList.StopFilterCommand, StopFilterCommand_CommandExecuted, StopFilterCommand_CommandCanExecute ));
    // Partial-class hook for additional static initialization.
    StaticConstructorImplementation();
}
static partial void StaticConstructorImplementation();
//
// CreateAutomationPeer
//
/// <summary>
/// Create an instance of the AutomationPeer.
/// </summary>
/// <returns>
/// An instance of the AutomationPeer.
/// </returns>
protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer()
{
    // Expose this control to UI automation as a pane.
    return new ExtendedFrameworkElementAutomationPeer(this,AutomationControlType.Pane);
}
}
}
#endregion
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Microsoft.Xml
{
using System;
using System.IO;
using System.Collections;
using System.Diagnostics;
using System.Text;
using Microsoft.Xml.Schema;
using System.Globalization;
internal class XmlLoader
{
private XmlDocument _doc;
private XmlReader _reader;
private bool _preserveWhitespace;
public XmlLoader()
{
    // No state to initialize here; _doc, _reader and _preserveWhitespace are
    // populated by Load / ReadCurrentNode / ParsePartialContent before use.
}
/// <summary>
/// Loads the whole document from the reader into <paramref name="doc"/>,
/// optionally preserving insignificant whitespace.
/// </summary>
internal void Load(XmlDocument doc, XmlReader reader, bool preserveWhitespace)
{
    // Validate arguments before using them. The original code assigned _doc and
    // called reader.GetType() before these checks, so a null reader surfaced as
    // NullReferenceException instead of the intended ArgumentException.
    if (doc == null)
        throw new ArgumentException(ResXml.Xdom_Load_NoDocument);
    if (reader == null)
        throw new ArgumentException(ResXml.Xdom_Load_NoReader);
    _doc = doc;
    // perf: unwrap XmlTextReader if no one derived from it
    if (reader.GetType() == typeof(Microsoft.Xml.XmlTextReader))
    {
        _reader = ((XmlTextReader)reader).Impl;
    }
    else
    {
        _reader = reader;
    }
    _preserveWhitespace = preserveWhitespace;
    doc.SetBaseURI(reader.BaseURI);
    // Pick up the schema set when the reader was configured for schema validation.
    if (reader.Settings != null
        && reader.Settings.ValidationType == ValidationType.Schema)
    {
        doc.Schemas = reader.Settings.Schemas;
    }
    // Position the reader on its first node if it has not been read yet.
    if (_reader.ReadState != ReadState.Interactive)
    {
        if (!_reader.Read())
            return; // nothing to load
    }
    LoadDocSequence(doc);
}
//The function will start loading the document from where current XmlReader is pointing at.
private void LoadDocSequence(XmlDocument parentDoc)
{
    Debug.Assert(_reader != null);
    Debug.Assert(parentDoc != null);
    // Append every top-level node until LoadNode yields nothing or the reader
    // runs out of input.
    for (XmlNode node = LoadNode(true); node != null; node = LoadNode(true))
    {
        parentDoc.AppendChildForLoad(node, parentDoc);
        if (!_reader.Read())
        {
            return;
        }
    }
}
/// <summary>
/// Reads exactly the node the reader is positioned on (plus its subtree) into
/// <paramref name="doc"/> and advances the reader past it. Returns null when the
/// reader is not in a readable state.
/// </summary>
internal XmlNode ReadCurrentNode(XmlDocument doc, XmlReader reader)
{
    // Validate arguments before assigning loader state (consistent with the
    // argument checks performed by Load).
    if (doc == null)
        throw new ArgumentException(ResXml.Xdom_Load_NoDocument);
    if (reader == null)
        throw new ArgumentException(ResXml.Xdom_Load_NoReader);
    _doc = doc;
    _reader = reader;
    // WS are optional only for loading (see XmlDocument.PreserveWhitespace)
    _preserveWhitespace = true;
    if (reader.ReadState == ReadState.Initial)
    {
        reader.Read();
    }
    if (reader.ReadState == ReadState.Interactive)
    {
        XmlNode n = LoadNode(true);
        // Move to the next node, unless an attribute was loaded (the reader is
        // still positioned on it). The null check avoids a NullReferenceException
        // when LoadNode produces no node for the current token.
        if (n != null && n.NodeType != XmlNodeType.Attribute)
            reader.Read();
        return n;
    }
    return null;
}
// Loads one node — and, for elements, its entire subtree — starting at the
// reader's current position, using the virtual creator functions on XmlDocument.
// Returns null when the current token closes the enclosing scope (EndElement /
// EndEntity with no open parent). skipOverWhitespace controls whether
// non-significant whitespace at the top level is skipped or ends the walk.
private XmlNode LoadNode(bool skipOverWhitespace)
{
    XmlReader r = _reader;
    // Innermost element whose subtree is still being built; null at top level.
    XmlNode parent = null;
    XmlElement element;
    IXmlSchemaInfo schemaInfo;
    do
    {
        XmlNode node = null;
        switch (r.NodeType)
        {
            case XmlNodeType.Element:
                bool fEmptyElement = r.IsEmptyElement;
                element = _doc.CreateElement(r.Prefix, r.LocalName, r.NamespaceURI);
                element.IsEmpty = fEmptyElement;
                if (r.MoveToFirstAttribute())
                {
                    XmlAttributeCollection attributes = element.Attributes;
                    do
                    {
                        XmlAttribute attr = LoadAttributeNode();
                        attributes.Append(attr); // special case for load
                    }
                    while (r.MoveToNextAttribute());
                    r.MoveToElement();
                }
                // recursively load all children.
                if (!fEmptyElement)
                {
                    // Open a new nesting level; children are appended until the
                    // matching EndElement is seen.
                    if (parent != null)
                    {
                        parent.AppendChildForLoad(element, _doc);
                    }
                    parent = element;
                    continue;
                }
                else
                {
                    // Empty element: its schema info is final here.
                    schemaInfo = r.SchemaInfo;
                    if (schemaInfo != null)
                    {
                        element.XmlName = _doc.AddXmlName(element.Prefix, element.LocalName, element.NamespaceURI, schemaInfo);
                    }
                    node = element;
                    break;
                }
            case XmlNodeType.EndElement:
                if (parent == null)
                {
                    // EndElement for a scope opened outside this call.
                    return null;
                }
                Debug.Assert(parent.NodeType == XmlNodeType.Element);
                // Post-validation schema info becomes available at EndElement.
                schemaInfo = r.SchemaInfo;
                if (schemaInfo != null)
                {
                    element = parent as XmlElement;
                    if (element != null)
                    {
                        element.XmlName = _doc.AddXmlName(element.Prefix, element.LocalName, element.NamespaceURI, schemaInfo);
                    }
                }
                if (parent.ParentNode == null)
                {
                    // Closed the outermost element started in this call: done.
                    return parent;
                }
                parent = parent.ParentNode;
                continue;
            case XmlNodeType.EntityReference:
                node = LoadEntityReferenceNode(false);
                break;
            case XmlNodeType.EndEntity:
                Debug.Assert(parent == null);
                return null;
            case XmlNodeType.Attribute:
                node = LoadAttributeNode();
                break;
            case XmlNodeType.Text:
                node = _doc.CreateTextNode(r.Value);
                break;
            case XmlNodeType.SignificantWhitespace:
                node = _doc.CreateSignificantWhitespace(r.Value);
                break;
            case XmlNodeType.Whitespace:
                if (_preserveWhitespace)
                {
                    node = _doc.CreateWhitespace(r.Value);
                    break;
                }
                else if (parent == null && !skipOverWhitespace)
                {
                    // if called from LoadEntityReferenceNode, just return null
                    return null;
                }
                else
                {
                    continue;
                }
            case XmlNodeType.CDATA:
                node = _doc.CreateCDataSection(r.Value);
                break;
            case XmlNodeType.XmlDeclaration:
                node = LoadDeclarationNode();
                break;
            case XmlNodeType.ProcessingInstruction:
                node = _doc.CreateProcessingInstruction(r.Name, r.Value);
                break;
            case XmlNodeType.Comment:
                node = _doc.CreateComment(r.Value);
                break;
            case XmlNodeType.DocumentType:
                node = LoadDocumentTypeNode();
                break;
            default:
                throw UnexpectedNodeType(r.NodeType);
        }
        Debug.Assert(node != null);
        if (parent != null)
        {
            parent.AppendChildForLoad(node, _doc);
        }
        else
        {
            // Leaf at top level: this single node is the result.
            return node;
        }
    }
    while (r.Read());
    // when the reader ended before full subtree is read, return whatever we have created so far
    if (parent != null)
    {
        while (parent.ParentNode != null)
        {
            parent = parent.ParentNode;
        }
    }
    return parent;
}
// Loads the attribute the reader is positioned on, including its value children
// (text nodes and resolvable entity references), using the document's creator
// functions. DTD-defaulted attributes are delegated to LoadDefaultAttribute.
private XmlAttribute LoadAttributeNode()
{
    Debug.Assert(_reader.NodeType == XmlNodeType.Attribute);
    XmlReader r = _reader;
    if (r.IsDefault)
    {
        return LoadDefaultAttribute();
    }
    XmlAttribute attr = _doc.CreateAttribute(r.Prefix, r.LocalName, r.NamespaceURI);
    IXmlSchemaInfo schemaInfo = r.SchemaInfo;
    if (schemaInfo != null)
    {
        attr.XmlName = _doc.AddAttrXmlName(attr.Prefix, attr.LocalName, attr.NamespaceURI, schemaInfo);
    }
    // Walk the attribute value, which may be a mix of text and entity references.
    while (r.ReadAttributeValue())
    {
        XmlNode node;
        switch (r.NodeType)
        {
            case XmlNodeType.Text:
                node = _doc.CreateTextNode(r.Value);
                break;
            case XmlNodeType.EntityReference:
                node = _doc.CreateEntityReference(r.LocalName);
                if (r.CanResolveEntity)
                {
                    r.ResolveEntity();
                    LoadAttributeValue(node, false);
                    // Code internally relies on the fact that an EntRef nodes has at least one child (even an empty text node). Ensure that this holds true,
                    // if the reader does not present any children for the ent-ref
                    if (node.FirstChild == null)
                    {
                        node.AppendChildForLoad(_doc.CreateTextNode(string.Empty), _doc);
                    }
                }
                break;
            default:
                throw UnexpectedNodeType(r.NodeType);
        }
        Debug.Assert(node != null);
        attr.AppendChildForLoad(node, _doc);
    }
    return attr;
}
private XmlAttribute LoadDefaultAttribute()
{
    Debug.Assert(_reader.IsDefault);
    XmlReader reader = _reader;
    // A defaulted attribute comes from the DTD rather than the document text.
    XmlAttribute attr = _doc.CreateDefaultAttribute(reader.Prefix, reader.LocalName, reader.NamespaceURI);
    IXmlSchemaInfo schemaInfo = reader.SchemaInfo;
    if (schemaInfo != null)
    {
        attr.XmlName = _doc.AddAttrXmlName(attr.Prefix, attr.LocalName, attr.NamespaceURI, schemaInfo);
    }
    LoadAttributeValue(attr, false);
    // If the user overrides CreateDefaultAttribute the result may not be an
    // XmlUnspecifiedAttribute, in which case there is no Specified flag to clear.
    XmlUnspecifiedAttribute unspecified = attr as XmlUnspecifiedAttribute;
    if (unspecified != null)
    {
        unspecified.SetSpecified(false);
    }
    return attr;
}
// Appends the attribute-value content (text and entity references) under
// 'parent'. When 'direct' is true the concrete node types are constructed
// directly, bypassing the document's virtual creator functions.
private void LoadAttributeValue(XmlNode parent, bool direct)
{
    XmlReader r = _reader;
    while (r.ReadAttributeValue())
    {
        XmlNode node;
        switch (r.NodeType)
        {
            case XmlNodeType.Text:
                node = direct ? new XmlText(r.Value, _doc) : _doc.CreateTextNode(r.Value);
                break;
            case XmlNodeType.EndEntity:
                // End of the expanded entity content started by a ResolveEntity call.
                return;
            case XmlNodeType.EntityReference:
                node = direct ? new XmlEntityReference(_reader.LocalName, _doc) : _doc.CreateEntityReference(_reader.LocalName);
                if (r.CanResolveEntity)
                {
                    r.ResolveEntity();
                    // Recurse to load the entity's expansion under the ent-ref node.
                    LoadAttributeValue(node, direct);
                    // Code internally relies on the fact that an EntRef nodes has at least one child (even an empty text node). Ensure that this holds true,
                    // if the reader does not present any children for the ent-ref
                    if (node.FirstChild == null)
                    {
                        node.AppendChildForLoad(direct ? new XmlText(string.Empty) : _doc.CreateTextNode(string.Empty), _doc);
                    }
                }
                break;
            default:
                throw UnexpectedNodeType(r.NodeType);
        }
        Debug.Assert(node != null);
        parent.AppendChildForLoad(node, _doc);
    }
    return;
}
// Loads an entity-reference node and, when the reader can resolve the entity,
// its expanded children. 'direct' selects direct node construction over the
// document's virtual creator functions.
private XmlEntityReference LoadEntityReferenceNode(bool direct)
{
    Debug.Assert(_reader.NodeType == XmlNodeType.EntityReference);
    XmlEntityReference eref = direct ? new XmlEntityReference(_reader.Name, _doc) : _doc.CreateEntityReference(_reader.Name);
    if (_reader.CanResolveEntity)
    {
        _reader.ResolveEntity();
        // Consume the expansion up to the matching EndEntity token.
        while (_reader.Read() && _reader.NodeType != XmlNodeType.EndEntity)
        {
            XmlNode node = direct ? LoadNodeDirect() : LoadNode(false);
            if (node != null)
            {
                eref.AppendChildForLoad(node, _doc);
            }
        }
        // Code internally relies on the fact that an EntRef nodes has at least one child (even an empty text node). Ensure that this holds true,
        // if the reader does not present any children for the ent-ref
        if (eref.LastChild == null)
            eref.AppendChildForLoad(_doc.CreateTextNode(string.Empty), _doc);
    }
    return eref;
}
// Builds an XmlDeclaration node from the current <?xml ...?> token, reading the
// version/encoding/standalone pseudo-attributes either from the reader or, for
// readers that do not expose them as attributes, by parsing the raw value.
private XmlDeclaration LoadDeclarationNode()
{
    Debug.Assert(_reader.NodeType == XmlNodeType.XmlDeclaration);
    //parse data
    string version = null;
    string encoding = null;
    string standalone = null;
    // Try first to use the reader to get the xml decl "attributes". Since not all readers are required to support this, it is possible to have
    // implementations that do nothing
    while (_reader.MoveToNextAttribute())
    {
        switch (_reader.Name)
        {
            case "version":
                version = _reader.Value;
                break;
            case "encoding":
                encoding = _reader.Value;
                break;
            case "standalone":
                standalone = _reader.Value;
                break;
            default:
                // A well-formed xml declaration has no other pseudo-attributes.
                Debug.Assert(false);
                break;
        }
    }
    // For readers that do not break the xml decl into attributes, we must parse it
    // ourselves. We key off the version attribute because the xml decl MUST contain
    // at least version, so if the reader exposes attributes, version is present.
    if (version == null)
        ParseXmlDeclarationValue(_reader.Value, out version, out encoding, out standalone);
    return _doc.CreateXmlDeclaration(version, encoding, standalone);
}
// Builds an XmlDocumentType node from the current DOCTYPE token and populates
// its entity/notation tables, either from the reader's DTD info or by re-parsing
// the DTD ourselves.
private XmlDocumentType LoadDocumentTypeNode()
{
    Debug.Assert(_reader.NodeType == XmlNodeType.DocumentType);
    String publicId = null;
    String systemId = null;
    String internalSubset = _reader.Value;
    String localName = _reader.LocalName;
    while (_reader.MoveToNextAttribute())
    {
        switch (_reader.Name)
        {
            case "PUBLIC":
                publicId = _reader.Value;
                break;
            case "SYSTEM":
                systemId = _reader.Value;
                break;
        }
    }
    XmlDocumentType dtNode = _doc.CreateDocumentType(localName, publicId, systemId, internalSubset);
    IDtdInfo dtdInfo = _reader.DtdInfo;
    if (dtdInfo != null)
        LoadDocumentType(dtdInfo, dtNode);
    else
    {
        //construct our own XmlValidatingReader to parse the DocumentType node so we could get Entities and notations information
        ParseDocumentType(dtNode);
    }
    return dtNode;
}
// LoadNodeDirect does not use creator functions on XmlDocument. It is used for loading nodes that are children of entity nodes,
// because we do not want to let users extend these (if we allowed this, XmlDataDocument would have a problem, because
// it does not know that those nodes should not be mapped). It can also serve as an optimized load path when the
// XmlDocument is not extended, if XmlDocumentType and XmlDeclaration handling is added.
// Loads the current node (and, for elements, its whole subtree) by constructing
// the concrete node types directly, bypassing the document's virtual creator
// functions. Returns null when the current token closes the enclosing scope.
private XmlNode LoadNodeDirect()
{
    XmlReader r = _reader;
    // Innermost element whose subtree is still being built; null at top level.
    XmlNode parent = null;
    do
    {
        XmlNode node = null;
        switch (r.NodeType)
        {
            case XmlNodeType.Element:
                bool fEmptyElement = _reader.IsEmptyElement;
                XmlElement element = new XmlElement(_reader.Prefix, _reader.LocalName, _reader.NamespaceURI, _doc);
                element.IsEmpty = fEmptyElement;
                if (_reader.MoveToFirstAttribute())
                {
                    XmlAttributeCollection attributes = element.Attributes;
                    do
                    {
                        XmlAttribute attr = LoadAttributeNodeDirect();
                        attributes.Append(attr); // special case for load
                    } while (r.MoveToNextAttribute());
                }
                // recursively load all children.
                if (!fEmptyElement)
                {
                    // Guard against a null parent: the first loaded node may be a
                    // non-empty element. The original dereferenced 'parent'
                    // unconditionally here, unlike the matching branch in LoadNode.
                    if (parent != null)
                    {
                        parent.AppendChildForLoad(element, _doc);
                    }
                    parent = element;
                    continue;
                }
                else
                {
                    node = element;
                    break;
                }
            case XmlNodeType.EndElement:
                if (parent == null)
                {
                    // EndElement for a scope opened outside this call
                    // (consistent with LoadNode's handling).
                    return null;
                }
                Debug.Assert(parent.NodeType == XmlNodeType.Element);
                if (parent.ParentNode == null)
                {
                    return parent;
                }
                parent = parent.ParentNode;
                continue;
            case XmlNodeType.EntityReference:
                node = LoadEntityReferenceNode(true);
                break;
            case XmlNodeType.EndEntity:
                continue;
            case XmlNodeType.Attribute:
                node = LoadAttributeNodeDirect();
                break;
            case XmlNodeType.SignificantWhitespace:
                node = new XmlSignificantWhitespace(_reader.Value, _doc);
                break;
            case XmlNodeType.Whitespace:
                if (_preserveWhitespace)
                {
                    node = new XmlWhitespace(_reader.Value, _doc);
                }
                else
                {
                    continue;
                }
                break;
            case XmlNodeType.Text:
                node = new XmlText(_reader.Value, _doc);
                break;
            case XmlNodeType.CDATA:
                node = new XmlCDataSection(_reader.Value, _doc);
                break;
            case XmlNodeType.ProcessingInstruction:
                node = new XmlProcessingInstruction(_reader.Name, _reader.Value, _doc);
                break;
            case XmlNodeType.Comment:
                node = new XmlComment(_reader.Value, _doc);
                break;
            default:
                throw UnexpectedNodeType(_reader.NodeType);
        }
        Debug.Assert(node != null);
        if (parent != null)
        {
            parent.AppendChildForLoad(node, _doc);
        }
        else
        {
            // Leaf at top level: this single node is the result.
            return node;
        }
    }
    while (r.Read());
    return null;
}
private XmlAttribute LoadAttributeNodeDirect()
{
    XmlReader reader = _reader;
    // DTD-supplied default attribute: construct directly and clear Specified.
    if (reader.IsDefault)
    {
        XmlUnspecifiedAttribute defaultAttr = new XmlUnspecifiedAttribute(reader.Prefix, reader.LocalName, reader.NamespaceURI, _doc);
        LoadAttributeValue(defaultAttr, true);
        defaultAttr.SetSpecified(false);
        return defaultAttr;
    }
    // Ordinary attribute present in the document text.
    XmlAttribute attr = new XmlAttribute(reader.Prefix, reader.LocalName, reader.NamespaceURI, _doc);
    LoadAttributeValue(attr, true);
    return attr;
}
internal void ParseDocumentType(XmlDocumentType dtNode)
{
    // Use the document's resolver when one has been set explicitly; otherwise
    // let the parsing reader create its own default resolver.
    XmlDocument doc = dtNode.OwnerDocument;
    bool useDocResolver = doc.HasSetResolver;
    ParseDocumentType(dtNode, useDocResolver, useDocResolver ? doc.GetResolver() : null);
}
// Re-parses the DOCTYPE content with a free-floating DTD parser so that the
// entity and notation tables of dtNode can be populated even when the original
// reader did not supply DTD information.
private void ParseDocumentType(XmlDocumentType dtNode, bool bUseResolver, XmlResolver resolver)
{
    _doc = dtNode.OwnerDocument;
    // Minimal parser context: only the name table and the document base URI matter here.
    XmlParserContext pc = new XmlParserContext(null, new XmlNamespaceManager(_doc.NameTable), null, null, null, null, _doc.BaseURI, string.Empty, XmlSpace.None);
    XmlTextReaderImpl tr = new XmlTextReaderImpl("", XmlNodeType.Element, pc);
    tr.Namespaces = dtNode.ParseWithNamespaces;
    if (bUseResolver)
    {
        tr.XmlResolver = resolver;
    }
    IDtdParser dtdParser = DtdParser.Create();
    XmlTextReaderImpl.DtdParserProxy proxy = new XmlTextReaderImpl.DtdParserProxy(tr);
    IDtdInfo dtdInfo = dtdParser.ParseFreeFloatingDtd(_doc.BaseURI, dtNode.Name, dtNode.PublicId, dtNode.SystemId, dtNode.InternalSubset, proxy);
    LoadDocumentType(dtdInfo, dtNode);
}
// Copies notations, general/parameter entities and ID-attribute information from
// the parsed DTD into the document-type node and its owner document.
private void LoadDocumentType(IDtdInfo dtdInfo, XmlDocumentType dtNode)
{
    SchemaInfo schInfo = dtdInfo as SchemaInfo;
    if (schInfo == null)
    {
        // Only SchemaInfo-backed DTD info is supported by this loader.
        throw new XmlException(ResXml.Xml_InternalError, string.Empty);
    }
    dtNode.DtdSchemaInfo = schInfo;
    // NOTE(review): this check is always true — schInfo == null already threw above.
    if (schInfo != null)
    {
        //set the schema information into the document
        _doc.DtdSchemaInfo = schInfo;
        // Notation hashtable
        if (schInfo.Notations != null)
        {
            foreach (SchemaNotation scNot in schInfo.Notations.Values)
            {
                dtNode.Notations.SetNamedItem(new XmlNotation(scNot.Name.Name, scNot.Pubid, scNot.SystemLiteral, _doc));
            }
        }
        // Entity hashtables
        if (schInfo.GeneralEntities != null)
        {
            foreach (SchemaEntity scEnt in schInfo.GeneralEntities.Values)
            {
                XmlEntity ent = new XmlEntity(scEnt.Name.Name, scEnt.Text, scEnt.Pubid, scEnt.Url, scEnt.NData.IsEmpty ? null : scEnt.NData.Name, _doc);
                ent.SetBaseURI(scEnt.DeclaredURI);
                dtNode.Entities.SetNamedItem(ent);
            }
        }
        if (schInfo.ParameterEntities != null)
        {
            foreach (SchemaEntity scEnt in schInfo.ParameterEntities.Values)
            {
                XmlEntity ent = new XmlEntity(scEnt.Name.Name, scEnt.Text, scEnt.Pubid, scEnt.Url, scEnt.NData.IsEmpty ? null : scEnt.NData.Name, _doc);
                ent.SetBaseURI(scEnt.DeclaredURI);
                dtNode.Entities.SetNamedItem(ent);
            }
        }
        _doc.Entities = dtNode.Entities;
        //extract the elements which has attribute defined as ID from the element declarations
        IDictionaryEnumerator elementDecls = schInfo.ElementDecls.GetEnumerator();
        if (elementDecls != null)
        {
            elementDecls.Reset();
            while (elementDecls.MoveNext())
            {
                SchemaElementDecl elementDecl = (SchemaElementDecl)elementDecls.Value;
                if (elementDecl.AttDefs != null)
                {
                    IDictionaryEnumerator attDefs = elementDecl.AttDefs.GetEnumerator();
                    while (attDefs.MoveNext())
                    {
                        SchemaAttDef attdef = (SchemaAttDef)attDefs.Value;
                        if (attdef.Datatype.TokenizedType == XmlTokenizedType.ID)
                        {
                            //we only register the XmlElement based on their Prefix/LocalName and skip the namespace
                            _doc.AddIdInfo(
                                _doc.AddXmlName(elementDecl.Prefix, elementDecl.Name.Name, string.Empty, null),
                                _doc.AddAttrXmlName(attdef.Prefix, attdef.Name.Name, string.Empty, null));
                            // XML allows at most one ID attribute per element type.
                            break;
                        }
                    }
                }
            }
        }
    }
}
#pragma warning restore 618
// Builds an XmlParserContext describing the namespace, xml:space and xml:lang
// environment in effect at 'node', by walking from the node up to the document
// root and collecting the nearest declarations.
private XmlParserContext GetContext(XmlNode node)
{
    String lang = null;
    XmlSpace spaceMode = XmlSpace.None;
    XmlDocumentType docType = _doc.DocumentType;
    String baseURI = _doc.BaseURI;
    //constructing xmlnamespace
    // 'prefixes' records prefixes already taken from a nearer ancestor so outer
    // (shadowed) declarations are ignored.
    Hashtable prefixes = new Hashtable();
    XmlNameTable nt = _doc.NameTable;
    XmlNamespaceManager mgr = new XmlNamespaceManager(nt);
    bool bHasDefXmlnsAttr = false;
    // Process all xmlns, xmlns:prefix, xml:space and xml:lang attributes
    while (node != null && node != _doc)
    {
        if (node is XmlElement && ((XmlElement)node).HasAttributes)
        {
            mgr.PushScope();
            foreach (XmlAttribute attr in ((XmlElement)node).Attributes)
            {
                if (attr.Prefix == _doc.strXmlns && prefixes.Contains(attr.LocalName) == false)
                {
                    // Make sure the next time we will not add this prefix
                    prefixes.Add(attr.LocalName, attr.LocalName);
                    mgr.AddNamespace(attr.LocalName, attr.Value);
                }
                else if (!bHasDefXmlnsAttr && attr.Prefix.Length == 0 && attr.LocalName == _doc.strXmlns)
                {
                    // Save the case xmlns="..." where xmlns is the LocalName
                    mgr.AddNamespace(String.Empty, attr.Value);
                    bHasDefXmlnsAttr = true;
                }
                else if (spaceMode == XmlSpace.None && attr.Prefix == _doc.strXml && attr.LocalName == _doc.strSpace)
                {
                    // Save xml:space context (nearest declaration wins)
                    if (attr.Value == "default")
                        spaceMode = XmlSpace.Default;
                    else if (attr.Value == "preserve")
                        spaceMode = XmlSpace.Preserve;
                }
                else if (lang == null && attr.Prefix == _doc.strXml && attr.LocalName == _doc.strLang)
                {
                    // Save xml:lang context (nearest declaration wins)
                    lang = attr.Value;
                }
            }
        }
        node = node.ParentNode;
    }
    return new XmlParserContext(
        nt,
        mgr,
        (docType == null) ? null : docType.Name,
        (docType == null) ? null : docType.PublicId,
        (docType == null) ? null : docType.SystemId,
        (docType == null) ? null : docType.InternalSubset,
        baseURI,
        lang,
        spaceMode
        );
}
// Parses an XML fragment in the namespace/space/lang context of 'parentNode' and
// appends the resulting nodes under it. Returns the namespace manager used, so
// callers can strip declarations made redundant by the context.
internal XmlNamespaceManager ParsePartialContent(XmlNode parentNode, string innerxmltext, XmlNodeType nt)
{
    //the function shouldn't be used to set innerxml for XmlDocument node
    Debug.Assert(parentNode.NodeType != XmlNodeType.Document);
    _doc = parentNode.OwnerDocument;
    Debug.Assert(_doc != null);
    XmlParserContext pc = GetContext(parentNode);
    _reader = CreateInnerXmlReader(innerxmltext, nt, pc, _doc);
    try
    {
        _preserveWhitespace = true;
        // NOTE(review): if LoadNode/LoadNodeDirect throws, IsLoading stays true
        // because the restore below is inside the try but not in the finally —
        // confirm whether that is intentional.
        bool bOrigLoading = _doc.IsLoading;
        _doc.IsLoading = true;
        if (nt == XmlNodeType.Entity)
        {
            // Entity content: build nodes directly, bypassing creator functions.
            XmlNode node = null;
            while (_reader.Read() && (node = LoadNodeDirect()) != null)
            {
                parentNode.AppendChildForLoad(node, _doc);
            }
        }
        else
        {
            XmlNode node = null;
            while (_reader.Read() && (node = LoadNode(true)) != null)
            {
                parentNode.AppendChildForLoad(node, _doc);
            }
        }
        _doc.IsLoading = bOrigLoading;
    }
    finally
    {
        _reader.Close();
    }
    return pc.NamespaceManager;
}
// Builds a subtree underneath the given element from the supplied inner-XML text
// and removes namespace declarations made redundant by the element's context.
internal void LoadInnerXmlElement(XmlElement node, string innerxmltext)
{
    //construct a tree underneath the node
    XmlNamespaceManager mgr = ParsePartialContent(node, innerxmltext, XmlNodeType.Element);
    //remove the duplicate namespace declarations; 'node' is already typed as
    //XmlElement, so the original's re-cast was redundant
    if (node.ChildNodes.Count > 0)
        RemoveDuplicateNamespace(node, mgr, false);
}
// Replaces the attribute's value content by parsing the supplied inner-XML text
// in the attribute's namespace context.
internal void LoadInnerXmlAttribute(XmlAttribute node, string innerxmltext)
{
    ParsePartialContent(node, innerxmltext, XmlNodeType.Attribute);
}
// Walks the freshly parsed subtree and removes xmlns / xmlns:prefix attributes
// that redeclare exactly the namespace already in scope; declarations that
// differ are kept and pushed so deeper duplicates are detected correctly.
private void RemoveDuplicateNamespace(XmlElement elem, XmlNamespaceManager mgr, bool fCheckElemAttrs)
{
    //remove the duplicate attributes on current node first
    mgr.PushScope();
    XmlAttributeCollection attrs = elem.Attributes;
    int cAttrs = attrs.Count;
    if (fCheckElemAttrs && cAttrs > 0)
    {
        // Iterate backwards so RemoveNodeAt does not shift unvisited entries.
        for (int i = cAttrs - 1; i >= 0; --i)
        {
            XmlAttribute attr = attrs[i];
            if (attr.Prefix == _doc.strXmlns)
            {
                string nsUri = mgr.LookupNamespace(attr.LocalName);
                if (nsUri != null)
                {
                    if (attr.Value == nsUri)
                        elem.Attributes.RemoveNodeAt(i);
                }
                else
                {
                    // Add this namespace, so that we behave correctly when setting "<bar xmlns:p="BAR"><foo2 xmlns:p="FOO"/></bar>" as
                    // InnerXml on a foo element like "<foo xmlns:p="FOO"></foo>".
                    // Without this we would remove the inner p prefix definition and let the first p stay in scope for
                    // the subsequent InnerXml_set or for setting an EntRef inside.
                    mgr.AddNamespace(attr.LocalName, attr.Value);
                }
            }
            else if (attr.Prefix.Length == 0 && attr.LocalName == _doc.strXmlns)
            {
                string nsUri = mgr.DefaultNamespace;
                if (nsUri != null)
                {
                    if (attr.Value == nsUri)
                        elem.Attributes.RemoveNodeAt(i);
                }
                else
                {
                    // Same reasoning as above, for the default (prefix-less)
                    // xmlns="..." declaration.
                    mgr.AddNamespace(attr.LocalName, attr.Value);
                }
            }
        }
    }
    //now recursively remove the duplicate attributes on the children
    XmlNode child = elem.FirstChild;
    while (child != null)
    {
        XmlElement childElem = child as XmlElement;
        if (childElem != null)
            RemoveDuplicateNamespace(childElem, mgr, true);
        child = child.NextSibling;
    }
    mgr.PopScope();
}
private String EntitizeName(String name)
{
    // Wrap the raw entity name in '&' and ';' to form a textual entity reference.
    return string.Concat("&", name, ";");
}
//The function is called when expanding the entity when its children being asked
// Expands the entity node's content on demand by parsing a synthetic "&name;"
// reference in the entity's own context (children built directly, not via
// creator functions).
internal void ExpandEntity(XmlEntity ent)
{
    ParsePartialContent(ent, EntitizeName(ent.Name), XmlNodeType.Entity);
}
//The function is called when expanding the entity ref. ( inside XmlEntityReference.SetParent )
// Expands an entity-reference node: the five predefined XML entities are handled
// inline; declared entities are expanded by re-parsing "&name;"; undeclared ones
// get an empty text child (best effort) or throw when the document is not loading.
internal void ExpandEntityReference(XmlEntityReference eref)
{
    //when the ent ref is not associated w/ an entity, append an empty string text node as child
    _doc = eref.OwnerDocument;
    bool bOrigLoadingState = _doc.IsLoading;
    _doc.IsLoading = true;
    switch (eref.Name)
    {
        case "lt":
            eref.AppendChildForLoad(_doc.CreateTextNode("<"), _doc);
            _doc.IsLoading = bOrigLoadingState;
            return;
        case "gt":
            eref.AppendChildForLoad(_doc.CreateTextNode(">"), _doc);
            _doc.IsLoading = bOrigLoadingState;
            return;
        case "amp":
            eref.AppendChildForLoad(_doc.CreateTextNode("&"), _doc);
            _doc.IsLoading = bOrigLoadingState;
            return;
        case "apos":
            eref.AppendChildForLoad(_doc.CreateTextNode("'"), _doc);
            _doc.IsLoading = bOrigLoadingState;
            return;
        case "quot":
            eref.AppendChildForLoad(_doc.CreateTextNode("\""), _doc);
            _doc.IsLoading = bOrigLoadingState;
            return;
    }
    // Look for a matching declared entity from the DTD.
    XmlNamedNodeMap entities = _doc.Entities;
    foreach (XmlEntity ent in entities)
    {
        if (Ref.Equal(ent.Name, eref.Name))
        {
            // NOTE(review): IsLoading is not restored on this path; ParsePartialContent
            // manages it internally — confirm this is the intended interaction.
            ParsePartialContent(eref, EntitizeName(eref.Name), XmlNodeType.EntityReference);
            return;
        }
    }
    //no fit so far
    if (!(_doc.ActualLoadingStatus))
    {
        eref.AppendChildForLoad(_doc.CreateTextNode(""), _doc);
        _doc.IsLoading = bOrigLoadingState;
    }
    else
    {
        _doc.IsLoading = bOrigLoadingState;
        // NOTE(review): the resource string refers to a parameter entity even though
        // this is a general entity reference — confirm the intended message.
        throw new XmlException(ResXml.Xml_UndeclaredParEntity, eref.Name);
    }
}
#pragma warning disable 618
// Creates a XmlValidatingReader suitable for parsing InnerXml strings
// Creates a reader (in XmlValidatingReader compatibility mode) suitable for
// parsing InnerXml fragments in the given parser context, wiring up the
// document's resolver and DTD information when available.
private XmlReader CreateInnerXmlReader(String xmlFragment, XmlNodeType nt, XmlParserContext context, XmlDocument doc)
{
    XmlNodeType contentNT = nt;
    // Entity / entity-reference content is parsed as element content.
    if (contentNT == XmlNodeType.Entity || contentNT == XmlNodeType.EntityReference)
        contentNT = XmlNodeType.Element;
    XmlTextReaderImpl tr = new XmlTextReaderImpl(xmlFragment, contentNT, context);
    tr.XmlValidatingReaderCompatibilityMode = true;
    if (doc.HasSetResolver)
    {
        tr.XmlResolver = doc.GetResolver();
    }
    if (!(doc.ActualLoadingStatus))
    {
        // Outside of a Load call, tolerate references to undeclared entities.
        tr.DisableUndeclaredEntityCheck = true;
    }
    Debug.Assert(tr.EntityHandling == EntityHandling.ExpandCharEntities);
    XmlDocumentType dtdNode = doc.DocumentType;
    if (dtdNode != null)
    {
        tr.Namespaces = dtdNode.ParseWithNamespaces;
        if (dtdNode.DtdSchemaInfo != null)
        {
            tr.SetDtdInfo(dtdNode.DtdSchemaInfo);
        }
        else
        {
            // DTD not parsed yet: parse it free-floating and cache the result on the node.
            IDtdParser dtdParser = DtdParser.Create();
            XmlTextReaderImpl.DtdParserProxy proxy = new XmlTextReaderImpl.DtdParserProxy(tr);
            IDtdInfo dtdInfo = dtdParser.ParseFreeFloatingDtd(context.BaseURI, context.DocTypeName, context.PublicId, context.SystemId, context.InternalSubset, proxy);
            // TODO: Change all of XmlDocument to IDtdInfo interfaces
            dtdNode.DtdSchemaInfo = dtdInfo as SchemaInfo;
            tr.SetDtdInfo(dtdInfo);
        }
    }
    if (nt == XmlNodeType.Entity || nt == XmlNodeType.EntityReference)
    {
        tr.Read(); //this will skip the first element "wrapper"
        tr.ResolveEntity();
    }
    return tr;
}
#pragma warning restore 618
// Parses the raw text of an xml declaration ("version=... encoding=...
// standalone=...") into its three pseudo-attributes; each out parameter is null
// when the corresponding pseudo-attribute is absent.
internal static void ParseXmlDeclarationValue(string strValue, out string version, out string encoding, out string standalone)
{
    version = null;
    encoding = null;
    standalone = null;
    // Use a throwaway reader over the declaration text to expose the
    // pseudo-attributes as ordinary attributes.
    XmlTextReaderImpl tempreader = new XmlTextReaderImpl(strValue, (XmlParserContext)null);
    try
    {
        tempreader.Read();
        //get version info.
        if (tempreader.MoveToAttribute("version"))
            version = tempreader.Value;
        //get encoding info
        if (tempreader.MoveToAttribute("encoding"))
            encoding = tempreader.Value;
        //get standalone info
        if (tempreader.MoveToAttribute("standalone"))
            standalone = tempreader.Value;
    }
    finally
    {
        tempreader.Close();
    }
}
/// <summary>
/// Creates the exception reported when a reader encounters a node type it cannot process.
/// </summary>
/// <param name="nodetype">The unexpected <see cref="XmlNodeType"/>.</param>
/// <returns>An <see cref="InvalidOperationException"/> describing the unexpected node type.</returns>
internal static Exception UnexpectedNodeType(XmlNodeType nodetype)
{
    // InvariantCulture: this is diagnostic text, not UI text.
    return new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, ResXml.Xml_UnexpectedNodeType, nodetype.ToString()));
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Diagnostics.Contracts;
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
namespace System.ServiceModel.Channels
{
/// <summary>
/// This class is based on BufferedStream from the Desktop version of .Net. Only the write functionality
/// is needed by WCF so the read capability has been removed. This allowed some extra logic to be removed
/// from the write code path. Also some validation code has been removed as this class is no longer
/// general purpose and is only used in pre-known scenarios and only called by WCF code. Some validation
/// checks have been converted to only run on a debug build to allow catching code bugs in other WCF code,
/// but not causing release build overhead.
///
/// One of the design goals here is to prevent the buffer from getting in the way and slowing
/// down underlying stream accesses when it is not needed.
/// See a large comment in Write for the details of the write buffer heuristic.
///
/// This class will never cache more bytes than the max specified buffer size.
/// However, it may use a temporary buffer of up to twice the size in order to combine several IO operations on
/// the underlying stream into a single operation. This is because we assume that memory copies are significantly
/// faster than IO operations on the underlying stream (if this was not true, using buffering is never appropriate).
/// The max size of this "shadow" buffer is limited as to not allocate it on the LOH.
/// Shadowing is always transient. Even when using this technique, this class still guarantees that the number of
/// bytes cached (not yet written to the target stream or not yet consumed by the user) is never larger than the
/// actual specified buffer size.
/// </summary>
internal sealed class BufferedWriteStream : Stream
{
    public const int DefaultBufferSize = 4096;

    private Stream _stream;                 // Underlying stream. Close sets _stream to null.
    private byte[] _buffer;                 // Write buffer.
    private readonly int _bufferSize;       // Length of internal buffer (not counting the shadow buffer).
    private int _writePos;                  // Write pointer within buffer.
    private readonly SemaphoreSlim _sem = new SemaphoreSlim(1, 1);

    public BufferedWriteStream(Stream stream) : this(stream, DefaultBufferSize) { }

    public BufferedWriteStream(Stream stream, int bufferSize)
    {
        // BUGFIX: was "stream!=Null" — "Null" is not a valid C# identifier here.
        Contract.Assert(stream != null, "stream != null");
        Contract.Assert(bufferSize > 0, "bufferSize>0");
        Contract.Assert(stream.CanWrite);
        _stream = stream;
        _bufferSize = bufferSize;
        EnsureBufferAllocated();
    }

    /// <summary>Throws <see cref="ObjectDisposedException"/> if this stream has been disposed.</summary>
    private void EnsureNotClosed()
    {
        if (_stream == null)
            throw new ObjectDisposedException("BufferedWriteStream");
    }

    /// <summary>Throws <see cref="NotSupportedException"/> if the underlying stream is not writable.</summary>
    private void EnsureCanWrite()
    {
        Contract.Requires(_stream != null);
        if (!_stream.CanWrite)
            throw new NotSupportedException("write");
    }

    /// <summary><code>MaxShadowBufferSize</code> is chosen such that shadow buffers are not allocated on the Large Object Heap.
    /// Currently, an object is allocated on the LOH if it is larger than 85000 bytes.
    /// We will go with exactly 80 KBytes, although this is somewhat arbitrary.</summary>
    private const int MaxShadowBufferSize = 81920; // Make sure not to get to the Large Object Heap.

    /// <summary>
    /// Grows <c>_buffer</c> into a transient "shadow" buffer of up to twice the configured
    /// size (capped at <see cref="MaxShadowBufferSize"/>), preserving the buffered bytes,
    /// so that buffered data and incoming user data can be written in a single operation.
    /// </summary>
    private void EnsureShadowBufferAllocated()
    {
        Contract.Assert(_buffer != null);
        Contract.Assert(_bufferSize > 0);
        // Already have shadow buffer, or shadowing would not help (buffer at cap)?
        if (_buffer.Length != _bufferSize || _bufferSize >= MaxShadowBufferSize)
        {
            return;
        }
        byte[] shadowBuffer = new byte[Math.Min(_bufferSize + _bufferSize, MaxShadowBufferSize)];
        Array.Copy(_buffer, 0, shadowBuffer, 0, _writePos);
        _buffer = shadowBuffer;
    }

    /// <summary>Lazily allocates the write buffer.</summary>
    private void EnsureBufferAllocated()
    {
        if (_buffer == null)
            _buffer = new byte[_bufferSize];
    }

    public override bool CanRead
    {
        get { return false; }
    }

    public override bool CanWrite
    {
        get { return _stream != null && _stream.CanWrite; }
    }

    public override bool CanSeek
    {
        get { return false; }
    }

    public override long Length
    {
        get { throw new NotSupportedException("Position"); }
    }

    public override long Position
    {
        get { throw new NotSupportedException("Position"); }
        set { throw new NotSupportedException("Position"); }
    }

    protected override void Dispose(bool disposing)
    {
        try
        {
            if (disposing && _stream != null)
            {
                try
                {
                    // Push any buffered bytes down before tearing down the stream.
                    Flush();
                }
                finally
                {
                    _stream.Dispose();
                }
            }
        }
        finally
        {
            _stream = null;
            _buffer = null;
            // Call base.Dispose(bool) to cleanup async IO resources
            base.Dispose(disposing);
        }
    }

    public override void Flush()
    {
        EnsureNotClosed();
        // Has WRITE data in the buffer:
        if (_writePos > 0)
        {
            FlushWrite();
            Contract.Assert(_writePos == 0);
            return;
        }
        // We had no data in the buffer, but we still need to tell the underlying stream to flush.
        _stream.Flush();
    }

    public override Task FlushAsync(CancellationToken cancellationToken)
    {
        if (cancellationToken.IsCancellationRequested)
        {
            // Non-generic FromCanceled: this method returns a plain Task.
            return Task.FromCanceled(cancellationToken);
        }
        EnsureNotClosed();
        return FlushAsyncInternal(cancellationToken);
    }

    private async Task FlushAsyncInternal(CancellationToken cancellationToken)
    {
        // NOTE(review): the semaphore wait deliberately does not observe
        // cancellationToken (matches the original behaviour) — confirm intent.
        await _sem.WaitAsync().ConfigureAwait(false);
        try
        {
            if (_writePos > 0)
            {
                await FlushWriteAsync(cancellationToken).ConfigureAwait(false);
                Contract.Assert(_writePos == 0);
                return;
            }
            // We had no data in the buffer, but we still need to tell the underlying stream to flush.
            await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
            // There was nothing in the buffer:
            Contract.Assert(_writePos == 0);
        }
        finally
        {
            _sem.Release();
        }
    }

    /// <summary>Writes buffered bytes to the underlying stream and flushes it.</summary>
    private void FlushWrite()
    {
        Contract.Assert(_buffer != null && _bufferSize >= _writePos,
            "BufferedWriteStream: Write buffer must be allocated and write position must be in the bounds of the buffer in FlushWrite!");
        _stream.Write(_buffer, 0, _writePos);
        _writePos = 0;
        _stream.Flush();
    }

    /// <summary>Async counterpart of <see cref="FlushWrite"/>; caller must hold the semaphore.</summary>
    private async Task FlushWriteAsync(CancellationToken cancellationToken)
    {
        Contract.Assert(_buffer != null && _bufferSize >= _writePos,
            "BufferedWriteStream: Write buffer must be allocated and write position must be in the bounds of the buffer in FlushWrite!");
        await _stream.WriteAsync(_buffer, 0, _writePos, cancellationToken).ConfigureAwait(false);
        _writePos = 0;
        await _stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Read support was removed from this class (write-only stream); all read
    // entry points fail fast.
    public override int Read([In, Out] byte[] array, int offset, int count)
    {
        throw new NotSupportedException("Read");
    }

    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        throw new NotSupportedException("ReadAsync");
    }

    public override int ReadByte()
    {
        throw new NotSupportedException("ReadByte");
    }

    /// <summary>
    /// Copies as much of <paramref name="array"/> into the buffer as fits, advancing
    /// <paramref name="offset"/> and decrementing <paramref name="count"/> accordingly.
    /// </summary>
    private void WriteToBuffer(byte[] array, ref int offset, ref int count)
    {
        int bytesToWrite = Math.Min(_bufferSize - _writePos, count);
        if (bytesToWrite <= 0)
        {
            return;
        }
        Array.Copy(array, offset, _buffer, _writePos, bytesToWrite);
        _writePos += bytesToWrite;
        count -= bytesToWrite;
        offset += bytesToWrite;
    }

    /// <summary>Exception-capturing variant used by the async fast path (cannot throw across it).</summary>
    private void WriteToBuffer(byte[] array, ref int offset, ref int count, out Exception error)
    {
        try
        {
            error = null;
            WriteToBuffer(array, ref offset, ref count);
        }
        catch (Exception ex)
        {
            error = ex;
        }
    }

    public override void Write(byte[] array, int offset, int count)
    {
        Contract.Assert(array != null);
        Contract.Assert(offset >= 0);
        Contract.Assert(count >= 0);
        Contract.Assert(count <= array.Length - offset);
        EnsureNotClosed();
        EnsureCanWrite();
        #region Write algorithm comment
        // We need to use the buffer, while avoiding unnecessary buffer usage / memory copies.
        // We ASSUME that memory copies are much cheaper than writes to the underlying stream, so if an extra copy is
        // guaranteed to reduce the number of writes, we prefer it.
        // We pick a simple strategy that makes degenerate cases rare if our assumptions are right.
        //
        // For every write, we use a simple heuristic (below) to decide whether to use the buffer.
        // The heuristic has the desirable property (*) that if the specified user data can fit into the currently available
        // buffer space without filling it up completely, the heuristic will always tell us to use the buffer. It will also
        // tell us to use the buffer in cases where the current write would fill the buffer, but the remaining data is small
        // enough such that subsequent operations can use the buffer again.
        //
        // Algorithm:
        // Determine whether or not to buffer according to the heuristic (below).
        // If we decided to use the buffer:
        //     Copy as much user data as we can into the buffer.
        //     If we consumed all data: We are finished.
        //     Otherwise, write the buffer out.
        //     Copy the rest of user data into the now cleared buffer (no need to write out the buffer again as the heuristic
        //     will prevent it from being filled twice).
        // If we decided not to use the buffer:
        //     Can the data already in the buffer and current user data be combines to a single write
        //     by allocating a "shadow" buffer of up to twice the size of _bufferSize (up to a limit to avoid LOH)?
        //     Yes, it can:
        //         Allocate a larger "shadow" buffer and ensure the buffered data is moved there.
        //         Copy user data to the shadow buffer.
        //         Write shadow buffer to the underlying stream in a single operation.
        //     No, it cannot (amount of data is still too large):
        //         Write out any data possibly in the buffer.
        //         Write out user data directly.
        //
        // Heuristic:
        // If the subsequent write operation that follows the current write operation will result in a write to the
        // underlying stream in case that we use the buffer in the current write, while it would not have if we avoided
        // using the buffer in the current write (by writing current user data to the underlying stream directly), then we
        // prefer to avoid using the buffer since the corresponding memory copy is wasted (it will not reduce the number
        // of writes to the underlying stream, which is what we are optimising for).
        // ASSUME that the next write will be for the same amount of bytes as the current write (most common case) and
        // determine if it will cause a write to the underlying stream. If the next write is actually larger, our heuristic
        // still yields the right behaviour, if the next write is actually smaller, we may making an unnecessary write to
        // the underlying stream. However, this can only occur if the current write is larger than half the buffer size and
        // we will recover after one iteration.
        // We have:
        //     useBuffer = (_writePos + count + count < _bufferSize + _bufferSize)
        //
        // Example with _bufferSize = 20, _writePos = 6, count = 10:
        //
        //     +---------------------------------------+---------------------------------------+
        //     |             current buffer            | next iteration's "future" buffer      |
        //     +---------------------------------------+---------------------------------------+
        //     |0| | | | | | | | | |1| | | | | | | | | |2| | | | | | | | | |3| | | | | | | | | |
        //     |0|1|2|3|4|5|6|7|8|9|0|1|2|3|4|5|6|7|8|9|0|1|2|3|4|5|6|7|8|9|0|1|2|3|4|5|6|7|8|9|
        //     +-----------+-------------------+-------------------+---------------------------+
        //     | _writePos | current count     | assumed next count|avail buff after next write|
        //     +-----------+-------------------+-------------------+---------------------------+
        //
        // A nice property (*) of this heuristic is that it will always succeed if the user data completely fits into the
        // available buffer, i.e. if count < (_bufferSize - _writePos).
        #endregion Write algorithm comment
        Contract.Assert(_writePos < _bufferSize);
        int totalUserBytes;
        bool useBuffer;
        checked
        {
            // We do not expect buffer sizes big enough for an overflow, but if it happens, lets fail early:
            totalUserBytes = _writePos + count;
            useBuffer = (totalUserBytes + count < (_bufferSize + _bufferSize));
        }
        if (useBuffer)
        {
            WriteToBuffer(array, ref offset, ref count);
            if (_writePos < _bufferSize)
            {
                Contract.Assert(count == 0);
                return;
            }
            Contract.Assert(count >= 0);
            Contract.Assert(_writePos == _bufferSize);
            Contract.Assert(_buffer != null);
            _stream.Write(_buffer, 0, _writePos);
            _writePos = 0;
            WriteToBuffer(array, ref offset, ref count);
            Contract.Assert(count == 0);
            Contract.Assert(_writePos < _bufferSize);
        }
        else
        {
            // if (!useBuffer)
            // Write out the buffer if necessary.
            if (_writePos > 0)
            {
                Contract.Assert(_buffer != null);
                Contract.Assert(totalUserBytes >= _bufferSize);
                // Try avoiding extra write to underlying stream by combining previously buffered data with current user data:
                if (totalUserBytes <= (_bufferSize + _bufferSize) && totalUserBytes <= MaxShadowBufferSize)
                {
                    EnsureShadowBufferAllocated();
                    Array.Copy(array, offset, _buffer, _writePos, count);
                    _stream.Write(_buffer, 0, totalUserBytes);
                    _writePos = 0;
                    return;
                }
                _stream.Write(_buffer, 0, _writePos);
                _writePos = 0;
            }
            // Write out user data.
            _stream.Write(array, offset, count);
        }
    }

    public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        Contract.Assert(buffer != null);
        Contract.Assert(offset >= 0);
        Contract.Assert(count >= 0);
        Contract.Assert(count <= buffer.Length - offset);
        // Fast path check for cancellation already requested
        if (cancellationToken.IsCancellationRequested)
            return Task.FromCanceled(cancellationToken); // non-generic: this method returns a plain Task
        EnsureNotClosed();
        EnsureCanWrite();
        // Try to satisfy the request from the buffer synchronously. But still need a sem-lock in case that another
        // Async IO Task accesses the buffer concurrently. If we fail to acquire the lock without waiting, make this
        // an Async operation.
        Task semaphoreLockTask = _sem.WaitAsync();
        if (semaphoreLockTask.Status == TaskStatus.RanToCompletion)
        {
            bool completeSynchronously = true;
            try
            {
                Contract.Assert(_writePos < _bufferSize);
                // If the write completely fits into the buffer, we can complete synchronously:
                completeSynchronously = (count < _bufferSize - _writePos);
                if (completeSynchronously)
                {
                    Exception error;
                    WriteToBuffer(buffer, ref offset, ref count, out error);
                    Contract.Assert(count == 0);
                    return (error == null)
                        ? Task.CompletedTask
                        : Task.FromException(error);
                }
            }
            finally
            {
                if (completeSynchronously)
                {
                    // if this is FALSE, we will be entering WriteToUnderlyingStreamAsync and releasing there.
                    _sem.Release();
                }
            }
        }
        // Delegate to the async implementation.
        return WriteToUnderlyingStreamAsync(buffer, offset, count, cancellationToken, semaphoreLockTask);
    }

    private async Task WriteToUnderlyingStreamAsync(byte[] array, int offset, int count,
                                                    CancellationToken cancellationToken,
                                                    Task semaphoreLockTask)
    {
        // (These should be Contract.Requires(..) but that method had some issues in async methods; using Assert(..) for now.)
        EnsureNotClosed();
        EnsureCanWrite();
        // See the LARGE COMMENT in Write(..) for the explanation of the write buffer algorithm.
        await semaphoreLockTask.ConfigureAwait(false);
        try
        {
            // The buffer might have been changed by another async task while we were waiting on the semaphore.
            // However, note that if we recalculate the sync completion condition to TRUE, then useBuffer will also be TRUE.
            int totalUserBytes;
            bool useBuffer;
            checked
            {
                // We do not expect buffer sizes big enough for an overflow, but if it happens, lets fail early:
                totalUserBytes = _writePos + count;
                useBuffer = (totalUserBytes + count < (_bufferSize + _bufferSize));
            }
            if (useBuffer)
            {
                WriteToBuffer(array, ref offset, ref count);
                if (_writePos < _bufferSize)
                {
                    Contract.Assert(count == 0);
                    return;
                }
                Contract.Assert(count >= 0);
                Contract.Assert(_writePos == _bufferSize);
                Contract.Assert(_buffer != null);
                await _stream.WriteAsync(_buffer, 0, _writePos, cancellationToken).ConfigureAwait(false);
                _writePos = 0;
                WriteToBuffer(array, ref offset, ref count);
                Contract.Assert(count == 0);
                Contract.Assert(_writePos < _bufferSize);
            }
            else
            {
                // if (!useBuffer)
                // Write out the buffer if necessary.
                if (_writePos > 0)
                {
                    Contract.Assert(_buffer != null);
                    Contract.Assert(totalUserBytes >= _bufferSize);
                    // Try avoiding extra write to underlying stream by combining previously buffered data with current user data:
                    if (totalUserBytes <= (_bufferSize + _bufferSize) && totalUserBytes <= MaxShadowBufferSize)
                    {
                        EnsureShadowBufferAllocated();
                        Buffer.BlockCopy(array, offset, _buffer, _writePos, count);
                        await _stream.WriteAsync(_buffer, 0, totalUserBytes, cancellationToken).ConfigureAwait(false);
                        _writePos = 0;
                        return;
                    }
                    await _stream.WriteAsync(_buffer, 0, _writePos, cancellationToken).ConfigureAwait(false);
                    _writePos = 0;
                }
                // Write out user data.
                await _stream.WriteAsync(array, offset, count, cancellationToken).ConfigureAwait(false);
            }
        }
        finally
        {
            _sem.Release();
        }
    }

    public override void WriteByte(byte value)
    {
        EnsureNotClosed();
        // We should not be flushing here, but only writing to the underlying stream, but previous version flushed, so we keep this.
        if (_writePos >= _bufferSize - 1)
        {
            FlushWrite();
        }
        _buffer[_writePos++] = value;
        Contract.Assert(_writePos < _bufferSize);
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        throw new NotSupportedException("seek");
    }

    public override void SetLength(long value)
    {
        throw new NotSupportedException("SetLength");
    }
}
}
| |
/*
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License. See License.txt in the project root for license information.
*/
namespace Adxstudio.Xrm.ContentAccess
{
using System;
using System.Collections.Generic;
using System.Linq;
using Adxstudio.Xrm.Cms;
using Adxstudio.Xrm.Resources;
using Adxstudio.Xrm.Security;
using Adxstudio.Xrm.Services.Query;
using Microsoft.Xrm.Portal.Configuration;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Client;
using Microsoft.Xrm.Sdk.Query;
using Adxstudio.Xrm.Web.UI;
using Microsoft.Xrm.Portal;
using Microsoft.Xrm.Sdk.Metadata;
/// <summary>
/// Implementation of <see cref="ProductAccessProvider"/>. Provides filtering based on Product associations.
/// </summary>
public sealed class ProductAccessProvider : ContentAccessProvider
{
#region Private Members
/// <summary>
/// DisplayArticlesWithoutAssociatedProducts Site Setting Name
/// </summary>
private const string DisplayArticlesWithoutAssociatedProductsSiteSettingName = "ProductFiltering/DisplayArticlesWithoutAssociatedProducts";
/// <summary>
/// ContactToProductRelationshipNames Site Setting Name
/// </summary>
private const string ContactToProductRelationshipNames = "ProductFiltering/ContactToProductRelationshipNames";
/// <summary>
/// AccountToProductRelationshipNames Site Setting Name
/// </summary>
private const string AccountToProductRelationshipNames = "ProductFiltering/AccountToProductRelationshipNames";
/// <summary>
/// Fallback relationship name that maps from Account to Product for Portals that don't have the new Site Setting data
/// </summary>
private const string AccountToProductFallbackRelationshipName = "adx_accountproduct";
/// <summary>
/// Fallback relationship name that maps from Contact to Product for Portals that don't have the new Site Setting data
/// </summary>
private const string ContactToProductFallbackRelationshipName = "adx_contactproduct";
/// <summary>
/// Dictionary of semicolon delimited relationship name strings
/// </summary>
private readonly Dictionary<string, string> relationshipNamesDictionary;
/// <summary>
/// Dictionary of relationship metadata that defines relationship attributes
/// </summary>
private Dictionary<string, ProductAccessProvider.RelationshipMetadata> relationshipMetadataDictionary;
#endregion
#region Constructors
/// <summary>
/// Initializes a new instance of the <see cref="ProductAccessProvider"/> class
/// using the default product-filtering configuration.
/// </summary>
public ProductAccessProvider()
    : this(ContentAccessConfiguration.DefaultProductFilteringConfiguration())
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ProductAccessProvider"/> class.
/// </summary>
/// <param name="configuration">Configuration for FetchXML attributes</param>
public ProductAccessProvider(ContentAccessConfiguration configuration)
    : base(configuration)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ProductAccessProvider"/> class
/// with the default product-filtering configuration.
/// </summary>
/// <param name="portalContext">Portal context supplying the current user and organization services</param>
public ProductAccessProvider(IPortalContext portalContext)
    : base(ContentAccessConfiguration.DefaultProductFilteringConfiguration(), portalContext)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ProductAccessProvider"/> class.
/// </summary>
/// <param name="portalContext">Portal context supplying the current user and organization services</param>
/// <param name="relationshipNamesDictionary">Dictionary of semicolon delimited relationship name strings</param>
/// <param name="relationshipMetadataDictionary">Relationship metadata that defines relationship attributes</param>
/// <param name="siteSettingDictionary">Site Setting for Product Filtering</param>
public ProductAccessProvider(IPortalContext portalContext, Dictionary<string, string> relationshipNamesDictionary, Dictionary<string, ProductAccessProvider.RelationshipMetadata> relationshipMetadataDictionary, Dictionary<string, string> siteSettingDictionary)
    : base(ContentAccessConfiguration.DefaultProductFilteringConfiguration(), portalContext, siteSettingDictionary)
{
    this.relationshipNamesDictionary = relationshipNamesDictionary;
    this.relationshipMetadataDictionary = relationshipMetadataDictionary;
}
#endregion
#region Public Methods
/// <summary>
/// Applies Product-based record-level filtering to an existing FetchXML query.
/// Only Read requests against the configured source entity (Knowledge Article)
/// are filtered, and only when Product Filtering is enabled.
/// </summary>
/// <param name="right">Current Permission Right</param>
/// <param name="fetchIn">FetchXML to modify</param>
public override void TryApplyRecordLevelFiltersToFetch(CrmEntityPermissionRight right, Fetch fetchIn)
{
    // Both gates must pass: entity/right match first, then the feature switch.
    bool filterApplies =
        this.IsRightEntityAndPermissionRight(right, fetchIn, this.Config.SourceEntityName, CrmEntityPermissionRight.Read)
        && this.IsEnabled();
    if (!filterApplies)
    {
        return;
    }
    // Constrain the query to the Products visible to the current user.
    this.TryRecordLevelFiltersToFetch(fetchIn, this.GetProducts());
}
/// <summary>
/// Retrieves the list of Products available to the current user
/// </summary>
/// <returns>IDs of the active Products associated to the current Contact, its parent
/// Contact, or its parent Account; empty list for anonymous users.</returns>
public List<Guid> GetProducts()
{
    // If anonymous user, return nothing
    if (this.CurrentUserEntityReference == null)
    {
        return Enumerable.Empty<Guid>().ToList();
    }
    // Distinct query for product IDs; association links and filters are attached below.
    var productFetch = new Fetch
    {
        Distinct = true,
        Entity = new FetchEntity
        {
            Name = "product",
            Attributes = new List<FetchAttribute>
            {
                new FetchAttribute("productid")
            },
            Filters = new List<Filter>()
        }
    };
    // OR filter: a product qualifies if it is associated to either the Contact or the Account.
    var associatedToAccountOrContactFilter = new Filter
    {
        Type = LogicalOperator.Or,
        Conditions = new List<Condition>(),
        Filters = new List<Filter>()
    };
    // Get alias generator instance to maintain alias names consistency
    // via postfix incrementation
    var linkEntityAliasGenerator = LinkEntityAliasGenerator.CreateInstance();
    // Retrieve Contact to Product relationships and build Entity Permission links
    var contactToProductRelationshipNamesCollection =
        this.GetDelimitedSiteSettingValueCollection(ContactToProductRelationshipNames, ContactToProductFallbackRelationshipName);
    var contactLink = this.BuildLinksAndFilterChain(
        contactToProductRelationshipNamesCollection, productFetch, associatedToAccountOrContactFilter,
        this.CurrentUserEntityReference, null, OwningCustomerType.Contact, linkEntityAliasGenerator);
    productFetch.AddLink(contactLink);
    if (this.ParentCustomerEntityReference != null && this.ParentCustomerEntityReference.LogicalName == "contact")
    {
        // Retrieve parent Contact to Product relationships and build Entity Permission links
        var parentContactLink = this.BuildLinksAndFilterChain(
            contactToProductRelationshipNamesCollection, productFetch, associatedToAccountOrContactFilter,
            this.ParentCustomerEntityReference, null, OwningCustomerType.Contact, linkEntityAliasGenerator);
        productFetch.AddLink(parentContactLink);
    }
    else if (this.ParentCustomerEntityReference != null && this.ParentCustomerEntityReference.LogicalName == "account")
    {
        // Retrieve Account to Product relationships and build Entity Permission links
        var accountToProductRelationshipNamesCollection =
            this.GetDelimitedSiteSettingValueCollection(AccountToProductRelationshipNames, AccountToProductFallbackRelationshipName);
        var accountLink = this.BuildLinksAndFilterChain(
            accountToProductRelationshipNamesCollection, productFetch, associatedToAccountOrContactFilter,
            null, this.ParentCustomerEntityReference, OwningCustomerType.Account, linkEntityAliasGenerator);
        productFetch.AddLink(accountLink);
    }
    // Mirror each association condition as a NotNull variant — presumably to require
    // that at least one linked Contact/Account row exists under the outer joins; confirm.
    var accountOrContactNotNullFilter = new Filter
    {
        Type = LogicalOperator.Or,
        Conditions =
            associatedToAccountOrContactFilter.Conditions.Select(
                condition =>
                new Condition
                {
                    EntityName = condition.EntityName,
                    Attribute = condition.Attribute,
                    Operator = ConditionOperator.NotNull
                }).ToList()
    };
    // This is the AND Filter that will ensure state is Active and the Product is joined to either Contact or Account
    productFetch.AddFilter(new Filter
    {
        Type = LogicalOperator.And,
        Conditions = new List<Condition>
        {
            new Condition("statecode", ConditionOperator.Equal, 0)
        },
        Filters = new List<Filter>
        {
            accountOrContactNotNullFilter,
            associatedToAccountOrContactFilter,
        }
    });
    var productsCollection = productFetch.Execute(this.Portal.ServiceContext as IOrganizationService);
    return productsCollection.Entities.Select(x => x.Id).ToList();
}
/// <summary>
/// Indicates whether the site setting that allows users to see articles with no
/// associated products is enabled.
/// </summary>
/// <returns><c>true</c> when the setting is enabled; otherwise <c>false</c>.</returns>
public bool DisplayArticlesWithoutAssociatedProductsEnabled()
{
    // Thin wrapper over the generic site-setting lookup.
    return this.IsSiteSettingEnabled(DisplayArticlesWithoutAssociatedProductsSiteSettingName);
}
/// <summary>
/// Specifies relationship type between two entities
/// </summary>
public enum RelationshipType
{
    /// <summary>One-to-many (1:N) relationship.</summary>
    OneToManyRelationship,
    /// <summary>Many-to-one (N:1) relationship.</summary>
    ManyToOneRelationship,
    /// <summary>Many-to-many (N:N) relationship, joined through an intersect entity.</summary>
    ManyToManyRelationship
}
/// <summary>
/// Specifies relationship metadata between two entities
/// </summary>
public class RelationshipMetadata
{
    /// <summary>
    /// Name of Intersect Entity (used for many-to-many relationships)
    /// </summary>
    public string IntersectEntityName { get; set; }
    /// <summary>
    /// Entity 1 Logical Name
    /// </summary>
    public string Entity1LogicalName { get; set; }
    /// <summary>
    /// Entity 2 Logical Name
    /// </summary>
    public string Entity2LogicalName { get; set; }
    /// <summary>
    /// Entity 1 Intersect Attribute
    /// </summary>
    public string Entity1IntersectAttribute { get; set; }
    /// <summary>
    /// Entity 2 Intersect Attribute
    /// </summary>
    public string Entity2IntersectAttribute { get; set; }
    /// <summary>
    /// Relationship Type
    /// </summary>
    public RelationshipType RelationshipType { get; set; }
    /// <summary>
    /// Referencing Attribute (used for one-to-many / many-to-one relationships)
    /// </summary>
    public string ReferencingAttribute { get; set; }
    /// <summary>
    /// Referenced Attribute (used for one-to-many / many-to-one relationships)
    /// </summary>
    public string ReferencedAttribute { get; set; }
    /// <summary>
    /// Referenced Entity
    /// </summary>
    public string ReferencedEntity { get; set; }
    /// <summary>
    /// Entity 1 Primary Id Attribute
    /// </summary>
    public string Entity1PrimaryIdAttribute { get; set; }
    /// <summary>
    /// Entity 2 Primary Id Attribute
    /// </summary>
    public string Entity2PrimaryIdAttribute { get; set; }
}
#endregion
#region Helper Methods
/// <summary>
/// Descends the chain of first child links to the deepest link and returns it if it
/// is still unnamed; otherwise appends a brand-new empty link beneath it and returns that.
/// </summary>
/// <param name="rootLink">Root link to descend from</param>
/// <returns>Empty innermost link</returns>
private static Link GetInnermostLink(Link rootLink)
{
    // Walk first children until a link without children is reached.
    var deepest = rootLink;
    while (deepest.Links != null && deepest.Links.Any())
    {
        deepest = deepest.Links.First();
    }
    // An unnamed leaf can be used directly.
    if (string.IsNullOrWhiteSpace(deepest.Name))
    {
        return deepest;
    }
    // The leaf is already populated; hang a fresh empty link off it.
    var emptyLink = new Link();
    deepest.Links = new List<Link>
    {
        emptyLink
    };
    return emptyLink;
}
/// <summary>
/// Modify a fetch and add necessary link entity elements and filter conditions to satisfy record level security trimming based on the relationship definitions.
/// </summary>
/// <param name="serviceContext"><see cref="OrganizationServiceContext"/> to use</param>
/// <param name="relationshipMetadata">Relationship metadata that defines relationship attributes</param>
/// <param name="linkDetails"><see cref="ContentAccessProvider.LinkDetails"/> to use</param>
/// <param name="fetch">Fetch to modify</param>
/// <param name="link">Link to construct</param>
/// <param name="filter">Filter to construct</param>
/// <param name="contact">Associated Contact</param>
/// <param name="account">Associated Account</param>
/// <param name="addCondition">Construct Account/Contact relationship filter</param>
/// <param name="linkEntityAliasGenerator">LinkEntityAliasGenerator to track and create Aliases</param>
private static void BuildLinksAndFilter(OrganizationServiceContext serviceContext, ProductAccessProvider.RelationshipMetadata relationshipMetadata, LinkDetails linkDetails, Fetch fetch, Link link, Filter filter, EntityReference contact, EntityReference account, bool addCondition, LinkEntityAliasGenerator linkEntityAliasGenerator)
{
    // Unique alias for the target entity; reused below for the filter condition.
    var alias = linkEntityAliasGenerator.CreateUniqueAlias(relationshipMetadata.Entity2LogicalName);
    Link newLink = null;
    if (relationshipMetadata.RelationshipType == ProductAccessProvider.RelationshipType.ManyToManyRelationship)
    {
        // N:N — join through the intersect entity, then out to the target entity.
        var intersectLinkEntityName = relationshipMetadata.IntersectEntityName;
        string linkTargetFromAttribute;
        string linkTargetToAttribute;
        string linkIntersectFromAttribute;
        string linkIntersectToAttribute;
        if (relationshipMetadata.Entity1LogicalName == relationshipMetadata.Entity2LogicalName)
        {
            // Self-referential N:N: both sides are the same entity, so the
            // primary-id attribute disambiguates the two intersect columns.
            linkIntersectFromAttribute = relationshipMetadata.Entity2IntersectAttribute;
            linkIntersectToAttribute = relationshipMetadata.Entity1PrimaryIdAttribute;
            linkTargetFromAttribute = relationshipMetadata.Entity1PrimaryIdAttribute;
            linkTargetToAttribute = relationshipMetadata.Entity1IntersectAttribute;
        }
        else
        {
            // Pick the intersect attribute matching whichever side of the
            // relationship each end of the link chain is on.
            linkIntersectFromAttribute =
                linkIntersectToAttribute = relationshipMetadata.Entity1LogicalName == linkDetails.Entity1Name
                    ? relationshipMetadata.Entity1IntersectAttribute
                    : relationshipMetadata.Entity2IntersectAttribute;
            linkTargetFromAttribute =
                linkTargetToAttribute = relationshipMetadata.Entity2LogicalName == linkDetails.Entity2Name
                    ? relationshipMetadata.Entity2IntersectAttribute
                    : relationshipMetadata.Entity1IntersectAttribute;
        }
        newLink = new Link
        {
            Name = intersectLinkEntityName,
            FromAttribute = linkIntersectFromAttribute,
            ToAttribute = linkIntersectToAttribute,
            Intersect = true,
            Visible = false,
            Type = JoinOperator.LeftOuter,
            Links = new List<Link>
            {
                new Link
                {
                    Name = relationshipMetadata.Entity2LogicalName,
                    FromAttribute = linkTargetFromAttribute,
                    ToAttribute = linkTargetToAttribute,
                    Alias = alias,
                    Type = JoinOperator.LeftOuter
                }
            }
        };
    }
    else if (relationshipMetadata.RelationshipType == ProductAccessProvider.RelationshipType.ManyToOneRelationship)
    {
        // N:1 — direct join; attribute orientation depends on which side is referenced.
        var linkFromAttribute = relationshipMetadata.ReferencedEntity == relationshipMetadata.Entity2LogicalName
            ? relationshipMetadata.ReferencedAttribute
            : relationshipMetadata.ReferencingAttribute;
        var linkToAttribute = relationshipMetadata.ReferencedEntity == relationshipMetadata.Entity2LogicalName
            ? relationshipMetadata.ReferencingAttribute
            : relationshipMetadata.ReferencedAttribute;
        newLink = new Link
        {
            Name = relationshipMetadata.Entity2LogicalName,
            FromAttribute = linkFromAttribute,
            ToAttribute = linkToAttribute,
            Type = JoinOperator.LeftOuter,
            Alias = alias
        };
    }
    else if (relationshipMetadata.RelationshipType == ProductAccessProvider.RelationshipType.OneToManyRelationship)
    {
        // 1:N — same construction as N:1; orientation is resolved identically.
        var linkFromAttribute = relationshipMetadata.ReferencedEntity == relationshipMetadata.Entity2LogicalName
            ? relationshipMetadata.ReferencedAttribute
            : relationshipMetadata.ReferencingAttribute;
        var linkToAttribute = relationshipMetadata.ReferencedEntity == relationshipMetadata.Entity2LogicalName
            ? relationshipMetadata.ReferencingAttribute
            : relationshipMetadata.ReferencedAttribute;
        newLink = new Link
        {
            Name = relationshipMetadata.Entity2LogicalName,
            FromAttribute = linkFromAttribute,
            ToAttribute = linkToAttribute,
            Type = JoinOperator.LeftOuter,
            Alias = alias
        };
    }
    else
    {
        throw new ApplicationException(string.Format("Retrieve relationship request failed for relationship name {0}", linkDetails.RelationshipName));
    }
    ContentAccessProvider.AddLink(link, newLink);
    if (addCondition) // Only add the condition if we are at the end of the chain
    {
        var condition = new Condition { Attribute = relationshipMetadata.Entity2PrimaryIdAttribute };
        if (linkDetails.Scope.HasValue && linkDetails.Scope.Value == OwningCustomerType.Contact)
        {
            // Pin the chain's terminal entity to the supplied Contact.
            condition.EntityName = alias;
            condition.Operator = ConditionOperator.Equal;
            condition.Value = contact.Id;
        }
        else if (linkDetails.Scope.HasValue && linkDetails.Scope.Value == OwningCustomerType.Account)
        {
            // Pin the chain's terminal entity to the supplied Account.
            condition.EntityName = alias;
            condition.Operator = ConditionOperator.Equal;
            condition.Value = account.Id;
        }
        else
        {
            // No scope: merely require that the linked row exists.
            condition.EntityName = alias;
            condition.Operator = ConditionOperator.NotNull;
        }
        filter.Conditions.Add(condition);
    }
    // Left-outer joins can duplicate product rows; force distinct results.
    fetch.Distinct = true;
}
/// <summary>
/// Adds the link-entity elements and filter conditions required for record level security trimming
/// to the supplied fetch, based on the configured relationship definitions.
/// </summary>
/// <param name="serviceContext"><see cref="OrganizationServiceContext"/> used for metadata retrieval</param>
/// <param name="linkDetails"><see cref="ContentAccessProvider.LinkDetails"/> describing the relationship to traverse</param>
/// <param name="fetch">Fetch to modify</param>
/// <param name="link">Link to construct</param>
/// <param name="filter">Filter to construct</param>
/// <param name="contact">Associated Contact</param>
/// <param name="account">Associated Account</param>
/// <param name="addCondition">Construct Account/Contact relationship filter</param>
/// <param name="linkEntityAliasGenerator">LinkEntityAliasGenerator to track and create Aliases</param>
private void BuildLinksAndFilter(OrganizationServiceContext serviceContext, LinkDetails linkDetails, Fetch fetch, Link link, Filter filter, EntityReference contact, EntityReference account, bool addCondition, LinkEntityAliasGenerator linkEntityAliasGenerator)
{
    // Resolve the relationship metadata for this hop, then delegate the actual fetch mutation
    // to the shared static implementation.
    ProductAccessProvider.BuildLinksAndFilter(
        serviceContext,
        this.BuildRelationshipMetadata(serviceContext, linkDetails),
        linkDetails,
        fetch,
        link,
        filter,
        contact,
        account,
        addCondition,
        linkEntityAliasGenerator);
}
/// <summary>
/// Builds relationship metadata
/// </summary>
/// <param name="serviceContext">Service Context</param>
/// <param name="linkDetails">Link Details</param>
/// <returns>RelationshipMetadata, or null when the relationship name matches no many-to-many, many-to-one or one-to-many relationship on Entity1</returns>
private ProductAccessProvider.RelationshipMetadata BuildRelationshipMetadata(OrganizationServiceContext serviceContext, LinkDetails linkDetails)
{
    // This is used for Mocking: a pre-seeded dictionary bypasses the metadata service lookups.
    if (this.relationshipMetadataDictionary != null &&
        this.relationshipMetadataDictionary.ContainsKey(linkDetails.Entity2Name))
    {
        return this.relationshipMetadataDictionary[linkDetails.Entity2Name];
    }
    // Standard flow: reuse entity1's metadata when both entity names match, avoiding a redundant retrieval.
    var entity1Metadata = GetEntityMetadata(serviceContext, linkDetails.Entity1Name);
    var entity2Metadata = linkDetails.Entity2Name == linkDetails.Entity1Name ? entity1Metadata : GetEntityMetadata(serviceContext, linkDetails.Entity2Name);
    var relationshipMetadata = new ProductAccessProvider.RelationshipMetadata
    {
        Entity1PrimaryIdAttribute = entity1Metadata.PrimaryIdAttribute,
        Entity2PrimaryIdAttribute = entity2Metadata.PrimaryIdAttribute,
        Entity1LogicalName = entity1Metadata.LogicalName,
        Entity2LogicalName = entity2Metadata.LogicalName
    };
    // Many-to-many: Entity1/Entity2 values are swapped relative to the CRM relationship metadata
    // (appears deliberate, to line up with the linkDetails orientation — confirm against callers).
    var relationshipManyToMany = entity1Metadata.ManyToManyRelationships.FirstOrDefault(r => r.SchemaName == linkDetails.RelationshipName);
    if (relationshipManyToMany != null)
    {
        relationshipMetadata.RelationshipType = ProductAccessProvider.RelationshipType.ManyToManyRelationship;
        relationshipMetadata.Entity1LogicalName = relationshipManyToMany.Entity2LogicalName;
        relationshipMetadata.Entity2LogicalName = relationshipManyToMany.Entity1LogicalName;
        relationshipMetadata.Entity1IntersectAttribute = relationshipManyToMany.Entity2IntersectAttribute;
        relationshipMetadata.Entity2IntersectAttribute = relationshipManyToMany.Entity1IntersectAttribute;
        relationshipMetadata.IntersectEntityName = relationshipManyToMany.IntersectEntityName;
        return relationshipMetadata;
    }
    var relationshipManyToOne = entity1Metadata.ManyToOneRelationships.FirstOrDefault(r => r.SchemaName == linkDetails.RelationshipName);
    if (relationshipManyToOne != null)
    {
        relationshipMetadata.RelationshipType = ProductAccessProvider.RelationshipType.ManyToOneRelationship;
        relationshipMetadata.ReferencedEntity = relationshipManyToOne.ReferencedEntity;
        relationshipMetadata.ReferencingAttribute = relationshipManyToOne.ReferencingAttribute;
        relationshipMetadata.ReferencedAttribute = relationshipManyToOne.ReferencedAttribute;
        return relationshipMetadata;
    }
    var relationshipOneToMany = entity1Metadata.OneToManyRelationships.FirstOrDefault(r => r.SchemaName == linkDetails.RelationshipName);
    if (relationshipOneToMany != null)
    {
        relationshipMetadata.RelationshipType = ProductAccessProvider.RelationshipType.OneToManyRelationship;
        relationshipMetadata.ReferencedEntity = relationshipOneToMany.ReferencedEntity;
        relationshipMetadata.ReferencedAttribute = relationshipOneToMany.ReferencedAttribute;
        relationshipMetadata.ReferencingAttribute = relationshipOneToMany.ReferencingAttribute;
        return relationshipMetadata;
    }
    // No matching relationship was found; callers treat null as a failed lookup.
    return null;
}
/// <summary>
/// Retrieves a site setting value as a ';'-delimited collection. Returns the fallback value when the
/// setting is missing or blank, and an empty collection when the setting has more than 2 entries.
/// </summary>
/// <param name="siteSettingName">Site Setting Name</param>
/// <param name="fallbackValue">Value to return if the Site Setting doesn't exist</param>
/// <returns>Array of site setting values</returns>
private string[] GetDelimitedSiteSettingValueCollection(string siteSettingName, string fallbackValue = "")
{
    // Retrieve site setting by name (the dictionary, when populated, overrides the portal lookup — presumably for mocking; verify against tests)
    var customerToProductRelationshipNamesString = this.relationshipNamesDictionary != null && this.relationshipNamesDictionary.ContainsKey(siteSettingName)
        ? this.relationshipNamesDictionary[siteSettingName]
        : this.Portal.ServiceContext.GetSiteSettingValueByName(this.Portal.Website, siteSettingName);
    // If site setting doesn't exist, return fallbackValue
    if (string.IsNullOrWhiteSpace(customerToProductRelationshipNamesString))
    {
        return new[] { fallbackValue };
    }
    // Ensure that the relationship depth is <= 2 (this ensures that only one intersect sits between Product and the Owning Entity (Contact/Account))
    var customerToProductRelationshipNamesCollection = customerToProductRelationshipNamesString.Split(';');
    if (customerToProductRelationshipNamesCollection.Length > 2)
    {
        // This would indicate a customer misconfiguration by specifying too many relationships
        return new string[0];
    }
    return customerToProductRelationshipNamesCollection;
}
/// <summary>
/// Builds the full link-entity chain for the <paramref name="fetch"/>, one hop per configured relationship,
/// appending each hop at the innermost position of the chain.
/// </summary>
/// <param name="customerToProductRelationshipNamesCollection">Collection of relationships that map from Customer to Product</param>
/// <param name="fetch">Fetch used to construct link chain</param>
/// <param name="filter">Filter to inject conditions into</param>
/// <param name="contact">Contact EntityReference</param>
/// <param name="account">Account EntityReference</param>
/// <param name="owningCustomerType">Owning Customer Type</param>
/// <param name="linkEntityAliasGenerator">Single instance to maintain alias postfix incrementation</param>
/// <returns>Root link of the constructed chain</returns>
private Link BuildLinksAndFilterChain(string[] customerToProductRelationshipNamesCollection,
    Fetch fetch, Filter filter, EntityReference contact, EntityReference account,
    OwningCustomerType owningCustomerType, LinkEntityAliasGenerator linkEntityAliasGenerator)
{
    var rootLink = new Link();
    foreach (var detail in this.GetLinkDetails(customerToProductRelationshipNamesCollection, owningCustomerType))
    {
        var deepestLink = GetInnermostLink(rootLink);
        // Only the hop that actually touches the contact/account entity gets the ownership condition.
        var touchesContact = contact != null && (detail.Entity1Name == "contact" || detail.Entity2Name == "contact");
        var touchesAccount = account != null && (detail.Entity1Name == "account" || detail.Entity2Name == "account");
        this.BuildLinksAndFilter(this.Portal.ServiceContext, detail, fetch, deepestLink, filter, contact, account, touchesContact || touchesAccount, linkEntityAliasGenerator);
    }
    return rootLink;
}
/// <summary>
/// Constructs the <see cref="LinkDetails"/> collection that maps from specified <see cref="EntityPermissionScope"/> to Product
/// </summary>
/// <param name="contactToProductRelationshipNamesCollection">Collection of relationship names that map from specified <see cref="EntityPermissionScope"/> to Product</param>
/// <param name="owningCustomerType"><see cref="EntityPermissionScope"/> of owning Customer</param>
/// <returns>Collection of <see cref="LinkDetails"/> from specified <see cref="OwningCustomerType"/> to Product; empty when the configured relationship pair is inconsistent</returns>
private IEnumerable<LinkDetails> GetLinkDetails(string[] contactToProductRelationshipNamesCollection, OwningCustomerType owningCustomerType)
{
    // Specify the corresponding Customer schema name based on passed EntityPermissionScope
    var entityPermissionScopeName = owningCustomerType == OwningCustomerType.Account
        ? "account"
        : "contact";
    var linkDetailsCollection = new List<LinkDetails>();
    if (contactToProductRelationshipNamesCollection.Length == 1)
    {
        // Single relationship: Product links directly to the owning Customer entity.
        linkDetailsCollection.Add(new LinkDetails("product", entityPermissionScopeName, contactToProductRelationshipNamesCollection[0], owningCustomerType));
    }
    else if (contactToProductRelationshipNamesCollection.Length == 2)
    {
        // Retrieve Customer and Product metadata
        var customerMetadata = GetEntityMetadata(this.Portal.ServiceContext, entityPermissionScopeName);
        var productMetadata = GetEntityMetadata(this.Portal.ServiceContext, "product");
        // Retrieve the intersecting entities schema name for the specified relationship name
        var customer2IntersectSchemaName = customerMetadata.OneToManyRelationships.FirstOrDefault(metadata => metadata.SchemaName == contactToProductRelationshipNamesCollection[0]);
        var product2IntersectSchemaName = productMetadata.OneToManyRelationships.FirstOrDefault(metadata => metadata.SchemaName == contactToProductRelationshipNamesCollection[1]);
        // Ensure that the intersecting entity for the specified relationships match
        if (customer2IntersectSchemaName == null || product2IntersectSchemaName == null || product2IntersectSchemaName.ReferencingEntity != customer2IntersectSchemaName.ReferencingEntity)
        {
            return linkDetailsCollection;
        }
        // Add the specified LinkDetails to the collection
        linkDetailsCollection.Add(new LinkDetails("product", product2IntersectSchemaName.ReferencingEntity, contactToProductRelationshipNamesCollection[1]));
        linkDetailsCollection.Add(new LinkDetails(customer2IntersectSchemaName.ReferencingEntity, entityPermissionScopeName, contactToProductRelationshipNamesCollection[0], owningCustomerType));
    }
    return linkDetailsCollection;
}
/// <summary>
/// Determines whether Product filtering should filter only Matching Products.
/// Defaults to true unless the site setting explicitly parses to true.
/// </summary>
private bool FilterWithMatchingProductsOnly
{
    get
    {
        var portalContext = PortalCrmConfigurationManager.CreatePortalContext();
        var settingValue = portalContext.ServiceContext.GetSiteSettingValueByName(portalContext.Website, DisplayArticlesWithoutAssociatedProductsSiteSettingName);
        bool displayUnassociatedArticles;
        if (bool.TryParse(settingValue, out displayUnassociatedArticles))
        {
            // Setting parsed successfully: filtering to matching Products only is the inverse
            // of "display articles without associated products".
            return !displayUnassociatedArticles;
        }
        // Missing, blank or unparsable setting: default to filtering with matching Products only.
        return true;
    }
}
/// <summary>
/// Injects the Product record-level filter into an existing FetchXML query: a left-outer
/// link chain from Knowledge Article through the intersect entity to Product, plus the
/// Product ID filter.
/// </summary>
/// <param name="fetchIn">FetchXML that is constructed through Entity Permissions</param>
/// <param name="userProductIDs">Collection of Product IDs</param>
private void TryRecordLevelFiltersToFetch(Fetch fetchIn, List<Guid> userProductIDs)
{
    // Inner hop: intersect entity to Product.
    var targetLink = new Link()
    {
        Alias = this.Config.TargetAlias,
        Name = this.Config.TargetEntityName,
        FromAttribute = this.Config.TargetFromAttribute,
        ToAttribute = this.Config.TargetToAttribute,
        Visible = false,
        Type = JoinOperator.LeftOuter,
        Intersect = true,
    };
    // Outer hop: Knowledge Article to intersect entity, carrying the inner hop.
    var intersectLink = new Link()
    {
        Alias = this.Config.IntersectAlias,
        Name = this.Config.IntersectEntityName,
        FromAttribute = this.Config.IntersectFromAttribute,
        ToAttribute = this.Config.IntersectToAttribute,
        Visible = false,
        Type = JoinOperator.LeftOuter,
        Intersect = true,
        Links = new List<Link> { targetLink }
    };
    // Attach the link chain, creating the collection on first use.
    if (fetchIn.Entity.Links == null)
    {
        fetchIn.Entity.Links = new List<Link>();
    }
    fetchIn.Entity.Links.Add(intersectLink);
    // Attach the Product filter, creating the collection on first use.
    var productFilter = this.BuildFilter(userProductIDs);
    if (fetchIn.Entity.Filters == null)
    {
        fetchIn.Entity.Filters = new List<Filter>();
    }
    fetchIn.Entity.Filters.Add(productFilter);
    // The outer joins can multiply rows, so request distinct results.
    fetchIn.Distinct = true;
}
/// <summary>
/// Builds the Filter to trim the Knowledge Article fetch results
/// </summary>
/// <param name="userProductIDs">Collection of Product IDs</param>
/// <returns>Product filter</returns>
private Filter BuildFilter(List<Guid> userProductIDs)
{
    // Box the user's Product IDs for the In condition. When the user has no Products,
    // Guid.Empty is used so the In condition matches nothing.
    var userProductsObjectCollection = new List<object>();
    if (userProductIDs != null && userProductIDs.Any())
    {
        userProductsObjectCollection.AddRange(userProductIDs.Cast<object>());
    }
    else
    {
        userProductsObjectCollection.Add(Guid.Empty);
    }
    var filterConditions = new List<Condition>
    {
        new Condition { EntityName = this.Config.TargetAlias, Attribute = this.Config.TargetFromAttribute, Operator = ConditionOperator.In, Values = userProductsObjectCollection }
    };
    // If User is Authenticated and Site Setting configured for showing unassociated Articles,
    // also allow Articles that have no Product association at all (null intersect).
    if (this.CurrentUserEntityReference != null && !this.FilterWithMatchingProductsOnly)
    {
        filterConditions.Add(new Condition { EntityName = this.Config.IntersectAlias, Attribute = this.Config.IntersectFromAttribute, Operator = ConditionOperator.Null });
    }
    return new Filter()
    {
        Type = LogicalOperator.Or,
        Conditions = filterConditions
    };
}
/// <summary>
/// Names of the web roles associated with the current User.
/// </summary>
private string[] CurrentUserRoleNames { get; set; }
#endregion Helper Methods
}
}
| |
namespace Azure.Data.Tables
{
/// <summary>
/// Contract for a table entity: partition/row keys identify the entity, ETag supports
/// optimistic concurrency, and Timestamp is the read-only server-assigned modification time.
/// </summary>
public partial interface ITableEntity
{
Azure.ETag ETag { get; set; }
string PartitionKey { get; set; }
string RowKey { get; set; }
System.DateTimeOffset? Timestamp { get; }
}
/// <summary>
/// Client for a single table: entity CRUD, queries, access policies, SAS builders and batch
/// transactions. Member bodies are placeholders (<c>throw null</c>) — this file appears to be a
/// generated public API surface listing, not the implementation.
/// </summary>
public partial class TableClient
{
protected TableClient() { }
public TableClient(string connectionString, string tableName) { }
public TableClient(string connectionString, string tableName, Azure.Data.Tables.TablesClientOptions options = null) { }
public TableClient(System.Uri endpoint, Azure.AzureSasCredential credential, Azure.Data.Tables.TablesClientOptions options = null) { }
public TableClient(System.Uri endpoint, Azure.Data.Tables.TablesClientOptions options = null) { }
public TableClient(System.Uri endpoint, string tableName, Azure.Data.Tables.TableSharedKeyCredential credential) { }
public TableClient(System.Uri endpoint, string tableName, Azure.Data.Tables.TableSharedKeyCredential credential, Azure.Data.Tables.TablesClientOptions options = null) { }
public virtual string AccountName { get { throw null; } }
public virtual string Name { get { throw null; } }
public virtual System.Threading.Tasks.Task<Azure.Response> AddEntityAsync<T>(T entity, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Response AddEntity<T>(T entity, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Response<Azure.Data.Tables.Models.TableItem> Create(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Data.Tables.Models.TableItem>> CreateAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Data.Tables.Models.TableItem> CreateIfNotExists(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Data.Tables.Models.TableItem>> CreateIfNotExistsAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public static string CreateQueryFilter(System.FormattableString filter) { throw null; }
public static string CreateQueryFilter<T>(System.Linq.Expressions.Expression<System.Func<T, bool>> filter) { throw null; }
public virtual Azure.Response Delete(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> DeleteAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response DeleteEntity(string partitionKey, string rowKey, Azure.ETag ifMatch = default(Azure.ETag), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> DeleteEntityAsync(string partitionKey, string rowKey, Azure.ETag ifMatch = default(Azure.ETag), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Data.Tables.TableSignedIdentifier>> GetAccessPolicies(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Data.Tables.TableSignedIdentifier>>> GetAccessPoliciesAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<T>> GetEntityAsync<T>(string partitionKey, string rowKey, System.Collections.Generic.IEnumerable<string> select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Response<T> GetEntity<T>(string partitionKey, string rowKey, System.Collections.Generic.IEnumerable<string> select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Data.Tables.Sas.TableSasBuilder GetSasBuilder(Azure.Data.Tables.Sas.TableSasPermissions permissions, System.DateTimeOffset expiresOn) { throw null; }
public virtual Azure.Data.Tables.Sas.TableSasBuilder GetSasBuilder(string rawPermissions, System.DateTimeOffset expiresOn) { throw null; }
public virtual Azure.AsyncPageable<T> QueryAsync<T>(System.Linq.Expressions.Expression<System.Func<T, bool>> filter, int? maxPerPage = default(int?), System.Collections.Generic.IEnumerable<string> select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.AsyncPageable<T> QueryAsync<T>(string filter = null, int? maxPerPage = default(int?), System.Collections.Generic.IEnumerable<string> select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Pageable<T> Query<T>(System.Linq.Expressions.Expression<System.Func<T, bool>> filter, int? maxPerPage = default(int?), System.Collections.Generic.IEnumerable<string> select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Pageable<T> Query<T>(string filter = null, int? maxPerPage = default(int?), System.Collections.Generic.IEnumerable<string> select = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Response SetAccessPolicy(System.Collections.Generic.IEnumerable<Azure.Data.Tables.TableSignedIdentifier> tableAcl, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> SetAccessPolicyAsync(System.Collections.Generic.IEnumerable<Azure.Data.Tables.TableSignedIdentifier> tableAcl, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Response>> SubmitTransaction(System.Collections.Generic.IEnumerable<Azure.Data.Tables.TableTransactionAction> transactionActions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<System.Collections.Generic.IReadOnlyList<Azure.Response>>> SubmitTransactionAsync(System.Collections.Generic.IEnumerable<Azure.Data.Tables.TableTransactionAction> transactionActions, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> UpdateEntityAsync<T>(T entity, Azure.ETag ifMatch, Azure.Data.Tables.TableUpdateMode mode = Azure.Data.Tables.TableUpdateMode.Merge, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Response UpdateEntity<T>(T entity, Azure.ETag ifMatch, Azure.Data.Tables.TableUpdateMode mode = Azure.Data.Tables.TableUpdateMode.Merge, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> UpsertEntityAsync<T>(T entity, Azure.Data.Tables.TableUpdateMode mode = Azure.Data.Tables.TableUpdateMode.Merge, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
public virtual Azure.Response UpsertEntity<T>(T entity, Azure.Data.Tables.TableUpdateMode mode = Azure.Data.Tables.TableUpdateMode.Merge, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) where T : class, Azure.Data.Tables.ITableEntity, new() { throw null; }
}
/// <summary>
/// Dictionary-backed <see cref="Azure.Data.Tables.ITableEntity"/> implementation with typed
/// property accessors (GetString, GetInt32, …). Bodies are placeholders — generated API surface listing.
/// </summary>
public sealed partial class TableEntity : Azure.Data.Tables.ITableEntity, System.Collections.Generic.ICollection<System.Collections.Generic.KeyValuePair<string, object>>, System.Collections.Generic.IDictionary<string, object>, System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<string, object>>, System.Collections.IEnumerable
{
public TableEntity() { }
public TableEntity(System.Collections.Generic.IDictionary<string, object> values) { }
public TableEntity(string partitionKey, string rowKey) { }
public int Count { get { throw null; } }
public Azure.ETag ETag { get { throw null; } set { } }
public object this[string key] { get { throw null; } set { } }
public System.Collections.Generic.ICollection<string> Keys { get { throw null; } }
public string PartitionKey { get { throw null; } set { } }
public string RowKey { get { throw null; } set { } }
bool System.Collections.Generic.ICollection<System.Collections.Generic.KeyValuePair<System.String,System.Object>>.IsReadOnly { get { throw null; } }
System.Collections.Generic.ICollection<object> System.Collections.Generic.IDictionary<System.String,System.Object>.Values { get { throw null; } }
public System.DateTimeOffset? Timestamp { get { throw null; } set { } }
public void Add(string key, object value) { }
public void Clear() { }
public bool ContainsKey(string key) { throw null; }
public byte[] GetBinary(string key) { throw null; }
public System.BinaryData GetBinaryData(string key) { throw null; }
public bool? GetBoolean(string key) { throw null; }
public System.DateTime? GetDateTime(string key) { throw null; }
public System.DateTimeOffset? GetDateTimeOffset(string key) { throw null; }
public double? GetDouble(string key) { throw null; }
public System.Guid? GetGuid(string key) { throw null; }
public int? GetInt32(string key) { throw null; }
public long? GetInt64(string key) { throw null; }
public string GetString(string key) { throw null; }
public bool Remove(string key) { throw null; }
void System.Collections.Generic.ICollection<System.Collections.Generic.KeyValuePair<System.String,System.Object>>.Add(System.Collections.Generic.KeyValuePair<string, object> item) { }
bool System.Collections.Generic.ICollection<System.Collections.Generic.KeyValuePair<System.String,System.Object>>.Contains(System.Collections.Generic.KeyValuePair<string, object> item) { throw null; }
void System.Collections.Generic.ICollection<System.Collections.Generic.KeyValuePair<System.String,System.Object>>.CopyTo(System.Collections.Generic.KeyValuePair<string, object>[] array, int arrayIndex) { }
bool System.Collections.Generic.ICollection<System.Collections.Generic.KeyValuePair<System.String,System.Object>>.Remove(System.Collections.Generic.KeyValuePair<string, object> item) { throw null; }
System.Collections.Generic.IEnumerator<System.Collections.Generic.KeyValuePair<string, object>> System.Collections.Generic.IEnumerable<System.Collections.Generic.KeyValuePair<System.String,System.Object>>.GetEnumerator() { throw null; }
System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { throw null; }
public bool TryGetValue(string key, out object value) { throw null; }
}
/// <summary>
/// Retention policy settings for table service analytics (enabled flag plus retention days).
/// </summary>
public partial class TableRetentionPolicy
{
public TableRetentionPolicy(bool enabled) { }
public int? Days { get { throw null; } set { } }
public bool Enabled { get { throw null; } set { } }
}
/// <summary>
/// Client options for the Tables clients, selecting the service REST API version.
/// </summary>
public partial class TablesClientOptions : Azure.Core.ClientOptions
{
public TablesClientOptions(Azure.Data.Tables.TablesClientOptions.ServiceVersion serviceVersion = Azure.Data.Tables.TablesClientOptions.ServiceVersion.V2019_02_02) { }
// Supported service API versions; V2019_02_02 is the only (and default) version here.
public enum ServiceVersion
{
V2019_02_02 = 1,
}
}
/// <summary>
/// Account-level client: create/delete/list tables, service properties and statistics,
/// account SAS builders, and per-table client creation. Bodies are placeholders — generated
/// API surface listing.
/// </summary>
public partial class TableServiceClient
{
protected TableServiceClient() { }
public TableServiceClient(string connectionString) { }
public TableServiceClient(string connectionString, Azure.Data.Tables.TablesClientOptions options = null) { }
public TableServiceClient(System.Uri endpoint, Azure.AzureSasCredential credential) { }
public TableServiceClient(System.Uri endpoint, Azure.AzureSasCredential credential, Azure.Data.Tables.TablesClientOptions options = null) { }
public TableServiceClient(System.Uri endpoint, Azure.Data.Tables.TableSharedKeyCredential credential) { }
public TableServiceClient(System.Uri endpoint, Azure.Data.Tables.TableSharedKeyCredential credential, Azure.Data.Tables.TablesClientOptions options) { }
public virtual string AccountName { get { throw null; } }
public static string CreateQueryFilter(System.FormattableString filter) { throw null; }
public static string CreateQueryFilter(System.Linq.Expressions.Expression<System.Func<Azure.Data.Tables.Models.TableItem, bool>> filter) { throw null; }
public virtual Azure.Response<Azure.Data.Tables.Models.TableItem> CreateTable(string tableName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Data.Tables.Models.TableItem>> CreateTableAsync(string tableName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Data.Tables.Models.TableItem> CreateTableIfNotExists(string tableName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Data.Tables.Models.TableItem>> CreateTableIfNotExistsAsync(string tableName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response DeleteTable(string tableName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> DeleteTableAsync(string tableName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Data.Tables.Models.TableServiceProperties> GetProperties(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Data.Tables.Models.TableServiceProperties>> GetPropertiesAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Data.Tables.Sas.TableAccountSasBuilder GetSasBuilder(Azure.Data.Tables.Sas.TableAccountSasPermissions permissions, Azure.Data.Tables.Sas.TableAccountSasResourceTypes resourceTypes, System.DateTimeOffset expiresOn) { throw null; }
public virtual Azure.Data.Tables.Sas.TableAccountSasBuilder GetSasBuilder(string rawPermissions, Azure.Data.Tables.Sas.TableAccountSasResourceTypes resourceTypes, System.DateTimeOffset expiresOn) { throw null; }
public virtual Azure.Response<Azure.Data.Tables.Models.TableServiceStatistics> GetStatistics(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Data.Tables.Models.TableServiceStatistics>> GetStatisticsAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Data.Tables.TableClient GetTableClient(string tableName) { throw null; }
public virtual Azure.Pageable<Azure.Data.Tables.Models.TableItem> Query(System.FormattableString filter, int? maxPerPage = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Data.Tables.Models.TableItem> Query(System.Linq.Expressions.Expression<System.Func<Azure.Data.Tables.Models.TableItem, bool>> filter, int? maxPerPage = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Data.Tables.Models.TableItem> Query(string filter = null, int? maxPerPage = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Data.Tables.Models.TableItem> QueryAsync(System.FormattableString filter, int? maxPerPage = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Data.Tables.Models.TableItem> QueryAsync(System.Linq.Expressions.Expression<System.Func<Azure.Data.Tables.Models.TableItem, bool>> filter, int? maxPerPage = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Data.Tables.Models.TableItem> QueryAsync(string filter = null, int? maxPerPage = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response SetProperties(Azure.Data.Tables.Models.TableServiceProperties properties, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> SetPropertiesAsync(Azure.Data.Tables.Models.TableServiceProperties properties, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
// Shared-key credential for Table storage. The account key is write-only:
// it can be rotated via SetAccountKey but is never exposed through a getter.
public partial class TableSharedKeyCredential
{
public TableSharedKeyCredential(string accountName, string accountKey) { }
public string AccountName { get { throw null; } }
public void SetAccountKey(string accountKey) { }
}
// Pairs a stored-access-policy id with its TableAccessPolicy.
public partial class TableSignedIdentifier
{
public TableSignedIdentifier(string id, Azure.Data.Tables.Models.TableAccessPolicy accessPolicy) { }
public Azure.Data.Tables.Models.TableAccessPolicy AccessPolicy { get { throw null; } set { } }
public string Id { get { throw null; } set { } }
}
// One operation (action type + entity + optional ETag) inside a table batch
// transaction; all three properties are read-only after construction.
public partial class TableTransactionAction
{
public TableTransactionAction(Azure.Data.Tables.TableTransactionActionType actionType, Azure.Data.Tables.ITableEntity entity) { }
public TableTransactionAction(Azure.Data.Tables.TableTransactionActionType actionType, Azure.Data.Tables.ITableEntity entity, Azure.ETag etag = default(Azure.ETag)) { }
public Azure.Data.Tables.TableTransactionActionType ActionType { get { throw null; } }
public Azure.Data.Tables.ITableEntity Entity { get { throw null; } }
public Azure.ETag ETag { get { throw null; } }
}
// The kind of operation a TableTransactionAction performs within a batch.
public enum TableTransactionActionType
{
Add = 0,
UpdateMerge = 1,
UpdateReplace = 2,
Delete = 3,
UpsertMerge = 4,
UpsertReplace = 5,
}
// Thrown when a batch transaction fails; FailedTransactionActionIndex points
// at the offending action in the submitted list (null if not reported).
public partial class TableTransactionFailedException : Azure.RequestFailedException
{
public TableTransactionFailedException(Azure.RequestFailedException requestFailedException) : base (default(string)) { }
protected TableTransactionFailedException(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context) : base (default(string)) { }
public int? FailedTransactionActionIndex { get { throw null; } }
}
// Whether an entity update merges into or replaces the stored entity.
public enum TableUpdateMode
{
Merge = 0,
Replace = 1,
}
}
// Model types for the Tables client library. NOTE(review): generated
// API-shape stubs (bodies are `{ throw null; }`), not an implementation.
namespace Azure.Data.Tables.Models
{
// Start/expiry/permission triple for a stored access policy.
public partial class TableAccessPolicy
{
public TableAccessPolicy(System.DateTimeOffset? startsOn, System.DateTimeOffset? expiresOn, string permission) { }
public System.DateTimeOffset? ExpiresOn { get { throw null; } set { } }
public string Permission { get { throw null; } set { } }
public System.DateTimeOffset? StartsOn { get { throw null; } set { } }
}
// Analytics logging configuration (which request classes to log + retention).
public partial class TableAnalyticsLoggingSettings
{
public TableAnalyticsLoggingSettings(string version, bool delete, bool read, bool write, Azure.Data.Tables.TableRetentionPolicy retentionPolicy) { }
public bool Delete { get { throw null; } set { } }
public bool Read { get { throw null; } set { } }
public Azure.Data.Tables.TableRetentionPolicy RetentionPolicy { get { throw null; } set { } }
public string Version { get { throw null; } set { } }
public bool Write { get { throw null; } set { } }
}
// A single CORS rule for the table service.
public partial class TableCorsRule
{
public TableCorsRule(string allowedOrigins, string allowedMethods, string allowedHeaders, string exposedHeaders, int maxAgeInSeconds) { }
public string AllowedHeaders { get { throw null; } set { } }
public string AllowedMethods { get { throw null; } set { } }
public string AllowedOrigins { get { throw null; } set { } }
public string ExposedHeaders { get { throw null; } set { } }
public int MaxAgeInSeconds { get { throw null; } set { } }
}
// Extensible-enum struct of service error codes (string-backed, value-equatable).
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct TableErrorCode : System.IEquatable<Azure.Data.Tables.Models.TableErrorCode>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public TableErrorCode(string value) { throw null; }
public static Azure.Data.Tables.Models.TableErrorCode AccountIOPSLimitExceeded { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode AtomFormatNotSupported { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode AuthorizationPermissionMismatch { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode AuthorizationResourceTypeMismatch { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode CannotCreateTableWithIOPSGreaterThanMaxAllowedPerTable { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode CommandsInBatchActOnDifferentPartitions { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode ContentLengthExceeded { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode DuplicateKeyPropertySpecified { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode DuplicatePropertiesSpecified { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode EntityAlreadyExists { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode EntityNotFound { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode EntityTooLarge { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode Forbidden { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode InvalidDuplicateRow { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode InvalidInput { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode InvalidValueType { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode JsonFormatNotSupported { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode JsonVerboseFormatNotSupported { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode KeyValueTooLarge { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode MediaTypeNotSupported { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode MethodNotAllowed { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode NotImplemented { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode OperationTimedOut { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode OperatorInvalid { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode OutOfRangeInput { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PartitionKeyEqualityComparisonExpected { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PartitionKeyNotSpecified { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PartitionKeyPropertyCannotBeUpdated { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PartitionKeySpecifiedMoreThanOnce { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PerTableIOPSDecrementLimitReached { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PerTableIOPSIncrementLimitReached { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PrimaryKeyPropertyIsInvalidType { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PropertiesNeedValue { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PropertyNameInvalid { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PropertyNameTooLong { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode PropertyValueTooLarge { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode ResourceNotFound { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode SettingIOPSForATableInProvisioningNotAllowed { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode TableAlreadyExists { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode TableBeingDeleted { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode TableHasNoProperties { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode TableHasNoSuchProperty { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode TableNotFound { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode TooManyProperties { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode UpdateConditionNotSatisfied { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode XMethodIncorrectCount { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode XMethodIncorrectValue { get { throw null; } }
public static Azure.Data.Tables.Models.TableErrorCode XMethodNotUsingPost { get { throw null; } }
public bool Equals(Azure.Data.Tables.Models.TableErrorCode other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Data.Tables.Models.TableErrorCode left, Azure.Data.Tables.Models.TableErrorCode right) { throw null; }
public static implicit operator Azure.Data.Tables.Models.TableErrorCode (string value) { throw null; }
public static bool operator !=(Azure.Data.Tables.Models.TableErrorCode left, Azure.Data.Tables.Models.TableErrorCode right) { throw null; }
public override string ToString() { throw null; }
}
// Geo-replication status + last-sync timestamp; output-only (internal ctor).
public partial class TableGeoReplicationInfo
{
internal TableGeoReplicationInfo() { }
public System.DateTimeOffset LastSyncedOn { get { throw null; } }
public Azure.Data.Tables.Models.TableGeoReplicationStatus Status { get { throw null; } }
}
// Extensible-enum struct for geo-replication state (Bootstrap/Live/Unavailable).
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct TableGeoReplicationStatus : System.IEquatable<Azure.Data.Tables.Models.TableGeoReplicationStatus>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public TableGeoReplicationStatus(string value) { throw null; }
public static Azure.Data.Tables.Models.TableGeoReplicationStatus Bootstrap { get { throw null; } }
public static Azure.Data.Tables.Models.TableGeoReplicationStatus Live { get { throw null; } }
public static Azure.Data.Tables.Models.TableGeoReplicationStatus Unavailable { get { throw null; } }
public bool Equals(Azure.Data.Tables.Models.TableGeoReplicationStatus other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Data.Tables.Models.TableGeoReplicationStatus left, Azure.Data.Tables.Models.TableGeoReplicationStatus right) { throw null; }
public static implicit operator Azure.Data.Tables.Models.TableGeoReplicationStatus (string value) { throw null; }
public static bool operator !=(Azure.Data.Tables.Models.TableGeoReplicationStatus left, Azure.Data.Tables.Models.TableGeoReplicationStatus right) { throw null; }
public override string ToString() { throw null; }
}
// A table name as returned by table queries.
public partial class TableItem
{
public TableItem(string name) { }
public string Name { get { throw null; } }
}
// Metrics configuration (hour/minute aggregates) for the service.
public partial class TableMetrics
{
public TableMetrics(bool enabled) { }
public bool Enabled { get { throw null; } set { } }
public bool? IncludeApis { get { throw null; } set { } }
public Azure.Data.Tables.TableRetentionPolicy RetentionPolicy { get { throw null; } set { } }
public string Version { get { throw null; } set { } }
}
// Service-level settings: logging, hour/minute metrics, CORS rules.
public partial class TableServiceProperties
{
public TableServiceProperties() { }
public System.Collections.Generic.IList<Azure.Data.Tables.Models.TableCorsRule> Cors { get { throw null; } }
public Azure.Data.Tables.Models.TableMetrics HourMetrics { get { throw null; } set { } }
public Azure.Data.Tables.Models.TableAnalyticsLoggingSettings Logging { get { throw null; } set { } }
public Azure.Data.Tables.Models.TableMetrics MinuteMetrics { get { throw null; } set { } }
}
// Wrapper around the geo-replication info; output-only (internal ctor).
public partial class TableServiceStatistics
{
internal TableServiceStatistics() { }
public Azure.Data.Tables.Models.TableGeoReplicationInfo GeoReplication { get { throw null; } }
}
// Result of a batch transaction; sub-responses are looked up by row key.
public partial class TableTransactionResult
{
internal TableTransactionResult() { }
public int ResponseCount { get { throw null; } }
public Azure.Response GetResponseForEntity(string rowKey) { throw null; }
}
}
// Shared-access-signature (SAS) support types. NOTE(review): generated
// API-shape stubs (bodies are `{ throw null; }`), not an implementation.
namespace Azure.Data.Tables.Sas
{
// Builder for account-level SAS tokens; Sign/ToSasQueryParameters produce the
// final signature from a TableSharedKeyCredential.
public partial class TableAccountSasBuilder
{
public TableAccountSasBuilder(Azure.Data.Tables.Sas.TableAccountSasPermissions permissions, Azure.Data.Tables.Sas.TableAccountSasResourceTypes resourceTypes, System.DateTimeOffset expiresOn) { }
public TableAccountSasBuilder(string rawPermissions, Azure.Data.Tables.Sas.TableAccountSasResourceTypes resourceTypes, System.DateTimeOffset expiresOn) { }
public TableAccountSasBuilder(System.Uri uri) { }
public System.DateTimeOffset ExpiresOn { get { throw null; } set { } }
public string Identifier { get { throw null; } set { } }
public Azure.Data.Tables.Sas.TableSasIPRange IPRange { get { throw null; } set { } }
public string Permissions { get { throw null; } }
public Azure.Data.Tables.Sas.TableSasProtocol Protocol { get { throw null; } set { } }
public Azure.Data.Tables.Sas.TableAccountSasResourceTypes ResourceTypes { get { throw null; } set { } }
public System.DateTimeOffset StartsOn { get { throw null; } set { } }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public void SetPermissions(Azure.Data.Tables.Sas.TableAccountSasPermissions permissions) { }
public void SetPermissions(string rawPermissions) { }
public string Sign(Azure.Data.Tables.TableSharedKeyCredential sharedKeyCredential) { throw null; }
public Azure.Data.Tables.Sas.TableAccountSasQueryParameters ToSasQueryParameters(Azure.Data.Tables.TableSharedKeyCredential sharedKeyCredential) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override string ToString() { throw null; }
}
// Account-SAS permission flags (note the gap: 32 is unused in this listing).
[System.FlagsAttribute]
public enum TableAccountSasPermissions
{
All = -1,
Read = 1,
Write = 2,
Delete = 4,
List = 8,
Add = 16,
Update = 64,
}
// Parsed components of an account SAS query string; output-only (internal ctor).
public partial class TableAccountSasQueryParameters
{
internal TableAccountSasQueryParameters() { }
public System.DateTimeOffset ExpiresOn { get { throw null; } }
public string Identifier { get { throw null; } }
public Azure.Data.Tables.Sas.TableSasIPRange IPRange { get { throw null; } }
public string Permissions { get { throw null; } }
public Azure.Data.Tables.Sas.TableSasProtocol Protocol { get { throw null; } }
public string Resource { get { throw null; } }
public Azure.Data.Tables.Sas.TableAccountSasResourceTypes? ResourceTypes { get { throw null; } }
public string Signature { get { throw null; } }
public System.DateTimeOffset StartsOn { get { throw null; } }
public string Version { get { throw null; } }
public override string ToString() { throw null; }
}
// Resource scopes an account SAS may grant access to.
[System.FlagsAttribute]
public enum TableAccountSasResourceTypes
{
All = -1,
Service = 1,
Container = 2,
Object = 4,
}
// Builder for table-scoped SAS tokens, optionally bounded by partition/row key ranges.
public partial class TableSasBuilder
{
public TableSasBuilder(string tableName, Azure.Data.Tables.Sas.TableSasPermissions permissions, System.DateTimeOffset expiresOn) { }
public TableSasBuilder(string tableName, string rawPermissions, System.DateTimeOffset expiresOn) { }
public TableSasBuilder(System.Uri uri) { }
public System.DateTimeOffset ExpiresOn { get { throw null; } set { } }
public string Identifier { get { throw null; } set { } }
public Azure.Data.Tables.Sas.TableSasIPRange IPRange { get { throw null; } set { } }
public string PartitionKeyEnd { get { throw null; } set { } }
public string PartitionKeyStart { get { throw null; } set { } }
public string Permissions { get { throw null; } }
public Azure.Data.Tables.Sas.TableSasProtocol Protocol { get { throw null; } set { } }
public string RowKeyEnd { get { throw null; } set { } }
public string RowKeyStart { get { throw null; } set { } }
public System.DateTimeOffset StartsOn { get { throw null; } set { } }
public string TableName { get { throw null; } set { } }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public void SetPermissions(Azure.Data.Tables.Sas.TableSasPermissions permissions) { }
public void SetPermissions(string rawPermissions) { }
public string Sign(Azure.Data.Tables.TableSharedKeyCredential sharedKeyCredential) { throw null; }
public Azure.Data.Tables.Sas.TableSasQueryParameters ToSasQueryParameters(Azure.Data.Tables.TableSharedKeyCredential sharedKeyCredential) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override string ToString() { throw null; }
}
// Inclusive IP-address range restriction for a SAS; value-equatable struct.
[System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
public readonly partial struct TableSasIPRange : System.IEquatable<Azure.Data.Tables.Sas.TableSasIPRange>
{
private readonly object _dummy;
private readonly int _dummyPrimitive;
public TableSasIPRange(System.Net.IPAddress start, System.Net.IPAddress end = null) { throw null; }
public System.Net.IPAddress End { get { throw null; } }
public System.Net.IPAddress Start { get { throw null; } }
public bool Equals(Azure.Data.Tables.Sas.TableSasIPRange other) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override bool Equals(object obj) { throw null; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public override int GetHashCode() { throw null; }
public static bool operator ==(Azure.Data.Tables.Sas.TableSasIPRange left, Azure.Data.Tables.Sas.TableSasIPRange right) { throw null; }
public static bool operator !=(Azure.Data.Tables.Sas.TableSasIPRange left, Azure.Data.Tables.Sas.TableSasIPRange right) { throw null; }
public static Azure.Data.Tables.Sas.TableSasIPRange Parse(string s) { throw null; }
public override string ToString() { throw null; }
}
// Table-SAS permission flags.
[System.FlagsAttribute]
public enum TableSasPermissions
{
All = -1,
Read = 1,
Add = 2,
Update = 4,
Delete = 8,
}
// Allowed protocols for SAS usage.
public enum TableSasProtocol
{
None = 0,
HttpsAndHttp = 1,
Https = 2,
}
// Table-scoped SAS query parameters: account parameters plus key-range bounds.
public sealed partial class TableSasQueryParameters : Azure.Data.Tables.Sas.TableAccountSasQueryParameters
{
internal TableSasQueryParameters() { }
public static Azure.Data.Tables.Sas.TableSasQueryParameters Empty { get { throw null; } }
public string EndPartitionKey { get { throw null; } set { } }
public string EndRowKey { get { throw null; } set { } }
public string StartPartitionKey { get { throw null; } set { } }
public string StartRowKey { get { throw null; } set { } }
public override string ToString() { throw null; }
}
// Mutable decomposition of a table endpoint URI (scheme/host/port/table/SAS).
public partial class TableUriBuilder
{
public TableUriBuilder(System.Uri uri) { }
public string AccountName { get { throw null; } set { } }
public string Host { get { throw null; } set { } }
public int Port { get { throw null; } set { } }
public string Query { get { throw null; } set { } }
public Azure.Data.Tables.Sas.TableSasQueryParameters Sas { get { throw null; } set { } }
public string Scheme { get { throw null; } set { } }
public string Tablename { get { throw null; } set { } }
public override string ToString() { throw null; }
public System.Uri ToUri() { throw null; }
}
}
// Dependency-injection registration helpers for TableServiceClient.
// NOTE(review): generated API-shape stubs, not an implementation.
namespace Microsoft.Extensions.Azure
{
// Extension methods that register a TableServiceClient with an Azure client
// factory builder, from a connection string, endpoint + shared key, or configuration.
public static partial class TableClientBuilderExtensions
{
public static Azure.Core.Extensions.IAzureClientBuilder<Azure.Data.Tables.TableServiceClient, Azure.Data.Tables.TablesClientOptions> AddTableServiceClient<TBuilder>(this TBuilder builder, string connectionString) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; }
public static Azure.Core.Extensions.IAzureClientBuilder<Azure.Data.Tables.TableServiceClient, Azure.Data.Tables.TablesClientOptions> AddTableServiceClient<TBuilder>(this TBuilder builder, System.Uri serviceUri, Azure.Data.Tables.TableSharedKeyCredential sharedKeyCredential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; }
public static Azure.Core.Extensions.IAzureClientBuilder<Azure.Data.Tables.TableServiceClient, Azure.Data.Tables.TablesClientOptions> AddTableServiceClient<TBuilder, TConfiguration>(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration<TConfiguration> { throw null; }
}
}
| |
//
// ImageView.cs
//
// Author:
// Stephane Delcroix <stephane@delcroix.org>
// Ruben Vermeersch <ruben@savanne.be>
//
// Copyright (C) 2009-2010 Novell, Inc.
// Copyright (C) 2009 Stephane Delcroix
// Copyright (C) 2010 Ruben Vermeersch
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using Gtk;
using Gdk;
using FSpot.Utils;
using TagLib.Image;
using Hyena;
namespace FSpot.Widgets
{
public partial class ImageView : Container
{
#region public API
// Wrapper constructor used when the widget is created from a native GObject pointer.
protected ImageView (IntPtr raw) : base (raw) { }
// Main constructor: wires up the scroll adjustments, makes the widget
// windowed and focusable, and records whether rubber-band selection is allowed.
public ImageView (Adjustment hadjustment, Adjustment vadjustment, bool canSelect)
{
OnSetScrollAdjustments (hadjustment, vadjustment);
AdjustmentsChanged += ScrollToAdjustments;
// Clear NoWindow: this widget paints on its own GdkWindow (created in OnRealized).
WidgetFlags &= ~WidgetFlags.NoWindow;
SetFlag (WidgetFlags.CanFocus);
can_select = canSelect;
}
// Null adjustments are replaced with defaults in OnSetScrollAdjustments.
public ImageView (bool canSelect) : this (null, null, canSelect)
{
}
// Parameterless form defaults to a selectable view.
public ImageView () : this (true)
{
}
// Backing field for the displayed image.
Pixbuf pixbuf;
// The image being displayed. Setting a new pixbuf recomputes the minimum
// zoom and scaled size, resets the scroll position to the origin, and
// queues a full redraw.
public Pixbuf Pixbuf {
get { return pixbuf; }
set {
if (pixbuf == value)
return;
pixbuf = value;
min_zoom = ComputeMinZoom (upscale);
ComputeScaledSize ();
// Detach the scroll handler while resetting the adjustments so the
// reset itself does not trigger a scroll against stale state.
AdjustmentsChanged -= ScrollToAdjustments;
Hadjustment.Value = Vadjustment.Value = 0;
XOffset = YOffset = 0;
AdjustmentsChanged += ScrollToAdjustments;
QueueDraw ();
}
}
// Backing field for the display orientation.
ImageOrientation pixbuf_orientation;
// EXIF-style orientation applied when mapping between image and window
// coordinates; changing it invalidates the minimum zoom and scaled size.
public ImageOrientation PixbufOrientation {
get { return pixbuf_orientation; }
set {
if (value == pixbuf_orientation)
return;
pixbuf_orientation = value;
min_zoom = ComputeMinZoom (upscale);
ComputeScaledSize ();
QueueDraw ();
}
}
// Backing field for the transparency checkerboard; dark by default.
CheckPattern check_pattern = CheckPattern.Dark;
// Checkerboard pattern drawn behind transparent images. Only queues a
// repaint when the current pixbuf actually has an alpha channel, since
// opaque images never show the pattern.
public CheckPattern CheckPattern {
get { return check_pattern; }
set {
if (check_pattern == value)
return;
check_pattern = value;
if (Pixbuf != null && Pixbuf.HasAlpha)
QueueDraw ();
}
}
// Backing field for the pointer interaction mode.
PointerMode pointer_mode = PointerMode.Select;
// Current pointer interaction mode; PointerMode.None disables button
// handling entirely (see OnButtonPressEvent).
public PointerMode PointerMode {
get { return pointer_mode; }
set { pointer_mode = value; }
}
// Scroll adjustments; assigned (never null afterwards) in OnSetScrollAdjustments.
public Adjustment Hadjustment { get; private set; }
public Adjustment Vadjustment { get; private set; }
// Backing field for CanSelect.
bool can_select = false;
// Whether rubber-band selection is enabled. Disabling it clears any
// existing selection before the flag flips, so SelectionChanged still fires.
public bool CanSelect {
get { return can_select; }
set {
if (can_select == value)
return;
if (!value)
Selection = Rectangle.Zero;
can_select = value;
}
}
// Backing field for the current selection rectangle.
Gdk.Rectangle selection = Rectangle.Zero;
// Current selection rectangle (coordinate space is defined by the selection
// code elsewhere in this partial class — TODO confirm). Reports
// Rectangle.Zero whenever selection is disabled, ignores writes in that
// state, and raises SelectionChanged on every actual change.
public Gdk.Rectangle Selection {
get {
if (!can_select)
return Rectangle.Zero;
return selection;
}
set {
if (!can_select)
return;
if (value == selection)
return;
selection = value;
// Copy the delegate to a local so an unsubscribe between the null
// check and the invocation cannot cause a NullReferenceException.
EventHandler eh = SelectionChanged;
if (eh != null)
eh (this, EventArgs.Empty);
QueueDraw ();
}
}
// Backing field for the selection aspect-ratio constraint; 0 = unconstrained.
double selection_xy_ratio = 0;
// Aspect-ratio (x:y) constraint for the selection rectangle. Setting a
// non-zero ratio immediately re-constrains any existing selection via
// ConstrainSelection (defined elsewhere in this partial class).
public double SelectionXyRatio {
get { return selection_xy_ratio; }
set {
if (selection_xy_ratio == value)
return;
selection_xy_ratio = value;
if (selection_xy_ratio == 0)
return;
if (Selection == Rectangle.Zero)
return;
Selection = ConstrainSelection (Selection, false, false);
}
}
// Backing field for the scaling filter used when painting.
InterpType interpolation = InterpType.Bilinear;
// Interpolation used for scaled drawing; changing it repaints. Note that
// OnExposeEvent forces Nearest at 1:1 zoom regardless of this setting.
public Gdk.InterpType Interpolation {
get { return interpolation; }
set {
if (interpolation == value)
return;
interpolation = value;
QueueDraw ();
}
}
// Backing field for the current zoom factor (1.0 = 1:1 pixels).
double zoom = 1.0;
// Current zoom factor. The setter delegates to DoZoom (defined elsewhere
// in this partial class), zooming about the center of the allocation.
public double Zoom {
get { return zoom; }
set {
// Zoom around the center of the image.
DoZoom (value, Allocation.Width / 2, Allocation.Height / 2);
}
}
// Step the zoom in by one ZOOM_FACTOR increment, about the view center.
public void ZoomIn ()
{
Zoom = Zoom * ZOOM_FACTOR;
}
// Step the zoom out by one ZOOM_FACTOR increment, about the view center.
public void ZoomOut ()
{
double inverse = 1.0 / ZOOM_FACTOR;
Zoom = Zoom * inverse;
}
// Multiply the current zoom by zoomIncrement while keeping the window
// point (x, y) fixed on screen.
public void ZoomAboutPoint (double zoomIncrement, int x, int y)
{
double target = zoom * zoomIncrement;
DoZoom (target, x, y);
}
// True while the view is in fit-to-window mode (set here by ZoomFit;
// presumably cleared by DoZoom elsewhere in this partial class — TODO confirm).
public bool Fit { get; private set; }
// Scale the image so it fits entirely within the allocation. Scrollbar
// policy is forced to Never for the duration of the zoom (presumably so
// scrollbar size does not feed back into the allocation — verify), then
// restored to Automatic on the main loop.
public void ZoomFit (bool upscale)
{
Gtk.ScrolledWindow scrolled = Parent as Gtk.ScrolledWindow;
if (scrolled != null)
scrolled.SetPolicy (Gtk.PolicyType.Never, Gtk.PolicyType.Never);
min_zoom = ComputeMinZoom (upscale);
this.upscale = upscale;
Fit = true;
DoZoom (MIN_ZOOM, Allocation.Width / 2, Allocation.Height / 2);
if (scrolled != null) {
ThreadAssist.ProxyToMain (() => {
scrolled.SetPolicy (Gtk.PolicyType.Automatic, Gtk.PolicyType.Automatic);
});
}
}
// Map a point in window coordinates to unscaled, unrotated pixbuf
// coordinates, clamping to the visible image area. Returns Point.Zero
// when no image is loaded.
public Point WindowCoordsToImage (Point win)
{
if (Pixbuf == null)
return Point.Zero;
// A scaled image smaller than the window is centered; otherwise the
// scroll offsets determine where the image origin sits.
int x_offset = scaled_width < Allocation.Width ? (int)(Allocation.Width - scaled_width) / 2 : -XOffset;
int y_offset = scaled_height < Allocation.Height ? (int)(Allocation.Height - scaled_height) / 2 : -YOffset;
win.X = Clamp (win.X - x_offset, 0, (int)scaled_width - 1);
win.Y = Clamp (win.Y - y_offset, 0, (int)scaled_height - 1);
// Undo the display orientation before converting back to pixbuf pixels.
win = PixbufUtils.TransformOrientation ((int)scaled_width, (int)scaled_height, win, PixbufUtils.ReverseTransformation (pixbuf_orientation));
// Orientation values <= 4 keep width/height; higher values swap the axes.
// The +.5 rounds to the nearest pixel.
return new Point ((int) Math.Floor (win.X * (double)(((int)PixbufOrientation <= 4 ? Pixbuf.Width : Pixbuf.Height) - 1) / (double)(scaled_width - 1) + .5),
(int) Math.Floor (win.Y * (double)(((int)PixbufOrientation <= 4 ? Pixbuf.Height : Pixbuf.Width) - 1) / (double)(scaled_height - 1) + .5));
}
// Map a point in unrotated pixbuf coordinates to window coordinates
// (inverse of WindowCoordsToImage). Returns Point.Zero when no image is loaded.
public Point ImageCoordsToWindow (Point image)
{
if (Pixbuf == null)
return Point.Zero;
// Apply the display orientation first, then scale up to screen pixels.
image = PixbufUtils.TransformOrientation (Pixbuf.Width, Pixbuf.Height, image, pixbuf_orientation);
int x_offset = scaled_width < Allocation.Width ? (int)(Allocation.Width - scaled_width) / 2 : -XOffset;
int y_offset = scaled_height < Allocation.Height ? (int)(Allocation.Height - scaled_height) / 2 : -YOffset;
// Orientation values <= 4 keep width/height; higher values swap the axes.
return new Point ((int) Math.Floor (image.X * (double) (scaled_width - 1) / (((int)pixbuf_orientation <= 4 ? Pixbuf.Width : Pixbuf.Height) - 1) + 0.5) + x_offset,
(int) Math.Floor (image.Y * (double) (scaled_height - 1) / (((int)pixbuf_orientation <= 4 ? Pixbuf.Height : Pixbuf.Width) - 1) + 0.5) + y_offset);
}
// Map a rectangle in unrotated pixbuf coordinates to window coordinates,
// using the same orientation + scaling transform as the Point overload.
// Returns Rectangle.Zero when no image is loaded.
public Rectangle ImageCoordsToWindow (Rectangle image)
{
if (Pixbuf == null)
return Gdk.Rectangle.Zero;
image = PixbufUtils.TransformOrientation (Pixbuf.Width, Pixbuf.Height, image, pixbuf_orientation);
int x_offset = scaled_width < Allocation.Width ? (int)(Allocation.Width - scaled_width) / 2 : -XOffset;
int y_offset = scaled_height < Allocation.Height ? (int)(Allocation.Height - scaled_height) / 2 : -YOffset;
Gdk.Rectangle win = Gdk.Rectangle.Zero;
win.X = (int) Math.Floor (image.X * (double) (scaled_width - 1) / (((int)pixbuf_orientation <= 4 ? Pixbuf.Width : Pixbuf.Height) - 1) + 0.5) + x_offset;
win.Y = (int) Math.Floor (image.Y * (double) (scaled_height - 1) / (((int)pixbuf_orientation <= 4 ? Pixbuf.Height : Pixbuf.Width) - 1) + 0.5) + y_offset;
// Width/height are derived by transforming the far corner and subtracting
// win.X/win.Y; win.X already contains x_offset, so adding it again here
// yields the transformed corner minus the raw scaled position.
win.Width = (int) Math.Floor ((image.X + image.Width) * (double) (scaled_width - 1) / (((int)pixbuf_orientation <= 4 ? Pixbuf.Width : Pixbuf.Height) - 1) + 0.5) - win.X + x_offset;
win.Height = (int) Math.Floor ((image.Y + image.Height) * (double) (scaled_height - 1) / (((int)pixbuf_orientation <= 4 ? Pixbuf.Height : Pixbuf.Width) - 1) + 0.5) - win.Y + y_offset;
return win;
}
// Raised when the zoom factor changes (OnSizeAllocated raises it directly;
// DoZoom presumably does too — TODO confirm).
public event EventHandler ZoomChanged;
// Raised when the Selection rectangle changes.
public event EventHandler SelectionChanged;
#endregion
#region protected API
// Multiplicative step applied by ZoomIn/ZoomOut.
protected static double ZOOM_FACTOR = 1.1;
protected double max_zoom = 10.0;
protected double MAX_ZOOM {
get { return max_zoom; }
}
// Recomputed from the pixbuf and allocation by ComputeMinZoom; the 0.1
// here is only the value used before the first computation.
protected double min_zoom = 0.1;
protected double MIN_ZOOM {
get { return min_zoom; }
}
// Whether fit-to-window may scale above 1:1; remembered from the last ZoomFit(bool) call.
bool upscale;
// Re-fit the image using the upscale setting from the previous ZoomFit(bool) call.
protected void ZoomFit ()
{
ZoomFit (upscale);
}
// Hook for subclasses to apply a color transform to the pixbuf before it
// is drawn; the base implementation intentionally does nothing.
protected virtual void ApplyColorTransform (Pixbuf pixbuf)
{
}
#endregion
#region GtkWidgetry
// Create the widget's own input/output GdkWindow (this widget cleared the
// NoWindow flag in its constructor) and attach the style to it.
protected override void OnRealized ()
{
SetFlag (Gtk.WidgetFlags.Realized);
GdkWindow = new Gdk.Window (ParentWindow,
new Gdk.WindowAttr {
WindowType = Gdk.WindowType.Child,
X = Allocation.X,
Y = Allocation.Y,
Width = Allocation.Width,
Height = Allocation.Height,
Wclass = Gdk.WindowClass.InputOutput,
Visual = ParentWindow.Visual,
Colormap = ParentWindow.Colormap,
// Events this window listens for, in addition to whatever the
// widget already had set.
Mask = this.Events
| EventMask.ExposureMask
| EventMask.ButtonPressMask
| EventMask.ButtonReleaseMask
| EventMask.PointerMotionMask
| EventMask.PointerMotionHintMask
| EventMask.ScrollMask
| EventMask.KeyPressMask
| EventMask.LeaveNotifyMask
},
Gdk.WindowAttributesType.X | Gdk.WindowAttributesType.Y |
Gdk.WindowAttributesType.Visual | Gdk.WindowAttributesType.Colormap);
// No background pixmap: the widget paints its entire area itself.
GdkWindow.SetBackPixmap (null, false);
// Link the native window back to this managed widget.
GdkWindow.UserData = Handle;
Style.Attach (GdkWindow);
Style.SetBackground (GdkWindow, Gtk.StateType.Normal);
// Let child widgets (defined elsewhere in this partial class) realize too.
OnRealizedChildren ();
}
// Map the children first, then show our own window.
protected override void OnMapped ()
{
SetFlag (Gtk.WidgetFlags.Mapped);
OnMappedChildren ();
GdkWindow.Show ();
}
// Request no minimum size: the view works with whatever it is allocated.
protected override void OnSizeRequested (ref Gtk.Requisition requisition)
{
requisition.Width = requisition.Height = 0;
OnSizeRequestedChildren ();
}
// React to a new allocation: recompute zoom limits and scaled size, move
// the native window, clamp the scroll offsets, and re-fit if in Fit mode.
protected override void OnSizeAllocated (Gdk.Rectangle allocation)
{
min_zoom = ComputeMinZoom (upscale);
if (Fit || zoom < MIN_ZOOM)
zoom = MIN_ZOOM;
// Since this affects the zoom_scale we should alert it
EventHandler eh = ZoomChanged;
if (eh != null)
eh (this, EventArgs.Empty);
ComputeScaledSize ();
OnSizeAllocatedChildren ();
if (IsRealized) {
GdkWindow.MoveResize (allocation.X, allocation.Y, allocation.Width, allocation.Height);
}
// Keep the scroll offsets within the (possibly shrunken) scrollable range.
if (XOffset > Hadjustment.Upper - Hadjustment.PageSize)
ScrollTo ((int)(Hadjustment.Upper - Hadjustment.PageSize), YOffset, false);
if (YOffset > Vadjustment.Upper - Vadjustment.PageSize)
ScrollTo (XOffset, (int)(Vadjustment.Upper - Vadjustment.PageSize), false);
base.OnSizeAllocated (allocation);
if (Fit)
ZoomFit (upscale);
}
/// <summary>
/// Repaints every damaged rectangle of the expose event. Nearest-neighbour
/// (or 1:1 zoom) areas are drawn directly; otherwise the configured
/// interpolation is used. Finally overlays the selection, if enabled.
/// </summary>
protected override bool OnExposeEvent (Gdk.EventExpose evnt)
{
    if (evnt.Window != GdkWindow)
        return false;
    foreach (Rectangle area in evnt.Region.GetRectangles ())
    {
        // Clip the damaged rectangle to the widget's allocation.
        var p_area = new Rectangle (Math.Max (0, area.X), Math.Max (0, area.Y),
            Math.Min (Allocation.Width, area.Width), Math.Min (Allocation.Height, area.Height));
        if (p_area == Rectangle.Zero)
            continue;
        //draw synchronously if InterpType.Nearest or zoom 1:1
        if (Interpolation == InterpType.Nearest || zoom == 1.0) {
            PaintRectangle (p_area, InterpType.Nearest);
            continue;
        }
        //Do this on idle ???
        PaintRectangle (p_area, Interpolation);
    }
    if (can_select)
        OnSelectionExposeEvent (evnt);
    return true;
}
/// <summary>
/// Installs the adjustments handed in by a scrollable container,
/// substituting empty adjustments for nulls. Only re-syncs the offsets
/// when at least one adjustment actually changed.
/// </summary>
protected override void OnSetScrollAdjustments (Gtk.Adjustment hadjustment, Gtk.Adjustment vadjustment)
{
    if (hadjustment == null)
        hadjustment = new Gtk.Adjustment (0, 0, 0, 0, 0, 0);
    if (vadjustment == null)
        vadjustment = new Gtk.Adjustment (0, 0, 0, 0, 0, 0);
    bool need_change = false;
    if (Hadjustment != hadjustment) {
        Hadjustment = hadjustment;
        Hadjustment.Upper = scaled_width;
        Hadjustment.ValueChanged += HandleAdjustmentsValueChanged;
        need_change = true;
    }
    if (Vadjustment != vadjustment) {
        Vadjustment = vadjustment;
        Vadjustment.Upper = scaled_height;
        Vadjustment.ValueChanged += HandleAdjustmentsValueChanged;
        need_change = true;
    }
    if (need_change)
        HandleAdjustmentsValueChanged (this, EventArgs.Empty);
}
/// <summary>
/// Button press: grabs keyboard focus, bails out when the pointer is
/// disabled, then offers the event to the pan handler first and the
/// selection handler second before falling back to Gtk.
/// </summary>
protected override bool OnButtonPressEvent (EventButton evnt)
{
    if (!HasFocus)
        GrabFocus ();
    if (PointerMode == PointerMode.None)
        return false;
    // First handler to claim the event wins; later ones are not consulted.
    bool handled = OnPanButtonPressEvent (evnt);
    if (!handled && can_select)
        handled = OnSelectionButtonPressEvent (evnt);
    return handled || base.OnButtonPressEvent (evnt);
}
/// <summary>
/// Button release: pan handler gets first crack, then the selection
/// handler (when selecting is enabled), then the Gtk default.
/// </summary>
protected override bool OnButtonReleaseEvent (EventButton evnt)
{
    bool handled = OnPanButtonReleaseEvent (evnt);
    if (!handled && can_select)
        handled = OnSelectionButtonReleaseEvent (evnt);
    return handled || base.OnButtonReleaseEvent (evnt);
}
/// <summary>
/// Pointer motion: pan handler first, then the selection handler (when
/// selecting is enabled), then the Gtk default.
/// </summary>
protected override bool OnMotionNotifyEvent (EventMotion evnt)
{
    bool handled = OnPanMotionNotifyEvent (evnt);
    if (!handled && can_select)
        handled = OnSelectionMotionNotifyEvent (evnt);
    return handled || base.OnMotionNotifyEvent (evnt);
}
/// <summary>
/// Mouse wheel handling: a plain scroll zooms about the pointer,
/// Shift+scroll pans by a quarter page, and Shift+Ctrl+scroll pans with
/// the axes swapped.
/// </summary>
protected override bool OnScrollEvent (EventScroll evnt)
{
    if ((evnt.State & ModifierType.ShiftMask) == 0) {//no shift, let's zoom
        ZoomAboutPoint ((evnt.Direction == ScrollDirection.Up || evnt.Direction == ScrollDirection.Right) ? ZOOM_FACTOR : 1.0 / ZOOM_FACTOR,
            (int)evnt.X, (int)evnt.Y);
        return true;
    }
    // Pan step: a quarter of a page per wheel notch.
    int x_incr = (int)Hadjustment.PageIncrement / 4;
    int y_incr = (int)Vadjustment.PageIncrement / 4;
    if ((evnt.State & ModifierType.ControlMask) == 0) {//no control scroll
        ScrollBy ((evnt.Direction == ScrollDirection.Left) ? -x_incr : (evnt.Direction == ScrollDirection.Right) ? x_incr : 0,
            (evnt.Direction == ScrollDirection.Up) ? -y_incr : (evnt.Direction == ScrollDirection.Down) ? y_incr : 0);
        return true;
    }
    //invert x and y for scrolling
    ScrollBy ((evnt.Direction == ScrollDirection.Up) ? -y_incr : (evnt.Direction == ScrollDirection.Down) ? y_incr : 0,
        (evnt.Direction == ScrollDirection.Left) ? -x_incr : (evnt.Direction == ScrollDirection.Right) ? x_incr : 0);
    return true;
}
/// <summary>
/// Keyboard navigation: arrows and h/j/k/l scroll, +/- zoom in and out,
/// 0 fits the image, 1 and 2 jump to 100% / 200% zoom centered on the
/// pointer. Alt- or Ctrl-modified keys are left to the base class.
/// </summary>
protected override bool OnKeyPressEvent (EventKey evnt)
{
    if ((evnt.State & (ModifierType.Mod1Mask | ModifierType.ControlMask)) != 0)
        return base.OnKeyPressEvent (evnt);
    bool handled = true;
    int x, y;
    Gdk.ModifierType type;
    switch(evnt.Key) {
    case Gdk.Key.Up:
    case Gdk.Key.KP_Up:
    case Gdk.Key.k:
    case Gdk.Key.K:
        ScrollBy (0, -Vadjustment.StepIncrement);
        break;
    case Gdk.Key.Down:
    case Gdk.Key.KP_Down:
    case Gdk.Key.j:
    case Gdk.Key.J:
        ScrollBy (0, Vadjustment.StepIncrement);
        break;
    case Gdk.Key.Left:
    case Gdk.Key.KP_Left:
    case Gdk.Key.h:
    case Gdk.Key.H:
        ScrollBy (-Hadjustment.StepIncrement, 0);
        break;
    case Gdk.Key.Right:
    case Gdk.Key.KP_Right:
    case Gdk.Key.l:
    case Gdk.Key.L:
        ScrollBy (Hadjustment.StepIncrement, 0);
        break;
    case Gdk.Key.equal:
    case Gdk.Key.plus:
    case Gdk.Key.KP_Add:
        ZoomIn ();
        break;
    case Gdk.Key.minus:
    case Gdk.Key.KP_Subtract:
        ZoomOut ();
        break;
    case Gdk.Key.Key_0:
    case Gdk.Key.KP_0:
        ZoomFit ();
        break;
    case Gdk.Key.KP_1:
    case Gdk.Key.Key_1:
        // Zoom 1:1 anchored on the current pointer position.
        GdkWindow.GetPointer (out x, out y, out type);
        DoZoom (1.0, x, y);
        break;
    case Gdk.Key.Key_2:
    case Gdk.Key.KP_2:
        // Zoom 2:1 anchored on the current pointer position.
        GdkWindow.GetPointer (out x, out y, out type);
        DoZoom (2.0, x, y);
        break;
    default:
        handled = false;
        break;
    }
    return handled || base.OnKeyPressEvent (evnt);
}
#endregion
#region private painting, zooming and misc
// Current scroll position of the viewport in window pixels
// (distance of the visible top-left corner from the scaled image origin).
int XOffset { get; set;}
int YOffset { get; set;}
/// <summary>
/// Zoom to the given factor, keeping the given viewport point anchored on
/// the same image point.
/// </summary>
/// <param name='zoom'>
/// A zoom factor, expressed as a double; clamped to [MIN_ZOOM, MAX_ZOOM].
/// </param>
/// <param name='x'>
/// The point of the viewport around which to zoom.
/// </param>
/// <param name='y'>
/// The point of the viewport around which to zoom.
/// </param>
void DoZoom (double zoom, int x, int y)
{
    Fit = zoom == MIN_ZOOM;
    if (zoom == this.zoom || Math.Abs (this.zoom - zoom) < Double.Epsilon) {
        // Don't recalculate if the zoom factor stays the same.
        return;
    }
    // Clamp the zoom factor within the [ MIN_ZOOM , MAX_ZOOM ] interval.
    zoom = Math.Max (Math.Min (zoom, MAX_ZOOM), MIN_ZOOM);
    this.zoom = zoom;
    // Express (x, y) as a fraction of the currently scaled image, so the
    // same image point can be placed back under the pointer after rescaling.
    int x_offset = scaled_width < Allocation.Width ? (int)(Allocation.Width - scaled_width) / 2 : -XOffset;
    int y_offset = scaled_height < Allocation.Height ? (int)(Allocation.Height - scaled_height) / 2 : -YOffset;
    double x_anchor = (double)(x - x_offset) / (double)scaled_width;
    double y_anchor = (double)(y - y_offset) / (double)scaled_height;
    ComputeScaledSize ();
    // Detach the adjustment listener while both offsets move, so the
    // intermediate state doesn't trigger a spurious scroll.
    AdjustmentsChanged -= ScrollToAdjustments;
    if (scaled_width < Allocation.Width)
        Hadjustment.Value = XOffset = 0;
    else
        Hadjustment.Value = XOffset = Clamp ((int)(x_anchor * scaled_width - x), 0, (int)(Hadjustment.Upper - Hadjustment.PageSize));
    if (scaled_height < Allocation.Height)
        Vadjustment.Value = YOffset = 0;
    else
        Vadjustment.Value = YOffset = Clamp ((int)(y_anchor * scaled_height - y), 0, (int)(Vadjustment.Upper - Vadjustment.PageSize));
    AdjustmentsChanged += ScrollToAdjustments;
    EventHandler eh = ZoomChanged;
    if (eh != null)
        eh (this, EventArgs.Empty);
    QueueDraw ();
}
// Fills 'area' with the widget's normal-state background color.
// NOTE(review): the 'backgound' (sic) parameter is ignored — callers pass
// the logical background rectangle but only 'area' is painted; confirm
// whether intersecting the two rectangles was intended.
void PaintBackground (Rectangle backgound, Rectangle area)
{
    GdkWindow.DrawRectangle (Style.BackgroundGCs [(int)StateType.Normal], true, area);
}
// Paints one damaged window rectangle: the background border strips first,
// then the (scaled, possibly rotated and color-transformed) pixbuf content.
void PaintRectangle (Rectangle area, InterpType interpolation)
{
    // Position of the image inside the window: centered when smaller than
    // the allocation, otherwise shifted by the scroll offsets.
    int x_offset = scaled_width < Allocation.Width ? (int)(Allocation.Width - scaled_width) / 2 : -XOffset;
    int y_offset = scaled_height < Allocation.Height ? (int)(Allocation.Height - scaled_height) / 2 : -YOffset;
    //Draw background
    if (y_offset > 0) //Top
        PaintBackground (new Rectangle (0, 0, Allocation.Width, y_offset), area);
    if (x_offset > 0) //Left
        PaintBackground (new Rectangle (0, y_offset, x_offset, (int)scaled_height), area);
    if (x_offset >= 0) //Right
        PaintBackground (new Rectangle (x_offset + (int)scaled_width, y_offset, Allocation.Width - x_offset - (int)scaled_width, (int)scaled_height), area);
    if (y_offset >= 0) //Bottom
        PaintBackground (new Rectangle (0, y_offset + (int)scaled_height, Allocation.Width, Allocation.Height - y_offset - (int)scaled_height), area);
    if (Pixbuf == null)
        return;
    // Restrict painting to the part of the area the image actually covers.
    area.Intersect (new Rectangle (x_offset, y_offset, (int)scaled_width, (int)scaled_height));
    if (area.Width <= 0 || area.Height <= 0)
        return;
    //Short circuit for 1:1 zoom
    if (zoom == 1.0 &&
        !Pixbuf.HasAlpha &&
        Pixbuf.BitsPerSample == 8 &&
        pixbuf_orientation == ImageOrientation.TopLeft) {
        GdkWindow.DrawPixbuf (Style.BlackGC,
            Pixbuf,
            area.X - x_offset, area.Y - y_offset,
            area.X, area.Y,
            area.Width, area.Height,
            RgbDither.Max,
            area.X - x_offset, area.Y - y_offset);
        return;
    }
    // Map the window-space area back into the unrotated pixbuf's coordinates.
    Rectangle pixbuf_area = PixbufUtils.TransformOrientation ((int)scaled_width,
        (int)scaled_height,
        new Rectangle ((area.X - x_offset),
            (area.Y - y_offset),
            area.Width,
            area.Height),
        PixbufUtils.ReverseTransformation (pixbuf_orientation));
    using (Pixbuf temp_pixbuf = new Pixbuf (Colorspace.Rgb, false, 8, pixbuf_area.Width, pixbuf_area.Height)) {
        if (Pixbuf.HasAlpha)
            temp_pixbuf.Fill (0x00000000);
        // Scale into the temporary buffer (compositing alpha over the check
        // pattern), then apply the color transform and the rotation.
        Pixbuf.CompositeColor (temp_pixbuf,
            0, 0,
            pixbuf_area.Width, pixbuf_area.Height,
            -pixbuf_area.X, -pixbuf_area.Y,
            zoom, zoom,
            zoom == 1.0 ? InterpType.Nearest : interpolation, 255,
            pixbuf_area.X, pixbuf_area.Y,
            CheckPattern.CheckSize, CheckPattern.Color1, CheckPattern.Color2);
        ApplyColorTransform (temp_pixbuf);
        using (var dest_pixbuf = PixbufUtils.TransformOrientation (temp_pixbuf, pixbuf_orientation)) {
            GdkWindow.DrawPixbuf (Style.BlackGC,
                dest_pixbuf,
                0, 0,
                area.X, area.Y,
                area.Width, area.Height,
                RgbDither.Max,
                area.X - x_offset, area.Y - y_offset);
        }
    }
}
// Dimensions of the pixbuf after applying Zoom, in device pixels.
uint scaled_width, scaled_height;
// Recomputes scaled_width/scaled_height from the pixbuf size, its
// orientation and the current Zoom, then refreshes both adjustments'
// ranges and increments to match.
void ComputeScaledSize ()
{
    if (Pixbuf == null)
        scaled_width = scaled_height = 0;
    else {
        double width;
        double height;
        if ((int)pixbuf_orientation <= 4 ) { //TopLeft, TopRight, BottomRight, BottomLeft
            width = Pixbuf.Width;
            height = Pixbuf.Height;
        } else { //LeftTop, RightTop, RightBottom, LeftBottom
            // Rotated by 90/270 degrees: the axes swap.
            width = Pixbuf.Height;
            height = Pixbuf.Width;
        }
        // Round to the nearest whole pixel.
        scaled_width = (uint)Math.Floor (width * Zoom + .5);
        scaled_height = (uint)Math.Floor (height * Zoom + .5);
    }
    Hadjustment.PageSize = Math.Min (scaled_width, Allocation.Width);
    Hadjustment.PageIncrement = scaled_width * .9;
    Hadjustment.StepIncrement = 32;
    Hadjustment.Upper = scaled_width;
    Hadjustment.Lower = 0;
    Vadjustment.PageSize = Math.Min (scaled_height, Allocation.Height);
    Vadjustment.PageIncrement = scaled_height * .9;
    Vadjustment.StepIncrement = 32;
    Vadjustment.Upper = scaled_height;
    Vadjustment.Lower = 0;
}
event EventHandler AdjustmentsChanged;
// Re-raises adjustment value changes as the internal AdjustmentsChanged
// event; the delegate is snapshotted so an unsubscribe during dispatch is safe.
void HandleAdjustmentsValueChanged (object sender, EventArgs e)
{
    EventHandler handler = AdjustmentsChanged;
    if (handler == null)
        return;
    handler (this, EventArgs.Empty);
}
// Mirrors the adjustments' current values into the viewport offsets,
// without writing the values back into the adjustments.
void ScrollToAdjustments (object sender, EventArgs e)
{
    int target_x = (int)Hadjustment.Value;
    int target_y = (int)Vadjustment.Value;
    ScrollTo (target_x, target_y, false);
}
// Scrolls the viewport to the absolute offset (x, y), clamped to the
// scrollable range. When change_adjustments is true the adjustment values
// are updated too, with the feedback handler temporarily detached to avoid
// re-entering this method.
void ScrollTo (int x, int y, bool change_adjustments)
{
    x = Clamp (x, 0, (int)(Hadjustment.Upper - Hadjustment.PageSize));
    y = Clamp (y, 0, (int)(Vadjustment.Upper - Vadjustment.PageSize));
    int xof = x - XOffset;
    int yof = y - YOffset;
    XOffset = x;
    YOffset = y;
    if (IsRealized) {
        // Blit the still-valid part of the window and repaint the rest now.
        GdkWindow.Scroll (-xof, -yof);
        GdkWindow.ProcessUpdates (true);
    }
    if (change_adjustments) {
        AdjustmentsChanged -= ScrollToAdjustments;
        Hadjustment.Value = XOffset;
        Vadjustment.Value = YOffset;
        AdjustmentsChanged += ScrollToAdjustments;
    }
}
// Relative scroll: translates the current offsets by (x, y) and delegates
// to ScrollTo, updating the adjustments as well.
void ScrollBy (double x, double y)
{
    int target_x = (int)(XOffset + x);
    int target_y = (int)(YOffset + y);
    ScrollTo (target_x, target_y, true);
}
// Clamps 'value' into [min, max]; equivalent to Min(Max(value, min), max),
// so when max < min the result is max.
static int Clamp (int value, int min, int max)
{
    int lower_bounded = value > min ? value : min;
    return lower_bounded < max ? lower_bounded : max;
}
// Computes the smallest allowed zoom: the factor that exactly fits the
// (orientation-corrected) pixbuf into the current allocation. When
// 'upscale' is false the result is additionally capped at 1.0 so small
// images are not enlarged. Returns 0.1 when no pixbuf is loaded.
double ComputeMinZoom (bool upscale)
{
    if (Pixbuf == null)
        return 0.1;
    double width;
    double height;
    if ((int)pixbuf_orientation <= 4 ) { //TopLeft, TopRight, BottomRight, BottomLeft
        width = Pixbuf.Width;
        height = Pixbuf.Height;
    } else { //LeftTop, RightTop, RightBottom, LeftBottom
        // Rotated by 90/270 degrees: swap the dimensions.
        width = Pixbuf.Height;
        height = Pixbuf.Width;
    }
    if (upscale)
        return Math.Min ((double)Allocation.Width / width,
            (double)Allocation.Height / height);
    return Math.Min (1.0,
        Math.Min ((double)Allocation.Width / width,
            (double)Allocation.Height / height));
}
#endregion
#region selection
// Dims everything outside the current selection with a semi-transparent
// gray overlay. Returns false (nothing drawn) when there is no selection.
bool OnSelectionExposeEvent (EventExpose evnt)
{
    if (selection == Rectangle.Zero)
        return false;
    Rectangle win_selection = ImageCoordsToWindow (selection);
    using (var evnt_region = evnt.Region.Copy ()) {
        // Punch the selection rectangle out of the damaged region, then
        // fill whatever remains.
        using (Region r = new Region ()) {
            r.UnionWithRect (win_selection);
            evnt_region.Subtract (r);
        }
        using (Cairo.Context ctx = CairoHelper.Create (GdkWindow)) {
            ctx.SetSourceRGBA (.5, .5, .5, .7);
            CairoHelper.Region (ctx, evnt_region);
            ctx.Fill ();
        }
    }
    return true;
}
// How a button-drag relates to the existing selection rectangle.
enum DragMode {
    None,
    Move,
    Extend,
}
// Pixel distance within which the pointer snaps to a selection edge/corner.
const int SELECTION_SNAP_DISTANCE = 8;
// Classifies a window-space pointer position relative to the selection:
// well inside (beyond the snap distance) => Move, within the snap distance
// of the border on either side => Extend, otherwise None.
DragMode GetDragMode (int x, int y)
{
    Rectangle win_selection = ImageCoordsToWindow (selection);
    if (Rectangle.Inflate (win_selection, -SELECTION_SNAP_DISTANCE, -SELECTION_SNAP_DISTANCE).Contains (x, y))
        return DragMode.Move;
    if (Rectangle.Inflate (win_selection, SELECTION_SNAP_DISTANCE, SELECTION_SNAP_DISTANCE).Contains (x, y))
        return DragMode.Extend;
    return DragMode.None;
}
// True while the user is creating or extending a selection by dragging.
bool is_dragging_selection = false;
// Edge drags resize only one axis; these freeze the perpendicular one.
bool fixed_height = false;
bool fixed_width = false;
// True while an existing selection is being moved as a whole.
bool is_moving_selection = false;
// Image-space point the current drag is anchored to.
Point selection_anchor = Point.Zero;
// Left-button press: starts a new selection drag (outside the selection),
// starts extending it from the opposite corner/edge (near the border), or
// starts moving it (inside). Double clicks cancel any gesture.
bool OnSelectionButtonPressEvent (EventButton evnt)
{
    if (evnt.Button != 1)
        return false;
    if (evnt.Type == EventType.TwoButtonPress) {
        is_dragging_selection = false;
        is_moving_selection = false;
        return false;
    }
    Point img = WindowCoordsToImage (new Point ((int)evnt.X, (int)evnt.Y));
    switch (GetDragMode ((int)evnt.X, (int)evnt.Y)) {
    case DragMode.None:
        // Press outside the selection: begin a fresh one anchored here.
        is_dragging_selection = true;
        PointerMode = PointerMode.Select;
        Selection = Rectangle.Zero;
        selection_anchor = img;
        break;
    case DragMode.Extend:
        Rectangle win_sel = ImageCoordsToWindow (Selection);
        is_dragging_selection = true;
        // Anchor the drag at the corner/edge opposite the grabbed one;
        // edge grabs additionally freeze the perpendicular axis.
        if (Math.Abs (win_sel.X - evnt.X) < SELECTION_SNAP_DISTANCE &&
            Math.Abs (win_sel.Y - evnt.Y) < SELECTION_SNAP_DISTANCE) { //TopLeft
            selection_anchor = new Point (Selection.X + Selection.Width, Selection.Y + Selection.Height);
        } else if (Math.Abs (win_sel.X + win_sel.Width - evnt.X) < SELECTION_SNAP_DISTANCE &&
            Math.Abs (win_sel.Y - evnt.Y) < SELECTION_SNAP_DISTANCE) { //TopRight
            selection_anchor = new Point (Selection.X, Selection.Y + Selection.Height);
        } else if (Math.Abs (win_sel.X - evnt.X) < SELECTION_SNAP_DISTANCE &&
            Math.Abs (win_sel.Y + win_sel.Height - evnt.Y) < SELECTION_SNAP_DISTANCE) { //BottomLeft
            selection_anchor = new Point (Selection.X + Selection.Width, Selection.Y);
        } else if (Math.Abs (win_sel.X + win_sel.Width - evnt.X) < SELECTION_SNAP_DISTANCE &&
            Math.Abs (win_sel.Y + win_sel.Height - evnt.Y) < SELECTION_SNAP_DISTANCE) { //BottomRight
            selection_anchor = new Point (Selection.X, Selection.Y);
        } else if (Math.Abs (win_sel.X - evnt.X) < SELECTION_SNAP_DISTANCE) { //Left
            selection_anchor = new Point (Selection.X + Selection.Width, Selection.Y);
            fixed_height = true;
        } else if (Math.Abs (win_sel.X + win_sel.Width - evnt.X) < SELECTION_SNAP_DISTANCE) { //Right
            selection_anchor = new Point (Selection.X, Selection.Y);
            fixed_height = true;
        } else if (Math.Abs (win_sel.Y - evnt.Y) < SELECTION_SNAP_DISTANCE) { //Top
            selection_anchor = new Point (Selection.X, Selection.Y + Selection.Height);
            fixed_width = true;
        } else if (Math.Abs (win_sel.Y + win_sel.Height - evnt.Y) < SELECTION_SNAP_DISTANCE) { //Bottom
            selection_anchor = new Point (Selection.X, Selection.Y);
            fixed_width = true;
        } else {
            fixed_width = fixed_height = false;
            is_dragging_selection = false;
        }
        break;
    case DragMode.Move:
        is_moving_selection = true;
        selection_anchor = img;
        SelectionSetPointer ((int)evnt.X, (int)evnt.Y);
        break;
    }
    return true;
}
// Ends any selection drag/move on left-button release and refreshes the
// cursor for the pointer's current position.
bool OnSelectionButtonReleaseEvent (EventButton evnt)
{
    // Only the left button finishes a selection gesture.
    if (evnt.Button != 1)
        return false;
    fixed_width = false;
    fixed_height = false;
    is_moving_selection = false;
    is_dragging_selection = false;
    SelectionSetPointer ((int)evnt.X, (int)evnt.Y);
    return true;
}
// Updates the mouse cursor to reflect what a drag at (x, y) would do:
// crosshair while moving, hand over the selection, a side/corner resize
// cursor near an edge, and the default cursor elsewhere.
void SelectionSetPointer (int x, int y)
{
    if (is_moving_selection)
        GdkWindow.Cursor = new Cursor (CursorType.Crosshair);
    else {
        switch (GetDragMode (x, y)) {
        case DragMode.Move:
            GdkWindow.Cursor = new Cursor (CursorType.Hand1);
            break;
        default:
            GdkWindow.Cursor = null;
            break;
        case DragMode.Extend:
            // Pick the resize cursor for whichever corner/edge is within
            // the snap distance; corners are tested before edges.
            Rectangle win_sel = ImageCoordsToWindow (Selection);
            if (Math.Abs (win_sel.X - x) < SELECTION_SNAP_DISTANCE &&
                Math.Abs (win_sel.Y - y) < SELECTION_SNAP_DISTANCE) { //TopLeft
                GdkWindow.Cursor = new Cursor (CursorType.TopLeftCorner);
            } else if (Math.Abs (win_sel.X + win_sel.Width - x) < SELECTION_SNAP_DISTANCE &&
                Math.Abs (win_sel.Y - y) < SELECTION_SNAP_DISTANCE) { //TopRight
                GdkWindow.Cursor = new Cursor (CursorType.TopRightCorner);
            } else if (Math.Abs (win_sel.X - x) < SELECTION_SNAP_DISTANCE &&
                Math.Abs (win_sel.Y + win_sel.Height - y) < SELECTION_SNAP_DISTANCE) { //BottomLeft
                GdkWindow.Cursor = new Cursor (CursorType.BottomLeftCorner);
            } else if (Math.Abs (win_sel.X + win_sel.Width - x) < SELECTION_SNAP_DISTANCE &&
                Math.Abs (win_sel.Y + win_sel.Height - y) < SELECTION_SNAP_DISTANCE) { //BottomRight
                GdkWindow.Cursor = new Cursor (CursorType.BottomRightCorner);
            } else if (Math.Abs (win_sel.X - x) < SELECTION_SNAP_DISTANCE) { //Left
                GdkWindow.Cursor = new Cursor (CursorType.LeftSide);
            } else if (Math.Abs (win_sel.X + win_sel.Width - x) < SELECTION_SNAP_DISTANCE) { //Right
                GdkWindow.Cursor = new Cursor (CursorType.RightSide);
            } else if (Math.Abs (win_sel.Y - y) < SELECTION_SNAP_DISTANCE) { //Top
                GdkWindow.Cursor = new Cursor (CursorType.TopSide);
            } else if (Math.Abs (win_sel.Y + win_sel.Height - y) < SELECTION_SNAP_DISTANCE) { //Bottom
                GdkWindow.Cursor = new Cursor (CursorType.BottomSide);
            }
            break;
        }
    }
}
// Minimum pointer travel (window pixels) before a new selection drag
// actually produces a rectangle.
const int SELECTION_THRESHOLD = 5;
// Pointer motion during a selection gesture: resizes the selection being
// dragged (honoring fixed axes and the aspect-ratio constraint), or moves
// the whole selection clamped inside the pixbuf; always updates the cursor.
bool OnSelectionMotionNotifyEvent (EventMotion evnt)
{
    int x, y;
    ModifierType mod;
    // With PointerMotionHintMask set, the pointer must be polled to keep
    // motion events coming.
    if (evnt.IsHint)
        GdkWindow.GetPointer (out x, out y, out mod);
    else {
        x = (int)evnt.X;
        y = (int)evnt.Y;
    }
    Point img = WindowCoordsToImage (new Point (x, y));
    if (is_dragging_selection) {
        Point win_anchor = ImageCoordsToWindow (selection_anchor);
        // Ignore tiny movements right after the press.
        if (Selection == Rectangle.Zero &&
            Math.Abs (evnt.X - win_anchor.X) < SELECTION_THRESHOLD &&
            Math.Abs (evnt.Y - win_anchor.Y) < SELECTION_THRESHOLD) {
            SelectionSetPointer (x, y);
            return true;
        }
        // A selection_xy_ratio of 0 means "unconstrained aspect".
        if (selection_xy_ratio == 0)
            Selection = new Rectangle (fixed_width ? Selection.X : Math.Min (selection_anchor.X, img.X),
                fixed_height ? Selection.Y : Math.Min (selection_anchor.Y, img.Y),
                fixed_width ? Selection.Width : Math.Abs (selection_anchor.X - img.X),
                fixed_height ? Selection.Height : Math.Abs (selection_anchor.Y - img.Y));
        else
            Selection = ConstrainSelection (new Rectangle (Math.Min (selection_anchor.X, img.X),
                Math.Min (selection_anchor.Y, img.Y),
                Math.Abs (selection_anchor.X - img.X),
                Math.Abs (selection_anchor.Y - img.Y)),
                fixed_width, fixed_height);
        SelectionSetPointer (x, y);
        return true;
    }
    if (is_moving_selection) {
        // Translate the whole selection, keeping it inside the pixbuf.
        Selection = new Rectangle (Clamp (Selection.X + img.X - selection_anchor.X, 0, Pixbuf.Width - Selection.Width),
            Clamp (Selection.Y + img.Y - selection_anchor.Y, 0, Pixbuf.Height - Selection.Height),
            Selection.Width, Selection.Height);
        selection_anchor = img;
        SelectionSetPointer (x, y);
        return true;
    }
    SelectionSetPointer (x, y);
    return true;
}
// Forces 'sel' toward the selection_xy_ratio aspect (orientation-adjusted
// so the constraint follows the rectangle's dominant side), shrinking the
// other axis whenever the result would exceed the pixbuf, and finally
// shifting the rectangle back inside the pixbuf bounds if needed.
// NOTE(review): the fixed_width/fixed_height parameters are never read in
// this body (they shadow the fields of the same name) — confirm intended.
Rectangle ConstrainSelection (Rectangle sel, bool fixed_width, bool fixed_height)
{
    double constrain = selection_xy_ratio;
    // Flip the ratio when the drag's orientation opposes it.
    if ((double)sel.Width > (double)sel.Height && selection_xy_ratio < 1 ||
        (double)sel.Width < (double)sel.Height && selection_xy_ratio > 1)
        constrain = 1.0 / constrain;
    double ratio = (double)sel.Width / (double)sel.Height;
    int height = sel.Height;
    int width = sel.Width;
    if (ratio > constrain) {
        // Too wide for the ratio: grow the height (or shrink the width if
        // the grown height would not fit in the pixbuf).
        height = (int)((double)sel.Width / constrain);
        if (height > Pixbuf.Height) {
            height = sel.Height;
            width = (int)(height * constrain);
        }
    } else {
        // Too tall: grow the width (or shrink the height if it won't fit).
        width = (int)(height * constrain);
        if (width > Pixbuf.Width) {
            width = sel.Width;
            height = (int)((double)width / constrain);
        }
    }
    return new Rectangle (sel.X + width < Pixbuf.Width ? sel.X : Pixbuf.Width - width,
        sel.Y + height < Pixbuf.Height ? sel.Y : Pixbuf.Height - height,
        width, height);
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.Threading.Tests
{
/// <summary>
/// Functional tests for <see cref="ReaderWriterLockSlim"/>: construction,
/// disposal, enter/exit pairing, recursion policies, and multi-threaded
/// reader/writer interactions. The threaded tests coordinate ordering with
/// barriers/events, so statement order is significant throughout.
/// </summary>
public static class ReaderWriterLockSlimTests
{
    [Fact]
    public static void Ctor()
    {
        ReaderWriterLockSlim rwls;
        using (rwls = new ReaderWriterLockSlim())
        {
            Assert.Equal(LockRecursionPolicy.NoRecursion, rwls.RecursionPolicy);
        }
        using (rwls = new ReaderWriterLockSlim(LockRecursionPolicy.NoRecursion))
        {
            Assert.Equal(LockRecursionPolicy.NoRecursion, rwls.RecursionPolicy);
        }
        // An out-of-range policy value silently falls back to NoRecursion.
        using (rwls = new ReaderWriterLockSlim((LockRecursionPolicy)12345))
        {
            Assert.Equal(LockRecursionPolicy.NoRecursion, rwls.RecursionPolicy);
        }
        using (rwls = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion))
        {
            Assert.Equal(LockRecursionPolicy.SupportsRecursion, rwls.RecursionPolicy);
        }
    }
    [Fact]
    public static void Dispose()
    {
        ReaderWriterLockSlim rwls;
        rwls = new ReaderWriterLockSlim();
        rwls.Dispose();
        // Every Enter attempt on a disposed lock must throw.
        Assert.Throws<ObjectDisposedException>(() => rwls.TryEnterReadLock(0));
        Assert.Throws<ObjectDisposedException>(() => rwls.TryEnterUpgradeableReadLock(0));
        Assert.Throws<ObjectDisposedException>(() => rwls.TryEnterWriteLock(0));
        // Double-dispose is a no-op.
        rwls.Dispose();
        // Disposing while any kind of lock is held must throw.
        for (int i = 0; i < 3; i++)
        {
            rwls = new ReaderWriterLockSlim();
            switch (i)
            {
                case 0: rwls.EnterReadLock(); break;
                case 1: rwls.EnterUpgradeableReadLock(); break;
                case 2: rwls.EnterWriteLock(); break;
            }
            Assert.Throws<SynchronizationLockException>(() => rwls.Dispose());
        }
    }
    [Fact]
    public static void EnterExit()
    {
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            // Each IsXxxLockHeld property tracks its own Enter/Exit pair.
            Assert.False(rwls.IsReadLockHeld);
            rwls.EnterReadLock();
            Assert.True(rwls.IsReadLockHeld);
            rwls.ExitReadLock();
            Assert.False(rwls.IsReadLockHeld);
            Assert.False(rwls.IsUpgradeableReadLockHeld);
            rwls.EnterUpgradeableReadLock();
            Assert.True(rwls.IsUpgradeableReadLockHeld);
            rwls.ExitUpgradeableReadLock();
            Assert.False(rwls.IsUpgradeableReadLockHeld);
            Assert.False(rwls.IsWriteLockHeld);
            rwls.EnterWriteLock();
            Assert.True(rwls.IsWriteLockHeld);
            rwls.ExitWriteLock();
            Assert.False(rwls.IsWriteLockHeld);
            // An upgradeable read lock may be upgraded to a write lock even
            // under NoRecursion.
            Assert.False(rwls.IsUpgradeableReadLockHeld);
            rwls.EnterUpgradeableReadLock();
            Assert.False(rwls.IsWriteLockHeld);
            Assert.True(rwls.IsUpgradeableReadLockHeld);
            rwls.EnterWriteLock();
            Assert.True(rwls.IsWriteLockHeld);
            rwls.ExitWriteLock();
            Assert.False(rwls.IsWriteLockHeld);
            Assert.True(rwls.IsUpgradeableReadLockHeld);
            rwls.ExitUpgradeableReadLock();
            Assert.False(rwls.IsUpgradeableReadLockHeld);
            // TryEnter variants with zero and infinite timeouts succeed on
            // an uncontended lock.
            Assert.True(rwls.TryEnterReadLock(0));
            rwls.ExitReadLock();
            Assert.True(rwls.TryEnterReadLock(Timeout.InfiniteTimeSpan));
            rwls.ExitReadLock();
            Assert.True(rwls.TryEnterUpgradeableReadLock(0));
            rwls.ExitUpgradeableReadLock();
            Assert.True(rwls.TryEnterUpgradeableReadLock(Timeout.InfiniteTimeSpan));
            rwls.ExitUpgradeableReadLock();
            Assert.True(rwls.TryEnterWriteLock(0));
            rwls.ExitWriteLock();
            Assert.True(rwls.TryEnterWriteLock(Timeout.InfiniteTimeSpan));
            rwls.ExitWriteLock();
        }
    }
    [Fact]
    public static void DeadlockAvoidance()
    {
        // NoRecursion: any re-entry on the same thread throws.
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            rwls.EnterReadLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterReadLock());
            Assert.Throws<LockRecursionException>(() => rwls.EnterUpgradeableReadLock());
            Assert.Throws<LockRecursionException>(() => rwls.EnterWriteLock());
            rwls.ExitReadLock();
            rwls.EnterUpgradeableReadLock();
            rwls.EnterReadLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterReadLock());
            rwls.ExitReadLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterUpgradeableReadLock());
            rwls.EnterWriteLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterWriteLock());
            rwls.ExitWriteLock();
            rwls.ExitUpgradeableReadLock();
            rwls.EnterWriteLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterReadLock());
            Assert.Throws<LockRecursionException>(() => rwls.EnterUpgradeableReadLock());
            Assert.Throws<LockRecursionException>(() => rwls.EnterWriteLock());
            rwls.ExitWriteLock();
        }
        // SupportsRecursion: same-kind re-entry is allowed, but upgrades
        // from a plain read lock still throw.
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion))
        {
            rwls.EnterReadLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterWriteLock());
            rwls.EnterReadLock();
            Assert.Throws<LockRecursionException>(() => rwls.EnterUpgradeableReadLock());
            rwls.ExitReadLock();
            rwls.ExitReadLock();
            rwls.EnterUpgradeableReadLock();
            rwls.EnterReadLock();
            rwls.EnterUpgradeableReadLock();
            rwls.ExitUpgradeableReadLock();
            rwls.EnterReadLock();
            rwls.ExitReadLock();
            rwls.ExitReadLock();
            rwls.EnterWriteLock();
            rwls.EnterWriteLock();
            rwls.ExitWriteLock();
            rwls.ExitWriteLock();
            rwls.ExitUpgradeableReadLock();
            rwls.EnterWriteLock();
            rwls.EnterReadLock();
            rwls.ExitReadLock();
            rwls.EnterUpgradeableReadLock();
            rwls.ExitUpgradeableReadLock();
            rwls.EnterWriteLock();
            rwls.ExitWriteLock();
            rwls.ExitWriteLock();
        }
    }
    [Theory]
    [InlineData(LockRecursionPolicy.NoRecursion)]
    [InlineData(LockRecursionPolicy.SupportsRecursion)]
    public static void InvalidExits(LockRecursionPolicy policy)
    {
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim(policy))
        {
            // Exiting a lock kind that is not held must throw.
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitReadLock());
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitUpgradeableReadLock());
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitWriteLock());
            rwls.EnterReadLock();
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitUpgradeableReadLock());
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitWriteLock());
            rwls.ExitReadLock();
            rwls.EnterUpgradeableReadLock();
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitReadLock());
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitWriteLock());
            rwls.ExitUpgradeableReadLock();
            rwls.EnterWriteLock();
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitReadLock());
            Assert.Throws<SynchronizationLockException>(() => rwls.ExitUpgradeableReadLock());
            rwls.ExitWriteLock();
            // A write lock held by another thread cannot be exited here.
            using (Barrier barrier = new Barrier(2))
            {
                Task t = Task.Factory.StartNew(() =>
                {
                    rwls.EnterWriteLock();
                    barrier.SignalAndWait();
                    barrier.SignalAndWait();
                    rwls.ExitWriteLock();
                }, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default);
                barrier.SignalAndWait();
                Assert.Throws<SynchronizationLockException>(() => rwls.ExitWriteLock());
                barrier.SignalAndWait();
                t.GetAwaiter().GetResult();
            }
        }
    }
    [Fact]
    public static void InvalidTimeouts()
    {
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            // Timeouts below -1 (Timeout.Infinite) are rejected.
            Assert.Throws<ArgumentOutOfRangeException>(() => rwls.TryEnterReadLock(-2));
            Assert.Throws<ArgumentOutOfRangeException>(() => rwls.TryEnterUpgradeableReadLock(-3));
            Assert.Throws<ArgumentOutOfRangeException>(() => rwls.TryEnterWriteLock(-4));
            Assert.Throws<ArgumentOutOfRangeException>(() => rwls.TryEnterReadLock(TimeSpan.MaxValue));
            Assert.Throws<ArgumentOutOfRangeException>(() => rwls.TryEnterUpgradeableReadLock(TimeSpan.MinValue));
            Assert.Throws<ArgumentOutOfRangeException>(() => rwls.TryEnterWriteLock(TimeSpan.FromMilliseconds(-2)));
        }
    }
    [Fact]
    public static void WritersAreMutuallyExclusiveFromReaders()
    {
        using (Barrier barrier = new Barrier(2))
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            Task.WaitAll(
                Task.Run(() =>
                {
                    rwls.EnterWriteLock();
                    barrier.SignalAndWait();
                    Assert.True(rwls.IsWriteLockHeld);
                    barrier.SignalAndWait();
                    rwls.ExitWriteLock();
                }),
                Task.Run(() =>
                {
                    // While the other task holds the write lock, a read
                    // lock cannot be taken.
                    barrier.SignalAndWait();
                    Assert.False(rwls.TryEnterReadLock(0));
                    Assert.False(rwls.IsReadLockHeld);
                    barrier.SignalAndWait();
                }));
        }
    }
    [Fact]
    public static void WritersAreMutuallyExclusiveFromWriters()
    {
        using (Barrier barrier = new Barrier(2))
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            Task.WaitAll(
                Task.Run(() =>
                {
                    rwls.EnterWriteLock();
                    barrier.SignalAndWait();
                    Assert.True(rwls.IsWriteLockHeld);
                    barrier.SignalAndWait();
                    rwls.ExitWriteLock();
                }),
                Task.Run(() =>
                {
                    // While the other task holds the write lock, a second
                    // write lock cannot be taken.
                    barrier.SignalAndWait();
                    Assert.False(rwls.TryEnterWriteLock(0));
                    Assert.False(rwls.IsReadLockHeld);
                    barrier.SignalAndWait();
                }));
        }
    }
    [Fact]
    public static void ReadersMayBeConcurrent()
    {
        using (Barrier barrier = new Barrier(2))
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            Assert.Equal(0, rwls.CurrentReadCount);
            Task.WaitAll(
                Task.Run(() =>
                {
                    rwls.EnterReadLock();
                    barrier.SignalAndWait(); // 1
                    Assert.True(rwls.IsReadLockHeld);
                    barrier.SignalAndWait(); // 2
                    // Both tasks hold a read lock simultaneously here.
                    Assert.Equal(2, rwls.CurrentReadCount);
                    barrier.SignalAndWait(); // 3
                    barrier.SignalAndWait(); // 4
                    rwls.ExitReadLock();
                }),
                Task.Run(() =>
                {
                    barrier.SignalAndWait(); // 1
                    rwls.EnterReadLock();
                    barrier.SignalAndWait(); // 2
                    Assert.True(rwls.IsReadLockHeld);
                    Assert.Equal(0, rwls.WaitingReadCount);
                    barrier.SignalAndWait(); // 3
                    rwls.ExitReadLock();
                    barrier.SignalAndWait(); // 4
                }));
            Assert.Equal(0, rwls.CurrentReadCount);
        }
    }
    [Fact]
    public static void WriterToWriterChain()
    {
        using (AutoResetEvent are = new AutoResetEvent(false))
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            rwls.EnterWriteLock();
            Task t = Task.Factory.StartNew(() =>
            {
                Assert.False(rwls.TryEnterWriteLock(10));
                Task.Run(() => are.Set()); // ideally this won't fire until we've called EnterWriteLock, but it's a benign race in that the test will succeed either way
                rwls.EnterWriteLock();
                rwls.ExitWriteLock();
            }, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default);
            are.WaitOne();
            rwls.ExitWriteLock();
            t.GetAwaiter().GetResult();
        }
    }
    [Fact]
    public static void WriterToReaderChain()
    {
        using (AutoResetEvent are = new AutoResetEvent(false))
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            rwls.EnterWriteLock();
            Task t = Task.Factory.StartNew(() =>
            {
                Assert.False(rwls.TryEnterReadLock(TimeSpan.FromMilliseconds(10)));
                Task.Run(() => are.Set()); // ideally this won't fire until we've called EnterReadLock, but it's a benign race in that the test will succeed either way
                rwls.EnterReadLock();
                rwls.ExitReadLock();
            }, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default);
            are.WaitOne();
            rwls.ExitWriteLock();
            t.GetAwaiter().GetResult();
        }
    }
    [Fact]
    public static void WriterToUpgradeableReaderChain()
    {
        using (AutoResetEvent are = new AutoResetEvent(false))
        using (ReaderWriterLockSlim rwls = new ReaderWriterLockSlim())
        {
            rwls.EnterWriteLock();
            Task t = Task.Factory.StartNew(() =>
            {
                Assert.False(rwls.TryEnterUpgradeableReadLock(TimeSpan.FromMilliseconds(10)));
                Task.Run(() => are.Set()); // ideally this won't fire until we've called EnterReadLock, but it's a benign race in that the test will succeed either way
                rwls.EnterUpgradeableReadLock();
                rwls.ExitUpgradeableReadLock();
            }, CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default);
            are.WaitOne();
            rwls.ExitWriteLock();
            t.GetAwaiter().GetResult();
        }
    }
    [Fact]
    [OuterLoop]
    [SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "Hangs in desktop, issue dotnet/corefx#3364 is not fixed there")]
    public static void ReleaseReadersWhenWaitingWriterTimesOut()
    {
        using (var rwls = new ReaderWriterLockSlim())
        {
            // Enter the read lock
            rwls.EnterReadLock();
            // Typical order of execution: 0
            Thread writeWaiterThread;
            using (var beforeTryEnterWriteLock = new ManualResetEvent(false))
            {
                writeWaiterThread =
                    new Thread(() =>
                    {
                        // Typical order of execution: 1
                        // Add a writer to the wait list for enough time to allow successive readers to enter the wait list while this
                        // writer is waiting
                        beforeTryEnterWriteLock.Set();
                        if (rwls.TryEnterWriteLock(1000))
                        {
                            // The typical order of execution is not guaranteed, as sleep times are not guaranteed. For
                            // instance, before this write lock is added to the wait list, the two new read locks may be
                            // acquired. In that case, the test may complete before or while the write lock is taken.
                            rwls.ExitWriteLock();
                        }
                        // Typical order of execution: 4
                    });
                writeWaiterThread.IsBackground = true;
                writeWaiterThread.Start();
                beforeTryEnterWriteLock.WaitOne();
            }
            Thread.Sleep(500); // wait for TryEnterWriteLock to enter the wait list
            // A writer should now be waiting, add readers to the wait list. Since a read lock is still acquired, the writer
            // should time out waiting, then these readers should enter and exit the lock.
            ThreadStart EnterAndExitReadLock = () =>
            {
                // Typical order of execution: 2, 3
                rwls.EnterReadLock();
                // Typical order of execution: 5, 6
                rwls.ExitReadLock();
            };
            var readerThreads =
                new Thread[]
                {
                    new Thread(EnterAndExitReadLock),
                    new Thread(EnterAndExitReadLock)
                };
            foreach (var readerThread in readerThreads)
            {
                readerThread.IsBackground = true;
                readerThread.Start();
            }
            foreach (var readerThread in readerThreads)
            {
                readerThread.Join();
            }
            rwls.ExitReadLock();
            // Typical order of execution: 7
            writeWaiterThread.Join();
        }
    }
    [Fact]
    [OuterLoop]
    public static void DontReleaseWaitingReadersWhenThereAreWaitingWriters()
    {
        using(var rwls = new ReaderWriterLockSlim())
        {
            rwls.EnterUpgradeableReadLock();
            rwls.EnterWriteLock();
            // Typical order of execution: 0
            // Add a waiting writer
            var threads = new Thread[2];
            using(var beforeEnterWriteLock = new ManualResetEvent(false))
            {
                var thread =
                    new Thread(() =>
                    {
                        beforeEnterWriteLock.Set();
                        rwls.EnterWriteLock();
                        // Typical order of execution: 3
                        rwls.ExitWriteLock();
                    });
                thread.IsBackground = true;
                thread.Start();
                threads[0] = thread;
                beforeEnterWriteLock.WaitOne();
            }
            // Add a waiting reader
            using(var beforeEnterReadLock = new ManualResetEvent(false))
            {
                var thread =
                    new Thread(() =>
                    {
                        beforeEnterReadLock.Set();
                        rwls.EnterReadLock();
                        // Typical order of execution: 4
                        rwls.ExitReadLock();
                    });
                thread.IsBackground = true;
                thread.Start();
                threads[1] = thread;
                beforeEnterReadLock.WaitOne();
            }
            // Wait for the background threads to block waiting for their locks
            Thread.Sleep(1000);
            // Typical order of execution: 1
            rwls.ExitWriteLock();
            // At this point there is still one reader and one waiting writer, so the reader-writer lock should not try to
            // release any of the threads waiting for a lock
            // Typical order of execution: 2
            rwls.ExitUpgradeableReadLock();
            // At this point, the waiting writer should be released, and the waiting reader should not
            foreach(var thread in threads)
                thread.Join();
            // Typical order of execution: 5
        }
    }
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.Linq;
using ASC.Common.Data;
using ASC.Common.Data.Sql;
using ASC.Common.Data.Sql.Expressions;
using ASC.Core;
using ASC.Files.Core;
using ASC.Files.Core.Data;
using ASC.Files.Core.Security;
using ASC.Web.Core;
using ASC.Web.Core.Files;
namespace ASC.Feed.Aggregator.Modules.Documents
{
/// <summary>
/// Feed aggregator module that produces news-feed entries for document folders:
/// one entry when a folder is created, one per recipient when a folder is shared.
/// </summary>
internal class FoldersModule : FeedModule
{
    // Feed item type identifiers used when building feed entries.
    private const string folderItem = "folder";
    private const string sharedFolderItem = "sharedFolder";

    protected override string Table
    {
        get { return "files_folder"; }
    }

    protected override string LastUpdatedColumn
    {
        get { return "create_on"; }
    }

    protected override string TenantColumn
    {
        get { return "tenant_id"; }
    }

    protected override string DbId
    {
        get { return Constants.FilesDbId; }
    }

    public override string Name
    {
        get { return Constants.FoldersModule; }
    }

    public override string Product
    {
        get { return ModulesHelper.DocumentsProductName; }
    }

    public override Guid ProductID
    {
        get { return ModulesHelper.DocumentsProductID; }
    }

    /// <summary>
    /// Determines whether a feed entry is visible to the given user: the user must have
    /// access to the Documents product, match the feed target (a user or a group), and
    /// have read access to the folder.
    /// </summary>
    public override bool VisibleFor(Feed feed, object data, Guid userId)
    {
        if (!WebItemSecurity.IsAvailableForUser(ProductID, userId)) return false;

        var tuple = (Tuple<Folder, SmallShareRecord>)data;
        var folder = tuple.Item1;
        var shareRecord = tuple.Item2;

        bool targetCond;
        if (feed.Target != null)
        {
            // The user who performed the share should not see their own share feed.
            if (shareRecord != null && shareRecord.ShareBy == userId) return false;

            var owner = (Guid)feed.Target;
            var groupUsers = CoreContext.UserManager.GetUsersByGroup(owner).Select(x => x.ID).ToList();
            if (!groupUsers.Any())
            {
                // Target resolved to no group members, so treat it as a single user id.
                groupUsers.Add(owner);
            }
            targetCond = groupUsers.Contains(userId);
        }
        else
        {
            targetCond = true;
        }

        return targetCond &&
               new FileSecurity(new DaoFactory()).CanRead(folder, userId);
    }

    /// <summary>
    /// Returns the ids of tenants that have folder or folder-security changes after
    /// <paramref name="fromTime"/> and therefore may have new feeds to aggregate.
    /// </summary>
    public override IEnumerable<int> GetTenantsWithFeeds(DateTime fromTime)
    {
        var q1 = new SqlQuery("files_folder")
            .Select("tenant_id")
            .Where(Exp.Gt("modified_on", fromTime))
            .GroupBy(1)
            .Having(Exp.Gt("count(*)", 0));

        var q2 = new SqlQuery("files_security")
            .Select("tenant_id")
            .Where(Exp.Gt("timestamp", fromTime))
            .GroupBy(1)
            .Having(Exp.Gt("count(*)", 0));

        using (var db = DbManager.FromHttpContext(DbId))
        {
            return db.ExecuteList(q1)
                     .ConvertAll(r => Convert.ToInt32(r[0]))
                     .Union(db.ExecuteList(q2).ConvertAll(r => Convert.ToInt32(r[0])));
        }
    }

    /// <summary>
    /// Loads created folders (q1) and shared folders (q2) for the filter's tenant/time
    /// window and converts them to feed entries. Trash and bunch folders are excluded.
    /// </summary>
    public override IEnumerable<Tuple<Feed, object>> GetFeeds(FeedFilter filter)
    {
        // q1: plain folder creations; the three trailing nulls pad the share columns
        // (timestamp, owner, subject) so both queries have identical column layouts.
        var q1 = new SqlQuery("files_folder f")
            .Select(FolderColumns().Select(f => "f." + f).ToArray())
            .Select(DocumentsDbHelper.GetRootFolderType("parent_id"))
            .Select("null, null, null")
            .Where(
                Exp.Eq("f.tenant_id", filter.Tenant) &
                Exp.Eq("f.folder_type", 0) &
                Exp.Between("f.create_on", filter.Time.From, filter.Time.To)
            );

        // q2: folders with share records in the time window.
        var q2 = new SqlQuery("files_folder f")
            .LeftOuterJoin("files_security s",
                           Exp.EqColumns("s.entry_id", "f.id") &
                           Exp.Eq("s.tenant_id", filter.Tenant) &
                           Exp.Eq("s.entry_type", (int)FileEntryType.Folder)
            )
            .Select(FolderColumns().Select(f => "f." + f).ToArray())
            .Select(DocumentsDbHelper.GetRootFolderType("parent_id"))
            .Select("s.timestamp, s.owner, s.subject")
            .Where(
                Exp.Eq("f.tenant_id", filter.Tenant) &
                Exp.Eq("f.folder_type", 0) &
                Exp.Lt("s.security", 3) &
                Exp.Between("s.timestamp", filter.Time.From, filter.Time.To)
            );

        List<Tuple<Folder, SmallShareRecord>> folders;
        using (var db = DbManager.FromHttpContext(DbId))
        {
            folders = db.ExecuteList(q1.UnionAll(q2))
                        .ConvertAll(ToFolder)
                        .Where(f => f.Item1.RootFolderType != FolderType.TRASH && f.Item1.RootFolderType != FolderType.BUNCH)
                        .ToList();
        }

        var parentFolderIDs = folders.Select(r => r.Item1.ParentFolderID).ToList();
        var parentFolders = new FolderDao(Tenant, DbId).GetFolders(parentFolderIDs, checkShare: false);

        return folders.Select(f => new Tuple<Feed, object>(ToFeed(f, parentFolders.FirstOrDefault(r => r.ID.Equals(f.Item1.ParentFolderID))), f));
    }

    /// <summary>
    /// The folder columns selected by both feed queries, in the order expected by ToFolder.
    /// </summary>
    private static IEnumerable<string> FolderColumns()
    {
        return new[]
            {
                "id",
                "parent_id",
                "title",
                "create_by",
                "create_on",
                "modified_by",
                "modified_on",
                "foldersCount",
                "filesCount" // 8
            };
    }

    /// <summary>
    /// Maps a raw result row (folder columns 0-8, root folder info at 9, optional share
    /// columns at 10-12) to a folder plus an optional share record.
    /// </summary>
    private static Tuple<Folder, SmallShareRecord> ToFolder(object[] r)
    {
        var folder = new Folder
            {
                ID = Convert.ToInt32(r[0]),
                ParentFolderID = Convert.ToInt32(r[1]),
                Title = Convert.ToString(r[2]),
                CreateBy = new Guid(Convert.ToString(r[3])),
                CreateOn = Convert.ToDateTime(r[4]),
                ModifiedBy = new Guid(Convert.ToString(r[5])),
                ModifiedOn = Convert.ToDateTime(r[6]),
                TotalSubFolders = Convert.ToInt32(r[7]),
                TotalFiles = Convert.ToInt32(r[8]),
                RootFolderType = DocumentsDbHelper.ParseRootFolderType(r[9]),
                RootFolderCreator = DocumentsDbHelper.ParseRootFolderCreator(r[9]),
                RootFolderId = DocumentsDbHelper.ParseRootFolderId(r[9])
            };

        // r[10] is null for rows coming from q1 (no share record).
        SmallShareRecord shareRecord = null;
        if (r[10] != null)
        {
            shareRecord = new SmallShareRecord
                {
                    ShareOn = Convert.ToDateTime(r[10]),
                    ShareBy = new Guid(Convert.ToString(r[11])),
                    ShareTo = new Guid(Convert.ToString(r[12]))
                };
        }

        return new Tuple<Folder, SmallShareRecord>(folder, shareRecord);
    }

    /// <summary>
    /// Builds the feed entry: a "sharedFolder" feed when a share record is present,
    /// otherwise a plain "folder" creation feed.
    /// </summary>
    private Feed ToFeed(Tuple<Folder, SmallShareRecord> tuple, Folder rootFolder)
    {
        var folder = tuple.Item1;
        var shareRecord = tuple.Item2;

        // Fix: rootFolder comes from parentFolders.FirstOrDefault(...) in GetFeeds and can be
        // null when the parent folder could not be loaded; previously this threw a
        // NullReferenceException on rootFolder.FolderType.
        var inDefaultRoot = rootFolder != null && rootFolder.FolderType == FolderType.DEFAULT;
        var extraLocation = inDefaultRoot ? rootFolder.Title : string.Empty;
        var extraLocationUrl = inDefaultRoot ? FilesLinkUtility.GetFileRedirectPreviewUrl(folder.ParentFolderID, false) : string.Empty;

        if (shareRecord != null)
        {
            var feed = new Feed(shareRecord.ShareBy, shareRecord.ShareOn, true)
                {
                    Item = sharedFolderItem,
                    ItemId = string.Format("{0}_{1}", folder.ID, shareRecord.ShareTo),
                    ItemUrl = FilesLinkUtility.GetFileRedirectPreviewUrl(folder.ID, false),
                    Product = Product,
                    Module = Name,
                    Title = folder.Title,
                    ExtraLocation = extraLocation,
                    ExtraLocationUrl = extraLocationUrl,
                    Keywords = string.Format("{0}", folder.Title),
                    HasPreview = false,
                    CanComment = false,
                    Target = shareRecord.ShareTo,
                    GroupId = GetGroupId(sharedFolderItem, shareRecord.ShareBy, folder.ParentFolderID.ToString())
                };
            return feed;
        }

        return new Feed(folder.CreateBy, folder.CreateOn)
            {
                Item = folderItem,
                ItemId = folder.ID.ToString(),
                ItemUrl = FilesLinkUtility.GetFileRedirectPreviewUrl(folder.ID, false),
                Product = Product,
                Module = Name,
                Title = folder.Title,
                ExtraLocation = extraLocation,
                ExtraLocationUrl = extraLocationUrl,
                Keywords = string.Format("{0}", folder.Title),
                HasPreview = false,
                CanComment = false,
                Target = null,
                GroupId = GetGroupId(folderItem, folder.CreateBy, folder.ParentFolderID.ToString())
            };
    }
}
}
| |
namespace Microsoft.Protocols.TestSuites.Common
{
    // Thin typed wrappers over ActiveSyncResponseBase<T>: each command response class
    // simply fixes the generic response-data type and initializes it in the constructor.

    #region AutodiscoverResponse
    /// <summary>
    /// The class of Autodiscover response.
    /// </summary>
    public class AutodiscoverResponse : ActiveSyncResponseBase<Response.Autodiscover>
    {
        /// <summary>
        /// Initializes a new instance of the AutodiscoverResponse class.
        /// </summary>
        public AutodiscoverResponse()
        {
            this.ResponseData = new Response.Autodiscover();
        }
    }
    #endregion

    #region SyncResponse
    /// <summary>
    /// The class of Sync response.
    /// </summary>
    public class SyncResponse : ActiveSyncResponseBase<Response.Sync>
    {
        /// <summary>
        /// Initializes a new instance of the SyncResponse class.
        /// </summary>
        public SyncResponse()
        {
            this.ResponseData = new Response.Sync();
        }
    }
    #endregion

    #region Find
    /// <summary>
    /// The class of Find response.
    /// </summary>
    public class FindResponse : ActiveSyncResponseBase<Response.Find>
    {
        /// <summary>
        /// Initializes a new instance of the FindResponse class.
        /// </summary>
        public FindResponse()
        {
            this.ResponseData = new Response.Find();
        }
    }
    #endregion

    #region FolderSyncResponse
    /// <summary>
    /// The class of FolderSync response.
    /// </summary>
    public class FolderSyncResponse : ActiveSyncResponseBase<Response.FolderSync>
    {
        /// <summary>
        /// Initializes a new instance of the FolderSyncResponse class.
        /// </summary>
        public FolderSyncResponse()
        {
            this.ResponseData = new Response.FolderSync();
        }
    }
    #endregion

    #region FolderCreateResponse
    /// <summary>
    /// The class of FolderCreate response.
    /// </summary>
    public class FolderCreateResponse : ActiveSyncResponseBase<Response.FolderCreate>
    {
        /// <summary>
        /// Initializes a new instance of the FolderCreateResponse class.
        /// </summary>
        public FolderCreateResponse()
        {
            this.ResponseData = new Response.FolderCreate();
        }
    }
    #endregion

    #region FolderDeleteResponse
    /// <summary>
    /// The class of FolderDelete response.
    /// </summary>
    public class FolderDeleteResponse : ActiveSyncResponseBase<Response.FolderDelete>
    {
        /// <summary>
        /// Initializes a new instance of the FolderDeleteResponse class.
        /// </summary>
        public FolderDeleteResponse()
        {
            this.ResponseData = new Response.FolderDelete();
        }
    }
    #endregion

    #region FolderUpdateResponse
    /// <summary>
    /// The class of FolderUpdate response.
    /// </summary>
    public class FolderUpdateResponse : ActiveSyncResponseBase<Response.FolderUpdate>
    {
        /// <summary>
        /// Initializes a new instance of the FolderUpdateResponse class.
        /// </summary>
        public FolderUpdateResponse()
        {
            this.ResponseData = new Response.FolderUpdate();
        }
    }
    #endregion

    #region GetAttachmentResponse
    /// <summary>
    /// The class of GetAttachment response.
    /// </summary>
    public class GetAttachmentResponse : ActiveSyncResponseBase<object>
    {
        /// <summary>
        /// Initializes a new instance of the GetAttachmentResponse class.
        /// </summary>
        public GetAttachmentResponse()
        {
            // No structured response body is modeled for this command; ResponseData stays null.
            this.ResponseData = null;
        }
    }
    #endregion

    #region GetHierarchyResponse
    /// <summary>
    /// The class of GetHierarchy response.
    /// </summary>
    public class GetHierarchyResponse : ActiveSyncResponseBase<Response.Folders>
    {
        /// <summary>
        /// Initializes a new instance of the GetHierarchyResponse class.
        /// </summary>
        public GetHierarchyResponse()
        {
            this.ResponseData = new Response.Folders();
        }
    }
    #endregion

    #region GetItemEstimateResponse
    /// <summary>
    /// The class of GetItemEstimate response.
    /// </summary>
    public class GetItemEstimateResponse : ActiveSyncResponseBase<Response.GetItemEstimate>
    {
        /// <summary>
        /// Initializes a new instance of the GetItemEstimateResponse class.
        /// </summary>
        public GetItemEstimateResponse()
        {
            this.ResponseData = new Response.GetItemEstimate();
        }
    }
    #endregion

    #region ItemOperationsResponse
    /// <summary>
    /// The class of ItemOperations response.
    /// </summary>
    public class ItemOperationsResponse : ActiveSyncResponseBase<Response.ItemOperations>
    {
        /// <summary>
        /// The MultiPart response.
        /// </summary>
        private MultipartMetadata metadata;

        /// <summary>
        /// Initializes a new instance of the ItemOperationsResponse class.
        /// </summary>
        public ItemOperationsResponse()
        {
            this.ResponseData = new Response.ItemOperations();
            // No public setter exists for MultipartMetadata; presumably it is assigned
            // internally/elsewhere when a multipart response is received — TODO confirm.
            this.metadata = null;
        }

        /// <summary>
        /// Gets the MultiPart response.
        /// </summary>
        public MultipartMetadata MultipartMetadata
        {
            get
            {
                return this.metadata;
            }
        }
    }
    #endregion

    #region MeetingResponseResponse
    /// <summary>
    /// The class of MeetingResponse response.
    /// </summary>
    public class MeetingResponseResponse : ActiveSyncResponseBase<Response.MeetingResponse>
    {
        /// <summary>
        /// Initializes a new instance of the MeetingResponseResponse class.
        /// </summary>
        public MeetingResponseResponse()
        {
            this.ResponseData = new Response.MeetingResponse();
        }
    }
    #endregion

    #region MoveItemsResponse
    /// <summary>
    /// The class of MoveItems response.
    /// </summary>
    public class MoveItemsResponse : ActiveSyncResponseBase<Response.MoveItems>
    {
        /// <summary>
        /// Initializes a new instance of the MoveItemsResponse class.
        /// </summary>
        public MoveItemsResponse()
        {
            this.ResponseData = new Response.MoveItems();
        }
    }
    #endregion

    #region PingResponse
    /// <summary>
    /// The class of Ping response.
    /// </summary>
    public class PingResponse : ActiveSyncResponseBase<Response.Ping>
    {
        /// <summary>
        /// Initializes a new instance of the PingResponse class.
        /// </summary>
        public PingResponse()
        {
            this.ResponseData = new Response.Ping();
        }
    }
    #endregion

    #region ProvisionResponse
    /// <summary>
    /// The class of Provision response.
    /// </summary>
    public class ProvisionResponse : ActiveSyncResponseBase<Response.Provision>
    {
        /// <summary>
        /// Initializes a new instance of the ProvisionResponse class.
        /// </summary>
        public ProvisionResponse()
        {
            this.ResponseData = new Response.Provision();
        }
    }
    #endregion

    #region ResolveRecipientsResponse
    /// <summary>
    /// The class of ResolveRecipient response.
    /// </summary>
    public class ResolveRecipientsResponse : ActiveSyncResponseBase<Response.ResolveRecipients>
    {
        /// <summary>
        /// Initializes a new instance of the ResolveRecipientsResponse class.
        /// </summary>
        public ResolveRecipientsResponse()
        {
            this.ResponseData = new Response.ResolveRecipients();
        }
    }
    #endregion

    #region SearchResponse
    /// <summary>
    /// The class of Search response.
    /// </summary>
    public class SearchResponse : ActiveSyncResponseBase<Response.Search>
    {
        /// <summary>
        /// Initializes a new instance of the SearchResponse class.
        /// </summary>
        public SearchResponse()
        {
            this.ResponseData = new Response.Search();
        }
    }
    #endregion

    #region SendMailResponse
    /// <summary>
    /// The class of SendMail response.
    /// </summary>
    public class SendMailResponse : ActiveSyncResponseBase<Response.SendMail>
    {
        /// <summary>
        /// Initializes a new instance of the SendMailResponse class.
        /// </summary>
        public SendMailResponse()
        {
            this.ResponseData = new Response.SendMail();
        }
    }
    #endregion

    #region SettingsResponse
    /// <summary>
    /// The class of Settings response.
    /// </summary>
    public class SettingsResponse : ActiveSyncResponseBase<Response.Settings>
    {
        /// <summary>
        /// Initializes a new instance of the SettingsResponse class.
        /// </summary>
        public SettingsResponse()
        {
            this.ResponseData = new Response.Settings();
        }
    }
    #endregion

    #region SmartForwardResponse
    /// <summary>
    /// The class of SmartForward response.
    /// </summary>
    public class SmartForwardResponse : ActiveSyncResponseBase<Response.SmartForward>
    {
        /// <summary>
        /// Initializes a new instance of the SmartForwardResponse class.
        /// </summary>
        public SmartForwardResponse()
        {
            this.ResponseData = new Response.SmartForward();
        }
    }
    #endregion

    #region SmartReplyResponse
    /// <summary>
    /// The class of SmartReply response.
    /// </summary>
    public class SmartReplyResponse : ActiveSyncResponseBase<Response.SmartReply>
    {
        /// <summary>
        /// Initializes a new instance of the SmartReplyResponse class.
        /// </summary>
        public SmartReplyResponse()
        {
            this.ResponseData = new Response.SmartReply();
        }
    }
    #endregion

    #region ValidateCertResponse
    /// <summary>
    /// The class of ValidateCert response.
    /// </summary>
    public class ValidateCertResponse : ActiveSyncResponseBase<Response.ValidateCert>
    {
        /// <summary>
        /// Initializes a new instance of the ValidateCertResponse class.
        /// </summary>
        public ValidateCertResponse()
        {
            this.ResponseData = new Response.ValidateCert();
        }
    }
    #endregion

    #region OptionsResponse
    /// <summary>
    /// The class of Options response.
    /// </summary>
    public class OptionsResponse : ActiveSyncResponseBase<object>
    {
        /// <summary>
        /// Initializes a new instance of the OptionsResponse class.
        /// </summary>
        public OptionsResponse()
        {
            // No structured response body is modeled for this command; ResponseData stays null.
            this.ResponseData = null;
        }
    }
    #endregion

    #region SendStringResponse
    /// <summary>
    /// The class of SendString response.
    /// </summary>
    public class SendStringResponse : ActiveSyncResponseBase<object>
    {
        /// <summary>
        /// Initializes a new instance of the SendStringResponse class.
        /// </summary>
        public SendStringResponse()
        {
            // No structured response body is modeled for this command; ResponseData stays null.
            this.ResponseData = null;
        }
    }
    #endregion
}
| |
using PhotoNet.Common;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace RawNet.Format.Tiff
{
/// <summary>
/// Represents a TIFF Image File Directory: parses its tag entries, loads their data,
/// and recursively parses sub-IFDs (EXIF, GPS, vendor makernotes, DNG private data).
/// </summary>
internal class IFD
{
    public ushort tagNumber;
    public Dictionary<TagType, Tag> tags = new Dictionary<TagType, Tag>();
    public List<IFD> subIFD = new List<IFD>();
    public uint NextOffset { get; protected set; }
    public Endianness endian = Endianness.Unknown;
    // Recursion depth of this IFD, used to cap nested sub-IFD parsing.
    public int Depth { private set; get; }
    public int RelativeOffset { protected set; get; }
    public uint Offset { protected set; get; }
    public IFDType type;

    // Magic byte sequences identifying vendor makernote formats.
    protected static char[] fuji_signature = {
        'F', 'U', 'J', 'I', 'F', 'I', 'L', 'M', (char)0x0c,(char) 0x00,(char) 0x00,(char) 0x00
    };
    protected static char[] nikon_v3_signature = {
        'N', 'i', 'k', 'o', 'n', (char)0x0,(char) 0x2
    };

    // Maximum nesting depth before parsing is aborted (guards against cyclic offsets).
    private static readonly int MaxRecursion = 20;

    public IFD(Endianness endian, int depth)
    {
        this.endian = endian;
        Depth = depth + 1;
    }

    public IFD(ImageBinaryReader fileStream, uint offset, Endianness endian, int depth) : this(IFDType.Plain, fileStream, offset, endian, depth, 0) { }
    public IFD(ImageBinaryReader fileStream, uint offset, Endianness endian, int depth, int relativeOffset) : this(IFDType.Plain, fileStream, offset, endian, depth, relativeOffset) { }
    public IFD(IFDType type, ImageBinaryReader fileStream, uint offset, Endianness endian, int depth) : this(type, fileStream, offset, endian, depth, 0) { }

    /// <summary>
    /// Seeks to the IFD's position (offset plus optional relative base) and parses it.
    /// Throws IndexOutOfRangeException when the recursion limit is exceeded.
    /// </summary>
    public IFD(IFDType type, ImageBinaryReader fileStream, uint offset, Endianness endian, int depth, int relativeOffset) : this(endian, depth)
    {
        this.type = type;
        if (relativeOffset > 0)
            fileStream.Position = offset + relativeOffset;
        else
            fileStream.Position = offset;

        Offset = offset;
        RelativeOffset = relativeOffset;

        if (depth > MaxRecursion)
        {
            throw new IndexOutOfRangeException();
        }
        Parse(fileStream);
    }

    /// <summary>
    /// Reads the tag table, loads each tag's data (makernotes deferred), then parses
    /// special tags that point to nested IFDs. Per-tag parse errors are swallowed so a
    /// single bad tag does not abort the whole IFD.
    /// </summary>
    protected void Parse(ImageBinaryReader fileStream)
    {
        tagNumber = fileStream.ReadUInt16();
        Debug.Assert(tagNumber < UInt16.MaxValue);
        for (int i = 0; i < tagNumber; i++)
        {
            Tag temp = new Tag(fileStream, RelativeOffset);
            // Duplicate tag ids are silently skipped; the first occurrence wins.
            if (!tags.ContainsKey(temp.TagId))
            {
                tags.Add(temp.TagId, temp);
            }
        }
        NextOffset = fileStream.ReadUInt32();

        foreach (Tag tag in tags.Values)
        {
            if (tag.TagId != TagType.MAKERNOTE && tag.TagId != TagType.MAKERNOTE_ALT)
            {
                tag.ReadData(fileStream);
            }
        }

        foreach (Tag tag in tags.Values)
        {
            try
            {
                //Special tag
                switch (tag.TagId)
                {
                    case TagType.DNGPRIVATEDATA:
                        IFD maker_ifd = ParseDngPrivateData(tag);
                        if (maker_ifd != null)
                        {
                            subIFD.Add(maker_ifd);
                            tag.data = null;
                        }
                        break;
                    case TagType.MAKERNOTE:
                    case TagType.MAKERNOTE_ALT:
                    case (TagType)288:
                        Makernote makernote = ParseMakerNote(fileStream, tag, endian);
                        if (makernote != null) subIFD.Add(makernote);
                        break;
                    case TagType.OLYMPUSIMAGEPROCESSING:
                    case TagType.FUJI_RAW_IFD:
                    case TagType.NIKONTHUMB:
                    case TagType.SUBIFDS:
                    case TagType.EXIFIFDPOINTER:
                        for (Int32 k = 0; k < tag.dataCount; k++)
                        {
                            subIFD.Add(new IFD(IFDType.Plain, fileStream, tag.GetUInt(k), endian, Depth, RelativeOffset));
                        }
                        break;
                    case TagType.GPSINFOIFDPOINTER:
                        subIFD.Add(new IFD(IFDType.GPS, fileStream, tag.GetUInt(0), endian, Depth));
                        break;
                }
            }
            catch (Exception) { }
        }
    }

    /* This will attempt to parse makernotes and return it as an IFD */
    //makernote should be self contained
    Makernote ParseMakerNote(ImageBinaryReader reader, Tag tag, Endianness parentEndian)
    {
        // Read up to three times the makernote length (capped at stream length); should be enough.
        reader.BaseStream.Position = tag.dataOffset + RelativeOffset;
        byte[] data = reader.ReadBytes((int)Math.Min(tag.dataCount * 3, reader.BaseStream.Length));
        return ParseMakerNote(data, parentEndian, (int)tag.dataOffset);
    }

    /// <summary>
    /// Dispatches to the correct vendor-specific makernote parser based on the leading
    /// signature bytes, or falls back to parsing the buffer as a generic IFD.
    /// Returns null when nothing parseable is found.
    /// </summary>
    Makernote ParseMakerNote(byte[] data, Endianness parentEndian, int parentOffset)
    {
        if (Depth + 1 > IFD.MaxRecursion) return null;
        uint offset = 0;

        // Pentax makernote starts with AOC\0 - If it's there, skip it
        if (data[0] == 0x41 && data[1] == 0x4f && data[2] == 0x43 && data[3] == 0)
        {
            return new PentaxMakernote(data, 4, parentOffset, parentEndian, Depth);
            //data = data.Skip(4).ToArray();
            //offset += 4;
        }

        // Pentax also has "PENTAX" at the start, makernote starts at 8
        if (data[0] == 0x50 && data[1] == 0x45
            && data[2] == 0x4e && data[3] == 0x54 && data[4] == 0x41 && data[5] == 0x58)
        {
            return new PentaxMakernote(data, 8, parentOffset, parentEndian, Depth);
        }
        else if (Common.Memcmp(fuji_signature, data))
        {
            return new FujiMakerNote(data, parentEndian, Depth);
            //offset = 12;
            //mFile = new TiffBinaryReader(reader.BaseStream, offset + off, (uint)data.Length);
        }
        else if (Common.Memcmp(nikon_v3_signature, data))
        {
            return new NikonMakerNote(data, Depth);
        }

        // Panasonic has the word Exif at byte 6, a complete Tiff header starts at byte 12
        // This TIFF is 0 offset based
        if (data[6 + offset] == 0x45 && data[7 + offset] == 0x78 && data[8 + offset] == 0x69 && data[9 + offset] == 0x66)
        {
            return new PanasonicMakernote(data.Skip(12).ToArray(), parentEndian, Depth);
        }
        else if (Common.Strncmp(data, "Panasonic", 9))
        {
            data = data.Skip(12).ToArray();
        }

        // Olympus starts the makernote with their own name, sometimes truncated
        if (Common.Strncmp(data, "OLYMP", 5))
        {
            // there is another ifd right after the first
            offset += 8;
            if (Common.Strncmp(data, "OLYMPUS", 7))
            {
                offset += 4;
            }
        }

        // Epson starts the makernote with its own name
        if (Common.Strncmp(data, "EPSON", 5))
        {
            offset += 8;
        }

        // Some have MM or II to indicate endianness - read that
        if (data[offset] == 0x49 && data[offset + 1] == 0x49)
        {
            offset += 2;
            parentEndian = Endianness.Little;
            // Skip an embedded TIFF header ("42 0" magic followed by first-IFD offset 8).
            if (data[offset] == 42 && data[offset + 1] == 0 && data[offset + 2] == 8)
            {
                offset += 6;
            }
        }
        else if (data[offset] == 0x4D && data[offset + 1] == 0x4D)
        {
            parentEndian = Endianness.Big;
            offset += 2;
            if (data[offset] == 42 && data[offset + 1] == 0 && data[offset + 2] == 8)
            {
                offset += 6;
            }
        }

        // Attempt to parse the rest as an IFD
        try
        {
            return new Makernote(data, offset, parentEndian, Depth, parentOffset);
        }
        catch (Exception)
        {
            return null;
        }
        // If the structure cannot be read, a RawDecoderException will be thrown.
    }

    /// <summary>
    /// Parses the Adobe DNGPrivateData tag, which can embed the original camera
    /// makernote. Returns null when the payload is absent, malformed, or unsupported.
    /// </summary>
    Makernote ParseDngPrivateData(Tag t)
    {
        /*
        1. Six bytes containing the zero-terminated string "Adobe". (The DNG specification calls for the DNGPrivateData tag to start with an ASCII string identifying the creator/format).
        2. 4 bytes: an ASCII string ("MakN" for a Makernote), indicating what sort of data is being stored here. Note that this is not zero-terminated.
        3. A four-byte count (number of data bytes following); this is the length of the original MakerNote data. (This is always in "most significant byte first" format).
        4. 2 bytes: the byte-order indicator from the original file (the usual 'MM'/4D4D or 'II'/4949).
        5. 4 bytes: the original file offset for the MakerNote tag data (stored according to the byte order given above).
        6. The contents of the MakerNote tag. This is a simple byte-for-byte copy, with no modification.
        */
        uint size = t.dataCount;
        Common.ConvertArray(t.data, out byte[] data);
        Common.ByteToChar(data, out char[] dataAsChar, (int)size);
        string id = new String(dataAsChar);
        /*
        if (id.StartsWith("Microsoft") || id.StartsWith("Nokia")) {
            return new NokiaMakernote(data,0,endian,Depth,0);
            //windows phone dng
        }*/
        if (!id.StartsWith("Adobe"))
        {
            return null;
        }

        if (!(data[6] == 'M' && data[7] == 'a' && data[8] == 'k' && data[9] == 'N'))
        {
            return null;
        }
        data = data.Skip(10).ToArray();

        // Big-endian payload length.
        uint count;
        count = (uint)data[0] << 24 | (uint)data[1] << 16 | (uint)data[2] << 8 | data[3];
        data = data.Skip(4).ToArray();
        if (count > size)
        {
            return null;
        }

        Endianness makernote_endian = Endianness.Unknown;
        if (data[0] == 0x49 && data[1] == 0x49)
            makernote_endian = Endianness.Little;
        else if (data[0] == 0x4D && data[1] == 0x4D)
            makernote_endian = Endianness.Big;
        else
        {
            return null;
        }

        uint org_offset;
        org_offset = (uint)data[2] << 24 | (uint)data[3] << 16 | (uint)data[4] << 8 | data[5];
        data = data.Skip(6).ToArray();

        /* We don't parse original makernotes that are placed after 300MB mark in the original file */
        if (org_offset + count > 300 * 1024 * 1024)
        {
            return null;
        }

        Makernote makerIfd;
        try
        {
            makerIfd = ParseMakerNote(data, makernote_endian, 0);
        }
        catch (RawDecoderException)
        {
            //Makernote are optional and sometimes not even IFD (See Nokia)
            return null;
        }
        return makerIfd;
    }

    /// <summary>
    /// Returns the tag with the given id, or null when this IFD does not contain it.
    /// </summary>
    public Tag GetEntry(TagType type)
    {
        tags.TryGetValue(type, out Tag tag);
        return tag;
    }

    /// <summary>
    /// Returns this IFD and all nested sub-IFDs (depth-first) that contain the tag.
    /// </summary>
    public List<IFD> GetIFDsWithTag(TagType tag)
    {
        List<IFD> matchingIFDs = new List<IFD>();
        if (tags.ContainsKey(tag))
        {
            matchingIFDs.Add(this);
        }
        foreach (IFD i in subIFD)
        {
            List<IFD> t = (i).GetIFDsWithTag(tag);
            for (int j = 0; j < t.Count; j++)
            {
                matchingIFDs.Add(t[j]);
            }
        }
        return matchingIFDs;
    }

    /// <summary>
    /// Returns the first IFD (depth-first, this one included) of the given type, or null.
    /// </summary>
    public IFD GetIFDWithType(IFDType t)
    {
        if (type == t) return this;
        foreach (IFD i in subIFD)
        {
            var l = i.GetIFDWithType(t);
            if (l != null) { return l; }
        }
        return null;
    }

    /// <summary>
    /// Merges another IFD's sub-IFDs and tags into this one. No-op for a null argument
    /// or one without sub-IFDs; duplicate tag ids keep this IFD's existing value.
    /// </summary>
    protected void MergeIFD(IFD other_tiff)
    {
        // Fix: the previous check (other_tiff?.subIFD.Count == 0) evaluated to false for a
        // null argument and then dereferenced it below; test null explicitly instead.
        if (other_tiff == null || other_tiff.subIFD.Count == 0)
            return;

        foreach (IFD i in other_tiff.subIFD)
        {
            subIFD.Add(i);
        }
        foreach (KeyValuePair<TagType, Tag> i in other_tiff.tags)
        {
            // Fix: Dictionary.Add throws ArgumentException on an existing key; skip
            // duplicates instead, mirroring the duplicate handling in Parse.
            if (!tags.ContainsKey(i.Key))
            {
                tags.Add(i.Key, i.Value);
            }
        }
    }

    /// <summary>
    /// Returns the first occurrence of the tag in this IFD or any sub-IFD, or null.
    /// </summary>
    public Tag GetEntryRecursive(TagType t)
    {
        Tag tag = null;
        tag = GetEntry(t);
        if (tag == null)
        {
            foreach (IFD ifd in subIFD)
            {
                tag = ifd.GetEntryRecursive(t);
                if (tag != null) break;
            }
        }
        return tag;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using GitTfs.Commands;
using GitTfs.Core.TfsInterop;
using GitTfs.Util;
namespace GitTfs.Core
{
public class GitTfsRemote : IGitTfsRemote
{
// Matches paths that are inside a ".git" directory at any depth (see IsInDotGit).
private static readonly Regex isInDotGit = new Regex("(?:^|/)\\.git(?:/|$)", RegexOptions.Compiled);

private readonly Globals _globals;
private readonly RemoteOptions _remoteOptions;
private readonly ConfigProperties _properties;
// NOTE(review): not referenced anywhere in the code visible here — confirm before removing.
private readonly bool _disableGitignoreSupport;
// Set via SetFirstChangeset; null until assigned.
private int? firstChangesetId;
// Lazily initialized by InitHistory from the git repository's TFS metadata.
private int? maxChangesetId;
private string maxCommitHash;
// True once EnsureTfsAuthenticated has succeeded, so authentication runs only once.
private bool isTfsAuthenticated;
public RemoteInfo RemoteInfo { get; private set; }
/// <summary>
/// Builds a remote from its persisted configuration, copying connection details
/// (URL, repository path, credentials), ignore rules and aliases, and determining
/// whether this remote is a subtree of another remote.
/// </summary>
public GitTfsRemote(RemoteInfo info, IGitRepository repository, RemoteOptions remoteOptions, Globals globals,
    ITfsHelper tfsHelper, ConfigProperties properties)
{
    _remoteOptions = remoteOptions;
    _globals = globals;
    _properties = properties;
    Tfs = tfsHelper;
    Repository = repository;
    RemoteInfo = info;
    Id = info.Id;
    TfsUrl = info.Url;
    TfsRepositoryPath = info.Repository;
    TfsUsername = info.Username;
    TfsPassword = info.Password;
    Aliases = (info.Aliases ?? Enumerable.Empty<string>()).ToArray();
    IgnoreRegexExpression = info.IgnoreRegex;
    IgnoreExceptRegexExpression = info.IgnoreExceptRegex;
    // Command-line option takes precedence over the stored remote configuration.
    GitIgnorePath = _remoteOptions.GitIgnorePath ?? info.GitIgnorePath;
    // .gitignore support: explicitly disabled wins; otherwise enabled by option or repo config.
    UseGitIgnore = !_remoteOptions.NoGitIgnore && (_remoteOptions.UseGitIgnore || IsGitIgnoreSupportEnabled());
    Autotag = info.Autotag;
    IsSubtree = CheckSubtree();
}
/// <summary>
/// Reads the "disable gitignore support" flag from the repository configuration.
/// Support is considered enabled only when the setting is present and parses to false;
/// a missing or unparseable value means support stays disabled.
/// </summary>
private bool IsGitIgnoreSupportEnabled()
{
    var configured = Repository.GetConfig<string>(GitTfsConstants.DisableGitignoreSupport, null);
    if (configured == null)
        return false;

    bool disabled;
    if (!bool.TryParse(configured, out disabled))
        return false;

    return !disabled;
}
/// <summary>
/// Detects whether this remote's id follows the subtree naming pattern; when it does,
/// records the owning remote's id and the subtree prefix as a side effect.
/// </summary>
private bool CheckSubtree()
{
    var match = GitTfsConstants.RemoteSubtreeRegex.Match(Id);
    if (!match.Success)
        return false;

    OwningRemoteId = match.Groups["owner"].Value;
    Prefix = match.Groups["prefix"].Value;
    return true;
}
/// <summary>
/// Authenticates against TFS on first use; subsequent calls are no-ops.
/// </summary>
public void EnsureTfsAuthenticated()
{
    if (!isTfsAuthenticated)
    {
        Tfs.EnsureAuthenticated();
        isTfsAuthenticated = true;
    }
}
// Always false for this implementation; derived remotes are presumably represented
// by a different IGitTfsRemote implementation — TODO confirm.
public bool IsDerived
{
    get { return false; }
}

public int? GetFirstChangeset()
{
    return firstChangesetId;
}

public void SetFirstChangeset(int? changesetId)
{
    Trace.WriteLine($"Set first changeset in branch to C{changesetId}");
    firstChangesetId = changesetId;
}
// True when this remote's id matches the subtree naming pattern (see CheckSubtree).
public bool IsSubtree { get; private set; }

// A subtree owner has no TFS repository path of its own; its paths come from subtrees.
public bool IsSubtreeOwner
{
    get
    {
        return TfsRepositoryPath == null;
    }
}

public string Id { get; set; }

// URL, username and password are delegated straight to the TFS helper.
public string TfsUrl
{
    get { return Tfs.Url; }
    set { Tfs.Url = value; }
}

private string[] Aliases { get; set; }

public bool Autotag { get; set; }

public string TfsUsername
{
    get { return Tfs.Username; }
    set { Tfs.Username = value; }
}

public string TfsPassword
{
    get { return Tfs.Password; }
    set { Tfs.Password = value; }
}

public string TfsRepositoryPath { get; set; }

/// <summary>
/// Gets the TFS server-side paths of all subtrees of this remote.
/// Valid if the remote has subtrees, which occurs when <see cref="TfsRepositoryPath"/> is null.
/// </summary>
public string[] TfsSubtreePaths
{
    get
    {
        // Computed lazily and cached for the lifetime of this instance.
        if (tfsSubtreePaths == null)
            tfsSubtreePaths = Repository.GetSubtrees(this).Select(x => x.TfsRepositoryPath).ToArray();
        return tfsSubtreePaths;
    }
}
private string[] tfsSubtreePaths = null;

public string IgnoreRegexExpression { get; set; }
public string IgnoreExceptRegexExpression { get; set; }
public string GitIgnorePath { get; set; }
public bool UseGitIgnore { get; set; }
public IGitRepository Repository { get; set; }
public ITfsHelper Tfs { get; set; }

// Populated by CheckSubtree when this remote is a subtree of another remote.
public string OwningRemoteId { get; private set; }
public string Prefix { get; private set; }
public bool ExportMetadatas { get; set; }
public Dictionary<string, IExportWorkItem> ExportWorkitemsMapping { get; set; }
// Highest TFS changeset already fetched into git; lazily initialized by InitHistory.
public int MaxChangesetId
{
    get { InitHistory(); return maxChangesetId.Value; }
    set { maxChangesetId = value; }
}

// Git commit corresponding to MaxChangesetId; lazily initialized by InitHistory.
public string MaxCommitHash
{
    get { InitHistory(); return maxCommitHash; }
    set { maxCommitHash = value; }
}

private TfsChangesetInfo GetTfsChangesetById(int id)
{
    return Repository.GetTfsChangesetById(RemoteRef, id);
}

/// <summary>
/// Populates maxChangesetId/maxCommitHash from the most recent TFS-mapped commit on
/// the remote ref. Runs only once (guarded by maxChangesetId == null).
/// </summary>
private void InitHistory()
{
    if (maxChangesetId == null)
    {
        var mostRecentUpdate = Repository.GetLastParentTfsCommits(RemoteRef).FirstOrDefault();
        if (mostRecentUpdate != null)
        {
            MaxCommitHash = mostRecentUpdate.GitCommit;
            MaxChangesetId = mostRecentUpdate.ChangesetId;
        }
        else
        {
            MaxChangesetId = 0;
            // Manage the special case where a .gitignore has been committed
            var gitCommit = Repository.GetCommit(RemoteRef);
            if (gitCommit != null)
            {
                MaxCommitHash = gitCommit.Sha;
            }
        }
    }
}
// Directory name (under .git) used for the TFS workspace.
private const string WorkspaceDirectory = "~w";

// Resolves the TFS workspace directory: an explicitly configured path wins; subtree
// remotes get a per-prefix subdirectory; otherwise the default under .git is used.
private string WorkingDirectory
{
    get
    {
        var dir = Repository.GetConfig(GitTfsConstants.WorkspaceConfigKey);
        if (IsSubtree)
        {
            if (dir != null)
            {
                return Path.Combine(dir, Prefix);
            }

            //find the relative path to the owning remote
            return Ext.CombinePaths(_globals.GitDir, WorkspaceDirectory, OwningRemoteId, Prefix);
        }
        return dir ?? DefaultWorkingDirectory;
    }
}

private string DefaultWorkingDirectory
{
    get
    {
        return Path.Combine(_globals.GitDir, WorkspaceDirectory);
    }
}
/// <summary>
/// Asks the TFS helper to clean up any workspaces rooted at this remote's working directory.
/// </summary>
public void CleanupWorkspace()
{
    Tfs.CleanupWorkspaces(WorkingDirectory);
}
/// <summary>
/// Deletes the local workspace directory, first clearing the read-only attribute on
/// every contained file so the recursive delete cannot fail on read-only entries.
/// Failures are logged and swallowed: cleanup is best-effort.
/// </summary>
public void CleanupWorkspaceDirectory()
{
    try
    {
        if (!Directory.Exists(WorkingDirectory))
            return;

        foreach (var file in Directory.EnumerateFiles(WorkingDirectory, "*", SearchOption.AllDirectories))
        {
            var attributes = File.GetAttributes(file);
            File.SetAttributes(file, attributes & ~FileAttributes.ReadOnly);
        }
        Directory.Delete(WorkingDirectory, true);
    }
    catch (Exception ex)
    {
        Trace.WriteLine("CleanupWorkspaceDirectory: " + ex.Message);
    }
}
// A path is skipped during fetch when it lives under .git or matches the ignore rules.
public bool ShouldSkip(string path)
{
    if (IsInDotGit(path))
        return true;
    return IsIgnored(path);
}
// True when the path matches the ignore regexes or (if enabled) the .gitignore rules.
public bool IsIgnored(string path)
{
    if (Ignorance.IsIncluded(path))
        return true;
    return IsPathIgnored(path);
}
// Consults git's .gitignore handling, but only when UseGitIgnore is enabled.
private bool IsPathIgnored(string path)
{
    if (!UseGitIgnore)
        return false;
    return Repository.IsPathIgnored(path);
}
// Backing field for the lazily-built path filter.
private Bouncer _ignorance;
// Path filter combining this remote's ignore/except regexes with the shared remote options.
// Built once on first access.
private Bouncer Ignorance
{
get
{
if (_ignorance == null)
{
_ignorance = new Bouncer();
_ignorance.Include(IgnoreRegexExpression);
_ignorance.Include(_remoteOptions.IgnoreRegex);
// "Except" patterns punch holes in the include patterns above.
_ignorance.Exclude(IgnoreExceptRegexExpression);
_ignorance.Exclude(_remoteOptions.ExceptRegex);
}
return _ignorance;
}
}
// True when the path is inside a .git directory (matched by the isInDotGit regex).
public bool IsInDotGit(string path)
{
    var match = isInDotGit.Match(path);
    return match.Success;
}
// Converts an absolute TFS server path into a path relative to this remote's root in the
// git repo. Returns null when the path does not belong to this remote (or any subtree).
public string GetPathInGitRepo(string tfsPath)
{
if (tfsPath == null) return null;
if (!IsSubtreeOwner)
{
if (!tfsPath.StartsWith(TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase)) return null;
if (TfsRepositoryPath == GitTfsConstants.TfsRoot)
{
tfsPath = tfsPath.Substring(TfsRepositoryPath.Length);
}
else
{
// Reject sibling folders that merely share the prefix (e.g. "$/ProjX" vs "$/Proj").
if (tfsPath.Length > TfsRepositoryPath.Length && tfsPath[TfsRepositoryPath.Length] != '/')
return null;
tfsPath = tfsPath.Substring(TfsRepositoryPath.Length);
}
}
else
{
//look through the subtrees
var p = _globals.Repository.GetSubtrees(this)
.Where(x => x.IsSubtree)
.FirstOrDefault(x => tfsPath.StartsWith(x.TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase)
&& (tfsPath.Length == x.TfsRepositoryPath.Length || tfsPath[x.TfsRepositoryPath.Length] == '/'));
if (p == null) return null;
// Delegate to the owning subtree, then re-root the result under its prefix.
tfsPath = p.GetPathInGitRepo(tfsPath);
//we must prepend the prefix in order to get the correct directory
if (tfsPath.StartsWith("/"))
tfsPath = p.Prefix + tfsPath;
else
tfsPath = p.Prefix + "/" + tfsPath;
}
// Normalize to a repo-relative path without a leading slash.
while (tfsPath.StartsWith("/"))
tfsPath = tfsPath.Substring(1);
return tfsPath;
}
// Plain data holder describing the outcome of a Fetch/FetchWithMerge run.
public class FetchResult : IFetchResult
{
// False when a merge changeset could not be processed and the fetch stopped early.
public bool IsSuccess { get; set; }
// Id of the last TFS changeset that was turned into a git commit.
public int LastFetchedChangesetId { get; set; }
// Number of changesets converted during this run.
public int NewChangesetCount { get; set; }
public string ParentBranchTfsPath { get; set; }
// Set when the fetch stopped on a rename changeset so the caller can restart with context.
public bool IsProcessingRenameChangeset { get; set; }
// Commit to use as parent when resuming after a rename changeset.
public string LastParentCommitBeforeRename { get; set; }
}
// Plain fetch: same as FetchWithMerge but with no extra merge parent to graft.
public IFetchResult Fetch(bool stopOnFailMergeCommit = false, int lastChangesetIdToFetch = -1, IRenameResult renameResult = null)
{
    const int noMergeChangeset = -1;
    return FetchWithMerge(noMergeChangeset, stopOnFailMergeCommit, lastChangesetIdToFetch, renameResult);
}
// Convenience overload: fetch up to the latest changeset (no upper bound).
public IFetchResult FetchWithMerge(int mergeChangesetId, bool stopOnFailMergeCommit = false, IRenameResult renameResult = null, params string[] parentCommitsHashes)
{
    const int fetchUpToLatest = -1;
    return FetchWithMerge(mergeChangesetId, stopOnFailMergeCommit, fetchUpToLatest, renameResult, parentCommitsHashes);
}
// Core fetch loop: converts each new TFS changeset into a git commit on this remote's ref.
// When mergeChangesetId is reached, parentCommitsHashes are grafted on as extra parents.
// Returns early (IsProcessingRenameChangeset set) on a rename changeset so the caller can
// restart the fetch with the rename context preserved.
public IFetchResult FetchWithMerge(int mergeChangesetId, bool stopOnFailMergeCommit = false, int lastChangesetIdToFetch = -1, IRenameResult renameResult = null, params string[] parentCommitsHashes)
{
var fetchResult = new FetchResult { IsSuccess = true, NewChangesetCount = 0 };
var latestChangesetId = GetLatestChangesetId();
if (lastChangesetIdToFetch != -1)
latestChangesetId = Math.Min(latestChangesetId, lastChangesetIdToFetch);
// TFS 2010 doesn't like when we ask for history past its last changeset.
if (MaxChangesetId >= latestChangesetId)
return fetchResult;
bool fetchRetrievedChangesets;
do
{
var fetchedChangesets = FetchChangesets(true, lastChangesetIdToFetch);
// Cache of git objects shared across the changesets of this batch.
var objects = BuildEntryDictionary();
fetchRetrievedChangesets = false;
foreach (var changeset in fetchedChangesets)
{
fetchRetrievedChangesets = true;
fetchResult.NewChangesetCount++;
if (lastChangesetIdToFetch > 0 && changeset.Summary.ChangesetId > lastChangesetIdToFetch)
return fetchResult;
string parentCommitSha = null;
// Resolve the merge parent first; abort the fetch when requested and it can't be found.
if (changeset.IsMergeChangeset && !ProcessMergeChangeset(changeset, stopOnFailMergeCommit, ref parentCommitSha))
{
fetchResult.NewChangesetCount--; // Merge wasn't successful - so don't count the changeset we found
fetchResult.IsSuccess = false;
return fetchResult;
}
var parentSha = (renameResult != null && renameResult.IsProcessingRenameChangeset) ? renameResult.LastParentCommitBeforeRename : MaxCommitHash;
var isFirstCommitInRepository = (parentSha == null);
var log = Apply(parentSha, changeset, objects);
if (changeset.IsRenameChangeset && !isFirstCommitInRepository)
{
// Hand control back to the caller on the first encounter of a rename changeset.
if (renameResult == null || !renameResult.IsProcessingRenameChangeset)
{
fetchResult.IsProcessingRenameChangeset = true;
fetchResult.LastParentCommitBeforeRename = MaxCommitHash;
return fetchResult;
}
renameResult.IsProcessingRenameChangeset = false;
renameResult.LastParentCommitBeforeRename = null;
}
if (parentCommitSha != null)
log.CommitParents.Add(parentCommitSha);
if (changeset.Summary.ChangesetId == mergeChangesetId)
{
foreach (var parent in parentCommitsHashes)
log.CommitParents.Add(parent);
}
var commitSha = ProcessChangeset(changeset, log);
fetchResult.LastFetchedChangesetId = changeset.Summary.ChangesetId;
// set commit sha for added git objects
foreach (var commit in objects)
{
if (commit.Value.Commit == null)
commit.Value.Commit = commitSha;
}
DoGcIfNeeded();
}
} while (fetchRetrievedChangesets && latestChangesetId > fetchResult.LastFetchedChangesetId);
return fetchResult;
}
// Case-insensitive path-to-git-object map (TFS paths are case-insensitive).
private Dictionary<string, GitObject> BuildEntryDictionary()
{
    var pathComparer = StringComparer.InvariantCultureIgnoreCase;
    return new Dictionary<string, GitObject>(pathComparer);
}
// Tries to resolve (and fetch if necessary) the git commit of the second parent of a TFS
// merge changeset, storing it in parentCommit. Returns false — aborting the fetch — only
// when the parent cannot be resolved AND stopOnFailMergeCommit is set; otherwise the merge
// parent is omitted (and recorded in changeset.OmittedParentBranch) and fetching continues.
private bool ProcessMergeChangeset(ITfsChangeset changeset, bool stopOnFailMergeCommit, ref string parentCommit)
{
if (!Tfs.CanGetBranchInformation)
{
Trace.TraceInformation("info: this changeset " + changeset.Summary.ChangesetId +
" is a merge changeset. But was not treated as is because this version of TFS can't manage branches...");
}
else if (!IsIgnoringBranches())
{
var parentChangesetId = Tfs.FindMergeChangesetParent(TfsRepositoryPath, changeset.Summary.ChangesetId, this);
if (parentChangesetId < 1) // Handle missing merge parent info
{
if (stopOnFailMergeCommit)
{
return false;
}
Trace.TraceInformation("warning: this changeset " + changeset.Summary.ChangesetId +
" is a merge changeset. But git-tfs is unable to determine the parent changeset.");
return true;
}
// The parent changeset may already be fetched; otherwise fetch its owning remote.
var shaParent = Repository.FindCommitHashByChangesetId(parentChangesetId);
if (shaParent == null)
{
string omittedParentBranch;
shaParent = FindMergedRemoteAndFetch(parentChangesetId, stopOnFailMergeCommit, out omittedParentBranch);
changeset.OmittedParentBranch = omittedParentBranch;
}
if (shaParent != null)
{
parentCommit = shaParent;
}
else
{
if (stopOnFailMergeCommit)
return false;
Trace.TraceInformation("warning: this changeset " + changeset.Summary.ChangesetId +
" is a merge changeset. But git-tfs failed to find and fetch the parent changeset "
+ parentChangesetId + ". Parent changeset will be ignored...");
}
}
else
{
Trace.TraceInformation("info: this changeset " + changeset.Summary.ChangesetId +
" is a merge changeset. But was not treated as is because of your git setting...");
changeset.OmittedParentBranch = ";C" + changeset.Summary.ChangesetId;
}
return true;
}
// Reads the ignore-branches git config. When unset, auto-detects a value (branch support
// is disabled when fewer than two tfs remotes exist), persists it, and returns it.
public bool IsIgnoringBranches()
{
var value = Repository.GetConfig<string>(GitTfsConstants.IgnoreBranches, null);
bool isIgnoringBranches;
if (value != null && bool.TryParse(value, out isIgnoringBranches))
return isIgnoringBranches;
Trace.TraceInformation("warning: no value found for branch management setting '" + GitTfsConstants.IgnoreBranches +
"'...");
// Heuristic: a repo with a single tfs remote most likely doesn't use branches.
var isIgnoringBranchesDetected = Repository.ReadAllTfsRemotes().Count() < 2;
Trace.TraceInformation("=> Branch support " + (isIgnoringBranchesDetected ? "disabled!" : "enabled!"));
if (isIgnoringBranchesDetected)
Trace.TraceInformation(" if you want to enable branch support, use the command:" + Environment.NewLine
+ " git config --local " + GitTfsConstants.IgnoreBranches + " false");
_globals.Repository.SetConfig(GitTfsConstants.IgnoreBranches, isIgnoringBranchesDetected);
return isIgnoringBranchesDetected;
}
// Commits the changeset into git and updates this remote's ref. When ExportMetadatas is
// set, workitems/checkin notes/policy overrides are appended to the commit message; the
// same metadata (plus any omitted parent branch) is also recorded as a git note on the
// commit. Returns the new commit's sha.
private string ProcessChangeset(ITfsChangeset changeset, LogEntry log)
{
if (ExportMetadatas)
{
if (changeset.Summary.Workitems.Any())
{
// Translate workitem ids through the export mapping before writing them to the log.
var workItems = TranslateWorkItems(changeset.Summary.Workitems.Select(wi => new ExportWorkItem(wi)));
if (workItems != null)
{
log.Log += "\nWorkitems:";
foreach (var workItem in workItems)
{
log.Log += "\n#" + workItem.Id + " " + workItem.Title;
}
}
}
if (!string.IsNullOrWhiteSpace(changeset.Summary.PolicyOverrideComment))
log.Log += "\n" + GitTfsConstants.GitTfsPolicyOverrideCommentPrefix + " " + changeset.Summary.PolicyOverrideComment;
foreach (var checkinNote in changeset.Summary.CheckinNotes)
{
if (!string.IsNullOrWhiteSpace(checkinNote.Name) && !string.IsNullOrWhiteSpace(checkinNote.Value))
log.Log += "\n" + GitTfsConstants.GitTfsPrefix + "-" + CamelCaseToDelimitedStringConverter.Convert(checkinNote.Name, "-") + ": " + checkinNote.Value;
}
}
var commitSha = Commit(log);
UpdateTfsHead(commitSha, changeset.Summary.ChangesetId);
// Build the git note content, independently of what was put in the commit message.
StringBuilder metadatas = new StringBuilder();
if (changeset.Summary.Workitems.Any())
{
string workitemNote = "Workitems:\n";
foreach (var workitem in changeset.Summary.Workitems)
{
var workitemId = workitem.Id.ToString();
var workitemUrl = workitem.Url;
if (ExportMetadatas && ExportWorkitemsMapping.Count != 0)
{
if (ExportWorkitemsMapping.ContainsKey(workitemId))
{
var oldWorkitemId = workitemId;
workitemId = ExportWorkitemsMapping[workitemId].Id;
// Keep the URL consistent with the translated workitem id.
workitemUrl = workitemUrl.Replace(oldWorkitemId, workitemId);
}
}
workitemNote += string.Format("[{0}] {1}\n {2}\n", workitemId, workitem.Title, workitemUrl);
}
metadatas.Append(workitemNote);
}
if (!string.IsNullOrWhiteSpace(changeset.Summary.PolicyOverrideComment))
metadatas.Append("\nPolicy Override Comment: " + changeset.Summary.PolicyOverrideComment);
foreach (var checkinNote in changeset.Summary.CheckinNotes)
{
if (!string.IsNullOrWhiteSpace(checkinNote.Name) && !string.IsNullOrWhiteSpace(checkinNote.Value))
metadatas.Append("\n" + checkinNote.Name + ": " + checkinNote.Value);
}
if (!string.IsNullOrWhiteSpace(changeset.OmittedParentBranch))
metadatas.Append("\nOmitted parent branch: " + changeset.OmittedParentBranch);
if (metadatas.Length != 0)
Repository.CreateNote(commitSha, metadatas.ToString(), log.AuthorName, log.AuthorEmail, log.Date);
return commitSha;
}
// Maps workitems through ExportWorkitemsMapping; workitems without a mapping entry are
// passed through unchanged, null entries are dropped.
// Fix: the original returned null for a null input when the mapping was empty but an empty
// list otherwise — null input now uniformly yields an empty list (null-safe for callers).
private IEnumerable<IExportWorkItem> TranslateWorkItems(IEnumerable<IExportWorkItem> workItemsOriginal)
{
    if (workItemsOriginal == null)
        return new List<IExportWorkItem>();
    // Nothing to translate: hand the sequence back untouched.
    if (ExportWorkitemsMapping.Count == 0)
        return workItemsOriginal;
    List<IExportWorkItem> workItemsTranslated = new List<IExportWorkItem>();
    foreach (var originalWorkItem in workItemsOriginal)
    {
        if (originalWorkItem == null)
            continue;
        IExportWorkItem translatedWorkItem;
        if (!ExportWorkitemsMapping.TryGetValue(originalWorkItem.Id, out translatedWorkItem))
            translatedWorkItem = originalWorkItem;
        // A mapping entry may carry a null value; skip those like the original did.
        if (translatedWorkItem != null)
            workItemsTranslated.Add(translatedWorkItem);
    }
    return workItemsTranslated;
}
// Resolves the root parent changeset of a branch; the omitted-branch output is discarded.
private string FindRootRemoteAndFetch(int parentChangesetId, IRenameResult renameResult = null)
{
    string omittedParentBranch;
    var commitSha = FindRemoteAndFetch(parentChangesetId, false, false, renameResult, out omittedParentBranch);
    return commitSha;
}
// Resolves the merge parent changeset of a merge commit.
// NOTE(review): the stopOnFailMergeCommit parameter is accepted but NOT forwarded
// (false is passed instead) — confirm whether this is intentional before changing.
private string FindMergedRemoteAndFetch(int parentChangesetId, bool stopOnFailMergeCommit, out string omittedParentBranch)
{
return FindRemoteAndFetch(parentChangesetId, false, true, null, out omittedParentBranch);
}
// Finds (initializing if needed) the remote that owns the given changeset, fetches it when
// it is a different remote than this one, and returns the sha of the git commit created
// for that changeset (null when the changeset belongs to this remote or wasn't resolved).
private string FindRemoteAndFetch(int parentChangesetId, bool stopOnFailMergeCommit, bool mergeChangeset, IRenameResult renameResult, out string omittedParentBranch)
{
    var tfsRemote = FindOrInitTfsRemoteOfChangeset(parentChangesetId, mergeChangeset, renameResult, out omittedParentBranch);
    // Only fetch when the changeset lives on another (dependent) remote.
    if (tfsRemote != null && string.Compare(tfsRemote.TfsRepositoryPath, TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase) != 0)
    {
        Trace.TraceInformation("\tFetching from dependent TFS remote '{0}'...", tfsRemote.Id);
        try
        {
            // Fix: the fetch result was previously captured in a local that was never read.
            ((GitTfsRemote)tfsRemote).FetchWithMerge(-1, stopOnFailMergeCommit, parentChangesetId, renameResult);
        }
        finally
        {
            Trace.WriteLine("Cleaning...");
            tfsRemote.CleanupWorkspaceDirectory();
            // A bare repository has no checkout, so move its remote ref explicitly.
            if (tfsRemote.Repository.IsBare)
                tfsRemote.Repository.UpdateRef(GitRepository.ShortToLocalName(tfsRemote.Id), tfsRemote.MaxCommitHash);
        }
        return Repository.FindCommitHashByChangesetId(parentChangesetId);
    }
    return null;
}
// Finds the existing tfs remote whose repository path contains the given changeset, or
// initializes a new remote for the TFS branch containing it. When the branch is skipped
// (filtered, not initialized, or not found), returns null and sets omittedParentBranch
// ("<branch path>;C<changeset id>") so callers can record why the parent was omitted.
private IGitTfsRemote FindOrInitTfsRemoteOfChangeset(int parentChangesetId, bool mergeChangeset, IRenameResult renameResult, out string omittedParentBranch)
{
omittedParentBranch = null;
IGitTfsRemote tfsRemote;
IChangeset parentChangeset = Tfs.GetChangeset(parentChangesetId);
//I think you want something that uses GetPathInGitRepo and ShouldSkip. See TfsChangeset.Apply.
//Don't know if there is a way to extract remote tfs repository path from changeset datas! Should be better!!!
var remote = Repository.ReadAllTfsRemotes().FirstOrDefault(r => parentChangeset.Changes.Any(c => r.GetPathInGitRepo(c.Item.ServerItem) != null));
if (remote != null)
tfsRemote = remote;
else
{
// If the changeset has created multiple folders, the expected branch folder will not always be the first
// so we scan all the changes of type folder to try to detect the first one which is a branch.
// In most cases it will change nothing: the first folder is the good one
IBranchObject tfsBranch = null;
string tfsPath = null;
var allBranches = Tfs.GetBranches(true);
foreach (var change in parentChangeset.Changes)
{
tfsPath = change.Item.ServerItem;
tfsPath = tfsPath.EndsWith("/") ? tfsPath : tfsPath + "/";
tfsBranch = allBranches.SingleOrDefault(b => tfsPath.StartsWith(b.Path.EndsWith("/") ? b.Path : b.Path + "/"));
if (tfsBranch != null)
{
// we found a branch, we stop here
break;
}
}
// Branches matching the configured regex are deliberately not fetched.
var filterRegex = Repository.GetConfig(GitTfsConstants.IgnoreBranchesRegex);
if (mergeChangeset && tfsBranch != null && !string.IsNullOrEmpty(filterRegex)
&& Regex.IsMatch(tfsBranch.Path, filterRegex, RegexOptions.IgnoreCase))
{
Trace.TraceInformation("warning: skip filtered branch for path " + tfsBranch.Path + " (regex:" + filterRegex + ")");
tfsRemote = null;
omittedParentBranch = tfsBranch.Path + ";C" + parentChangesetId;
}
else if (mergeChangeset && tfsBranch != null &&
string.Equals(Repository.GetConfig(GitTfsConstants.IgnoreNotInitBranches), true.ToString(), StringComparison.InvariantCultureIgnoreCase))
{
Trace.TraceInformation("warning: skip not initialized branch for path " + tfsBranch.Path);
tfsRemote = null;
omittedParentBranch = tfsBranch.Path + ";C" + parentChangesetId;
}
else if (tfsBranch == null)
{
Trace.TraceInformation("error: branch not found. Verify that all the folders have been converted to branches (or something else :().\n\tpath {0}", tfsPath);
tfsRemote = null;
omittedParentBranch = ";C" + parentChangesetId;
}
else
{
tfsRemote = InitTfsRemoteOfChangeset(tfsBranch, parentChangeset.ChangesetId, renameResult);
if (tfsRemote == null)
omittedParentBranch = tfsBranch.Path + ";C" + parentChangesetId;
}
}
return tfsRemote;
}
// Initializes a remote for a TFS branch. Root branches are created directly; other
// branches are rooted at the changeset their source branch was branched from, and a
// renamed source branch is fetched first so the root commit exists.
private IGitTfsRemote InitTfsRemoteOfChangeset(IBranchObject tfsBranch, int parentChangesetId, IRenameResult renameResult = null)
{
if (tfsBranch.IsRoot)
{
return InitTfsBranch(_remoteOptions, tfsBranch.Path);
}
// A branch may have several root candidates (e.g. when it was renamed).
var branchesDatas = Tfs.GetRootChangesetForBranch(tfsBranch.Path, parentChangesetId);
IGitTfsRemote remote = null;
foreach (var branch in branchesDatas)
{
var rootChangesetId = branch.SourceBranchChangesetId;
remote = InitBranch(_remoteOptions, tfsBranch.Path, rootChangesetId, true);
if (remote == null)
{
Trace.TraceInformation("warning: root commit not found corresponding to changeset " + rootChangesetId);
Trace.TraceInformation("=> continuing anyway by creating a branch without parent...");
return InitTfsBranch(_remoteOptions, tfsBranch.Path);
}
if (branch.IsRenamedBranch)
{
try
{
remote.Fetch(renameResult: renameResult);
}
finally
{
Trace.WriteLine("Cleaning...");
remote.CleanupWorkspaceDirectory();
// Bare repositories need their remote ref moved explicitly after a fetch.
if (remote.Repository.IsBare)
remote.Repository.UpdateRef(GitRepository.ShortToLocalName(remote.Id), remote.MaxCommitHash);
}
}
}
return remote;
}
// Fetches a single changeset (the latest when changesetId < 0) as a snapshot commit,
// optionally swallowing TFS "access denied" errors on restricted changesets.
public void QuickFetch(int changesetId, bool ignoreRestricted, bool printRestrictionHint)
{
    try
    {
        var changeset = changesetId < 0
            ? GetLatestChangeset()
            : Tfs.GetChangeset(changesetId, this);
        quickFetch(changeset);
    }
    catch (Exception ex)
    {
        Trace.WriteLine("Quick fetch failed: " + ex.Message);
        if (!IgnoreException(ex.Message, ignoreRestricted, printRestrictionHint))
            throw;
    }
}
// Commits the full tree of the changeset in one commit and moves the remote ref to it.
private void quickFetch(ITfsChangeset changeset)
{
    var log = CopyTree(MaxCommitHash, changeset);
    var commitSha = Commit(log);
    UpdateTfsHead(commitSha, changeset.Summary.ChangesetId);
    DoGcIfNeeded();
}
// Computes the first changeset id to ask TFS for and streams the changesets (optionally
// by lots) from this remote's path — or from every subtree when this remote owns subtrees.
private IEnumerable<ITfsChangeset> FetchChangesets(bool byLots, int lastVersion = -1)
{
int lowerBoundChangesetId;
// If we're starting at the Root side of a branch commit (e.g. C1), but there are
// invalid commits between C1 and the actual branch side of the commit operation
// (e.g. a Folder with the branch name was created [C2] and then deleted [C3],
// then the root-side was branched [C4; C1 --branch--> C4]), this will detect
// only the folder creation and deletion operations due to the lowerBound being
// detected as the root-side of the commit +1 (C1+1=C2) instead of referencing
// the branch-side of the branching operation [C4].
if (_properties.InitialChangeset.HasValue || firstChangesetId.HasValue)
{
var firstChangesetInBranch = Math.Max(_properties.InitialChangeset ?? int.MinValue, firstChangesetId ?? int.MinValue);
lowerBoundChangesetId = Math.Max(MaxChangesetId + 1, firstChangesetInBranch);
}
else
lowerBoundChangesetId = MaxChangesetId + 1;
Trace.WriteLine(RemoteRef + ": Getting changesets from " + lowerBoundChangesetId +
" to " + lastVersion + " ...", "info");
if (!IsSubtreeOwner)
return Tfs.GetChangesets(TfsRepositoryPath, lowerBoundChangesetId, this, lastVersion, byLots);
// Subtree owner: merge the streams of all subtrees, ordered by changeset id.
return _globals.Repository.GetSubtrees(this)
.SelectMany(x => Tfs.GetChangesets(x.TfsRepositoryPath, lowerBoundChangesetId, x, lastVersion, byLots))
.OrderBy(x => x.Summary.ChangesetId);
}
// Retrieves a single TFS changeset scoped to this remote.
public ITfsChangeset GetChangeset(int changesetId)
{
    var changeset = Tfs.GetChangeset(changesetId, this);
    return changeset;
}
// Latest changeset of this remote; for a subtree owner (empty repository path),
// the newest changeset across all subtrees wins.
private ITfsChangeset GetLatestChangeset()
{
    if (string.IsNullOrEmpty(TfsRepositoryPath))
    {
        var changesetId = _globals.Repository.GetSubtrees(this)
            .Select(x => Tfs.GetLatestChangeset(x))
            .Max(x => x.Summary.ChangesetId);
        return GetChangeset(changesetId);
    }
    return Tfs.GetLatestChangeset(this);
}
// Latest changeset id of this remote; a subtree owner takes the max across its subtrees.
private int GetLatestChangesetId()
{
    if (string.IsNullOrEmpty(TfsRepositoryPath))
    {
        return _globals.Repository.GetSubtrees(this)
            .Select(x => Tfs.GetLatestChangesetId(x))
            .Max();
    }
    return Tfs.GetLatestChangesetId(this);
}
// Records that commitHash is the git commit for TFS changeset changesetId: updates the
// in-memory mapping, moves the remote ref, and (when Autotag is set) tags the commit.
public void UpdateTfsHead(string commitHash, int changesetId)
{
MaxCommitHash = commitHash;
MaxChangesetId = changesetId;
Repository.UpdateRef(RemoteRef, MaxCommitHash, "C" + MaxChangesetId);
if (Autotag)
Repository.UpdateRef(TagPrefix + "C" + MaxChangesetId, MaxCommitHash);
LogCurrentMapping();
}
// Logs the current "changeset = commit" mapping for the user.
private void LogCurrentMapping()
{
    var mapping = "C" + MaxChangesetId + " = " + MaxCommitHash;
    Trace.TraceInformation(mapping);
}
// Ref prefix under which autotags for this remote are created.
private string TagPrefix
{
    get
    {
        var prefix = "refs/tags/tfs/" + Id + "/";
        return prefix;
    }
}
// Full name of the git ref tracking this TFS remote.
public string RemoteRef
{
    get
    {
        var refName = "refs/remotes/tfs/" + Id;
        return refName;
    }
}
// Runs git garbage collection every GcPeriod commits (counted down via _globals.GcCountdown)
// so long fetches don't accumulate too many loose objects.
private void DoGcIfNeeded()
{
Trace.WriteLine("GC Countdown: " + _globals.GcCountdown);
if (--_globals.GcCountdown < 0)
{
_globals.GcCountdown = _globals.GcPeriod;
Repository.GarbageCollect(true, "Try running it after git-tfs is finished.");
}
}
// Overload without an ignorable-error handler.
private LogEntry Apply(string parent, ITfsChangeset changeset, IDictionary<string, GitObject> entries)
{
    Action<Exception> noErrorHandler = null;
    return Apply(parent, changeset, entries, noErrorHandler);
}
// Overload that builds a fresh (empty) git-object cache for this single apply.
private LogEntry Apply(string parent, ITfsChangeset changeset, Action<Exception> ignorableErrorHandler)
{
    var entries = BuildEntryDictionary();
    return Apply(parent, changeset, entries, ignorableErrorHandler);
}
// Applies a TFS changeset on top of <parent> inside a TFS workspace, returning a log entry
// whose Tree is the resulting git tree. <entries> caches git objects across changesets.
private LogEntry Apply(string parent, ITfsChangeset changeset, IDictionary<string, GitObject> entries, Action<Exception> ignorableErrorHandler)
{
LogEntry result = null;
WithWorkspace(changeset.Summary, workspace =>
{
var treeBuilder = workspace.Remote.Repository.GetTreeBuilder(parent);
result = changeset.Apply(parent, treeBuilder, workspace, entries, ignorableErrorHandler);
result.Tree = treeBuilder.GetTree();
});
// The very first commit of a repository has no parent.
if (!string.IsNullOrEmpty(parent)) result.CommitParents.Add(parent);
return result;
}
// Builds a commit containing the FULL tree of the changeset (no diff against a parent);
// used by quick fetch. <lastCommit>, when present, becomes the commit's sole parent.
private LogEntry CopyTree(string lastCommit, ITfsChangeset changeset)
{
LogEntry result = null;
WithWorkspace(changeset.Summary, workspace =>
{
// A null parent makes the tree builder start from an empty tree.
var treeBuilder = workspace.Remote.Repository.GetTreeBuilder(null);
result = changeset.CopyTree(treeBuilder, workspace);
result.Tree = treeBuilder.GetTree();
});
if (!string.IsNullOrEmpty(lastCommit)) result.CommitParents.Add(lastCommit);
return result;
}
// Appends the git-tfs marker line to the message and commits, returning the new sha.
private string Commit(LogEntry logEntry)
{
    logEntry.Log = BuildCommitMessage(logEntry.Log, logEntry.ChangesetId);
    var commit = Repository.Commit(logEntry);
    return commit.Sha;
}
// Builds the final commit message: the TFS checkin comment followed by the
// git-tfs-id marker line (url, repository path and changeset id).
private string BuildCommitMessage(string tfsCheckinComment, int changesetId)
{
    var message = new StringBuilder();
    message.AppendLine(tfsCheckinComment);
    message.AppendFormat(GitTfsConstants.TfsCommitInfoFormat,
        TfsUrl, TfsRepositoryPath, changesetId);
    message.AppendLine();
    return message.ToString();
}
// Applies a TFS shelveset as a new local git branch. The shelveset's base changeset is
// used as the parent commit; with <force>, falls back to the currently checked-out commit
// when that changeset is not known locally.
public void Unshelve(string shelvesetOwner, string shelvesetName, string destinationBranch, Action<Exception> ignorableErrorHandler, bool force)
{
var destinationRef = GitRepository.ShortToLocalName(destinationBranch);
if (Repository.HasRef(destinationRef))
throw new GitTfsException("ERROR: Destination branch (" + destinationBranch + ") already exists!");
var shelvesetChangeset = Tfs.GetShelvesetData(this, shelvesetOwner, shelvesetName);
var parentId = shelvesetChangeset.BaseChangesetId;
var ch = GetTfsChangesetById(parentId);
string rootCommit;
if (ch == null)
{
// Base changeset never fetched: either fail, or (with force) apply on the current commit.
if (!force)
throw new GitTfsException("ERROR: Parent changeset C" + parentId + " not found.", new[]
{
"Try fetching the latest changes from TFS",
"Try applying the shelveset on the currently checkouted commit using the '--force' option"
}
);
Trace.TraceInformation("warning: Parent changeset C" + parentId + " not found."
+ " Trying to apply the shelveset on the current commit...");
rootCommit = Repository.GetCurrentCommit();
}
else
{
rootCommit = ch.GitCommit;
}
var log = Apply(rootCommit, shelvesetChangeset, ignorableErrorHandler);
var commit = Commit(log);
Repository.UpdateRef(destinationRef, commit, "Shelveset " + shelvesetName + " from " + shelvesetOwner);
}
// Public entry point: runs the shelve operation inside a TFS workspace.
public void Shelve(string shelvesetName, string head, TfsChangesetInfo parentChangeset, CheckinOptions options, bool evaluateCheckinPolicies)
{
    Action<ITfsWorkspace> shelveAction =
        workspace => Shelve(shelvesetName, head, parentChangeset, options, evaluateCheckinPolicies, workspace);
    WithWorkspace(parentChangeset, shelveAction);
}
// True when a shelveset with this name already exists on the TFS server.
public bool HasShelveset(string shelvesetName)
{
    var exists = Tfs.HasShelveset(shelvesetName);
    return exists;
}
// Pends the git diff into the workspace and shelves it under the given name.
private void Shelve(string shelvesetName, string head, TfsChangesetInfo parentChangeset, CheckinOptions options, bool evaluateCheckinPolicies, ITfsWorkspace workspace)
{
    var parentCommit = parentChangeset.GitCommit;
    PendChangesToWorkspace(head, parentCommit, workspace);
    workspace.Shelve(shelvesetName, evaluateCheckinPolicies, options, () => Repository.GetCommitMessage(head, parentCommit));
}
// Opens the TFS checkin tool for the pending changes; returns the created changeset id.
public int CheckinTool(string head, TfsChangesetInfo parentChangeset)
{
    var changeset = 0;
    Action<ITfsWorkspace> checkinAction =
        workspace => changeset = CheckinTool(head, parentChangeset, workspace);
    WithWorkspace(parentChangeset, checkinAction);
    return changeset;
}
// Pends the git diff into the workspace and launches the interactive checkin tool.
private int CheckinTool(string head, TfsChangesetInfo parentChangeset, ITfsWorkspace workspace)
{
    var parentCommit = parentChangeset.GitCommit;
    PendChangesToWorkspace(head, parentCommit, workspace);
    return workspace.CheckinTool(() => Repository.GetCommitMessage(head, parentCommit));
}
// Replays the git diff between <parent> and <head> as pending changes in the TFS
// workspace. DirectoryTidier (disposed at the end) cleans up directories emptied by the
// changes, pulling the full TFS tree lazily only if it needs it.
private void PendChangesToWorkspace(string head, string parent, ITfsWorkspaceModifier workspace)
{
using (var tidyWorkspace = new DirectoryTidier(workspace, () => GetLatestChangeset().GetFullTree()))
{
foreach (var change in Repository.GetChangedFiles(parent, head))
{
change.Apply(tidyWorkspace);
}
}
}
// Checks in the diff between the parent changeset's commit and <head>; returns the new changeset id.
public int Checkin(string head, TfsChangesetInfo parentChangeset, CheckinOptions options, string sourceTfsPath = null)
{
    var changeset = 0;
    Action<ITfsWorkspace> checkinAction =
        workspace => changeset = Checkin(head, parentChangeset.GitCommit, workspace, options, sourceTfsPath);
    WithWorkspace(parentChangeset, checkinAction);
    return changeset;
}
// Overload taking an explicit parent commit instead of deriving it from the changeset.
public int Checkin(string head, string parent, TfsChangesetInfo parentChangeset, CheckinOptions options, string sourceTfsPath = null)
{
    var changeset = 0;
    Action<ITfsWorkspace> checkinAction =
        workspace => changeset = Checkin(head, parent, workspace, options, sourceTfsPath);
    WithWorkspace(parentChangeset, checkinAction);
    return changeset;
}
// Runs <action> inside a TFS workspace; when this remote owns subtrees, the workspace
// maps every subtree's repository path under its prefix.
private void WithWorkspace(TfsChangesetInfo parentChangeset, Action<ITfsWorkspace> action)
{
    //are there any subtrees?
    var subtrees = _globals.Repository.GetSubtrees(this);
    if (!subtrees.Any())
    {
        Tfs.WithWorkspace(WorkingDirectory, this, parentChangeset, action);
        return;
    }
    var subtreeMappings = subtrees.Select(x => new Tuple<string, string>(x.TfsRepositoryPath, x.Prefix));
    Tfs.WithWorkspace(WorkingDirectory, this, subtreeMappings, parentChangeset, action);
}
// Pends the git diff, optionally pends a TFS merge from sourceTfsPath, then checks in.
private int Checkin(string head, string parent, ITfsWorkspace workspace, CheckinOptions options, string sourceTfsPath)
{
    PendChangesToWorkspace(head, parent, workspace);
    var isMergeCheckin = !string.IsNullOrWhiteSpace(sourceTfsPath);
    if (isMergeCheckin)
        workspace.Merge(sourceTfsPath, TfsRepositoryPath);
    return workspace.Checkin(options, () => Repository.GetCommitMessage(head, parent));
}
// True when both the TFS server url (or one of its aliases) and the repository path match.
public bool MatchesUrlAndRepositoryPath(string tfsUrl, string tfsRepositoryPath)
{
    if (!MatchesTfsUrl(tfsUrl))
        return false;
    // A null repository path only matches another null path.
    return TfsRepositoryPath == null
        ? tfsRepositoryPath == null
        : TfsRepositoryPath.Equals(tfsRepositoryPath, StringComparison.OrdinalIgnoreCase);
}
// Deletes a shelveset from the TFS server (done inside a workspace context).
public void DeleteShelveset(string shelvesetName)
{
    Action<ITfsWorkspace> deleteAction = workspace => workspace.DeleteShelveset(shelvesetName);
    WithWorkspace(null, deleteAction);
}
// True when the url matches this remote's TFS url or one of its configured aliases.
private bool MatchesTfsUrl(string tfsUrl)
{
    if (TfsUrl.Equals(tfsUrl, StringComparison.OrdinalIgnoreCase))
        return true;
    return Aliases.Contains(tfsUrl, StringComparison.OrdinalIgnoreCase);
}
// Derives a valid local git branch name from a TFS repository path (prefixed with the
// team project name when the path lives outside the default team project).
private string ExtractGitBranchNameFromTfsRepositoryPath(string tfsRepositoryPath)
{
    var includeTeamProjectName = !Repository.IsInSameTeamProjectAsDefaultRepository(tfsRepositoryPath);
    var candidateName = tfsRepositoryPath.ToGitBranchNameFromTfsRepositoryPath(includeTeamProjectName);
    var gitBranchName = Repository.AssertValidBranchName(candidateName);
    Trace.TraceInformation("The name of the local branch will be : " + gitBranchName);
    return gitBranchName;
}
// Public entry point for creating a branch remote; delegates to InitTfsBranch.
public IGitTfsRemote InitBranch(RemoteOptions remoteOptions, string tfsRepositoryPath, int rootChangesetId, bool fetchParentBranch, string gitBranchNameExpected = null, IRenameResult renameResult = null)
{
    var initializedRemote = InitTfsBranch(remoteOptions, tfsRepositoryPath, rootChangesetId, fetchParentBranch, gitBranchNameExpected, renameResult);
    return initializedRemote;
}
// Decides whether a TFS error should be swallowed. Only "TF14098" (access denied on a
// restricted changeset) is ignorable, and only when the user opted in via ignoreRestricted;
// otherwise a hint about the relevant option is (optionally) printed.
private bool IgnoreException(string message, bool ignoreRestricted, bool printHint = true)
{
    // TF14098: "Access Denied: User ... needs Read permission(s) for at least one item in changeset ..."
    var isAccessDenied = message.Contains("TF14098");
    if (!isAccessDenied)
        return false;
    if (ignoreRestricted)
        return true;
    if (printHint)
        Trace.TraceWarning("\nAccess to changeset denied. Try the '--ignore-restricted-changesets' option!\n");
    return false;
}
// Creates (or reuses) the git remote tracking a TFS branch path. When rootChangesetId is
// given, the corresponding git commit is looked up — fetching the owning parent branch if
// allowed — and becomes the branch root; returns null when that root commit can't be found.
private IGitTfsRemote InitTfsBranch(RemoteOptions remoteOptions, string tfsRepositoryPath, int rootChangesetId = -1, bool fetchParentBranch = false, string gitBranchNameExpected = null, IRenameResult renameResult = null, bool ignoreRestricted = false)
{
Trace.WriteLine("Begin process of creating branch for remote :" + tfsRepositoryPath);
// TFS string representations of repository paths do not end in trailing slashes
tfsRepositoryPath = (tfsRepositoryPath ?? string.Empty).TrimEnd('/');
string gitBranchName = ExtractGitBranchNameFromTfsRepositoryPath(
string.IsNullOrWhiteSpace(gitBranchNameExpected) ? tfsRepositoryPath : gitBranchNameExpected);
if (string.IsNullOrWhiteSpace(gitBranchName))
throw new GitTfsException("error: The Git branch name '" + gitBranchName + "' is not valid...\n");
Trace.WriteLine("Git local branch will be :" + gitBranchName);
string sha1RootCommit = null;
if (rootChangesetId != -1)
{
sha1RootCommit = Repository.FindCommitHashByChangesetId(rootChangesetId);
// Root commit unknown locally: fetch the parent branch to create it (when allowed).
if (fetchParentBranch && string.IsNullOrWhiteSpace(sha1RootCommit))
{
try
{
sha1RootCommit = FindRootRemoteAndFetch(rootChangesetId, renameResult);
}
catch (Exception ex)
{
Trace.WriteLine("Getting changeset fetch failed: " + ex.Message);
if (!IgnoreException(ex.Message, ignoreRestricted))
throw;
}
}
if (string.IsNullOrWhiteSpace(sha1RootCommit))
return null;
Trace.WriteLine("Found commit " + sha1RootCommit + " for changeset :" + rootChangesetId);
}
IGitTfsRemote tfsRemote;
if (Repository.HasRemote(gitBranchName))
{
// Reuse the existing remote, warning when its configuration diverges.
Trace.WriteLine("Remote already exist");
tfsRemote = Repository.ReadTfsRemote(gitBranchName);
if (tfsRemote.TfsUrl != TfsUrl)
Trace.WriteLine("warning: Url is different");
if (tfsRemote.TfsRepositoryPath != tfsRepositoryPath)
Trace.WriteLine("warning: TFS repository path is different");
}
else
{
Trace.WriteLine("Try creating remote...");
tfsRemote = Repository.CreateTfsRemote(new RemoteInfo
{
Id = gitBranchName,
Url = TfsUrl,
Repository = tfsRepositoryPath,
RemoteOptions = remoteOptions
}, string.Empty);
// Metadata-export settings propagate from the parent remote to the new branch remote.
tfsRemote.ExportMetadatas = ExportMetadatas;
tfsRemote.ExportWorkitemsMapping = ExportWorkitemsMapping;
}
if (sha1RootCommit != null && !Repository.HasRef(tfsRemote.RemoteRef))
{
if (!Repository.CreateBranch(tfsRemote.RemoteRef, sha1RootCommit))
throw new GitTfsException("error: Fail to create remote branch ref file!");
}
Trace.WriteLine("Remote created!");
return tfsRemote;
}
}
}
| |
using System;
using UnityEngine;
[AddComponentMenu("NGUI/Interaction/Play Sound")]
public class UIPlaySound : MonoBehaviour
{
public enum Trigger
{
OnClick,
OnMouseOver,
OnMouseOut,
OnPress,
OnRelease,
Custom
}
public enum SoundMode
{
NULL,
NormalClick,
ViewClosed,
Login,
Award,
RANDOM,
SKILL,
Button,
GameButton,
ViewEject,
TipsEject,
Dropdown,
Putaway,
FunctionEject,
FunctionIncome,
FunctionSwitch,
ForGold,
DressUp,
Strengthen,
JewelSet,
JewelSynthesis,
EquipRemove,
Smelting,
SkillUpgrade,
SoulUpgrade,
ScreenOPen,
ScreenClose,
createshow_yijian,
AddMoney,
system_button_all_8,
system_button_all_9,
createshow_yijian_1,
createshow_bahuang,
createshow_bahuang_1,
createshow_taiqing,
createshow_taiqing_1,
Button_Arrange,
Button_Bottle_of_HP,
Button_Draw,
Button_Mix,
Button_Strengthen,
Button_Horse,
Button_Country,
Button_Skill,
Button_TenDraw,
taskcomplete = 50,
scene_Fail,
Arena_01,
system_remind_all_18 = 55,
system_remind_all_19,
system_remind_all_20,
system_remind_all_21,
system_remind_all_22,
system_huoban_001 = 61,
system_remind_shen,
system_remind_001 = 89,
system_remind_002,
system_remind_003,
system_remind_004,
system_remind_005,
system_remind_006,
Playertalk_01_m = 101,
Playertalk_01_f = 104,
Playertalk_02_m = 102,
Playertalk_02_f = 105,
Playertalk_03_m = 103,
Playertalk_03_f = 106,
system_remind_all_30,
system_remind_all_31,
system_remind_all_32,
system_remind_all_33,
system_remind_all_34,
system_remind_all_35,
system_remind_all_36,
system_remind_all_37,
system_remind_all_38,
system_remind_all_39,
system_remind_all_40,
system_remind_all_41,
Npctalk0007 = 124,
Npctalk0008,
Npctalk0009,
Npctalk0010,
Npctalk0011,
Npctalk0012,
Npctalk0013,
Npctalk0014,
Npctalk0015,
system_remind_item_18 = 137,
system_remind_all_43 = 139,
system_remind_all_44,
system_remind_all_45,
system_remind_all_50 = 146,
system_remind_all_51,
system_ride_015 = 152,
system_ride_016,
system_ride_017,
system_pet_023,
system_Role_ChangeBoss_Success,
system_Role_ChangeBoss_Dead
}
public UIPlaySound.SoundMode soundMode;
public AudioClip audioClip;
public UIPlaySound.Trigger trigger;
private bool mIsOver;
[Range(0f, 1f)]
public float volume = 1f;
[Range(0f, 2f)]
public float pitch = 1f;
/// <summary>
/// NGUI hover callback. Plays the configured sound when the trigger is
/// OnMouseOver (cursor enters) or OnMouseOut (cursor leaves), de-duplicating
/// repeated hover notifications via mIsOver.
/// </summary>
private void OnHover(bool isOver)
{
    // For mouse-over triggers, ignore repeated notifications of the same state.
    if (this.trigger == UIPlaySound.Trigger.OnMouseOver)
    {
        if (this.mIsOver == isOver)
        {
            return;
        }
        this.mIsOver = isOver;
    }

    bool enterMatches = isOver && this.trigger == UIPlaySound.Trigger.OnMouseOver;
    bool leaveMatches = !isOver && this.trigger == UIPlaySound.Trigger.OnMouseOut;
    if (!base.enabled || (!enterMatches && !leaveMatches))
    {
        return;
    }

    // A built-in sound id takes precedence over the explicit clip.
    if (this.soundMode == UIPlaySound.SoundMode.NULL)
    {
        NGUITools.PlaySound(this.audioClip, this.volume, this.pitch);
    }
    else
    {
        NGUITools.PlaySound((int)this.soundMode);
    }
}
/// <summary>
/// NGUI press callback. Plays the configured sound when the trigger is
/// OnPress (button down) or OnRelease (button up), de-duplicating repeated
/// press notifications via mIsOver (the field doubles as press state here).
/// </summary>
private void OnPress(bool isPressed)
{
    // For press triggers, ignore repeated notifications of the same state.
    if (this.trigger == UIPlaySound.Trigger.OnPress)
    {
        if (this.mIsOver == isPressed)
        {
            return;
        }
        this.mIsOver = isPressed;
    }

    bool pressMatches = isPressed && this.trigger == UIPlaySound.Trigger.OnPress;
    bool releaseMatches = !isPressed && this.trigger == UIPlaySound.Trigger.OnRelease;
    if (!base.enabled || (!pressMatches && !releaseMatches))
    {
        return;
    }

    // A built-in sound id takes precedence over the explicit clip.
    if (this.soundMode == UIPlaySound.SoundMode.NULL)
    {
        NGUITools.PlaySound(this.audioClip, this.volume, this.pitch);
    }
    else
    {
        NGUITools.PlaySound((int)this.soundMode);
    }
}
/// <summary>
/// NGUI click callback. Plays the configured sound when the trigger is OnClick.
/// </summary>
private void OnClick()
{
    if (!base.enabled || this.trigger != UIPlaySound.Trigger.OnClick)
    {
        return;
    }

    // A built-in sound id takes precedence over the explicit clip.
    if (this.soundMode == UIPlaySound.SoundMode.NULL)
    {
        NGUITools.PlaySound(this.audioClip, this.volume, this.pitch);
    }
    else
    {
        NGUITools.PlaySound((int)this.soundMode);
    }
}
/// <summary>
/// NGUI selection callback. Forwards to OnHover so controller-based navigation
/// (or deselection) produces the same sound as mouse hovering.
/// </summary>
private void OnSelect(bool isSelected)
{
    if (!base.enabled)
    {
        return;
    }
    if (!isSelected || UICamera.currentScheme == UICamera.ControlScheme.Controller)
    {
        this.OnHover(isSelected);
    }
}
/// <summary>
/// Plays the configured sound immediately, regardless of trigger or enabled state:
/// the built-in SoundMode id if one is set, otherwise the explicit AudioClip.
/// </summary>
public void Play()
{
    if (this.soundMode == UIPlaySound.SoundMode.NULL)
    {
        NGUITools.PlaySound(this.audioClip, this.volume, this.pitch);
    }
    else
    {
        NGUITools.PlaySound((int)this.soundMode);
    }
}
}
| |
/*
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.ElastiCache.Model
{
/// <summary>
/// Container for the parameters to the ModifyReplicationGroup operation.
/// <para>The <i>ModifyReplicationGroup</i> operation modifies the settings for a replication group.</para>
/// </summary>
/// <seealso cref="Amazon.ElastiCache.AmazonElastiCache.ModifyReplicationGroup"/>
public class ModifyReplicationGroupRequest : AmazonWebServiceRequest
{
    private string _replicationGroupId;
    private string _replicationGroupDescription;
    private List<string> _cacheSecurityGroupNames = new List<string>();
    private List<string> _securityGroupIds = new List<string>();
    private string _preferredMaintenanceWindow;
    private string _notificationTopicArn;
    private string _cacheParameterGroupName;
    private string _notificationTopicStatus;
    private bool? _applyImmediately;
    private string _engineVersion;
    private bool? _autoMinorVersionUpgrade;
    private string _primaryClusterId;

    /// <summary>
    /// The identifier of the replication group whose settings should be modified.
    /// </summary>
    public string ReplicationGroupId
    {
        get { return _replicationGroupId; }
        set { _replicationGroupId = value; }
    }

    /// <summary>
    /// Fluent setter for the ReplicationGroupId property.
    /// </summary>
    /// <param name="replicationGroupId">The value to set for the ReplicationGroupId property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithReplicationGroupId(string replicationGroupId)
    {
        _replicationGroupId = replicationGroupId;
        return this;
    }

    // True when a replication group identifier has been supplied.
    internal bool IsSetReplicationGroupId()
    {
        return _replicationGroupId != null;
    }

    /// <summary>
    /// A description for the replication group. Maximum length is 255 characters.
    /// </summary>
    public string ReplicationGroupDescription
    {
        get { return _replicationGroupDescription; }
        set { _replicationGroupDescription = value; }
    }

    /// <summary>
    /// Fluent setter for the ReplicationGroupDescription property.
    /// </summary>
    /// <param name="replicationGroupDescription">The value to set for the ReplicationGroupDescription property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithReplicationGroupDescription(string replicationGroupDescription)
    {
        _replicationGroupDescription = replicationGroupDescription;
        return this;
    }

    // True when a description has been supplied.
    internal bool IsSetReplicationGroupDescription()
    {
        return _replicationGroupDescription != null;
    }

    /// <summary>
    /// Cache security group names to authorize for the clusters in this replication group. The
    /// change is applied asynchronously as soon as possible, and is only valid for replication
    /// groups whose cache clusters run outside an Amazon Virtual Private Cloud (VPC).
    /// Constraints: each name contains at most 255 alphanumeric characters and must not be "Default".
    /// </summary>
    public List<string> CacheSecurityGroupNames
    {
        get { return _cacheSecurityGroupNames; }
        set { _cacheSecurityGroupNames = value; }
    }

    /// <summary>
    /// Adds elements to the CacheSecurityGroupNames collection.
    /// </summary>
    /// <param name="cacheSecurityGroupNames">The values to add to the CacheSecurityGroupNames collection </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithCacheSecurityGroupNames(params string[] cacheSecurityGroupNames)
    {
        _cacheSecurityGroupNames.AddRange(cacheSecurityGroupNames);
        return this;
    }

    /// <summary>
    /// Adds elements to the CacheSecurityGroupNames collection.
    /// </summary>
    /// <param name="cacheSecurityGroupNames">The values to add to the CacheSecurityGroupNames collection </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithCacheSecurityGroupNames(IEnumerable<string> cacheSecurityGroupNames)
    {
        _cacheSecurityGroupNames.AddRange(cacheSecurityGroupNames);
        return this;
    }

    // True when at least one cache security group name has been supplied.
    internal bool IsSetCacheSecurityGroupNames()
    {
        return _cacheSecurityGroupNames.Count > 0;
    }

    /// <summary>
    /// The VPC security groups associated with the cache clusters in the replication group.
    /// Only valid for replication groups whose cache clusters run inside an Amazon Virtual
    /// Private Cloud (VPC).
    /// </summary>
    public List<string> SecurityGroupIds
    {
        get { return _securityGroupIds; }
        set { _securityGroupIds = value; }
    }

    /// <summary>
    /// Adds elements to the SecurityGroupIds collection.
    /// </summary>
    /// <param name="securityGroupIds">The values to add to the SecurityGroupIds collection </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithSecurityGroupIds(params string[] securityGroupIds)
    {
        _securityGroupIds.AddRange(securityGroupIds);
        return this;
    }

    /// <summary>
    /// Adds elements to the SecurityGroupIds collection.
    /// </summary>
    /// <param name="securityGroupIds">The values to add to the SecurityGroupIds collection </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithSecurityGroupIds(IEnumerable<string> securityGroupIds)
    {
        _securityGroupIds.AddRange(securityGroupIds);
        return this;
    }

    // True when at least one VPC security group id has been supplied.
    internal bool IsSetSecurityGroupIds()
    {
        return _securityGroupIds.Count > 0;
    }

    /// <summary>
    /// The weekly time range (in UTC) during which replication group system maintenance can
    /// occur; note that system maintenance may result in an outage. The change is made
    /// immediately. If the window is moved to the current time, there must be at least 120
    /// minutes between the current time and the end of the window so pending changes can apply.
    /// </summary>
    public string PreferredMaintenanceWindow
    {
        get { return _preferredMaintenanceWindow; }
        set { _preferredMaintenanceWindow = value; }
    }

    /// <summary>
    /// Fluent setter for the PreferredMaintenanceWindow property.
    /// </summary>
    /// <param name="preferredMaintenanceWindow">The value to set for the PreferredMaintenanceWindow property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithPreferredMaintenanceWindow(string preferredMaintenanceWindow)
    {
        _preferredMaintenanceWindow = preferredMaintenanceWindow;
        return this;
    }

    // True when a maintenance window has been supplied.
    internal bool IsSetPreferredMaintenanceWindow()
    {
        return _preferredMaintenanceWindow != null;
    }

    /// <summary>
    /// The Amazon Resource Name (ARN) of the SNS topic that notifications are sent to.
    /// <note> The SNS topic owner must be same as the replication group owner. </note>
    /// </summary>
    public string NotificationTopicArn
    {
        get { return _notificationTopicArn; }
        set { _notificationTopicArn = value; }
    }

    /// <summary>
    /// Fluent setter for the NotificationTopicArn property.
    /// </summary>
    /// <param name="notificationTopicArn">The value to set for the NotificationTopicArn property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithNotificationTopicArn(string notificationTopicArn)
    {
        _notificationTopicArn = notificationTopicArn;
        return this;
    }

    // True when an SNS topic ARN has been supplied.
    internal bool IsSetNotificationTopicArn()
    {
        return _notificationTopicArn != null;
    }

    /// <summary>
    /// The cache parameter group to apply to all cache nodes in this replication group.
    /// Applied asynchronously as soon as possible for parameters when <i>ApplyImmediately</i>
    /// is specified as <i>true</i> for this request.
    /// </summary>
    public string CacheParameterGroupName
    {
        get { return _cacheParameterGroupName; }
        set { _cacheParameterGroupName = value; }
    }

    /// <summary>
    /// Fluent setter for the CacheParameterGroupName property.
    /// </summary>
    /// <param name="cacheParameterGroupName">The value to set for the CacheParameterGroupName property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithCacheParameterGroupName(string cacheParameterGroupName)
    {
        _cacheParameterGroupName = cacheParameterGroupName;
        return this;
    }

    // True when a cache parameter group name has been supplied.
    internal bool IsSetCacheParameterGroupName()
    {
        return _cacheParameterGroupName != null;
    }

    /// <summary>
    /// The status of the Amazon SNS notification topic for the replication group. Notifications
    /// are sent only while the status is <i>active</i>. Valid values: <c>active</c> | <c>inactive</c>
    /// </summary>
    public string NotificationTopicStatus
    {
        get { return _notificationTopicStatus; }
        set { _notificationTopicStatus = value; }
    }

    /// <summary>
    /// Fluent setter for the NotificationTopicStatus property.
    /// </summary>
    /// <param name="notificationTopicStatus">The value to set for the NotificationTopicStatus property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithNotificationTopicStatus(string notificationTopicStatus)
    {
        _notificationTopicStatus = notificationTopicStatus;
        return this;
    }

    // True when a notification topic status has been supplied.
    internal bool IsSetNotificationTopicStatus()
    {
        return _notificationTopicStatus != null;
    }

    /// <summary>
    /// When <c>true</c>, the modifications in this request and any pending modifications are
    /// applied asynchronously and as soon as possible, regardless of the
    /// <i>PreferredMaintenanceWindow</i> setting for the replication group. When <c>false</c>,
    /// changes to the nodes are applied on the next maintenance reboot, or the next failure
    /// reboot, whichever occurs first. Valid values: <c>true</c> | <c>false</c> Default: <c>false</c>
    /// </summary>
    public bool ApplyImmediately
    {
        get { return _applyImmediately ?? default(bool); }
        set { _applyImmediately = value; }
    }

    /// <summary>
    /// Fluent setter for the ApplyImmediately property.
    /// </summary>
    /// <param name="applyImmediately">The value to set for the ApplyImmediately property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithApplyImmediately(bool applyImmediately)
    {
        _applyImmediately = applyImmediately;
        return this;
    }

    // True when the ApplyImmediately flag has been explicitly assigned.
    internal bool IsSetApplyImmediately()
    {
        return _applyImmediately.HasValue;
    }

    /// <summary>
    /// The upgraded version of the cache engine to be run on the nodes in the replication group.
    /// </summary>
    public string EngineVersion
    {
        get { return _engineVersion; }
        set { _engineVersion = value; }
    }

    /// <summary>
    /// Fluent setter for the EngineVersion property.
    /// </summary>
    /// <param name="engineVersion">The value to set for the EngineVersion property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithEngineVersion(string engineVersion)
    {
        _engineVersion = engineVersion;
        return this;
    }

    // True when an engine version has been supplied.
    internal bool IsSetEngineVersion()
    {
        return _engineVersion != null;
    }

    /// <summary>
    /// Determines whether minor engine upgrades are applied automatically to all cache nodes in
    /// the replication group during the maintenance window: <c>true</c> allows these upgrades
    /// to occur; <c>false</c> disables automatic upgrades.
    /// </summary>
    public bool AutoMinorVersionUpgrade
    {
        get { return _autoMinorVersionUpgrade ?? default(bool); }
        set { _autoMinorVersionUpgrade = value; }
    }

    /// <summary>
    /// Fluent setter for the AutoMinorVersionUpgrade property.
    /// </summary>
    /// <param name="autoMinorVersionUpgrade">The value to set for the AutoMinorVersionUpgrade property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithAutoMinorVersionUpgrade(bool autoMinorVersionUpgrade)
    {
        _autoMinorVersionUpgrade = autoMinorVersionUpgrade;
        return this;
    }

    // True when the AutoMinorVersionUpgrade flag has been explicitly assigned.
    internal bool IsSetAutoMinorVersionUpgrade()
    {
        return _autoMinorVersionUpgrade.HasValue;
    }

    /// <summary>
    /// When specified, ElastiCache promotes each node in the named cache cluster to the primary
    /// role; the nodes of all other clusters in the replication group become read replicas.
    /// </summary>
    public string PrimaryClusterId
    {
        get { return _primaryClusterId; }
        set { _primaryClusterId = value; }
    }

    /// <summary>
    /// Fluent setter for the PrimaryClusterId property.
    /// </summary>
    /// <param name="primaryClusterId">The value to set for the PrimaryClusterId property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ModifyReplicationGroupRequest WithPrimaryClusterId(string primaryClusterId)
    {
        _primaryClusterId = primaryClusterId;
        return this;
    }

    // True when a primary cluster id has been supplied.
    internal bool IsSetPrimaryClusterId()
    {
        return _primaryClusterId != null;
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.ObjectModel;
using System.Diagnostics;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Microsoft.VisualStudio.Debugger.Evaluation.ClrCompilation;
using Microsoft.VisualStudio.Debugger.Metadata;
using Type = Microsoft.VisualStudio.Debugger.Metadata.Type;
namespace Microsoft.CodeAnalysis.ExpressionEvaluator
{
/// <summary>
/// Type member expansion.
/// </summary>
/// <remarks>
/// Includes accesses to static members with instance receivers and
/// accesses to instance members with dynamic receivers.
/// </remarks>
internal sealed class MemberExpansion : Expansion
{
    /// <summary>
    /// Builds the child-row expansion for <paramref name="value"/>: public instance members,
    /// then (as synthesized group nodes) static members, the native COM view, non-public
    /// instance members, the Results View, and the Dynamic View, in that order.
    /// Returns null when the runtime type has no visible members.
    /// </summary>
    internal static Expansion CreateExpansion(
        DkmInspectionContext inspectionContext,
        TypeAndCustomInfo declaredTypeAndInfo,
        DkmClrValue value,
        ExpansionFlags flags,
        Predicate<MemberInfo> predicate,
        Formatter formatter)
    {
        // For members of type DynamicProperty (part of Dynamic View expansion), we want
        // to expand the underlying value (not the members of the DynamicProperty type).
        var type = value.Type;
        var isDynamicProperty = type.GetLmrType().IsDynamicProperty();
        if (isDynamicProperty)
        {
            Debug.Assert(!value.IsNull);
            value = value.GetFieldValue("value", inspectionContext);
        }
        var runtimeType = type.GetLmrType();
        // Primitives, enums, function pointers, and null values with a declared type that is an interface have no visible members.
        Debug.Assert(!runtimeType.IsInterface || value.IsNull);
        if (formatter.IsPredefinedType(runtimeType) || runtimeType.IsEnum || runtimeType.IsInterface || runtimeType.IsFunctionPointer())
        {
            return null;
        }
        // As in the old C# EE, DynamicProperty members are only expandable if they have a Dynamic View expansion.
        var dynamicViewExpansion = DynamicViewExpansion.CreateExpansion(inspectionContext, value, formatter);
        if (isDynamicProperty && (dynamicViewExpansion == null))
        {
            return null;
        }
        var dynamicFlagsMap = DynamicFlagsMap.Create(declaredTypeAndInfo);
        var expansions = ArrayBuilder<Expansion>.GetInstance();
        // From the members, collect the fields and properties,
        // separated into static and instance members.
        var staticMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
        var instanceMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
        var appDomain = value.Type.AppDomain;
        // Expand members. (Ideally, this should be done lazily.)
        var allMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
        var includeInherited = (flags & ExpansionFlags.IncludeBaseMembers) == ExpansionFlags.IncludeBaseMembers;
        var hideNonPublic = (inspectionContext.EvaluationFlags & DkmEvaluationFlags.HideNonPublicMembers) == DkmEvaluationFlags.HideNonPublicMembers;
        runtimeType.AppendTypeMembers(allMembers, predicate, declaredTypeAndInfo.Type, appDomain, includeInherited, hideNonPublic);
        foreach (var member in allMembers)
        {
            var name = member.Name;
            // Skip compiler-synthesized members (e.g. backing fields).
            if (name.IsCompilerGenerated())
            {
                continue;
            }
            if (member.IsStatic)
            {
                staticMembers.Add(member);
            }
            else if (!value.IsNull)
            {
                // Instance members are only shown when there is an actual instance.
                instanceMembers.Add(member);
            }
        }
        allMembers.Free();
        // Public and non-public instance members.
        Expansion publicInstanceExpansion;
        Expansion nonPublicInstanceExpansion;
        GetPublicAndNonPublicMembers(
            instanceMembers,
            dynamicFlagsMap,
            out publicInstanceExpansion,
            out nonPublicInstanceExpansion);
        // Public and non-public static members.
        Expansion publicStaticExpansion;
        Expansion nonPublicStaticExpansion;
        GetPublicAndNonPublicMembers(
            staticMembers,
            dynamicFlagsMap,
            out publicStaticExpansion,
            out nonPublicStaticExpansion);
        if (publicInstanceExpansion != null)
        {
            expansions.Add(publicInstanceExpansion);
        }
        // All static members (public and non-public) are grouped under a single
        // "Static members" node.
        if ((publicStaticExpansion != null) || (nonPublicStaticExpansion != null))
        {
            var staticExpansions = ArrayBuilder<Expansion>.GetInstance();
            if (publicStaticExpansion != null)
            {
                staticExpansions.Add(publicStaticExpansion);
            }
            if (nonPublicStaticExpansion != null)
            {
                staticExpansions.Add(nonPublicStaticExpansion);
            }
            Debug.Assert(staticExpansions.Count > 0);
            var staticMembersExpansion = new StaticMembersExpansion(
                runtimeType,
                AggregateExpansion.CreateExpansion(staticExpansions));
            staticExpansions.Free();
            expansions.Add(staticMembersExpansion);
        }
        // Offer the "Native View" node when the value wraps a native COM pointer.
        if (value.NativeComPointer != 0)
        {
            expansions.Add(NativeViewExpansion.Instance);
        }
        if (nonPublicInstanceExpansion != null)
        {
            expansions.Add(nonPublicInstanceExpansion);
        }
        // Include Results View if necessary.
        if ((flags & ExpansionFlags.IncludeResultsView) != 0)
        {
            var resultsViewExpansion = ResultsViewExpansion.CreateExpansion(inspectionContext, value, formatter);
            if (resultsViewExpansion != null)
            {
                expansions.Add(resultsViewExpansion);
            }
        }
        if (dynamicViewExpansion != null)
        {
            expansions.Add(dynamicViewExpansion);
        }
        var result = AggregateExpansion.CreateExpansion(expansions);
        expansions.Free();
        return result;
    }

    /// <summary>
    /// Splits <paramref name="allMembers"/> into a public expansion and a non-public
    /// expansion, honoring DebuggerBrowsable attributes: RootHidden members are inlined
    /// in place (their children replace the member row) and Never members are dropped.
    /// Either out-expansion may be null when empty.
    /// </summary>
    private static void GetPublicAndNonPublicMembers(
        ArrayBuilder<MemberAndDeclarationInfo> allMembers,
        DynamicFlagsMap dynamicFlagsMap,
        out Expansion publicExpansion,
        out Expansion nonPublicExpansion)
    {
        var publicExpansions = ArrayBuilder<Expansion>.GetInstance();
        var publicMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
        var nonPublicMembers = ArrayBuilder<MemberAndDeclarationInfo>.GetInstance();
        foreach (var member in allMembers)
        {
            if (member.BrowsableState.HasValue)
            {
                switch (member.BrowsableState.Value)
                {
                    case DkmClrDebuggerBrowsableAttributeState.RootHidden:
                        // Flush the members accumulated so far so that the RootHidden
                        // expansion is inserted at this member's position in the list.
                        if (publicMembers.Count > 0)
                        {
                            publicExpansions.Add(new MemberExpansion(publicMembers.ToArray(), dynamicFlagsMap));
                            publicMembers.Clear();
                        }
                        publicExpansions.Add(new RootHiddenExpansion(member, dynamicFlagsMap));
                        continue;
                    case DkmClrDebuggerBrowsableAttributeState.Never:
                        continue;
                }
            }
            if (member.HideNonPublic && !member.IsPublic)
            {
                nonPublicMembers.Add(member);
            }
            else
            {
                publicMembers.Add(member);
            }
        }
        if (publicMembers.Count > 0)
        {
            publicExpansions.Add(new MemberExpansion(publicMembers.ToArray(), dynamicFlagsMap));
        }
        publicMembers.Free();
        publicExpansion = AggregateExpansion.CreateExpansion(publicExpansions);
        publicExpansions.Free();
        nonPublicExpansion = (nonPublicMembers.Count > 0) ?
            new NonPublicMembersExpansion(
                members: new MemberExpansion(nonPublicMembers.ToArray(), dynamicFlagsMap)) :
            null;
        nonPublicMembers.Free();
    }

    // The members this expansion produces rows for (non-empty; see constructor asserts).
    private readonly MemberAndDeclarationInfo[] _members;
    // Maps positions in the declared type to DynamicAttribute flags so member types
    // display "dynamic" where appropriate.
    private readonly DynamicFlagsMap _dynamicFlagsMap;

    private MemberExpansion(MemberAndDeclarationInfo[] members, DynamicFlagsMap dynamicFlagsMap)
    {
        Debug.Assert(members != null);
        Debug.Assert(members.Length > 0);
        Debug.Assert(dynamicFlagsMap != null);
        _members = members;
        _dynamicFlagsMap = dynamicFlagsMap;
    }

    /// <summary>
    /// Appends the member rows that fall inside the requested [startIndex, startIndex + count)
    /// window, then advances <paramref name="index"/> past all rows this expansion owns
    /// (even those outside the window).
    /// </summary>
    internal override void GetRows(
        ResultProvider resultProvider,
        ArrayBuilder<EvalResultDataItem> rows,
        DkmInspectionContext inspectionContext,
        EvalResultDataItem parent,
        DkmClrValue value,
        int startIndex,
        int count,
        bool visitAll,
        ref int index)
    {
        // Intersect the requested window with this expansion's [index, index + _members.Length) range.
        int startIndex2;
        int count2;
        GetIntersection(startIndex, count, index, _members.Length, out startIndex2, out count2);
        int offset = startIndex2 - index;
        for (int i = 0; i < count2; i++)
        {
            rows.Add(GetMemberRow(resultProvider, inspectionContext, value, _members[i + offset], parent, _dynamicFlagsMap));
        }
        index += _members.Length;
    }

    // Reads the member's value from the receiver and wraps it in a data item row.
    private static EvalResultDataItem GetMemberRow(
        ResultProvider resultProvider,
        DkmInspectionContext inspectionContext,
        DkmClrValue value,
        MemberAndDeclarationInfo member,
        EvalResultDataItem parent,
        DynamicFlagsMap dynamicFlagsMap)
    {
        var memberValue = value.GetMemberValue(member, inspectionContext);
        return CreateMemberDataItem(
            resultProvider,
            inspectionContext,
            member,
            memberValue,
            parent,
            dynamicFlagsMap,
            ExpansionFlags.All);
    }

    /// <summary>
    /// An explicit user request to bypass "Just My Code" and display
    /// the inaccessible members of an instance of an imported type.
    /// </summary>
    private sealed class NonPublicMembersExpansion : Expansion
    {
        private readonly Expansion _members;

        internal NonPublicMembersExpansion(Expansion members)
        {
            _members = members;
        }

        // Contributes a single "Non-Public members" row (when it falls in the
        // requested window) whose children are the wrapped member expansion.
        internal override void GetRows(
            ResultProvider resultProvider,
            ArrayBuilder<EvalResultDataItem> rows,
            DkmInspectionContext inspectionContext,
            EvalResultDataItem parent,
            DkmClrValue value,
            int startIndex,
            int count,
            bool visitAll,
            ref int index)
        {
            if (InRange(startIndex, count, index))
            {
                rows.Add(GetRow(
                    resultProvider,
                    inspectionContext,
                    value,
                    _members,
                    parent));
            }
            index++;
        }

        // The group row is tagged "hidden" so it is not counted as a real member.
        private static readonly ReadOnlyCollection<string> s_hiddenFormatSpecifiers = new ReadOnlyCollection<string>(new[] { "hidden" });

        private static EvalResultDataItem GetRow(
            ResultProvider resultProvider,
            DkmInspectionContext inspectionContext,
            DkmClrValue value,
            Expansion expansion,
            EvalResultDataItem parent)
        {
            return new EvalResultDataItem(
                ExpansionKind.NonPublicMembers,
                name: Resources.NonPublicMembers,
                typeDeclaringMemberAndInfo: default(TypeAndCustomInfo),
                declaredTypeAndInfo: default(TypeAndCustomInfo),
                parent: null,
                value: value,
                displayValue: null,
                expansion: expansion,
                childShouldParenthesize: parent.ChildShouldParenthesize,
                fullName: parent.FullNameWithoutFormatSpecifiers,
                childFullNamePrefixOpt: parent.ChildFullNamePrefix,
                formatSpecifiers: s_hiddenFormatSpecifiers,
                category: DkmEvaluationResultCategory.Data,
                flags: DkmEvaluationResultFlags.ReadOnly,
                editableValue: null,
                inspectionContext: inspectionContext);
        }
    }

    /// <summary>
    /// A transition from an instance of a type to the type itself (for inspecting static members).
    /// </summary>
    private sealed class StaticMembersExpansion : Expansion
    {
        private readonly Type _runtimeType;
        private readonly Expansion _members;

        internal StaticMembersExpansion(Type runtimeType, Expansion members)
        {
            _runtimeType = runtimeType;
            _members = members;
        }

        // Contributes a single "Static members" row (when it falls in the
        // requested window) whose children are the wrapped member expansion.
        internal override void GetRows(
            ResultProvider resultProvider,
            ArrayBuilder<EvalResultDataItem> rows,
            DkmInspectionContext inspectionContext,
            EvalResultDataItem parent,
            DkmClrValue value,
            int startIndex,
            int count,
            bool visitAll,
            ref int index)
        {
            if (InRange(startIndex, count, index))
            {
                rows.Add(GetRow(
                    resultProvider,
                    inspectionContext,
                    new TypeAndCustomInfo(_runtimeType),
                    value,
                    _members));
            }
            index++;
        }

        private static EvalResultDataItem GetRow(
            ResultProvider resultProvider,
            DkmInspectionContext inspectionContext,
            TypeAndCustomInfo declaredTypeAndInfo,
            DkmClrValue value,
            Expansion expansion)
        {
            var formatter = resultProvider.Formatter;
            bool sawInvalidIdentifier;
            // The row's full name is the type name itself; children are qualified by it.
            var fullName = formatter.GetTypeName(declaredTypeAndInfo, escapeKeywordIdentifiers: true, sawInvalidIdentifier: out sawInvalidIdentifier);
            if (sawInvalidIdentifier)
            {
                fullName = null;
            }
            return new EvalResultDataItem(
                ExpansionKind.StaticMembers,
                name: formatter.StaticMembersString,
                typeDeclaringMemberAndInfo: default(TypeAndCustomInfo),
                declaredTypeAndInfo: declaredTypeAndInfo,
                parent: null,
                value: value,
                displayValue: null,
                expansion: expansion,
                childShouldParenthesize: false,
                fullName: fullName,
                childFullNamePrefixOpt: fullName,
                formatSpecifiers: Formatter.NoFormatSpecifiers,
                category: DkmEvaluationResultCategory.Class,
                flags: DkmEvaluationResultFlags.ReadOnly,
                editableValue: null,
                inspectionContext: inspectionContext);
        }
    }

    /// <summary>
    /// Builds the data item for a single member: resolves the member's display name
    /// (including explicit interface implementations), substitutes dynamic flags into
    /// its type info, and computes a parseable full-name expression when possible.
    /// </summary>
    internal static EvalResultDataItem CreateMemberDataItem(
        ResultProvider resultProvider,
        DkmInspectionContext inspectionContext,
        MemberAndDeclarationInfo member,
        DkmClrValue memberValue,
        EvalResultDataItem parent,
        DynamicFlagsMap dynamicFlagsMap,
        ExpansionFlags flags)
    {
        var declaredType = member.Type;
        var declaredTypeInfo = dynamicFlagsMap.SubstituteDynamicFlags(member.OriginalDefinitionType, DynamicFlagsCustomTypeInfo.Create(member.TypeInfo)).GetCustomTypeInfo();
        string memberName;
        // Considering, we're not handling the case of a member inherited from a generic base type.
        var typeDeclaringMember = member.GetExplicitlyImplementedInterface(out memberName) ?? member.DeclaringType;
        var typeDeclaringMemberInfo = typeDeclaringMember.IsInterface
            ? dynamicFlagsMap.SubstituteDynamicFlags(typeDeclaringMember.GetInterfaceListEntry(member.DeclaringType), originalDynamicFlags: default(DynamicFlagsCustomTypeInfo)).GetCustomTypeInfo()
            : null;
        var formatter = resultProvider.Formatter;
        bool sawInvalidIdentifier;
        memberName = formatter.GetIdentifierEscapingPotentialKeywords(memberName, out sawInvalidIdentifier);
        // An unescapable identifier would make the full name unparseable, so omit it.
        var fullName = sawInvalidIdentifier
            ? null
            : MakeFullName(
                formatter,
                memberName,
                new TypeAndCustomInfo(typeDeclaringMember, typeDeclaringMemberInfo), // Note: Won't include DynamicAttribute.
                member.RequiresExplicitCast,
                member.IsStatic,
                parent);
        return resultProvider.CreateDataItem(
            inspectionContext,
            memberName,
            typeDeclaringMemberAndInfo: (member.IncludeTypeInMemberName || typeDeclaringMember.IsInterface) ? new TypeAndCustomInfo(typeDeclaringMember, typeDeclaringMemberInfo) : default(TypeAndCustomInfo), // Note: Won't include DynamicAttribute.
            declaredTypeAndInfo: new TypeAndCustomInfo(declaredType, declaredTypeInfo),
            value: memberValue,
            parent: parent,
            expansionFlags: flags,
            childShouldParenthesize: false,
            fullName: fullName,
            formatSpecifiers: Formatter.NoFormatSpecifiers,
            category: DkmEvaluationResultCategory.Other,
            flags: memberValue.EvalFlags,
            evalFlags: DkmEvaluationFlags.None);
    }

    /// <summary>
    /// Computes a watch-window full-name expression for a member access on
    /// <paramref name="parent"/>, qualifying with the type name for statics, a cast
    /// for members requiring an explicit cast, and an interface cast for explicit
    /// interface implementations. Returns null when no parseable name exists.
    /// </summary>
    private static string MakeFullName(
        Formatter formatter,
        string name,
        TypeAndCustomInfo typeDeclaringMemberAndInfo,
        bool memberAccessRequiresExplicitCast,
        bool memberIsStatic,
        EvalResultDataItem parent)
    {
        // If the parent is an exception thrown during evaluation,
        // there is no valid fullname expression for the child.
        if (parent.Value.EvalFlags.Includes(DkmEvaluationResultFlags.ExceptionThrown))
        {
            return null;
        }
        var parentFullName = parent.ChildFullNamePrefix;
        if (parentFullName == null)
        {
            return null;
        }
        if (parent.ChildShouldParenthesize)
        {
            parentFullName = $"({parentFullName})";
        }
        bool sawInvalidIdentifier;
        var typeDeclaringMember = typeDeclaringMemberAndInfo.Type;
        if (!typeDeclaringMember.IsInterface)
        {
            string qualifier;
            if (memberIsStatic)
            {
                qualifier = formatter.GetTypeName(typeDeclaringMemberAndInfo, escapeKeywordIdentifiers: true, sawInvalidIdentifier: out sawInvalidIdentifier);
                if (sawInvalidIdentifier)
                {
                    return null; // FullName wouldn't be parseable.
                }
            }
            else if (memberAccessRequiresExplicitCast)
            {
                var typeName = formatter.GetTypeName(typeDeclaringMemberAndInfo, escapeKeywordIdentifiers: true, sawInvalidIdentifier: out sawInvalidIdentifier);
                if (sawInvalidIdentifier)
                {
                    return null; // FullName wouldn't be parseable.
                }
                qualifier = formatter.GetCastExpression(
                    parentFullName,
                    typeName,
                    parenthesizeEntireExpression: true);
            }
            else
            {
                qualifier = parentFullName;
            }
            return $"{qualifier}.{name}";
        }
        else
        {
            // NOTE: This should never interact with debugger proxy types:
            // 1) Interfaces cannot have debugger proxy types.
            // 2) Debugger proxy types cannot be interfaces.
            if (typeDeclaringMember.Equals(parent.DeclaredTypeAndInfo.Type))
            {
                return $"{parentFullName}.{name}";
            }
            else
            {
                var interfaceName = formatter.GetTypeName(typeDeclaringMemberAndInfo, escapeKeywordIdentifiers: true, sawInvalidIdentifier: out sawInvalidIdentifier);
                if (sawInvalidIdentifier)
                {
                    return null; // FullName wouldn't be parseable.
                }
                return $"(({interfaceName}){parentFullName}).{name}";
            }
        }
    }
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#define CONTRACTS_FULL
using System;
using System.Diagnostics.Contracts;
using Microsoft.Research.ClousotRegression;
namespace TestPostconditionInference
{
/// <summary>
/// Clousot regression tests verifying that inferred postcondition ranges are
/// filtered out when they merely restate the full range of the result type.
/// NOTE: the RegressionOutcome attributes pin exact IL offsets — do not change
/// the method bodies without re-running the regression tool.
/// </summary>
public class TestRangesAreFiltered
{
// Identity over int: only the equality postcondition should be inferred,
// not a redundant int.MinValue..int.MaxValue range.
[ClousotRegressionTest]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Int32>() == x);")]
public static int IntPostcondition(int x)
{
return x;
}
[ClousotRegressionTest]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Int64>() == x);")]
public static long LongPostcondition(long x)
{
return x;
}
[ClousotRegressionTest]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.UInt32>() == u);")]
public static UInt32 UInt32Postcondition(uint u)
{
return u;
}
[ClousotRegressionTest]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.UInt64>() == u);")]
public static UInt64 UInt64Postcondition(UInt64 u)
{
return u;
}
[ClousotRegressionTest]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.SByte>() == s);")]
public static SByte SBytePostcondition(SByte s)
{
return s;
}
public long _field;
// Field assignment: the analyzer should infer the field-equality postcondition.
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome("Contract.Ensures(z == this._field);")]
public void SetField(long z)
{
this._field = z;
}
}
/// <summary>
/// Regression test: an explicitly written Ensures should be proven valid and
/// not re-suggested as a redundant inferred postcondition.
/// </summary>
public class FilterRedundantPostcondition
{
public string applicationVersion;
public string ApplicationVersion
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as field receiver)",PrimaryILOffset=22,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as field receiver)",PrimaryILOffset=6,MethodILOffset=27)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="ensures is valid",PrimaryILOffset=16,MethodILOffset=27)]
get
{
Contract.Ensures(Contract.Result<string>() == applicationVersion);
return applicationVersion;
}
}
extern string GetApplicationVersion();
}
/// <summary>
/// Regression tests for range inference on floating-point results: a double
/// produced from a long gets the long's range; an arbitrary double gets none.
/// </summary>
public class TimeSpan
{
public long _ticks;
public double TotalDays
{
// We should emit a range, as this implies that the result is not NaN
// Also the range is the one of longs, which is included in double
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=1,MethodILOffset=0)]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Double>() == (double)(this._ticks) * 1.15740740740741E-12);")]
[RegressionOutcome("Contract.Ensures(-9223372036854775808 <= Contract.Result<System.Double>());")]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Double>() <= 9223372036854775807);")]
get
{
return (this._ticks * 1.1574074074074074E-12);
}
}
// Here we should not emit a range, as d may be NaN or +oo or -oo
[ClousotRegressionTest]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Double>() == d);")]
public static double DoubleInPostcondition(double d)
{
return d;
}
}
#pragma warning disable 0626
/// <summary>
/// Regression test: a long produced by widening an int should be given the
/// int range (narrower than the full range of long).
/// </summary>
public class Array
{
extern int Length { get; }
// We cast from an Int32, so we should get a smaller interval than the range of long
public long LongLength
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as receiver)",PrimaryILOffset=1,MethodILOffset=0)]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Int64>() == (long)(this.Length));")]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Int64>() == this.Length);")]
[RegressionOutcome("Contract.Ensures(-2147483648 <= Contract.Result<System.Int64>());")]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Int64>() <= 2147483647);")]
get
{
return (long)this.Length;
}
}
}
/// <summary>Helper struct holding an array field, used by Tmp.RemoveSimpleAccess below.</summary>
public struct TmpStruct
{
public int[] arr;
}
/// <summary>
/// Regression test: array accesses through fields should produce the expected
/// bound-check outcomes and null-array warnings, without suggesting redundant
/// Length >= 0 preconditions. IL offsets in the attributes are pinned.
/// </summary>
public class Tmp
{
public int[] arr;
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="Lower bound access ok",PrimaryILOffset=38,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="Upper bound access ok",PrimaryILOffset=38,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="Lower bound access ok",PrimaryILOffset=69,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="Upper bound access ok",PrimaryILOffset=69,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="Lower bound access ok",PrimaryILOffset=95,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="Upper bound access ok",PrimaryILOffset=95,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as field receiver)",PrimaryILOffset=47,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message="Possible use of a null array 't.arr'",PrimaryILOffset=52,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as field receiver)",PrimaryILOffset=32,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as array)",PrimaryILOffset=38,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message="Possible use of a null array 'tstruct.arr'. The static checker determined that the condition 'tstruct.arr != null' should hold on entry. Nevertheless, the condition may be too strong for the callers. If you think it is ok, add a precondition to document it: Contract.Requires(tstruct.arr != null);",PrimaryILOffset=84,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as array)",PrimaryILOffset=69,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as array)",PrimaryILOffset=104,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as array)",PrimaryILOffset=95,MethodILOffset=0)]
// Should not suggest t.arr.Length >= 0 and tstruct.arr.Length >= 0
[RegressionOutcome("Contract.Requires(t.arr != null);")]
[RegressionOutcome("Contract.Ensures(t.arr != null);")]
[RegressionOutcome("Contract.Ensures(tstruct.arr != null);")]
public static int RemoveSimpleAccess(Tmp t, TmpStruct tstruct, int[] p)
{
Contract.Requires(t != null);
Contract.Requires(p != null);
int sum = 0;
for (var i = 0; i < t.arr.Length; i++)
{
sum += t.arr[i];
}
for (var i = 0; i < tstruct.arr.Length; i++)
{
sum += tstruct.arr[i];
}
for (var i = 0; i < p.Length; i++)
{
sum += p[i];
}
return sum;
}
}
/// <summary>Minimal placeholder type used as the filter expression in ExpandSegment.</summary>
public class Expression { }
/// <summary>
/// Regression test: a getter returning a readonly field should yield a simple
/// equality postcondition, and a boolean derived from it should not produce
/// the old "((x == null) == 0)" form.
/// </summary>
public class ExpandSegment
{
private readonly Expression filter;
public ExpandSegment(Expression filter)
{
this.filter = filter;
}
public Expression Filter
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as field receiver)",PrimaryILOffset=1,MethodILOffset=0)]
[RegressionOutcome("Contract.Ensures(Contract.Result<TestPostconditionInference.Expression>() == this.filter);")]
get
{
return this.filter;
}
}
public bool HasFilter
{
// We used to infer Contract.Result<bool>() == ((this.filter == null) == 0))
[RegressionOutcome(Outcome=ProofOutcome.True,Message="valid non-null reference (as receiver)",PrimaryILOffset=1,MethodILOffset=0)]
[RegressionOutcome("Contract.Ensures(Contract.Result<System.Boolean>() == ((this.Filter == null) == false));")]
[RegressionOutcome("Contract.Ensures(this.Filter == this.filter);")]
[ClousotRegressionTest]
get
{
return (this.Filter != null);
}
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the PnMensaje class.
/// </summary>
[Serializable]
public partial class PnMensajeCollection : ActiveList<PnMensaje, PnMensajeCollection>
{
    public PnMensajeCollection() {}

    /// <summary>
    /// Filters an existing collection based on the set criteria. This is an in-memory filter.
    /// Thanks to developingchris for this!
    /// Fixed: previously threw NullReferenceException when a compared property
    /// value was null, or when a Where referenced a non-existent property.
    /// </summary>
    /// <returns>PnMensajeCollection</returns>
    public PnMensajeCollection Filter()
    {
        // Iterate backwards so removing items does not disturb the remaining indices.
        for (int i = this.Count - 1; i > -1; i--)
        {
            PnMensaje o = this[i];
            foreach (SubSonic.Where w in this.wheres)
            {
                bool remove = false;
                System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                // Guard against a Where whose ColumnName matches no property (pi == null).
                if (pi != null && pi.CanRead)
                {
                    object val = pi.GetValue(o, null);
                    switch (w.Comparison)
                    {
                        case SubSonic.Comparison.Equals:
                            // object.Equals is null-safe, unlike val.Equals(...).
                            if (!object.Equals(val, w.ParameterValue))
                            {
                                remove = true;
                            }
                            break;
                    }
                }
                if (remove)
                {
                    this.Remove(o);
                    break; // item removed; no need to test remaining criteria
                }
            }
        }
        return this;
    }
}
/// <summary>
/// This is an ActiveRecord class which wraps the PN_mensajes table.
/// </summary>
[Serializable]
public partial class PnMensaje : ActiveRecord<PnMensaje>, IActiveRecord
{
#region .ctors and Default Settings
/// <summary>Creates a new, empty record marked as new.</summary>
public PnMensaje()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
// Applies code-side column defaults.
private void InitSetDefaults() { SetDefaults(); }
/// <summary>Creates a new record, optionally forcing database-side defaults.</summary>
public PnMensaje(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
/// <summary>Loads the record with the given primary key value.</summary>
public PnMensaje(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
/// <summary>Loads the first record where columnName equals columnValue.</summary>
public PnMensaje(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
// Ensures the static table schema is built before any instance work.
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
/// <summary>Creates a new Query bound to this table's schema.</summary>
public static Query CreateQuery() { return new Query(Schema); }
/// <summary>The table schema, built lazily on first access.</summary>
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
/// <summary>
/// Builds the schema for the PN_mensajes table and registers it with the
/// "sicProvider" data provider. Column order matters: the typed-column
/// accessors below index Schema.Columns by position.
/// </summary>
private static void GetTableSchema()
{
    if(!IsSchemaInitialized)
    {
        //Schema declaration
        TableSchema.Table schema = new TableSchema.Table("PN_mensajes", TableType.Table, DataService.GetInstance("sicProvider"));
        schema.Columns = new TableSchema.TableColumnCollection();
        schema.SchemaName = @"dbo";
        //columns (order must match the typed-column index accessors)
        AddSchemaColumn(schema, "id_mensaje", DbType.Int32, 0, true, false, true);
        AddSchemaColumn(schema, "tipo1", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "tipo2", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "numero", DbType.Int32, 0, false, true, false);
        AddSchemaColumn(schema, "usuario_origen", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "comentario", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "usuario_destino", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "fecha_entrega", DbType.DateTime, 0, false, true, false);
        AddSchemaColumn(schema, "fecha_recibo", DbType.DateTime, 0, false, true, false);
        AddSchemaColumn(schema, "fecha_vencimiento", DbType.DateTime, 0, false, true, false);
        AddSchemaColumn(schema, "fecha_terminado", DbType.DateTime, 0, false, true, false);
        AddSchemaColumn(schema, "nro_orden", DbType.Int32, 0, false, true, false);
        AddSchemaColumn(schema, "recibido", DbType.AnsiString, 5, false, true, false);
        AddSchemaColumn(schema, "terminado", DbType.AnsiString, 5, false, true, false);
        AddSchemaColumn(schema, "desestimado", DbType.AnsiString, 5, false, true, false);
        AddSchemaColumn(schema, "titulo", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "estado_final", DbType.AnsiString, -1, false, true, false);
        AddSchemaColumn(schema, "usuario_finaliza", DbType.AnsiString, -1, false, true, false);
        BaseSchema = schema;
        //add this schema to the provider
        //so we can query it later
        DataService.Providers["sicProvider"].AddSchema("PN_mensajes",schema);
    }
}

/// <summary>
/// Creates one table column with the given settings and appends it to the schema.
/// Every column in this table is non-foreign-key, writable, with an empty
/// default setting, so those values are fixed here.
/// </summary>
private static void AddSchemaColumn(TableSchema.Table schema, string columnName, DbType dataType, int maxLength, bool autoIncrement, bool isNullable, bool isPrimaryKey)
{
    TableSchema.TableColumn column = new TableSchema.TableColumn(schema);
    column.ColumnName = columnName;
    column.DataType = dataType;
    column.MaxLength = maxLength;
    column.AutoIncrement = autoIncrement;
    column.IsNullable = isNullable;
    column.IsPrimaryKey = isPrimaryKey;
    column.IsForeignKey = false;
    column.IsReadOnly = false;
    column.DefaultSetting = @"";
    column.ForeignKeyTableName = "";
    schema.Columns.Add(column);
}
#endregion
#region Props
// Strongly-typed wrappers over the underlying column values; each getter/setter
// delegates to the ActiveRecord column-value store using the raw column name.
[XmlAttribute("IdMensaje")]
[Bindable(true)]
public int IdMensaje
{
get { return GetColumnValue<int>(Columns.IdMensaje); }
set { SetColumnValue(Columns.IdMensaje, value); }
}
[XmlAttribute("Tipo1")]
[Bindable(true)]
public string Tipo1
{
get { return GetColumnValue<string>(Columns.Tipo1); }
set { SetColumnValue(Columns.Tipo1, value); }
}
[XmlAttribute("Tipo2")]
[Bindable(true)]
public string Tipo2
{
get { return GetColumnValue<string>(Columns.Tipo2); }
set { SetColumnValue(Columns.Tipo2, value); }
}
[XmlAttribute("Numero")]
[Bindable(true)]
public int? Numero
{
get { return GetColumnValue<int?>(Columns.Numero); }
set { SetColumnValue(Columns.Numero, value); }
}
[XmlAttribute("UsuarioOrigen")]
[Bindable(true)]
public string UsuarioOrigen
{
get { return GetColumnValue<string>(Columns.UsuarioOrigen); }
set { SetColumnValue(Columns.UsuarioOrigen, value); }
}
[XmlAttribute("Comentario")]
[Bindable(true)]
public string Comentario
{
get { return GetColumnValue<string>(Columns.Comentario); }
set { SetColumnValue(Columns.Comentario, value); }
}
[XmlAttribute("UsuarioDestino")]
[Bindable(true)]
public string UsuarioDestino
{
get { return GetColumnValue<string>(Columns.UsuarioDestino); }
set { SetColumnValue(Columns.UsuarioDestino, value); }
}
[XmlAttribute("FechaEntrega")]
[Bindable(true)]
public DateTime? FechaEntrega
{
get { return GetColumnValue<DateTime?>(Columns.FechaEntrega); }
set { SetColumnValue(Columns.FechaEntrega, value); }
}
[XmlAttribute("FechaRecibo")]
[Bindable(true)]
public DateTime? FechaRecibo
{
get { return GetColumnValue<DateTime?>(Columns.FechaRecibo); }
set { SetColumnValue(Columns.FechaRecibo, value); }
}
[XmlAttribute("FechaVencimiento")]
[Bindable(true)]
public DateTime? FechaVencimiento
{
get { return GetColumnValue<DateTime?>(Columns.FechaVencimiento); }
set { SetColumnValue(Columns.FechaVencimiento, value); }
}
[XmlAttribute("FechaTerminado")]
[Bindable(true)]
public DateTime? FechaTerminado
{
get { return GetColumnValue<DateTime?>(Columns.FechaTerminado); }
set { SetColumnValue(Columns.FechaTerminado, value); }
}
[XmlAttribute("NroOrden")]
[Bindable(true)]
public int? NroOrden
{
get { return GetColumnValue<int?>(Columns.NroOrden); }
set { SetColumnValue(Columns.NroOrden, value); }
}
[XmlAttribute("Recibido")]
[Bindable(true)]
public string Recibido
{
get { return GetColumnValue<string>(Columns.Recibido); }
set { SetColumnValue(Columns.Recibido, value); }
}
[XmlAttribute("Terminado")]
[Bindable(true)]
public string Terminado
{
get { return GetColumnValue<string>(Columns.Terminado); }
set { SetColumnValue(Columns.Terminado, value); }
}
[XmlAttribute("Desestimado")]
[Bindable(true)]
public string Desestimado
{
get { return GetColumnValue<string>(Columns.Desestimado); }
set { SetColumnValue(Columns.Desestimado, value); }
}
[XmlAttribute("Titulo")]
[Bindable(true)]
public string Titulo
{
get { return GetColumnValue<string>(Columns.Titulo); }
set { SetColumnValue(Columns.Titulo, value); }
}
[XmlAttribute("EstadoFinal")]
[Bindable(true)]
public string EstadoFinal
{
get { return GetColumnValue<string>(Columns.EstadoFinal); }
set { SetColumnValue(Columns.EstadoFinal, value); }
}
[XmlAttribute("UsuarioFinaliza")]
[Bindable(true)]
public string UsuarioFinaliza
{
get { return GetColumnValue<string>(Columns.UsuarioFinaliza); }
set { SetColumnValue(Columns.UsuarioFinaliza, value); }
}
#endregion
//no foreign key tables defined (0)
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source.
/// The record is saved under the current web user's name when running inside
/// an HTTP request, otherwise under the current thread principal's name.
/// </summary>
public static void Insert(string varTipo1,string varTipo2,int? varNumero,string varUsuarioOrigen,string varComentario,string varUsuarioDestino,DateTime? varFechaEntrega,DateTime? varFechaRecibo,DateTime? varFechaVencimiento,DateTime? varFechaTerminado,int? varNroOrden,string varRecibido,string varTerminado,string varDesestimado,string varTitulo,string varEstadoFinal,string varUsuarioFinaliza)
{
PnMensaje item = new PnMensaje();
item.Tipo1 = varTipo1;
item.Tipo2 = varTipo2;
item.Numero = varNumero;
item.UsuarioOrigen = varUsuarioOrigen;
item.Comentario = varComentario;
item.UsuarioDestino = varUsuarioDestino;
item.FechaEntrega = varFechaEntrega;
item.FechaRecibo = varFechaRecibo;
item.FechaVencimiento = varFechaVencimiento;
item.FechaTerminado = varFechaTerminado;
item.NroOrden = varNroOrden;
item.Recibido = varRecibido;
item.Terminado = varTerminado;
item.Desestimado = varDesestimado;
item.Titulo = varTitulo;
item.EstadoFinal = varEstadoFinal;
item.UsuarioFinaliza = varUsuarioFinaliza;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source.
/// IsNew is cleared so Save issues an UPDATE keyed on varIdMensaje
/// rather than an INSERT.
/// </summary>
public static void Update(int varIdMensaje,string varTipo1,string varTipo2,int? varNumero,string varUsuarioOrigen,string varComentario,string varUsuarioDestino,DateTime? varFechaEntrega,DateTime? varFechaRecibo,DateTime? varFechaVencimiento,DateTime? varFechaTerminado,int? varNroOrden,string varRecibido,string varTerminado,string varDesestimado,string varTitulo,string varEstadoFinal,string varUsuarioFinaliza)
{
PnMensaje item = new PnMensaje();
item.IdMensaje = varIdMensaje;
item.Tipo1 = varTipo1;
item.Tipo2 = varTipo2;
item.Numero = varNumero;
item.UsuarioOrigen = varUsuarioOrigen;
item.Comentario = varComentario;
item.UsuarioDestino = varUsuarioDestino;
item.FechaEntrega = varFechaEntrega;
item.FechaRecibo = varFechaRecibo;
item.FechaVencimiento = varFechaVencimiento;
item.FechaTerminado = varFechaTerminado;
item.NroOrden = varNroOrden;
item.Recibido = varRecibido;
item.Terminado = varTerminado;
item.Desestimado = varDesestimado;
item.Titulo = varTitulo;
item.EstadoFinal = varEstadoFinal;
item.UsuarioFinaliza = varUsuarioFinaliza;
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
// Positional accessors into Schema.Columns; the indices must match the order
// in which GetTableSchema adds the columns.
public static TableSchema.TableColumn IdMensajeColumn
{
get { return Schema.Columns[0]; }
}
public static TableSchema.TableColumn Tipo1Column
{
get { return Schema.Columns[1]; }
}
public static TableSchema.TableColumn Tipo2Column
{
get { return Schema.Columns[2]; }
}
public static TableSchema.TableColumn NumeroColumn
{
get { return Schema.Columns[3]; }
}
public static TableSchema.TableColumn UsuarioOrigenColumn
{
get { return Schema.Columns[4]; }
}
public static TableSchema.TableColumn ComentarioColumn
{
get { return Schema.Columns[5]; }
}
public static TableSchema.TableColumn UsuarioDestinoColumn
{
get { return Schema.Columns[6]; }
}
public static TableSchema.TableColumn FechaEntregaColumn
{
get { return Schema.Columns[7]; }
}
public static TableSchema.TableColumn FechaReciboColumn
{
get { return Schema.Columns[8]; }
}
public static TableSchema.TableColumn FechaVencimientoColumn
{
get { return Schema.Columns[9]; }
}
public static TableSchema.TableColumn FechaTerminadoColumn
{
get { return Schema.Columns[10]; }
}
public static TableSchema.TableColumn NroOrdenColumn
{
get { return Schema.Columns[11]; }
}
public static TableSchema.TableColumn RecibidoColumn
{
get { return Schema.Columns[12]; }
}
public static TableSchema.TableColumn TerminadoColumn
{
get { return Schema.Columns[13]; }
}
public static TableSchema.TableColumn DesestimadoColumn
{
get { return Schema.Columns[14]; }
}
public static TableSchema.TableColumn TituloColumn
{
get { return Schema.Columns[15]; }
}
public static TableSchema.TableColumn EstadoFinalColumn
{
get { return Schema.Columns[16]; }
}
public static TableSchema.TableColumn UsuarioFinalizaColumn
{
get { return Schema.Columns[17]; }
}
#endregion
#region Columns Struct
// Raw database column names, used as keys by the property accessors above.
public struct Columns
{
public static string IdMensaje = @"id_mensaje";
public static string Tipo1 = @"tipo1";
public static string Tipo2 = @"tipo2";
public static string Numero = @"numero";
public static string UsuarioOrigen = @"usuario_origen";
public static string Comentario = @"comentario";
public static string UsuarioDestino = @"usuario_destino";
public static string FechaEntrega = @"fecha_entrega";
public static string FechaRecibo = @"fecha_recibo";
public static string FechaVencimiento = @"fecha_vencimiento";
public static string FechaTerminado = @"fecha_terminado";
public static string NroOrden = @"nro_orden";
public static string Recibido = @"recibido";
public static string Terminado = @"terminado";
public static string Desestimado = @"desestimado";
public static string Titulo = @"titulo";
public static string EstadoFinal = @"estado_final";
public static string UsuarioFinaliza = @"usuario_finaliza";
}
#endregion
#region Update PK Collections
#endregion
#region Deep Save
#endregion
}
}
| |
#region PDFsharp - A .NET library for processing PDF
//
// Authors:
// Stefan Lange (mailto:Stefan.Lange@pdfsharp.com)
//
// Copyright (c) 2005-2009 empira Software GmbH, Cologne (Germany)
//
// http://www.pdfsharp.com
// http://sourceforge.net/projects/pdfsharp
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Diagnostics;
using System.IO;
#if GDI
using System.Drawing;
using System.Drawing.Drawing2D;
#endif
#if WPF
using System.Windows.Media;
#endif
#if __IOS__
using UIKit;
#endif
using PdfSharp.Pdf;
using PdfSharp.Fonts.OpenType;
using System.Linq;
// WPFHACK
#pragma warning disable 162
namespace PdfSharp.Drawing
{
/// <summary>
/// Defines a group of type faces having a similar basic design and certain variations in styles.
/// </summary>
public sealed class XFontFamily
{
// Parameterless stub; leaves 'name' unset. Used internally only.
internal XFontFamily() { }
#if GDI
// Wraps an existing GDI+ FontFamily. When WPF is also compiled in, a WPF
// twin is created from the same family name so the dual-path metric
// methods below can cross-check both back ends.
internal XFontFamily(System.Drawing.FontFamily family)
{
this.name = family.Name;
this.gdiFamily = family;
#if WPF
this.wpfFamily = new System.Windows.Media.FontFamily(family.Name);
#endif
}
#endif
#if WPF
// Wraps an existing WPF FontFamily; mirrors the GDI+ constructor above.
internal XFontFamily(System.Windows.Media.FontFamily family)
{
this.name = family.Source;
// HACK: a WPF family Source can look like "path#FamilyName";
// keep only the part after the last '#'.
int idxHash = this.name.LastIndexOf('#');
if (idxHash > 0)
this.name = this.name.Substring(idxHash + 1);
this.wpfFamily = family;
#if GDI
this.gdiFamily = new System.Drawing.FontFamily(family.Source);
#endif
}
#endif
//internal FontFamily();
//public FontFamily(GenericFontFamilies genericFamily);
//internal FontFamily(IntPtr family);
/// <summary>
/// Initializes a new instance of the <see cref="XFontFamily"/> class.
/// </summary>
/// <param name="name">The family name of a font.</param>
public XFontFamily(string name)
{
this.name = name;
#if GDI
this.gdiFamily = new System.Drawing.FontFamily(name);
#endif
#if WPF
this.wpfFamily = new System.Windows.Media.FontFamily(name);
#endif
#if __IOS__
// NOTE(review): redundant — 'name' was already assigned above.
this.name = name;
#endif
}
//public FontFamily(string name, FontCollection fontCollection);
//public override bool Equals(object obj);
/// <summary>
/// Gets the name of the font family.
/// </summary>
public string Name
{
get { return this.name; }
}
readonly string name;
/// <summary>
/// Returns the cell ascent, in design units, of the XFontFamily object of the specified style.
/// </summary>
// All metric getters below follow the same pattern: GDI-only builds use
// the GDI+ family, WPF-only builds use FontHelper, and GDI+WPF builds
// cross-check both in DEBUG and return the WPF value. The __IOS__
// branches return a hard-coded 5 — placeholder values, not real metrics.
public int GetCellAscent(XFontStyle style)
{
#if GDI && !WPF
return this.gdiFamily.GetCellAscent((FontStyle)style);
#endif
#if WPF && !GDI
return FontHelper.GetWpfValue(this, style, GWV.GetCellAscent);
#endif
#if WPF && GDI
#if DEBUG
int gdiResult = this.gdiFamily.GetCellAscent((FontStyle)style);
int wpfResult = FontHelper.GetWpfValue(this, style, GWV.GetCellAscent);
Debug.Assert(gdiResult == wpfResult, "GDI+ and WPF provides different values.");
#endif
return FontHelper.GetWpfValue(this, style, GWV.GetCellAscent);
#endif
#if __IOS__
// Placeholder metric — iOS back end not implemented.
return 5;
#endif
}
/// <summary>
/// Returns the cell descent, in design units, of the XFontFamily object of the specified style.
/// </summary>
public int GetCellDescent(XFontStyle style)
{
#if GDI && !WPF
return this.gdiFamily.GetCellDescent((FontStyle)style);
#endif
#if WPF && !GDI
return FontHelper.GetWpfValue(this, style, GWV.GetCellDescent);
#endif
#if WPF && GDI
#if DEBUG
int gdiResult = this.gdiFamily.GetCellDescent((FontStyle)style);
int wpfResult = FontHelper.GetWpfValue(this, style, GWV.GetCellDescent);
Debug.Assert(gdiResult == wpfResult, "GDI+ and WPF provides different values.");
#endif
return FontHelper.GetWpfValue(this, style, GWV.GetCellDescent);
#endif
#if __IOS__
// Placeholder metric — iOS back end not implemented.
return 5;
#endif
}
/// <summary>
/// Gets the height, in font design units, of the em square for the specified style.
/// </summary>
public int GetEmHeight(XFontStyle style)
{
#if GDI && !WPF
#if DEBUG
int gdiResult = this.gdiFamily.GetEmHeight((FontStyle)style);
//int wpfResult = FontHelper.GetWpfValue(this, style, GWV.GetEmHeight);
#endif
return this.gdiFamily.GetEmHeight((FontStyle)style);
#endif
#if WPF && !GDI
return FontHelper.GetWpfValue(this, style, GWV.GetEmHeight);
#endif
#if WPF && GDI
#if DEBUG
int gdiResult = this.gdiFamily.GetEmHeight((FontStyle)style);
int wpfResult = FontHelper.GetWpfValue(this, style, GWV.GetEmHeight);
Debug.Assert(gdiResult == wpfResult, "GDI+ and WPF provides different values.");
#endif
return FontHelper.GetWpfValue(this, style, GWV.GetEmHeight);
#endif
#if __IOS__
// Placeholder metric — iOS back end not implemented.
return 5;
#endif
}
//public override int GetHashCode();
/// <summary>
/// Returns the line spacing, in design units, of the FontFamily object of the specified style.
/// The line spacing is the vertical distance between the base lines of two consecutive lines of text.
/// </summary>
public int GetLineSpacing(XFontStyle style)
{
#if GDI && !WPF
return this.gdiFamily.GetLineSpacing((FontStyle)style);
#endif
#if WPF && !GDI
return FontHelper.GetWpfValue(this, style, GWV.GetLineSpacing);
#endif
#if WPF && GDI
#if DEBUG
int gdiResult = this.gdiFamily.GetLineSpacing((FontStyle)style);
int wpfResult = FontHelper.GetWpfValue(this, style, GWV.GetLineSpacing);
// NOTE: cross-check assert deliberately disabled here — GDI+ and WPF
// are known to disagree on line spacing for some fonts.
//Debug.Assert(gdiResult == wpfResult, "GDI+ and WPF provides different values.");
#endif
return FontHelper.GetWpfValue(this, style, GWV.GetLineSpacing);
#endif
#if __IOS__
// Placeholder metric — iOS back end not implemented.
return 5;
#endif
}
//public string GetName(int language);
/// <summary>
/// Indicates whether the specified FontStyle enumeration is available.
/// </summary>
public bool IsStyleAvailable(XFontStyle style)
{
#if GDI && !WPF
return this.gdiFamily.IsStyleAvailable((FontStyle)style);
#endif
#if WPF && !GDI
return FontHelper.IsStyleAvailable(this, style);
#endif
#if WPF && GDI
#if DEBUG
bool gdiResult = this.gdiFamily.IsStyleAvailable((FontStyle)style);
bool wpfResult = FontHelper.IsStyleAvailable(this, style);
// TODOWPF: check when fails
Debug.Assert(gdiResult == wpfResult, "GDI+ and WPF provides different values.");
#endif
return FontHelper.IsStyleAvailable(this, style);
#endif
#if __IOS__
// Placeholder — iOS back end reports no styles as available.
return false;
#endif
}
//internal void SetNative(IntPtr native);
//public override string ToString();
//
//// Properties
//private static int CurrentLanguage { get; }
/// <summary>
/// Returns an array that contains all the FontFamily objects associated with the current graphics context.
/// </summary>
public static XFontFamily[] Families
{
get
{
#if GDI
// Wrap every installed GDI+ family.
System.Drawing.FontFamily[] families = System.Drawing.FontFamily.Families;
int count = families.Length;
XFontFamily[] result = new XFontFamily[count];
for (int idx = 0; idx < count; idx++)
result[idx] = new XFontFamily(families[idx]);
return result;
#endif
#if WPF
//System.Windows.Media.Fonts.GetFontFamilies(
// TODOWPF: not very important
// NOTE(review): WPF-only builds get null here — callers must null-check.
return null;
#endif
#if __IOS__
return UIFont.FamilyNames.Select(x => new XFontFamily(x)).ToArray();
#endif
}
}
/// <summary>
/// Returns an array that contains all the FontFamily objects available for the specified
/// graphics context.
/// </summary>
public static XFontFamily[] GetFamilies(XGraphics graphics)
{
XFontFamily[] result;
#if GDI
// NOTE: the 'graphics' argument is ignored; all installed families are returned.
System.Drawing.FontFamily[] families = null;
//families = System.Drawing.FontFamily.GetFamilies(graphics.gfx);
families = System.Drawing.FontFamily.Families;
int count = families.Length;
result = new XFontFamily[count];
for (int idx = 0; idx < count; idx++)
result[idx] = new XFontFamily(families[idx]);
#endif
#if WPF
// TODOWPF: not very important
result = null;
#endif
#if __IOS__
result = Families;
#endif
return result;
}
//public static FontFamily GenericMonospace { get; }
//public static FontFamily GenericSansSerif { get; }
//public static FontFamily GenericSerif { get; }
//public string Name { get; }
#if GDI
/// <summary>
/// GDI+ object.
/// </summary>
internal System.Drawing.FontFamily gdiFamily;
#endif
#if WPF
/// <summary>
/// WPF object.
/// </summary>
internal System.Windows.Media.FontFamily wpfFamily;
#endif
#if __IOS__
// NOTE(review): appears unused — the iOS path stores the name in 'name' only.
internal string _family;
#endif
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using log4net;
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Framework.Communications;
using OpenSim.Framework.Monitoring;
using OpenSim.Services.Interfaces;
using OpenSim.Server.Base;
using OpenMetaverse;
namespace OpenSim.Services.Connectors
{
/// <summary>
/// Connector that forwards IInventoryService calls to a remote XInventory
/// service over synchronous REST form posts to "{server}/xinventory".
/// </summary>
public class XInventoryServicesConnector : BaseServiceConnector, IInventoryService
{
private static readonly ILog m_log =
LogManager.GetLogger(
MethodBase.GetCurrentMethod().DeclaringType);
/// <summary>
/// Number of requests made to the remote inventory service.
/// </summary>
public int RequestsMade { get; private set; }
private string m_ServerURI = String.Empty;
/// <summary>
/// Timeout for remote requests.
/// </summary>
/// <remarks>
/// In this case, -1 is default timeout (100 seconds), not infinite.
/// </remarks>
private int m_requestTimeoutSecs = -1;
public XInventoryServicesConnector()
{
}
public XInventoryServicesConnector(string serverURI)
{
m_ServerURI = serverURI.TrimEnd('/');
}
public XInventoryServicesConnector(IConfigSource source)
: base(source, "InventoryService")
{
Initialise(source);
}
/// <summary>
/// Reads the [InventoryService] config section and registers a pull stat
/// for the request counter.
/// </summary>
/// <exception cref="Exception">Thrown when the section or URI is missing.</exception>
public virtual void Initialise(IConfigSource source)
{
IConfig config = source.Configs["InventoryService"];
if (config == null)
{
m_log.Error("[INVENTORY CONNECTOR]: InventoryService missing from OpenSim.ini");
throw new Exception("Inventory connector init error");
}
string serviceURI = config.GetString("InventoryServerURI",
String.Empty);
if (serviceURI == String.Empty)
{
m_log.Error("[INVENTORY CONNECTOR]: No Server URI named in section InventoryService");
throw new Exception("Inventory connector init error");
}
// Trim a trailing slash for consistency with the string constructor;
// otherwise MakeRequest() would post to "...//xinventory".
m_ServerURI = serviceURI.TrimEnd('/');
m_requestTimeoutSecs = config.GetInt("RemoteRequestTimeout", m_requestTimeoutSecs);
StatsManager.RegisterStat(
new Stat(
"RequestsMade",
"Requests made",
"Number of requests made to the remote inventory service",
"requests",
"inventory",
serviceURI,
StatType.Pull,
MeasuresOfInterest.AverageChangeOverTime,
s => s.Value = RequestsMade,
StatVerbosity.Debug));
}
/// <summary>
/// Interprets a generic reply: null/empty means failure; a boolean "RESULT"
/// string is parsed; any other reply (including a non-string RESULT) counts
/// as success.
/// </summary>
private bool CheckReturn(Dictionary<string, object> ret)
{
if (ret == null)
return false;
if (ret.Count == 0)
return false;
if (ret.ContainsKey("RESULT"))
{
if (ret["RESULT"] is string)
{
bool result;
if (bool.TryParse((string)ret["RESULT"], out result))
return result;
return false;
}
}
return true;
}
public bool CreateUserInventory(UUID principalID)
{
Dictionary<string,object> ret = MakeRequest("CREATEUSERINVENTORY",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() }
});
return CheckReturn(ret);
}
/// <summary>
/// Fetches the folder skeleton for a user. Returns null on request failure.
/// </summary>
public List<InventoryFolderBase> GetInventorySkeleton(UUID principalID)
{
Dictionary<string,object> ret = MakeRequest("GETINVENTORYSKELETON",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() }
});
if (!CheckReturn(ret))
return null;
Dictionary<string, object> folders = (Dictionary<string, object>)ret["FOLDERS"];
List<InventoryFolderBase> fldrs = new List<InventoryFolderBase>();
try
{
foreach (Object o in folders.Values)
fldrs.Add(BuildFolder((Dictionary<string, object>)o));
}
catch (Exception e)
{
m_log.Error("[XINVENTORY SERVICES CONNECTOR]: Exception unwrapping folder list: ", e);
}
return fldrs;
}
public InventoryFolderBase GetRootFolder(UUID principalID)
{
Dictionary<string,object> ret = MakeRequest("GETROOTFOLDER",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() }
});
if (!CheckReturn(ret))
return null;
return BuildFolder((Dictionary<string, object>)ret["folder"]);
}
public InventoryFolderBase GetFolderForType(UUID principalID, AssetType type)
{
Dictionary<string,object> ret = MakeRequest("GETFOLDERFORTYPE",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "TYPE", ((int)type).ToString() }
});
if (!CheckReturn(ret))
return null;
return BuildFolder((Dictionary<string, object>)ret["folder"]);
}
/// <summary>
/// Fetches folders and items contained in a folder. Returns null on request
/// failure; returns a (possibly partially filled) collection if unwrapping
/// the reply throws.
/// </summary>
public InventoryCollection GetFolderContent(UUID principalID, UUID folderID)
{
InventoryCollection inventory = new InventoryCollection();
inventory.Folders = new List<InventoryFolderBase>();
inventory.Items = new List<InventoryItemBase>();
inventory.UserID = principalID;
try
{
Dictionary<string,object> ret = MakeRequest("GETFOLDERCONTENT",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "FOLDER", folderID.ToString() }
});
if (!CheckReturn(ret))
return null;
Dictionary<string,object> folders =
(Dictionary<string,object>)ret["FOLDERS"];
Dictionary<string,object> items =
(Dictionary<string,object>)ret["ITEMS"];
foreach (Object o in folders.Values) // getting the values directly, we don't care about the keys folder_i
inventory.Folders.Add(BuildFolder((Dictionary<string, object>)o));
foreach (Object o in items.Values) // getting the values directly, we don't care about the keys item_i
inventory.Items.Add(BuildItem((Dictionary<string, object>)o));
}
catch (Exception e)
{
m_log.WarnFormat("[XINVENTORY SERVICES CONNECTOR]: Exception in GetFolderContent: {0}", e.Message);
}
return inventory;
}
public List<InventoryItemBase> GetFolderItems(UUID principalID, UUID folderID)
{
Dictionary<string,object> ret = MakeRequest("GETFOLDERITEMS",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "FOLDER", folderID.ToString() }
});
if (!CheckReturn(ret))
return null;
Dictionary<string, object> items = (Dictionary<string, object>)ret["ITEMS"];
List<InventoryItemBase> fitems = new List<InventoryItemBase>();
foreach (Object o in items.Values) // getting the values directly, we don't care about the keys item_i
fitems.Add(BuildItem((Dictionary<string, object>)o));
return fitems;
}
public bool AddFolder(InventoryFolderBase folder)
{
Dictionary<string,object> ret = MakeRequest("ADDFOLDER", FolderToDictionary(folder));
return CheckReturn(ret);
}
public bool UpdateFolder(InventoryFolderBase folder)
{
Dictionary<string,object> ret = MakeRequest("UPDATEFOLDER", FolderToDictionary(folder));
return CheckReturn(ret);
}
public bool MoveFolder(InventoryFolderBase folder)
{
Dictionary<string,object> ret = MakeRequest("MOVEFOLDER",
new Dictionary<string,object> {
{ "ParentID", folder.ParentID.ToString() },
{ "ID", folder.ID.ToString() },
{ "PRINCIPAL", folder.Owner.ToString() }
});
return CheckReturn(ret);
}
public bool DeleteFolders(UUID principalID, List<UUID> folderIDs)
{
List<string> slist = new List<string>();
foreach (UUID f in folderIDs)
slist.Add(f.ToString());
Dictionary<string,object> ret = MakeRequest("DELETEFOLDERS",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "FOLDERS", slist }
});
return CheckReturn(ret);
}
public bool PurgeFolder(InventoryFolderBase folder)
{
Dictionary<string,object> ret = MakeRequest("PURGEFOLDER",
new Dictionary<string,object> {
{ "ID", folder.ID.ToString() }
});
return CheckReturn(ret);
}
public bool AddItem(InventoryItemBase item)
{
// The wire protocol requires a non-null CreatorData value.
if (item.CreatorData == null)
item.CreatorData = String.Empty;
Dictionary<string,object> ret = MakeRequest("ADDITEM", ItemToDictionary(item));
return CheckReturn(ret);
}
public bool UpdateItem(InventoryItemBase item)
{
// The wire protocol requires a non-null CreatorData value.
if (item.CreatorData == null)
item.CreatorData = String.Empty;
Dictionary<string,object> ret = MakeRequest("UPDATEITEM", ItemToDictionary(item));
return CheckReturn(ret);
}
public bool MoveItems(UUID principalID, List<InventoryItemBase> items)
{
// IDLIST/DESTLIST are parallel lists: item i moves to folder i.
List<string> idlist = new List<string>();
List<string> destlist = new List<string>();
foreach (InventoryItemBase item in items)
{
idlist.Add(item.ID.ToString());
destlist.Add(item.Folder.ToString());
}
Dictionary<string,object> ret = MakeRequest("MOVEITEMS",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "IDLIST", idlist },
{ "DESTLIST", destlist }
});
return CheckReturn(ret);
}
public bool DeleteItems(UUID principalID, List<UUID> itemIDs)
{
List<string> slist = new List<string>();
foreach (UUID f in itemIDs)
slist.Add(f.ToString());
Dictionary<string,object> ret = MakeRequest("DELETEITEMS",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "ITEMS", slist }
});
return CheckReturn(ret);
}
public InventoryItemBase GetItem(InventoryItemBase item)
{
try
{
Dictionary<string, object> ret = MakeRequest("GETITEM",
new Dictionary<string, object> {
{ "ID", item.ID.ToString() }
});
if (!CheckReturn(ret))
return null;
return BuildItem((Dictionary<string, object>)ret["item"]);
}
catch (Exception e)
{
m_log.Error("[XINVENTORY SERVICES CONNECTOR]: Exception in GetItem: ", e);
}
return null;
}
public InventoryFolderBase GetFolder(InventoryFolderBase folder)
{
try
{
Dictionary<string, object> ret = MakeRequest("GETFOLDER",
new Dictionary<string, object> {
{ "ID", folder.ID.ToString() }
});
if (!CheckReturn(ret))
return null;
return BuildFolder((Dictionary<string, object>)ret["folder"]);
}
catch (Exception e)
{
m_log.Error("[XINVENTORY SERVICES CONNECTOR]: Exception in GetFolder: ", e);
}
return null;
}
public List<InventoryItemBase> GetActiveGestures(UUID principalID)
{
Dictionary<string,object> ret = MakeRequest("GETACTIVEGESTURES",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() }
});
if (!CheckReturn(ret))
return null;
List<InventoryItemBase> items = new List<InventoryItemBase>();
foreach (Object o in ((Dictionary<string,object>)ret["ITEMS"]).Values)
items.Add(BuildItem((Dictionary<string, object>)o));
return items;
}
public int GetAssetPermissions(UUID principalID, UUID assetID)
{
Dictionary<string,object> ret = MakeRequest("GETASSETPERMISSIONS",
new Dictionary<string,object> {
{ "PRINCIPAL", principalID.ToString() },
{ "ASSET", assetID.ToString() }
});
// We cannot use CheckReturn() here because valid values for RESULT are "false" (in the case of request failure) or an int
if (ret == null)
return 0;
if (ret.ContainsKey("RESULT"))
{
if (ret["RESULT"] is string)
{
int intResult;
if (int.TryParse ((string)ret["RESULT"], out intResult))
return intResult;
}
}
return 0;
}
public bool HasInventoryForUser(UUID principalID)
{
return false;
}
// Helpers
//
/// <summary>
/// Serializes a folder into the key/value form shared by ADDFOLDER and
/// UPDATEFOLDER requests.
/// </summary>
private Dictionary<string,object> FolderToDictionary(InventoryFolderBase folder)
{
return new Dictionary<string,object> {
{ "ParentID", folder.ParentID.ToString() },
{ "Type", folder.Type.ToString() },
{ "Version", folder.Version.ToString() },
{ "Name", folder.Name.ToString() },
{ "Owner", folder.Owner.ToString() },
{ "ID", folder.ID.ToString() }
};
}
/// <summary>
/// Serializes an item into the key/value form shared by ADDITEM and
/// UPDATEITEM requests.
/// </summary>
private Dictionary<string,object> ItemToDictionary(InventoryItemBase item)
{
return new Dictionary<string,object> {
{ "AssetID", item.AssetID.ToString() },
{ "AssetType", item.AssetType.ToString() },
{ "Name", item.Name.ToString() },
{ "Owner", item.Owner.ToString() },
{ "ID", item.ID.ToString() },
{ "InvType", item.InvType.ToString() },
{ "Folder", item.Folder.ToString() },
{ "CreatorId", item.CreatorId.ToString() },
{ "CreatorData", item.CreatorData.ToString() },
{ "Description", item.Description.ToString() },
{ "NextPermissions", item.NextPermissions.ToString() },
{ "CurrentPermissions", item.CurrentPermissions.ToString() },
{ "BasePermissions", item.BasePermissions.ToString() },
{ "EveryOnePermissions", item.EveryOnePermissions.ToString() },
{ "GroupPermissions", item.GroupPermissions.ToString() },
{ "GroupID", item.GroupID.ToString() },
{ "GroupOwned", item.GroupOwned.ToString() },
{ "SalePrice", item.SalePrice.ToString() },
{ "SaleType", item.SaleType.ToString() },
{ "Flags", item.Flags.ToString() },
{ "CreationDate", item.CreationDate.ToString() }
};
}
/// <summary>
/// Posts a request to the remote service and parses the XML reply into a
/// dictionary. Increments <see cref="RequestsMade"/>.
/// </summary>
private Dictionary<string,object> MakeRequest(string method,
Dictionary<string,object> sendData)
{
// Add "METHOD" as the first key in the dictionary. This ensures that it will be
// visible even when using partial logging ("debug http all 5").
Dictionary<string, object> temp = sendData;
sendData = new Dictionary<string,object>{ { "METHOD", method } };
foreach (KeyValuePair<string, object> kvp in temp)
sendData.Add(kvp.Key, kvp.Value);
RequestsMade++;
string reply
= SynchronousRestFormsRequester.MakeRequest(
"POST", m_ServerURI + "/xinventory",
ServerUtils.BuildQueryString(sendData), m_requestTimeoutSecs, m_Auth);
Dictionary<string, object> replyData = ServerUtils.ParseXmlResponse(
reply);
return replyData;
}
/// <summary>
/// Deserializes a folder from reply data. Logs and returns a partially
/// filled folder if a field is missing or malformed.
/// </summary>
private InventoryFolderBase BuildFolder(Dictionary<string,object> data)
{
InventoryFolderBase folder = new InventoryFolderBase();
try
{
folder.ParentID = new UUID(data["ParentID"].ToString());
folder.Type = short.Parse(data["Type"].ToString());
folder.Version = ushort.Parse(data["Version"].ToString());
folder.Name = data["Name"].ToString();
folder.Owner = new UUID(data["Owner"].ToString());
folder.ID = new UUID(data["ID"].ToString());
}
catch (Exception e)
{
m_log.Error("[XINVENTORY SERVICES CONNECTOR]: Exception building folder: ", e);
}
return folder;
}
/// <summary>
/// Deserializes an item from reply data. Logs and returns a partially
/// filled item if a field is missing or malformed.
/// </summary>
private InventoryItemBase BuildItem(Dictionary<string,object> data)
{
InventoryItemBase item = new InventoryItemBase();
try
{
item.AssetID = new UUID(data["AssetID"].ToString());
item.AssetType = int.Parse(data["AssetType"].ToString());
item.Name = data["Name"].ToString();
item.Owner = new UUID(data["Owner"].ToString());
item.ID = new UUID(data["ID"].ToString());
item.InvType = int.Parse(data["InvType"].ToString());
item.Folder = new UUID(data["Folder"].ToString());
item.CreatorId = data["CreatorId"].ToString();
// CreatorData is optional on the wire; default to empty.
if (data.ContainsKey("CreatorData"))
item.CreatorData = data["CreatorData"].ToString();
else
item.CreatorData = String.Empty;
item.Description = data["Description"].ToString();
item.NextPermissions = uint.Parse(data["NextPermissions"].ToString());
item.CurrentPermissions = uint.Parse(data["CurrentPermissions"].ToString());
item.BasePermissions = uint.Parse(data["BasePermissions"].ToString());
item.EveryOnePermissions = uint.Parse(data["EveryOnePermissions"].ToString());
item.GroupPermissions = uint.Parse(data["GroupPermissions"].ToString());
item.GroupID = new UUID(data["GroupID"].ToString());
item.GroupOwned = bool.Parse(data["GroupOwned"].ToString());
item.SalePrice = int.Parse(data["SalePrice"].ToString());
item.SaleType = byte.Parse(data["SaleType"].ToString());
item.Flags = uint.Parse(data["Flags"].ToString());
item.CreationDate = int.Parse(data["CreationDate"].ToString());
}
catch (Exception e)
{
m_log.Error("[XINVENTORY CONNECTOR]: Exception building item: ", e);
}
return item;
}
}
}
| |
using Microsoft.DirectX.Direct3D;
using System;
using System.Drawing;
using System.Windows.Forms;
using TGC.Core.Direct3D;
using TGC.Core.Mathematica;
using TGC.Core.Shaders;
using TGC.Examples.Example;
using TGC.Examples.UserControls;
using TGC.Examples.UserControls.Modifier;
namespace TGC.Examples.WorkshopShaders
{
/// <summary>
/// Heightmap-based terrain mesh with per-vertex normals, rendered through a
/// parallax-occlusion-mapping effect.
/// </summary>
public class POMTerrain
{
private VertexBuffer vbTerrain;
public TGCVector3 center;
public Texture terrainTexture;
public int totalVertices;
public int[,] heightmapData;
public float scaleXZ;
public float scaleY;
public float ki;
public float kj;
public float ftex; // factor para la textura (texture tiling factor)
public POMTerrain()
{
ftex = 1f;
ki = 1;
kj = 1;
}
/// <summary>
/// Loads the heightmap bitmap and builds the triangle-list vertex buffer
/// (two triangles per heightmap cell, with face normals).
/// </summary>
public void loadHeightmap(string heightmapPath, float pscaleXZ, float pscaleY, TGCVector3 center)
{
scaleXZ = pscaleXZ;
scaleY = pscaleY;
Device d3dDevice = D3DDevice.Instance.Device;
this.center = center;
//Dispose de VertexBuffer anterior, si habia
if (vbTerrain != null && !vbTerrain.Disposed)
{
vbTerrain.Dispose();
}
//cargar heightmap
heightmapData = loadHeightMap(d3dDevice, heightmapPath);
float width = heightmapData.GetLength(0);
float length = heightmapData.GetLength(1);
// NOTE(review): this over-allocates — the loops below only fill
// (width-1)*(height-1)*6 vertices; the rest stay zeroed (degenerate
// zero-area triangles). Kept as-is to preserve the draw-call count.
totalVertices = 2 * 3 * (heightmapData.GetLength(0) + 1) * (heightmapData.GetLength(1) + 1);
totalVertices *= (int)ki * (int)kj;
// BUG FIX: the buffer stores PositionNormalTextured vertices, but the
// original passed CustomVertex.PositionTextured.Format (wrong FVF /
// stride). Use the matching format.
vbTerrain = new VertexBuffer(typeof(CustomVertex.PositionNormalTextured), totalVertices, d3dDevice, Usage.Dynamic | Usage.WriteOnly, CustomVertex.PositionNormalTextured.Format, Pool.Default);
//Cargar vertices
int dataIdx = 0;
CustomVertex.PositionNormalTextured[] data = new CustomVertex.PositionNormalTextured[totalVertices];
// Center the terrain around 'center', scaled to world units.
center.X = center.X * scaleXZ - (width / 2) * scaleXZ;
center.Y = center.Y * scaleY;
center.Z = center.Z * scaleXZ - (length / 2) * scaleXZ;
for (int i = 0; i < width - 1; i++)
{
for (int j = 0; j < length - 1; j++)
{
//Vertices of the current heightmap cell
TGCVector3 v1 = new TGCVector3(center.X + i * scaleXZ, center.Y + heightmapData[i, j] * scaleY, center.Z + j * scaleXZ);
TGCVector3 v2 = new TGCVector3(center.X + i * scaleXZ, center.Y + heightmapData[i, j + 1] * scaleY, center.Z + (j + 1) * scaleXZ);
TGCVector3 v3 = new TGCVector3(center.X + (i + 1) * scaleXZ, center.Y + heightmapData[i + 1, j] * scaleY, center.Z + j * scaleXZ);
TGCVector3 v4 = new TGCVector3(center.X + (i + 1) * scaleXZ, center.Y + heightmapData[i + 1, j + 1] * scaleY, center.Z + (j + 1) * scaleXZ);
//Coordendas de textura (scaled by ftex for tiling)
TGCVector2 t1 = new TGCVector2(ftex * i / width, ftex * j / length);
TGCVector2 t2 = new TGCVector2(ftex * i / width, ftex * (j + 1) / length);
TGCVector2 t3 = new TGCVector2(ftex * (i + 1) / width, ftex * j / length);
TGCVector2 t4 = new TGCVector2(ftex * (i + 1) / width, ftex * (j + 1) / length);
//Cargar triangulo 1 (v1, v2, v4) with its face normal
TGCVector3 n1 = TGCVector3.Cross(v2 - v1, v3 - v1);
n1.Normalize();
data[dataIdx] = new CustomVertex.PositionNormalTextured(v1, n1, t1.X, t1.Y);
data[dataIdx + 1] = new CustomVertex.PositionNormalTextured(v2, n1, t2.X, t2.Y);
data[dataIdx + 2] = new CustomVertex.PositionNormalTextured(v4, n1, t4.X, t4.Y);
//Cargar triangulo 2 (v1, v4, v3) with its face normal
TGCVector3 n2 = TGCVector3.Cross(v4 - v1, v3 - v1);
n2.Normalize();
data[dataIdx + 3] = new CustomVertex.PositionNormalTextured(v1, n2, t1.X, t1.Y);
data[dataIdx + 4] = new CustomVertex.PositionNormalTextured(v4, n2, t4.X, t4.Y);
data[dataIdx + 5] = new CustomVertex.PositionNormalTextured(v3, n2, t3.X, t3.Y);
dataIdx += 6;
}
}
vbTerrain.SetData(data, 0, LockFlags.None);
}
/// <summary>
/// Carga la textura del terreno
/// </summary>
public void loadTexture(string path)
{
//Dispose textura anterior, si habia
if (terrainTexture != null && !terrainTexture.Disposed)
{
terrainTexture.Dispose();
}
Device d3dDevice = D3DDevice.Instance.Device;
//Rotar e invertir textura so it lines up with the heightmap orientation
Bitmap b = (Bitmap)Bitmap.FromFile(path);
b.RotateFlip(RotateFlipType.Rotate90FlipX);
terrainTexture = Texture.FromBitmap(d3dDevice, b, Usage.None, Pool.Managed);
}
/// <summary>
/// Carga los valores del Heightmap en una matriz.
/// Height is taken as the luminance (Rec.601 weights) of each pixel.
/// </summary>
private int[,] loadHeightMap(Device d3dDevice, string path)
{
Bitmap bitmap = (Bitmap)Bitmap.FromFile(path);
int width = bitmap.Size.Width;
int height = bitmap.Size.Height;
int[,] heightmap = new int[width, height];
for (int i = 0; i < width; i++)
{
for (int j = 0; j < height; j++)
{
//(j, i) invertido para primero barrer filas y despues columnas
Color pixel = bitmap.GetPixel(j, i);
float intensity = pixel.R * 0.299f + pixel.G * 0.587f + pixel.B * 0.114f;
heightmap[i, j] = (int)intensity;
}
}
bitmap.Dispose();
return heightmap;
}
/// <summary>
/// Draws the terrain with the supplied effect (all passes of the current
/// technique). World/view/projection matrices are reset to identity.
/// </summary>
public void executeRender(Effect effect)
{
Device d3dDevice = D3DDevice.Instance.Device;
TGCShaders.Instance.SetShaderMatrixIdentity(effect);
//Render terrain
effect.SetValue("texDiffuseMap", terrainTexture);
d3dDevice.VertexFormat = CustomVertex.PositionNormalTextured.Format;
d3dDevice.SetStreamSource(0, vbTerrain, 0);
int numPasses = effect.Begin(0);
for (int n = 0; n < numPasses; n++)
{
effect.BeginPass(n);
d3dDevice.DrawPrimitives(PrimitiveType.TriangleList, 0, totalVertices / 3);
effect.EndPass();
}
effect.End();
}
/// <summary>
/// Returns the interpolated terrain height at world position (x, z) using
/// bilinear filtering of the four surrounding heightmap samples.
/// NOTE(review): assumes a 64x64 heightmap (hard-coded) — confirm against
/// the loaded bitmap size.
/// </summary>
public float CalcularAltura(float x, float z)
{
float largo = scaleXZ * 64;
float pos_i = 64f * (0.5f + x / largo);
float pos_j = 64f * (0.5f + z / largo);
int pi = (int)pos_i;
float fracc_i = pos_i - pi;
int pj = (int)pos_j;
float fracc_j = pos_j - pj;
// Clamp the sample indices to the heightmap bounds.
if (pi < 0)
pi = 0;
else
if (pi > 63)
pi = 63;
if (pj < 0)
pj = 0;
else
if (pj > 63)
pj = 63;
int pi1 = pi + 1;
int pj1 = pj + 1;
if (pi1 > 63)
pi1 = 63;
if (pj1 > 63)
pj1 = 63;
// 2x2 percent closest filtering usual:
float H0 = heightmapData[pi, pj] * scaleY;
float H1 = heightmapData[pi1, pj] * scaleY;
float H2 = heightmapData[pi, pj1] * scaleY;
float H3 = heightmapData[pi1, pj1] * scaleY;
float H = (H0 * (1 - fracc_i) + H1 * fracc_i) * (1 - fracc_j) +
(H2 * (1 - fracc_i) + H3 * fracc_i) * fracc_j;
return H;
}
/// <summary>
/// Releases the vertex buffer and the terrain texture.
/// </summary>
public void dispose()
{
// Guard against double-dispose, consistent with loadHeightmap/loadTexture.
if (vbTerrain != null && !vbTerrain.Disposed)
{
vbTerrain.Dispose();
}
if (terrainTexture != null && !terrainTexture.Disposed)
{
terrainTexture.Dispose();
}
}
}
public class POMTerrainSample : TGCExampleViewer
{
private TGCVertex3fModifier lightDirModifier;
private TGCFloatModifier minSampleModifier;
private TGCFloatModifier maxSampleModifier;
private TGCFloatModifier heightMapScaleModifier;
private string MyShaderDir;
private Effect effect;
private Texture g_pBaseTexture;
private Texture g_pHeightmap;
private POMTerrain terrain;
private TGCVector2 pos = TGCVector2.Zero;
private float dir_an = 0;
private float kvel = 1.0f;
private float time;
public POMTerrainSample(string mediaDir, string shadersDir, TgcUserVars userVars, Panel modifiersPanel)
: base(mediaDir, shadersDir, userVars, modifiersPanel)
{
Category = "Shaders";
Name = "Workshop-POMTerrain";
Description = "POM Terrain";
}
public override void Init()
{
time = 0f;
Device d3dDevice = D3DDevice.Instance.Device;
MyShaderDir = ShadersDir + "WorkshopShaders\\";
g_pBaseTexture = TextureLoader.FromFile(d3dDevice, MediaDir + "Texturas\\rocks.jpg");
g_pHeightmap = TextureLoader.FromFile(d3dDevice, MediaDir + "Texturas\\NM_height_rocks.tga");
//Cargar Shader
string compilationErrors;
effect = Effect.FromFile(d3dDevice, MyShaderDir + "Parallax.fx", null, null, ShaderFlags.None, null, out compilationErrors);
if (effect == null)
{
throw new Exception("Error al cargar shader. Errores: " + compilationErrors);
}
effect.Technique = "ParallaxOcclusion";
effect.SetValue("aux_Tex", g_pBaseTexture);
effect.SetValue("height_map", g_pHeightmap);
effect.SetValue("phong_lighting", true);
effect.SetValue("k_alpha", 0.75f);
lightDirModifier = AddVertex3f("LightDir", new TGCVector3(-1, -1, -1), new TGCVector3(1, 1, 1), TGCVector3.Down);
minSampleModifier = AddFloat("minSample", 1f, 10f, 10f);
maxSampleModifier = AddFloat("maxSample", 11f, 50f, 50f);
heightMapScaleModifier = AddFloat("HeightMapScale", 0.001f, 0.5f, 0.1f);
// ------------------------------------------------------------
// Creo el Heightmap para el terreno:
terrain = new POMTerrain();
terrain.ftex = 250f;
terrain.loadHeightmap(MediaDir + "Heighmaps\\" + "Heightmap3.jpg", 100f, 2.25f, new TGCVector3(0, 0, 0));
terrain.loadTexture(MediaDir + "Heighmaps\\" + "TerrainTexture3.jpg");
Camera.SetCamera(new TGCVector3(-350, 1000, -1100), new TGCVector3(0, 0, 0), TGCVector3.Up);
}
/// <summary>
/// Per-frame update: turns with the A/D keys, walks forward at constant
/// speed, and places the camera at eye height over the terrain looking
/// slightly ahead of the walker.
/// </summary>
public override void Update()
{
    Device d3dDevice = D3DDevice.Instance.Device;

    // Turn left/right with A and D (one radian per second).
    if (Input.keyDown(Microsoft.DirectX.DirectInput.Key.A))
    {
        dir_an += 1f * ElapsedTime;
    }
    if (Input.keyDown(Microsoft.DirectX.DirectInput.Key.D))
    {
        dir_an -= 1f * ElapsedTime;
    }

    // Unit heading derived from the current direction angle.
    TGCVector2 heading = new TGCVector2((float)Math.Sin(dir_an), (float)Math.Cos(dir_an));

    // Advance the walker along the heading.
    pos += heading * kvel * ElapsedTime;

    // Camera: eye at character height above the terrain, target two units ahead.
    float eyeHeight = 2.0f;
    float groundY = terrain.CalcularAltura(pos.X, pos.Y);
    TGCVector2 lookAhead = pos + heading * 2;
    TGCVector3 lookFrom = new TGCVector3(pos.X, groundY + eyeHeight, pos.Y);
    TGCVector3 lookAt = new TGCVector3(lookAhead.X, groundY + 1.5f, lookAhead.Y);
    d3dDevice.Transform.View = TGCMatrix.LookAtLH(lookFrom, lookAt, TGCVector3.Up);
    effect.SetValue("fvEyePosition", TGCVector3.TGCVector3ToFloat3Array(lookFrom));
}
protected override void PostUpdate()
{
// Intentionally empty: the camera/position work is already done in Update(),
// so the base-class PostUpdate behavior is overridden away.
}
/// <summary>
/// Renders the terrain with the parallax-occlusion technique, pushing the
/// current UI-modifier values and the accumulated time into the shader first.
/// </summary>
public override void Render()
{
Device d3dDevice = D3DDevice.Instance.Device;
time += ElapsedTime;
// Per-frame shader parameters driven by the UI modifiers.
TGCVector3 lightDir = lightDirModifier.Value;
effect.SetValue("g_LightDir", TGCVector3.TGCVector3ToFloat3Array(lightDir));
effect.SetValue("min_cant_samples", minSampleModifier.Value);
effect.SetValue("max_cant_samples", maxSampleModifier.Value);
effect.SetValue("fHeightMapScale", heightMapScaleModifier.Value);
effect.SetValue("time", time);
d3dDevice.Clear(ClearFlags.Target | ClearFlags.ZBuffer, Color.Black, 1.0f, 0);
d3dDevice.BeginScene();
// Render the terrain with POM.
effect.Technique = "ParallaxOcclusion";
terrain.executeRender(effect);
// NOTE(review): BeginScene has no matching EndScene in this method —
// presumably PostRender() ends the scene and presents; confirm in the base class.
PostRender();
}
/// <summary>
/// Releases the shader, both textures and the terrain. Each resource is
/// null-checked so disposal is safe even when Init() failed part-way
/// through (e.g. a shader compilation error is thrown before the terrain
/// is ever created, leaving some fields null).
/// </summary>
public override void Dispose()
{
    if (effect != null)
    {
        effect.Dispose();
    }
    if (g_pBaseTexture != null)
    {
        g_pBaseTexture.Dispose();
    }
    if (g_pHeightmap != null)
    {
        g_pHeightmap.Dispose();
    }
    if (terrain != null)
    {
        terrain.dispose();
    }
}
}
}
| |
using System.Collections.Generic;
using System.IO;
using Microsoft.Build.Framework;
using Microsoft.Build.Tasks;
using Microsoft.Build.Utilities;
using Xunit;
using Xunit.Abstractions;
namespace Microsoft.Build.UnitTests.ResolveAssemblyReference_Tests
{
/// <summary>
/// Unit test the cases where we need to determine if the target framework is greater than the current target framework
/// </summary>
public sealed class VerifyTargetFrameworkHigherThanRedist : ResolveAssemblyReferenceTestFixture
{
public VerifyTargetFrameworkHigherThanRedist(ITestOutputHelper output) : base(output)
{
}
/// <summary>
/// Verify there are no warnings when the assembly being resolved is not in the redist list and only has dependencies to references in the redist list with the same
/// version as is described in the redist list.
/// </summary>
[Fact]
public void TargetCurrentTargetFramework()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOnOnlyv4Assemblies")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, true);
Assert.Equal(0, e.Warnings); // "No warnings expected in this scenario."
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Single(t.ResolvedFiles);
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOnOnlyv4Assemblies.dll"))); // "Expected to find assembly, but didn't."
}
/// <summary>
/// ReferenceVersion9 depends on mscorlib 9. However the redist list only allows 4.0 since framework unification for dependencies only
/// allows upward unification this would result in a warning. Therefore we need to remap mscorlib 9 to 4.0
///
/// </summary>
[Fact]
public void RemapAssemblyBasic()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("ReferenceVersion9"),
new TaskItem("DependsOnOnlyv4Assemblies"),
new TaskItem("AnotherOne")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='mscorlib' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"<Remap>" +
"<From AssemblyName='mscorlib' Version='9.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true'>" +
" <To AssemblyName='mscorlib' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
" </From>" +
"<From AssemblyName='DependsOnOnlyv4Assemblies'>" +
" <To AssemblyName='ReferenceVersion9' Version='9.0.0.0' PublicKeyToken='b17a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' />" +
" </From>" +
"<From AssemblyName='AnotherOne'>" +
" <To AssemblyName='ReferenceVersion9' Version='9.0.0.0' PublicKeyToken='b17a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' />" +
" </From>" +
"</Remap>" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(0, e.Warnings); // "Expected NO warning in this scenario."
e.AssertLogContainsMessageFromResource(resourceDelegate, "ResolveAssemblyReference.RemappedReference", "DependsOnOnlyv4Assemblies", "ReferenceVersion9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089");
e.AssertLogContainsMessageFromResource(resourceDelegate, "ResolveAssemblyReference.RemappedReference", "AnotherOne", "ReferenceVersion9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089");
Assert.Single(t.ResolvedFiles);
Assert.Equal("AnotherOne", t.ResolvedFiles[0].GetMetadata("OriginalItemSpec"));
Assert.Equal(Path.Combine(s_myComponentsMiscPath, "ReferenceVersion9.dll"), t.ResolvedFiles[0].ItemSpec);
}
/// <summary>
/// Verify an error is emitted when the reference itself is in the redist list but is a higher version that is described in the redist list.
/// In this case ReferenceVersion9 is version=9.0.0.0 but in the redist we show its highest version as 4.0.0.0.
/// </summary>
[Fact]
public void HigherThanHighestInRedistList()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("ReferenceVersion9")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='ReferenceVersion9' Version='4.0.0.0' PublicKeyToken='b17a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(1, e.Warnings); // "Expected one warning in this scenario."
e.AssertLogContains("MSB3257");
e.AssertLogContains("ReferenceVersion9");
Assert.Empty(t.ResolvedFiles);
}
/// <summary>
/// Verify that if the reference that is higher than the highest version in the redist list is an MSBuild assembly, we do
/// not warn -- this is a hack until we figure out how to properly deal with .NET assemblies being removed from the framework.
/// </summary>
[Fact]
[Trait("Category", "mono-osx-failing")]
public void HigherThanHighestInRedistListForMSBuildAssembly()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("Microsoft.Build")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='Microsoft.Build' Version='4.0.0.0' PublicKeyToken='b03f5f7f11d50a3a' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
// Case 1: targeting 4.5 — MSBuild assemblies get a pass, resolution succeeds.
ResolveAssemblyReference t1 = new ResolveAssemblyReference();
t1.TargetFrameworkVersion = "v4.5";
ExecuteRAROnItemsAndRedist(t1, e, items, redistString, false);
Assert.Equal(0, e.Warnings); // "Expected successful resolution with no warnings."
e.AssertLogContains("Microsoft.Build.dll");
Assert.Single(t1.ResolvedFiles);
// Case 2: targeting 4.0 — the reference is unresolved with a warning.
ResolveAssemblyReference t2 = new ResolveAssemblyReference();
t2.TargetFrameworkVersion = "v4.0";
ExecuteRAROnItemsAndRedist(t2, e, items, redistString, false);
Assert.Equal(1, e.Warnings); // "Expected one warning in this scenario."
// TODO: https://github.com/Microsoft/msbuild/issues/2305
//e.AssertLogContains("Microsoft.Build.dll");
Assert.Empty(t2.ResolvedFiles);
// Case 3: targeting 4.5 with UnresolveFrameworkAssembliesFromHigherFrameworks set.
ResolveAssemblyReference t3 = new ResolveAssemblyReference();
t3.TargetFrameworkVersion = "v4.5";
t3.UnresolveFrameworkAssembliesFromHigherFrameworks = true;
ExecuteRAROnItemsAndRedist(t3, e, items, redistString, false);
Assert.Equal(1, e.Warnings); // "Expected one warning in this scenario."
// TODO: https://github.com/Microsoft/msbuild/issues/2305
// e.AssertLogContains("Microsoft.Build.dll");
// NOTE(review): this asserts on t1 (already checked above) inside the t3
// scenario — looks like a copy/paste slip; probably meant to inspect
// t3.ResolvedFiles. Confirm intent before changing.
Assert.Single(t1.ResolvedFiles);
}
/// <summary>
/// Expect no warning from a 3rd party redist list since they are not considered for multi targeting warnings.
/// </summary>
[Fact]
public void HigherThanHighestInRedistList3rdPartyRedist()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("ReferenceVersion9")
};
string redistString = "<FileList Redist='MyRandomREdist' >" +
"<File AssemblyName='mscorlib' Version='4.0.0.0' PublicKeyToken='b17a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(0, e.Warnings); // "No warnings expected: 3rd party redist lists are not considered for multi-targeting warnings."
e.AssertLogDoesntContain("MSB3257");
e.AssertLogContains("ReferenceVersion9");
Assert.Single(t.ResolvedFiles);
}
/// <summary>
/// Test the same case as above except for add the specific version metadata to ignore the warning.
/// </summary>
[Fact]
public void HigherThanHighestInRedistListWithSpecificVersionMetadata()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("ReferenceVersion9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089")
};
items[0].SetMetadata("SpecificVersion", "true");
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='ReferenceVersion9' Version='4.0.0.0' PublicKeyToken='b17a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(0, e.Warnings); // "No warnings expected in this scenario."
e.AssertLogDoesntContain("MSB3258");
e.AssertLogDoesntContain("MSB3257");
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Single(t.ResolvedFiles);
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "ReferenceVersion9.dll"))); // "Expected to find assembly, but didn't."
}
/// <summary>
/// Verify the case where the assembly itself is not in the redist list but it depends on an assembly which is in the redist list and is a higher version that what is listed in the redist list.
/// In this case the assembly DependsOn9 depends on System 9.0.0.0 while the redist list only goes up to 4.0.0.0.
/// </summary>
[Fact]
public void DependenciesHigherThanHighestInRedistList()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOn9")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"<File AssemblyName='System.Data' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(2, e.Warnings); // "Expected two warnings, one per out-of-framework dependency."
e.AssertLogContains(t.Log.FormatResourceString("ResolveAssemblyReference.DependencyReferenceOutsideOfFramework", "DependsOn9", "System, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089", "9.0.0.0", "4.0.0.0"));
e.AssertLogContains(t.Log.FormatResourceString("ResolveAssemblyReference.DependencyReferenceOutsideOfFramework", "DependsOn9", "System.Data, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089", "9.0.0.0", "4.0.0.0"));
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Empty(t.ResolvedFiles);
}
/// <summary>
/// Verify that if the reference that is higher than the highest version in the redist list is an MSBuild assembly, we do
/// not warn -- this is a hack until we figure out how to properly deal with .NET assemblies being removed from the framework.
/// </summary>
[Fact]
[Trait("Category", "mono-osx-failing")]
public void DependenciesHigherThanHighestInRedistListForMSBuildAssembly()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOnMSBuild12")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='Microsoft.Build' Version='4.0.0.0' PublicKeyToken='b03f5f7f11d50a3a' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
// Case 1: targeting 5.0 — the MSBuild dependency resolves cleanly.
ResolveAssemblyReference t1 = new ResolveAssemblyReference();
t1.TargetFrameworkVersion = "v5.0";
ExecuteRAROnItemsAndRedist(t1, e, items, redistString, false);
Assert.Equal(0, e.Warnings); // "Expected successful resolution with no warnings."
e.AssertLogContains("DependsOnMSBuild12");
e.AssertLogContains("Microsoft.Build.dll");
Assert.Single(t1.ResolvedFiles);
// Case 2: targeting 4.0 — the reference is unresolved with a warning.
ResolveAssemblyReference t2 = new ResolveAssemblyReference();
t2.TargetFrameworkVersion = "v4.0";
ExecuteRAROnItemsAndRedist(t2, e, items, redistString, false);
Assert.Equal(1, e.Warnings); // "Expected one warning in this scenario"
e.AssertLogContains("DependsOnMSBuild12");
// TODO: https://github.com/Microsoft/msbuild/issues/2305
// e.AssertLogContains("Microsoft.Build.dll");
Assert.Empty(t2.ResolvedFiles);
// Case 3: t3.TargetFrameworkVersion is left null.
ResolveAssemblyReference t3 = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t3, e, items, redistString, false);
Assert.Equal(1, e.Warnings); // "Expected one warning in this scenario"
e.AssertLogContains("DependsOnMSBuild12");
// TODO: https://github.com/Microsoft/msbuild/issues/2305
// e.AssertLogContains("Microsoft.Build.dll");
Assert.Empty(t3.ResolvedFiles);
}
/// <summary>
/// Make sure when specific version is set to true and the dependencies of the reference are a higher version than what is in the redist list do not warn, do not unresolve
/// </summary>
[Fact]
public void DependenciesHigherThanHighestInRedistListSpecificVersionMetadata()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOn9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089")
};
items[0].SetMetadata("SpecificVersion", "true");
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"<File AssemblyName='System.Data' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, true);
Assert.Equal(0, e.Warnings); // "No warnings expected in this scenario."
e.AssertLogDoesntContain("MSB3258");
e.AssertLogDoesntContain("MSB3257");
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Single(t.ResolvedFiles);
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9.dll"))); // "Expected to find assembly, but didn't."
}
/// <summary>
/// Verify the case where two assemblies depend on an assembly which is in the redist list but has a higher version than what is described in the redist list.
/// DependsOn9 and DependsOn9Also both depend on System, Version=9.0.0.0 one of the items has the SpecificVersion metadata set. In this case
/// we expect to only see a warning from one of the assemblies.
/// </summary>
[Fact]
public void TwoDependenciesHigherThanHighestInRedistListIgnoreOnOne()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOn9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089"),
new TaskItem("DependsOn9Also")
};
items[0].SetMetadata("SpecificVersion", "true");
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(1, e.Warnings); // "Expected one warning in this scenario."
e.AssertLogContains(t.Log.FormatResourceString("ResolveAssemblyReference.DependencyReferenceOutsideOfFramework", "DependsOn9Also", "System, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089", "9.0.0.0", "4.0.0.0"));
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Single(t.ResolvedFiles);
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9.dll"))); // "Expected to find assembly, but didn't."
Assert.False(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9Also.dll"))); // "Expected not to find assembly, but did."
}
/// <summary>
/// Verify the case where two assemblies depend on an assembly which is in the redist list but has a higher version than what is described in the redist list.
/// DependsOn9 and DependsOn9Also both depend on System, Version=9.0.0.0. Both of the items has the specificVersion metadata set. In this case
/// we expect to only see no warnings from the assemblies.
/// </summary>
[Fact]
public void TwoDependenciesHigherThanHighestInRedistListIgnoreOnBoth()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOn9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089"),
new TaskItem("DependsOn9Also, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089")
};
items[0].SetMetadata("SpecificVersion", "true");
items[1].SetMetadata("SpecificVersion", "true");
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, true);
Assert.Equal(0, e.Warnings); // "No warnings expected in this scenario."
e.AssertLogDoesntContain("MSB3258");
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Equal(2, t.ResolvedFiles.Length);
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9.dll"))); // "Expected to find assembly, but didn't."
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9Also.dll"))); // "Expected to find assembly, but didn't."
}
/// <summary>
/// Test the case where two assemblies with different versions but the same name depend on an assembly which is in the redist list but has a higher version than
/// what is described in the redist list. We expect two warnings because both assemblies are going to be resolved even though one of them will not be copy local.
/// </summary>
[Fact]
public void TwoDependenciesSameNameDependOnHigherVersion()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOn9, Version=1.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089"),
new TaskItem("DependsOn9, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089")
};
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='false' />" +
"</FileList >";
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false);
Assert.Equal(2, e.Warnings); // "Expected two warnings."
e.AssertLogContains(t.Log.FormatResourceString("ResolveAssemblyReference.DependencyReferenceOutsideOfFramework", "DependsOn9, Version=1.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089", "System, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089", "9.0.0.0", "4.0.0.0"));
e.AssertLogContains(t.Log.FormatResourceString("ResolveAssemblyReference.DependencyReferenceOutsideOfFramework", "DependsOn9, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089", "System, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089", "9.0.0.0", "4.0.0.0"));
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Empty(t.ResolvedFiles);
}
/// <summary>
/// Test the case where the project has two references, one of them has dependencies which are contained within the projects target framework
/// and there is another reference which has dependencies on a future framework (this is the light up scenario assembly).
///
/// Make sure that if specific version is set on the lightup assembly that we do not unresolve it, and we also should not unify its dependencies.
/// </summary>
[Fact]
public void MixedDependenciesSpecificVersionOnHigherVersionMetadataSet()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOnOnlyv4Assemblies, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089"),
new TaskItem("DependsOn9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089")
};
items[1].SetMetadata("SpecificVersion", "true");
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
List<string> additionalPaths = new List<string>();
additionalPaths.Add(s_myComponents40ComponentPath);
additionalPaths.Add(s_myVersion40Path);
additionalPaths.Add(s_myVersion90Path + Path.DirectorySeparatorChar);
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false, additionalPaths);
Assert.Equal(0, e.Warnings); // "No warnings expected in this scenario."
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Equal(2, t.ResolvedFiles.Length);
Assert.Equal(2, t.ResolvedDependencyFiles.Length);
Assert.True(ContainsItem(t.ResolvedFiles, s_40ComponentDependsOnOnlyv4AssembliesDllPath)); // "Expected to find assembly, but didn't."
Assert.True(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9.dll"))); // "Expected to find assembly, but didn't."
}
/// <summary>
/// Test the case where the project has two references, one of them has dependencies which are contained within the projects target framework
/// and there is another reference which has dependencies on a future framework (this is the light up scenario assembly).
///
/// Verify that if specific version is set on the other reference that we get the expected behavior:
/// Un resolve the light up assembly.
/// </summary>
[Fact]
public void MixedDependenciesSpecificVersionOnLowerVersionMetadataSet()
{
MockEngine e = new MockEngine(_output);
ITaskItem[] items = new ITaskItem[]
{
new TaskItem("DependsOnOnlyv4Assemblies, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089"),
new TaskItem("DependsOn9, Version=9.0.0.0, Culture=neutral, PublicKeyToken=b17a5c561934e089")
};
items[0].SetMetadata("SpecificVersion", "true");
string redistString = "<FileList Redist='Microsoft-Windows-CLRCoreComp-Random' >" +
"<File AssemblyName='System' Version='4.0.0.0' PublicKeyToken='b77a5c561934e089' Culture='neutral' ProcessorArchitecture='MSIL' FileVersion='4.0.0.0' InGAC='true' />" +
"</FileList >";
List<string> additionalPaths = new List<string>();
additionalPaths.Add(s_myComponents40ComponentPath);
additionalPaths.Add(s_myVersion40Path);
additionalPaths.Add(s_myVersion90Path + Path.DirectorySeparatorChar);
ResolveAssemblyReference t = new ResolveAssemblyReference();
ExecuteRAROnItemsAndRedist(t, e, items, redistString, false, additionalPaths);
Assert.Equal(1, e.Warnings); // "Expected one warning: the light-up assembly is unresolved."
Assert.Equal(0, e.Errors); // "No errors expected in this scenario."
Assert.Single(t.ResolvedFiles);
Assert.Single(t.ResolvedDependencyFiles);
Assert.True(ContainsItem(t.ResolvedFiles, s_40ComponentDependsOnOnlyv4AssembliesDllPath)); // "Expected to find assembly, but didn't."
Assert.False(ContainsItem(t.ResolvedFiles, Path.Combine(s_myComponentsMiscPath, "DependsOn9.dll"))); // "Expected not to find assembly, but did."
}
}
}
| |
#if ENABLE_PLAYFABPLAYSTREAM_API && ENABLE_PLAYFABSERVER_API
using System;
using System.Collections.Generic;
using PlayFab.Internal;
namespace PlayFab
{
/// <summary>
/// <para />APIs which allow game servers to subscribe to PlayStream events for a specific title
/// <para />This API is server only, and should NEVER be used on clients.
/// </summary>
public static class PlayFabPlayStreamAPI
{
/// <summary>
/// The event when successfully subscribed to PlayStream.
/// </summary>
public static event Action OnSubscribed;
/// <summary>
/// The event when failed to subscribe events from PlayStream server.
/// </summary>
public static event Action<SubscriptionError> OnFailed;
/// <summary>
/// <para />This is the event when a PlayStream event is received from the server.
/// </summary>
public static event Action<PlayStreamNotification> OnPlayStreamEvent;
#region Connection Status Events
/// <summary>
/// The debug event when reconnected to the PlayStream server.
/// </summary>
public static event Action OnReconnected;
/// <summary>
/// The debug event when received anything from the PlayStream server. This gives the raw message received from the server and should be used for debug purposes.
/// </summary>
public static event Action<string> OnReceived;
/// <summary>
/// The debug event when an error occurs.
/// </summary>
public static event Action<Exception> OnError;
/// <summary>
/// The debug event when disconnected from the PlayStream server.
/// </summary>
public static event Action OnDisconnected;
#endregion
/// <summary>
/// Start the SignalR connection asynchronously and subscribe to PlayStream events if successfully connected.
/// Optionally pass an filter id to only be subscribed to specific types of PlayStream events. Event filters can be configured on GameManager.
/// </summary>
public static void Start(string eventFilterId = null)
{
// Capture the filter id so the connected callback can forward it to the hub.
// (local name "connetionCallback" is a pre-existing typo; left as-is.)
Action connetionCallback = () =>
{
OnConnectedCallback(eventFilterId);
};
PlayFabHttp.InitializeSignalR(PlayFabSettings.ProductionEnvironmentPlayStreamUrl, "EventStreamsHub", connetionCallback, OnReceivedCallback, OnReconnectedCallback, OnDisconnectedCallback, OnErrorCallback);
}
/// <summary>
/// Sends a disconnect request to the server and stop the SignalR connection.
/// </summary>
public static void Stop()
{
PlayFabHttp.StopSignalR();
}
#region Connection Callbacks
// Runs once the SignalR connection is up: registers the client-side handlers
// first, then asks the hub to subscribe this title to the event queue so no
// notification arrives before a handler exists.
private static void OnConnectedCallback(string filter)
{
PlayFabHttp.SubscribeSignalR("notifyNewMessage", OnPlayStreamNotificationCallback);
PlayFabHttp.SubscribeSignalR("notifySubscriptionError", OnSubscriptionErrorCallback);
PlayFabHttp.SubscribeSignalR("notifySubscriptionSuccess", OnSubscriptionSuccessCallback);
var queueRequest = new
{
TitleId = PlayFabSettings.TitleId,
TitleSecret = PlayFabSettings.DeveloperSecretKey,
BackFill = false,
EventFilter = filter
};
PlayFabHttp.InvokeSignalR("SubscribeToQueue", null, queueRequest);
}
// Deserializes the first payload entry into a PlayStreamNotification and
// raises OnPlayStreamEvent.
private static void OnPlayStreamNotificationCallback(object[] data)
{
var notif = Json.JsonWrapper.DeserializeObject<PlayStreamNotification>(data[0].ToString());
if (OnPlayStreamEvent != null)
{
OnPlayStreamEvent(notif);
}
}
// Maps the server's error message to a SubscriptionError and raises OnFailed.
// The "Invalid Title Secret Key!" string is part of the server contract.
private static void OnSubscriptionErrorCallback(object[] data)
{
var message = data[0] as string;
if (OnFailed != null)
{
if (message == "Invalid Title Secret Key!")
{
OnFailed(SubscriptionError.InvalidSecretKey);
}
else
{
OnFailed(SubscriptionError.FailWithUnexpected(message));
}
}
}
private static void OnSubscriptionSuccessCallback(object[] data)
{
if (OnSubscribed != null)
{
OnSubscribed();
}
}
private static void OnReconnectedCallback()
{
if (OnReconnected != null)
{
OnReconnected();
}
}
private static void OnReceivedCallback(string msg)
{
if (OnReceived != null)
{
OnReceived(msg);
}
}
// Timeouts are surfaced through OnFailed (subscription-level failure);
// every other exception goes to the generic OnError debug event.
private static void OnErrorCallback(Exception ex)
{
var timeoutEx = ex as TimeoutException;
if (timeoutEx != null)
{
if (OnFailed != null)
{
OnFailed(SubscriptionError.ConnectionTimeout);
}
}
else
{
if (OnError != null)
{
OnError(ex);
}
}
}
private static void OnDisconnectedCallback()
{
if (OnDisconnected != null)
{
OnDisconnected();
}
}
#endregion
}
/// <summary>
/// <para />The server message wrapper for PlayStream events.
/// <para />Should be used to deserialize EventObject into its appropriate types by EventName, TntityType, and EventNamespace. Do not modify.
/// </summary>
/// <summary>
/// <para />The server message wrapper for PlayStream events.
/// <para />Should be used to deserialize EventObject into its appropriate types by EventName, EntityType, and EventNamespace. Do not modify.
/// </summary>
public sealed class PlayStreamNotification
{
// Metadata sent by the server. Field names are a serialization contract —
// they must match the JSON keys exactly, so do not rename them.
public string EventName;
public string EntityType;
public string EventNamespace;
public string PlayerId;
public string TitleId;
public PlayStreamEvent EventObject;
public PlayerProfile Profile;
public List<object> TriggerResults;
public List<object> SegmentMatchResults;
// Raw event payload; deserialize EventData per EventName/EventNamespace.
public class PlayStreamEvent
{
public object EventData;
public object InternalState;
}
// Snapshot of the player profile attached to the notification.
public class PlayerProfile
{
public string PlayerId;
public string TitleId;
public object DisplayName;
public string Origination;
public object Created;
public object LastLogin;
public object BannedUntil;
public Dictionary<string, int> Statistics;
public Dictionary<string, int> VirtualCurrencyBalances;
public List<object> AdCampaignAttributions;
public List<object> PushNotificationRegistrations;
public List<LinkedAccount> LinkedAccounts;
// A platform account linked to this player.
public class LinkedAccount
{
public string Platform;
public string PlatformUserId;
}
}
}
/// <summary>
/// The error code of PlayStream subscription result.
/// </summary>
/// <summary>
/// The error code of PlayStream subscription result.
/// </summary>
public struct SubscriptionError
{
    public ErrorCode Code;
    public string Message;

    public enum ErrorCode
    {
        Unexpected = 400,
        ConnectionTimeout = 401,
        InvalidSecretKey = 402
    }

    // Prebuilt error for a connection that timed out before subscribing.
    public static SubscriptionError ConnectionTimeout
    {
        get
        {
            SubscriptionError error = new SubscriptionError();
            error.Code = ErrorCode.ConnectionTimeout;
            error.Message = "Connection Timeout";
            return error;
        }
    }

    // Prebuilt error for a rejected title secret key.
    public static SubscriptionError InvalidSecretKey
    {
        get
        {
            SubscriptionError error = new SubscriptionError();
            error.Code = ErrorCode.InvalidSecretKey;
            error.Message = "Invalid Secret Key";
            return error;
        }
    }

    // Wraps any other server-reported failure message.
    public static SubscriptionError FailWithUnexpected(string message)
    {
        SubscriptionError error = new SubscriptionError();
        error.Code = ErrorCode.Unexpected;
        error.Message = message;
        return error;
    }
}
}
#endif
| |
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Collections.Generic;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using Vevo;
using Vevo.DataAccessLib;
using Vevo.Domain;
using Vevo.Domain.DataInterfaces;
using Vevo.Domain.Marketing;
using Vevo.Base.Domain;
using Vevo.Deluxe.Domain;
using Vevo.Deluxe.Domain.DataInterfaces;
using Vevo.Deluxe.Domain.Marketing;
using Vevo.Shared.DataAccess;
/// <summary>
/// Admin grid control listing the payments made to a single affiliate.
/// The affiliate is identified by the "AffiliateCode" query-string value;
/// Page_Load redirects away when the code is missing or the license is not Deluxe.
/// </summary>
public partial class AdminAdvanced_MainControls_AffiliatePaymentList : AdminAdvancedBaseUserControl
{
    // Index of the grid column holding the AffiliatePaymentID text
    // (used when reading the id back out of a row on delete).
    private const int ColumnAffiliatePaymentID = 1;

    // Affiliate identifier from the query string; may be null/empty until
    // Page_Load has validated it.
    private string AffiliateCode
    {
        get
        {
            return MainContext.QueryString["AffiliateCode"];
        }
    }

    // Sort/selection helper for uxGrid, cached in ViewState so the current
    // sort state survives postbacks.
    private GridViewHelper GridHelper
    {
        get
        {
            if (ViewState["GridHelper"] == null)
                ViewState["GridHelper"] = new GridViewHelper( uxGrid, "AffiliatePaymentID" );
            return (GridViewHelper) ViewState["GridHelper"];
        }
    }

    // Feeds the payment table schema to the search-filter control,
    // defaulting the filter to the AffiliateCode column.
    private void SetUpSearchFilter()
    {
        IList<TableSchemaItem> list = DataAccessContextDeluxe.AffiliatePaymentRepository.GetTableSchema();
        uxSearchFilter.SetUpSchema( list, "AffiliateCode" );
    }

    // Re-queries the current page of payments (search filter + sort applied)
    // and rebinds the grid; also recomputes the page count.
    private void RefreshGrid()
    {
        int totalItems;
        // Start/end indices are zero-based and inclusive for the repository call.
        uxGrid.DataSource = DataAccessContextDeluxe.AffiliatePaymentRepository.SearchAffiliatePayment(
            AffiliateCode,
            GridHelper.GetFullSortText(),
            uxSearchFilter.SearchFilterObj,
            (uxPagingControl.CurrentPage - 1) * uxPagingControl.ItemsPerPages,
            (uxPagingControl.CurrentPage * uxPagingControl.ItemsPerPages) - 1,
            out totalItems );
        uxPagingControl.NumberOfPages = (int) Math.Ceiling( (double) totalItems / uxPagingControl.ItemsPerPages );
        uxGrid.DataBind();
    }

    // Shows/hides the delete button and wires the confirmation popup.
    // In test mode the popup targets a dummy button so real deletes are blocked.
    private void DeleteVisible( bool value )
    {
        uxDeleteButton.Visible = value;
        if (value)
        {
            if (AdminConfig.CurrentTestMode == AdminConfig.TestMode.Normal)
            {
                uxDeleteConfirmButton.TargetControlID = "uxDeleteButton";
                uxConfirmModalPopup.TargetControlID = "uxDeleteButton";
            }
            else
            {
                uxDeleteConfirmButton.TargetControlID = "uxDummyButton";
                uxConfirmModalPopup.TargetControlID = "uxDummyButton";
            }
        }
        else
        {
            uxDeleteConfirmButton.TargetControlID = "uxDummyButton";
            uxConfirmModalPopup.TargetControlID = "uxDummyButton";
        }
    }

    // Read-only admins may not delete payments.
    private void ApplyPermissions()
    {
        if (!IsAdminModifiable())
        {
            DeleteVisible( false );
        }
    }

    // Populates grid, paging, delete button and affiliate header labels.
    // Runs from Page_PreRender so it sees the results of any postback handlers.
    private void PopulateControls()
    {
        if (!MainContext.IsPostBack)
        {
            RefreshGrid();
        }
        if (uxGrid.Rows.Count > 0)
        {
            DeleteVisible( true );
            uxPagingControl.Visible = true;
        }
        else
        {
            DeleteVisible( false );
            uxPagingControl.Visible = false;
        }
        // NOTE(review): assumes GetOne never returns null for a validated
        // AffiliateCode — confirm repository behavior for stale codes.
        Affiliate affiliate = DataAccessContextDeluxe.AffiliateRepository.GetOne( AffiliateCode );
        uxAffiliateNameLabel.Text = affiliate.ContactAddress.FirstName + " " + affiliate.ContactAddress.LastName;
        uxAffiliateUserNameLabel.Text = affiliate.UserName;
        GetCommissionListLink();
    }

    // Points the commission-list hyperlink at the commission page for this affiliate.
    private void GetCommissionListLink()
    {
        uxCommissionListLink.PageName = "AffiliateCommissionList.ascx";
        uxCommissionListLink.PageQueryString = "AffiliateCode=" + AffiliateCode;
    }

    // Shared refresh handler bubbled up from the search filter and pager.
    private void uxGrid_RefreshHandler( object sender, EventArgs e )
    {
        RefreshGrid();
    }

    protected void Page_Load( object sender, EventArgs e )
    {
        // This screen requires a Deluxe license and a concrete affiliate code.
        if (!KeyUtilities.IsDeluxeLicense( DataAccessHelper.DomainRegistrationkey, DataAccessHelper.DomainName ))
            MainContext.RedirectMainControl( "Default.ascx", String.Empty );
        if (String.IsNullOrEmpty( AffiliateCode ))
            MainContext.RedirectMainControl( "AffiliateList.ascx" );
        uxSearchFilter.BubbleEvent += new EventHandler( uxGrid_RefreshHandler );
        uxPagingControl.BubbleEvent += new EventHandler( uxGrid_RefreshHandler );
        if (!MainContext.IsPostBack)
        {
            uxPagingControl.ItemsPerPages = AdminConfig.AffiliatePaymentPerPage;
            SetUpSearchFilter();
        }
    }

    protected void Page_PreRender( object sender, EventArgs e )
    {
        PopulateControls();
        ApplyPermissions();
    }

    // Grid sort click: record the new sort expression, then re-query.
    protected void uxGrid_Sorting( object sender, GridViewSortEventArgs e )
    {
        GridHelper.SelectSorting( e.SortExpression );
        RefreshGrid();
    }

    // Deletes every checked row; shows a success message if at least one
    // payment was removed, and surfaces any repository exception to the user.
    protected void uxDeleteButton_Click( object sender, EventArgs e )
    {
        try
        {
            bool deleted = false;
            foreach (GridViewRow row in uxGrid.Rows)
            {
                CheckBox deleteCheck = (CheckBox) row.FindControl( "uxCheck" );
                if (deleteCheck.Checked)
                {
                    string id = row.Cells[ColumnAffiliatePaymentID].Text.Trim();
                    DataAccessContextDeluxe.AffiliatePaymentRepository.Delete( id );
                    deleted = true;
                }
            }
            if (deleted)
            {
                uxMessage.DisplayMessage( Resources.AffiliatePaymentMessages.DeleteSuccess );
            }
        }
        catch (Exception ex)
        {
            uxMessage.DisplayException( ex );
        }
        RefreshGrid();
    }

    // Template helper: comma list of order ids covered by one payment.
    protected string GetAllOrderID( object affiliatePaymentID )
    {
        return DataAccessContextDeluxe.AffiliateOrderRepository.GetOrderIDByAffiliatePaymentID( affiliatePaymentID.ToString() );
    }

    // Template helper: format a boxed DateTime as a short date string.
    protected string ShowDate( object paiddate )
    {
        return ((DateTime) paiddate).ToShortDateString();
    }
}
| |
using System;
using System.Collections;
using System.Text;
using System.Runtime.InteropServices;
using EnvDTE;
using EnvDTE80;
//using EnvDTE90;
using System.IO;
using System.Windows.Forms;
namespace ToolboxManager
{
/// <summary>
/// Installs and uninstalls toolbox items by driving out-of-process Visual Studio
/// 2005/2008 instances through the DTE automation model. Registers itself as an
/// OLE message filter so busy-COM-server retries are handled during the run.
/// </summary>
public sealed class Installer : IOleMessageFilter, IDisposable
{
    #region Private variables

    // Guards against double disposal: releasing an RCW twice or closing an
    // already-closed solution throws, and Dispose is reachable both through
    // the public method, the explicit interface method, and the finalizer.
    private bool _Disposed;

    // Represents a list of installation or uninstallation tasks
    private ArrayList _Tasks = new ArrayList();

    // Represents a list of solution projects
    // NOTE(review): not referenced anywhere in this class; kept for compatibility.
    private ArrayList _SolutionProjects = new ArrayList();

    // Controls whether Visual Studio 2005 or 2008 is being used
    private bool _IsVisualStudio2005 = false;
    private bool _IsVisualStudio2008 = false;

    // Represents the Visual Studio 2005 design-time environment (null if VS2005
    // is not installed or not requested).
    private DTE DesignTimeEnvironment2005;

    // Represents the Visual Studio 2008 design-time environment (null if VS2008
    // is not installed or not requested).
    private DTE DesignTimeEnvironment2008;

    // Previous OLE message filter, restored when this instance is disposed.
    private IOleMessageFilter _OldOleFilter;

    #endregion

    #region Events

    /// <summary>
    /// Occurs before the first task begins.
    /// </summary>
    public event EventHandler OperationStarted;

    /// <summary>
    /// Occurs when progress has been made on any task.
    /// </summary>
    public event EventHandler TaskProgressOccurred;

    /// <summary>
    /// Occurs after the last task has been completed.
    /// </summary>
    public event EventHandler OperationCompleted;

    #endregion

    #region Constructor / Finalizer

    public Installer()
    {
        // Hook into OLE messages so calls rejected by a busy VS instance
        // are retried instead of failing immediately.
        CoRegisterMessageFilter(this, out _OldOleFilter);
    }

    ~Installer()
    {
        // Safety net if the caller forgot to dispose. The _Disposed guard makes
        // this a no-op after an explicit Dispose (SuppressFinalize normally
        // prevents it from running at all).
        // NOTE(review): releasing COM objects from the finalizer thread is
        // fragile; callers should always dispose explicitly.
        Dispose();
    }

    #endregion

    #region Public Properties

    /// <summary>
    /// Returns a list of install/uninstall tasks for the installer to accomplish.
    /// </summary>
    public ArrayList Tasks
    {
        get
        {
            return _Tasks;
        }
    }

    /// <summary>
    /// Controls whether the Visual Studio 2005 toolbox will be affected.
    /// </summary>
    public bool IsVisualStudio2005
    {
        get
        {
            return _IsVisualStudio2005;
        }
        set
        {
            _IsVisualStudio2005 = value;
        }
    }

    /// <summary>
    /// Controls whether the Visual Studio 2008 toolbox will be affected.
    /// </summary>
    public bool IsVisualStudio2008
    {
        get
        {
            return _IsVisualStudio2008;
        }
        set
        {
            _IsVisualStudio2008 = value;
        }
    }

    #endregion

    #region Public Methods

    /// <summary>
    /// Performs all installation and uninstallation tasks. Creates one DTE
    /// instance per requested Visual Studio version and reuses it across all
    /// tasks, since starting an IDE instance is expensive.
    /// </summary>
    public void Execute()
    {
        // Signal that tasks have been started
        OnOperationStarted();

        #region Create instances of the Visual Studio IDE
        if (_IsVisualStudio2005)
        {
            // Try to create an instance of Visual Studio 2005
            try
            {
                DesignTimeEnvironment2005 = (DTE)System.Activator.CreateInstance(Type.GetTypeFromProgID("VisualStudio.DTE.8.0"), true);
            }
            catch
            {
                // Visual Studio 2005 is not installed
                DesignTimeEnvironment2005 = null;
            }
        }

        if (_IsVisualStudio2008)
        {
            // Try to create an instance of Visual Studio 2008
            try
            {
                DesignTimeEnvironment2008 = (DTE)System.Activator.CreateInstance(Type.GetTypeFromProgID("VisualStudio.DTE.9.0"), true);
            }
            catch
            {
                // Visual Studio 2008 is not installed
                DesignTimeEnvironment2008 = null;
            }
        }
        #endregion

        // Receive notifications any time a task achieves progress
        foreach (Task task in _Tasks)
        {
            task.ProgressChanged += new EventHandler(task_ProgressChanged);
        }

        // Perform tasks
        foreach (Task task in _Tasks)
        {
            // Execute the task, passing along the instances of VS2005 and VS2008 to work with.
            // We want to recycle instances as much as possible since they are expensive to create.
            task.Execute(DesignTimeEnvironment2005, DesignTimeEnvironment2008);
        }

        // Signal that tasks are complete
        OnOperationCompleted();
    }

    // Raises OperationStarted if anyone is listening.
    private void OnOperationStarted()
    {
        if (OperationStarted != null)
            OperationStarted(this, EventArgs.Empty);
    }

    // Forwards per-task progress to our own aggregate progress event.
    void task_ProgressChanged(object sender, EventArgs e)
    {
        OnTaskProgressOccurred();
    }

    // Raises TaskProgressOccurred if anyone is listening.
    private void OnTaskProgressOccurred()
    {
        if (TaskProgressOccurred != null)
            TaskProgressOccurred(this, EventArgs.Empty);
    }

    // Raises OperationCompleted if anyone is listening.
    private void OnOperationCompleted()
    {
        if (OperationCompleted != null)
            OperationCompleted(this, EventArgs.Empty);
    }

    #endregion

    #region Static methods

    [DllImport("ole32.dll")]
    static extern int CoRegisterMessageFilter(IOleMessageFilter newFilter, out IOleMessageFilter oldFilter);

    #endregion

    #region IDisposable Members

    /// <summary>
    /// Closes any Visual Studio instances that were started, releases their COM
    /// wrappers and unregisters the OLE message filter. Safe to call multiple times.
    /// </summary>
    public void Dispose()
    {
        // Fix: the original implementation was not idempotent. Dispose is
        // reachable via the public method, the explicit IDisposable.Dispose and
        // the finalizer; a second call would close an already-closed solution
        // and call Marshal.ReleaseComObject on an already-released RCW.
        if (_Disposed)
            return;
        _Disposed = true;

        // We no longer need to finalize
        GC.SuppressFinalize(this);

        // Close the Visual Studio 2005 instance, if one was created
        if (DesignTimeEnvironment2005 != null)
        {
            // Close the solution
            DesignTimeEnvironment2005.Solution.Close(false);
            // Release the DTE so that it can shut down
            Marshal.ReleaseComObject(DesignTimeEnvironment2005);
            DesignTimeEnvironment2005 = null;
        }

        // Close the Visual Studio 2008 instance, if one was created
        if (DesignTimeEnvironment2008 != null)
        {
            // Close the solution
            DesignTimeEnvironment2008.Solution.Close(false);
            // Release the DTE so that it can shut down
            Marshal.ReleaseComObject(DesignTimeEnvironment2008);
            DesignTimeEnvironment2008 = null;
        }

        // Stop listening for OLE messages
        CoRegisterMessageFilter(null, out _OldOleFilter);

        // Perform garbage collection to encourage the released RCWs to be
        // cleaned up promptly so the IDE processes can exit.
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }

    void IDisposable.Dispose()
    {
        Dispose();
    }

    #endregion

    #region IOleMessageFilter Members

    int IOleMessageFilter.HandleInComingCall(int dwCallType, IntPtr hTaskCaller, int dwTickCount, IntPtr lpInterfaceInfo)
    {
        return 0; //SERVERCALL_ISHANDLED
    }

    int IOleMessageFilter.RetryRejectedCall(IntPtr hTaskCallee, int dwTickCount, int dwRejectType)
    {
        if (dwRejectType == 2) // SERVERCALL_RETRYLATER
            return 50; // wait .5 seconds and try again
        return -1; // cancel call
    }

    int IOleMessageFilter.MessagePending(IntPtr hTaskCallee, int dwTickCount, int dwPendingType)
    {
        return 2; //PENDINGMSG_WAITDEFPROCESS
    }

    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Runtime.Serialization
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Reflection;
using System.Security;
using System.Xml;
using DataContractDictionary = System.Collections.Generic.Dictionary<System.Xml.XmlQualifiedName, DataContract>;
#if USE_REFEMIT || NET_NATIVE
public class XmlObjectSerializerContext
#else
internal class XmlObjectSerializerContext
#endif
{
    // Serializer this context belongs to; source of serializer-level known types.
    protected XmlObjectSerializer serializer;
    // Data contract of the root type being serialized/deserialized (may be null).
    protected DataContract rootTypeDataContract;
    // Stack of known-type dictionaries pushed/popped as nested contracts are visited.
    internal ScopedKnownTypes scopedKnownTypes = new ScopedKnownTypes();
    // Lazily populated from serializer.KnownDataContracts (see SerializerKnownDataContracts).
    protected DataContractDictionary serializerKnownDataContracts;
    private bool _isSerializerKnownDataContractsSetExplicit;
    protected IList<Type> serializerKnownTypeList;
    // Object-graph quota tracking: _itemCount starts at 1 (the root object).
    private int _itemCount;
    private int _maxItemsInObjectGraph;
    private StreamingContext _streamingContext;
    private bool _ignoreExtensionDataObject;
    // Optional user-supplied resolver; when null, resolution falls back to known types.
    private DataContractResolver _dataContractResolver;
    private KnownTypeDataContractResolver _knownTypeResolver;

    internal XmlObjectSerializerContext(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject,
        DataContractResolver dataContractResolver)
    {
        this.serializer = serializer;
        _itemCount = 1;
        _maxItemsInObjectGraph = maxItemsInObjectGraph;
        _streamingContext = streamingContext;
        _ignoreExtensionDataObject = ignoreExtensionDataObject;
        _dataContractResolver = dataContractResolver;
    }

    internal XmlObjectSerializerContext(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject)
        : this(serializer, maxItemsInObjectGraph, streamingContext, ignoreExtensionDataObject, null)
    {
    }

    // Convenience constructor for DataContractSerializer: pulls quota and options
    // from the serializer itself and records the root contract and known-type list.
    internal XmlObjectSerializerContext(DataContractSerializer serializer, DataContract rootTypeDataContract
        , DataContractResolver dataContractResolver
        )
        : this(serializer,
        serializer.MaxItemsInObjectGraph,
        new StreamingContext(),
        serializer.IgnoreExtensionDataObject,
        dataContractResolver
        )
    {
        this.rootTypeDataContract = rootTypeDataContract;
        this.serializerKnownTypeList = serializer.knownTypeList;
    }

    // Serialization mode; derived contexts may override.
    internal virtual SerializationMode Mode
    {
        get { return SerializationMode.SharedContract; }
    }

    // Whether the current member is a get-only collection; the base context never is.
    internal virtual bool IsGetOnlyCollection
    {
        get { return false; }
        set { }
    }

#if USE_REFEMIT
    public StreamingContext GetStreamingContext()
#else
    internal StreamingContext GetStreamingContext()
#endif
    {
        return _streamingContext;
    }

    // Enforces the MaxItemsInObjectGraph quota; throws before the count can overflow it.
#if USE_REFEMIT
    public void IncrementItemCount(int count)
#else
    internal void IncrementItemCount(int count)
#endif
    {
        if (count > _maxItemsInObjectGraph - _itemCount)
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ExceededMaxItemsQuota, _maxItemsInObjectGraph)));
        _itemCount += count;
    }

    internal int RemainingItemCount
    {
        get { return _maxItemsInObjectGraph - _itemCount; }
    }

    internal bool IgnoreExtensionDataObject
    {
        get { return _ignoreExtensionDataObject; }
    }

    protected DataContractResolver DataContractResolver
    {
        get { return _dataContractResolver; }
    }

    // Lazily-created resolver that resolves only via the known-types mechanism.
    protected KnownTypeDataContractResolver KnownTypeResolver
    {
        get
        {
            if (_knownTypeResolver == null)
            {
                _knownTypeResolver = new KnownTypeDataContractResolver(this);
            }
            return _knownTypeResolver;
        }
    }

    internal DataContract GetDataContract(Type type)
    {
        return GetDataContract(type.TypeHandle, type);
    }

    // Contract lookup honoring the get-only-collection special case.
    internal virtual DataContract GetDataContract(RuntimeTypeHandle typeHandle, Type type)
    {
        if (IsGetOnlyCollection)
        {
            return DataContract.GetGetOnlyCollectionDataContract(DataContract.GetId(typeHandle), typeHandle, type, Mode);
        }
        else
        {
            return DataContract.GetDataContract(typeHandle, type, Mode);
        }
    }

    internal virtual DataContract GetDataContractSkipValidation(int typeId, RuntimeTypeHandle typeHandle, Type type)
    {
        if (IsGetOnlyCollection)
        {
            return DataContract.GetGetOnlyCollectionDataContractSkipValidation(typeId, typeHandle, type);
        }
        else
        {
            return DataContract.GetDataContractSkipValidation(typeId, typeHandle, type);
        }
    }

    internal virtual DataContract GetDataContract(int id, RuntimeTypeHandle typeHandle)
    {
        if (IsGetOnlyCollection)
        {
            return DataContract.GetGetOnlyCollectionDataContract(id, typeHandle, null /*type*/, Mode);
        }
        else
        {
            return DataContract.GetDataContract(id, typeHandle, Mode);
        }
    }

    internal virtual void CheckIfTypeSerializable(Type memberType, bool isMemberTypeSerializable)
    {
        if (!isMemberTypeSerializable)
            throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataContractException(SR.Format(SR.TypeNotSerializable, memberType)));
    }

    // Base context performs no surrogate substitution.
    internal virtual Type GetSurrogatedType(Type type)
    {
        return type;
    }

    internal virtual DataContractDictionary SerializerKnownDataContracts
    {
        get
        {
            // This field must be initialized during construction by serializers using data contracts.
            if (!_isSerializerKnownDataContractsSetExplicit)
            {
                this.serializerKnownDataContracts = serializer.KnownDataContracts;
                _isSerializerKnownDataContractsSetExplicit = true;
            }
            return this.serializerKnownDataContracts;
        }
    }

    private DataContract GetDataContractFromSerializerKnownTypes(XmlQualifiedName qname)
    {
        DataContractDictionary serializerKnownDataContracts = this.SerializerKnownDataContracts;
        if (serializerKnownDataContracts == null)
            return null;
        DataContract outDataContract;
        return serializerKnownDataContracts.TryGetValue(qname, out outDataContract) ? outDataContract : null;
    }

    // Builds a qname -> contract dictionary from a user-supplied known-type list;
    // throws if the list contains a null entry.
    internal static DataContractDictionary GetDataContractsForKnownTypes(IList<Type> knownTypeList)
    {
        if (knownTypeList == null) return null;
        DataContractDictionary dataContracts = new DataContractDictionary();
        Dictionary<Type, Type> typesChecked = new Dictionary<Type, Type>();
        for (int i = 0; i < knownTypeList.Count; i++)
        {
            Type knownType = knownTypeList[i];
            if (knownType == null)
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentException(SR.Format(SR.NullKnownType, "knownTypes")));
            DataContract.CheckAndAdd(knownType, typesChecked, ref dataContracts);
        }
        return dataContracts;
    }

    // Checks IsKnownType with an extra scope of known contracts temporarily pushed.
    internal bool IsKnownType(DataContract dataContract, DataContractDictionary knownDataContracts, Type declaredType)
    {
        bool knownTypesAddedInCurrentScope = false;
        if (knownDataContracts != null)
        {
            scopedKnownTypes.Push(knownDataContracts);
            knownTypesAddedInCurrentScope = true;
        }

        bool isKnownType = IsKnownType(dataContract, declaredType);

        if (knownTypesAddedInCurrentScope)
        {
            scopedKnownTypes.Pop();
        }
        return isKnownType;
    }

    internal bool IsKnownType(DataContract dataContract, Type declaredType)
    {
        DataContract knownContract = ResolveDataContractFromKnownTypes(dataContract.StableName.Name, dataContract.StableName.Namespace, null /*memberTypeContract*/ /*, declaredType */);
        return knownContract != null && knownContract.UnderlyingType == dataContract.UnderlyingType;
    }

    internal Type ResolveNameFromKnownTypes(XmlQualifiedName typeName)
    {
        DataContract dataContract = ResolveDataContractFromKnownTypes(typeName);
        return dataContract == null ? null : dataContract.UnderlyingType;
    }

    // Resolution order: primitive contracts, then scoped known types,
    // then serializer-level known types.
    private DataContract ResolveDataContractFromKnownTypes(XmlQualifiedName typeName)
    {
        DataContract dataContract = PrimitiveDataContract.GetPrimitiveDataContract(typeName.Name, typeName.Namespace);
        if (dataContract == null)
        {
#if NET_NATIVE
            if (typeName.Name == Globals.SafeSerializationManagerName && typeName.Namespace == Globals.SafeSerializationManagerNamespace && Globals.TypeOfSafeSerializationManager != null)
            {
                return GetDataContract(Globals.TypeOfSafeSerializationManager);
            }
#endif
            dataContract = scopedKnownTypes.GetDataContract(typeName);
            if (dataContract == null)
            {
                dataContract = GetDataContractFromSerializerKnownTypes(typeName);
            }
        }
        return dataContract;
    }

    // Full resolution: user resolver (if any) or known types, then fall back to
    // the member's own contract, the root contract, and the root's collection
    // item-contract chain.
    protected DataContract ResolveDataContractFromKnownTypes(string typeName, string typeNs, DataContract memberTypeContract)
    {
        XmlQualifiedName qname = new XmlQualifiedName(typeName, typeNs);
        DataContract dataContract;
        if (_dataContractResolver == null)
        {
            dataContract = ResolveDataContractFromKnownTypes(qname);
        }
        else
        {
            Type dataContractType = _dataContractResolver.ResolveName(typeName, typeNs, null, KnownTypeResolver);
            dataContract = dataContractType == null ? null : GetDataContract(dataContractType);
        }
        if (dataContract == null)
        {
            if (memberTypeContract != null
                && !memberTypeContract.UnderlyingType.IsInterface
                && memberTypeContract.StableName == qname)
            {
                dataContract = memberTypeContract;
            }
            if (dataContract == null && rootTypeDataContract != null)
            {
                if (rootTypeDataContract.StableName == qname)
                    dataContract = rootTypeDataContract;
                else
                {
                    // Walk nested collection item contracts until a match or a
                    // non-collection contract ends the chain.
                    CollectionDataContract collectionContract = rootTypeDataContract as CollectionDataContract;
                    while (collectionContract != null)
                    {
                        DataContract itemContract = GetDataContract(GetSurrogatedType(collectionContract.ItemType));
                        if (itemContract.StableName == qname)
                        {
                            dataContract = itemContract;
                            break;
                        }
                        collectionContract = itemContract as CollectionDataContract;
                    }
                }
            }
        }
        return dataContract;
    }

    // Push/Pop must be balanced by callers; Pop assumes the matching Push happened.
    internal void PushKnownTypes(DataContract dc)
    {
        if (dc != null && dc.KnownDataContracts != null)
        {
            scopedKnownTypes.Push(dc.KnownDataContracts);
        }
    }

    internal void PopKnownTypes(DataContract dc)
    {
        if (dc != null && dc.KnownDataContracts != null)
        {
            scopedKnownTypes.Pop();
        }
    }
}
}
| |
using System;
using System.Diagnostics;
namespace YAF.Lucene.Net.Search
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using ArrayUtil = YAF.Lucene.Net.Util.ArrayUtil;
using ByteBlockPool = YAF.Lucene.Net.Util.ByteBlockPool;
using BytesRef = YAF.Lucene.Net.Util.BytesRef;
using BytesRefHash = YAF.Lucene.Net.Util.BytesRefHash;
using IndexReader = YAF.Lucene.Net.Index.IndexReader;
using RamUsageEstimator = YAF.Lucene.Net.Util.RamUsageEstimator;
using RewriteMethod = YAF.Lucene.Net.Search.MultiTermQuery.RewriteMethod;
using Term = YAF.Lucene.Net.Index.Term;
using TermContext = YAF.Lucene.Net.Index.TermContext;
using TermsEnum = YAF.Lucene.Net.Index.TermsEnum;
using TermState = YAF.Lucene.Net.Index.TermState;
/// <summary>
/// Base rewrite method that translates each term into a query, and keeps
/// the scores as computed by the query.
/// <para/>
/// @lucene.internal - Only public to be accessible by spans package.
/// </summary>
public abstract class ScoringRewrite<Q> : TermCollectingRewrite<Q> where Q : Query
{
    /// <summary>
    /// A rewrite method that first translates each term into
    /// <see cref="Occur.SHOULD"/> clause in a
    /// <see cref="BooleanQuery"/>, and keeps the scores as computed by the
    /// query. Note that typically such scores are
    /// meaningless to the user, and require non-trivial CPU
    /// to compute, so it's almost always better to use
    /// <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT"/> instead.
    ///
    /// <para/><b>NOTE</b>: this rewrite method will hit
    /// <see cref="BooleanQuery.TooManyClausesException"/> if the number of terms
    /// exceeds <see cref="BooleanQuery.MaxClauseCount"/>.
    /// </summary>
    /// <seealso cref="MultiTermQuery.MultiTermRewriteMethod"/>
    public static readonly ScoringRewrite<BooleanQuery> SCORING_BOOLEAN_QUERY_REWRITE = new ScoringRewriteAnonymousInnerClassHelper();

    // Concrete ScoringRewrite that assembles a coord-disabled BooleanQuery
    // of SHOULD TermQuery clauses (backs SCORING_BOOLEAN_QUERY_REWRITE).
    private class ScoringRewriteAnonymousInnerClassHelper : ScoringRewrite<BooleanQuery>
    {
        public ScoringRewriteAnonymousInnerClassHelper()
        {
        }

        protected override BooleanQuery GetTopLevelQuery()
        {
            // true disables coord scoring for the rewritten query.
            return new BooleanQuery(true);
        }

        // Adds one term as a SHOULD clause, carrying its boost and term states.
        protected override void AddClause(BooleanQuery topLevel, Term term, int docCount, float boost, TermContext states)
        {
            TermQuery tq = new TermQuery(term, states);
            tq.Boost = boost;
            topLevel.Add(tq, Occur.SHOULD);
        }

        protected override void CheckMaxClauseCount(int count)
        {
            if (count > BooleanQuery.MaxClauseCount)
            {
                throw new BooleanQuery.TooManyClausesException();
            }
        }
    }

    /// <summary>
    /// Like <see cref="SCORING_BOOLEAN_QUERY_REWRITE"/> except
    /// scores are not computed. Instead, each matching
    /// document receives a constant score equal to the
    /// query's boost.
    ///
    /// <para/><b>NOTE</b>: this rewrite method will hit
    /// <see cref="BooleanQuery.TooManyClausesException"/> if the number of terms
    /// exceeds <see cref="BooleanQuery.MaxClauseCount"/>.
    /// </summary>
    /// <seealso cref="MultiTermQuery.MultiTermRewriteMethod"/>
    public static readonly RewriteMethod CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE = new RewriteMethodAnonymousInnerClassHelper();

    // Wraps the scoring rewrite's result in a ConstantScoreQuery so every match
    // scores as the query boost.
    private class RewriteMethodAnonymousInnerClassHelper : RewriteMethod
    {
        public RewriteMethodAnonymousInnerClassHelper()
        {
        }

        public override Query Rewrite(IndexReader reader, MultiTermQuery query)
        {
            BooleanQuery bq = (BooleanQuery)SCORING_BOOLEAN_QUERY_REWRITE.Rewrite(reader, query);
            // strip the scores off
            Query result = new ConstantScoreQuery(bq);
            result.Boost = query.Boost;
            return result;
        }
    }

    /// <summary>
    /// This method is called after every new term to check if the number of max clauses
    /// (e.g. in <see cref="BooleanQuery"/>) is not exceeded. Throws the corresponding <see cref="Exception"/>.
    /// </summary>
    protected abstract void CheckMaxClauseCount(int count);

    /// <summary>
    /// Collects all matching terms from the reader, sorts them, and emits one
    /// clause per term into the top-level query with its aggregated boost and
    /// per-segment term states.
    /// </summary>
    public override Query Rewrite(IndexReader reader, MultiTermQuery query)
    {
        var result = GetTopLevelQuery();
        ParallelArraysTermCollector col = new ParallelArraysTermCollector(this);
        CollectTerms(reader, query, col);

        int size = col.terms.Count;
        if (size > 0)
        {
            // Sort term ords so clauses are added in term order; boost/termState
            // are parallel arrays indexed by the pre-sort position.
            int[] sort = col.terms.Sort(col.termsEnum.Comparer);
            float[] boost = col.array.boost;
            TermContext[] termStates = col.array.termState;
            for (int i = 0; i < size; i++)
            {
                int pos = sort[i];
                Term term = new Term(query.Field, col.terms.Get(pos, new BytesRef()));
                Debug.Assert(reader.DocFreq(term) == termStates[pos].DocFreq);
                AddClause(result, term, termStates[pos].DocFreq, query.Boost * boost[pos], termStates[pos]);
            }
        }
        return result;
    }

    // Term collector keeping term bytes, boost, and per-segment term state in
    // parallel arrays keyed by the BytesRefHash ord.
    internal sealed class ParallelArraysTermCollector : TermCollector
    {
        internal void InitializeInstanceFields()
        {
            terms = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectAllocator()), 16, array);
        }

        private readonly ScoringRewrite<Q> outerInstance;

        public ParallelArraysTermCollector(ScoringRewrite<Q> outerInstance)
        {
            this.outerInstance = outerInstance;

            InitializeInstanceFields();
        }

        internal readonly TermFreqBoostByteStart array = new TermFreqBoostByteStart(16);
        internal BytesRefHash terms;
        internal TermsEnum termsEnum;

        private IBoostAttribute boostAtt;

        public override void SetNextEnum(TermsEnum termsEnum)
        {
            this.termsEnum = termsEnum;
            this.boostAtt = termsEnum.Attributes.AddAttribute<IBoostAttribute>();
        }

        // Adds one term occurrence. BytesRefHash.Add returns a negative value
        // for a duplicate (ord = -e - 1); duplicates merge their term state,
        // new entries record boost + a fresh TermContext and re-check the
        // clause-count limit.
        public override bool Collect(BytesRef bytes)
        {
            int e = terms.Add(bytes);
            TermState state = termsEnum.GetTermState();
            Debug.Assert(state != null);
            if (e < 0)
            {
                // duplicate term: update docFreq
                int pos = (-e) - 1;
                array.termState[pos].Register(state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
                Debug.Assert(array.boost[pos] == boostAtt.Boost, "boost should be equal in all segment TermsEnums");
            }
            else
            {
                // new entry: we populate the entry initially
                array.boost[e] = boostAtt.Boost;
                array.termState[e] = new TermContext(m_topReaderContext, state, m_readerContext.Ord, termsEnum.DocFreq, termsEnum.TotalTermFreq);
                outerInstance.CheckMaxClauseCount(terms.Count);
            }
            return true;
        }
    }

    /// <summary>
    /// Special implementation of <see cref="BytesRefHash.BytesStartArray"/> that keeps parallel arrays for boost and docFreq </summary>
    internal sealed class TermFreqBoostByteStart : BytesRefHash.DirectBytesStartArray
    {
        internal float[] boost;
        internal TermContext[] termState;

        public TermFreqBoostByteStart(int initSize)
            : base(initSize)
        {
        }

        // Allocates the parallel arrays alongside the base ord array.
        public override int[] Init()
        {
            int[] ord = base.Init();
            boost = new float[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_SINGLE)];
            termState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
            Debug.Assert(termState.Length >= ord.Length && boost.Length >= ord.Length);
            return ord;
        }

        // Grows the parallel arrays in step with the base ord array.
        public override int[] Grow()
        {
            int[] ord = base.Grow();
            boost = ArrayUtil.Grow(boost, ord.Length);
            if (termState.Length < ord.Length)
            {
                TermContext[] tmpTermState = new TermContext[ArrayUtil.Oversize(ord.Length, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
                Array.Copy(termState, 0, tmpTermState, 0, termState.Length);
                termState = tmpTermState;
            }
            Debug.Assert(termState.Length >= ord.Length && boost.Length >= ord.Length);
            return ord;
        }

        public override int[] Clear()
        {
            boost = null;
            termState = null;
            return base.Clear();
        }
    }
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using Nwc.XmlRpc;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using OpenSim.Framework;
using OpenSim.Framework.Communications;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Services.Interfaces;
namespace OpenSim.Region.OptionalModules.Avatar.XmlRpcGroups
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "XmlRpcGroupsServicesConnectorModule")]
public class XmlRpcGroupsServicesConnectorModule : ISharedRegionModule, IGroupsServicesConnector
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

// Verbose logging switch, read from the [Groups] DebugEnabled config key.
private bool m_debugEnabled = false;

// Powers granted to the Everyone role of a newly created group.
public const GroupPowers DefaultEveryonePowers
    = GroupPowers.AllowSetHome
    | GroupPowers.Accountable
    | GroupPowers.JoinChat
    | GroupPowers.AllowVoiceChat
    | GroupPowers.ReceiveNotices
    | GroupPowers.StartProposal
    | GroupPowers.VoteOnProposal;

// Would this be cleaner as (GroupPowers)ulong.MaxValue?
// Powers granted to the Owners role of a newly created group.
public const GroupPowers DefaultOwnerPowers
    = GroupPowers.Accountable
    | GroupPowers.AllowEditLand
    | GroupPowers.AllowFly
    | GroupPowers.AllowLandmark
    | GroupPowers.AllowRez
    | GroupPowers.AllowSetHome
    | GroupPowers.AllowVoiceChat
    | GroupPowers.AssignMember
    | GroupPowers.AssignMemberLimited
    | GroupPowers.ChangeActions
    | GroupPowers.ChangeIdentity
    | GroupPowers.ChangeMedia
    | GroupPowers.ChangeOptions
    | GroupPowers.CreateRole
    | GroupPowers.DeedObject
    | GroupPowers.DeleteRole
    | GroupPowers.Eject
    | GroupPowers.FindPlaces
    | GroupPowers.Invite
    | GroupPowers.JoinChat
    | GroupPowers.LandChangeIdentity
    | GroupPowers.LandDeed
    | GroupPowers.LandDivideJoin
    | GroupPowers.LandEdit
    | GroupPowers.LandEjectAndFreeze
    | GroupPowers.LandGardening
    | GroupPowers.LandManageAllowed
    | GroupPowers.LandManageBanned
    | GroupPowers.LandManagePasses
    | GroupPowers.LandOptions
    | GroupPowers.LandRelease
    | GroupPowers.LandSetSale
    | GroupPowers.ModerateChat
    | GroupPowers.ObjectManipulate
    | GroupPowers.ObjectSetForSale
    | GroupPowers.ReceiveNotices
    | GroupPowers.RemoveMember
    | GroupPowers.ReturnGroupOwned
    | GroupPowers.ReturnGroupSet
    | GroupPowers.ReturnNonGroup
    | GroupPowers.RoleProperties
    | GroupPowers.SendNotices
    | GroupPowers.SetLandingPoint
    | GroupPowers.StartProposal
    | GroupPowers.VoteOnProposal;

// Set true by Initialise only when groups are enabled, this module is the
// selected connector, and a GroupsServerURI is configured.
private bool m_connectorEnabled = false;

// URL of the XmlRpc groups service, from the GroupsServerURI config key.
private string m_groupsServerURI = string.Empty;

private bool m_disableKeepAlive = false;

// Optional shared-secret keys for read/write calls to the groups service.
private string m_groupReadKey = string.Empty;
private string m_groupWriteKey = string.Empty;

// Captured from the first region added (all regions share one account service).
private IUserAccountService m_accountService = null;

// Response cache; m_cacheTimeout of 0 disables caching entirely.
private ExpiringCache<string, XmlRpcResponse> m_memoryCache;
private int m_cacheTimeout = 30;

// Used to track which agents are have dropped from a group chat session
// Should be reset per agent, on logon
// TODO: move this to Flotsam XmlRpc Service
// SessionID, List<AgentID>
private Dictionary<UUID, List<UUID>> m_groupsAgentsDroppedFromChatSession = new Dictionary<UUID, List<UUID>>();
private Dictionary<UUID, List<UUID>> m_groupsAgentsInvitedToChatSession = new Dictionary<UUID, List<UUID>>();
#region Region Module interfaceBase Members
/// <summary>
/// Module name, also used to match the ServicesConnectorModule config value.
/// </summary>
public string Name
{
    get
    {
        const string moduleName = "XmlRpcGroupsServicesConnector";
        return moduleName;
    }
}
// this module is not intended to be replaced, but there should only be 1 of them.
public Type ReplaceableInterface
{
    get
    {
        // Returning null tells the module loader this module is not replaceable.
        return null;
    }
}
/// <summary>
/// Read the [Groups] config section and enable the connector only when groups
/// are enabled, this connector is selected, and a GroupsServerURI is supplied.
/// </summary>
public void Initialise(IConfigSource config)
{
    IConfig groupsConfig = config.Configs["Groups"];
    if (groupsConfig == null)
    {
        // Do not run this module by default.
        return;
    }

    // if groups aren't enabled, we're not needed.
    // if we're not specified as the connector to use, then we're not wanted
    // (guard clause replaces the original else-after-return nesting)
    if ((groupsConfig.GetBoolean("Enabled", false) == false)
        || (groupsConfig.GetString("ServicesConnectorModule", "XmlRpcGroupsServicesConnector") != Name))
    {
        m_connectorEnabled = false;
        return;
    }

    m_log.DebugFormat("[XMLRPC-GROUPS-CONNECTOR]: Initializing {0}", this.Name);

    // The service URL is mandatory; without it the connector cannot operate.
    m_groupsServerURI = groupsConfig.GetString("GroupsServerURI", string.Empty);
    if (string.IsNullOrEmpty(m_groupsServerURI))
    {
        m_log.ErrorFormat("Please specify a valid URL for GroupsServerURI in OpenSim.ini, [Groups]");
        m_connectorEnabled = false;
        return;
    }

    m_disableKeepAlive = groupsConfig.GetBoolean("XmlRpcDisableKeepAlive", false);
    m_groupReadKey = groupsConfig.GetString("XmlRpcServiceReadKey", string.Empty);
    m_groupWriteKey = groupsConfig.GetString("XmlRpcServiceWriteKey", string.Empty);

    // A timeout of 0 disables the response cache (see XmlRpcCall).
    m_cacheTimeout = groupsConfig.GetInt("GroupsCacheTimeout", 30);
    if (m_cacheTimeout == 0)
    {
        m_log.WarnFormat("[XMLRPC-GROUPS-CONNECTOR]: Groups Cache Disabled.");
    }
    else
    {
        m_log.InfoFormat("[XMLRPC-GROUPS-CONNECTOR]: Groups Cache Timeout set to {0}.", m_cacheTimeout);
    }

    m_debugEnabled = groupsConfig.GetBoolean("DebugEnabled", false);

    // If we got all the config options we need, lets start'er'up
    m_memoryCache = new ExpiringCache<string, XmlRpcResponse>();
    m_connectorEnabled = true;
}
/// <summary>Log connector shutdown; there are no resources to release.</summary>
public void Close()
{
    m_log.DebugFormat("[XMLRPC-GROUPS-CONNECTOR]: Closing {0}", Name);
}
/// <summary>Register this connector with a newly added region, if enabled.</summary>
public void AddRegion(OpenSim.Region.Framework.Scenes.Scene scene)
{
    if (!m_connectorEnabled)
    {
        return;
    }

    // Capture a user account service from the first region we see.
    if (m_accountService == null)
    {
        m_accountService = scene.UserAccountService;
    }

    scene.RegisterModuleInterface<IGroupsServicesConnector>(this);
}
/// <summary>Unregister from the region, but only if we are its active connector.</summary>
public void RemoveRegion(OpenSim.Region.Framework.Scenes.Scene scene)
{
    IGroupsServicesConnector active = scene.RequestModuleInterface<IGroupsServicesConnector>();
    if (active == this)
    {
        scene.UnregisterModuleInterface<IGroupsServicesConnector>(this);
    }
}
/// <summary>Called when the region finishes loading; currently a no-op.</summary>
public void RegionLoaded(OpenSim.Region.Framework.Scenes.Scene scene)
{
// TODO: May want to consider listenning for Agent Connections so we can pre-cache group info
// scene.EventManager.OnNewClient += OnNewClient;
}
#endregion
#region ISharedRegionModule Members
/// <summary>Called after all shared modules are initialised; nothing to do here.</summary>
public void PostInitialise()
{
// NoOp
}
#endregion
#region IGroupsServicesConnector Members
/// <summary>
/// Create a Group, including Everyone and Owners Role, place FounderID in both groups, select Owner as selected role, and newly created group as agent's active role.
/// </summary>
/// <returns>The new group's UUID, or UUID.Zero when the remote call reports an error.</returns>
public UUID CreateGroup(UUID requestingAgentID, string name, string charter, bool showInList, UUID insigniaID,
                        int membershipFee, bool openEnrollment, bool allowPublish,
                        bool maturePublish, UUID founderID)
{
    UUID GroupID = UUID.Random();
    UUID OwnerRoleID = UUID.Random();

    Hashtable param = new Hashtable();
    param["GroupID"] = GroupID.ToString();
    param["Name"] = name;
    param["Charter"] = charter;
    // Booleans are marshalled as 1/0; redundant "== true" comparisons removed.
    param["ShowInList"] = showInList ? 1 : 0;
    param["InsigniaID"] = insigniaID.ToString();
    param["MembershipFee"] = membershipFee;
    param["OpenEnrollment"] = openEnrollment ? 1 : 0;
    param["AllowPublish"] = allowPublish ? 1 : 0;
    param["MaturePublish"] = maturePublish ? 1 : 0;
    param["FounderID"] = founderID.ToString();
    param["EveryonePowers"] = ((ulong)DefaultEveryonePowers).ToString();
    param["OwnerRoleID"] = OwnerRoleID.ToString();
    param["OwnersPowers"] = ((ulong)DefaultOwnerPowers).ToString();

    Hashtable respData = XmlRpcCall(requestingAgentID, "groups.createGroup", param);
    if (respData.Contains("error"))
    {
        // UUID is not nullable
        return UUID.Zero;
    }
    return UUID.Parse((string)respData["GroupID"]);
}
/// <summary>
/// Update a group's profile fields on the remote groups service.
/// </summary>
public void UpdateGroup(UUID requestingAgentID, UUID groupID, string charter, bool showInList,
                        UUID insigniaID, int membershipFee, bool openEnrollment,
                        bool allowPublish, bool maturePublish)
{
    Hashtable param = new Hashtable();
    param["GroupID"] = groupID.ToString();
    param["Charter"] = charter;
    // Booleans are marshalled as 1/0; redundant "== true" comparisons removed.
    param["ShowInList"] = showInList ? 1 : 0;
    param["InsigniaID"] = insigniaID.ToString();
    param["MembershipFee"] = membershipFee;
    param["OpenEnrollment"] = openEnrollment ? 1 : 0;
    param["AllowPublish"] = allowPublish ? 1 : 0;
    param["MaturePublish"] = maturePublish ? 1 : 0;
    XmlRpcCall(requestingAgentID, "groups.updateGroup", param);
}
/// <summary>Create a new role within a group.</summary>
public void AddGroupRole(UUID requestingAgentID, UUID groupID, UUID roleID, string name, string description,
                         string title, ulong powers)
{
    Hashtable args = new Hashtable
    {
        { "GroupID", groupID.ToString() },
        { "RoleID", roleID.ToString() },
        { "Name", name },
        { "Description", description },
        { "Title", title },
        { "Powers", powers.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.addRoleToGroup", args);
}
/// <summary>Delete a role from a group.</summary>
public void RemoveGroupRole(UUID requestingAgentID, UUID groupID, UUID roleID)
{
    Hashtable args = new Hashtable
    {
        { "GroupID", groupID.ToString() },
        { "RoleID", roleID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.removeRoleFromGroup", args);
}
/// <summary>
/// Update a group role. Null name/description/title are omitted from the
/// request, which presumably leaves those fields unchanged on the server —
/// confirm against the Flotsam service.
/// </summary>
public void UpdateGroupRole(UUID requestingAgentID, UUID groupID, UUID roleID, string name, string description,
                            string title, ulong powers)
{
    Hashtable args = new Hashtable
    {
        { "GroupID", groupID.ToString() },
        { "RoleID", roleID.ToString() }
    };
    if (name != null)
    {
        args["Name"] = name;
    }
    if (description != null)
    {
        args["Description"] = description;
    }
    if (title != null)
    {
        args["Title"] = title;
    }
    args["Powers"] = powers.ToString();
    XmlRpcCall(requestingAgentID, "groups.updateGroupRole", args);
}
/// <summary>
/// Look up a group by ID and/or name.
/// </summary>
/// <returns>The group record, or null when the call fails or no group matches.</returns>
public GroupRecord GetGroupRecord(UUID requestingAgentID, UUID GroupID, string GroupName)
{
    Hashtable param = new Hashtable();
    if (GroupID != UUID.Zero)
    {
        param["GroupID"] = GroupID.ToString();
    }
    if (!string.IsNullOrEmpty(GroupName))
    {
        // GroupName is already a string; redundant ToString() removed.
        param["Name"] = GroupName;
    }

    Hashtable respData = XmlRpcCall(requestingAgentID, "groups.getGroup", param);
    if (respData.Contains("error"))
    {
        return null;
    }
    return GroupProfileHashtableToGroupRecord(respData);
}
/// <summary>
/// Fetch a group profile and overlay the requesting member's title and powers.
/// </summary>
/// <returns>A default GroupProfileData when the group lookup fails.</returns>
public GroupProfileData GetMemberGroupProfile(UUID requestingAgentID, UUID GroupID, UUID AgentID)
{
    Hashtable param = new Hashtable();
    param["GroupID"] = GroupID.ToString();

    Hashtable respData = XmlRpcCall(requestingAgentID, "groups.getGroup", param);
    if (respData.Contains("error"))
    {
        // GroupProfileData is not nullable
        return new GroupProfileData();
    }

    GroupProfileData MemberGroupProfile = GroupProfileHashtableToGroupProfileData(respData);

    // BUGFIX: GetAgentGroupMembership returns null when the membership lookup
    // fails; the original code dereferenced it unconditionally and could throw
    // NullReferenceException. Only overlay member data when it exists.
    GroupMembershipData MemberInfo = GetAgentGroupMembership(requestingAgentID, AgentID, GroupID);
    if (MemberInfo != null)
    {
        MemberGroupProfile.MemberTitle = MemberInfo.GroupTitle;
        MemberGroupProfile.PowersMask = MemberInfo.GroupPowers;
    }

    return MemberGroupProfile;
}
/// <summary>Make GroupID the agent's active group.</summary>
public void SetAgentActiveGroup(UUID requestingAgentID, UUID AgentID, UUID GroupID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.setAgentActiveGroup", args);
}
/// <summary>Select the agent's active role within a group.</summary>
public void SetAgentActiveGroupRole(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() },
        { "SelectedRoleID", RoleID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.setAgentGroupInfo", args);
}
/// <summary>Update the agent's per-group notice/profile preferences.</summary>
public void SetAgentGroupInfo(UUID requestingAgentID, UUID AgentID, UUID GroupID, bool AcceptNotices, bool ListInProfile)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() },
        { "AcceptNotices", AcceptNotices ? "1" : "0" },
        { "ListInProfile", ListInProfile ? "1" : "0" }
    };
    XmlRpcCall(requestingAgentID, "groups.setAgentGroupInfo", args);
}
/// <summary>Record a pending group invitation for an agent.</summary>
public void AddAgentToGroupInvite(UUID requestingAgentID, UUID inviteID, UUID groupID, UUID roleID, UUID agentID)
{
    Hashtable args = new Hashtable
    {
        { "InviteID", inviteID.ToString() },
        { "AgentID", agentID.ToString() },
        { "RoleID", roleID.ToString() },
        { "GroupID", groupID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.addAgentToGroupInvite", args);
}
/// <summary>
/// Fetch a pending invitation by ID; returns null when it does not exist
/// or the call fails.
/// </summary>
public GroupInviteInfo GetAgentToGroupInvite(UUID requestingAgentID, UUID inviteID)
{
    Hashtable args = new Hashtable { { "InviteID", inviteID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getAgentToGroupInvite", args);
    if (reply.Contains("error"))
    {
        return null;
    }

    GroupInviteInfo invite = new GroupInviteInfo();
    invite.InviteID = inviteID;
    invite.GroupID = UUID.Parse((string)reply["GroupID"]);
    invite.RoleID = UUID.Parse((string)reply["RoleID"]);
    invite.AgentID = UUID.Parse((string)reply["AgentID"]);
    return invite;
}
/// <summary>Delete a pending group invitation.</summary>
public void RemoveAgentToGroupInvite(UUID requestingAgentID, UUID inviteID)
{
    Hashtable args = new Hashtable { { "InviteID", inviteID.ToString() } };
    XmlRpcCall(requestingAgentID, "groups.removeAgentToGroupInvite", args);
}
/// <summary>Add an agent to a group with the given role.</summary>
public void AddAgentToGroup(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() },
        { "RoleID", RoleID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.addAgentToGroup", args);
}
/// <summary>Remove an agent from a group.</summary>
public void RemoveAgentFromGroup(UUID requestingAgentID, UUID AgentID, UUID GroupID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.removeAgentFromGroup", args);
}
/// <summary>Assign an agent to a role within a group.</summary>
public void AddAgentToGroupRole(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() },
        { "RoleID", RoleID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.addAgentToGroupRole", args);
}
/// <summary>Remove an agent from a role within a group.</summary>
public void RemoveAgentFromGroupRole(UUID requestingAgentID, UUID AgentID, UUID GroupID, UUID RoleID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() },
        { "RoleID", RoleID.ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.removeAgentFromGroupRole", args);
}
/// <summary>
/// Search the groups directory; returns an empty list when the call fails.
/// </summary>
public List<DirGroupsReplyData> FindGroups(UUID requestingAgentID, string search)
{
    Hashtable param = new Hashtable();
    param["Search"] = search;

    Hashtable respData = XmlRpcCall(requestingAgentID, "groups.findGroups", param);

    List<DirGroupsReplyData> findings = new List<DirGroupsReplyData>();
    if (!respData.Contains("error"))
    {
        Hashtable results = (Hashtable)respData["results"];
        foreach (Hashtable groupFind in results.Values)
        {
            DirGroupsReplyData data = new DirGroupsReplyData();
            // stray empty statement ("; ;") removed
            data.groupID = new UUID((string)groupFind["GroupID"]);
            data.groupName = (string)groupFind["Name"];
            data.members = int.Parse((string)groupFind["Members"]);
            // data.searchOrder = order;
            findings.Add(data);
        }
    }
    return findings;
}
/// <summary>
/// Fetch an agent's membership record for one group; null when the call fails.
/// </summary>
public GroupMembershipData GetAgentGroupMembership(UUID requestingAgentID, UUID AgentID, UUID GroupID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() }
    };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getAgentGroupMembership", args);
    if (reply.Contains("error"))
    {
        return null;
    }
    return HashTableToGroupMembershipData(reply);
}
/// <summary>
/// Fetch the agent's membership record for their active group; null on failure.
/// </summary>
public GroupMembershipData GetAgentActiveMembership(UUID requestingAgentID, UUID AgentID)
{
    Hashtable args = new Hashtable { { "AgentID", AgentID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getAgentActiveMembership", args);
    if (reply.Contains("error"))
    {
        return null;
    }
    return HashTableToGroupMembershipData(reply);
}
/// <summary>
/// Fetch all of an agent's group memberships; empty list when the call fails.
/// </summary>
public List<GroupMembershipData> GetAgentGroupMemberships(UUID requestingAgentID, UUID AgentID)
{
    Hashtable args = new Hashtable { { "AgentID", AgentID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getAgentGroupMemberships", args);

    List<GroupMembershipData> memberships = new List<GroupMembershipData>();
    if (reply.Contains("error"))
    {
        return memberships;
    }
    foreach (object row in reply.Values)
    {
        memberships.Add(HashTableToGroupMembershipData((Hashtable)row));
    }
    return memberships;
}
/// <summary>
/// Fetch the roles an agent holds within a group; empty list on failure.
/// </summary>
public List<GroupRolesData> GetAgentGroupRoles(UUID requestingAgentID, UUID AgentID, UUID GroupID)
{
    Hashtable args = new Hashtable
    {
        { "AgentID", AgentID.ToString() },
        { "GroupID", GroupID.ToString() }
    };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getAgentRoles", args);

    List<GroupRolesData> roles = new List<GroupRolesData>();
    if (reply.Contains("error"))
    {
        return roles;
    }
    foreach (Hashtable row in reply.Values)
    {
        GroupRolesData role = new GroupRolesData();
        role.RoleID = new UUID((string)row["RoleID"]);
        role.Name = (string)row["Name"];
        role.Description = (string)row["Description"];
        role.Powers = ulong.Parse((string)row["Powers"]);
        role.Title = (string)row["Title"];
        roles.Add(role);
    }
    return roles;
}
/// <summary>
/// Fetch all roles defined for a group; empty list when the call fails.
/// </summary>
public List<GroupRolesData> GetGroupRoles(UUID requestingAgentID, UUID GroupID)
{
    Hashtable args = new Hashtable { { "GroupID", GroupID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getGroupRoles", args);

    List<GroupRolesData> roles = new List<GroupRolesData>();
    if (reply.Contains("error"))
    {
        return roles;
    }
    foreach (Hashtable row in reply.Values)
    {
        GroupRolesData role = new GroupRolesData();
        role.Description = (string)row["Description"];
        role.Members = int.Parse((string)row["Members"]);
        role.Name = (string)row["Name"];
        role.Powers = ulong.Parse((string)row["Powers"]);
        role.RoleID = new UUID((string)row["RoleID"]);
        role.Title = (string)row["Title"];
        roles.Add(role);
    }
    return roles;
}
/// <summary>
/// Fetch the membership roster of a group; empty list when the call fails.
/// </summary>
public List<GroupMembersData> GetGroupMembers(UUID requestingAgentID, UUID GroupID)
{
    Hashtable args = new Hashtable { { "GroupID", GroupID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getGroupMembers", args);

    List<GroupMembersData> members = new List<GroupMembersData>();
    if (reply.Contains("error"))
    {
        return members;
    }
    foreach (Hashtable row in reply.Values)
    {
        GroupMembersData member = new GroupMembersData();
        member.AcceptNotices = ((string)row["AcceptNotices"]) == "1";
        member.AgentID = new UUID((string)row["AgentID"]);
        member.Contribution = int.Parse((string)row["Contribution"]);
        member.IsOwner = ((string)row["IsOwner"]) == "1";
        member.ListInProfile = ((string)row["ListInProfile"]) == "1";
        member.AgentPowers = ulong.Parse((string)row["AgentPowers"]);
        member.Title = (string)row["Title"];
        members.Add(member);
    }
    return members;
}
/// <summary>
/// Fetch (agent, role) pairs for a group; empty list when the call fails.
/// </summary>
public List<GroupRoleMembersData> GetGroupRoleMembers(UUID requestingAgentID, UUID GroupID)
{
    Hashtable args = new Hashtable { { "GroupID", GroupID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getGroupRoleMembers", args);

    List<GroupRoleMembersData> members = new List<GroupRoleMembersData>();
    if (reply.Contains("error"))
    {
        return members;
    }
    foreach (Hashtable row in reply.Values)
    {
        GroupRoleMembersData pair = new GroupRoleMembersData();
        pair.MemberID = new UUID((string)row["AgentID"]);
        pair.RoleID = new UUID((string)row["RoleID"]);
        members.Add(pair);
    }
    return members;
}
/// <summary>
/// Fetch notice summaries for a group; empty list when the call fails.
/// Attachment info is not provided by this service, so HasAttachment is
/// always false and AssetType 0.
/// </summary>
public List<GroupNoticeData> GetGroupNotices(UUID requestingAgentID, UUID GroupID)
{
    Hashtable args = new Hashtable { { "GroupID", GroupID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getGroupNotices", args);

    List<GroupNoticeData> notices = new List<GroupNoticeData>();
    if (reply.Contains("error"))
    {
        return notices;
    }
    foreach (Hashtable row in reply.Values)
    {
        GroupNoticeData notice = new GroupNoticeData();
        notice.NoticeID = UUID.Parse((string)row["NoticeID"]);
        notice.Timestamp = uint.Parse((string)row["Timestamp"]);
        notice.FromName = (string)row["FromName"];
        notice.Subject = (string)row["Subject"];
        notice.HasAttachment = false;
        notice.AssetType = 0;
        notices.Add(notice);
    }
    return notices;
}
/// <summary>
/// Fetch one group notice, including its message body and binary bucket;
/// null when the notice does not exist or the call fails.
/// </summary>
public GroupNoticeInfo GetGroupNotice(UUID requestingAgentID, UUID noticeID)
{
    Hashtable args = new Hashtable { { "NoticeID", noticeID.ToString() } };

    Hashtable reply = XmlRpcCall(requestingAgentID, "groups.getGroupNotice", args);
    if (reply.Contains("error"))
    {
        return null;
    }

    GroupNoticeInfo notice = new GroupNoticeInfo();
    notice.GroupID = UUID.Parse((string)reply["GroupID"]);
    // Normalize a missing message to an empty string (same as the original's
    // trailing null check).
    notice.Message = (string)reply["Message"] ?? string.Empty;
    notice.BinaryBucket = Utils.HexStringToBytes((string)reply["BinaryBucket"], true);
    notice.noticeData.NoticeID = UUID.Parse((string)reply["NoticeID"]);
    notice.noticeData.Timestamp = uint.Parse((string)reply["Timestamp"]);
    notice.noticeData.FromName = (string)reply["FromName"];
    notice.noticeData.Subject = (string)reply["Subject"];
    notice.noticeData.HasAttachment = false;
    notice.noticeData.AssetType = 0;
    return notice;
}
/// <summary>
/// Post a new notice to a group. The binary bucket is hex-encoded for
/// transport; the timestamp is set server-side of this call to "now".
/// </summary>
public void AddGroupNotice(UUID requestingAgentID, UUID groupID, UUID noticeID, string fromName, string subject, string message, byte[] binaryBucket)
{
    Hashtable args = new Hashtable
    {
        { "GroupID", groupID.ToString() },
        { "NoticeID", noticeID.ToString() },
        { "FromName", fromName },
        { "Subject", subject },
        { "Message", message },
        { "BinaryBucket", OpenMetaverse.Utils.BytesToHexString(binaryBucket, "") },
        { "TimeStamp", ((uint)Util.UnixTimeSinceEpoch()).ToString() }
    };
    XmlRpcCall(requestingAgentID, "groups.addGroupNotice", args);
}
#endregion
#region GroupSessionTracking
/// <summary>
/// Forget any "dropped from chat" state for an agent across every tracked
/// session (intended to run at logon).
/// </summary>
public void ResetAgentGroupChatSessions(UUID agentID)
{
    foreach (List<UUID> droppedAgents in m_groupsAgentsDroppedFromChatSession.Values)
    {
        droppedAgents.Remove(agentID);
    }
}
/// <summary>
/// True when the agent appears in the invited-list for this group's chat session.
/// </summary>
public bool hasAgentBeenInvitedToGroupChatSession(UUID agentID, UUID groupID)
{
    // If we're tracking this group, and we can find them in the tracking, then they've been invited.
    // TryGetValue avoids the original ContainsKey + indexer double dictionary lookup.
    List<UUID> invited;
    return m_groupsAgentsInvitedToChatSession.TryGetValue(groupID, out invited)
        && invited.Contains(agentID);
}
/// <summary>
/// True when the agent appears in the dropped-list for this group's chat session.
/// </summary>
public bool hasAgentDroppedGroupChatSession(UUID agentID, UUID groupID)
{
    // If we're tracking drops for this group, and we find them, they've dropped.
    // TryGetValue avoids the original ContainsKey + indexer double dictionary lookup.
    List<UUID> dropped;
    return m_groupsAgentsDroppedFromChatSession.TryGetValue(groupID, out dropped)
        && dropped.Contains(agentID);
}
/// <summary>
/// Mark an agent as having dropped from a group chat session. A no-op (as in
/// the original) when the session is not being tracked yet.
/// </summary>
public void AgentDroppedFromGroupChatSession(UUID agentID, UUID groupID)
{
    // TryGetValue avoids the original ContainsKey + indexer double lookup.
    List<UUID> dropped;
    if (m_groupsAgentsDroppedFromChatSession.TryGetValue(groupID, out dropped))
    {
        // If not in dropped list, add
        if (!dropped.Contains(agentID))
        {
            dropped.Add(agentID);
        }
    }
}
/// <summary>
/// Ensure tracking exists for the session and clear any dropped state for the
/// agent being invited.
/// </summary>
public void AgentInvitedToGroupChatSession(UUID agentID, UUID groupID)
{
    // Add Session Status if it doesn't exist for this session
    CreateGroupChatSessionTracking(groupID);

    // List<T>.Remove is already a no-op when the item is absent, so the
    // original Contains() pre-check (a redundant linear scan) was dropped.
    m_groupsAgentsDroppedFromChatSession[groupID].Remove(agentID);

    // NOTE(review): nothing here adds agentID to
    // m_groupsAgentsInvitedToChatSession, yet hasAgentBeenInvitedToGroupChatSession
    // reads that list — confirm invitations are recorded elsewhere.
}
/// <summary>
/// Ensure both per-session tracking lists exist for a group; the two lists are
/// always created together, so checking one dictionary is sufficient.
/// </summary>
private void CreateGroupChatSessionTracking(UUID groupID)
{
    if (m_groupsAgentsDroppedFromChatSession.ContainsKey(groupID))
    {
        return;
    }
    m_groupsAgentsDroppedFromChatSession.Add(groupID, new List<UUID>());
    m_groupsAgentsInvitedToChatSession.Add(groupID, new List<UUID>());
}
#endregion
#region XmlRpcHashtableMarshalling
/// <summary>
/// Convert a "groups.getGroup" response hashtable into a GroupProfileData.
/// All numeric/boolean fields arrive as strings ("1"/"0" for booleans) and are
/// parsed here; a missing/invalid required key will throw from Parse.
/// </summary>
private GroupProfileData GroupProfileHashtableToGroupProfileData(Hashtable groupProfile)
{
GroupProfileData group = new GroupProfileData();
group.GroupID = UUID.Parse((string)groupProfile["GroupID"]);
group.Name = (string)groupProfile["Name"];
// Charter is the only optional field; leave the default when absent.
if (groupProfile["Charter"] != null)
{
group.Charter = (string)groupProfile["Charter"];
}
group.ShowInList = ((string)groupProfile["ShowInList"]) == "1";
group.InsigniaID = UUID.Parse((string)groupProfile["InsigniaID"]);
group.MembershipFee = int.Parse((string)groupProfile["MembershipFee"]);
group.OpenEnrollment = ((string)groupProfile["OpenEnrollment"]) == "1";
group.AllowPublish = ((string)groupProfile["AllowPublish"]) == "1";
group.MaturePublish = ((string)groupProfile["MaturePublish"]) == "1";
group.FounderID = UUID.Parse((string)groupProfile["FounderID"]);
group.OwnerRole = UUID.Parse((string)groupProfile["OwnerRoleID"]);
group.GroupMembershipCount = int.Parse((string)groupProfile["GroupMembershipCount"]);
group.GroupRolesCount = int.Parse((string)groupProfile["GroupRolesCount"]);
return group;
}
/// <summary>
/// Convert a "groups.getGroup" response hashtable into a GroupRecord.
/// Mirrors GroupProfileHashtableToGroupProfileData but without the
/// membership/role counts.
/// </summary>
private GroupRecord GroupProfileHashtableToGroupRecord(Hashtable groupProfile)
{
GroupRecord group = new GroupRecord();
group.GroupID = UUID.Parse((string)groupProfile["GroupID"]);
group.GroupName = groupProfile["Name"].ToString();
// Charter is optional; leave the default when absent.
if (groupProfile["Charter"] != null)
{
group.Charter = (string)groupProfile["Charter"];
}
group.ShowInList = ((string)groupProfile["ShowInList"]) == "1";
group.GroupPicture = UUID.Parse((string)groupProfile["InsigniaID"]);
group.MembershipFee = int.Parse((string)groupProfile["MembershipFee"]);
group.OpenEnrollment = ((string)groupProfile["OpenEnrollment"]) == "1";
group.AllowPublish = ((string)groupProfile["AllowPublish"]) == "1";
group.MaturePublish = ((string)groupProfile["MaturePublish"]) == "1";
group.FounderID = UUID.Parse((string)groupProfile["FounderID"]);
group.OwnerRoleID = UUID.Parse((string)groupProfile["OwnerRoleID"]);
return group;
}
/// <summary>
/// Convert a membership response hashtable into a GroupMembershipData.
/// "Active" is computed by comparing the record's GroupID with the agent's
/// ActiveGroupID from the same response.
/// </summary>
private static GroupMembershipData HashTableToGroupMembershipData(Hashtable respData)
{
    GroupMembershipData data = new GroupMembershipData();
    data.AcceptNotices = ((string)respData["AcceptNotices"] == "1");
    data.Contribution = int.Parse((string)respData["Contribution"]);
    data.ListInProfile = ((string)respData["ListInProfile"] == "1");
    data.ActiveRole = new UUID((string)respData["SelectedRoleID"]);
    data.GroupTitle = (string)respData["Title"];
    data.GroupPowers = ulong.Parse((string)respData["GroupPowers"]);

    // Is this group the agent's active group
    // (duplicate second assignment of data.GroupID removed)
    data.GroupID = new UUID((string)respData["GroupID"]);
    UUID ActiveGroup = new UUID((string)respData["ActiveGroupID"]);
    data.Active = data.GroupID.Equals(ActiveGroup);

    data.AllowPublish = ((string)respData["AllowPublish"] == "1");
    if (respData["Charter"] != null)
    {
        data.Charter = (string)respData["Charter"];
    }
    data.FounderID = new UUID((string)respData["FounderID"]);
    data.GroupName = (string)respData["GroupName"];
    data.GroupPicture = new UUID((string)respData["InsigniaID"]);
    data.MaturePublish = ((string)respData["MaturePublish"] == "1");
    data.MembershipFee = int.Parse((string)respData["MembershipFee"]);
    data.OpenEnrollment = ((string)respData["OpenEnrollment"] == "1");
    data.ShowInList = ((string)respData["ShowInList"] == "1");
    return data;
}
#endregion
/// <summary>
/// Encapsulate the XmlRpc call to standardize security and error handling.
/// Adds the requesting agent's identity and the read/write keys to every
/// request, caches "groups.get*" responses for m_cacheTimeout seconds, and
/// converts transport or shape errors into a hashtable containing an "error"
/// key so callers can use a single failure check.
/// </summary>
/// <param name="requestingAgentID">Agent on whose behalf the call is made.</param>
/// <param name="function">Remote method name, e.g. "groups.getGroup".</param>
/// <param name="param">Request parameters; mutated here (auth keys are added).</param>
/// <returns>The response hashtable, or a hashtable with an "error" entry.</returns>
private Hashtable XmlRpcCall(UUID requestingAgentID, string function, Hashtable param)
{
XmlRpcResponse resp = null;
string CacheKey = null;
// Only bother with the cache if it isn't disabled.
if (m_cacheTimeout > 0)
{
if (!function.StartsWith("groups.get"))
{
// Any and all updates cause the cache to clear
m_memoryCache.Clear();
}
else
{
// Cache key is agent + function + every non-null parameter, so distinct
// queries never collide.
StringBuilder sb = new StringBuilder(requestingAgentID + function);
foreach (object key in param.Keys)
{
if (param[key] != null)
{
sb.AppendFormat(",{0}:{1}", key.ToString(), param[key].ToString());
}
}
CacheKey = sb.ToString();
m_memoryCache.TryGetValue(CacheKey, out resp);
}
}
// resp is non-null only on a cache hit; otherwise perform the remote call.
if (resp == null)
{
if (m_debugEnabled)
m_log.DebugFormat("[XMLRPC-GROUPS-CONNECTOR]: Cache miss for key {0}", CacheKey);
string UserService;
UUID SessionID;
GetClientGroupRequestID(requestingAgentID, out UserService, out SessionID);
// Authentication material added to the outgoing request (mutates param).
param.Add("RequestingAgentID", requestingAgentID.ToString());
param.Add("RequestingAgentUserService", UserService);
param.Add("RequestingSessionID", SessionID.ToString());
param.Add("ReadKey", m_groupReadKey);
param.Add("WriteKey", m_groupWriteKey);
IList parameters = new ArrayList();
parameters.Add(param);
ConfigurableKeepAliveXmlRpcRequest req;
req = new ConfigurableKeepAliveXmlRpcRequest(function, parameters, m_disableKeepAlive);
try
{
// 10 second timeout on the remote call.
resp = req.Send(m_groupsServerURI, 10000);
if ((m_cacheTimeout > 0) && (CacheKey != null))
{
m_memoryCache.AddOrUpdate(CacheKey, resp, TimeSpan.FromSeconds(m_cacheTimeout));
}
}
catch (Exception e)
{
// Transport failure: log the raw response and request parameters,
// then synthesize an "error" response for the caller.
m_log.ErrorFormat(
"[XMLRPC-GROUPS-CONNECTOR]: An error has occured while attempting to access the XmlRpcGroups server method {0} at {1}",
function, m_groupsServerURI);
m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: {0}{1}", e.Message, e.StackTrace);
foreach (string ResponseLine in req.RequestResponse.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
{
m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: {0} ", ResponseLine);
}
foreach (string key in param.Keys)
{
m_log.WarnFormat("[XMLRPC-GROUPS-CONNECTOR]: {0} :: {1}", key, param[key].ToString());
}
Hashtable respData = new Hashtable();
respData.Add("error", e.ToString());
return respData;
}
}
if (resp.Value is Hashtable)
{
Hashtable respData = (Hashtable)resp.Value;
// "succeed" suppresses error logging for expected soft failures.
if (respData.Contains("error") && !respData.Contains("succeed"))
{
LogRespDataToConsoleError(respData);
}
return respData;
}
// Anything other than a hashtable is a protocol violation; log what we got
// and hand back a synthetic error response.
m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: The XmlRpc server returned a {1} instead of a hashtable for {0}", function, resp.Value.GetType().ToString());
if (resp.Value is ArrayList)
{
ArrayList al = (ArrayList)resp.Value;
m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: Contains {0} elements", al.Count);
foreach (object o in al)
{
m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: {0} :: {1}", o.GetType().ToString(), o.ToString());
}
}
else
{
m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: Function returned: {0}", resp.Value.ToString());
}
Hashtable error = new Hashtable();
error.Add("error", "invalid return value");
return error;
}
/// <summary>
/// Dump an error response to the log, one entry per key and one per line of
/// each (possibly multi-line) value.
/// </summary>
private void LogRespDataToConsoleError(Hashtable respData)
{
    m_log.Error("[XMLRPC-GROUPS-CONNECTOR]: Error:");
    foreach (string key in respData.Keys)
    {
        m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: Key: {0}", key);
        foreach (string line in respData[key].ToString().Split(new char[] { '\n' }))
        {
            m_log.ErrorFormat("[XMLRPC-GROUPS-CONNECTOR]: {0}", line);
        }
    }
}
/// <summary>
/// Group Request Tokens are an attempt to allow the groups service to authenticate
/// requests.
/// TODO: This broke after the big grid refactor, either find a better way, or discard this
/// Currently always yields an empty service URL and UUID.Zero session ID; the
/// original lookup logic is preserved below in comments.
/// </summary>
/// <param name="AgentID">Agent whose token is being requested.</param>
/// <param name="UserServiceURL">Always set to "" until reimplemented.</param>
/// <param name="SessionID">Always set to UUID.Zero until reimplemented.</param>
private void GetClientGroupRequestID(UUID AgentID, out string UserServiceURL, out UUID SessionID)
{
UserServiceURL = "";
SessionID = UUID.Zero;
// Need to rework this based on changes to User Services
/*
UserAccount userAccount = m_accountService.GetUserAccount(UUID.Zero,AgentID);
if (userAccount == null)
{
// This should be impossible. If I've been passed a reference to a client
// that client should be registered with the UserService. So something
// is horribly wrong somewhere.
m_log.WarnFormat("[GROUPS]: Could not find a UserServiceURL for {0}", AgentID);
}
else if (userProfile is ForeignUserProfileData)
{
// They aren't from around here
ForeignUserProfileData fupd = (ForeignUserProfileData)userProfile;
UserServiceURL = fupd.UserServerURI;
SessionID = fupd.CurrentAgent.SessionID;
}
else
{
// They're a local user, use this:
UserServiceURL = m_commManager.NetworkServersInfo.UserURL;
SessionID = userProfile.CurrentAgent.SessionID;
}
*/
}
}
}
namespace Nwc.XmlRpc
{
using System;
using System.Collections;
using System.IO;
using System.Xml;
using System.Net;
using System.Text;
using System.Reflection;
/// <summary>Class supporting the request side of an XML-RPC transaction.</summary>
/// <summary>Class supporting the request side of an XML-RPC transaction.</summary>
/// <remarks>
/// Extends XmlRpcRequest so HTTP Keep-Alive can be switched off per request,
/// and captures the raw response body in RequestResponse when deserialization
/// fails so callers can log it.
/// NOTE(review): XmlRpcCall invokes req.Send(url, timeout) with two arguments,
/// which this class does not define — confirm that call actually resolves to
/// this override rather than the base class (which would ignore the
/// keep-alive setting).
/// </remarks>
public class ConfigurableKeepAliveXmlRpcRequest : XmlRpcRequest
{
    private XmlRpcRequestSerializer _serializer = new XmlRpcRequestSerializer();
    private XmlRpcResponseDeserializer _deserializer = new XmlRpcResponseDeserializer();
    // When true, requests are sent with "Connection: close".
    private bool _disableKeepAlive = true;

    // Raw response text, populated only when deserialization throws.
    public string RequestResponse = String.Empty;

    /// <summary>Instantiate an <c>XmlRpcRequest</c> for a specified method and parameters.</summary>
    /// <param name="methodName"><c>String</c> designating the <i>object.method</i> on the server the request
    /// should be directed to.</param>
    /// <param name="parameters"><c>ArrayList</c> of XML-RPC type parameters to invoke the request with.</param>
    /// <param name="disableKeepAlive">True to disable HTTP Keep-Alive on the request.</param>
    public ConfigurableKeepAliveXmlRpcRequest(String methodName, IList parameters, bool disableKeepAlive)
    {
        MethodName = methodName;
        _params = parameters;
        _disableKeepAlive = disableKeepAlive;
    }

    /// <summary>Send the request to the server.</summary>
    /// <param name="url"><c>String</c> The url of the XML-RPC server.</param>
    /// <returns><c>XmlRpcResponse</c> The response generated.</returns>
    public XmlRpcResponse Send(String url)
    {
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
        if (request == null)
            throw new XmlRpcException(XmlRpcErrorCodes.TRANSPORT_ERROR,
                                      XmlRpcErrorCodes.TRANSPORT_ERROR_MSG + ": Could not create request with " + url);
        request.Method = "POST";
        request.ContentType = "text/xml";
        request.AllowWriteStreamBuffering = true;
        request.KeepAlive = !_disableKeepAlive;

        using (Stream stream = request.GetRequestStream())
        {
            using (XmlTextWriter xml = new XmlTextWriter(stream, Encoding.ASCII))
            {
                _serializer.Serialize(xml, this);
                xml.Flush();
            }
        }

        XmlRpcResponse resp;
        using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
        {
            using (Stream s = response.GetResponseStream())
            {
                using (StreamReader input = new StreamReader(s))
                {
                    string inputXml = input.ReadToEnd();
                    try
                    {
                        resp = (XmlRpcResponse)_deserializer.Deserialize(inputXml);
                    }
                    catch (Exception)
                    {
                        // Keep the raw body for diagnostics, then rethrow.
                        // BUGFIX: "throw;" preserves the original stack trace;
                        // the original "throw e;" reset it (CA2200).
                        RequestResponse = inputXml;
                        throw;
                    }
                }
            }
        }
        return resp;
    }
}
}
| |
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System.Linq;
using Encog.ML.Data;
using Encog.ML.Data.Basic;
namespace Encog.Util.Arrayutil
{
/// <summary>
/// Produce a time-series from an array.
/// </summary>
///
public class TemporalWindowArray
{
/// <summary>
/// The fields that are to be processed. Populated by Analyze(); one entry per
/// column of the source data.
/// </summary>
///
private TemporalWindowField[] _fields;
/// <summary>
/// The size of the input window (number of consecutive samples used as input).
/// </summary>
///
private int _inputWindow;
/// <summary>
/// The size of the prediction window (number of samples to predict).
/// </summary>
///
private int _predictWindow;
/// <summary>
/// Construct a time-series from an array.
/// </summary>
///
/// <param name="theInputWindow">The size of the input window.</param>
/// <param name="thePredictWindow">The size of the predict window.</param>
public TemporalWindowArray(int theInputWindow,
                           int thePredictWindow)
{
    // The two window sizes are independent; order of assignment is arbitrary.
    _predictWindow = thePredictWindow;
    _inputWindow = theInputWindow;
}
/// <value>The fields that are to be processed (set by Analyze).</value>
public TemporalWindowField[] Fields
{
get { return _fields; }
}
/// <value>The input window size, in samples.</value>
public int InputWindow
{
get { return _inputWindow; }
set { _inputWindow = value; }
}
/// <value>The prediction window size, in samples.</value>
public int PredictWindow
{
get { return _predictWindow; }
set { _predictWindow = value; }
}
/// <summary>
/// Analyze the 1D array: a single field, used both as input and prediction.
/// Only the array's presence matters here; its values are not inspected.
/// </summary>
///
/// <param name="array">The array to analyze.</param>
public void Analyze(double[] array)
{
    var field = new TemporalWindowField("0");
    field.Action = TemporalType.InputAndPredict;
    _fields = new[] { field };
}
/// <summary>
/// Analyze the 2D array: one field per column of the first row, each used
/// both as input and prediction.
/// </summary>
///
/// <param name="array">The 2D array to analyze.</param>
public void Analyze(double[][] array)
{
    int columns = array[0].Length;
    _fields = new TemporalWindowField[columns];
    for (int col = 0; col < columns; col++)
    {
        var field = new TemporalWindowField(col.ToString());
        field.Action = TemporalType.InputAndPredict;
        _fields[col] = field;
    }
}
/// <summary>
/// Count the number of input fields, or fields used to predict.
/// </summary>
///
/// <returns>The number of input fields.</returns>
public int CountInputFields()
{
return _fields.Count(field => field.Input);
}
/// <summary>
/// Count the number of fields that are that are in the prediction.
/// </summary>
///
/// <returns>The number of fields predicted.</returns>
public int CountPredictFields()
{
return _fields.Count(field => field.Predict);
}
/// <summary>
/// Process the array.
/// </summary>
///
/// <param name="data">The array to process.</param>
/// <returns>A neural data set that contains the time-series.</returns>
public IMLDataSet Process(double[] data)
{
var result = new BasicMLDataSet();
int totalWindowSize = _inputWindow + _predictWindow;
int stopPoint = data.Length - totalWindowSize;
for (int i = 0; i < stopPoint; i++)
{
var inputData = new BasicMLData(_inputWindow);
var idealData = new BasicMLData(_predictWindow);
int index = i;
// handle input window
for (int j = 0; j < _inputWindow; j++)
{
inputData[j] = data[index++];
}
// handle predict window
for (int j = 0; j < _predictWindow; j++)
{
idealData[j] = data[index++];
}
var pair = new BasicMLDataPair(inputData, idealData);
result.Add(pair);
}
return result;
}
/// <summary>
/// Processes the specified data array in an IMLDataset.
/// You can send a [][] array directly with this method.
/// </summary>
/// <param name="data">The data.</param>
/// <returns></returns>
public IMLDataSet Process(double[][] data)
{
var result = new BasicMLDataSet();
foreach (double[] doubles in data)
{
result.Add(ProcessToPair(doubles));
}
return result;
}
/// <summary>
/// Process the data array and returns an IMLdatapair.
/// </summary>
///
/// <param name="data">The array to process.</param>
/// <returns>An IMLDatapair containing data.</returns>
public IMLDataPair ProcessToPair(double[] data)
{
// not sure this method works right: it's only using the last pair?
IMLDataPair pair = null;
int totalWindowSize = _inputWindow + _predictWindow;
int stopPoint = data.Length - totalWindowSize;
for (int i = 0; i < stopPoint; i++)
{
var inputData = new BasicMLData(_inputWindow);
var idealData = new BasicMLData(_predictWindow);
int index = i;
// handle input window
for (int j = 0; j < _inputWindow; j++)
{
inputData[j] = data[index++];
}
// handle predict window
for (int j = 0; j < _predictWindow; j++)
{
idealData[j] = data[index++];
}
pair = new BasicMLDataPair(inputData, idealData);
}
return pair;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Xml.XPath;
using System.Xml.Xsl.Qil;
using System.Xml.Xsl.Runtime;
using System.Xml.Xsl.XPath;
namespace System.Xml.Xsl.Xslt
{
using TypeFactory = XmlQueryTypeFactory;
using XPathFunctionInfo = XPathBuilder.FunctionInfo<XPathBuilder.FuncId>;
using XsltFunctionInfo = XPathBuilder.FunctionInfo<QilGenerator.FuncId>;
// ------------------------------- XslAstAnalyzer -------------------------------
// Computes, for every node of the stylesheet AST, the XslFlags it requires
// (focus members, XPath result types, side effects, ...) by walking the tree
// and then propagating flags through call / apply-templates / data-flow graphs.
internal class XslAstAnalyzer : XslVisitor<XslFlags>
{
    private CompilerScopeManager<VarPar> _scope;
    private Compiler _compiler;
#if DEBUG
    // List of all variables and parameters
    private readonly List<VarPar> _allVarPars = new List<VarPar>();
#endif
    // Current nesting depth of xsl:for-each while visiting (0 = not inside one)
    private int _forEachDepth = 0;
    private XPathAnalyzer _xpathAnalyzer;
    // Template or attribute set currently being visited (null at global level)
    private ProtoTemplate _currentTemplate;
    // Type donor of the last analyzed VarPar. Used for optimization of WithParam's.
    private VarPar _typeDonor;
    // Template dependencies
    // rev/fwd - Callee-to-Caller/Caller-to-Callee
    // 0/1 - for-each depth at the call site (0 = call not inside xsl:for-each)
    private Graph<ProtoTemplate> _revCall0Graph = new Graph<ProtoTemplate>();
    private Graph<ProtoTemplate> _revCall1Graph = new Graph<ProtoTemplate>();
    private Dictionary<Template, Stylesheet> _fwdApplyImportsGraph = new Dictionary<Template, Stylesheet>();
    private Dictionary<QilName, List<ProtoTemplate>> _revApplyTemplatesGraph = new Dictionary<QilName, List<ProtoTemplate>>();
    // Data flow graph: edges run from type donors to the VarPar's they type
    private Graph<VarPar> _dataFlow = new Graph<VarPar>();
    // Mapping (mode, param name) -> helper vertex in data flow graph
    private readonly Dictionary<ModeName, VarPar> _applyTemplatesParams = new Dictionary<ModeName, VarPar>();
// ---------------------------------- Graph<V> ----------------------------------
/// <summary>
/// Represents a graph using hashtable of adjacency lists.
/// </summary>
/// <typeparam name="V">Vertex type</typeparam>
internal class Graph<V> : Dictionary<V, List<V>>
    where V : XslNode
{
    private static readonly IList<V> s_empty = (new List<V>()).AsReadOnly();

    /// <summary>
    /// Returns the adjacency list of the given vertex, or a shared empty
    /// list when the vertex is unknown or has no outgoing edges.
    /// </summary>
    public IEnumerable<V> GetAdjList(V v)
    {
        return TryGetValue(v, out List<V> neighbors) && neighbors != null ? neighbors : s_empty;
    }

    /// <summary>
    /// Adds the directed edge v1 -> v2. Self-loops are ignored; duplicate
    /// edges are not filtered out.
    /// </summary>
    public void AddEdge(V v1, V v2)
    {
        // Ignore loops
        if ((object)v1 == (object)v2)
        {
            return;
        }
        List<V> neighbors;
        if (!TryGetValue(v1, out neighbors) || neighbors == null)
        {
            neighbors = this[v1] = new List<V>();
        }
        // NOTE: We do not check for duplicate edges here
        neighbors.Add(v2);
        // Register v2 as a vertex even if it has no outgoing edges yet
        if (!TryGetValue(v2, out neighbors))
        {
            this[v2] = null;
        }
    }

    /// <summary>
    /// Sets 'flag' on every vertex reachable from a vertex that already has it.
    /// XslFlags.Stop is (ab)used as the "visited" marker during the walk.
    /// </summary>
    public void PropagateFlag(XslFlags flag)
    {
        // Clear the visited marker first
        foreach (V vertex in Keys)
        {
            vertex.Flags &= ~XslFlags.Stop;
        }
        foreach (V vertex in Keys)
        {
            if ((vertex.Flags & XslFlags.Stop) == 0 && (vertex.Flags & flag) != 0)
            {
                DepthFirstSearch(vertex, flag);
            }
        }
    }

    // Recursively marks 'vertex' and everything reachable from it with 'flag'
    private void DepthFirstSearch(V vertex, XslFlags flag)
    {
        Debug.Assert((vertex.Flags & XslFlags.Stop) == 0, "Already visited this vertex");
        vertex.Flags |= (flag | XslFlags.Stop);
        foreach (V next in GetAdjList(vertex))
        {
            if ((next.Flags & XslFlags.Stop) == 0)
            {
                DepthFirstSearch(next, flag);
            }
            Debug.Assert((next.Flags & flag) == flag, "Flag was not set on an adjacent vertex");
        }
    }
}
/// <summary>
/// Composite dictionary key: the (mode, parameter name) pair identifying a
/// with-param passed via xsl:apply-templates. Both fields are expected to be
/// non-null (GetHashCode would throw otherwise — TODO confirm all call sites).
/// </summary>
internal struct ModeName
{
    public QilName Mode;
    public QilName Name;

    public ModeName(QilName mode, QilName name)
    {
        this.Mode = mode;
        this.Name = name;
    }

    // Override Equals alongside GetHashCode: the default ValueType.Equals falls
    // back to reflection for structs with reference-type fields, which is slow
    // for a dictionary key. Semantics are identical to the default field-wise
    // comparison (each field's Equals(object) is invoked).
    public override bool Equals(object obj)
    {
        return obj is ModeName other
            && Mode.Equals((object)other.Mode)
            && Name.Equals((object)other.Name);
    }

    public override int GetHashCode()
    {
        return Mode.GetHashCode() ^ Name.GetHashCode();
    }
}
/// <summary>
/// Entry point of the analyzer: computes flags for all templates, attribute
/// sets, global/local variables and parameters of the compiled stylesheet,
/// then propagates type, focus and side-effect flags through the dependency
/// graphs. Returns the "naked" current/position/last flags observed globally.
/// </summary>
public XslFlags Analyze(Compiler compiler)
{
    _compiler = compiler;
    _scope = new CompilerScopeManager<VarPar>();
    _xpathAnalyzer = new XPathAnalyzer(compiler, _scope);
    // Add global parameters and variables to the scope, they are visible everywhere
    foreach (VarPar par in compiler.ExternalPars)
    {
        _scope.AddVariable(par.Name, par);
    }
    foreach (VarPar var in compiler.GlobalVars)
    {
        _scope.AddVariable(var.Name, var);
    }
    // Visit global parameters and variables, but ignore calculated flags
    foreach (VarPar par in compiler.ExternalPars)
    {
        Visit(par);
        // External parameters may be bound to anything by the caller
        par.Flags |= XslFlags.AnyType;
    }
    foreach (VarPar var in compiler.GlobalVars)
    {
        Visit(var);
    }
    // Global "naked" current/position/last flags
    XslFlags result = XslFlags.None;
    // Visit templates and attribute sets
    foreach (ProtoTemplate tmpl in compiler.AllTemplates)
    {
        _currentTemplate = tmpl;
        result |= Visit(tmpl);
    }
    // At this point for every local parameter we know whether its default value could be used
    // by one of the callers of its template. Update flags for local parameters accordingly.
    foreach (ProtoTemplate tmpl in compiler.AllTemplates)
    {
        foreach (XslNode instr in tmpl.Content)
        {
            // Take care of a bizarre case <xsl:template match="/" xml:space="preserve"> <xsl:param name="par"/>
            if (instr.NodeType == XslNodeType.Text)
            {
                continue;
            }
            // xsl:param's can only appear at the start of the template content
            if (instr.NodeType != XslNodeType.Param)
            {
                break;
            }
            VarPar par = (VarPar)instr;
            if ((par.Flags & XslFlags.MayBeDefault) != 0)
            {
                par.Flags |= par.DefValueFlags;
            }
        }
    }
    // Infer XPath types for all variables and local parameters by propagating literal
    // types Rtf, Nodeset, Node, Boolean, Number, String through the data flow graph.
    for (int flag = (int)XslFlags.Rtf; flag != 0; flag >>= 1)
    {
        _dataFlow.PropagateFlag((XslFlags)flag);
    }
    _dataFlow = null;
    // We need to follow revCall0Graph graph to propagate focus flags. But first complete
    // dependency graph with fwdApplyImportsGraph
    foreach (KeyValuePair<Template, Stylesheet> pair in _fwdApplyImportsGraph)
    {
        foreach (Stylesheet import in pair.Value.Imports)
        {
            AddImportDependencies(import, /*focusDonor:*/pair.Key);
        }
    }
    _fwdApplyImportsGraph = null; // Finally done with this.
    if ((result & XslFlags.Current) != 0)
    {
        _revCall0Graph.PropagateFlag(XslFlags.Current);
    }
    if ((result & XslFlags.Position) != 0)
    {
        _revCall0Graph.PropagateFlag(XslFlags.Position);
    }
    if ((result & XslFlags.Last) != 0)
    {
        _revCall0Graph.PropagateFlag(XslFlags.Last);
    }
    if ((result & XslFlags.SideEffects) != 0)
    {
        PropagateSideEffectsFlag();
    }
    _revCall0Graph = null;
    _revCall1Graph = null;
    _revApplyTemplatesGraph = null;
    // We can do this only after all flags were propagated.
    // Otherwise we can miss case when flag comes to template from attribute-set
    FillModeFlags(compiler.Root.ModeFlags, compiler.Root.Imports[0]);
    return result;
}
// Templates whose mode matches the apply-imports donor can be invoked from it,
// so they inherit its focus; recurses through transitively imported stylesheets.
private void AddImportDependencies(Stylesheet sheet, Template focusDonor)
{
    foreach (Template candidate in sheet.Templates)
    {
        if (candidate.Mode.Equals(focusDonor.Mode))
        {
            _revCall0Graph.AddEdge(candidate, focusDonor);
        }
    }
    foreach (Stylesheet imported in sheet.Imports)
    {
        AddImportDependencies(imported, focusDonor);
    }
}

// Aggregates per-mode flags of 'sheet' (and everything it imports) into the
// flags dictionary of its importing parent.
private void FillModeFlags(Dictionary<QilName, XslFlags> parentModeFlags, Stylesheet sheet)
{
    // Recursion: first complete the ModeFlags of everything this sheet imports
    foreach (Stylesheet imported in sheet.Imports)
    {
        FillModeFlags(sheet.ModeFlags, imported);
    }
    // My parent depends on my templates and templates imported
    // 1. Merge this sheet's (now complete) ModeFlags into the parent's
    foreach (KeyValuePair<QilName, XslFlags> modeFlag in sheet.ModeFlags)
    {
        XslFlags known;
        parentModeFlags.TryGetValue(modeFlag.Key, out known); // 'known' is 0 when absent
        parentModeFlags[modeFlag.Key] = known | modeFlag.Value;
    }
    // 2. Merge the focus/side-effect flags of this sheet's own match templates
    foreach (Template tmpl in sheet.Templates)
    {
        Debug.Assert(tmpl.Match != null);
        XslFlags templateFlags = tmpl.Flags & (XslFlags.FocusFilter | XslFlags.SideEffects);
        if (templateFlags != 0)
        {
            XslFlags known;
            parentModeFlags.TryGetValue(tmpl.Mode, out known);
            parentModeFlags[tmpl.Mode] = known | templateFlags;
        }
    }
}
/// <summary>
/// Dispatches to the node-specific visitor inside the node's namespace scope,
/// then registers xsl:variable/xsl:param names in the enclosing scope.
/// </summary>
protected override XslFlags Visit(XslNode node)
{
    _scope.EnterScope(node.Namespaces);
    XslFlags flags = base.Visit(node);
    _scope.ExitScope();
    // Local variables and parameters bind their names in the *outer* scope
    if (_currentTemplate != null &&
        (node.NodeType == XslNodeType.Variable || node.NodeType == XslNodeType.Param))
    {
        _scope.AddVariable(node.Name, (VarPar)node);
    }
    Debug.Assert(
        (flags & XslFlags.TypeFilter & ~XslFlags.Rtf) == 0,
        "Instructions always return Rtf. node=" + node.NodeType.ToString() + " result=" + flags.ToString()
    );
    return flags;
}

/// <summary>ORs together the flags of every child of the given node.</summary>
protected override XslFlags VisitChildren(XslNode node)
{
    XslFlags flags = XslFlags.None;
    foreach (XslNode child in node.Content)
    {
        flags |= this.Visit(child); // recurses through the subtree
    }
    return flags;
}

protected override XslFlags VisitAttributeSet(AttributeSet node)
{
    // @use-attribute-sets was processed into a sequence of UseAttributeSet nodes,
    // which were prepended to the content of node
    return node.Flags = VisitChildren(node);
}

protected override XslFlags VisitTemplate(Template node)
{
    // @match does not affect any flags
    return node.Flags = VisitChildren(node);
}
protected override XslFlags VisitApplyImports(XslNode node)
{
    Debug.Assert(_forEachDepth == 0, "xsl:apply-imports cannot be inside of xsl:for-each");
    Debug.Assert(_currentTemplate is Template, "xsl:apply-imports can only occur within xsl:template");
    // Record which stylesheet's imports this template applies; the call-graph
    // edges are added later, in Analyze(), via AddImportDependencies
    _fwdApplyImportsGraph[(Template)_currentTemplate] = (Stylesheet)node.Arg;
    // xsl:apply-imports uses context node and is not in context of any for-each so it requires current
    return XslFlags.HasCalls | XslFlags.Current | XslFlags.Rtf;
}

/// <summary>
/// xsl:apply-templates: analyzes @select and the with-param/sort children, and
/// maintains the (mode, param name) helper vertices used for type inference.
/// </summary>
protected override XslFlags VisitApplyTemplates(XslNode node)
{
    Debug.Assert(node.Select != null, "Absent @select should be replaced with 'node()' in XsltLoader");
    XslFlags result = ProcessExpr(node.Select);
    foreach (XslNode instr in node.Content)
    {
        result |= Visit(instr);
        if (instr.NodeType == XslNodeType.WithParam)
        {
            ModeName mn = new ModeName(/*mode:*/node.Name, instr.Name);
            VarPar modePar;
            if (!_applyTemplatesParams.TryGetValue(mn, out modePar))
            {
                // First occurrence of this (mode, name) pair: create its helper vertex
                modePar = _applyTemplatesParams[mn] = AstFactory.WithParam(instr.Name);
            }
            if (_typeDonor != null)
            {
                // The with-param value is a plain variable reference: link types via data flow
                _dataFlow.AddEdge(_typeDonor, modePar);
            }
            else
            {
                modePar.Flags |= instr.Flags & XslFlags.TypeFilter;
            }
        }
    }
    if (_currentTemplate != null)
    {
        AddApplyTemplatesEdge(/*mode:*/node.Name, _currentTemplate);
    }
    return XslFlags.HasCalls | XslFlags.Rtf | result;
}

protected override XslFlags VisitAttribute(NodeCtor node)
{
    // xsl:attribute: flags come from the name/namespace AVTs and the content
    return (
        XslFlags.Rtf |
        ProcessAvt(node.NameAvt) |
        ProcessAvt(node.NsAvt) |
        VisitChildren(node)
    );
}
/// <summary>
/// xsl:call-template: adds call-graph edges to the named target, visits the
/// with-param children, and matches them against the target's xsl:param's for
/// type inference and default-value tracking.
/// </summary>
protected override XslFlags VisitCallTemplate(XslNode node)
{
    XslFlags result = XslFlags.None;
    Template target;
    if (_compiler.NamedTemplates.TryGetValue(node.Name, out target))
    {
        Debug.Assert(target != null);
        if (_currentTemplate != null)
        {
            if (_forEachDepth == 0)
            {
                // Naked xsl:call-template, target would take its focus from currentTemplate
                _revCall0Graph.AddEdge(target, _currentTemplate);
            }
            else
            {
                // in other cases we need it as donor for side effects flag
                _revCall1Graph.AddEdge(target, _currentTemplate);
            }
        }
    }
    // Remember the type donor (if any) of each with-param, in document order
    VarPar[] typeDonors = new VarPar[node.Content.Count];
    int idx = 0;
    foreach (XslNode instr in node.Content)
    {
        Debug.Assert(instr.NodeType == XslNodeType.WithParam);
        result |= Visit(instr);
        typeDonors[idx++] = _typeDonor;
    }
    // For each xsl:param in the target template find the corresponding xsl:with-param, and:
    // a) if the type of xsl:with-param is known, add it to the type of xsl:param;
    // b) if value of xsl:with-param is a VarPar reference, add an edge connecting it with xsl:param
    // to the data flow graph.
    if (target != null)
    {
        foreach (XslNode instr in target.Content)
        {
            // Take care of a bizarre case <xsl:template match="/" xml:space="preserve"> <xsl:param name="par"/>
            if (instr.NodeType == XslNodeType.Text)
            {
                continue;
            }
            // xsl:param's can only appear at the start of the template content
            if (instr.NodeType != XslNodeType.Param)
            {
                break;
            }
            VarPar par = (VarPar)instr;
            VarPar found = null;
            idx = 0;
            foreach (XslNode withPar in node.Content)
            {
                if (withPar.Name.Equals(par.Name))
                {
                    found = (VarPar)withPar;
                    _typeDonor = typeDonors[idx];
                    break;
                }
                idx++;
            }
            if (found != null)
            {
                // Found corresponding xsl:with-param, check its type
                if (_typeDonor != null)
                {
                    // add an edge from its type donor to xsl:param
                    _dataFlow.AddEdge(_typeDonor, par);
                }
                else
                {
                    par.Flags |= found.Flags & XslFlags.TypeFilter;
                }
            }
            else
            {
                // No value was specified for this xsl:param, default value will be used for it
                par.Flags |= XslFlags.MayBeDefault;
            }
        }
    }
    return XslFlags.HasCalls | XslFlags.Rtf | result;
}
//protected override XslFlags VisitChoose(XslNode node) { return VisitChildren(node); }
protected override XslFlags VisitComment(XslNode node)
    => XslFlags.Rtf | VisitChildren(node);

// @use-attribute-sets was processed into a sequence of UseAttributeSet nodes,
// which were prepended to the content of node
protected override XslFlags VisitCopy(XslNode node)
    => XslFlags.Current | XslFlags.Rtf | VisitChildren(node);

protected override XslFlags VisitCopyOf(XslNode node)
    => XslFlags.Rtf | ProcessExpr(node.Select);

protected override XslFlags VisitElement(NodeCtor node)
{
    // @use-attribute-sets was processed into a sequence of UseAttributeSet nodes,
    // which were prepended to the content of node
    XslFlags flags = ProcessAvt(node.NameAvt);
    flags |= ProcessAvt(node.NsAvt);
    flags |= VisitChildren(node);
    return XslFlags.Rtf | flags;
}

// msxsl:error terminates the transform: type flags are irrelevant, and it is
// by definition a side effect
protected override XslFlags VisitError(XslNode node)
    => (VisitChildren(node) & ~XslFlags.TypeFilter) | XslFlags.SideEffects;

protected override XslFlags VisitForEach(XslNode node)
{
    XslFlags flags = ProcessExpr(node.Select);
    _forEachDepth++;
    foreach (XslNode child in node.Content)
    {
        XslFlags childFlags = Visit(child);
        // for-each creates a new focus, so focus flags of non-sort children
        // do not escape into the result (xsl:sort runs in the outer focus)
        flags |= child.NodeType == XslNodeType.Sort
            ? childFlags
            : childFlags & ~XslFlags.FocusFilter;
    }
    _forEachDepth--;
    return flags;
}

protected override XslFlags VisitIf(XslNode node)
    => ProcessExpr(node.Select) | VisitChildren(node);
/*
protected override XslFlags VisitKey(Key node) {
// @match and @use do not affect any flags
//ProcessPattern(node.Match);
//ProcessExpr(node.Use);
}
*/
//protected override XslFlags VisitList(XslNode node) { return VisitChildren(node); }
protected override XslFlags VisitLiteralAttribute(XslNode node)
{
    // Literal attribute: its value is an AVT stored in Select
    XslFlags flags = ProcessAvt(node.Select);
    flags |= VisitChildren(node);
    return XslFlags.Rtf | flags;
}

protected override XslFlags VisitLiteralElement(XslNode node)
    => XslFlags.Rtf | VisitChildren(node);

// xsl:message is inherently a side effect; its type flags are irrelevant
protected override XslFlags VisitMessage(XslNode node)
    => (VisitChildren(node) & ~XslFlags.TypeFilter) | XslFlags.SideEffects;

//protected override XslFlags VisitNop(XslNode node) { return VisitChildren(node); }

protected override XslFlags VisitNumber(Number node)
{
    XslFlags flags = ProcessPattern(node.Count);
    flags |= ProcessPattern(node.From);
    // Without @value the number is derived from the context node's position
    flags |= node.Value != null ? ProcessExpr(node.Value) : XslFlags.Current;
    flags |= ProcessAvt(node.Format);
    flags |= ProcessAvt(node.Lang);
    flags |= ProcessAvt(node.LetterValue);
    flags |= ProcessAvt(node.GroupingSeparator);
    flags |= ProcessAvt(node.GroupingSize);
    return XslFlags.Rtf | flags;
}

//protected override XslFlags VisitOtherwise(XslNode node) { return VisitChildren(node); }

protected override XslFlags VisitPI(XslNode node)
{
    XslFlags flags = ProcessAvt(node.Select);
    flags |= VisitChildren(node);
    return XslFlags.Rtf | flags;
}

protected override XslFlags VisitSort(Sort node)
{
    // @select is calculated in context of xsl:for-each or xsl:apply-templates,
    // so it does not affect focus flags
    XslFlags flags = ProcessExpr(node.Select) & ~XslFlags.FocusFilter;
    flags |= ProcessAvt(node.Lang);
    flags |= ProcessAvt(node.DataType);
    flags |= ProcessAvt(node.Order);
    flags |= ProcessAvt(node.CaseOrder);
    return flags;
}

protected override XslFlags VisitText(Text node)
    => XslFlags.Rtf | VisitChildren(node);

protected override XslFlags VisitUseAttributeSet(XslNode node)
{
    AttributeSet attSet;
    if (_compiler.AttributeSets.TryGetValue(node.Name, out attSet) && _currentTemplate != null)
    {
        // Outside any for-each the attribute set takes its focus from the current
        // template; otherwise the edge only matters for side-effect propagation.
        Graph<ProtoTemplate> callGraph = _forEachDepth == 0 ? _revCall0Graph : _revCall1Graph;
        callGraph.AddEdge(attSet, _currentTemplate);
    }
    return XslFlags.HasCalls | XslFlags.Rtf;
}

protected override XslFlags VisitValueOf(XslNode node)
    => XslFlags.Rtf | ProcessExpr(node.Select);

// value-of with disable-output-escaping="yes"
protected override XslFlags VisitValueOfDoe(XslNode node)
    => XslFlags.Rtf | ProcessExpr(node.Select);
/// <summary>
/// xsl:param: besides the usual VarPar processing, records that the default
/// value may be used when the template is invoked by built-in rules or
/// without a matching with-param.
/// </summary>
protected override XslFlags VisitParam(VarPar node)
{
    Template tmpl = _currentTemplate as Template;
    if (tmpl != null && tmpl.Match != null)
    {
        // This template has 'match' attribute and might be called from built-in template rules,
        // all xsl:param's will be defaulted in that case
        node.Flags |= XslFlags.MayBeDefault;
        ModeName mn = new ModeName(tmpl.Mode, node.Name);
        VarPar par;
        if (!_applyTemplatesParams.TryGetValue(mn, out par))
        {
            par = _applyTemplatesParams[mn] = AstFactory.WithParam(node.Name);
        }
        // Types flow from the per-mode with-param helper vertex into this param
        _dataFlow.AddEdge(par, node);
    }
    node.DefValueFlags = ProcessVarPar(node);
    // Type flags stay on the node; they do not bubble up to the template
    return node.DefValueFlags & ~XslFlags.TypeFilter;
}

protected override XslFlags VisitVariable(VarPar node)
{
    node.Flags = ProcessVarPar(node);
    return node.Flags & ~XslFlags.TypeFilter;
}

protected override XslFlags VisitWithParam(VarPar node)
{
    node.Flags = ProcessVarPar(node);
    return node.Flags & ~XslFlags.TypeFilter;
}

/// <summary>
/// Shared logic for xsl:variable/xsl:param/xsl:with-param: computes the value's
/// flags from @select and/or the element content, and sets _typeDonor when the
/// value is a plain variable reference.
/// </summary>
private XslFlags ProcessVarPar(VarPar node)
{
    XslFlags result;
#if DEBUG
    if (node.NodeType != XslNodeType.WithParam)
    {
        _allVarPars.Add(node);
    }
#endif
    if (node.Select != null)
    {
        if (node.Content.Count != 0)
        {
            // In case of incorrect stylesheet, variable or parameter may have both a 'select' attribute and non-empty content
            // NOTE: This code must be in sync with recovery logic in QilGenerator
            result = _xpathAnalyzer.Analyze(node.Select) | VisitChildren(node) | XslFlags.AnyType;
            _typeDonor = null;
        }
        else
        {
            result = _xpathAnalyzer.Analyze(node.Select);
            _typeDonor = _xpathAnalyzer.TypeDonor;
            if (_typeDonor != null && node.NodeType != XslNodeType.WithParam)
            {
                _dataFlow.AddEdge(_typeDonor, node);
            }
        }
    }
    else if (node.Content.Count != 0)
    {
        // No @select: the value is the result tree fragment built from the content
        result = XslFlags.Rtf | VisitChildren(node);
        _typeDonor = null;
    }
    else
    {
        // Neither @select nor content: the value is the empty string
        result = XslFlags.String;
        _typeDonor = null;
    }
    return result;
}
// Analyzes an expression, discarding XPath type flags
private XslFlags ProcessExpr(string expr)
    => _xpathAnalyzer.Analyze(expr) & ~XslFlags.TypeFilter;

// Analyzes an attribute value template, discarding XPath type flags
private XslFlags ProcessAvt(string avt)
    => _xpathAnalyzer.AnalyzeAvt(avt) & ~XslFlags.TypeFilter;

// Analyzes a match pattern; only variable usage matters, so both XPath type
// flags and focus flags are discarded
private XslFlags ProcessPattern(string pattern)
    => _xpathAnalyzer.Analyze(pattern) & ~XslFlags.TypeFilter & ~XslFlags.FocusFilter;
// Records that 'dependentTemplate' contains an xsl:apply-templates with the
// given mode. Only consecutive duplicate registrations are collapsed (cheap
// dedupe: compare against the last recorded entry only).
private void AddApplyTemplatesEdge(QilName mode, ProtoTemplate dependentTemplate)
{
    List<ProtoTemplate> templates;
    if (!_revApplyTemplatesGraph.TryGetValue(mode, out templates))
    {
        _revApplyTemplatesGraph.Add(mode, templates = new List<ProtoTemplate>());
    }
    else if (templates[templates.Count - 1] == dependentTemplate)
    {
        return; // this is a duplicate
    }
    templates.Add(dependentTemplate);
}
/// <summary>
/// Propagates XslFlags.SideEffects backwards through both call graphs and the
/// reverse apply-templates mapping; XslFlags.Stop serves as the visited marker.
/// </summary>
private void PropagateSideEffectsFlag()
{
    // Clean Stop flags
    foreach (ProtoTemplate t in _revCall0Graph.Keys)
    {
        t.Flags &= ~XslFlags.Stop;
    }
    foreach (ProtoTemplate t in _revCall1Graph.Keys)
    {
        t.Flags &= ~XslFlags.Stop;
    }
    // Start a DFS from every not-yet-visited vertex that already has SideEffects
    foreach (ProtoTemplate t in _revCall0Graph.Keys)
    {
        if ((t.Flags & XslFlags.Stop) == 0)
        {
            if ((t.Flags & XslFlags.SideEffects) != 0)
            {
                DepthFirstSearch(t);
            }
        }
    }
    foreach (ProtoTemplate t in _revCall1Graph.Keys)
    {
        if ((t.Flags & XslFlags.Stop) == 0)
        {
            if ((t.Flags & XslFlags.SideEffects) != 0)
            {
                DepthFirstSearch(t);
            }
        }
    }
}

// Marks t and everything reachable from it (through both call graphs and the
// apply-templates mode lists) with SideEffects; Stop guards against revisits.
private void DepthFirstSearch(ProtoTemplate t)
{
    Debug.Assert((t.Flags & XslFlags.Stop) == 0, "Already visited this vertex");
    t.Flags |= (XslFlags.SideEffects | XslFlags.Stop);
    List<ProtoTemplate> list;
    foreach (ProtoTemplate u in _revCall0Graph.GetAdjList(t))
    {
        if ((u.Flags & XslFlags.Stop) == 0)
        {
            DepthFirstSearch(u);
        }
        Debug.Assert((u.Flags & XslFlags.SideEffects) == XslFlags.SideEffects, "Flag was not set on an adjacent vertex");
    }
    foreach (ProtoTemplate u in _revCall1Graph.GetAdjList(t))
    {
        if ((u.Flags & XslFlags.Stop) == 0)
        {
            DepthFirstSearch(u);
        }
        Debug.Assert((u.Flags & XslFlags.SideEffects) == XslFlags.SideEffects, "Flag was not set on an adjacent vertex");
    }
    Template template = t as Template;
    if (
        template != null && // This ProtoTemplate is a Template
        _revApplyTemplatesGraph.TryGetValue(template.Mode, out list) // list - ProtoTemplates that have apply-templates mode="{template.Mode}"
    )
    {
        _revApplyTemplatesGraph.Remove(template.Mode); // to prevent recursion remove this list from dictionary
        foreach (ProtoTemplate u in list)
        {
            if ((u.Flags & XslFlags.Stop) == 0)
            {
                DepthFirstSearch(u);
            }
            Debug.Assert((u.Flags & XslFlags.SideEffects) == XslFlags.SideEffects, "Flag was not set on an adjacent vertex");
        }
    }
}
// ------------------------------- XPathAnalyzer --------------------------------
// Ignores all errors and warnings
// Error helper that swallows all errors and warnings; used when probing for
// script/extension functions where lookup failures are expected and must not
// surface to the user.
internal readonly struct NullErrorHelper : IErrorHelper
{
    public void ReportError(string res, params string[] args) { }
    public void ReportWarning(string res, params string[] args) { }
}
// Analyzes a single XPath expression or AVT and reports (through the
// IXPathBuilder callbacks) which focus members, result types and side effects
// it requires.
internal class XPathAnalyzer : IXPathBuilder<XslFlags>
{
    private readonly XPathParser<XslFlags> _xpathParser = new XPathParser<XslFlags>();
    private readonly CompilerScopeManager<VarPar> _scope;
    private readonly Compiler _compiler;
    // True if the expression needs XSLT's current() node
    private bool _xsltCurrentNeeded;
    // If the expression is just a reference to some VarPar, like "(($foo))",
    // then this field contains that VarPar, and null otherwise.
    private VarPar _typeDonor;

    /// <value>The VarPar this expression is a plain reference to, or null.</value>
    public VarPar TypeDonor
    {
        get { return _typeDonor; }
    }

    public XPathAnalyzer(Compiler compiler, CompilerScopeManager<VarPar> scope)
    {
        _compiler = compiler;
        _scope = scope;
    }
/// <summary>
/// Analyzes a complete XPath expression and returns the flags it requires.
/// On a parse error it returns the most pessimistic answer,
/// AnyType | FullFocus.
/// </summary>
public XslFlags Analyze(string xpathExpr)
{
    _typeDonor = null;
    if (xpathExpr == null)
    {
        return XslFlags.None;
    }
    try
    {
        _xsltCurrentNeeded = false;
        // The scanner constructor itself may throw, for example for the expression "'"
        XslFlags flags = _xpathParser.Parse(new XPathScanner(xpathExpr), this, LexKind.Eof);
        return _xsltCurrentNeeded ? flags | XslFlags.Current : flags;
    }
    catch (XslLoadException)
    {
        return XslFlags.AnyType | XslFlags.FullFocus;
    }
}
/// <summary>
/// Analyzes an attribute value template: scans for '{'-delimited XPath chunks
/// (treating "{{" as an escaped brace), ORs their flags together, and strips
/// type flags since an AVT always yields a string. Returns FullFocus (the
/// worst case) on parse errors.
/// </summary>
public XslFlags AnalyzeAvt(string source)
{
    _typeDonor = null;
    if (source == null)
    {
        return XslFlags.None;
    }
    try
    {
        _xsltCurrentNeeded = false;
        XslFlags result = XslFlags.None;
        int pos = 0;
        while (pos < source.Length)
        {
            pos = source.IndexOf('{', pos);
            if (pos == -1)
            {
                break; // no more AVTs
            }
            pos++;
            if (pos < source.Length && source[pos] == '{')
            { // "{{"
                pos++;
                continue;
            }
            if (pos < source.Length)
            { // '{' encountered, parse an expression
                XPathScanner scanner = new XPathScanner(source, pos);
                result |= _xpathParser.Parse(scanner, this, LexKind.RBrace);
                // Resume scanning right after the closing '}'
                pos = scanner.LexStart + 1;
            }
        }
        if (_xsltCurrentNeeded)
        {
            result |= XslFlags.Current;
        }
        return result & ~XslFlags.TypeFilter;
    }
    catch (XslLoadException)
    {
        return XslFlags.FullFocus;
    }
}
// Resolves a $prefix:name variable reference against the current scope.
// Returns null when the prefix is undeclared or the variable is unknown.
private VarPar ResolveVariable(string prefix, string name)
{
    string ns = ResolvePrefix(prefix);
    return ns == null ? null : _scope.LookupVariable(name, ns);
}

// Maps a namespace prefix to its URI. An empty prefix means "no namespace":
// the default namespace is deliberately ignored (ignoreDefaultNs == true).
// Returns null when the prefix is not declared.
private string ResolvePrefix(string prefix)
{
    return prefix.Length == 0 ? string.Empty : _scope.LookupNamespace(prefix);
}

public virtual void StartBuild()
{
    // Nothing to prepare before a parse
}

public virtual XslFlags EndBuild(XslFlags result)
{
    return result; // no post-processing needed
}

public virtual XslFlags String(string value)
{
    // A string literal is never a type donor
    _typeDonor = null;
    return XslFlags.String;
}

public virtual XslFlags Number(double value)
{
    // A numeric literal is never a type donor
    _typeDonor = null;
    return XslFlags.Number;
}
// Result-type flag contributed by each XPathOperator, indexed by (int)op
private static readonly XslFlags[] s_operatorType = {
    /*Unknown */ XslFlags.AnyType,
    /*Or */ XslFlags.Boolean,
    /*And */ XslFlags.Boolean,
    /*Eq */ XslFlags.Boolean,
    /*Ne */ XslFlags.Boolean,
    /*Lt */ XslFlags.Boolean,
    /*Le */ XslFlags.Boolean,
    /*Gt */ XslFlags.Boolean,
    /*Ge */ XslFlags.Boolean,
    /*Plus */ XslFlags.Number,
    /*Minus */ XslFlags.Number,
    /*Multiply */ XslFlags.Number,
    /*Divide */ XslFlags.Number,
    /*Modulo */ XslFlags.Number,
    /*UnaryMinus*/ XslFlags.Number,
    /*Union */ XslFlags.Nodeset,
};
public virtual XslFlags Operator(XPathOperator op, XslFlags left, XslFlags right)
{
    _typeDonor = null;
    Debug.Assert(op != XPathOperator.Unknown);
    // Keep the operands' focus/side-effect flags; the result type comes from the operator
    return ((left | right) & ~XslFlags.TypeFilter) | s_operatorType[(int)op];
}

public virtual XslFlags Axis(XPathAxis xpathAxis, XPathNodeType nodeType, string prefix, string name)
{
    _typeDonor = null;
    // Bare "." (self::node()) yields a single node; any other step yields a node-set
    bool isContextNode =
        xpathAxis == XPathAxis.Self && nodeType == XPathNodeType.All &&
        prefix == null && name == null;
    return XslFlags.Current | (isContextNode ? XslFlags.Node : XslFlags.Nodeset);
}

// "left/right"
public virtual XslFlags JoinStep(XslFlags left, XslFlags right)
{
    _typeDonor = null;
    // e.g. "ex:Foo(position())/Bar" — keep left's non-type flags; result is a node-set
    return (left & ~XslFlags.TypeFilter) | XslFlags.Nodeset;
}

// "nodeset[predicate]"
public virtual XslFlags Predicate(XslFlags nodeset, XslFlags predicate, bool isReverseStep)
{
    _typeDonor = null;
    // e.g. "ex:Foo(position())[Bar]" — the predicate contributes only its side effects
    return (nodeset & ~XslFlags.TypeFilter) | XslFlags.Nodeset | (predicate & XslFlags.SideEffects);
}

public virtual XslFlags Variable(string prefix, string name)
{
    _typeDonor = ResolveVariable(prefix, name);
    // An unresolved variable reference may have any type
    return _typeDonor == null ? XslFlags.AnyType : XslFlags.None;
}
/// <summary>
/// Computes the flags of a function call: ORs the non-type flags of all
/// arguments with the result-type flags of the function itself (built-in
/// XPath, built-in XSLT, msxsl/exslt extension, or user script function).
/// </summary>
public virtual XslFlags Function(string prefix, string name, IList<XslFlags> args)
{
    _typeDonor = null;
    XslFlags argsFlags = XslFlags.None;
    foreach (XslFlags t in args)
    {
        argsFlags |= t;
    }
    XslFlags funcFlags = XslFlags.None;
    if (prefix.Length == 0)
    {
        XPathFunctionInfo xpathFunc;
        XsltFunctionInfo xsltFunc;
        if (XPathBuilder.FunctionTable.TryGetValue(name, out xpathFunc))
        {
            XPathBuilder.FuncId funcId = xpathFunc.id;
            funcFlags = s_XPathFunctionFlags[(int)funcId];
            // These XPath functions default to the context node when called
            // without arguments, so they then require the current node
            if (args.Count == 0 && (
                funcId == XPathBuilder.FuncId.LocalName ||
                funcId == XPathBuilder.FuncId.NamespaceUri ||
                funcId == XPathBuilder.FuncId.Name ||
                funcId == XPathBuilder.FuncId.String ||
                funcId == XPathBuilder.FuncId.Number ||
                funcId == XPathBuilder.FuncId.StringLength ||
                funcId == XPathBuilder.FuncId.Normalize
            ))
            {
                funcFlags |= XslFlags.Current;
            }
        }
        else if (QilGenerator.FunctionTable.TryGetValue(name, out xsltFunc))
        {
            QilGenerator.FuncId funcId = xsltFunc.id;
            funcFlags = s_xsltFunctionFlags[(int)funcId];
            if (funcId == QilGenerator.FuncId.Current)
            {
                _xsltCurrentNeeded = true;
            }
            else if (funcId == QilGenerator.FuncId.GenerateId && args.Count == 0)
            {
                funcFlags |= XslFlags.Current;
            }
        }
    }
    else
    {
        string ns = ResolvePrefix(prefix);
        if (ns == XmlReservedNs.NsMsxsl)
        {
            switch (name)
            {
                case "node-set": funcFlags = XslFlags.Nodeset; break;
                case "string-compare": funcFlags = XslFlags.Number; break;
                case "utc": funcFlags = XslFlags.String; break;
                case "format-date": funcFlags = XslFlags.String; break;
                case "format-time": funcFlags = XslFlags.String; break;
                case "local-name": funcFlags = XslFlags.String; break;
                case "namespace-uri": funcFlags = XslFlags.String | XslFlags.Current; break;
                case "number": funcFlags = XslFlags.Number; break;
            }
        }
        else if (ns == XmlReservedNs.NsExsltCommon)
        {
            switch (name)
            {
                case "node-set": funcFlags = XslFlags.Nodeset; break;
                case "object-type": funcFlags = XslFlags.String; break;
            }
        }
        if (funcFlags == XslFlags.None)
        {
            // Unknown function. Can be script function or extension function
            funcFlags = XslFlags.AnyType;
            if (_compiler.Settings.EnableScript && ns != null)
            {
                XmlExtensionFunction scrFunc = _compiler.Scripts.ResolveFunction(name, ns, args.Count, new NullErrorHelper());
                if (scrFunc != null)
                {
                    // Map the script function's declared return type to a type flag
                    XmlQueryType xt = scrFunc.XmlReturnType;
                    if (xt == TypeFactory.StringX)
                    {
                        funcFlags = XslFlags.String;
                    }
                    else if (xt == TypeFactory.DoubleX)
                    {
                        funcFlags = XslFlags.Number;
                    }
                    else if (xt == TypeFactory.BooleanX)
                    {
                        funcFlags = XslFlags.Boolean;
                    }
                    else if (xt == TypeFactory.NodeNotRtf)
                    {
                        funcFlags = XslFlags.Node;
                    }
                    else if (xt == TypeFactory.NodeSDod)
                    {
                        funcFlags = XslFlags.Nodeset;
                    }
                    else if (xt == TypeFactory.ItemS)
                    {
                        funcFlags = XslFlags.AnyType;
                    }
                    else if (xt == TypeFactory.Empty)
                    {
                        funcFlags = XslFlags.Nodeset;
                    }
                    else
                    {
                        Debug.Fail("Unexpected XmlQueryType for script function: " + xt.ToString());
                    }
                }
            }
            // Extension/script functions may do anything: assume side effects
            funcFlags |= XslFlags.SideEffects;
        }
    }
    return (argsFlags & ~XslFlags.TypeFilter) | funcFlags;
}
#region XPath Function Flags
// Per-function flags for the built-in XPath 1.0 functions, indexed in the same
// order as the compiler's XPath function table. Each entry records the static
// return type (String/Number/Boolean/Nodeset) plus any focus dependencies
// (Last/Position/Current). Entries annotated "| XslFlags.Current if 0 args"
// additionally depend on the context node only when called with no arguments;
// that extra flag is applied at the call site, not here.
private static readonly XslFlags[] s_XPathFunctionFlags = {
    /*Last */ XslFlags.Number | XslFlags.Last,
    /*Position */ XslFlags.Number | XslFlags.Position,
    /*Count */ XslFlags.Number,
    /*LocalName */ XslFlags.String, // | XslFlags.Current if 0 args
    /*NamespaceUri */ XslFlags.String, // | XslFlags.Current if 0 args
    /*Name */ XslFlags.String, // | XslFlags.Current if 0 args
    /*String */ XslFlags.String, // | XslFlags.Current if 0 args
    /*Number */ XslFlags.Number, // | XslFlags.Current if 0 args
    /*Boolean */ XslFlags.Boolean,
    /*True */ XslFlags.Boolean,
    /*False */ XslFlags.Boolean,
    /*Not */ XslFlags.Boolean,
    /*Id */ XslFlags.Nodeset | XslFlags.Current,
    /*Concat */ XslFlags.String,
    /*StartsWith */ XslFlags.Boolean,
    /*Contains */ XslFlags.Boolean,
    /*SubstringBefore */ XslFlags.String,
    /*SubstringAfter */ XslFlags.String,
    /*Substring */ XslFlags.String,
    /*StringLength */ XslFlags.Number, // | XslFlags.Current if 0 args
    /*Normalize */ XslFlags.String, // | XslFlags.Current if 0 args
    /*Translate */ XslFlags.String,
    /*Lang */ XslFlags.Boolean | XslFlags.Current,
    /*Sum */ XslFlags.Number,
    /*Floor */ XslFlags.Number,
    /*Ceiling */ XslFlags.Number,
    /*Round */ XslFlags.Number,
};
#endregion
#region Xslt Function Flags
// Per-function flags for the XSLT 1.0 additional function library, indexed in
// the same order as the compiler's XSLT function table. Same encoding as
// s_XPathFunctionFlags above: static return type plus focus dependencies.
private static readonly XslFlags[] s_xsltFunctionFlags = {
    /*Current */ XslFlags.Node, // xsltCurrentNeeded = true
    /*Document */ XslFlags.Nodeset,
    /*Key */ XslFlags.Nodeset | XslFlags.Current,
    /*FormatNumber */ XslFlags.String,
    /*UnparsedEntityUri */ XslFlags.String, // | XslFlags.Current if it is implemented
    /*GenerateId */ XslFlags.String, // | XslFlags.Current if 0 args
    /*SystemProperty */ XslFlags.String | XslFlags.Number,
    /*ElementAvailable */ XslFlags.Boolean,
    /*FunctionAvailable */ XslFlags.Boolean,
};
#endregion
}
}
// ------------------------------- XslAstRewriter -------------------------------

/// <summary>
/// Post-compilation AST pass that splits oversized templates into smaller ones.
/// When the estimated "cost" of a node's children exceeds RewriteThreshold, the
/// trailing children are moved into a generated named template that is invoked
/// via xsl:call-template, with every in-scope variable forwarded through
/// xsl:with-param / xsl:param pairs. The goal (per the constants below) is to
/// keep each generated method's IL around ~2KB.
/// </summary>
internal sealed class XslAstRewriter
{
    // Tracks namespaces and variables in scope while walking a template's tree
    private CompilerScopeManager<VarPar> _scope;
    // Templates generated by Refactor() that still need to be cost-checked themselves
    private Stack<Template> _newTemplates;
    private Compiler _compiler;

    /// <summary>
    /// Entry point: cost-checks (and possibly refactors) every template in the
    /// compiler's set, then processes any templates generated along the way.
    /// </summary>
    public void Rewrite(Compiler compiler)
    {
        _compiler = compiler;
        _scope = new CompilerScopeManager<VarPar>();
        _newTemplates = new Stack<Template>();

        // Rewrite every template
        foreach (var template in compiler.AllTemplates)
        {
            _scope.EnterScope();
            CheckNodeCost(template);
            _scope.CheckEmpty();
        }

        // Add the new templates to the compiled set. CheckNodeCost below may push
        // further templates onto the stack, so re-test the count on every pass.
        while (_newTemplates.Count > 0)
        {
            var newtemplate = _newTemplates.Pop();
            // From Stylesheet.AddTemplate(newtemplate):
            compiler.AllTemplates.Add(newtemplate);
            compiler.NamedTemplates.Add(newtemplate.Name, newtemplate);

            _scope.EnterScope();
            CheckNodeCost(newtemplate);
            _scope.CheckEmpty();
        }
    }

    // Returns a cost based on an estimate of the number of locals required for the
    // given expression: one iterator for the expression itself, plus one per '/'
    // found while scanning two characters at a time (the leading character is
    // skipped, so a leading slash is ignored; a "//" pair inside one window is
    // counted once).
    private static int NodeCostForXPath(string xpath)
    {
        int cost = 0;
        if (xpath != null)
        {
            // Every XPath expression needs at least one iterator
            cost = IteratorNodeCost;
            // Count slashes, two characters at a time, ignore leading slash
            for (int t = 2; t < xpath.Length; t += 2)
            {
                if (xpath[t] == '/' || xpath[t - 1] == '/')
                {
                    cost += IteratorNodeCost;
                }
            }
        }
        return cost;
    }

    // These values should be changed to achieve methods with ~2KB IL
    private const int FixedNodeCost = 1; // should probably depend on node type
    private const int IteratorNodeCost = 2; // XPath iterators are more expensive
    private const int CallTemplateCost = 1; // best kept at a minimum, 1
    private const int RewriteThreshold = 100;

    // Bitmask over XslNodeType: all the node types for which the .Select member
    // holds an XPath expression (so NodeCostForXPath applies to them)
    private const int NodesWithSelect =
        (1 << (int)XslNodeType.Param) |
        (1 << (int)XslNodeType.Variable) |
        (1 << (int)XslNodeType.WithParam) |
        (1 << (int)XslNodeType.ApplyTemplates) |
        (1 << (int)XslNodeType.CopyOf) |
        (1 << (int)XslNodeType.ForEach) |
        (1 << (int)XslNodeType.If) |
        //(1 << (int)XslNodeType.Number) | // has XPath, but not in .Select member
        (1 << (int)XslNodeType.Sort) |
        (1 << (int)XslNodeType.ValueOf) |
        (1 << (int)XslNodeType.ValueOfDoe);

    // Bitmask over XslNodeType: all the node types which can have call-template as
    // a child and are therefore suitable for refactoring
    private const int ParentsOfCallTemplate =
        (1 << (int)XslNodeType.Attribute) |
        (1 << (int)XslNodeType.Comment) |
        (1 << (int)XslNodeType.Copy) |
        (1 << (int)XslNodeType.Element) |
        (1 << (int)XslNodeType.ForEach) |
        (1 << (int)XslNodeType.If) | // also used for xsl:when
        (1 << (int)XslNodeType.Message) |
        (1 << (int)XslNodeType.Otherwise) |
        (1 << (int)XslNodeType.Param) |
        (1 << (int)XslNodeType.PI) |
        (1 << (int)XslNodeType.Template) |
        (1 << (int)XslNodeType.Variable) |
        (1 << (int)XslNodeType.WithParam) |
        (1 << (int)XslNodeType.LiteralAttribute) |
        (1 << (int)XslNodeType.LiteralElement);

    // Tests whether the specified XslNodeType bit is set in the provided flags
    private static bool NodeTypeTest(XslNodeType nodetype, int flags)
    {
        return ((flags >> (int)nodetype) & 1) != 0;
    }

    /// <summary>
    /// Recursively estimates the cost of <paramref name="node"/> and its subtree.
    /// When the accumulated cost of a rewritable parent exceeds RewriteThreshold,
    /// the remaining children are split off into a new template via Refactor().
    /// Returns the (possibly reduced, post-refactoring) cost of this node.
    /// </summary>
    private int CheckNodeCost(XslNode node)
    {
        _scope.EnterScope(node.Namespaces);

        // We don't want to allow rewriting by default
        bool canRewrite = false;

        // Use a constant cost for all nodes (should probably depend on the node's type)
        int nodeCost = FixedNodeCost;

        // Detect the number of iterators used by the node's 'select' attribute
        if (NodeTypeTest(node.NodeType, NodesWithSelect))
        {
            nodeCost += NodeCostForXPath(node.Select);
        }

        // Iterate through all the child nodes
        var content = node.Content;
        int last = content.Count - 1;
        for (int t = 0; t <= last; ++t)
        {
            var child = content[t];
            var costForChild = CheckNodeCost(child); // recurse
            nodeCost += costForChild;
            if (canRewrite && nodeCost > RewriteThreshold)
            {
                // This child would overflow the limit for this scope; create a new scope
                // Don't refactor the code if this is the last child and its cost is trivial
                if (t < last || costForChild > CallTemplateCost)
                {
                    //Debug.WriteLine("Node {0} (within {5}) on {1}:{2} has cost {3}; {4} total",
                    //    child.NodeType, child.SourceLine.Start.Line, child.SourceLine.Start.Pos, costForChild, nodeCost, node.NodeType);

                    // Refactor this node, moving the current child and all after into a new template
                    Refactor(node, t);

                    // The new template (containing the remainder of the current node) will be processed later;
                    // its cost here collapses to the cost of the call-template that replaced it
                    nodeCost -= costForChild;
                    nodeCost += CallTemplateCost;
                }
                break;
            }
            // Local variables and parameters must be added to the outer scope
            if (child.NodeType == XslNodeType.Variable || child.NodeType == XslNodeType.Param)
            {
                _scope.AddVariable(child.Name, (VarPar)child);

                // Parameters will cause code generation at the call-site, not in the callee
                if (child.NodeType == XslNodeType.Param)
                {
                    nodeCost -= costForChild;
                }
            }
            else if (!canRewrite)
            {
                // We're past the parameters and our first real node; start checking the cost
                // Note: some nodes like xsl:choose cannot contain xsl:call-template
                canRewrite = NodeTypeTest(node.NodeType, ParentsOfCallTemplate);
            }
        }
        _scope.ExitScope();
        return nodeCost;
    }

    // Splits the children into two pieces: prefix and suffix
    // The prefix calls a new template T, which contains all of the suffix:
    //     F=PREFIX~SUFFIX  =>  F=PREFIX~C(T)  and  T=PARAMS~SUFFIX
    private void Refactor(XslNode parent, int split)
    {
        Debug.Assert(split > 0);
        var content = (List<XslNode>)parent.Content;
        var node = content[split];

        // Generate unique name for the new template
        QilName templatename = AstFactory.QName("generated", _compiler.CreatePhantomNamespace(), "compiler");

        // Create fake ContextInfo for the new nodes, based on the context for the old node
        var fakeCtxInfo = new XsltInput.ContextInfo(node.SourceLine);

        // Create the new call-template node
        var calltemplate = AstFactory.CallTemplate(templatename, fakeCtxInfo);
        XsltLoader.SetInfo(calltemplate, null, fakeCtxInfo);

        // Create a new template node
        Template newtemplate = AstFactory.Template(templatename, null, XsltLoader.nullMode, double.NaN, node.XslVersion);
        XsltLoader.SetInfo(newtemplate, null, fakeCtxInfo);
        _newTemplates.Push(newtemplate);

        // Pre-allocate the new content list to minimize the number of resizes (adding some space for any params)
        newtemplate.SetContent(new List<XslNode>(content.Count - split + 8));

        // Pass parameters from the current scope into the called template
        foreach (var scoperecord in _scope.GetActiveRecords())
        {
            if (!scoperecord.IsVariable)
            {
                // The scope record is either a namespace declaration or an exclusion namespace;
                // copy it onto the new template so its content resolves prefixes the same way
                Debug.Assert(scoperecord.IsNamespace || scoperecord.ncName == null);
                Debug.Assert(!_compiler.IsPhantomNamespace(scoperecord.nsUri));
                newtemplate.Namespaces = new NsDecl(newtemplate.Namespaces, scoperecord.ncName, scoperecord.nsUri);
            }
            else
            {
                // The scope contains a variable that we must pass into the new template
                var variable = scoperecord.value;

                // Skip variables generated during errors
                if (_compiler.IsPhantomNamespace(variable.Name.NamespaceUri))
                {
                    continue;
                }

                // Need to create a new QilName (can't reuse the one from the variable, even though it's exactly the same)
                var paramname = AstFactory.QName(variable.Name.LocalName, variable.Name.NamespaceUri, variable.Name.Prefix);

                // For each variable in scope, add xsl:with-param to the xsl:call-template
                var withparam = AstFactory.VarPar(XslNodeType.WithParam, paramname, '$' + paramname.QualifiedName, XslVersion.Current);
                XsltLoader.SetInfo(withparam, null, fakeCtxInfo);
                withparam.Namespaces = variable.Namespaces;
                calltemplate.AddContent(withparam);

                // For each variable in scope, add xsl:param to the xsl:template
                var param = AstFactory.VarPar(XslNodeType.Param, paramname, null, XslVersion.Current);
                XsltLoader.SetInfo(param, null, fakeCtxInfo);
                param.Namespaces = variable.Namespaces;
                newtemplate.AddContent(param);
            }
        }

        // Move all the other children to the new template as well (AddRange)
        for (int t = split; t < content.Count; ++t)
        {
            newtemplate.AddContent(content[t]);
        }

        // Replace the child with the rewritten child; remove the rest
        content[split] = calltemplate;
        content.RemoveRange(split + 1, content.Count - split - 1);
        Debug.Assert(parent.Content.Count == split + 1);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.CodeGeneration;
using Orleans.Configuration;
using Orleans.Runtime;
using Orleans.Runtime.Scheduler;
using Orleans.Serialization;
using Orleans.Transactions.Abstractions;
namespace Orleans.Transactions.State
{
/// <summary>
/// Lock manager for a transactional state. Non-conflicting transactions are
/// batched into a <see cref="LockGroup"/>; the group at the head of the linked
/// list holds the lock, while subsequent groups queue up behind it. Groups are
/// advanced, timed out, and committed by the <see cref="LockWork"/> batch worker.
/// </summary>
internal class ReadWriteLock<TState>
    where TState : class, new()
{
    private readonly TransactionalStateOptions options;
    private readonly TransactionQueue<TState> queue;
    private BatchWorker lockWorker;
    private BatchWorker storageWorker;
    private readonly ILogger logger;
    private readonly IActivationLifetime activationLifetime;

    // the linked list of lock groups
    // the head is the group that is currently holding the lock
    private LockGroup currentGroup = null;

    // cache the last known minimum so we don't have to recompute it as much
    private DateTime cachedMin = DateTime.MaxValue;
    private Guid cachedMinId;

    // group of non-conflicting transactions collectively acquiring/releasing the lock
    private class LockGroup : Dictionary<Guid, TransactionRecord<TState>>
    {
        // Total number of records ever added to this group (not decremented on
        // removal), used to cap group size against MaxLockGroupSize
        public int FillCount;
        public List<Action> Tasks; // the tasks for executing the waiting operations
        public LockGroup Next; // queued-up transactions waiting to acquire lock
        public DateTime? Deadline;
        public void Reset()
        {
            FillCount = 0;
            Tasks = null;
            Deadline = null;
            Clear();
        }
    }

    public ReadWriteLock(
        IOptions<TransactionalStateOptions> options,
        TransactionQueue<TState> queue,
        BatchWorker storageWorker,
        ILogger logger,
        IActivationLifetime activationLifetime)
    {
        this.options = options.Value;
        this.queue = queue;
        this.storageWorker = storageWorker;
        this.logger = logger;
        this.activationLifetime = activationLifetime;
        this.lockWorker = new BatchWorkerFromDelegate(LockWork);
    }

    /// <summary>
    /// Enters the lock for <paramref name="transactionId"/> (joining an existing
    /// group, or queueing a new one) and runs <paramref name="task"/> under it.
    /// If the group does not yet hold the lock, the task is deferred until the
    /// group becomes the head. Throws OrleansBrokenTransactionLockException when
    /// the lock was lost between operations, and may roll back lower-priority
    /// conflicting transactions on re-entry.
    /// </summary>
    public async Task<TResult> EnterLock<TResult>(Guid transactionId, DateTime priority,
        AccessCounter counter, bool isRead, Func<TResult> task)
    {
        bool rollbacksOccurred = false;
        List<Task> cleanup = new List<Task>();
        await this.queue.Ready();

        // search active transactions
        if (Find(transactionId, isRead, out var group, out var record))
        {
            // check if we lost some reads or writes already
            if (counter.Reads > record.NumberReads || counter.Writes > record.NumberWrites)
            {
                throw new OrleansBrokenTransactionLockException(transactionId.ToString(), "when re-entering lock");
            }

            // check if the operation conflicts with other transactions in the group
            if (HasConflict(isRead, priority, transactionId, group, out var resolvable))
            {
                if (!resolvable)
                {
                    throw new OrleansTransactionLockUpgradeException(transactionId.ToString());
                }
                else
                {
                    // rollback all conflicts
                    var conflicts = Conflicts(transactionId, group).ToList();

                    if (conflicts.Count > 0)
                    {
                        foreach (var r in conflicts)
                        {
                            cleanup.Add(Rollback(r, true));
                            rollbacksOccurred = true;
                        }
                    }
                }
            }
        }
        else
        {
            // check if we were supposed to already hold this lock
            if (counter.Reads + counter.Writes > 0)
            {
                throw new OrleansBrokenTransactionLockException(transactionId.ToString(), "when trying to re-enter lock");
            }

            // update the lock deadline (only meaningful for the group actually holding the lock)
            if (group == currentGroup)
            {
                group.Deadline = DateTime.UtcNow + this.options.LockTimeout;

                if (logger.IsEnabled(LogLevel.Trace))
                    logger.Trace("set lock expiration at {Deadline}", group.Deadline.Value.ToString("o"));
            }

            // create a new record for this transaction
            record = new TransactionRecord<TState>()
            {
                TransactionId = transactionId,
                Priority = priority,
                Deadline = DateTime.UtcNow + this.options.LockAcquireTimeout
            };
            group.Add(transactionId, record);
            group.FillCount++;

            if (logger.IsEnabled(LogLevel.Trace))
            {
                if (group == currentGroup)
                    logger.Trace($"enter-lock {transactionId} fc={group.FillCount}");
                else
                    logger.Trace($"enter-lock-queue {transactionId} fc={group.FillCount}");
            }
        }

        var result =
            new TaskCompletionSource<TResult>(TaskCreationOptions.RunContinuationsAsynchronously);
        Action completion = () =>
        {
            try
            {
                result.TrySetResult(task());
            }
            catch (Exception exception)
            {
                result.TrySetException(exception);
            }
        };

        if (group != currentGroup)
        {
            // task will be executed once its group acquires the lock
            if (group.Tasks == null)
                group.Tasks = new List<Action>();

            group.Tasks.Add(completion);
        }
        else
        {
            // execute task right now
            completion();
        }

        if (isRead)
        {
            record.AddRead();
        }
        else
        {
            record.AddWrite();
        }

        if (rollbacksOccurred)
        {
            lockWorker.Notify();
        }
        else if (group.Deadline.HasValue)
        {
            // schedule a re-check at the group's deadline so timeouts are enforced
            lockWorker.Notify(group.Deadline.Value);
        }

        await Task.WhenAll(cleanup);
        return await result.Task;
    }

    /// <summary>
    /// Validates that <paramref name="transactionId"/> still holds the lock and
    /// that its recorded read/write counts match <paramref name="accessCount"/>.
    /// Rolls the transaction back on a count mismatch.
    /// </summary>
    public async Task<Tuple<TransactionalStatus, TransactionRecord<TState>>> ValidateLock(Guid transactionId, AccessCounter accessCount)
    {
        if (currentGroup == null || !currentGroup.TryGetValue(transactionId, out TransactionRecord<TState> record))
        {
            return Tuple.Create(TransactionalStatus.BrokenLock, new TransactionRecord<TState>());
        }
        else if (record.NumberReads != accessCount.Reads
            || record.NumberWrites != accessCount.Writes)
        {
            await Rollback(transactionId, true);

            return Tuple.Create(TransactionalStatus.LockValidationFailed, record);
        }
        else
        {
            return Tuple.Create(TransactionalStatus.Ok, record);
        }
    }

    /// <summary>Wakes the lock worker to re-evaluate the lock state.</summary>
    public void Notify()
    {
        this.lockWorker.Notify();
    }

    // NOTE(review): assumes currentGroup is non-null — confirm that all callers
    // only invoke this while the lock is held (ValidateLock/Rollback guard explicitly)
    public bool TryGetRecord(Guid transactionId, out TransactionRecord<TState> record)
    {
        return this.currentGroup.TryGetValue(transactionId, out record);
    }

    /// <summary>
    /// Aborts every transaction in the group currently holding the lock,
    /// notifying each of a broken lock, and resets the group.
    /// </summary>
    public Task AbortExecutingTransactions()
    {
        if (currentGroup != null)
        {
            // materialize the abort tasks before Reset() clears the group
            Task[] pending = currentGroup.Select(g => BreakLock(g.Key, g.Value)).ToArray();
            currentGroup.Reset();
            return Task.WhenAll(pending);
        }

        return Task.CompletedTask;
    }

    private Task BreakLock(Guid transactionId, TransactionRecord<TState> entry)
    {
        if (logger.IsEnabled(LogLevel.Trace))
            logger.Trace("Break-lock for transaction {TransactionId}", transactionId);

        return this.queue.NotifyOfAbort(entry, TransactionalStatus.BrokenLock);
    }

    /// <summary>
    /// Aborts all transactions queued behind the current group by running their
    /// deferred tasks (which fail because the transactions are no longer in
    /// currentGroup) and unlinking the queue.
    /// </summary>
    public void AbortQueuedTransactions()
    {
        var pos = currentGroup?.Next;
        while (pos != null)
        {
            if (pos.Tasks != null)
            {
                foreach (var t in pos.Tasks)
                {
                    // running the task will abort the transaction because it is not in currentGroup
                    t();
                }
            }
            pos.Clear();
            pos = pos.Next;
        }
        if (currentGroup != null)
            currentGroup.Next = null;
    }

    /// <summary>
    /// Removes <paramref name="guid"/> from the current group; when
    /// <paramref name="notify"/> is true, notifies listeners of the abort.
    /// No-op if the transaction is not in the current group.
    /// </summary>
    public async Task Rollback(Guid guid, bool notify)
    {
        // no-op if the transaction never happened or already rolled back
        if (currentGroup == null || !currentGroup.TryGetValue(guid, out var record))
        {
            return;
        }

        // remove record for this transaction
        currentGroup.Remove(guid);

        // notify remote listeners
        if (notify)
        {
            await this.queue.NotifyOfAbort(record, TransactionalStatus.BrokenLock);
        }
    }

    /// <summary>
    /// Batch-worker body: commits released group members, enforces the group
    /// deadline (breaking the lock on timeout), and promotes the next queued
    /// group once the current one empties (discarding waiters whose own
    /// acquire deadline has passed).
    /// </summary>
    private async Task LockWork()
    {
        // Stop pumping lock work if this activation is stopping/stopped.
        if (this.activationLifetime.OnDeactivating.IsCancellationRequested) return;

        using (this.activationLifetime.BlockDeactivation())
        {
            var now = DateTime.UtcNow;

            if (currentGroup != null)
            {
                // check if there are any group members that are ready to exit the lock
                if (currentGroup.Count > 0)
                {
                    if (LockExits(out var single, out var multiple))
                    {
                        if (single != null)
                        {
                            await this.queue.EnqueueCommit(single);
                        }
                        else if (multiple != null)
                        {
                            foreach (var r in multiple)
                            {
                                await this.queue.EnqueueCommit(r);
                            }
                        }

                        lockWorker.Notify();
                        storageWorker.Notify();
                    }
                    else if (currentGroup.Deadline.HasValue)
                    {
                        if (currentGroup.Deadline.Value < now)
                        {
                            // the lock group has timed out.
                            string txlist = string.Join(",", currentGroup.Keys.Select(g => g.ToString()));
                            TimeSpan late = now - currentGroup.Deadline.Value;
                            logger.LogWarning("Break-lock timeout for transactions {TransactionIds}. {Late}ms late", txlist, Math.Floor(late.TotalMilliseconds));
                            await AbortExecutingTransactions();
                            lockWorker.Notify();
                        }
                        else
                        {
                            if (logger.IsEnabled(LogLevel.Trace))
                                logger.Trace("recheck lock expiration at {Deadline}", currentGroup.Deadline.Value.ToString("o"));

                            // check again when the group expires
                            lockWorker.Notify(currentGroup.Deadline.Value);
                        }
                    }
                    else
                    {
                        string txlist = string.Join(",", currentGroup.Keys.Select(g => g.ToString()));
                        logger.LogWarning("Deadline not set for transactions {TransactionIds}", txlist);
                    }
                }
                else
                {
                    // the lock is empty, a new group can enter
                    currentGroup = currentGroup.Next;

                    if (currentGroup != null)
                    {
                        currentGroup.Deadline = now + this.options.LockTimeout;

                        // discard expired waiters that have no chance to succeed
                        // because they have been waiting for the lock for a longer timespan than the
                        // total transaction timeout
                        List<Guid> expiredWaiters = null;
                        foreach (var kvp in currentGroup)
                        {
                            if (now > kvp.Value.Deadline)
                            {
                                if (expiredWaiters == null)
                                    expiredWaiters = new List<Guid>();

                                expiredWaiters.Add(kvp.Key);

                                if (logger.IsEnabled(LogLevel.Trace))
                                    logger.Trace($"expire-lock-waiter {kvp.Key}");
                            }
                        }

                        if (expiredWaiters != null)
                        {
                            foreach (var guid in expiredWaiters)
                            {
                                currentGroup.Remove(guid);
                            }
                        }

                        if (logger.IsEnabled(LogLevel.Trace))
                        {
                            logger.Trace($"lock groupsize={currentGroup.Count} deadline={currentGroup.Deadline:o}");
                            foreach (var kvp in currentGroup)
                                logger.Trace($"enter-lock {kvp.Key}");
                        }

                        // execute all the read and update tasks
                        if (currentGroup.Tasks != null)
                        {
                            foreach (var t in currentGroup.Tasks)
                            {
                                t();
                            }
                        }

                        lockWorker.Notify();
                    }
                }
            }
        }
    }

    /// <summary>
    /// Looks for <paramref name="guid"/> in the group chain. Returns true (with
    /// the owning group and record) when found; otherwise returns false with
    /// <paramref name="group"/> set to the group where the transaction should be
    /// inserted (the first non-full, non-conflicting group, or a newly appended
    /// empty one) and <paramref name="record"/> null.
    /// </summary>
    private bool Find(Guid guid, bool isRead, out LockGroup group, out TransactionRecord<TState> record)
    {
        if (currentGroup == null)
        {
            group = currentGroup = new LockGroup();
            record = null;
            return false;
        }
        else
        {
            group = null;
            var pos = currentGroup;

            while (true)
            {
                if (pos.TryGetValue(guid, out record))
                {
                    group = pos;
                    return true;
                }

                // if we have not found a place to insert this op yet, and there is room, and no conflicts, use this one
                if (group == null
                    && pos.FillCount < this.options.MaxLockGroupSize
                    && !HasConflict(isRead, DateTime.MaxValue, guid, pos, out var resolvable))
                {
                    group = pos;
                }

                if (pos.Next == null) // we did not find this tx.
                {
                    // add a new empty group to insert this tx, if we have not found one yet
                    if (group == null)
                    {
                        group = pos.Next = new LockGroup();
                    }

                    return false;
                }

                pos = pos.Next;
            }
        }
    }

    /// <summary>
    /// Determines whether the given operation conflicts with any other member of
    /// <paramref name="group"/>. Reads only conflict with writers. A conflict is
    /// "resolvable" (the other party can be rolled back) only when this
    /// transaction's priority is not lower than every conflicting transaction's.
    /// </summary>
    private bool HasConflict(bool isRead, DateTime priority, Guid transactionId, LockGroup group, out bool resolvable)
    {
        bool foundResolvableConflicts = false;

        foreach (var kvp in group)
        {
            if (kvp.Key != transactionId)
            {
                if (isRead && kvp.Value.NumberWrites == 0)
                {
                    // read/read does not conflict
                    continue;
                }
                else
                {
                    if (priority > kvp.Value.Priority)
                    {
                        // the other transaction has higher priority; cannot be resolved in our favor
                        resolvable = false;
                        return true;
                    }
                    else
                    {
                        foundResolvableConflicts = true;
                    }
                }
            }
        }

        resolvable = foundResolvableConflicts;
        return foundResolvableConflicts;
    }

    // Enumerates all other transactions in the group (the candidates to roll back
    // when a resolvable conflict was detected)
    private IEnumerable<Guid> Conflicts(Guid transactionId, LockGroup group)
    {
        foreach (var kvp in group)
        {
            if (kvp.Key != transactionId)
            {
                yield return kvp.Key;
            }
        }
    }

    /// <summary>
    /// Collects group members that are ready to exit the lock (commit role already
    /// determined) and whose timestamp precedes every still-undetermined member.
    /// Exactly one of <paramref name="single"/> / <paramref name="multiple"/> is
    /// set on a true return; removed entries are sorted by timestamp.
    /// </summary>
    private bool LockExits(out TransactionRecord<TState> single, out List<TransactionRecord<TState>> multiple)
    {
        single = null;
        multiple = null;

        // fast-path the one-element case
        if (currentGroup.Count == 1)
        {
            var kvp = currentGroup.First();
            if (kvp.Value.Role == CommitRole.NotYetDetermined) // has not received commit from TA
            {
                return false;
            }
            else
            {
                single = kvp.Value;

                currentGroup.Remove(single.TransactionId);

                if (logger.IsEnabled(LogLevel.Debug))
                    logger.Debug($"exit-lock {single.TransactionId} {single.Timestamp:o}");

                return true;
            }
        }
        else
        {
            // find the current minimum, if we don't have a valid cache of it
            if (cachedMin == DateTime.MaxValue
                || !currentGroup.TryGetValue(cachedMinId, out var record)
                || record.Role != CommitRole.NotYetDetermined
                || record.Timestamp != cachedMin)
            {
                cachedMin = DateTime.MaxValue;
                foreach (var kvp in currentGroup)
                {
                    if (kvp.Value.Role == CommitRole.NotYetDetermined) // has not received commit from TA
                    {
                        if (cachedMin > kvp.Value.Timestamp)
                        {
                            cachedMin = kvp.Value.Timestamp;
                            cachedMinId = kvp.Key;
                        }
                    }
                }
            }

            // find released entries
            foreach (var kvp in currentGroup)
            {
                if (kvp.Value.Role != CommitRole.NotYetDetermined) // ready to commit
                {
                    if (kvp.Value.Timestamp < cachedMin)
                    {
                        if (multiple == null)
                        {
                            multiple = new List<TransactionRecord<TState>>();
                        }

                        multiple.Add(kvp.Value);
                    }
                }
            }

            if (multiple == null)
            {
                return false;
            }
            else
            {
                multiple.Sort(Comparer);

                for (int i = 0; i < multiple.Count; i++)
                {
                    currentGroup.Remove(multiple[i].TransactionId);

                    if (logger.IsEnabled(LogLevel.Debug))
                        logger.Debug($"exit-lock ({i}/{multiple.Count}) {multiple[i].TransactionId} {multiple[i].Timestamp:o}");
                }

                return true;
            }
        }
    }

    // Orders transaction records by timestamp (used when releasing multiple entries)
    private static int Comparer(TransactionRecord<TState> a, TransactionRecord<TState> b)
    {
        return a.Timestamp.CompareTo(b.Timestamp);
    }
}
}
| |
#region Copyright (c) 2006 Ian Davis and James Carlyle
/*------------------------------------------------------------------------------
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 2006 Ian Davis and James Carlyle
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------------------------------------------------------------------*/
#endregion
using MySql.Data.MySqlClient;
namespace SemPlan.Spiral.MySql {
using System;
using System.Text;
using System.Collections;
using SemPlan.Spiral.Core;
using SemPlan.Spiral.Expressions;
using System.Data;
/// <summary>
/// Represents a mapping from a query to SQL
/// </summary>
/// <remarks>
/// $Id: QuerySqlMapper.cs,v 1.2 2006/03/08 22:42:36 ian Exp $
///</remarks>
public class QuerySqlMapper : QueryGroupVisitor {
private Query itsQuery;
private ResourceMap itsResourceMap;
private ArrayList itsSelectTerms;
private ArrayList itsWhereTerms;
private ArrayList itsJoins;
private ArrayList itsVariableList;
private int itsStatementTableCount;
private int itsOptionalNestingDepth;
private bool itsIsMappable;
private bool itsIsFeasible;
private string itsSql;
private IDictionary itsResourcesIndexedByNode;
private Hashtable itsSelectedVariables;
private Hashtable itsVariableFirstMentions;
private Hashtable itsVariableTables;
private Hashtable itsNonLiteralVariables; // Stores whether a variable is used in subject or object of pattern which restricts possible node types
public QuerySqlMapper(Query query, ResourceMap resourceMap) {
itsQuery = query;
itsResourceMap = resourceMap;
itsIsMappable = true;
itsIsFeasible = true;
itsSelectTerms = new ArrayList();
itsWhereTerms = new ArrayList();
itsJoins = new ArrayList();
itsStatementTableCount = 0;
itsVariableFirstMentions = new Hashtable();
itsVariableTables = new Hashtable();
itsSelectedVariables = new Hashtable();
itsNonLiteralVariables = new Hashtable();
itsVariableList = new ArrayList();
itsSql = "";
QueryGroup group = itsQuery.QueryGroup;
StringBuilder sql = new StringBuilder();
if (! CollectResourcesForTerms(group) ) {
NoSolutionIsPossible();
}
if ( itsIsFeasible ) {
sql.Append( GenerateSelect( group ) );
}
itsSql = sql.ToString();
}
private string GenerateSelect(QueryGroup group) {
group.Accept( this );
if ( itsJoins.Count == 0 && itsWhereTerms.Count == 0) {
itsIsFeasible = false;
itsIsMappable = false;
return string.Empty;
}
foreach (Variable variable in itsQuery.Variables) {
if ( itsVariableFirstMentions.Contains( variable.Name ) ) {
if (! itsSelectedVariables.Contains( variable.Name ) ) {
itsVariableList.Add( variable );
itsSelectedVariables[ variable.Name ] = variable;
itsSelectTerms.Add( "rn_" + variable.Name + ".resourceHash rh_" + variable.Name);
itsSelectTerms.Add( "rn_" + variable.Name + ".nodeHash nh_" + variable.Name );
itsSelectTerms.Add( "rn_" + variable.Name + ".nodeType nt_" + variable.Name );
itsJoins.Add("JOIN ResourceNodes rn_" + variable.Name + " ON rn_" + variable.Name + ".resourceHash=" + itsVariableFirstMentions[ variable.Name ] + " AND rn_" + variable.Name + ".graphId=" + itsVariableTables[ variable.Name ] + ".graphId");
itsJoins.Add("LEFT OUTER JOIN UriRefs u_" + variable.Name + " ON rn_" + variable.Name + ".nodeHash=u_" + variable.Name + ".hash AND rn_" + variable.Name + ".nodeType='u'");
if (! itsNonLiteralVariables.Contains( variable.Name ) ) {
itsSelectTerms.Add( "COALESCE(u_" + variable.Name + ".uri, pl_" + variable.Name + ".value, l_" + variable.Name + ".value) val_" + variable.Name );
itsSelectTerms.Add( "COALESCE(tl_" + variable.Name + ".value, t_" + variable.Name + ".value) sub_" + variable.Name );
itsJoins.Add("LEFT OUTER JOIN PlainLiterals pl_" + variable.Name + " ON rn_" + variable.Name + ".nodeHash=pl_" + variable.Name + ".hash AND rn_" + variable.Name + ".nodeType='p'");
itsJoins.Add("LEFT OUTER JOIN Languages l_" + variable.Name + " ON pl_" + variable.Name + ".languageHash=l_" + variable.Name + ".hash");
itsJoins.Add("LEFT OUTER JOIN TypedLiterals tl_" + variable.Name + " ON rn_" + variable.Name + ".nodehash=tl_" + variable.Name + ".hash AND rn_" + variable.Name + ".nodeType='t'");
itsJoins.Add("LEFT OUTER JOIN DataTypes t_" + variable.Name + " ON tl_" + variable.Name + ".datatypeHash=t_" + variable.Name + ".hash");
}
else {
itsSelectTerms.Add( "u_" + variable.Name + ".uri val_" + variable.Name );
itsSelectTerms.Add( "NULL sub_" + variable.Name );
}
}
}
else {
itsSelectTerms.Add( "NULL rh_" + variable.Name );
itsSelectTerms.Add( "NULL nh_" + variable.Name );
itsSelectTerms.Add( "NULL nt_" + variable.Name );
itsSelectTerms.Add( "NULL val_" + variable.Name );
itsSelectTerms.Add( "NULL sub_" + variable.Name );
}
}
return BuildSql();
}
public string Sql {
get {
return itsSql;
}
}
public bool IsMappable {
get { return itsIsMappable; }
}
public bool IsFeasible {
get { return itsIsFeasible; }
}
public void visit(QueryGroupAnd group) {
foreach (QueryGroup subgroup in group.Groups) {
subgroup.Accept( this );
}
}
public void visit(QueryGroupConstraints group) {
foreach (SemPlan.Spiral.Core.Constraint constraint in group.Constraints) {
if ( constraint.Expression is IsLiteral && ((IsLiteral)constraint.Expression).SubExpression is VariableExpression) {
string variableName = ((VariableExpression)((IsLiteral)constraint.Expression).SubExpression).Variable.Name;
itsWhereTerms.Add("(nt_" + variableName + "='p' OR nt_" + variableName + "='t')");
}
else if ( constraint.Expression is IsIri && ((IsIri)constraint.Expression).SubExpression is VariableExpression) {
string variableName = ((VariableExpression)((IsIri)constraint.Expression).SubExpression).Variable.Name;
itsWhereTerms.Add("nt_" + variableName + "='u'");
}
else if ( constraint.Expression is IsBlank && ((IsBlank)constraint.Expression).SubExpression is VariableExpression) {
string variableName = ((VariableExpression)((IsBlank)constraint.Expression).SubExpression).Variable.Name;
itsWhereTerms.Add("nt_" + variableName + "='b'");
}
else if ( constraint.Expression is Bound) {
string variableName = ((Bound)constraint.Expression).Variable.Name;
if ( itsVariableFirstMentions.Contains( variableName ) ) {
itsWhereTerms.Add(itsVariableFirstMentions[ variableName ] + " IS NOT NULL");
}
else {
MarkAsNotFeasible(" Bound constraint applied to variable '" + variableName + "' before it is first mentioned");
}
}
else {
itsIsMappable = false;
}
}
}
public void visit(QueryGroupOptional group) {
++itsOptionalNestingDepth;
group.Group.Accept( this );
--itsOptionalNestingDepth;
}
public void visit(QueryGroupOr group) {
foreach (QueryGroup subgroup in group.Groups) {
subgroup.Accept( this );
}
}
public void visit(QueryGroupPatterns group) {
ArrayList groupJoins = new ArrayList();
Hashtable variableMentionsWithinThisGroup = new Hashtable();
bool allPatternsAreMatchable = true;
foreach (Pattern pattern in group.Patterns) {
if ( ! (pattern.GetSubject() is Variable) && ! itsResourcesIndexedByNode.Contains( pattern.GetSubject() ) ) {
allPatternsAreMatchable = false;
}
else if ( ! (pattern.GetPredicate() is Variable) && ! itsResourcesIndexedByNode.Contains( pattern.GetPredicate() ) ) {
allPatternsAreMatchable = false;
}
else if ( ! (pattern.GetObject() is Variable) && ! itsResourcesIndexedByNode.Contains( pattern.GetObject() ) ) {
allPatternsAreMatchable = false;
}
}
if ( allPatternsAreMatchable) {
foreach (Pattern pattern in group.Patterns) {
++itsStatementTableCount;
string tableName = "s" + itsStatementTableCount;
ArrayList constraints = new ArrayList();
bool referencesExternalVariables = false;
if (ProcessPatternTerm( pattern.GetSubject(), tableName, "subjectHash", constraints, variableMentionsWithinThisGroup) ) {
referencesExternalVariables = true;
}
if ( ProcessPatternTerm( pattern.GetPredicate(), tableName, "predicateHash", constraints, variableMentionsWithinThisGroup) ) {
referencesExternalVariables = true;
}
if ( ProcessPatternTerm( pattern.GetObject(), tableName, "objectHash", constraints, variableMentionsWithinThisGroup) ) {
referencesExternalVariables = true;
}
constraints.Add( tableName + ".graphId=" + itsResourceMap.GetHashCode() );
if (itsStatementTableCount > 1) {
StringBuilder join = new StringBuilder();
if ( referencesExternalVariables && itsOptionalNestingDepth > 0) {
join.Append("LEFT OUTER ");
}
join.Append("JOIN Statements " + tableName + " ON ");
bool doneFirst = false;
foreach (string constraint in constraints) {
if ( constraint != null && constraint.Length > 0) {
if ( doneFirst ) {
join.Append(" AND ");
}
join.Append( constraint );
doneFirst = true;
}
}
groupJoins.Add( join.ToString() );
}
else {
foreach (string constraint in constraints) {
if ( constraint != null && constraint.Length > 0) {
itsWhereTerms.Add( constraint );
}
}
}
}
itsJoins.AddRange( groupJoins );
}
else {
if ( itsOptionalNestingDepth == 0 ) {
NoSolutionIsPossible();
}
}
}
private bool ProcessPatternTerm(PatternTerm term, string tableName, string columnName, ArrayList constraints, Hashtable localVariableMentions) {
bool referencesExternalVariables = false;
if (term is Variable) {
Variable variable = (Variable)term;
if ( itsVariableFirstMentions.Contains( variable.Name ) ) {
if ( localVariableMentions.Contains( variable.Name ) ) {
constraints.Add( tableName + "." + columnName + "=" + localVariableMentions[ variable.Name ] );
}
else {
referencesExternalVariables = true;
constraints.Add( tableName + "." + columnName + "=" + itsVariableFirstMentions[ variable.Name ] );
localVariableMentions[ variable.Name ] = tableName + "." + columnName;
}
}
else {
itsVariableFirstMentions[ variable.Name ] = tableName + "." + columnName;
localVariableMentions[ variable.Name ] = tableName + "." + columnName;
itsVariableTables[ variable.Name ] = tableName;
}
if ( columnName.Equals("subjectHash") || columnName.Equals("predicateHash") ) {
itsNonLiteralVariables[ variable.Name ] = variable;
}
}
else {
constraints.Add( tableName + "." + columnName + "=" + itsResourcesIndexedByNode[ term ].GetHashCode() );
}
return referencesExternalVariables;
}
/// <summary>
/// Assembles the final SQL SELECT statement from the previously collected
/// select terms, join clauses, where terms and ordering information.
/// </summary>
/// <returns>The generated SQL text.</returns>
private string BuildSql() {
    StringBuilder sql = new StringBuilder("SELECT ");
    if (itsQuery.IsDistinct) {
        sql.Append("DISTINCT ");
    }
    // Comma-separate the select terms.
    bool doneFirst = false;
    foreach (string term in itsSelectTerms) {
        if ( doneFirst ) {
            sql.Append(", ");
        }
        sql.Append( term );
        doneFirst = true;
    }
    // "s1" is the alias of the first Statements table; additional tables are joined below.
    sql.Append(" FROM Statements s1 ");
    foreach (string term in itsJoins) {
        sql.Append( term ).Append(" ");
    }
    // NOTE(review): "WHERE " is emitted unconditionally. If itsWhereTerms can ever be
    // empty this produces invalid SQL — confirm a constraint (e.g. the graphId term)
    // is always added for the first table.
    sql.Append("WHERE ");
    doneFirst = false;
    foreach (string term in itsWhereTerms) {
        if ( doneFirst ) {
            sql.Append(" AND ");
        }
        sql.Append( term );
        doneFirst = true;
    }
    if ( itsQuery.IsOrdered ) {
        if ( itsQuery.OrderBy is VariableExpression ) {
            // Ordering can only be pushed into SQL when the sort key is a plain variable
            // whose value column is selected as val_<name>.
            sql.Append(" ORDER BY val_" + ((VariableExpression)itsQuery.OrderBy).Variable.Name);
            if ( itsQuery.OrderDirection.Equals( Query.SortOrder.Descending ) ) {
                sql.Append(" DESC");
            }
        }
        else {
            // The ordering expression cannot be expressed in SQL; results must be
            // sorted outside the database.
            itsIsMappable = false;
        }
    }
    return sql.ToString();
}
/// <summary>
/// Gets the variables gathered during translation of the query.
/// The backing list is populated elsewhere in this class.
/// </summary>
public ICollection Variables {
    get { return itsVariableList; }
}
/// <summary>
/// Resolves the resources denoted by every term of the group and stores the
/// combined node-to-resource map in itsResourcesIndexedByNode.
/// </summary>
/// <param name="group">Query group whose terms are resolved.</param>
/// <returns>False when some required term denotes no known resource (no solution possible).</returns>
private bool CollectResourcesForTerms(QueryGroup group) {
    TermCollector collector = new TermCollector( group );
    ICollection required = collector.RequiredTerms;
    ICollection optional = collector.OptionalTerms;
    IDictionary requiredMap = itsResourceMap.GetResourcesDenotedBy( required );
    // Every required term must map to a resource, otherwise the query cannot match.
    if ( requiredMap.Keys.Count != required.Count) {
        return false;
    }
    IDictionary optionalMap = itsResourceMap.GetResourcesDenotedBy( optional );
    itsResourcesIndexedByNode = requiredMap;
    // Optional terms that resolved are merged in; missing ones are simply absent.
    foreach ( Node node in optionalMap.Keys) {
        itsResourcesIndexedByNode[ node ] = optionalMap[node];
    }
    return true;
}
/// <summary>
/// Flags the query as having no possible solution: it is neither feasible nor
/// mappable to SQL, and any generated SQL is discarded.
/// </summary>
public void NoSolutionIsPossible() {
    itsIsFeasible = itsIsMappable = false;
    itsSql = "";
}
/// <summary>
/// Marks the query as not feasible.
/// </summary>
/// <param name="reason">Human-readable explanation; currently unused by the implementation.</param>
public void MarkAsNotFeasible(string reason) {
    // NOTE(review): unlike NoSolutionIsPossible, this leaves itsIsMappable and itsSql
    // untouched — confirm that asymmetry is intentional.
    itsIsFeasible = false;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Reflection.Internal;
using System.Reflection.Metadata;
using System.Threading;
namespace System.Reflection.PortableExecutable
{
/// <summary>
/// Portable Executable format reader.
/// </summary>
/// <remarks>
/// The implementation is thread-safe, that is multiple threads can read data from the reader in parallel.
/// Disposal of the reader is not thread-safe (see <see cref="Dispose"/>).
/// </remarks>
public sealed class PEReader : IDisposable
{
    // May be null in the event that the entire image is not
    // deemed necessary and we have been instructed to read
    // the image contents without being lazy.
    private MemoryBlockProvider _peImage;

    // If we read the data from the image lazily (peImage != null) we defer reading the PE headers.
    private PEHeaders _lazyPEHeaders;

    // Lazily-created blocks; each is created at most once and disposed in Dispose().
    private AbstractMemoryBlock _lazyMetadataBlock;
    private AbstractMemoryBlock _lazyImageBlock;
    private AbstractMemoryBlock[] _lazyPESectionBlocks;

    /// <summary>
    /// Creates a Portable Executable reader over a PE image stored in memory.
    /// </summary>
    /// <param name="peImage">Pointer to the start of the PE image.</param>
    /// <param name="size">The size of the PE image.</param>
    /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is <see cref="IntPtr.Zero"/>.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="size"/> is negative.</exception>
    /// <remarks>
    /// The memory is owned by the caller and not released on disposal of the <see cref="PEReader"/>.
    /// The caller is responsible for keeping the memory alive and unmodified throughout the lifetime of the <see cref="PEReader"/>.
    /// The content of the image is not read during the construction of the <see cref="PEReader"/>.
    /// </remarks>
    public unsafe PEReader(byte* peImage, int size)
    {
        if (peImage == null)
        {
            throw new ArgumentNullException("peImage");
        }

        if (size < 0)
        {
            throw new ArgumentOutOfRangeException("size");
        }

        _peImage = new ExternalMemoryBlockProvider(peImage, size);
    }

    /// <summary>
    /// Creates a Portable Executable reader over a PE image stored in a stream.
    /// </summary>
    /// <param name="peStream">PE image stream.</param>
    /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception>
    /// <exception cref="BadImageFormatException">
    /// <see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid.
    /// </exception>
    /// <remarks>
    /// Ownership of the stream is transferred to the <see cref="PEReader"/> upon successful validation of constructor arguments. It will be
    /// disposed by the <see cref="PEReader"/> and the caller must not manipulate it.
    /// </remarks>
    public PEReader(Stream peStream)
        : this(peStream, PEStreamOptions.Default)
    {
    }

    /// <summary>
    /// Creates a Portable Executable reader over a PE image stored in a stream beginning at its current position and ending at the end of the stream.
    /// </summary>
    /// <param name="peStream">PE image stream.</param>
    /// <param name="options">
    /// Options specifying how sections of the PE image are read from the stream.
    ///
    /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/>
    /// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it.
    ///
    /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data
    /// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated
    /// by caller while the <see cref="PEReader"/> is alive and undisposed.
    ///
    /// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/>
    /// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also
    /// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/>
    /// after construction.
    /// </param>
    /// <exception cref="ArgumentNullException"><paramref name="peStream"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="options"/> has an invalid value.</exception>
    /// <exception cref="BadImageFormatException">
    /// <see cref="PEStreamOptions.PrefetchMetadata"/> is specified and the PE headers of the image are invalid.
    /// </exception>
    public PEReader(Stream peStream, PEStreamOptions options)
        : this(peStream, options, (int?)null)
    {
    }

    /// <summary>
    /// Creates a Portable Executable reader over a PE image of the given size beginning at the stream's current position.
    /// </summary>
    /// <param name="peStream">PE image stream.</param>
    /// <param name="size">PE image size.</param>
    /// <param name="options">
    /// Options specifying how sections of the PE image are read from the stream.
    ///
    /// Unless <see cref="PEStreamOptions.LeaveOpen"/> is specified, ownership of the stream is transferred to the <see cref="PEReader"/>
    /// upon successful argument validation. It will be disposed by the <see cref="PEReader"/> and the caller must not manipulate it.
    ///
    /// Unless <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/> is specified no data
    /// is read from the stream during the construction of the <see cref="PEReader"/>. Furthermore, the stream must not be manipulated
    /// by caller while the <see cref="PEReader"/> is alive and undisposed.
    ///
    /// If <see cref="PEStreamOptions.PrefetchMetadata"/> or <see cref="PEStreamOptions.PrefetchEntireImage"/>, the <see cref="PEReader"/>
    /// will have read all of the data requested during construction. As such, if <see cref="PEStreamOptions.LeaveOpen"/> is also
    /// specified, the caller retains full ownership of the stream and is assured that it will not be manipulated by the <see cref="PEReader"/>
    /// after construction.
    /// </param>
    /// <exception cref="ArgumentOutOfRangeException">Size is negative or extends past the end of the stream.</exception>
    public PEReader(Stream peStream, PEStreamOptions options, int size)
        : this(peStream, options, (int?)size)
    {
    }

    private unsafe PEReader(Stream peStream, PEStreamOptions options, int? sizeOpt)
    {
        if (peStream == null)
        {
            throw new ArgumentNullException("peStream");
        }

        if (!peStream.CanRead || !peStream.CanSeek)
        {
            throw new ArgumentException(SR.StreamMustSupportReadAndSeek, "peStream");
        }

        if (!options.IsValid())
        {
            throw new ArgumentOutOfRangeException("options");
        }

        long start = peStream.Position;
        int size = PEBinaryReader.GetAndValidateSize(peStream, sizeOpt);

        bool closeStream = true;
        try
        {
            bool isFileStream = FileStreamReadLightUp.IsFileStream(peStream);

            if ((options & (PEStreamOptions.PrefetchMetadata | PEStreamOptions.PrefetchEntireImage)) == 0)
            {
                // Fully lazy: keep the stream (possibly shared) and read blocks on demand.
                _peImage = new StreamMemoryBlockProvider(peStream, start, size, isFileStream, (options & PEStreamOptions.LeaveOpen) != 0);
                closeStream = false;
            }
            else
            {
                // Read in the entire image or metadata blob:
                if ((options & PEStreamOptions.PrefetchEntireImage) != 0)
                {
                    var imageBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, 0, (int)Math.Min(peStream.Length, int.MaxValue));
                    _lazyImageBlock = imageBlock;
                    _peImage = new ExternalMemoryBlockProvider(imageBlock.Pointer, imageBlock.Size);

                    // if the caller asked for metadata initialize the PE headers (calculates metadata offset):
                    if ((options & PEStreamOptions.PrefetchMetadata) != 0)
                    {
                        InitializePEHeaders();
                    }
                }
                else
                {
                    // The peImage is left null, but the lazyMetadataBlock is initialized up front.
                    _lazyPEHeaders = new PEHeaders(peStream);
                    _lazyMetadataBlock = StreamMemoryBlockProvider.ReadMemoryBlockNoLock(peStream, isFileStream, _lazyPEHeaders.MetadataStartOffset, _lazyPEHeaders.MetadataSize);
                }
                // We read all we need, the stream is going to be closed.
            }
        }
        finally
        {
            if (closeStream && (options & PEStreamOptions.LeaveOpen) == 0)
            {
                peStream.Dispose();
            }
        }
    }

    /// <summary>
    /// Creates a Portable Executable reader over a PE image stored in a byte array.
    /// </summary>
    /// <param name="peImage">PE image.</param>
    /// <remarks>
    /// The content of the image is not read during the construction of the <see cref="PEReader"/>
    /// </remarks>
    /// <exception cref="ArgumentNullException"><paramref name="peImage"/> is null.</exception>
    public PEReader(ImmutableArray<byte> peImage)
    {
        if (peImage.IsDefault)
        {
            throw new ArgumentNullException("peImage");
        }

        _peImage = new ByteArrayMemoryProvider(peImage);
    }

    /// <summary>
    /// Disposes all memory allocated by the reader.
    /// </summary>
    /// <remarks>
    /// <see cref="Dispose"/> can be called multiple times (but not in parallel).
    /// It is not safe to call <see cref="Dispose"/> in parallel with any other operation on the <see cref="PEReader"/>
    /// or reading from <see cref="PEMemoryBlock"/>s retrieved from the reader.
    /// </remarks>
    public void Dispose()
    {
        var image = _peImage;
        if (image != null)
        {
            image.Dispose();
            _peImage = null;
        }

        var imageBlock = _lazyImageBlock;
        if (imageBlock != null)
        {
            imageBlock.Dispose();
            _lazyImageBlock = null;
        }

        var metadataBlock = _lazyMetadataBlock;
        if (metadataBlock != null)
        {
            metadataBlock.Dispose();
            _lazyMetadataBlock = null;
        }

        var peSectionBlocks = _lazyPESectionBlocks;
        if (peSectionBlocks != null)
        {
            foreach (var block in peSectionBlocks)
            {
                if (block != null)
                {
                    block.Dispose();
                }
            }

            _lazyPESectionBlocks = null;
        }
    }

    /// <summary>
    /// Gets the PE headers.
    /// </summary>
    /// <exception cref="BadImageFormatException">The headers contain invalid data.</exception>
    public PEHeaders PEHeaders
    {
        get
        {
            if (_lazyPEHeaders == null)
            {
                InitializePEHeaders();
            }

            return _lazyPEHeaders;
        }
    }

    private void InitializePEHeaders()
    {
        Debug.Assert(_peImage != null);

        StreamConstraints constraints;
        Stream stream = _peImage.GetStream(out constraints);

        PEHeaders headers;
        if (constraints.GuardOpt != null)
        {
            // The provider's stream is shared; serialize seeks/reads on its guard.
            lock (constraints.GuardOpt)
            {
                headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize);
            }
        }
        else
        {
            headers = ReadPEHeadersNoLock(stream, constraints.ImageStart, constraints.ImageSize);
        }

        // First thread to finish wins; later results are discarded.
        Interlocked.CompareExchange(ref _lazyPEHeaders, headers, null);
    }

    private static PEHeaders ReadPEHeadersNoLock(Stream stream, long imageStartPosition, int imageSize)
    {
        Debug.Assert(imageStartPosition >= 0 && imageStartPosition <= stream.Length);
        stream.Seek(imageStartPosition, SeekOrigin.Begin);
        return new PEHeaders(stream, imageSize);
    }

    /// <summary>
    /// Returns a view of the entire image as a pointer and length.
    /// </summary>
    /// <exception cref="InvalidOperationException">PE image not available.</exception>
    private AbstractMemoryBlock GetEntireImageBlock()
    {
        if (_lazyImageBlock == null)
        {
            if (_peImage == null)
            {
                throw new InvalidOperationException(SR.PEImageNotAvailable);
            }

            var newBlock = _peImage.GetMemoryBlock();
            if (Interlocked.CompareExchange(ref _lazyImageBlock, newBlock, null) != null)
            {
                // another thread created the block already, we need to dispose ours:
                newBlock.Dispose();
            }
        }

        return _lazyImageBlock;
    }

    private AbstractMemoryBlock GetMetadataBlock()
    {
        if (!HasMetadata)
        {
            throw new InvalidOperationException(SR.PEImageDoesNotHaveMetadata);
        }

        if (_lazyMetadataBlock == null)
        {
            Debug.Assert(_peImage != null, "We always have metadata if peImage is not available.");

            var newBlock = _peImage.GetMemoryBlock(PEHeaders.MetadataStartOffset, PEHeaders.MetadataSize);
            if (Interlocked.CompareExchange(ref _lazyMetadataBlock, newBlock, null) != null)
            {
                // another thread created the block already, we need to dispose ours:
                newBlock.Dispose();
            }
        }

        return _lazyMetadataBlock;
    }

    private AbstractMemoryBlock GetPESectionBlock(int index)
    {
        Debug.Assert(index >= 0 && index < PEHeaders.SectionHeaders.Length);
        Debug.Assert(_peImage != null);

        if (_lazyPESectionBlocks == null)
        {
            Interlocked.CompareExchange(ref _lazyPESectionBlocks, new AbstractMemoryBlock[PEHeaders.SectionHeaders.Length], null);
        }

        // Only create a block when the slot is still empty. The previous code
        // unconditionally allocated a fresh block on every call and immediately
        // disposed it once the cached one was found, wasting an allocation
        // (and a read for non-lazy providers) per call after the first.
        if (_lazyPESectionBlocks[index] == null)
        {
            var newBlock = _peImage.GetMemoryBlock(
                PEHeaders.SectionHeaders[index].PointerToRawData,
                PEHeaders.SectionHeaders[index].SizeOfRawData);

            if (Interlocked.CompareExchange(ref _lazyPESectionBlocks[index], newBlock, null) != null)
            {
                // another thread created the block already, we need to dispose ours:
                newBlock.Dispose();
            }
        }

        return _lazyPESectionBlocks[index];
    }

    /// <summary>
    /// Return true if the reader can access the entire PE image.
    /// </summary>
    /// <remarks>
    /// Returns false if the <see cref="PEReader"/> is constructed from a stream and only part of it is prefetched into memory.
    /// </remarks>
    public bool IsEntireImageAvailable
    {
        get { return _lazyImageBlock != null || _peImage != null; }
    }

    /// <summary>
    /// Gets a pointer to and size of the PE image if available (<see cref="IsEntireImageAvailable"/>).
    /// </summary>
    /// <exception cref="InvalidOperationException">The entire PE image is not available.</exception>
    public PEMemoryBlock GetEntireImage()
    {
        return new PEMemoryBlock(GetEntireImageBlock());
    }

    /// <summary>
    /// Returns true if the PE image contains CLI metadata.
    /// </summary>
    /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception>
    public bool HasMetadata
    {
        get { return PEHeaders.MetadataSize > 0; }
    }

    /// <summary>
    /// Loads PE section that contains CLI metadata.
    /// </summary>
    /// <exception cref="InvalidOperationException">The PE image doesn't contain metadata (<see cref="HasMetadata"/> returns false).</exception>
    /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception>
    public PEMemoryBlock GetMetadata()
    {
        return new PEMemoryBlock(GetMetadataBlock());
    }

    /// <summary>
    /// Loads PE section that contains the specified <paramref name="relativeVirtualAddress"/> into memory
    /// and returns a memory block that starts at <paramref name="relativeVirtualAddress"/> and ends at the end of the containing section.
    /// </summary>
    /// <param name="relativeVirtualAddress">Relative Virtual Address of the data to read.</param>
    /// <returns>
    /// An empty block if <paramref name="relativeVirtualAddress"/> doesn't represent a location in any of the PE sections of this PE image.
    /// </returns>
    /// <exception cref="BadImageFormatException">The PE headers contain invalid data.</exception>
    public PEMemoryBlock GetSectionData(int relativeVirtualAddress)
    {
        var sectionIndex = PEHeaders.GetContainingSectionIndex(relativeVirtualAddress);
        if (sectionIndex < 0)
        {
            return default(PEMemoryBlock);
        }

        int relativeOffset = relativeVirtualAddress - PEHeaders.SectionHeaders[sectionIndex].VirtualAddress;
        int size = PEHeaders.SectionHeaders[sectionIndex].VirtualSize - relativeOffset;

        AbstractMemoryBlock block;
        if (_peImage != null)
        {
            // Lazy provider: map just the containing section.
            block = GetPESectionBlock(sectionIndex);
        }
        else
        {
            // Entire image was prefetched; offset into it by the section's file position.
            block = GetEntireImageBlock();
            relativeOffset += PEHeaders.SectionHeaders[sectionIndex].PointerToRawData;
        }

        return new PEMemoryBlock(block, relativeOffset);
    }
}
}
| |
/***********************************************************************************************************************
* TorrentDotNET - A BitTorrent library based on the .NET platform *
* Copyright (C) 2004, Peter Ward *
* *
* This library is free software; you can redistribute it and/or modify it under the terms of the *
* GNU Lesser General Public License as published by the Free Software Foundation; *
* either version 2.1 of the License, or (at your option) any later version. *
* *
* This library is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License along with this library; *
* if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA *
***********************************************************************************************************************/
using System;
using IO = System.IO;
// 2/7/03
namespace PWLib.Platform
{
/// <summary>
/// Provides a self-expanding circular buffer.
/// Reads and writes through the public methods are synchronized on the buffer
/// instance; the buffer grows automatically when a write would overflow it.
/// </summary>
public class CircularBuffer : System.ICloneable
{
    /// <summary>
    /// Stream class to enable reading from the buffer. Supports seeking but does not support writing.
    /// </summary>
    private class CircularBufferReader : IO.Stream
    {
        private CircularBuffer buffer;

        /// <summary>Constructs a CircularBufferReader</summary>
        /// <param name="buffer">Buffer to use</param>
        public CircularBufferReader(CircularBuffer buffer)
        {
            this.buffer = buffer;
        }

        /// <summary>Moves the read pointer by the specified offset. Origin is always treated as Current; Begin and End are ignored.</summary>
        /// <param name="offset">Offset to seek to</param>
        /// <param name="origin">Origin. Always System.IO.SeekOrigin.Current</param>
        /// <returns>New read position, meaningless in a circular buffer</returns>
        public override long Seek(long offset, IO.SeekOrigin origin)
        {
            lock (this.buffer)
            {
                // Wrap forwards past the end, or backwards past the start.
                if (this.buffer.readPosition + offset >= this.buffer.internalData.Length)
                {
                    this.buffer.readPosition = (this.buffer.readPosition + offset) % this.buffer.internalData.Length;
                }
                else if (this.buffer.readPosition + offset < 0)
                {
                    this.buffer.readPosition = this.buffer.internalData.Length + this.buffer.readPosition + offset;
                }
                else
                    this.buffer.readPosition += offset;
                return this.buffer.readPosition;
            }
        }

        /// <summary></summary>
        /// <returns>Always true</returns>
        public override bool CanRead
        {
            get { return true; }
        }

        /// <summary></summary>
        /// <returns>Always false</returns>
        public override bool CanWrite
        {
            get { return false; }
        }

        /// <summary></summary>
        /// <returns>Always true</returns>
        public override bool CanSeek
        {
            get { return true; }
        }

        /// <summary></summary>
        /// <returns>Size of the buffer</returns>
        public override long Length
        {
            get { return this.buffer.internalData.Length; }
        }

        /// <summary>
        /// Not supported - a circular buffer does not have any positions!
        /// </summary>
        /// <returns></returns>
        public override long Position
        {
            get { throw new NotSupportedException(); }
            set { throw new NotSupportedException(); }
        }

        /// <summary>Reads data from the buffer</summary>
        /// <param name="data">Data to read into</param>
        /// <param name="offset">Offset within data to write to</param>
        /// <param name="length">Amount of data to read</param>
        /// <returns>Amount of data read</returns>
        public override int Read(byte[] data, int offset, int length)
        {
            return this.buffer.Read(data, offset, length);
        }

        public override int ReadByte()
        {
            return this.buffer.ReadByte();
        }

        /// <summary>Not supported</summary>
        /// <param name="data"></param>
        /// <param name="offset"></param>
        /// <param name="length"></param>
        public override void Write(byte[] data, int offset, int length)
        {
            throw new NotSupportedException();
        }

        public override void WriteByte(byte val)
        {
            throw new NotSupportedException();
        }

        /// <summary>Does nothing</summary>
        public override void Flush()
        {
        }

        /// <summary>Sets the length of the buffer, will truncate if smaller than the current size</summary>
        /// <param name="size">New size</param>
        public override void SetLength(long size)
        {
            this.buffer.SetLength(size);
        }
    }

    /// <summary>
    /// Stream class to enable writing to the buffer. Supports seeking but does not support reading.
    /// </summary>
    private class CircularBufferWriter : IO.Stream
    {
        private CircularBuffer buffer;

        /// <summary>Constructs a CircularBufferWriter</summary>
        /// <param name="buffer">Buffer to use</param>
        public CircularBufferWriter(CircularBuffer buffer)
        {
            this.buffer = buffer;
        }

        /// <summary>Moves the write pointer by the specified offset. Origin is always treated as Current; Begin and End are ignored.</summary>
        /// <param name="offset">Offset to seek to</param>
        /// <param name="origin">Origin. Always System.IO.SeekOrigin.Current</param>
        /// <returns>New write position, meaningless in a circular buffer</returns>
        public override long Seek(long offset, IO.SeekOrigin origin)
        {
            // FIX: this method previously mutated writePosition without any
            // synchronization, while the matching reader's Seek locks the buffer.
            // Lock here for consistency with every other mutation of the buffer.
            lock (this.buffer)
            {
                if (this.buffer.writePosition + offset >= this.buffer.internalData.Length)
                {
                    this.buffer.writePosition = this.buffer.writePosition + offset - this.buffer.internalData.Length;
                }
                else if (this.buffer.writePosition + offset < 0)
                {
                    this.buffer.writePosition = this.buffer.internalData.Length + this.buffer.writePosition + offset;
                }
                else
                    this.buffer.writePosition += offset;
                return this.buffer.writePosition;
            }
        }

        /// <summary></summary>
        /// <returns>Always false</returns>
        public override bool CanRead
        {
            get { return false; }
        }

        /// <summary></summary>
        /// <returns>Always true</returns>
        public override bool CanWrite
        {
            get { return true; }
        }

        /// <summary></summary>
        /// <returns>Always true</returns>
        public override bool CanSeek
        {
            get { return true; }
        }

        /// <summary></summary>
        /// <returns>Size of the buffer</returns>
        public override long Length
        {
            get { return this.buffer.internalData.Length; }
        }

        /// <summary>
        /// Not supported - a circular buffer does not have any positions!
        /// </summary>
        public override long Position
        {
            get { throw new NotSupportedException(); }
            set { throw new NotSupportedException(); }
        }

        public override void Write(byte[] data, int offset, int length)
        {
            this.buffer.Write(data, offset, length);
        }

        public override void WriteByte(byte val)
        {
            this.buffer.WriteByte(val);
        }

        /// <summary>Not supported</summary>
        /// <param name="data"></param>
        /// <param name="offset"></param>
        /// <param name="length"></param>
        public override int Read(byte[] data, int offset, int length)
        {
            throw new NotSupportedException();
        }

        public override int ReadByte()
        {
            throw new NotSupportedException();
        }

        /// <summary>Does nothing</summary>
        public override void Flush()
        {
        }

        /// <summary>Sets the length of the buffer, will truncate if smaller than the current size</summary>
        /// <param name="size">New size</param>
        public override void SetLength(long size)
        {
            this.buffer.SetLength(size);
        }
    }

    private const int defaultSize = 4092;

    // Backing storage; one slot is always left unused so that
    // readPosition == writePosition unambiguously means "empty".
    private byte[] internalData;
    private long readPosition = 0, writePosition = 0;
    private CircularBufferWriter writer;
    private CircularBufferReader reader;

    /// <summary>Provides a seekable stream to read from the buffer</summary>
    /// <returns>Reading stream</returns>
    public IO.Stream Reader
    {
        get { return this.reader; }
    }

    /// <summary>Provides a seekable stream to write to the buffer</summary>
    /// <returns>Writing stream</returns>
    public IO.Stream Writer
    {
        get { return this.writer; }
    }

    /// <summary>
    /// Number of bytes that are available to read
    /// </summary>
    public long DataAvailable
    {
        get
        {
            if (this.readPosition == this.writePosition)
                return 0;
            else if (this.readPosition > this.writePosition)
                // Data wraps around the end of the array.
                return this.internalData.Length - this.readPosition + this.writePosition;
            else
                return this.writePosition - this.readPosition;
        }
    }

    /// <summary>
    /// Constructs a circular buffer of default size
    /// </summary>
    public CircularBuffer()
        : this(defaultSize)
    {
    }

    /// <summary>
    /// Constructs a circular buffer of specified initial size
    /// </summary>
    /// <param name="size">Initial size of buffer</param>
    public CircularBuffer(int size)
    {
        // +1 for the sentinel slot that distinguishes full from empty.
        this.internalData = new byte[ size+1 ];
        this.writer = new CircularBufferWriter(this);
        this.reader = new CircularBufferReader(this);
    }

    /// <summary>
    /// Sets the length of the circular buffer. Will truncate if length is
    /// shorter than the size of the buffer
    /// </summary>
    /// <param name="length">New size of buffer</param>
    public void SetLength(long length)
    {
        long bytesTillEnd = this.internalData.Length - this.writePosition - 1;
        // copy data from the write pointer to the end into a new array
        byte[] rawData = new byte[ this.internalData.Length ];
        if (bytesTillEnd > 0)
            Array.Copy(this.internalData, this.writePosition + 1, rawData, 0, bytesTillEnd);
        // then append the rest of the data to the new array
        Array.Copy(this.internalData, 0, rawData, bytesTillEnd, this.writePosition);
        // set read pointer to the position relative to where it was
        this.readPosition += bytesTillEnd;
        this.readPosition %= this.internalData.Length;
        // set write pointer to the end of the freshly written data
        this.writePosition += bytesTillEnd;
        this.writePosition %= this.internalData.Length;
        // now we have the data all in order, recreate the internal array and then
        // copy the new array back into it
        int streamLength = Math.Min((int)length, rawData.Length);
        this.internalData = new byte[ length+1 ];
        Array.Copy(rawData, 0, this.internalData, 0, streamLength);
    }

    /// <summary>
    /// Reads data from the buffer
    /// </summary>
    /// <param name="data">Array to write to</param>
    /// <param name="offset">Offset in parameter data to write to</param>
    /// <param name="length">Amount of data to read</param>
    /// <returns>Actual amount read</returns>
    public int Read(byte[] data, int offset, int length)
    {
        lock (this)
        {
            if (this.writePosition == this.readPosition)
                return 0; // empty buffer
            long amountRead = 0;
            if (this.writePosition < this.readPosition)
            {
                // write pointer is behind the read pointer, then read up until the end of
                // the buffer and read from the start till the write position
                long bytesTillEnd = this.internalData.Length - this.readPosition;
                if (bytesTillEnd < length)
                {
                    long bytesFromStart = Math.Min(this.writePosition, length - bytesTillEnd);
                    Array.Copy(this.internalData, this.readPosition, data, offset, bytesTillEnd);
                    Array.Copy(this.internalData, 0, data, offset + bytesTillEnd, bytesFromStart);
                    amountRead = bytesTillEnd + bytesFromStart;
                }
                else
                {
                    Array.Copy(this.internalData, this.readPosition, data, offset, length);
                    amountRead = length;
                }
            }
            else
            {
                // write pointer is ahead of the read pointer, just read up until then
                long amountToCopy = Math.Min(this.writePosition - this.readPosition, length);
                Array.Copy(this.internalData, this.readPosition, data, offset, amountToCopy);
                amountRead = amountToCopy;
            }
            this.readPosition += amountRead;
            this.readPosition %= this.internalData.Length;
            return (int)amountRead;
        }
    }

    /// <summary>
    /// Writes data to the buffer
    /// </summary>
    /// <param name="data">Array to read into</param>
    /// <param name="offset">Offset in parameter data to read to</param>
    /// <param name="length">Amount of data to write</param>
    public void Write(byte[] data, int offset, int length)
    {
        lock (this)
        {
            // test if the buffer needs to be resized to accomodate the written data
            long spaceAvailable = 0;
            if (this.readPosition <= this.writePosition)
                spaceAvailable = this.internalData.Length - this.writePosition + this.readPosition - 1;
            else
                spaceAvailable = this.readPosition - this.writePosition - 1;
            if (spaceAvailable < length)
                this.SetLength(this.internalData.Length - spaceAvailable + length - 1);
            if (this.readPosition <= this.writePosition)
            {
                // read pointer is behind the write pointer, write till the end
                // of the buffer then go back to the start
                long bytesTillEnd = this.internalData.Length - this.writePosition;
                if (bytesTillEnd < length)
                {
                    // length to copy is greater than the size of the data until the end of the buffer,
                    // so copy the remaining data until the end
                    Array.Copy(data, offset, this.internalData, this.writePosition, bytesTillEnd);
                    long bytesFromStart = Math.Min(this.readPosition, length - bytesTillEnd);
                    if (bytesFromStart > 0)
                        Array.Copy(data, offset + bytesTillEnd, this.internalData, 0, bytesFromStart);
                }
                else
                {
                    // length is not greater than the size of the data until the end, so
                    // just copy it over
                    Array.Copy(data, offset, this.internalData, this.writePosition, length);
                }
            }
            else
            {
                // write pointer is behind the read pointer, simply write up until then
                long amountToCopy = Math.Min(this.readPosition - this.writePosition - 1, length);
                Array.Copy(data, offset, this.internalData, this.writePosition, amountToCopy);
            }
            this.writePosition += length;
            this.writePosition %= this.internalData.Length;
        }
    }

    public int ReadByte()
    {
        byte[] b = new byte[1];
        if (this.Read(b, 0, 1) > 0)
            return (int)b[0];
        else
            // NOTE(review): Stream.ReadByte conventionally returns -1 at end of
            // stream; returning 0 here is ambiguous with a legitimate zero byte.
            // Left unchanged because existing callers may depend on 0.
            return 0;
    }

    public void WriteByte(byte b)
    {
        byte[] b2 = new byte[] { b };
        this.Write(b2, 0, 1);
    }

    #region ICloneable members
    /// <summary></summary>
    /// <returns>Clone of the buffer</returns>
    object System.ICloneable.Clone()
    {
        return this.Clone();
    }

    /// <summary></summary>
    /// <returns>Clone of the buffer</returns>
    public CircularBuffer Clone()
    {
        CircularBuffer buffer = new CircularBuffer(this.internalData.Length);
        System.Array.Copy(this.internalData, 0, buffer.internalData, 0, this.internalData.Length);
        buffer.readPosition = this.readPosition;
        buffer.writePosition = this.writePosition;
        return buffer;
    }
    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Globalization;
namespace System.IO
{
/// <summary>
/// Helper for reading config files where each row is a key-value data pair.
/// The input key-values must not have any whitespace within them.
/// Keys are only matched if they begin a line, with no preceding whitespace.
/// </summary>
internal struct RowConfigReader
{
private readonly string _buffer;
private readonly StringComparison _comparisonKind;
private int _currentIndex;
/// <summary>
/// Constructs a new RowConfigReader which reads from the given string,
/// using ordinal string comparison.
/// </summary>
/// <param name="buffer">The string to parse through.</param>
public RowConfigReader(string buffer)
    : this(buffer, StringComparison.Ordinal)
{
}
/// <summary>
/// Constructs a new RowConfigReader which reads from the given string.
/// </summary>
/// <param name="buffer">The string to parse through.</param>
/// <param name="comparisonKind">The comparison kind to use.</param>
public RowConfigReader(string buffer, StringComparison comparisonKind)
{
    // Reading starts at the beginning of the buffer.
    _currentIndex = 0;
    _comparisonKind = comparisonKind;
    _buffer = buffer;
}
/// <summary>
/// Gets the next occurrence of the given key, from the current position of the reader,
/// or throws if no occurrence of the key exists in the remainder of the string.
/// </summary>
/// <param name="key">The key whose value is sought.</param>
/// <returns>The value associated with the next occurrence of the key.</returns>
/// <exception cref="InvalidOperationException">No further occurrence of the key exists.</exception>
public string GetNextValue(string key)
{
    string value;
    if (TryGetNextValue(key, out value))
    {
        return value;
    }
    throw new InvalidOperationException("Couldn't get next value with key " + key);
}
/// <summary>
/// Tries to get the next occurrence of the given key from the current position of the reader.
/// If successful, returns true and stores the result in 'value'. Otherwise, returns false.
/// </summary>
public bool TryGetNextValue(string key, out string value)
{
Debug.Assert(_buffer != null);
if (_currentIndex >= _buffer.Length)
{
value = null;
return false;
}
// First, find the key, by repeatedly searching for occurrences.
// We only match an occurrence if it starts a line, by itself, with no preceding whitespace.
int keyIndex;
if (!TryFindNextKeyOccurrence(key, _currentIndex, out keyIndex))
{
value = null;
return false;
}
// Next, we will take the end of the line, and look backwards for the start of the value.
// NOTE: This assumes that the "value" does not have any whitespace in it, nor is there any
// after. This is the format of most "row-based" config files in /proc/net, etc.
int afterKey = keyIndex + key.Length;
int endOfValue;
int endOfLine = _buffer.IndexOf(Environment.NewLine, afterKey, _comparisonKind);
if (endOfLine == -1)
{
// There may not be a newline after this key, if we've reached the end of the file.
endOfLine = _buffer.Length - 1;
endOfValue = endOfLine;
}
else
{
endOfValue = endOfLine - 1;
}
int lineLength = endOfLine - keyIndex; // keyIndex is the start of the line.
int whitespaceBeforeValue = _buffer.LastIndexOf('\t', endOfLine, lineLength);
if (whitespaceBeforeValue == -1)
{
whitespaceBeforeValue = _buffer.LastIndexOf(' ', endOfLine, lineLength); // try space as well
}
int valueIndex = whitespaceBeforeValue + 1; // Get the first character after the whitespace.
int valueLength = endOfValue - whitespaceBeforeValue;
if (valueIndex <= keyIndex || valueIndex == -1 || valueLength == 0)
{
// No value found after the key.
value = null;
return false;
}
value = _buffer.Substring(valueIndex, valueLength); // Grab the whole value string.
_currentIndex = endOfLine + 1;
return true;
}
private bool TryFindNextKeyOccurrence(string key, int startIndex, out int keyIndex)
{
// Loop until end of file is reached, or a match is found.
while (true)
{
keyIndex = _buffer.IndexOf(key, startIndex, _comparisonKind);
if (keyIndex == -1)
{
// Reached end of string with no match.
return false;
}
// Check If the match is at the beginning of the string, or is preceded by a newline.
else if (keyIndex == 0
|| (keyIndex >= Environment.NewLine.Length && _buffer.Substring(keyIndex - Environment.NewLine.Length, Environment.NewLine.Length) == Environment.NewLine))
{
// Check if the match is followed by whitespace, meaning it is not part of a larger word.
if (HasFollowingWhitespace(keyIndex, key.Length))
{
return true;
}
}
startIndex = startIndex + key.Length;
}
}
private bool HasFollowingWhitespace(int keyIndex, int length)
{
return (keyIndex + length < _buffer.Length)
&& (_buffer[keyIndex + length] == ' ' || _buffer[keyIndex + length] == '\t');
}
/// <summary>
/// Gets the next occurrence of the key in the string, and parses it as an Int32.
/// Throws if the key is not found in the remainder of the string, or if the key
/// cannot be successfully parsed into an Int32.
/// </summary>
/// <remarks>
/// This is mainly provided as a helper because most Linux config/info files
/// store integral data.
/// </remarks>
public int GetNextValueAsInt32(string key)
{
// PERF: We don't need to allocate a new string here, we can parse an Int32 "in-place" in the existing string.
string value = GetNextValue(key);
int result;
if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out result))
{
return result;
}
else
{
throw new InvalidOperationException("Unable to parse value " + value + " of key " + key + " as an Int32.");
}
}
/// <summary>
/// Gets the next occurrence of the key in the string, and parses it as an Int64.
/// Throws if the key is not found in the remainder of the string, or if the key
/// cannot be successfully parsed into an Int64.
/// </summary>
/// <remarks>
/// This is mainly provided as a helper because most Linux config/info files
/// store integral data.
/// </remarks>
public long GetNextValueAsInt64(string key)
{
// PERF: We don't need to allocate a new string here, we can parse an Int64 "in-place" in the existing string.
string value = GetNextValue(key);
long result;
if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out result))
{
return result;
}
else
{
throw new InvalidOperationException("Unable to parse value " + value + " of key " + key + " as an Int64.");
}
}
/// <summary>
/// Reads the value of the first occurrence of the given key contained in the string given.
/// </summary>
/// <param name="data">The key-value row configuration string.</param>
/// <param name="key">The key to find.</param>
/// <returns>The value of the row containing the first occurrence of the key.</returns>
public static string ReadFirstValueFromString(string data, string key)
{
return new RowConfigReader(data).GetNextValue(key);
}
}
}
| |
using System;
using Foundation;
using UIKit;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Linq;
using AVFoundation;
using CoreVideo;
using CoreMedia;
using CoreGraphics;
using CoreFoundation;
using System.Timers;
namespace ManualCameraControls
{
public partial class WhiteBalanceViewController : UIViewController
{
#region Private Variables
// Last NSError produced by LockForConfiguration; captured but not inspected here.
private NSError Error;
// True while continuous auto white balance drives the sliders (manual input ignored).
private bool Automatic = true;
#endregion
#region Computed Properties
/// <summary>
/// Returns the delegate of the currently running application.
/// </summary>
/// <value>The application delegate.</value>
public AppDelegate ThisApp {
get { return (AppDelegate)UIApplication.SharedApplication.Delegate; }
}
/// <summary>
/// Gets or sets the timer that polls the capture device every 5 seconds to
/// refresh the sliders while automatic white balance is active.
/// </summary>
/// <value>The sample timer.</value>
public Timer SampleTimer { get; set; }
#endregion
#region Constructors
// Invoked when the controller is instantiated from a storyboard/XIB.
public WhiteBalanceViewController (IntPtr handle) : base (handle)
{
}
#endregion
#region Private Methods
/// <summary>
/// Pushes the current Temperature/Tint slider values to the capture device
/// as locked white-balance gains.
/// </summary>
private void SetTemperatureAndTint() {
// Grab current temp and tint from the sliders
// NOTE: The following line explodes in Xamarin with no error being thrown...
AVCaptureWhiteBalanceTemperatureAndTintValues TempAndTint = new AVCaptureWhiteBalanceTemperatureAndTintValues(Temperature.Value, Tint.Value);
// Convert temperature/tint into device-specific RGB gains
var gains = ThisApp.CaptureDevice.GetDeviceWhiteBalanceGains (TempAndTint);
// Apply the new gains; the device must be locked while configuring
ThisApp.CaptureDevice.LockForConfiguration(out Error);
ThisApp.CaptureDevice.SetWhiteBalanceModeLockedWithDeviceWhiteBalanceGains (gains, (time) => {
// Ignore callback for now
});
ThisApp.CaptureDevice.UnlockForConfiguration();
}
#endregion
#region Override Methods
/// <summary>
/// Wires up the UI after the view loads: slider ranges, the sampling timer,
/// and the automatic/manual segment, slider and gray-card handlers.
/// </summary>
public override void ViewDidLoad ()
{
base.ViewDidLoad ();
// Hide no camera label
NoCamera.Hidden = ThisApp.CameraAvailable;
// Attach to camera view
ThisApp.Recorder.DisplayView = CameraView;
// Set min and max values
// NOTE(review): both sliders are bounded by MaxWhiteBalanceGain, which is a gain
// limit rather than a Kelvin/tint range — confirm this is the intended bound.
Temperature.MinValue = 1.0f;
Temperature.MaxValue = ThisApp.CaptureDevice.MaxWhiteBalanceGain;
Tint.MinValue = 1.0f;
Tint.MaxValue = ThisApp.CaptureDevice.MaxWhiteBalanceGain;
// Create a timer to monitor and update the UI every 5 seconds
SampleTimer = new Timer (5000);
SampleTimer.Elapsed += (sender, e) => {
// Convert the device's current gains back to temperature/tint
var TempAndTint = ThisApp.CaptureDevice.GetTemperatureAndTintValues(ThisApp.CaptureDevice.DeviceWhiteBalanceGains);
// Timer fires on a background thread; marshal slider updates to the UI thread
Temperature.BeginInvokeOnMainThread(() =>{
Temperature.Value = TempAndTint.Temperature;
});
Tint.BeginInvokeOnMainThread(() =>{
Tint.Value = TempAndTint.Tint;
});
};
// Watch for automatic/manual segment changes
Segments.ValueChanged += (object sender, EventArgs e) => {
// Lock device for change
ThisApp.CaptureDevice.LockForConfiguration(out Error);
// Take action based on the segment selected
switch(Segments.SelectedSegment) {
case 0:
// Activate continuous auto white balance and start monitoring values
Temperature.Enabled = false;
Tint.Enabled = false;
ThisApp.CaptureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
SampleTimer.Start();
Automatic = true;
break;
case 1:
// Stop auto white balance and allow the user to control the camera
SampleTimer.Stop();
ThisApp.CaptureDevice.WhiteBalanceMode = AVCaptureWhiteBalanceMode.Locked;
Automatic = false;
Temperature.Enabled = true;
Tint.Enabled = true;
break;
}
// Unlock device
ThisApp.CaptureDevice.UnlockForConfiguration();
};
// Monitor slider changes (only honored in manual mode)
Temperature.TouchUpInside += (object sender, EventArgs e) => {
// If we are in the automatic mode, ignore changes
if (Automatic) return;
// Update white balance
SetTemperatureAndTint();
};
Tint.TouchUpInside += (object sender, EventArgs e) => {
// If we are in the automatic mode, ignore changes
if (Automatic) return;
// Update white balance
SetTemperatureAndTint();
};
GrayCardButton.TouchUpInside += (sender, e) => {
// If we are in the automatic mode, ignore changes
if (Automatic) return;
// Get gray-card (gray world) gain estimate from the device
var gains = ThisApp.CaptureDevice.GrayWorldDeviceWhiteBalanceGains;
// Apply the new gains under a configuration lock
ThisApp.CaptureDevice.LockForConfiguration(out Error);
ThisApp.CaptureDevice.SetWhiteBalanceModeLockedWithDeviceWhiteBalanceGains (gains, (time) => {
// Ignore callback for now
});
ThisApp.CaptureDevice.UnlockForConfiguration();
};
}
/// <summary>
/// Called after the view appears: reattaches the preview, starts the capture
/// session and resumes UI sampling.
/// </summary>
/// <param name="animated">If set to <c>true</c> animated.</param>
public override void ViewDidAppear (bool animated)
{
base.ViewDidAppear (animated);
// Start updating the display
if (ThisApp.CameraAvailable) {
// Remap to this camera view
ThisApp.Recorder.DisplayView = CameraView;
ThisApp.Session.StartRunning ();
SampleTimer.Start ();
}
}
/// <summary>
/// Called before the view disappears: stops sampling and the capture session.
/// </summary>
/// <param name="animated">If set to <c>true</c> animated.</param>
public override void ViewWillDisappear (bool animated)
{
// Stop display
if (ThisApp.CameraAvailable) {
SampleTimer.Stop ();
ThisApp.Session.StopRunning ();
}
base.ViewWillDisappear (animated);
}
#endregion
}
}
| |
using System;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.ComponentModel.Design;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.Shell;
using EnvDTE;
using EnvDTE80;
using EnvDTE90;
using EnvDTE100;
using System.Collections.Generic;
using System.IO;
using System.Web;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.VCProjectEngine;
namespace Spraylight.MurlTools
{
/// <summary>
/// This is the class that implements the package exposed by this assembly.
///
/// The minimum requirement for a class to be considered a valid package for Visual Studio
/// is to implement the IVsPackage interface and register itself with the shell.
/// This package uses the helper classes defined inside the Managed Package Framework (MPF)
/// to do it: it derives from the Package class that provides the implementation of the
/// IVsPackage interface and uses the registration attributes defined in the framework to
/// register itself and its components with the shell.
/// </summary>
// This attribute tells the PkgDef creation utility (CreatePkgDef.exe) that this class is
// a package.
[PackageRegistration(UseManagedResourcesOnly = true)]
// This attribute is used to register the information needed to show this package
// in the Help/About dialog of Visual Studio.
[InstalledProductRegistration("#110", "#112", "1.0", IconResourceID = 400)]
// This attribute is needed to let the shell know that this package exposes some menus.
[ProvideMenuResource("Menus.ctmenu", 1)]
// Autoload as soon as a solution exists.
[ProvideAutoLoad(UIContextGuids80.SolutionExists)]
[Guid(GuidList.guidMurlToolsPkgString)]
public sealed class MurlToolsPackage : Package
{
/// <summary>
/// Default constructor of the package.
/// Inside this method you can place any initialization code that does not require
/// any Visual Studio service because at this point the package object is created but
/// not sited yet inside Visual Studio environment. The place to do all the other
/// initialization is the Initialize method.
/// </summary>
public MurlToolsPackage()
{
Debug.WriteLine(string.Format(CultureInfo.CurrentCulture, "Entering constructor for: {0}", this.ToString()));
}
/////////////////////////////////////////////////////////////////////////////
// Overridden Package Implementation
#region Package Members
/// <summary>
/// Initialization of the package; this method is called right after the package is sited, so this is the place
/// where you can put all the initialization code that rely on services provided by VisualStudio.
/// Registers the Duplicate, Refresh and ShowHelp menu commands and hooks the Edit.Delete command.
/// </summary>
protected override void Initialize()
{
Debug.WriteLine(string.Format(CultureInfo.CurrentCulture, "Entering Initialize() of: {0}", this.ToString()));
base.Initialize();
// Add our command handlers for menu (commands must exist in the .vsct file)
OleMenuCommandService mcs = GetService(typeof(IMenuCommandService)) as OleMenuCommandService;
if (null != mcs)
{
// Create the command for the "Duplicate File" menu item.
CommandID menuCommandID = new CommandID(GuidList.guidDuplicateFileCmdSet, (int)PkgCmdIDList.cmdidDuplicateCmd);
MenuCommand menuItem = new MenuCommand(MenuItemCallbackDuplicate, menuCommandID);
mcs.AddCommand(menuItem);
}
// Add our command handlers for menu (commands must exist in the .vsct file)
if (null != mcs)
{
// Create the command for the "Refresh" menu item.
CommandID menuCommandID = new CommandID(GuidList.guidRefreshCmdSet, (int)PkgCmdIDList.cmdidRefreshCmd);
MenuCommand menuItem = new MenuCommand(MenuItemCallbackRefresh, menuCommandID);
mcs.AddCommand(menuItem);
}
// Add our command handlers for menu (commands must exist in the .vsct file)
if (null != mcs)
{
// Create the command for the "Show Help" menu item.
CommandID menuCommandID = new CommandID(GuidList.guidShowHelpCmdSet, (int)PkgCmdIDList.cmdidShowHelp);
MenuCommand menuItem = new MenuCommand(MenuItemCallbackShowHelp, menuCommandID);
mcs.AddCommand(menuItem);
}
// Override Edit.Delete command so we can offer a remove-vs-delete choice.
_applicationObject = (DTE)GetService(typeof(DTE));
var command = _applicationObject.Commands.Item("Edit.Delete");
_removeEvent = _applicationObject.Events.CommandEvents[command.Guid, command.ID];
_removeEvent.BeforeExecute += OnBeforeDeleteCommand;
}
// DTE automation root, obtained in Initialize.
private EnvDTE.DTE _applicationObject;
// Kept in a field so the BeforeExecute subscription is not garbage collected.
private CommandEvents _removeEvent;
#endregion
/// <summary>
/// Helper method: Store all selected project items in the selectedProjectItems array.
/// Only leaf items (no child project items) that expose a "FullPath" property are kept;
/// selected projects and the solution node are ignored.
/// </summary>
private void getSelectedItems()
{
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
UIHierarchy solutionExplorer = _applicationObject.ToolWindows.SolutionExplorer;
Array selectedItems = solutionExplorer.SelectedItems as Array;
selectedProjectItems.Clear();
if (selectedItems != null)
{
foreach (UIHierarchyItem selItem in selectedItems)
{
if (selItem.Object is EnvDTE.ProjectItem)
{
ProjectItem prjItem = selItem.Object as ProjectItem;
if (prjItem != null && prjItem.ProjectItems.Count == 0)
{
if (HasProperty(prjItem.Properties, "FullPath"))
{
selectedProjectItems.Add(prjItem);
}
}
}
else if (selItem.Object is EnvDTE.Project)
{ }
else if (selItem.Object is EnvDTE.Solution)
{ }
}
}
}
/// <summary>
/// Returns true when the Solution Explorer is active and the current active
/// project reports its "Kind" property as "VCProject".
/// </summary>
private bool IsVCProject()
{
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
if (_applicationObject.ActiveWindow.Object != _applicationObject.ToolWindows.SolutionExplorer)
return false;
Array projs = _applicationObject.ActiveSolutionProjects as Array;
if (projs.Length == 0)
{
return false;
}
string type = "Unknown";
try
{
EnvDTE.Project proj = projs.GetValue(0) as EnvDTE.Project;
if (HasProperty(proj.Properties, "Kind"))
{
type = proj.Properties.Item("Kind").Value.ToString();
}
if (type.Equals("VCProject"))
{
//VCProject vcProject = (VCProject)proj.Object;
return true;
}
}
catch (Exception) { }
return false;
}
/// <summary>
/// Called on Edit.Delete command events. For VC++ projects, shows a dialog that lets the
/// user choose between deleting the selected files from disk or only removing them from
/// the project, then cancels the built-in Edit.Delete handling.
/// </summary>
/// <param name="Guid"></param>
/// <param name="ID"></param>
/// <param name="CustomIn"></param>
/// <param name="CustomOut"></param>
/// <param name="CancelDefault">Set to true to suppress the default command behavior.</param>
private void OnBeforeDeleteCommand(string Guid, int ID, Object CustomIn, Object CustomOut, ref bool CancelDefault)
{
if (!IsVCProject())
{
return;
}
getSelectedItems();
if (selectedProjectItems.Count == 0)
return;
DlgRemoveDelete dlg = new DlgRemoveDelete();
var m = dlg.ShowModal();
int result = dlg.getResult();
if (result == DlgRemoveDelete.DELETE)
{
// Iterate backwards so removing items does not shift pending indices.
for (int i = selectedProjectItems.Count - 1; i >= 0; i--)
{
try
{
selectedProjectItems[i].Delete();
}
catch (Exception) { }
}
}
else if (result == DlgRemoveDelete.REMOVE)
{
for (int i = selectedProjectItems.Count - 1; i >= 0; i--)
{
try
{
selectedProjectItems[i].Remove();
}
catch (Exception) { }
}
}
CancelDefault = true;
}
/// <summary>
/// Duplicate File Callback: copies each selected file, selects the last copy in the
/// Solution Explorer and switches it into rename mode.
/// </summary>
private void MenuItemCallbackDuplicate(object sender, EventArgs e)
{
getSelectedItems();
duplicateFiles();
SelectLastAdded();
EnterRenameState();
}
/// <summary>
/// Duplicates all files collected in selectedProjectItems, walking every project
/// in the solution.
/// </summary>
private void duplicateFiles()
{
lastAdded = null;
if (selectedProjectItems.Count == 0)
return;
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
for (int i = 1; i <= _applicationObject.Solution.Projects.Count; i++)
{
Project project = _applicationObject.Solution.Projects.Item(i);
ProjectItems projItems = project.ProjectItems;
duplicateFiles(projItems);
}
}
/// <summary>
/// Recursively walks a project-item tree and duplicates the items that were selected.
/// </summary>
private void duplicateFiles(ProjectItems projItems)
{
ProjectItem projItem;
for (int i = 1; i <= projItems.Count; i++)
{
projItem = projItems.Item(i);
Debug.Write(projItem.Name + " " + projItem.Kind);
try
{
if (projItem.ProjectItems.Count > 0)
{
duplicateFiles(projItem.ProjectItems);
}
else if (selectedProjectItems.Contains(projItem))
{
// Remove from the pending set so each item is only copied once.
selectedProjectItems.Remove(projItem);
String newPath = copyFile(projItem);
if (newPath != null)
{
try
{
lastAdded = projItems.AddFromFile(newPath);
}
catch (Exception) { }
}
}
}
catch (Exception)
{
}
}
}
// determine the new name ("<name> - Copy[ (n)]<ext>") and create a copy of the file
private String copyFile(ProjectItem projItem)
{
string path = projItem.get_FileNames(1);
if (!File.Exists(path))
return null;
int index = path.LastIndexOf('.');
if (index < 0)
return null;
string newBase = path.Substring(0, index) + " - Copy";
string newExt = path.Substring(index);
string newPath = newBase + newExt;
index = 1;
while (File.Exists(newPath))
{
index++;
newPath = newBase + " (" + index + ")" + newExt;
}
File.Copy(path, newPath);
return newPath;
}
/// <summary>
/// Selects the most recently added project item (lastAdded) in the Solution Explorer
/// by walking the UI hierarchy.
/// </summary>
private void SelectLastAdded()
{
if (lastAdded != null)
{
List<UIHierarchyItems> itemStack = new List<UIHierarchyItems>();
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
UIHierarchy solutionExplorer = _applicationObject.ToolWindows.SolutionExplorer;
UIHierarchyItems items = solutionExplorer.UIHierarchyItems;
itemStack.Add(solutionExplorer.UIHierarchyItems);
while (itemStack.Count > 0)
{
int lastIndex = itemStack.Count - 1;
items = itemStack[lastIndex];
itemStack.RemoveAt(lastIndex);
foreach (UIHierarchyItem item in items)
{
if (item.Object == lastAdded)
{
item.Select(vsUISelectionType.vsUISelectionTypeSelect);
return;
}
if (item.UIHierarchyItems != null)
{
itemStack.Add(item.UIHierarchyItems);
}
}
}
}
}
/// <summary>
/// Puts the currently selected Solution Explorer item into inline rename mode.
/// </summary>
private void EnterRenameState()
{
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
_applicationObject.ExecuteCommand("File.Rename");
}
/*
* Opens the Murl Engine API documentation for the current selection (or the word at the cursor).
* See also http://www.mztools.com/articles/2006/MZ2006009.aspx for info about how to determine the code element at the cursor position.
*/
private void MenuItemCallbackShowHelp(object sender, EventArgs e)
{
EnvDTE80.DTE2 dte = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
string searchText = "";
try
{
Document activeDoc = dte.ActiveDocument;
if (activeDoc != null && activeDoc.Selection != null)
{
TextSelection sel = activeDoc.Selection as TextSelection;
if (sel.Text.Length == 0)
{
// No selection: expand to the word under the cursor.
sel.WordLeft(true);
searchText = sel.Text;
sel.WordRight(true);
searchText = searchText + sel.Text;
}
else
{
searchText = sel.Text;
}
}
if (searchText.Length == 0)
{
System.Diagnostics.Process.Start("http://murlengine.com/api");
}
else
{
if (searchText.Length > 255)
{
searchText = searchText.Substring(0, 255);
}
System.Diagnostics.Process.Start("http://murlengine.com/api/en/search.php?q=" + System.Web.HttpUtility.UrlEncode(searchText));
}
}
catch (Exception)
{ }
Debug.Print("Show Help: " + searchText);
}
/// <summary>
/// Refresh menu callback; only acts on Visual C++ projects.
/// </summary>
private void MenuItemCallbackRefresh(object sender, EventArgs e)
{
if (!IsVCProject())
{
return;
}
RefreshSelectedFolder();
}
/// <summary>
/// Refresh selected filter/folder callback
/// </summary>
void RefreshSelectedFolder()
{
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
object[] selectedItems = (object[])_applicationObject.ToolWindows.SolutionExplorer.SelectedItems;
foreach (EnvDTE.UIHierarchyItem selectedItem in selectedItems)
{
if (!(selectedItem.Object is EnvDTE.ProjectItem) || selectedItem.UIHierarchyItems == null)
{
continue;
}
string path = determinePath(selectedItem);
if (path.Length == 0)
{
path = guessPath(selectedItem);
}
if (path.Length > 0)
{
RefreshSelectedFolder(selectedItem, path);
}
}
}
/// <summary>
/// Fallback when determinePath fails: guesses the folder as
/// "&lt;solutionDir&gt;\..\..\..\&lt;filterName&gt;" and returns it if it exists, otherwise "".
/// </summary>
private string guessPath(EnvDTE.UIHierarchyItem selectedItem)
{
// try "..\..\..\filterName" relative to the solution directory
DTE dte = (DTE)GetService(typeof(DTE));
dte = selectedItem.DTE;
string solutionDir = System.IO.Path.GetDirectoryName(dte.Solution.FullName);
Debug.Write(solutionDir+"\n");
// FIX: the previous code appended the relative segment to solutionDir twice and without
// a path separator (producing e.g. "C:\sln..\..\..\Name..\..\..\Name"), so the guessed
// path could never match the intended directory. Build it once with Path.Combine instead.
string guessedPath = Path.GetFullPath(Path.Combine(solutionDir, "..", "..", "..", selectedItem.Name));
Debug.Write(guessedPath+"\n");
if (Directory.Exists(guessedPath))
{
Debug.Write("Guessed path found!\n");
return guessedPath;
}
return "";
}
/// <summary>
/// Tries to determine the on-disk directory of a filter item from the "FullPath" of any
/// nested project file, recursing into sub filters if necessary. Returns "" when unknown.
/// </summary>
private string determinePath(EnvDTE.UIHierarchyItem selectedItem)
{
List<UIHierarchyItem> filterList = new List<UIHierarchyItem>();
// The filter element does not have a path.
// Try to determine the path from nested project files.
bool oldExpandedVal = selectedItem.UIHierarchyItems.Expanded;
selectedItem.UIHierarchyItems.Expanded = true;
foreach (UIHierarchyItem item in selectedItem.UIHierarchyItems)
{
if (item.Object is EnvDTE.ProjectItem)
{
if (item.UIHierarchyItems != null && item.UIHierarchyItems.Count > 0)
{
//filter element
filterList.Add(item);
continue;
}
ProjectItem prjItem = item.Object as ProjectItem;
Property prop = GetProperty(prjItem.Properties, "FullPath");
if (prop != null)
{
string res = Path.GetDirectoryName(prop.Value.ToString());
if (Directory.Exists(res))
{
selectedItem.UIHierarchyItems.Expanded = oldExpandedVal;
return Path.GetDirectoryName(prop.Value.ToString());
}
}
}
}
// if not found, try to determine path from sub folders/filters
foreach (UIHierarchyItem item in filterList)
{
string path = determinePath(item);
if (path.Length > 0)
{
try
{
// Strip "\<filterName>" from the child's path to get this item's directory.
string res = path.Substring(0, path.Length - item.Name.Length-1);
if (res.EndsWith(selectedItem.Name) && Directory.Exists(res))
{
selectedItem.UIHierarchyItems.Expanded = oldExpandedVal;
return res;
}
}
catch (Exception) { }
}
}
selectedItem.UIHierarchyItems.Expanded = oldExpandedVal;
// not able to determine path
return "";
}
/// <summary>
/// Debug helper: dumps the UI hierarchy below the given item to the debug output.
/// </summary>
private void printHierarchy(string prefix, EnvDTE.UIHierarchyItem item)
{
Debug.Write(prefix+item.Name+"\n");
bool oldval = item.UIHierarchyItems.Expanded;
item.UIHierarchyItems.Expanded = true;
foreach (EnvDTE.UIHierarchyItem child in item.UIHierarchyItems)
{
printHierarchy(prefix + " ", child);
}
item.UIHierarchyItems.Expanded = oldval;
}
/// <summary>
/// Synchronizes the given filter with the directory 'dir': removes references whose
/// files no longer exist, adds files and directories present on disk but missing from
/// the project, then recurses into sub filters.
/// </summary>
private void RefreshSelectedFolder(EnvDTE.UIHierarchyItem selectedItem, string dir)
{
List<string> pathList = new List<string>();
List<UIHierarchyItem> filterList = new List<UIHierarchyItem>();
string path = "";
//printHierarchy("", selectedItem);
bool oldval = selectedItem.UIHierarchyItems.Expanded;
selectedItem.UIHierarchyItems.Expanded = true;
// Remove references whose files no longer exist on disk
foreach (UIHierarchyItem item in selectedItem.UIHierarchyItems)
{
if (!(item.Object is EnvDTE.ProjectItem))
{
continue;
}
bool oldval2 = selectedItem.UIHierarchyItems.Expanded;
selectedItem.UIHierarchyItems.Expanded = true;
if (item.UIHierarchyItems != null && item.UIHierarchyItems.Count > 0)
{
filterList.Add(item);
selectedItem.UIHierarchyItems.Expanded = oldval2;
//filter element
continue;
}
selectedItem.UIHierarchyItems.Expanded = oldval2;
ProjectItem prjItem = item.Object as ProjectItem;
Property prop = GetProperty(prjItem.Properties, "FullPath");
if (prop != null)
{
path = prop.Value.ToString();
if (!File.Exists(path))
{
// remove prjItem if path does not exist
try
{
prjItem.Remove();
}
catch (Exception) { }
}
else
{
// else store in pathList
pathList.Add(path);
}
}
else
{
//empty filter
filterList.Add(item);
}
}
selectedItem.UIHierarchyItems.Expanded = oldval;
// Add existing files which are not in pathList
if (dir.Length > 0)
{
string[] fileEntries = Directory.GetFiles(dir);
foreach (string fileName in fileEntries)
{
if (!pathList.Contains(fileName))
{
ProjectItem filter = selectedItem.Object as EnvDTE.ProjectItem;
if (filter != null && filter.ProjectItems != null)
{
try
{
filter.ProjectItems.AddFromFile(fileName);
}
catch (Exception) { }
}
}
}
}
// Add existing directories which are not listed as filters
if (dir.Length > 0)
{
string[] dirEntries = Directory.GetDirectories(dir);
List<string> fl = new List<string>();
foreach (UIHierarchyItem item in filterList)
{
fl.Add(dir+"\\"+item.Name);
}
foreach (string dirName in dirEntries)
{
if (!fl.Contains(dirName))
{
ProjectItem filter = selectedItem.Object as EnvDTE.ProjectItem;
VCFilter vcFilter = (VCFilter)filter.Object;
if (vcFilter != null)
{
addNewFilterRecursive(vcFilter, dirName, dir);
}
// add
Debug.WriteLine(dirName);
}
}
}
// recursively update sub dirs/filters
foreach (UIHierarchyItem item in filterList)
{
string newPath = dir + "\\" + item.Name;
if (Directory.Exists(newPath))
{
RefreshSelectedFolder(item, newPath);
}
}
}
/// <summary>
/// Adds 'dirName' (a directory inside 'dir') as a new filter below vcFilter,
/// including all contained files and, recursively, its sub directories.
/// </summary>
private void addNewFilterRecursive(VCFilter vcFilter, string dirName, string dir)
{
string filterName = dirName.Substring(dir.Length + 1);
VCFilter newFilter = vcFilter.AddFilter(filterName);
// add files
string[] fileEntries = Directory.GetFiles(dirName);
foreach (string file in fileEntries)
{
newFilter.AddFile(file);
}
// add directories as filter
string[] dirEntries = Directory.GetDirectories(dirName);
foreach (string d in dirEntries)
{
addNewFilterRecursive(newFilter, d, dirName);
}
}
/// <summary>
/// Looks up a property by name; returns null when the collection does not contain it.
/// </summary>
private Property GetProperty(Properties properties, string propertyName)
{
if (properties != null)
{
foreach (Property item in properties)
{
if (item != null && item.Name == propertyName)
{
return item;
}
}
}
return null;
}
/// <summary>
/// True when the property collection contains a property with the given name.
/// </summary>
private bool HasProperty(Properties properties, string propertyName)
{
if (GetProperty(properties, propertyName) != null)
return true;
return false;
}
/// <summary>
/// Debug only: writes a line to the "Test Pane" output window pane, creating it on demand.
/// </summary>
/// <param name="s"></param>
void writeDebugIntoOutputPane(string s)
{
EnvDTE80.DTE2 _applicationObject = GetGlobalService(typeof(DTE)) as EnvDTE80.DTE2;
// Retrieve the Output window.
OutputWindow outputWin = _applicationObject.ToolWindows.OutputWindow;
// Find the "Test Pane" Output window pane; if it doesn't exist,
// create it.
OutputWindowPane pane = null;
try
{
pane = outputWin.OutputWindowPanes.Item("Test Pane");
}
catch
{
pane = outputWin.OutputWindowPanes.Add("Test Pane");
}
pane.OutputString(s + "\n");
}
/// <summary>
/// Variable Declarations
/// </summary>
// Leaf project items captured by getSelectedItems for the current command.
private List<ProjectItem> selectedProjectItems = new List<ProjectItem>();
// The project item added most recently by duplicateFiles (null when none).
private ProjectItem lastAdded = null;
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data;
using System.Globalization;
using System.Reflection;
using System.Text;
using DataBoss.Data.Scripting;
using DataBoss.Data.SqlServer;
using DataBoss.Linq;
namespace DataBoss.Data
{
public struct DataBossDbType
{
// Packed type descriptor. The low five bits (TagMask) select the SQL type: tags below
// Char index FixedTypes, tags at or above Char (IsVariableSize) index VariableSizeTypes.
// Bit 7 (IsNullable) records column nullability alongside the type tag.
enum BossTypeTag : byte
{
Custom = 0,
TinyInt = 1,
SmallInt = 2,
Int = 3,
BigInt = 4,
Real = 5,
Float = 6,
Bit = 7,
DateTime = 8,
Date = 9,
Time = 10,
Guid = 11,
Char = 16,
VarChar = 17,
NChar = 18,
NVarChar = 19,
Binary = 20,
VarBinary = 21,
RowVersion = 22,
TagMask = 31,
IsVariableSize = Char,
IsNullable = 1 << 7,
}
// Fixed-size SQL type names and byte widths, indexed directly by BossTypeTag value.
static readonly (string TypeName, byte Width)[] FixedTypes = new(string, byte)[]
{
(null, 0),
("tinyint", 1),
("smallint", 2),
("int", 4),
("bigint", 8),
("real", 4),
("float", 8),
("bit", 0),
("datetime", 8),
("date", 3),
("time", 8),
("uniqueidentifier", 16)
};
// Variable-size SQL type names, indexed by (tag - BossTypeTag.Char).
static readonly (string TypeName, byte Width)[] VariableSizeTypes = new(string, byte)[] {
("char", 0),
("varchar", 0),
("nchar", 0),
("nvarchar", 0),
("binary", 0),
("varbinary", 0),
//rowversion is rendered as binary
("binary", 0),
};
// Resolves a tag to its (TypeName, Width) entry in the matching lookup table.
(string TypeName, byte Width) GetBossType(BossTypeTag tag) => tag.HasFlag(BossTypeTag.IsVariableSize)
? VariableSizeTypes[(byte)(tag & BossTypeTag.TagMask) - (byte)BossTypeTag.Char]
: FixedTypes[(byte)(tag & BossTypeTag.TagMask)];
// Type tag combined with the IsNullable flag bit.
readonly BossTypeTag tag;
// Extra payload: an int? column size for variable-size types, a (TypeName, Width)
// tuple for custom types; null for fixed-size types.
readonly object extra;
// The stored size for variable-size types, the fixed byte width for known
// fixed-size types, otherwise -1.
public int? ColumnSize => tag.HasFlag(BossTypeTag.IsVariableSize)
? (int?)extra
: IsKnownType(out var knownType) ? GetBossType(knownType).Width : -1;
// SQL name of the type, from the lookup tables or the custom payload.
public string TypeName => IsKnownType(out var knownType)
? GetBossType(knownType).TypeName
: CustomInfo.TypeName;
// Strips the flag bits and reports whether the tag names a built-in type.
bool IsKnownType(out BossTypeTag typeTag) {
typeTag = (tag & BossTypeTag.TagMask);
return typeTag != BossTypeTag.Custom;
}
// Typed view of 'extra' for custom (non built-in) types.
(string TypeName, int? Width) CustomInfo => (ValueTuple<string, int?>)extra;
public bool IsRowVersion => (tag & BossTypeTag.TagMask) == BossTypeTag.RowVersion;
public bool IsNullable => tag.HasFlag(BossTypeTag.IsNullable);
/// <summary>
/// Creates a DataBossDbType from a SQL type name, size and nullability. Names that are
/// not in the built-in tables become custom types carrying (name, size) as payload.
/// </summary>
public static DataBossDbType Create(string typeName, int? columnSize, bool isNullable) {
var tag = TypeTagLookup(ref typeName);
if(tag == BossTypeTag.Custom)
return new DataBossDbType(tag, isNullable, (typeName, columnSize));
return new DataBossDbType(tag, isNullable, columnSize);
}
// Looks up the tag for a SQL type name. On a hit the name is nulled out (the tag now
// carries the information); on a miss Custom is returned and the name kept.
static BossTypeTag TypeTagLookup(ref string typeName) {
var wanted = typeName;
int slot = Array.FindIndex(FixedTypes, t => t.TypeName == wanted);
if (slot < 0) {
int varSlot = Array.FindIndex(VariableSizeTypes, t => t.TypeName == wanted);
if (varSlot >= 0)
slot = varSlot + (int)BossTypeTag.IsVariableSize;
}
if (slot < 0)
return BossTypeTag.Custom;
typeName = null;
return (BossTypeTag)slot;
}
// Fixed-size types carry no extra payload.
DataBossDbType(BossTypeTag tag, bool isNullable) : this(tag, isNullable, null)
{ }
// 'extra' holds the size (variable-size types) or the (name, size) tuple (custom types).
DataBossDbType(BossTypeTag tag, bool isNullable, object extra) {
this.tag = tag | (isNullable ? BossTypeTag.IsNullable : 0);
this.extra = extra;
}
/// <summary>Maps a CLR type to its database type, reading attributes from the type itself.</summary>
public static DataBossDbType From(Type type) => From(type, type);
/// <summary>
/// Maps a CLR type to its database type. Reference types map as nullable unless marked
/// [Required]; Nullable&lt;T&gt; unwraps to a nullable mapping of T.
/// </summary>
public static DataBossDbType From(Type type, ICustomAttributeProvider attributes) {
var canBeNull = !type.IsValueType && !attributes.Any<RequiredAttribute>();
if (type.TryGetNullableTargetType(out var newTargetType)) {
canBeNull = true;
type = newTargetType;
}
return MapType(type, attributes, canBeNull);
}
/// <summary>
/// Derives a DataBossDbType from a parameter's DbType; the result is always nullable,
/// and variable-size types take their size from the parameter.
/// </summary>
public static DataBossDbType ToDataBossDbType(IDbDataParameter parameter) {
var t = MapType(parameter.DbType);
return t.HasFlag(BossTypeTag.IsVariableSize)
? new DataBossDbType(t, true, parameter.Size)
: new DataBossDbType(t, true);
}
/// <summary>
/// Formats a CLR value as a SQL literal for this column type: numbers via the invariant
/// culture, strings quoted and escaped, binary data (including RowVersion) as 0x... hex.
/// </summary>
/// <exception cref="NotSupportedException">The value cannot be formatted as this type.</exception>
public string FormatValue(object value) {
switch(tag & BossTypeTag.TagMask) {
default: throw new NotSupportedException($"Can't format {value} of type {value.GetType()} as {ToString()}");
case BossTypeTag.TinyInt: return ChangeType<byte>(value).ToString();
case BossTypeTag.SmallInt: return ChangeType<short>(value).ToString();
case BossTypeTag.Int: return ChangeType<int>(value).ToString();
case BossTypeTag.BigInt: return ChangeType<long>(value).ToString();
case BossTypeTag.Real: return ChangeType<float>(value).ToString(CultureInfo.InvariantCulture);
case BossTypeTag.Float: return ChangeType<double>(value).ToString(CultureInfo.InvariantCulture);
case BossTypeTag.DateTime: return ChangeType<DateTime>(value).ToString("s");
case BossTypeTag.VarChar: return $"'{Escape(value.ToString())}'";
case BossTypeTag.NVarChar: return $"N'{Escape(value.ToString())}'";
case BossTypeTag.RowVersion:
// Unwrap the RowVersion payload, then format it like any other binary value.
value = ((RowVersion)value).Value.Value;
goto case BossTypeTag.VarBinary;
case BossTypeTag.Binary:
case BossTypeTag.VarBinary:
// Non-byte-sequence values fall through to the NotSupportedException above.
var bytes = value as IEnumerable<byte>;
if(bytes == null)
goto default;
var r = new StringBuilder("0x");
foreach(var b in bytes)
r.AppendFormat("{0:x2}", b);
return r.ToString();
}
}
/// <summary>Escapes a string for inclusion in a single-quoted SQL literal.</summary>
static string Escape(string input)
{
// doubling embedded quotes is the standard T-SQL escape
return input.Replace("'", "''");
}
/// <summary>Converts <paramref name="value"/> to <typeparamref name="T"/> via <see cref="Convert.ChangeType(object, Type)"/>.</summary>
static T ChangeType<T>(object value)
{
var converted = Convert.ChangeType(value, typeof(T));
return (T)converted;
}
/// <summary>Maps an ADO.NET <see cref="DbType"/> onto the corresponding type tag.</summary>
/// <exception cref="NotSupportedException">The DbType has no known mapping.</exception>
static BossTypeTag MapType(DbType parameterType) {
switch(parameterType) {
case DbType.Byte: return BossTypeTag.TinyInt;
case DbType.Int16: return BossTypeTag.SmallInt;
case DbType.Int32: return BossTypeTag.Int;
case DbType.Int64: return BossTypeTag.BigInt;
case DbType.Boolean: return BossTypeTag.Bit;
case DbType.String: return BossTypeTag.NVarChar;
case DbType.Binary: return BossTypeTag.Binary;
case DbType.Guid: return BossTypeTag.Guid;
default: throw new NotSupportedException($"No mapping for {parameterType}.");
}
}
/// <summary>
/// Maps a CLR type plus member attributes to a db type. An explicit TypeName from
/// [Column] or a TypeMapping attribute wins over the built-in CLR-type table below.
/// </summary>
/// <exception cref="NotSupportedException">The type has no mapping and no [Column] TypeName.</exception>
internal static DataBossDbType MapType(Type type, ICustomAttributeProvider attributes, bool canBeNull) {
// a [Column(TypeName = ...)] on the member overrides everything
var column = attributes.SingleOrDefault<ColumnAttribute>();
if (column != null && !string.IsNullOrEmpty(column.TypeName))
return Create(column.TypeName, null, canBeNull);
// a TypeMapping attribute on the type itself is the next priority
var typeMapping = type.SingleOrDefault<TypeMappingAttribute>();
if(typeMapping != null && !string.IsNullOrEmpty(typeMapping.TypeName))
return Create(typeMapping.TypeName, null, canBeNull);
switch (type.FullName) {
case "System.Byte": return new DataBossDbType(BossTypeTag.TinyInt, canBeNull);
case "System.Data.SqlTypes.SqlByte": return new DataBossDbType(BossTypeTag.TinyInt, canBeNull);
case "System.Int16": return new DataBossDbType(BossTypeTag.SmallInt, canBeNull);
case "System.Int32": return new DataBossDbType(BossTypeTag.Int, canBeNull);
case "System.Int64": return new DataBossDbType(BossTypeTag.BigInt, canBeNull);
case "System.Single": return new DataBossDbType(BossTypeTag.Real, canBeNull);
case "System.Double": return new DataBossDbType(BossTypeTag.Float, canBeNull);
case "System.Boolean": return new DataBossDbType(BossTypeTag.Bit, canBeNull);
case "System.Guid": return new DataBossDbType(BossTypeTag.Guid, canBeNull);
// strings and byte arrays are variable-size: width comes from [MaxLength], else "max"
case "System.String":
return new DataBossDbType(attributes.Any<AnsiStringAttribute>() ? BossTypeTag.VarChar: BossTypeTag.NVarChar, canBeNull, MaxLength(attributes)?.Length ?? int.MaxValue);
case "System.Char":
return new DataBossDbType(attributes.Any<AnsiStringAttribute>() ? BossTypeTag.Char : BossTypeTag.NChar, canBeNull, 1);
case "System.Byte[]":
return new DataBossDbType(BossTypeTag.VarBinary, canBeNull, MaxLength(attributes)?.Length ?? int.MaxValue);
case "System.DateTime": return Create("datetime", 8, canBeNull);
case "System.TimeSpan": return Create("time", 3, canBeNull);
case "System.Data.SqlTypes.SqlMoney": return Create("money", null, canBeNull);
case "DataBoss.Data.SqlServer.RowVersion": return new DataBossDbType(BossTypeTag.RowVersion, canBeNull, (int?)8);
default:
throw new NotSupportedException("Don't know how to map " + type.FullName + " to a db type.\nTry providing a TypeName using System.ComponentModel.DataAnnotations.Schema.ColumnAttribute.");
}
}
/// <summary>Reads the [MaxLength] attribute, if any, from the member's attributes.</summary>
static MaxLengthAttribute MaxLength(ICustomAttributeProvider attributes) =>
attributes.SingleOrDefault<MaxLengthAttribute>();
/// <summary>
/// Maps a CLR type to the ADO.NET <see cref="DbType"/> used when binding parameters.
/// Any type without an explicit entry falls back to <see cref="DbType.String"/>.
/// </summary>
public static DbType ToDbType(Type type) {
switch (type.FullName) {
case "System.Byte": return DbType.Byte;
case "System.Int16": return DbType.Int16;
case "System.Int32": return DbType.Int32;
case "System.Int64": return DbType.Int64;
case "System.Single": return DbType.Single;
case "System.Double": return DbType.Double;
case "System.Decimal": return DbType.Decimal;
case "System.Boolean": return DbType.Boolean;
case "System.DateTime": return DbType.DateTime;
case "System.Guid": return DbType.Guid;
// no dedicated mapping: bind as a string
default: return DbType.String;
}
}
/// <summary>Two db types are equal when they render the same SQL name and nullability.</summary>
public static bool operator==(DataBossDbType a, DataBossDbType b) =>
a.TypeName == b.TypeName && a.IsNullable == b.IsNullable;
public static bool operator!=(DataBossDbType a, DataBossDbType b) => !(a == b);
/// <summary>Renders as T-SQL, e.g. "int not null" or "varchar(max)".</summary>
public override string ToString() => FormatType() + (IsNullable ? string.Empty : " not null");
public override int GetHashCode() => TypeName.GetHashCode();
/// <summary>
/// Equality with another DataBossDbType uses ==; for a foreign type the comparison
/// is delegated to that object's Equals so such types can opt in to equality.
/// </summary>
public override bool Equals(object obj) =>
// bug fix: the old 'obj.Equals(this)' fallback threw NullReferenceException for
// Equals(null); the contract requires returning false instead.
(obj is DataBossDbType other && this == other) || (obj != null && obj.Equals(this));
// Variable-size types render with a width suffix; fixed-size types are just the name.
string FormatType() =>
tag.HasFlag(BossTypeTag.IsVariableSize) ? FormatWideType() : TypeName;
// Width 1 (or unknown) is the SQL default and is omitted, e.g. "nchar" not "nchar(1)".
string FormatWideType() =>
(!ColumnSize.HasValue || ColumnSize.Value == 1) ? TypeName : $"{TypeName}({FormatWidth(ColumnSize.Value)})";
/// <summary>Renders a column width, using "max" for <see cref="int.MaxValue"/>.</summary>
static string FormatWidth(int width)
{
if (width == int.MaxValue)
return "max";
return width.ToString();
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Scripting;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;
#pragma warning disable RS0003 // Do not directly await a Task
namespace Microsoft.CodeAnalysis.CSharp.Scripting.UnitTests
{
public class ScriptTests : TestBase
{
/// <summary>
/// Globals object handed to scripts under test; its public fields become
/// top-level variables (X, Y) inside the script code.
/// </summary>
public class Globals
{
public int X;
public int Y;
}
[Fact]
public void TestCreateScript()
{
// Create captures the source text verbatim without compiling or running it
var script = CSharpScript.Create("1 + 2");
Assert.Equal("1 + 2", script.Code);
}
[Fact]
public async Task TestGetCompilation()
{
var state = await CSharpScript.RunAsync("1 + 2", globals: new ScriptTests());
var compilation = state.Script.GetCompilation();
Assert.Equal(state.Script.Code, compilation.SyntaxTrees.First().GetText().ToString());
}
[Fact]
public void TestCreateScriptDelegate()
{
// create a delegate for the entire script
var script = CSharpScript.Create("1 + 2");
var fn = script.CreateDelegate();
Assert.Equal(3, fn().Result);
AssertEx.ThrowsArgumentException("globals", () => fn(new object()));
}
[Fact]
public void TestCreateScriptDelegateWithGlobals()
{
// create a delegate for the entire script
var script = CSharpScript.Create<int>("X + Y", globalsType: typeof(Globals));
var fn = script.CreateDelegate();
AssertEx.ThrowsArgumentException("globals", () => fn());
AssertEx.ThrowsArgumentException("globals", () => fn(new object()));
Assert.Equal(4, fn(new Globals { X = 1, Y = 3 }).Result);
}
[Fact]
public async Task TestRunScript()
{
var state = await CSharpScript.RunAsync("1 + 2");
Assert.Equal(3, state.ReturnValue);
}
[Fact]
public async Task TestCreateAndRunScript()
{
var script = CSharpScript.Create("1 + 2");
var state = await script.RunAsync();
Assert.Same(script, state.Script);
Assert.Equal(3, state.ReturnValue);
}
[Fact]
public async Task TestEvalScript()
{
// EvaluateAsync is the one-shot helper: run the script and return its value directly
var value = await CSharpScript.EvaluateAsync("1 + 2");
Assert.Equal(3, value);
}
[Fact]
public async Task TestRunScriptWithSpecifiedReturnType()
{
var state = await CSharpScript.RunAsync("1 + 2");
Assert.Equal(3, state.ReturnValue);
}
[Fact]
public async Task TestRunVoidScript()
{
var state = await CSharpScript.RunAsync("System.Console.WriteLine(0);");
Assert.Null(state.ReturnValue);
}
[WorkItem(5279, "https://github.com/dotnet/roslyn/issues/5279")]
[Fact]
// bug fix: was 'async void' — xUnit cannot await such a test, so a failing
// assertion or faulted task could go unobserved. Async tests must return Task.
public async Task TestRunExpressionStatement()
{
// F(); is an expression *statement*, not a trailing expression,
// so the script completes without producing a return value.
var state = await CSharpScript.RunAsync(
@"int F() { return 1; }
F();");
Assert.Null(state.ReturnValue);
}
[Fact(Skip = "https://github.com/dotnet/roslyn/issues/170")]
public void TestRunDynamicVoidScriptWithTerminatingSemicolon()
{
var result = CSharpScript.RunAsync(@"
class SomeClass
{
public void Do()
{
}
}
dynamic d = new SomeClass();
d.Do();"
, ScriptOptions.Default.WithReferences(MscorlibRef, SystemRef, SystemCoreRef, CSharpRef));
}
[Fact(Skip = "https://github.com/dotnet/roslyn/issues/170")]
public void TestRunDynamicVoidScriptWithoutTerminatingSemicolon()
{
var result = CSharpScript.RunAsync(@"
class SomeClass
{
public void Do()
{
}
}
dynamic d = new SomeClass();
d.Do()"
, ScriptOptions.Default.WithReferences(MscorlibRef, SystemRef, SystemCoreRef, CSharpRef));
}
[Fact]
public async Task TestRunScriptWithGlobals()
{
var state = await CSharpScript.RunAsync("X + Y", globals: new Globals { X = 1, Y = 2 });
Assert.Equal(3, state.ReturnValue);
}
[Fact]
public async Task TestRunCreatedScriptWithExpectedGlobals()
{
var script = CSharpScript.Create("X + Y", globalsType: typeof(Globals));
var state = await script.RunAsync(new Globals { X = 1, Y = 2 });
Assert.Equal(3, state.ReturnValue);
Assert.Same(script, state.Script);
}
[Fact]
public void TestRunCreatedScriptWithUnexpectedGlobals()
{
var script = CSharpScript.Create("X + Y");
// Global variables passed to a script without a global type
AssertEx.ThrowsArgumentException("globals", () => script.RunAsync(new Globals { X = 1, Y = 2 }));
}
[Fact]
public void TestRunCreatedScriptWithoutGlobals()
{
var script = CSharpScript.Create("X + Y", globalsType: typeof(Globals));
// The script requires access to global variables but none were given
AssertEx.ThrowsArgumentException("globals", () => script.RunAsync());
}
[Fact]
public void TestRunCreatedScriptWithMismatchedGlobals()
{
var script = CSharpScript.Create("X + Y", globalsType: typeof(Globals));
// The globals of type 'System.Object' is not assignable to 'Microsoft.CodeAnalysis.CSharp.Scripting.Test.ScriptTests+Globals'
AssertEx.ThrowsArgumentException("globals", () => script.RunAsync(new object()));
}
[Fact]
public async Task ContinueAsync_Error1()
{
var state = await CSharpScript.RunAsync("X + Y", globals: new Globals());
AssertEx.ThrowsArgumentNull("previousState", () => state.Script.ContinueAsync(null));
}
[Fact]
public async Task ContinueAsync_Error2()
{
var state1 = await CSharpScript.RunAsync("X + Y + 1", globals: new Globals());
var state2 = await CSharpScript.RunAsync("X + Y + 2", globals: new Globals());
AssertEx.ThrowsArgumentException("previousState", () => state1.Script.ContinueAsync(state2));
}
[Fact]
public async Task TestRunScriptWithScriptState()
{
// run a script using another scripts end state as the starting state (globals)
var state = await CSharpScript.RunAsync("int X = 100;").ContinueWith("X + X");
Assert.Equal(200, state.ReturnValue);
}
[Fact]
public async Task TestRepl()
{
string[] submissions = new[]
{
"int x = 100;",
"int y = x * x;",
"x + y"
};
var state = await CSharpScript.RunAsync("");
foreach (var submission in submissions)
{
state = await state.ContinueWithAsync(submission);
}
Assert.Equal(10100, state.ReturnValue);
}
#if TODO // https://github.com/dotnet/roslyn/issues/3720
[Fact]
public void TestCreateMethodDelegate()
{
// create a delegate to a method declared in the script
var state = CSharpScript.Run("int Times(int x) { return x * x; }");
var fn = state.CreateDelegate<Func<int, int>>("Times");
var result = fn(5);
Assert.Equal(25, result);
}
#endif
[Fact]
public async Task ScriptVariables_Chain()
{
var globals = new Globals { X = 10, Y = 20 };
var script =
CSharpScript.Create(
"var a = '1';",
globalsType: globals.GetType()).
ContinueWith("var b = 2u;").
ContinueWith("var a = 3m;").
ContinueWith("var x = a + b;").
ContinueWith("var X = Y;");
var state = await script.RunAsync(globals);
AssertEx.Equal(new[] { "a", "b", "a", "x", "X" }, state.Variables.Select(v => v.Name));
AssertEx.Equal(new object[] { '1', 2u, 3m, 5m, 20 }, state.Variables.Select(v => v.Value));
AssertEx.Equal(new Type[] { typeof(char), typeof(uint), typeof(decimal), typeof(decimal), typeof(int) }, state.Variables.Select(v => v.Type));
Assert.Equal(3m, state.GetVariable("a").Value);
Assert.Equal(2u, state.GetVariable("b").Value);
Assert.Equal(5m, state.GetVariable("x").Value);
Assert.Equal(20, state.GetVariable("X").Value);
Assert.Equal(null, state.GetVariable("A"));
Assert.Same(state.GetVariable("X"), state.GetVariable("X"));
}
[Fact]
public async Task ScriptVariable_SetValue()
{
var script = CSharpScript.Create("var x = 1;");
var s1 = await script.RunAsync();
s1.GetVariable("x").Value = 2;
Assert.Equal(2, s1.GetVariable("x").Value);
// rerunning the script from the beginning rebuilds the state:
var s2 = await s1.Script.RunAsync();
Assert.Equal(1, s2.GetVariable("x").Value);
// continuing preserves the state:
var s3 = await s1.ContinueWithAsync("x");
Assert.Equal(2, s3.GetVariable("x").Value);
Assert.Equal(2, s3.ReturnValue);
}
[Fact]
public async Task ScriptVariable_SetValue_Errors()
{
var state = await CSharpScript.RunAsync(@"
var x = 1;
readonly var y = 2;
const int z = 3;
");
Assert.False(state.GetVariable("x").IsReadOnly);
Assert.True(state.GetVariable("y").IsReadOnly);
Assert.True(state.GetVariable("z").IsReadOnly);
Assert.Throws<ArgumentException>(() => state.GetVariable("x").Value = "str");
Assert.Throws<InvalidOperationException>(() => state.GetVariable("y").Value = "str");
Assert.Throws<InvalidOperationException>(() => state.GetVariable("z").Value = "str");
Assert.Throws<InvalidOperationException>(() => state.GetVariable("y").Value = 0);
Assert.Throws<InvalidOperationException>(() => state.GetVariable("z").Value = 0);
}
[Fact]
public async Task TestBranchingSubscripts()
{
// run script to create declaration of M
var state1 = await CSharpScript.RunAsync("int M(int x) { return x + x; }");
// run second script starting from first script's end state
// this script's new declaration should hide the old declaration
var state2 = await state1.ContinueWithAsync("int M(int x) { return x * x; } M(5)");
Assert.Equal(25, state2.ReturnValue);
// run third script also starting from first script's end state
// it should not see any declarations made by the second script.
var state3 = await state1.ContinueWithAsync("M(5)");
Assert.Equal(10, state3.ReturnValue);
}
[Fact]
public async Task ReturnIntAsObject()
{
var expected = 42;
var script = CSharpScript.Create<object>($"return {expected};");
var result = await script.EvaluateAsync();
Assert.Equal(expected, result);
}
[Fact]
public async Task NoReturn()
{
var script = CSharpScript.Create<object>("System.Console.WriteLine();");
var result = await script.EvaluateAsync();
Assert.Null(result);
}
[Fact]
public async Task ReturnAwait()
{
var script = CSharpScript.Create<int>("return await System.Threading.Tasks.Task.FromResult(42);");
var result = await script.EvaluateAsync();
Assert.Equal(42, result);
}
[Fact]
public async Task ReturnInNestedScopeNoTrailingExpression()
{
var script = CSharpScript.Create(@"
bool condition = false;
if (condition)
{
return 1;
}");
var result = await script.EvaluateAsync();
Assert.Null(result);
}
[Fact]
public async Task ReturnInNestedScopeWithTrailingVoidExpression()
{
var script = CSharpScript.Create(@"
bool condition = false;
if (condition)
{
return 1;
}
System.Console.WriteLine();");
var result = await script.EvaluateAsync();
Assert.Null(result);
script = CSharpScript.Create(@"
bool condition = true;
if (condition)
{
return 1;
}
System.Console.WriteLine();");
result = await script.EvaluateAsync();
Assert.Equal(1, result);
}
[Fact]
public async Task ReturnInNestedScopeWithTrailingVoidExpressionAsInt()
{
var script = CSharpScript.Create<int>(@"
bool condition = false;
if (condition)
{
return 1;
}
System.Console.WriteLine();");
var result = await script.EvaluateAsync();
Assert.Equal(0, result);
script = CSharpScript.Create<int>(@"
bool condition = false;
if (condition)
{
return 1;
}
System.Console.WriteLine()");
result = await script.EvaluateAsync();
Assert.Equal(0, result);
}
[Fact]
public async Task ReturnIntWithTrailingDoubleExpression()
{
var script = CSharpScript.Create(@"
bool condition = false;
if (condition)
{
return 1;
}
1.1");
var result = await script.EvaluateAsync();
Assert.Equal(1.1, result);
script = CSharpScript.Create(@"
bool condition = true;
if (condition)
{
return 1;
}
1.1");
result = await script.EvaluateAsync();
Assert.Equal(1, result);
}
[Fact]
public async Task ReturnGenericAsInterface()
{
var script = CSharpScript.Create<IEnumerable<int>>(@"
if (false)
{
return new System.Collections.Generic.List<int> { 1, 2, 3 };
}");
var result = await script.EvaluateAsync();
Assert.Null(result);
script = CSharpScript.Create<IEnumerable<int>>(@"
if (true)
{
return new System.Collections.Generic.List<int> { 1, 2, 3 };
}");
result = await script.EvaluateAsync();
Assert.Equal(new List<int> { 1, 2, 3 }, result);
}
[Fact]
public async Task ReturnNullable()
{
var script = CSharpScript.Create<int?>(@"
if (false)
{
return 42;
}");
var result = await script.EvaluateAsync();
Assert.False(result.HasValue);
script = CSharpScript.Create<int?>(@"
if (true)
{
return 42;
}");
result = await script.EvaluateAsync();
Assert.Equal(42, result);
}
[Fact]
public async Task ReturnInLoadedFile()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "return 42;"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create("#load \"a.csx\"", options);
var result = await script.EvaluateAsync();
Assert.Equal(42, result);
script = CSharpScript.Create(@"
#load ""a.csx""
-1", options);
result = await script.EvaluateAsync();
Assert.Equal(42, result);
}
[Fact]
public async Task ReturnInLoadedFileTrailingExpression()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", @"
if (false)
{
return 42;
}
1"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create("#load \"a.csx\"", options);
var result = await script.EvaluateAsync();
Assert.Null(result);
script = CSharpScript.Create(@"
#load ""a.csx""
2", options);
result = await script.EvaluateAsync();
Assert.Equal(2, result);
}
[Fact]
public async Task ReturnInLoadedFileTrailingVoidExpression()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", @"
if (false)
{
return 1;
}
System.Console.WriteLine(42)"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create("#load \"a.csx\"", options);
var result = await script.EvaluateAsync();
Assert.Null(result);
script = CSharpScript.Create(@"
#load ""a.csx""
2", options);
result = await script.EvaluateAsync();
Assert.Equal(2, result);
}
[Fact]
public async Task MultipleLoadedFilesWithTrailingExpression()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "1"),
KeyValuePair.Create("b.csx", @"
#load ""a.csx""
2"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create("#load \"b.csx\"", options);
var result = await script.EvaluateAsync();
Assert.Null(result);
resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "1"),
KeyValuePair.Create("b.csx", "2"));
options = ScriptOptions.Default.WithSourceResolver(resolver);
script = CSharpScript.Create(@"
#load ""a.csx""
#load ""b.csx""", options);
result = await script.EvaluateAsync();
Assert.Null(result);
resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "1"),
KeyValuePair.Create("b.csx", "2"));
options = ScriptOptions.Default.WithSourceResolver(resolver);
script = CSharpScript.Create(@"
#load ""a.csx""
#load ""b.csx""
3", options);
result = await script.EvaluateAsync();
Assert.Equal(3, result);
}
[Fact]
public async Task MultipleLoadedFilesWithReturnAndTrailingExpression()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "return 1;"),
KeyValuePair.Create("b.csx", @"
#load ""a.csx""
2"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create("#load \"b.csx\"", options);
var result = await script.EvaluateAsync();
Assert.Equal(1, result);
resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "return 1;"),
KeyValuePair.Create("b.csx", "2"));
options = ScriptOptions.Default.WithSourceResolver(resolver);
script = CSharpScript.Create(@"
#load ""a.csx""
#load ""b.csx""", options);
result = await script.EvaluateAsync();
Assert.Equal(1, result);
resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", "return 1;"),
KeyValuePair.Create("b.csx", "2"));
options = ScriptOptions.Default.WithSourceResolver(resolver);
script = CSharpScript.Create(@"
#load ""a.csx""
#load ""b.csx""
return 3;", options);
result = await script.EvaluateAsync();
Assert.Equal(1, result);
}
[Fact]
public async Task LoadedFileWithReturnAndGoto()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", @"
goto EOF;
NEXT:
return 1;
EOF:;
2"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create(@"
#load ""a.csx""
goto NEXT;
return 3;
NEXT:;", options);
var result = await script.EvaluateAsync();
Assert.Null(result);
script = CSharpScript.Create(@"
#load ""a.csx""
L1: goto EOF;
L2: return 3;
EOF:
EOF2: ;
4", options);
result = await script.EvaluateAsync();
Assert.Equal(4, result);
}
[Fact]
public async Task VoidReturn()
{
var script = CSharpScript.Create("return;");
var result = await script.EvaluateAsync();
Assert.Null(result);
script = CSharpScript.Create(@"
var b = true;
if (b)
{
return;
}
b");
result = await script.EvaluateAsync();
Assert.Null(result);
}
[Fact]
public async Task LoadedFileWithVoidReturn()
{
var resolver = TestSourceReferenceResolver.Create(
KeyValuePair.Create("a.csx", @"
var i = 42;
return;
i = -1;"));
var options = ScriptOptions.Default.WithSourceResolver(resolver);
var script = CSharpScript.Create<int>(@"
#load ""a.csx""
i", options);
var result = await script.EvaluateAsync();
Assert.Equal(0, result);
}
}
}
| |
/*
Copyright (c) 2006 Tomas Petricek
The use and distribution terms for this software are contained in the file named License.txt,
which can be found in the root of the Phalanger distribution. By using this software
in any fashion, you are agreeing to be bound by the terms of this license.
You must not remove this notice from this software.
*/
//#define DEBUG_DUCK_EMIT
using System;
using System.Collections.Generic;
using System.Text;
using System.Reflection;
using System.Reflection.Emit;
using PHP.Core.Emit;
using System.Collections;
namespace PHP.Core.DuckTyping
{
#region Attributes
/// <summary>
/// This attribute marks interface that is used with duck typing.
/// </summary>
[AttributeUsage(AttributeTargets.Interface, Inherited = false, AllowMultiple = false)]
public sealed class DuckTypeAttribute : Attribute
{
// When true, the interface maps to global PHP functions rather than to members of a wrapped object.
public bool GlobalFunctions { get; set; }
}
/// <summary>
/// Use this attribute when you want to use different name of property or method.
/// </summary>
/// <example>
/// The following example demonstrates how to rename function from "php_name" to "PhpName":
/// <code>
/// [DuckType]
/// interface IDemo {
/// [DuckName("php_name")]
/// void PhpName();
/// }
/// </code>
/// </example>
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property, Inherited = false, AllowMultiple = false)]
public sealed class DuckNameAttribute : Attribute
{
#region Members
/// <summary>The PHP-side member name to bind to instead of the CLR member's own name.</summary>
public string Name { get; set; }
/// <summary>Creates the attribute with the PHP-side member name.</summary>
public DuckNameAttribute(string name)
{
Name = name;
}
#endregion
}
#endregion
/// <summary>
/// Use this type as a interface of all values wrapped by duck types.
/// </summary>
public interface IDuckType
{
/// <summary>
/// Gets the original object this duck-typed wrapper was created from. This allows
/// passing returned duck types back as arguments to other duck-typed methods.
/// </summary>
object OriginalObject { get; }
}
/// <summary>
/// Common base implementation of IDuckType interface.
/// </summary>
public abstract class DuckTypeBase : IDuckType
{
/// <summary>The wrapped object this duck-typed instance was created from.</summary>
public object OriginalObject { get { return original; } }
// readonly: only assigned in the constructor, the wrapped object never changes
readonly object original;
protected DuckTypeBase(object original)
{
this.original = original;
}
}
/// <summary>
/// Class that contains duck typing implementation
/// </summary>
public class DuckTyping
{
#region Static
internal const string RealAssemblyName = "DuckTypingAssembly";
internal const string RealModuleName = "DuckTypingModule";
private static object initializationMutex = new object();
private static volatile bool initialized = false;
private static DuckTyping instance;
/// <summary>
/// Singleton - returns instance of the object
/// </summary>
public static DuckTyping Instance
{
get
{
// double-checked locking: 'initialized' is volatile, so the write inside
// the lock is published before other threads observe it as true
if (!initialized)
{
lock (initializationMutex)
{
if (!initialized)
{
instance = new DuckTyping();
initialized = true;
}
}
}
return instance;
}
}
#endregion
#region Types
/// <summary>
/// Represents key for the type cache
/// </summary>
struct TypeTuple : IEquatable<TypeTuple>
{
#region Members
private Type _interfaceType;
/// <summary>The duck-type interface being implemented.</summary>
public Type InterfaceType { get { return _interfaceType;} set { _interfaceType = value;} }
private Type _objectType;
/// <summary>The concrete type of the wrapped object.</summary>
public Type ObjectType { get { return _objectType;} set { _objectType = value;} }
public TypeTuple(Type interfaceType, Type objectType)
{
_interfaceType = interfaceType; _objectType = objectType;
}
// Explicit equality members: this struct is a Dictionary key, and overriding
// GetHashCode without Equals left lookups on the slow reflection-based
// ValueType.Equals. Semantics are unchanged: equal iff both Type fields match.
public bool Equals(TypeTuple other)
{
return _interfaceType == other._interfaceType && _objectType == other._objectType;
}
public override bool Equals(object obj)
{
return obj is TypeTuple && Equals((TypeTuple)obj);
}
public override int GetHashCode()
{
return InterfaceType.GetHashCode() + ObjectType.GetHashCode();
}
#endregion
}
#endregion
#region Locals
ModuleBuilder module_builder;
AssemblyBuilder assembly_builder;
Dictionary<TypeTuple, Type> typeCache;
Dictionary<Type, Type> globalCache;
int type_counter = 0;
readonly object moduleLock = new object();
#endregion
#region Construction
/// <summary>
/// Singleton - initialize instance
/// </summary>
private DuckTyping()
{
AssemblyName assembly_name = new AssemblyName(RealAssemblyName);
// Run-only dynamic assembly: generated wrapper types live in memory and are never saved
assembly_builder = AppDomain.CurrentDomain.DefineDynamicAssembly
(assembly_name, AssemblyBuilderAccess.Run);
module_builder = assembly_builder.DefineDynamicModule(RealModuleName);
// caches so each (interface, object-type) pair is only emitted once
typeCache = new Dictionary<TypeTuple, Type>();
globalCache = new Dictionary<Type, Type>();
}
#endregion
#region Public
/// <summary>
/// Implements duck typed wrapper using interface <typeparamref name="T"/>
/// for global functions in the currently loaded PHP source and returns wrapped object.
/// </summary>
/// <typeparam name="T">Interface that must be marked using <seealso cref="DuckTypeAttribute"/> attribute
/// and the attribute must be GlobalFunctions = true.</typeparam>
/// <returns>Wrapped object</returns>
public T ImplementGlobalDuckType<T>()
{
// validate the type parameter: must be an interface marked [DuckType(GlobalFunctions = true)]
if (!typeof(T).IsInterface)
throw new ArgumentException("Type parameter for NewObject must be an interface!");
object[] attrs = typeof(T).GetCustomAttributes(typeof(DuckTypeAttribute), false);
if (attrs.Length == 0)
throw new ArgumentException("Type parameter should have [DuckType] attribute!");
if (!((DuckTypeAttribute)attrs[0]).GlobalFunctions)
throw new ArgumentException("Type parameter should have [DuckType(GlobalFunctions=true)] attribute!");
// NOTE(review): lock(this) is a public-lock anti-pattern — a private lock object
// would be safer; left as-is to preserve existing locking behavior.
lock (this) {
Type type;
// cache lookup
if (!globalCache.TryGetValue(typeof(T), out type))
{
string typeName;
// emit the wrapper type (global = true: empty ctor, no wrapped-object field)
EmitAll(typeof(T), out type, out typeName, true);
globalCache.Add(typeof(T), type);
#if DEBUG_DUCK_EMIT
// debug-only: re-emit into a savable assembly and dump it to disk for inspection
// NOTE(review): this EmitAll call is missing the 'global' argument — verify it
// still compiles if DEBUG_DUCK_EMIT is ever defined.
AssemblyName assembly_name = new AssemblyName(RealAssemblyName);
AssemblyBuilder ab = assembly_builder;
ModuleBuilder mb = module_builder;
assembly_builder = AppDomain.CurrentDomain.DefineDynamicAssembly(
assembly_name, AssemblyBuilderAccess.RunAndSave, "C:\\Temp\\", null, null, null, null, true);
module_builder = assembly_builder.DefineDynamicModule(RealModuleName, String.Format("test_{0}.dll", type_counter-1), true);
Type _type; string _string;
EmitAll(typeof(T), out _type, out _string);
assembly_builder.Save(String.Format("test_{0}.dll", type_counter-1));
assembly_builder = ab;
module_builder = mb;
#endif
}
// global wrappers have a parameterless constructor
return (T)type.GetConstructor(Type.EmptyTypes).Invoke(ArrayUtils.EmptyObjects);
}
}
/// <summary>
/// Implements duck typed wrapper using interface <typeparamref name="T"/>
/// for the object <paramref name="o"/> and returns wrapped object.
/// </summary>
/// <typeparam name="T">Interface that must be marked using <seealso cref="DuckTypeAttribute"/> attribute</typeparam>
/// <param name="o">Object to be wrapped</param>
/// <returns>Wrapped object</returns>
public T ImplementDuckType<T>(object o)
{
// validate the type parameter: must be an interface marked [DuckType]
if (!typeof(T).IsInterface)
throw new ArgumentException("Type parameter for NewObject must be an interface!");
object[] attrs = typeof(T).GetCustomAttributes(typeof(DuckTypeAttribute), false);
if (attrs.Length == 0)
throw new ArgumentException("Type parameter should have [DuckType] attribute!");
// NOTE(review): lock(this) is a public-lock anti-pattern — a private lock object
// would be safer; left as-is to preserve existing locking behavior.
lock (this) {
// wrapper types are cached per (interface, runtime-type-of-o) pair
TypeTuple cacheKey = new TypeTuple(typeof(T), o.GetType());
Type type;
// cache lookup
if (!typeCache.TryGetValue(cacheKey, out type))
{
string typeName;
EmitAll(typeof(T), out type, out typeName, false);
typeCache.Add(cacheKey, type);
#if DEBUG_DUCK_EMIT
// debug-only: re-emit into a savable assembly and dump it to disk for inspection
// NOTE(review): this EmitAll call is missing the 'global' argument — verify it
// still compiles if DEBUG_DUCK_EMIT is ever defined.
AssemblyName assembly_name = new AssemblyName(RealAssemblyName);
AssemblyBuilder ab = assembly_builder;
ModuleBuilder mb = module_builder;
assembly_builder = AppDomain.CurrentDomain.DefineDynamicAssembly(
assembly_name, AssemblyBuilderAccess.RunAndSave, "C:\\Temp\\", null, null, null, null, true);
module_builder = assembly_builder.DefineDynamicModule(RealModuleName, String.Format("test_{0}.dll", type_counter-1), true);
Type _type; string _string;
EmitAll(typeof(T), out _type, out _string);
assembly_builder.Save(String.Format("test_{0}.dll", type_counter-1));
assembly_builder = ab;
module_builder = mb;
#endif
}
// non-global wrappers take the wrapped object as the single constructor argument
return (T)type.GetConstructor(Types.Object).Invoke(new object[] { o });
}
}
#endregion
#region Private
/// <summary>
/// Emit duck type implementation
/// </summary>
/// <param name="origType">Interface type</param>
/// <param name="type">Emitted interface implementation</param>
/// <param name="typeName">Generated type name</param>
/// <param name="global">Is global.</param>
private void EmitAll(Type origType, out Type type, out string typeName, bool global)
{
// serialize all emission into the shared module_builder
lock (moduleLock)
{
// e.g. "<IDemo#3>" — unique per emitted wrapper
typeName = String.Format("<{1}#{0}>", type_counter++, origType.Name);
Type[] interfaces;
if (!global)
{
// object wrappers additionally implement IDuckType to expose the wrapped object
interfaces = new Type[] { origType, typeof(IDuckType) };
}
else
{
interfaces = new Type[] { origType };
}
TypeBuilder tb = module_builder.DefineType(typeName, TypeAttributes.Public |
TypeAttributes.Sealed | TypeAttributes.Class, null, interfaces);
FieldInfo fld = null;
if (!global)
{
// internal constructor and field to store object
fld = CreateField(tb);
CreateConstructor(tb, fld);
ImplementCommonDuckTypeInterface(tb, fld);
}
else
{
// only empty constructor for 'GlobalFunctions' object
CreateEmptyConstructor(tb);
}
// methods
foreach (MethodInfo method in origType.GetMethods())
{
// skip property accessors etc.; they are handled via GetProperties below
if (method.IsSpecialName) continue;
ImplementMethod(tb, method, fld, global);
}
// properties
foreach (PropertyInfo prop in origType.GetProperties())
{
if (global)
throw new ArgumentException("DuckType interfaces with GlobalFunctions=true can not support properties!");
ImplementProperty(tb, prop, fld);
}
type = tb.CreateType();
}
}
/// <summary> Creates constructor </summary>
/// <remarks><code>
/// class A : IDuck {
/// public A(object o) { _obj = o; }
/// }
/// </code></remarks>
private void CreateConstructor(TypeBuilder tb, FieldInfo fld)
{
ConstructorBuilder c = tb.DefineConstructor(MethodAttributes.Public,
CallingConventions.Standard, Types.Object);
ILGenerator il = c.GetILGenerator();
// call base object constructor: base()
il.Emit(OpCodes.Ldarg_0);
il.Emit(OpCodes.Call, Types.Object[0].GetConstructor(Type.EmptyTypes));
// store the ctor argument into the wrapped-object field: this._obj = arg1
il.Emit(OpCodes.Ldarg_0);
il.Emit(OpCodes.Ldarg_1);
il.Emit(OpCodes.Stfld, fld);
il.Emit(OpCodes.Ret);
}
/// <summary>Implements IDuckType.OriginalObject as a getter that returns the wrapped-object field.</summary>
private void ImplementCommonDuckTypeInterface(TypeBuilder tb, FieldInfo fld)
{
PropertyBuilder prop = tb.DefineProperty("OriginalObject", PropertyAttributes.HasDefault, typeof(Object), null);
// private + Virtual|NewSlot|Final: an explicit interface implementation
MethodBuilder method = tb.DefineMethod("get_OriginalObject", MethodAttributes.Private | MethodAttributes.HideBySig |
MethodAttributes.SpecialName | MethodAttributes.Virtual | MethodAttributes.NewSlot | MethodAttributes.Final, typeof(Object), Type.EmptyTypes);
ILGenerator il = method.GetILGenerator();
// return this._obj;
il.Emit(OpCodes.Ldarg_0);
il.Emit(OpCodes.Ldfld, fld);
il.Emit(OpCodes.Ret);
prop.SetGetMethod(method);
// bind the emitted getter to IDuckType.OriginalObject
tb.DefineMethodOverride(method, typeof(IDuckType).GetProperty("OriginalObject").GetGetMethod());
}
/// <summary> Creates empty constructor </summary>
/// <remarks><code>
/// class A : IDuck {
/// public A() { }
/// }
/// </code></remarks>
private void CreateEmptyConstructor(TypeBuilder tb)
{
ConstructorBuilder c = tb.DefineConstructor(MethodAttributes.Public,
CallingConventions.Standard, Type.EmptyTypes);
ILGenerator il = c.GetILGenerator();
// call base object constructor and return: base();
il.Emit(OpCodes.Ldarg_0);
il.Emit(OpCodes.Call, Types.Object[0].GetConstructor(Type.EmptyTypes));
il.Emit(OpCodes.Ret);
}
/// <summary> Creates field to store object </summary>
/// <remarks><code>
/// class A : IDuck {
///    object _obj;
/// }
/// </code></remarks>
/// <param name="tb">Builder of the duck-type proxy class being generated.</param>
/// <returns>The newly defined private <c>_obj</c> field.</returns>
private FieldInfo CreateField(TypeBuilder tb)
    => tb.DefineField("_obj", typeof(object), FieldAttributes.Private);
/// <summary> Implements property </summary>
/// <remarks><code>
/// class A : IDuck {
///   /*type*/ Prop {
///     get {
///       Operators.GetProperty(this._obj, "Foo", null, false);
///       return /* .. type conversion .. */
///     }
///     set {
///       Operators.SetProperty(
///         new PhpReference(PhpVariable.Copy(ClrObject.WrapDynamic(argument#i), CopyReason.PassedByCopy)),
///         ref this._obj, "Foo", null, ScriptContext.Current);
///     }
///   }
/// }
/// </code></remarks>
/// <param name="tb">Builder of the duck-type proxy class being generated.</param>
/// <param name="prop">The interface property to implement.</param>
/// <param name="fld">The private field (<c>_obj</c>) that stores the wrapped object.</param>
/// <exception cref="NotImplementedException">Thrown for indexed properties.</exception>
private void ImplementProperty(TypeBuilder tb, PropertyInfo prop, FieldInfo fld)
{
    if (prop.GetIndexParameters().Length > 0)
        throw new NotImplementedException("Indexers are not supported!");

    // The PHP-side property name may be overridden via [DuckName];
    // the CLR-visible member keeps the interface's name (prop.Name).
    string propName = prop.Name;
    object[] attrs = prop.GetCustomAttributes(typeof(DuckNameAttribute), false);
    if (attrs.Length > 0) propName = ((DuckNameAttribute)attrs[0]).Name;

    // define method
    PropertyBuilder pb = tb.DefineProperty(prop.Name, PropertyAttributes.HasDefault, prop.PropertyType, null);
    if (prop.CanRead)
    {
        // Private + Virtual + NewSlot + Final == explicit interface implementation.
        MethodBuilder getter = tb.DefineMethod("get_"+prop.Name, MethodAttributes.Private | MethodAttributes.SpecialName |
            MethodAttributes.HideBySig | MethodAttributes.NewSlot | MethodAttributes.Virtual | MethodAttributes.Final,
            prop.PropertyType, Type.EmptyTypes);
        ILEmitter il = new ILEmitter(getter);

        // emit getter
        // Operators.GetProperty(this.obj, "Foo", null, false);
        il.Ldarg(0);
        il.Load(fld);
        il.LoadLiteral(propName);
        il.LoadLiteral(null);
        il.LoadLiteral(false);
        il.Emit(OpCodes.Call, Methods.Operators.GetProperty);
        // Convert the PHP value on the stack to prop.PropertyType and return it.
        EmitReturn(il, prop.PropertyType, false);

        pb.SetGetMethod(getter);
        tb.DefineMethodOverride(getter, prop.GetGetMethod());
    }
    if (prop.CanWrite)
    {
        MethodBuilder setter = tb.DefineMethod("set_" + prop.Name, MethodAttributes.Private | MethodAttributes.SpecialName |
            MethodAttributes.HideBySig | MethodAttributes.NewSlot | MethodAttributes.Virtual | MethodAttributes.Final,
            typeof(void), new Type[] { prop.PropertyType });
        ILEmitter il = new ILEmitter(setter);

        // emit setter
        // new PhpReference(PhpVariable.Copy(ClrObject.WrapDynamic(argument#i), CopyReason.PassedByCopy))
        il.Ldarg(1);
        if (prop.PropertyType.IsValueType)
            il.Emit(OpCodes.Box, prop.PropertyType);
        il.Emit(OpCodes.Call, Methods.ClrObject_WrapDynamic);
        il.LdcI4((int)CopyReason.PassedByCopy);
        il.Emit(OpCodes.Call, Methods.PhpVariable.Copy);
        il.Emit(OpCodes.Newobj, Constructors.PhpReference_Object);

        // Operators.SetProperty( ... , ref this._obj, "Foo", null, ScriptContext.Current);
        il.Ldarg(0);
        il.LoadAddress(fld);
        il.LoadLiteral(propName);
        il.LoadLiteral(null);
        il.EmitCall(OpCodes.Call, Methods.ScriptContext.GetCurrentContext, Type.EmptyTypes);
        il.EmitCall(OpCodes.Call, Methods.Operators.SetProperty, Type.EmptyTypes);
        il.Emit(OpCodes.Ret);

        pb.SetSetMethod(setter);
        tb.DefineMethodOverride(setter, prop.GetSetMethod());
    }
}
/// <summary> Implements method </summary>
/// <remarks><code>
/// class A : IDuck {
///   /*type*/ Func(/*arguments*/) {
///     sc = ScriptContext.Current;
///     // temporary array is created only when arguments.Length > 8 (otherwise AddFrame overload exists)
///     object[] tmp = new object[arguments.Length];
///     tmp[#i] = new PhpReference(PhpVariable.Copy(ClrObject.WrapDynamic(argument#i), CopyReason.PassedByCopy));
///     sc.Stack.AddFrame(tmp);
///     return /* .. type conversion .. */
///   }
/// }
/// </code></remarks>
/// <param name="tb">Builder of the duck-type proxy class being generated.</param>
/// <param name="method">The interface method to implement.</param>
/// <param name="fld">The private field (<c>_obj</c>) with the wrapped object; unused when <paramref name="globalFuncs"/> is true.</param>
/// <param name="globalFuncs">When true, calls a PHP global function instead of a method on the wrapped object.</param>
private void ImplementMethod(TypeBuilder tb, MethodInfo method, FieldInfo fld, bool globalFuncs)
{
    // get parameters (i want C# 3.0 NOW!!)
    ParameterInfo[] pinfo = method.GetParameters();
    Type[] ptypes = new Type[pinfo.Length];
    for(int i = 0; i < pinfo.Length; i++) ptypes[i] = pinfo[i].ParameterType;
    int argCount = pinfo.Length;

    // The PHP-side name may be overridden via [DuckName];
    // the CLR member keeps the interface's name (method.Name).
    string methName = method.Name;
    object[] attrs = method.GetCustomAttributes(typeof(DuckNameAttribute), false);
    if (attrs.Length > 0) methName = ((DuckNameAttribute)attrs[0]).Name;

    // define method (Private + Virtual + NewSlot + Final == explicit interface implementation)
    MethodBuilder mb = tb.DefineMethod(method.Name, MethodAttributes.Private | MethodAttributes.HideBySig |
        MethodAttributes.NewSlot | MethodAttributes.Virtual | MethodAttributes.Final,
        method.ReturnType, ptypes);
    ILEmitter il = new ILEmitter(mb);

    // Wrap parameters
    // sc = ScriptContext.Current
    LocalBuilder sc = il.DeclareLocal(typeof(ScriptContext));
    il.Emit(OpCodes.Call, Methods.ScriptContext.GetCurrentContext);
    il.Stloc(sc);

    LocalBuilder ar = null;
    if (argCount > 8)
    {
        // tmp = new object[pinfo.Length];
        ar = il.DeclareLocal(typeof(object[]));
        il.Emit(OpCodes.Ldc_I4, pinfo.Length);
        il.Emit(OpCodes.Newarr, typeof(object));
        il.Stloc(ar);
    }

    // sc.Stack.AddFrame(...);
    il.Ldloc(sc);
    il.Load(Fields.ScriptContext_Stack);

    for (int i = 0; i < argCount; i++)
    {
        if (argCount > 8)
        {
            // tmp[i]
            il.Emit(OpCodes.Ldloc, ar);
            il.Emit(OpCodes.Ldc_I4, i);
        }

        // if (param#i is IDuckType)
        //     param#i.OriginalObject            // unwrap a duck-typed proxy back to the PHP object
        // else
        //     new PhpReference(PhpVariable.Copy(ClrObject.WrapDynamic(param#i), CopyReason.PassedByCopy));
        Label lblDuckType = il.DefineLabel();
        Label lblEnd = il.DefineLabel();

        // Value types can never implement IDuckType here, so the runtime
        // type test is only emitted for reference-typed parameters.
        if (!ptypes[i].IsValueType)
        {
            il.Ldarg(i + 1);
            il.Emit(OpCodes.Isinst, typeof(IDuckType));
            il.Emit(OpCodes.Brtrue, lblDuckType);
        }

        il.Ldarg(i + 1);
        if (ptypes[i].IsValueType)
            il.Emit(OpCodes.Box, ptypes[i]);
        il.Emit(OpCodes.Call, Methods.ClrObject_WrapDynamic);
        il.LdcI4((int)CopyReason.PassedByCopy);
        il.Emit(OpCodes.Call, Methods.PhpVariable.Copy);
        il.Emit(OpCodes.Newobj, Constructors.PhpReference_Object);

        if (!ptypes[i].IsValueType)
        {
            il.Emit(OpCodes.Br, lblEnd);
            il.MarkLabel(lblDuckType);
            il.Ldarg(i + 1);
            il.Emit(OpCodes.Call, typeof(IDuckType).GetProperty("OriginalObject").GetGetMethod());
            il.MarkLabel(lblEnd);
        }

        if (argCount > 8) il.Emit(OpCodes.Stelem_Ref);
    }

    if (argCount > 8)
        il.Emit(OpCodes.Ldloc, ar);
    // Picks the AddFrame overload matching the argument count (array form for > 8).
    il.Emit(OpCodes.Call, Methods.PhpStack.AddFrame.Overload(argCount));

    if (globalFuncs)
    {
        // localVariables = null, namingContext = null
        // ScriptContex.Call(null, null, "Foo", null, ScriptContext.Current).value;
        il.LoadLiteral(null);
        il.LoadLiteral(null);
        il.LoadLiteral(methName);
        il.LoadLiteral(null);
        // Per-method static field caching the resolved routine (call-site hint).
        il.Emit(OpCodes.Ldsflda, il.TypeBuilder.DefineField("<callHint>'lambda", typeof(PHP.Core.Reflection.DRoutineDesc), FieldAttributes.Static | FieldAttributes.Private));
        il.Ldloc(sc);
        il.Emit(OpCodes.Call, Methods.ScriptContext.Call);
    }
    else
    {
        // Operators.InvokeMethod(this.obj, "Foo", null, ScriptContext.Current).value;
        il.Ldarg(0);
        il.Load(fld);
        il.LoadLiteral(methName);
        il.LoadLiteral(null);
        il.Ldloc(sc);
        il.Emit(OpCodes.Call, Methods.Operators.InvokeMethodStr);
    }
    // Convert the PhpReference result to method.ReturnType and return it.
    EmitReturn(il, method.ReturnType, true);
    tb.DefineMethodOverride(mb, method);
}
/// <summary> Emit PHP to CLR conversion </summary>
/// <remarks>If the return type is interface marked using <seealso cref="DuckTypeAttribute"/>
/// it is wrapped again.
/// <code>
/// // type is IDuckEnumerable&lt;T&gt;
/// return new DuckEnumerableWrapper&lt;T&gt;(obj.GetForeachEnumerator(false, false, null))
///
/// // type is IDuckKeyedEnumerable&lt;T&gt;
/// return new DuckKeyedEnumerableWrapper&lt;T&gt;(obj.GetForeachEnumerator(true, false, null))
///
/// // type is marked using [DuckType]
/// return DuckTyping.Instance.ImplementDuckType&lt;T&gt;(obj);
///
/// // otherwise uses standard ConvertToClr conversion method
/// </code>
/// </remarks>
/// <param name="il">Emitter positioned with the PHP result value on the evaluation stack.</param>
/// <param name="returnedType">The CLR type the generated member must return.</param>
/// <param name="isPhpRef">True when the stack holds a PhpReference that must be dereferenced first.</param>
private static void EmitReturn(ILEmitter il, Type returnedType, bool isPhpRef)
{
    Type[] gargs = returnedType.GetGenericArguments();
    object[] attrs = returnedType.GetCustomAttributes(typeof(DuckTypeAttribute), false);
    bool isDuckEnumerable = (gargs.Length == 1 && returnedType.Equals(typeof(IDuckEnumerable<>).MakeGenericType(gargs)));
    bool isDuckKeyedEnumerable = (gargs.Length == 2 && returnedType.Equals(typeof(IDuckKeyedEnumerable<,>).MakeGenericType(gargs)));
    bool isDuckType = attrs != null && attrs.Length > 0;

    if (returnedType.Equals(typeof(void)))
    {
        // Discard the unused PHP result.
        il.Emit(OpCodes.Pop);
        il.Emit(OpCodes.Ret);
    }
    else if (isDuckType || isDuckEnumerable || isDuckKeyedEnumerable)
    {
        LocalBuilder tmp = il.DeclareLocal(typeof(object));
        //store the value local var (after unwrapping it from the reference)
        if (isPhpRef) il.Emit(OpCodes.Ldfld, Fields.PhpReference_Value);
        il.Stloc(tmp);

        Label lblTestMinusOne = il.DefineLabel();
        Label lblWrap = il.DefineLabel();
        Label lblInvalidInt = il.DefineLabel();

        // test whether the value is null — PHP null/false-like results map to a null proxy
        il.Ldloc(tmp);
        il.Emit(OpCodes.Ldnull);
        il.Emit(OpCodes.Ceq);
        il.Emit(OpCodes.Brfalse, lblTestMinusOne);
        il.Emit(OpCodes.Ldnull);
        il.Emit(OpCodes.Ret);
        il.MarkLabel(lblTestMinusOne);

        // test whether value is -1 (a common PHP error return) — also maps to null
        il.Ldloc(tmp);
        il.Emit(OpCodes.Isinst, typeof(int));
        il.Emit(OpCodes.Brfalse, lblWrap); // value is not int, so we can wrap the value
        il.Ldloc(tmp);
        il.Emit(OpCodes.Unbox_Any, typeof(int));
        il.Emit(OpCodes.Ldc_I4, -1);
        il.Emit(OpCodes.Ceq);
        il.Emit(OpCodes.Brfalse, lblWrap); // value is int but not -1
        il.Emit(OpCodes.Ldnull);
        il.Emit(OpCodes.Ret);

        il.MarkLabel(lblWrap);
        // specific duck type wrapping
        if (isDuckEnumerable || isDuckKeyedEnumerable)
        {
            il.Ldloc(tmp);
            il.Emit(OpCodes.Dup);
            // Standard: new DuckEnumerableWrapper<T>(obj.GetForeachEnumerator(false, false, null))
            // Keyed:    new DuckKeyedEnumerableWrapper<K,V>(obj.GetForeachEnumerator(true, false, null))
            il.LoadLiteral(gargs.Length == 2); // keyed?
            il.LoadLiteral(false);
            il.LoadLiteral(null);
            il.Emit(OpCodes.Callvirt, Methods.IPhpEnumerable_GetForeachEnumerator);
            if (isDuckEnumerable)
                il.Emit(OpCodes.Newobj, typeof(DuckEnumerableWrapper<>).
                    MakeGenericType(gargs).GetConstructors()[0]);
            else
                il.Emit(OpCodes.Newobj, typeof(DuckKeyedEnumerableWrapper<,>).
                    MakeGenericType(gargs).GetConstructors()[0]);
        }
        else
        {
            // DuckTyping.Instance.ImplementDuckType<returnedType>(tmp)
            il.Emit(OpCodes.Call, typeof(DuckTyping).GetProperty("Instance", BindingFlags.Public | BindingFlags.Static).GetGetMethod());
            il.Ldloc(tmp);
            il.Emit(OpCodes.Call, typeof(DuckTyping).GetMethod("ImplementDuckType", BindingFlags.Public | BindingFlags.Instance).MakeGenericMethod(returnedType));
        }
        il.Emit(OpCodes.Ret);
    }
    else
    {
        if (returnedType == typeof(object))
        {
            // Special case: PhpBytes results are converted to string via IPhpConvertible.ToString.
            Label lbl = il.DefineLabel();
            if (isPhpRef)
            {
                il.Emit(OpCodes.Ldfld, Fields.PhpReference_Value);
            }
            il.Emit(OpCodes.Dup);
            il.Emit(OpCodes.Isinst, typeof(PhpBytes));
            il.Emit(OpCodes.Brfalse, lbl);
            il.EmitCall(OpCodes.Call, typeof(IPhpConvertible).GetMethod("ToString", Type.EmptyTypes), Type.EmptyTypes);
            il.Emit(OpCodes.Ret);
            il.MarkLabel(lbl);
            ClrOverloadBuilder.EmitConvertToClr(il, PhpTypeCode.Object, returnedType);
            il.Emit(OpCodes.Ret);
        }
        else
        {
            // Standard PHP-to-CLR conversion for all remaining return types.
            ClrOverloadBuilder.EmitConvertToClr(il, isPhpRef ? PhpTypeCode.PhpReference : PhpTypeCode.Object, returnedType);
            il.Emit(OpCodes.Ret);
        }
    }
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// System.Net.HttpListenerRequest
//
// Authors:
// Gonzalo Paniagua Javier (gonzalo.mono@gmail.com)
// Marek Safar (marek.safar@gmail.com)
//
// Copyright (c) 2005 Novell, Inc. (http://www.novell.com)
// Copyright (c) 2011-2012 Xamarin, Inc. (http://xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography.X509Certificates;
using System.Text;
namespace System.Net
{
// Managed (Mono-derived) half of HttpListenerRequest: parses the request line
// and headers, builds the request Uri, and exposes the entity-body stream.
// The other partial part declares _version, _rawUrl, _requestUri, etc.
public sealed partial class HttpListenerRequest
{
    // TransportContext for this managed listener: only the Endpoint
    // channel-binding kind is recognized, and no binding is available.
    private class Context : TransportContext
    {
        public override ChannelBinding GetChannelBinding(ChannelBindingKind kind)
        {
            if (kind != ChannelBindingKind.Endpoint)
            {
                throw new NotSupportedException(SR.Format(SR.net_listener_invalid_cbt_type, kind.ToString()));
            }

            return null;
        }
    }

    private long _contentLength;              // parsed Content-Length value
    private bool _clSet;                      // true once Content-Length has been seen
    private WebHeaderCollection _headers;
    private string _method;
    private Stream _inputStream;
    private HttpListenerContext _context;
    private bool _isChunked;                  // Transfer-Encoding: chunked

    private static readonly byte[] s_100continue = Encoding.ASCII.GetBytes("HTTP/1.1 100 Continue\r\n\r\n");

    internal HttpListenerRequest(HttpListenerContext context)
    {
        _context = context;
        _headers = new WebHeaderCollection();
        _version = HttpVersion.Version10;
    }

    private static readonly char[] s_separators = new char[] { ' ' };

    // Parses and validates "METHOD SP request-target SP HTTP/x.y".
    // On failure, records an error message (and possibly status) on the context.
    internal void SetRequestLine(string req)
    {
        string[] parts = req.Split(s_separators, 3);
        if (parts.Length != 3)
        {
            _context.ErrorMessage = "Invalid request line (parts).";
            return;
        }

        _method = parts[0];
        foreach (char c in _method)
        {
            int ic = (int)c;

            // Valid method characters: upper-case letters, or any printable
            // ASCII that is not an HTTP separator (an RFC 2616 token char).
            // Note: the original condition tested c != '<' twice; the
            // duplicate has been removed (no behavior change).
            if ((ic >= 'A' && ic <= 'Z') ||
                (ic > 32 && c < 127 && c != '(' && c != ')' && c != '<' &&
                 c != '>' && c != '@' && c != ',' && c != ';' &&
                 c != ':' && c != '\\' && c != '"' && c != '/' && c != '[' &&
                 c != ']' && c != '?' && c != '=' && c != '{' && c != '}'))
                continue;

            _context.ErrorMessage = "(Invalid verb)";
            return;
        }

        _rawUrl = parts[1];

        // The version part must be exactly "HTTP/x.y" (8 characters).
        if (parts[2].Length != 8 || !parts[2].StartsWith("HTTP/", StringComparison.Ordinal))
        {
            _context.ErrorMessage = "Invalid request line (version).";
            return;
        }

        try
        {
            _version = new Version(parts[2].Substring(5));
        }
        catch
        {
            _context.ErrorMessage = "Invalid request line (version).";
            return;
        }

        if (_version.Major < 1)
        {
            _context.ErrorMessage = "Invalid request line (version).";
            return;
        }
        if (_version.Major > 1)
        {
            // Only HTTP/1.x is supported.
            _context.ErrorStatus = (int)HttpStatusCode.HttpVersionNotSupported;
            _context.ErrorMessage = HttpStatusDescription.Get(HttpStatusCode.HttpVersionNotSupported);
            return;
        }
    }

    // Cheap pre-check: does the string plausibly begin with a known URI scheme?
    private static bool MaybeUri(string s)
    {
        int p = s.IndexOf(':');
        if (p == -1)
            return false;

        // No predefined scheme name is 10 characters or longer.
        if (p >= 10)
            return false;

        return IsPredefinedScheme(s.Substring(0, p));
    }

    // Recognizes the well-known schemes, dispatching on the first character
    // to avoid comparing against every scheme name.
    private static bool IsPredefinedScheme(string scheme)
    {
        if (scheme == null || scheme.Length < 3)
            return false;

        char c = scheme[0];

        if (c == 'h')
            return (scheme == UriScheme.Http || scheme == UriScheme.Https);
        if (c == 'f')
            return (scheme == UriScheme.File || scheme == UriScheme.Ftp);

        if (c == 'n')
        {
            c = scheme[1];
            if (c == 'e')
                return (scheme == UriScheme.News || scheme == UriScheme.NetPipe || scheme == UriScheme.NetTcp);
            if (scheme == UriScheme.Nntp)
                return true;
            return false;
        }
        if ((c == 'g' && scheme == UriScheme.Gopher) || (c == 'm' && scheme == UriScheme.Mailto))
            return true;

        return false;
    }

    // Called after all headers have been received: validates Host, builds the
    // request Uri, validates Transfer-Encoding/Content-Length, and answers
    // "Expect: 100-continue".
    internal void FinishInitialization()
    {
        string host = UserHostName;
        if (_version > HttpVersion.Version10 && (host == null || host.Length == 0))
        {
            // HTTP/1.1 requires a Host header.
            _context.ErrorMessage = "Invalid host name";
            return;
        }

        string path;
        Uri raw_uri = null;
        // The request-target may be an absolute URI (proxy form) or a path.
        if (MaybeUri(_rawUrl.ToLowerInvariant()) && Uri.TryCreate(_rawUrl, UriKind.Absolute, out raw_uri))
            path = raw_uri.PathAndQuery;
        else
            path = _rawUrl;

        if ((host == null || host.Length == 0))
            host = UserHostAddress;

        // An absolute request-target overrides the Host header.
        if (raw_uri != null)
            host = raw_uri.Host;

        // Strip any ":port" suffix; the listener's own port is used below.
        int colon = host.IndexOf(':');
        if (colon >= 0)
            host = host.Substring(0, colon);

        string base_uri = string.Format("{0}://{1}:{2}", RequestScheme, host, LocalEndPoint.Port);

        if (!Uri.TryCreate(base_uri + path, UriKind.Absolute, out _requestUri))
        {
            _context.ErrorMessage = WebUtility.HtmlEncode("Invalid url: " + base_uri + path);
            return;
        }

        _requestUri = HttpListenerRequestUriBuilder.GetRequestUri(_rawUrl, _requestUri.Scheme,
            _requestUri.Authority, _requestUri.LocalPath, _requestUri.Query);

        if (_version >= HttpVersion.Version11)
        {
            string t_encoding = Headers[HttpKnownHeaderNames.TransferEncoding];
            _isChunked = (t_encoding != null && string.Equals(t_encoding, "chunked", StringComparison.OrdinalIgnoreCase));
            // 'identity' is not valid!
            if (t_encoding != null && !_isChunked)
            {
                _context.Connection.SendError(null, 501);
                return;
            }
        }

        if (!_isChunked && !_clSet)
        {
            // POST/PUT without a body length cannot be framed: 411 Length Required.
            if (string.Equals(_method, "POST", StringComparison.OrdinalIgnoreCase) ||
                string.Equals(_method, "PUT", StringComparison.OrdinalIgnoreCase))
            {
                _context.Connection.SendError(null, 411);
                return;
            }
        }

        if (string.Equals(Headers[HttpKnownHeaderNames.Expect], "100-continue", StringComparison.OrdinalIgnoreCase))
        {
            HttpResponseStream output = _context.Connection.GetResponseStream();
            output.InternalWrite(s_100continue, 0, s_100continue.Length);
        }
    }

    // Strips one outermost pair of double quotes (if present) and trims whitespace.
    internal static string Unquote(String str)
    {
        int start = str.IndexOf('\"');
        int end = str.LastIndexOf('\"');

        // Fix: the substring length must be measured from 'start'. The previous
        // code used 'end - 1', which included trailing characters (or threw)
        // whenever the opening quote was not at index 0, e.g. Unquote("ab\"cd\"ef")
        // returned "cd\"e" instead of "cd". Also require end > start so a lone
        // quote character is left alone.
        if (start >= 0 && end > start)
            str = str.Substring(start + 1, end - start - 1);
        return str.Trim();
    }

    // Parses a single "Name: value" header line, with special handling for
    // Content-Length and Transfer-Encoding; records a 400/501 error on the
    // context for malformed input.
    internal void AddHeader(string header)
    {
        int colon = header.IndexOf(':');
        if (colon == -1 || colon == 0)
        {
            _context.ErrorMessage = HttpStatusDescription.Get(400);
            _context.ErrorStatus = 400;
            return;
        }

        string name = header.Substring(0, colon).Trim();
        string val = header.Substring(colon + 1).Trim();

        if (name.Equals("content-length", StringComparison.OrdinalIgnoreCase))
        {
            // To match Windows behavior:
            // Content lengths >= 0 and <= long.MaxValue are accepted as is.
            // Content lengths > long.MaxValue and <= ulong.MaxValue are treated as 0.
            // Content lengths < 0 cause the requests to fail.
            // Other input is a failure, too.
            long parsedContentLength =
                ulong.TryParse(val, out ulong parsedUlongContentLength) ? (parsedUlongContentLength <= long.MaxValue ? (long)parsedUlongContentLength : 0) :
                long.Parse(val);
            // Conflicting Content-Length values across repeated headers are rejected.
            if (parsedContentLength < 0 || (_clSet && parsedContentLength != _contentLength))
            {
                _context.ErrorMessage = "Invalid Content-Length.";
            }
            else
            {
                _contentLength = parsedContentLength;
                _clSet = true;
            }
        }
        else if (name.Equals("transfer-encoding", StringComparison.OrdinalIgnoreCase))
        {
            // A repeated Transfer-Encoding header is not implemented.
            if (Headers[HttpKnownHeaderNames.TransferEncoding] != null)
            {
                _context.ErrorStatus = (int)HttpStatusCode.NotImplemented;
                _context.ErrorMessage = HttpStatusDescription.Get(HttpStatusCode.NotImplemented);
            }
        }

        if (_context.ErrorMessage == null)
        {
            _headers.Set(name, val);
        }
    }

    // Drains any remaining entity body; returns true if the stream could be reused.
    internal bool FlushInput()
    {
        if (!HasEntityBody)
            return true;

        int length = 2048;
        if (_contentLength > 0)
            length = (int)Math.Min(_contentLength, (long)length);

        byte[] bytes = new byte[length];
        while (true)
        {
            try
            {
                IAsyncResult ares = InputStream.BeginRead(bytes, 0, length, null, null);
                // Give up (connection not reusable) if a read stalls for > 1s.
                if (!ares.IsCompleted && !ares.AsyncWaitHandle.WaitOne(1000))
                    return false;
                if (InputStream.EndRead(ares) <= 0)
                    return true;
            }
            catch (ObjectDisposedException)
            {
                _inputStream = null;
                return true;
            }
            catch
            {
                return false;
            }
        }
    }

    private X509Certificate2 GetClientCertificateCore() => ClientCertificate = _context.Connection.ClientCertificate;

    // Returns the first recorded TLS client-certificate error, or 0 when
    // there is no certificate or no errors.
    private int GetClientCertificateErrorCore()
    {
        HttpConnection cnc = _context.Connection;
        if (cnc.ClientCertificate == null)
            return 0;
        int[] errors = cnc.ClientCertificateErrors;
        if (errors != null && errors.Length > 0)
            return errors[0];
        return 0;
    }

    // -1 signals "unknown length" for chunked requests.
    public long ContentLength64
    {
        get
        {
            if (_isChunked)
                _contentLength = -1;

            return _contentLength;
        }
    }

    public bool HasEntityBody => (_contentLength > 0 || _isChunked);

    public NameValueCollection Headers => _headers;

    public string HttpMethod => _method;

    // Lazily created; Stream.Null when the request has no body.
    public Stream InputStream
    {
        get
        {
            if (_inputStream == null)
            {
                if (_isChunked || _contentLength > 0)
                    _inputStream = _context.Connection.GetRequestStream(_isChunked, _contentLength);
                else
                    _inputStream = Stream.Null;
            }

            return _inputStream;
        }
    }

    // This managed listener performs no authentication.
    public bool IsAuthenticated => false;

    public bool IsSecureConnection => _context.Connection.IsSecure;

    public IPEndPoint LocalEndPoint => _context.Connection.LocalEndPoint;

    public IPEndPoint RemoteEndPoint => _context.Connection.RemoteEndPoint;

    public Guid RequestTraceIdentifier { get; } = Guid.NewGuid();

    private IAsyncResult BeginGetClientCertificateCore(AsyncCallback requestCallback, object state)
    {
        var asyncResult = new GetClientCertificateAsyncResult(this, state, requestCallback);

        // The certificate is already retrieved by the time this method is called. GetClientCertificateCore() evaluates to
        // a simple member access, so this will always complete immediately.
        ClientCertState = ListenerClientCertState.Completed;
        asyncResult.InvokeCallback(GetClientCertificateCore());

        return asyncResult;
    }

    public X509Certificate2 EndGetClientCertificate(IAsyncResult asyncResult)
    {
        if (asyncResult == null)
            throw new ArgumentNullException(nameof(asyncResult));

        GetClientCertificateAsyncResult clientCertAsyncResult = asyncResult as GetClientCertificateAsyncResult;
        if (clientCertAsyncResult == null || clientCertAsyncResult.AsyncObject != this)
        {
            throw new ArgumentException(SR.net_io_invalidasyncresult, nameof(asyncResult));
        }
        if (clientCertAsyncResult.EndCalled)
        {
            throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, nameof(EndGetClientCertificate)));
        }
        clientCertAsyncResult.EndCalled = true;

        return (X509Certificate2)clientCertAsyncResult.Result;
    }

    public string ServiceName => null;

    public TransportContext TransportContext => new Context();

    private Uri RequestUri => _requestUri;

    private bool SupportsWebSockets => true;

    private class GetClientCertificateAsyncResult : LazyAsyncResult
    {
        public GetClientCertificateAsyncResult(object myObject, object myState, AsyncCallback myCallBack) : base(myObject, myState, myCallBack) { }
    }
}
}
| |
using System;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Orleans.Runtime.Scheduler;
namespace Orleans.Runtime
{
/// <summary>
/// Grain-scoped timer: wraps an <see cref="AsyncTaskSafeTimer"/> and forwards each tick
/// to the owning activation's scheduling context, so the user callback runs under the
/// grain's single-threaded scheduler. Stopping/disposing is signalled by nulling
/// <c>asyncCallback</c> and/or <c>timer</c>.
/// </summary>
internal class GrainTimer : IGrainTimer
{
    private Func<object, Task> asyncCallback;   // cleared (set null) by Stop()/Dispose() to cancel future ticks
    private AsyncTaskSafeTimer timer;           // underlying safe timer; null after DisposeTimer()
    private readonly TimeSpan dueTime;
    private readonly TimeSpan timerFrequency;   // Constants.INFINITE_TIMESPAN means one-shot
    private DateTime previousTickTime;          // UTC time of the most recent completed tick (for freeze detection)
    private int totalNumTicks;
    private readonly ILogger logger;
    private Task currentlyExecutingTickTask;    // in-flight user callback, if any
    private readonly OrleansTaskScheduler scheduler;
    private readonly IActivationData activationData; // may be null (non-grain timers); notified on dispose

    public string Name { get; }

    // Stopped == either the underlying timer was disposed or the callback was cleared by Stop().
    private bool TimerAlreadyStopped { get { return timer == null || asyncCallback == null; } }

    private GrainTimer(OrleansTaskScheduler scheduler, IActivationData activationData, ILogger logger, Func<object, Task> asyncCallback, object state, TimeSpan dueTime, TimeSpan period, string name)
    {
        // Capture the current activation context so every tick is queued back to it.
        var ctxt = RuntimeContext.CurrentActivationContext;
        scheduler.CheckSchedulingContextValidity(ctxt);
        this.scheduler = scheduler;
        this.activationData = activationData;
        this.logger = logger;
        this.Name = name;
        this.asyncCallback = asyncCallback;
        timer = new AsyncTaskSafeTimer(logger,
            stateObj => TimerTick(stateObj, ctxt),
            state);
        this.dueTime = dueTime;
        timerFrequency = period;
        previousTickTime = DateTime.UtcNow;
        totalNumTicks = 0;
    }

    /// <summary>Creates a GrainTimer that adapts a synchronous TimerCallback to the async callback shape.</summary>
    internal static GrainTimer FromTimerCallback(
        OrleansTaskScheduler scheduler,
        ILogger logger,
        TimerCallback callback,
        object state,
        TimeSpan dueTime,
        TimeSpan period,
        string name = null)
    {
        return new GrainTimer(
            scheduler,
            null,
            logger,
            ob =>
            {
                if (callback != null)
                    callback(ob);
                return Task.CompletedTask;
            },
            state,
            dueTime,
            period,
            name);
    }

    /// <summary>Creates a GrainTimer for an async callback, optionally tied to an activation.</summary>
    internal static IGrainTimer FromTaskCallback(
        OrleansTaskScheduler scheduler,
        ILogger logger,
        Func<object, Task> asyncCallback,
        object state,
        TimeSpan dueTime,
        TimeSpan period,
        string name = null,
        IActivationData activationData = null)
    {
        return new GrainTimer(scheduler, activationData, logger, asyncCallback, state, dueTime, period, name);
    }

    /// <summary>Starts (or restarts) the underlying timer; throws if already stopped/disposed.</summary>
    public void Start()
    {
        if (TimerAlreadyStopped)
            throw new ObjectDisposedException(String.Format("The timer {0} was already disposed.", GetFullName()));

        timer.Start(dueTime, timerFrequency);
    }

    // Stop only clears the callback: any tick that fires afterwards becomes a no-op
    // (see TimerAlreadyStopped); the underlying timer is released by Dispose().
    public void Stop()
    {
        asyncCallback = null;
    }

    // Invoked by the underlying safe timer on a thread-pool thread; re-queues the
    // actual work onto the grain's scheduling context.
    private async Task TimerTick(object state, ISchedulingContext context)
    {
        if (TimerAlreadyStopped)
            return;
        try
        {
            // Schedule call back to grain context
            await this.scheduler.QueueNamedTask(() => ForwardToAsyncCallback(state), context, this.Name);
        }
        catch (InvalidSchedulingContextException exc)
        {
            // The activation is gone; this timer can never deliver again.
            logger.Error(ErrorCode.Timer_InvalidContext,
                string.Format("Caught an InvalidSchedulingContextException on timer {0}, context is {1}. Going to dispose this timer!",
                    GetFullName(), context), exc);
            DisposeTimer();
        }
    }

    // Runs the user callback under the grain context. Exceptions from the callback
    // are logged and swallowed so a faulty callback cannot kill the timer.
    private async Task ForwardToAsyncCallback(object state)
    {
        // AsyncSafeTimer ensures that calls to this method are serialized.
        var callback = asyncCallback;
        if (TimerAlreadyStopped) return;

        totalNumTicks++;

        if (logger.IsEnabled(LogLevel.Trace))
            logger.Trace(ErrorCode.TimerBeforeCallback, "About to make timer callback for timer {0}", GetFullName());

        try
        {
            RequestContext.Clear(); // Clear any previous RC, so it does not leak into this call by mistake.
            currentlyExecutingTickTask = callback(state);
            await currentlyExecutingTickTask;

            if (logger.IsEnabled(LogLevel.Trace)) logger.Trace(ErrorCode.TimerAfterCallback, "Completed timer callback for timer {0}", GetFullName());
        }
        catch (Exception exc)
        {
            logger.Error(
                ErrorCode.Timer_GrainTimerCallbackError,
                string.Format( "Caught and ignored exception: {0} with message: {1} thrown from timer callback {2}",
                    exc.GetType(),
                    exc.Message,
                    GetFullName()),
                exc);
        }
        finally
        {
            previousTickTime = DateTime.UtcNow;
            currentlyExecutingTickTask = null;
            // if this is not a repeating timer, then we can
            // dispose of the timer.
            if (timerFrequency == Constants.INFINITE_TIMESPAN)
                DisposeTimer();
        }
    }

    /// <summary>Returns the in-flight tick's task, or a completed task when idle.</summary>
    public Task GetCurrentlyExecutingTickTask()
    {
        return currentlyExecutingTickTask ?? Task.CompletedTask;
    }

    // Diagnostic name including the callback's target and method, used in log messages.
    private string GetFullName()
    {
        var callback = asyncCallback;
        var callbackTarget = callback?.Target?.ToString() ?? string.Empty;
        var callbackMethodInfo = callback?.GetMethodInfo()?.ToString() ?? string.Empty;
        return $"GrainTimer.{this.Name ?? string.Empty} TimerCallbackHandler:{callbackTarget ?? string.Empty}->{callbackMethodInfo ?? string.Empty}";
    }

    public int GetNumTicks()
    {
        return totalNumTicks;
    }

    // The reason we need to check CheckTimerFreeze on both the SafeTimer and this GrainTimer
    // is that SafeTimer may tick OK (no starvation by .NET thread pool), but then scheduler.QueueWorkItem
    // may not execute and starve this GrainTimer callback.
    public bool CheckTimerFreeze(DateTime lastCheckTime)
    {
        if (TimerAlreadyStopped) return true;
        // check underlying SafeTimer (checking that .NET thread pool does not starve this timer)
        if (!timer.CheckTimerFreeze(lastCheckTime, () => Name)) return false;
        // if SafeTimer failed the check, no need to check GrainTimer too, since it will fail as well.

        // check myself (checking that scheduler.QueueWorkItem does not starve this timer)
        return SafeTimerBase.CheckTimerDelay(previousTickTime, totalNumTicks,
                    dueTime, timerFrequency, logger, GetFullName, ErrorCode.Timer_TimerInsideGrainIsNotTicking, true);
    }

    /// <summary>Non-throwing variant of the delay check (does not log as an error condition).</summary>
    public bool CheckTimerDelay()
    {
        return SafeTimerBase.CheckTimerDelay(previousTickTime, totalNumTicks,
                    dueTime, timerFrequency, logger, GetFullName, ErrorCode.Timer_TimerInsideGrainIsNotTicking, false);
    }

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    // Maybe called by finalizer thread with disposing=false. As per guidelines, in such a case do not touch other objects.
    // Dispose() may be called multiple times
    protected virtual void Dispose(bool disposing)
    {
        if (disposing)
            DisposeTimer();

        asyncCallback = null;
    }

    // Idempotent: releases the underlying timer and notifies the owning activation (if any).
    private void DisposeTimer()
    {
        var tmp = timer;
        if (tmp == null) return;

        Utils.SafeExecute(tmp.Dispose);
        timer = null;
        asyncCallback = null;
        activationData?.OnTimerDisposed(this);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
namespace System.Text
{
internal static class ParsingTrie
{
#region Parsing trie struct
// The parsing trie is structured as an array, which means that there are two types of
// "nodes" for representational purposes
//
// The first node type (the parent node) uses the valueOrNumChildren to represent the number of children
// underneath it. The index is unused for this type of node, except when it's used for
// sequential node mapping (see below). If valueOrNumChildren is zero for this type of node, the index
// is used and represents an index into _digitsAndSymbols.
//
// The second node types immediately follow the first (the childe nodes). They are composed of a value
// (valueOrNumChildren), which is walked via binary search, and an index, which points to another
// node contained in the array.
//
// We use the int index here to encode max-min info for sequential leaves
// It's very common for digits to be encoded sequentially, so we save time by mapping here
// The index is formatted as such: 0xAABBCCDD, where AA = the min value,
// BB = the index of the min value relative to the current node (1-indexed),
// CC = the max value, and DD = the max value's index in the same coord-system as BB.
public struct Node
{
    // Parent node: the number of child nodes that follow (0 marks a leaf).
    // Child node: the byte value this branch matches (found via binary search).
    public byte ValueOrNumChildren;
    // Leaf (ValueOrNumChildren == 0): index into _digitsAndSymbols.
    // Parent with sequential children: packed 0xAABBCCDD min/max sequence map
    // (see the comment above this struct).
    // Child node: index of the node it points to.
    public int IndexOrSymbol;
}
#endregion Parsing trie struct
/// <summary>
/// A Suffix represents the ending sequence of bytes that correspond to a symbol.
/// Suffixes play an important role in the parsing trie generation algorithm.
///
/// Let's say there are four symbols:
/// Symbol 0: Sequence 1, 1, 2, 3
/// Symbol 1: Sequence 0, 1, 2, 3
/// Symbol 2: Sequence 0, 1, 4, 4
/// Symbol 3: Sequence 1, 1, 2, 1
///
/// First, a Suffix is created for each symbol's sequence, and the Suffixes are sorted by their byte sequences:
/// ListOfSuffix {
/// Suffix { SymbolIndex: 1, Bytes: { 0, 1, 2, 3 } }
/// Suffix { SymbolIndex: 2, Bytes: { 0, 1, 4, 4 } }
/// Suffix { SymbolIndex: 3, Bytes: { 1, 1, 2, 1 } }
/// Suffix { SymbolIndex: 0, Bytes: { 1, 1, 2, 3 } }
/// }
///
/// Next, the Suffixes are clumped into SuffixClumps, based on the beginning byte:
/// ListOfSuffixClump {
/// SuffixClump {
/// BeginningByte: 0
/// Suffixes {
/// Suffix { SymbolIndex: 1, Bytes: { 1, 2, 3 } }
/// Suffix { SymbolIndex: 2, Bytes: { 1, 4, 4 } }
/// }
/// }
/// SuffixClump {
/// BeginningByte: 1
/// Suffixes {
/// Suffix { SymbolIndex: 3, Bytes: { 1, 2, 1 } }
/// Suffix { SymbolIndex: 0, Bytes: { 1, 2, 3 } }
/// }
/// }
/// }
///
/// Then, a parent ParsingTrieNode is created, with its NumChildren equal to the number of SuffixClumps.
/// Each SuffixClump represents both a "child" node in the parsing trie, and the "parent" node that child
/// node points to.
///
/// Each SuffixClump that has more than one Suffix will require further clumping; that is to say, it does
/// not represent a leaf node in the parsing trie. Such SuffixClumps will be recursively clumped.
/// </summary>
private struct Suffix : IComparable<Suffix>
{
    public int SymbolIndex;   // index of the symbol this suffix belongs to
    public byte[] Bytes;      // remaining bytes of the symbol's sequence

    public Suffix(int symbolIndex, byte[] bytes)
    {
        SymbolIndex = symbolIndex;
        Bytes = bytes;
    }

    public Suffix(int symbolIndex, ReadOnlySpan<byte> bytes)
    {
        SymbolIndex = symbolIndex;
        // HACKHACK: Keeping Bytes as a Span property on Suffix will cause crashing in .NET Core 2.0.
        // Storing as pure array for now until we can re-visit.
        // This is necessary to unblock usage of fast Span for Kestrel and others.
        Bytes = bytes.ToArray();
    }

    // Lexicographic comparison of the byte sequences; when one sequence is a
    // strict prefix of the other, the shorter one sorts first.
    public int CompareTo(Suffix other)
    {
        int common = Math.Min(Bytes.Length, other.Bytes.Length);
        for (int i = 0; i < common; i++)
        {
            int diff = Bytes[i].CompareTo(other.Bytes[i]);
            if (diff != 0)
                return diff;
        }
        return Bytes.Length.CompareTo(other.Bytes.Length);
    }
}
// A group of suffixes that all begin with the same byte; each clump becomes
// one child node (and the parent it points to) in the parsing trie.
private struct SuffixClump
{
    public byte BeginningByte;    // the shared first byte of every suffix in this clump
    public List<Suffix> Suffixes; // the suffixes, with the shared byte still included

    public SuffixClump(byte beginningByte)
    {
        BeginningByte = beginningByte;
        // This list of suffixes will not exceed the number of symbols. Initialize
        // the list to be of size 20, which is slightly larger than the number of symbols.
        Suffixes = new List<Suffix>(20);
    }
}
/// <summary>
/// A run of clumps whose beginning bytes are consecutive values; encoded into a
/// parent node so lookups inside the run can be computed instead of searched.
/// </summary>
private struct Sequence : IComparable<Sequence>
{
    public int BeginningIndex;
    public int EndIndex;
    public byte BeginningValue;
    public byte EndValue;

    // A freshly created sequence spans a single clump, i.e. has length 0.
    public Sequence(int index, byte value)
    {
        BeginningIndex = index;
        EndIndex = index;
        BeginningValue = value;
        EndValue = value;
    }

    /// <summary>Number of clumps spanned beyond the first.</summary>
    public int Length => EndIndex - BeginningIndex;

    // Sequences are ordered purely by how many clumps they cover.
    public int CompareTo(Sequence other) => Length.CompareTo(other.Length);

    // Sequence map layout: 0xAABBCCDD
    //   AA: the min value
    //   BB: the index of the min value relative to the current node (1-indexed)
    //   CC: the max value
    //   DD: the max value's index in the same coordinate system as BB
    // The indices are shifted by one because the parent node sits one slot before
    // the 0-indexed child nodes.
    public int CreateSequenceMap() =>
        (BeginningValue << 24) + ((BeginningIndex + 1) << 16) + (EndValue << 8) + (EndIndex + 1);
}
// Recursively emits the parsing-trie node for sortedSuffixes and its whole subtree.
// The return value here is the index in parsingTrieList at which the parent node was placed.
private static int CreateParsingTrieNodeAndChildren(ref List<Node> parsingTrieList, List<Suffix> sortedSuffixes)
{
    // If there is only one suffix, create a leaf node
    if (sortedSuffixes.Count == 1)
    {
        Node leafNode = new Node();
        // Leaves use ValueOrNumChildren == 0; IndexOrSymbol carries the matched symbol.
        leafNode.ValueOrNumChildren = 0;
        leafNode.IndexOrSymbol = sortedSuffixes[0].SymbolIndex;
        int leafNodeIndex = parsingTrieList.Count;
        parsingTrieList.Add(leafNode);
        return leafNodeIndex;
    }

    // Group suffixes into clumps based on first byte
    List<SuffixClump> clumps = new List<SuffixClump>(sortedSuffixes.Count);

    byte beginningByte = sortedSuffixes[0].Bytes[0];
    // NOTE: SuffixClump is a struct, but its Suffixes member is a List reference,
    // so the copy stored in 'clumps' shares the list that currentClump mutates below.
    SuffixClump currentClump = new SuffixClump(beginningByte);
    clumps.Add(currentClump);

    // Initialize sequence detection
    Sequence currentSequence = new Sequence(0, beginningByte);
    Sequence longestSequence = currentSequence;

    foreach (Suffix suffix in sortedSuffixes)
    {
        var bytesSpan = new Span<byte>(suffix.Bytes);
        if (suffix.Bytes[0] == beginningByte)
        {
            // Same first byte as the current clump: strip that byte and file the remainder.
            currentClump.Suffixes.Add(new Suffix(suffix.SymbolIndex, bytesSpan.Slice(1)));
        }
        else
        {
            beginningByte = suffix.Bytes[0];

            // Determine if the new clump is part of a sequence
            if (beginningByte == currentSequence.EndValue + 1)
            {
                // This clump is part of the current sequence
                currentSequence.EndIndex++;
                currentSequence.EndValue++;
                if (!currentSequence.Equals(longestSequence) && currentSequence.CompareTo(longestSequence) > 0)
                {
                    // Replace the longest sequence with this sequence.
                    // Sequence is a struct, so this is a value copy; further growth of
                    // currentSequence re-runs this comparison and refreshes the copy.
                    longestSequence = currentSequence;
                }
            }
            else
            {
                // This clump is part of a new sequence
                currentSequence = new Sequence(clumps.Count, beginningByte);
            }

            // This is a new clump, with at least one suffix inside it. Add to the list of clumps.
            currentClump = new SuffixClump(beginningByte);
            currentClump.Suffixes.Add(new Suffix(suffix.SymbolIndex, bytesSpan.Slice(1)));
            clumps.Add(currentClump);
        }
    }

    // Now that we know how many children there are, create parent node and place in list
    Node parentNode = new Node();
    parentNode.ValueOrNumChildren = (byte)clumps.Count;
    // Only bother specifying a sequence if the longest sequence is sufficiently long
    if (longestSequence.Length > 5)
    {
        parentNode.IndexOrSymbol = longestSequence.CreateSequenceMap();
    }
    else
    {
        parentNode.IndexOrSymbol = 0;
    }
    int parentNodeIndex = parsingTrieList.Count;
    parsingTrieList.Add(parentNode);

    // Reserve space in list for child nodes. In this algorithm, all parent nodes are created first, leaving gaps for the child nodes
    // to be filled in once it is known where they point to.
    int childNodeStartIndex = parsingTrieList.Count;
    for (int i = 0; i < clumps.Count; i++)
    {
        parsingTrieList.Add(default);
    }

    // Process child nodes
    List<Node> childNodes = new List<Node>();
    foreach (SuffixClump clump in clumps)
    {
        Node childNode = new Node();
        childNode.ValueOrNumChildren = clump.BeginningByte;
        // Recurse first: the child records where its subtree's parent node landed.
        childNode.IndexOrSymbol = CreateParsingTrieNodeAndChildren(ref parsingTrieList, clump.Suffixes);
        childNodes.Add(childNode);
    }

    // Place child nodes in spots allocated for them
    int childNodeIndex = childNodeStartIndex;
    foreach (Node childNode in childNodes)
    {
        parsingTrieList[childNodeIndex] = childNode;
        childNodeIndex++;
    }

    return parentNodeIndex;
}
/// <summary>
/// Builds the parsing trie for the given symbol table. <paramref name="symbols"/>
/// is indexed by symbol id; null entries are skipped. Returns the trie flattened
/// into a Node array with the root at index 0.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when no non-null symbol is supplied, or when a symbol is empty,
/// duplicated, or a prefix of another symbol.
/// </exception>
public static Node[] Create(byte[][] symbols)
{
    List<Suffix> symbolList = new List<Suffix>(symbols.Length);
    for (int i = 0; i < symbols.Length; i++)
    {
        if (symbols[i] != null)
        {
            symbolList.Add(new Suffix(i, symbols[i]));
        }
    }

    // An empty symbol set used to crash later with an opaque IndexOutOfRangeException
    // while building the root node; fail fast with a clear message instead.
    if (symbolList.Count == 0)
    {
        throw new ArgumentException("At least one non-null symbol is required", nameof(symbols));
    }

    // Sort the symbol list. This is important for allowing binary search of the child nodes, as well as
    // counting the number of children a node has.
    symbolList.Sort();

    // Validate symbol consistency (the list is sorted, so comparing adjacent pairs suffices):
    // a) each symbol must be unique
    // b) a symbol cannot be a prefix of another symbol
    // c) symbols cannot be empty
    // The pairwise loop below never runs for a single-symbol list, so enforce c)
    // for the first (shortest-sorting) entry explicitly.
    if (symbolList[0].Bytes.Length == 0)
    {
        throw new ArgumentException("Symbol cannot be zero bytes long");
    }
    for (int i = 1; i < symbolList.Count; i++)
    {
        var first = symbolList[i - 1];
        var second = symbolList[i];

        if (first.Bytes.Length == 0 || second.Bytes.Length == 0)
        {
            throw new ArgumentException("Symbol cannot be zero bytes long");
        }

        var firstSpan = first.Bytes.AsSpan();
        if (firstSpan.SequenceEqual(second.Bytes))
        {
            throw new ArgumentException("Symbols cannot be identical");
        }

        if (first.Bytes.Length > second.Bytes.Length)
        {
            if (firstSpan.StartsWith(second.Bytes))
            {
                throw new ArgumentException("Symbols are ambiguous");
            }
        }
        else if (first.Bytes.Length < second.Bytes.Length)
        {
            if (second.Bytes.AsSpan().StartsWith(first.Bytes))
            {
                throw new ArgumentException("Symbols are ambiguous");
            }
        }
    }

    List<Node> parsingTrieList = new List<Node>(100);
    CreateParsingTrieNodeAndChildren(ref parsingTrieList, symbolList);
    return parsingTrieList.ToArray();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    private static void OrSingle()
    {
        var test = new SimpleBinaryOpTest__OrSingle();

        if (!test.IsSupported)
        {
            // On hardware without SSE support, the intrinsic must throw
            // PlatformNotSupportedException.
            test.RunUnsupportedScenario();
        }
        else
        {
            // Basic functionality via Unsafe.Read, Load and LoadAligned.
            test.RunBasicScenario_UnsafeRead();
            test.RunBasicScenario_Load();
            test.RunBasicScenario_LoadAligned();

            // The same three load variants, invoked through reflection.
            test.RunReflectionScenario_UnsafeRead();
            test.RunReflectionScenario_Load();
            test.RunReflectionScenario_LoadAligned();

            // Static member operands.
            test.RunClsVarScenario();

            // Local-variable operands via Unsafe.Read, Load and LoadAligned.
            test.RunLclVarScenario_UnsafeRead();
            test.RunLclVarScenario_Load();
            test.RunLclVarScenario_LoadAligned();

            // Fields of a local instance, then fields of this instance.
            test.RunLclFldScenario();
            test.RunFldScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
/// <summary>
/// Scenario harness for Sse.Or on Vector128&lt;Single&gt;: runs the intrinsic through
/// direct calls, reflection, static/local/field operands, and validates each lane
/// of the result against a scalar bitwise OR of the inputs.
/// </summary>
public sealed unsafe class SimpleBinaryOpTest__OrSingle
{
    private const int VectorSize = 16;

    // Number of Single lanes in a 128-bit vector (4).
    private const int ElementCount = VectorSize / sizeof(Single);

    private static Single[] _data1 = new Single[ElementCount];
    private static Single[] _data2 = new Single[ElementCount];

    private static Vector128<Single> _clsVar1;
    private static Vector128<Single> _clsVar2;

    private Vector128<Single> _fld1;
    private Vector128<Single> _fld2;

    private SimpleBinaryOpTest__DataTable<Single> _dataTable;

    static SimpleBinaryOpTest__OrSingle()
    {
        var random = new Random();

        // NOTE: _clsVar1 is filled from _data2 and _clsVar2 from _data1. The swap is
        // benign: Or is commutative and validation reads the operand vectors back.
        for (var i = 0; i < ElementCount; i++) { _data1[i] = (float)(random.NextDouble()); _data2[i] = (float)(random.NextDouble()); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data2[0]), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data1[0]), VectorSize);
    }

    public SimpleBinaryOpTest__OrSingle()
    {
        Succeeded = true;

        var random = new Random();

        // First pass of random data feeds the instance fields ...
        for (var i = 0; i < ElementCount; i++) { _data1[i] = (float)(random.NextDouble()); _data2[i] = (float)(random.NextDouble()); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), VectorSize);

        // ... a second pass feeds the pointer-accessible data table.
        for (var i = 0; i < ElementCount; i++) { _data1[i] = (float)(random.NextDouble()); _data2[i] = (float)(random.NextDouble()); }
        _dataTable = new SimpleBinaryOpTest__DataTable<Single>(_data1, _data2, new Single[ElementCount], VectorSize);
    }

    public bool IsSupported => Sse.IsSupported;

    public bool Succeeded { get; set; }

    public void RunBasicScenario_UnsafeRead()
    {
        var result = Sse.Or(
            Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
            Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunBasicScenario_Load()
    {
        var result = Sse.Or(
            Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
            Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunBasicScenario_LoadAligned()
    {
        var result = Sse.Or(
            Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
            Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_UnsafeRead()
    {
        var result = typeof(Sse).GetMethod(nameof(Sse.Or), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
                                .Invoke(null, new object[] {
                                    Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr),
                                    Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr)
                                });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_Load()
    {
        var result = typeof(Sse).GetMethod(nameof(Sse.Or), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
                                .Invoke(null, new object[] {
                                    Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr)),
                                    Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr))
                                });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_LoadAligned()
    {
        var result = typeof(Sse).GetMethod(nameof(Sse.Or), new Type[] { typeof(Vector128<Single>), typeof(Vector128<Single>) })
                                .Invoke(null, new object[] {
                                    Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr)),
                                    Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr))
                                });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Single>)(result));
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunClsVarScenario()
    {
        var result = Sse.Or(
            _clsVar1,
            _clsVar2
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_UnsafeRead()
    {
        var left = Unsafe.Read<Vector128<Single>>(_dataTable.inArray1Ptr);
        var right = Unsafe.Read<Vector128<Single>>(_dataTable.inArray2Ptr);
        var result = Sse.Or(left, right);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_Load()
    {
        var left = Sse.LoadVector128((Single*)(_dataTable.inArray1Ptr));
        var right = Sse.LoadVector128((Single*)(_dataTable.inArray2Ptr));
        var result = Sse.Or(left, right);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_LoadAligned()
    {
        var left = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray1Ptr));
        var right = Sse.LoadAlignedVector128((Single*)(_dataTable.inArray2Ptr));
        var result = Sse.Or(left, right);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    public void RunLclFldScenario()
    {
        var test = new SimpleBinaryOpTest__OrSingle();
        var result = Sse.Or(test._fld1, test._fld2);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }

    public void RunFldScenario()
    {
        var result = Sse.Or(_fld1, _fld2);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
    }

    public void RunUnsupportedScenario()
    {
        Succeeded = false;

        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            // Expected on unsupported hardware.
            Succeeded = true;
        }
    }

    private void ValidateResult(Vector128<Single> left, Vector128<Single> right, void* result, [CallerMemberName] string method = "")
    {
        Single[] inArray1 = new Single[ElementCount];
        Single[] inArray2 = new Single[ElementCount];
        Single[] outArray = new Single[ElementCount];

        Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
        Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
    {
        Single[] inArray1 = new Single[ElementCount];
        Single[] inArray2 = new Single[ElementCount];
        Single[] outArray = new Single[ElementCount];

        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
    {
        // Compare bit patterns (not float values): Or is a bitwise operation.
        if ((BitConverter.SingleToInt32Bits(left[0]) | BitConverter.SingleToInt32Bits(right[0])) != BitConverter.SingleToInt32Bits(result[0]))
        {
            Succeeded = false;
        }
        else
        {
            for (var i = 1; i < left.Length; i++)
            {
                // BUGFIX: this loop previously re-checked element 0 (left[0] | right[0]
                // vs result[0]) on every iteration, so mismatches in lanes 1..N-1 were
                // never detected. Check lane i instead.
                if ((BitConverter.SingleToInt32Bits(left[i]) | BitConverter.SingleToInt32Bits(right[i])) != BitConverter.SingleToInt32Bits(result[i]))
                {
                    Succeeded = false;
                    break;
                }
            }
        }

        if (!Succeeded)
        {
            Console.WriteLine($"{nameof(Sse)}.{nameof(Sse.Or)}<Single>: {method} failed:");
            Console.WriteLine($"    left: ({string.Join(", ", left)})");
            Console.WriteLine($"   right: ({string.Join(", ", right)})");
            Console.WriteLine($"  result: ({string.Join(", ", result)})");
            Console.WriteLine();
        }
    }
}
}
| |
using System;
using System.Diagnostics;
using System.Windows;
using System.Collections.Generic;
using Dependencies;
using Dependencies.ClrPh;
/// <summary>
/// View-model wrapping a single PE import entry for display: resolves demangled
/// names, exposes icon/type/ordinal/hint properties, and provides clipboard and
/// online-search commands.
/// </summary>
public class DisplayPeImport : SettingBindingHandler
{
#region Constructors
    public DisplayPeImport(
        PeImport PeImport,
        PhSymbolProvider SymPrv,
        string ModuleFilePath,
        bool ImportFound
    )
    {
        Info.ordinal = PeImport.Ordinal;
        Info.hint = PeImport.Hint;
        Info.name = PeImport.Name;
        Info.moduleName = PeImport.ModuleName;
        Info.modulePath = ModuleFilePath;
        Info.importNotFound = !ImportFound;

        Tuple<CLRPH_DEMANGLER, string> DemanglingInfos = SymPrv.UndecorateName(PeImport.Name);
        Info.Demangler = Enum.GetName(typeof(CLRPH_DEMANGLER), DemanglingInfos.Item1);
        Info.UndecoratedName = DemanglingInfos.Item2;

        if (ImportFound)
        {
            Info.delayedImport = PeImport.DelayImport;
            // MSVC C++ mangled names start with '?'.
            Info.importAsCppName = (PeImport.Name.Length > 0 && PeImport.Name[0] == '?');
            Info.importByOrdinal = PeImport.ImportByOrdinal;
        }

        // Re-render Name / ModuleName whenever the matching user settings change.
        AddNewEventHandler("Undecorate", "Undecorate", "Name", this.GetDisplayName);
        AddNewEventHandler("FullPath", "FullPath", "ModuleName", this.GetPathDisplayName);
    }
#endregion Constructors

#region PublicAPI
    public override string ToString()
    {
        List<string> members = new List<string>() {
            Ordinal != null ? String.Format("{0} (0x{0:x08})", Ordinal) : "N/A",
            Hint != null ? String.Format("{0} (0x{0:x08})", Hint) : "N/A",
            Name,
            ModuleName,
            DelayImport.ToString(),
            Demangler
        };

        return String.Join(", ", members.ToArray());
    }

    /// <summary>
    /// Icon resource path reflecting import kind (ordinal / C++ / C) and
    /// whether the import was resolved.
    /// </summary>
    public string IconUri
    {
        // @TODO(implement API lookup in order to test for API Export presence)
        get
        {
            string PathStrFormat = "Images/import_{0:s}_found.png";

            if (Info.importNotFound)
                PathStrFormat = "Images/import_{0:s}_not_found.png";

            if (Info.importByOrdinal)
                return String.Format(PathStrFormat, "ord");

            if (Info.importAsCppName)
                return String.Format(PathStrFormat, "cpp");

            return String.Format(PathStrFormat, "c");
        }
    }

    /// <summary>
    /// Import category: 1 = not found, 2 = by ordinal, 3 = C++ name, 0 = plain C name.
    /// </summary>
    public int Type
    {
        get
        {
            if (Info.importNotFound)
                return 1;

            if (Info.importByOrdinal)
                return 2;

            if (Info.importAsCppName)
                return 3;

            return 0;
        }
    }

    // Hint is only meaningful for imports by name.
    public ushort? Hint
    {
        get
        {
            if (Info.importByOrdinal)
                return null;

            return Info.hint;
        }
    }

    // Ordinal is only meaningful for imports by ordinal.
    public ushort? Ordinal { get { if (Info.importByOrdinal) { return Info.ordinal; } return null; } }

    public string Name
    {
        get { return GetDisplayName(Dependencies.Properties.Settings.Default.Undecorate); }
    }

    public string ModuleName
    {
        get { return GetPathDisplayName(Dependencies.Properties.Settings.Default.FullPath); }
    }

    /// <summary>Stable "module:function" key used for filtering.</summary>
    public string FilterName
    {
        get { return String.Format("{0:s}:{1:s}", this.ModuleName, this.Name); }
    }

    public Boolean DelayImport { get { return Info.delayedImport; } }
    public string Demangler { get { return this.Info.Demangler; } }

    // Returns the demangled name when the setting asks for it and one is available.
    protected string GetDisplayName(bool UndecorateName)
    {
        if (Info.importByOrdinal)
            return String.Format("Ordinal_{0:d}", Info.ordinal);

        if ((UndecorateName) && (Info.UndecoratedName.Length > 0))
            return Info.UndecoratedName;

        return Info.name;
    }

    // Returns the full module path when requested and known, else the bare module name.
    protected string GetPathDisplayName(bool FullPath)
    {
        if ((FullPath) && (Info.modulePath != null))
            return Info.modulePath;

        return Info.moduleName;
    }
#endregion PublicAPI

#region Commands
    /// <summary>Opens a docs.microsoft.com search for the imported symbol.</summary>
    public RelayCommand QueryImportApi
    {
        get
        {
            if (_QueryImportApi == null)
            {
                _QueryImportApi = new RelayCommand((param) =>
                {
                    if ((param == null))
                    {
                        return;
                    }

                    string ExportName = (param as DisplayPeImport).Name;
                    if (ExportName == null)
                    {
                        return;
                    }

                    // BUGFIX: URL-escape the symbol name. C++ mangled names routinely
                    // contain '?', '@' and '&', which would otherwise corrupt the
                    // query string.
                    Process.Start(@"https://docs.microsoft.com/search/?search=" + Uri.EscapeDataString(ExportName));
                });
            }

            return _QueryImportApi;
        }
    }

    /// <summary>Copies the supplied string parameter to the clipboard.</summary>
    public RelayCommand CopyValue
    {
        get
        {
            if (_CopyValue == null)
            {
                _CopyValue = new RelayCommand((param) =>
                {
                    if ((param == null))
                    {
                        return;
                    }

                    Clipboard.Clear();
                    try
                    {
                        Clipboard.SetText((string)param, TextDataFormat.Text);
                    }
                    catch { } // best-effort: the clipboard can be locked by another process
                });
            }

            return _CopyValue;
        }
    }
#endregion // Commands

    private PeImportInfo Info;
    private RelayCommand _QueryImportApi;
    private RelayCommand _CopyValue;
}
/// <summary>
/// Plain data holder describing one PE import entry as gathered by
/// DisplayPeImport's constructor.
/// </summary>
public struct PeImportInfo
{
    // Import ordinal; displayed only when importByOrdinal is true.
    public ushort ordinal;
    // Import hint; displayed only for imports by name.
    public ushort hint;
    // Raw (possibly mangled) imported symbol name.
    public string name;
    // Module name as recorded in the import table.
    public string moduleName;
    // Resolved on-disk path of the module, when found.
    public string modulePath;
    // Demangled form of 'name', as produced by the symbol provider.
    public string UndecoratedName;
    // Name of the demangler that produced UndecoratedName.
    public string Demangler;

    public Boolean delayedImport;
    public Boolean importByOrdinal;
    // True when the raw name looks like an MSVC C++ mangled name (leading '?').
    public Boolean importAsCppName;
    // True when the import could not be resolved to an export.
    public Boolean importNotFound;
}
| |
using System.Linq;
using GitTools.Testing;
using GitVersion.Extensions;
using GitVersion.Model.Configuration;
using GitVersion.VersionCalculation;
using GitVersionCore.Tests.Helpers;
using LibGit2Sharp;
using NUnit.Framework;
namespace GitVersionCore.Tests.IntegrationTests
{
/// <summary>
/// Integration tests covering GitFlow hotfix branches: version calculation on
/// hotfix branches, merging them back to master/support/develop, and feature
/// branches taken off a hotfix. Each test drives a real temporary repository,
/// so statement order is significant.
/// </summary>
[TestFixture]
public class HotfixBranchScenarios : TestBase
{
    /// <summary>
    /// Hotfix off the latest release: beta pre-releases while the branch is open,
    /// stable patch version once merged and tagged on master.
    /// </summary>
    [Test]
    // This test actually validates #465 as well
    public void PatchLatestReleaseExample()
    {
        using var fixture = new BaseGitFlowRepositoryFixture("1.2.0");
        // create hotfix
        Commands.Checkout(fixture.Repository, "master");
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("hotfix-1.2.1"));

        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.2.1-beta.1+1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.2.1-beta.1+2");
        // Tagging the beta pins the pre-release; the next commit bumps to beta.2.
        fixture.Repository.ApplyTag("1.2.1-beta.1");
        fixture.AssertFullSemver("1.2.1-beta.1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.2.1-beta.2+3");

        // Merge hotfix branch to master
        Commands.Checkout(fixture.Repository, "master");

        fixture.Repository.MergeNoFF("hotfix-1.2.1", Generate.SignatureNow());
        fixture.AssertFullSemver("1.2.1+4");

        fixture.Repository.ApplyTag("1.2.1");
        fixture.AssertFullSemver("1.2.1");

        // Verify develop version
        Commands.Checkout(fixture.Repository, "develop");
        fixture.AssertFullSemver("1.3.0-alpha.1");

        fixture.Repository.MergeNoFF("hotfix-1.2.1", Generate.SignatureNow());
        fixture.AssertFullSemver("1.3.0-alpha.5");
    }

    /// <summary>
    /// The "hotfixes/" branch-name prefix is recognised the same way "hotfix-" is,
    /// and the version is taken from the branch name.
    /// </summary>
    [Test]
    public void CanTakeVersionFromHotfixesBranch()
    {
        using var fixture = new BaseGitFlowRepositoryFixture(r =>
        {
            r.MakeATaggedCommit("1.0.0");
            r.MakeATaggedCommit("1.1.0");
            r.MakeATaggedCommit("2.0.0");
        });
        // Merge hotfix branch to support
        Commands.Checkout(fixture.Repository, "master");
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("support-1.1", (Commit)fixture.Repository.Tags.Single(t => t.FriendlyName == "1.1.0").Target));
        fixture.AssertFullSemver("1.1.0");

        // create hotfix branch
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("hotfixes/1.1.1"));
        fixture.AssertFullSemver("1.1.0"); // We are still on a tagged commit
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+2");
    }

    /// <summary>
    /// Hotfix off an older release via a support branch, including a pull-request
    /// feature branch merged back into the hotfix before it completes.
    /// </summary>
    [Test]
    public void PatchOlderReleaseExample()
    {
        using var fixture = new BaseGitFlowRepositoryFixture(r =>
        {
            r.MakeATaggedCommit("1.0.0");
            r.MakeATaggedCommit("1.1.0");
            r.MakeATaggedCommit("2.0.0");
        });
        // Merge hotfix branch to support
        Commands.Checkout(fixture.Repository, "master");
        var tag = fixture.Repository.Tags.Single(t => t.FriendlyName == "1.1.0");
        var supportBranch = fixture.Repository.CreateBranch("support-1.1", (Commit)tag.Target);
        Commands.Checkout(fixture.Repository, supportBranch);
        fixture.AssertFullSemver("1.1.0");

        // create hotfix branch
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("hotfix-1.1.1"));
        fixture.AssertFullSemver("1.1.0"); // We are still on a tagged commit
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+2");

        // Create feature branch off hotfix branch and complete
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("feature/fix"));
        fixture.AssertFullSemver("1.1.1-fix.1+2");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-fix.1+3");

        fixture.Repository.CreatePullRequestRef("feature/fix", "hotfix-1.1.1", normalise: true, prNumber: 8);
        fixture.AssertFullSemver("1.1.1-PullRequest0008.4");
        Commands.Checkout(fixture.Repository, "hotfix-1.1.1");
        fixture.Repository.MergeNoFF("feature/fix", Generate.SignatureNow());
        fixture.AssertFullSemver("1.1.1-beta.1+4");

        // Merge hotfix into support branch to complete hotfix
        Commands.Checkout(fixture.Repository, "support-1.1");
        fixture.Repository.MergeNoFF("hotfix-1.1.1", Generate.SignatureNow());
        fixture.AssertFullSemver("1.1.1+5");
        fixture.Repository.ApplyTag("1.1.1");
        fixture.AssertFullSemver("1.1.1");

        // Verify develop version
        Commands.Checkout(fixture.Repository, "develop");
        fixture.AssertFullSemver("2.1.0-alpha.1");
        fixture.Repository.MergeNoFF("support-1.1", Generate.SignatureNow());
        fixture.AssertFullSemver("2.1.0-alpha.7");
    }

    /// <summary>
    /// Create a feature branch from a hotfix branch, and merge back, then delete it
    /// </summary>
    [Test]
    public void FeatureOnHotfixFeatureBranchDeleted()
    {
        var config = new Config
        {
            AssemblyVersioningScheme = AssemblyVersioningScheme.MajorMinorPatchTag,
            VersioningMode = VersioningMode.ContinuousDeployment
        };

        using var fixture = new EmptyRepositoryFixture();
        var release450 = "release/4.5.0";
        var hotfix451 = "hotfix/4.5.1";
        var support45 = "support/4.5";
        var tag450 = "4.5.0";
        var featureBranch = "feature/some-bug-fix";

        fixture.Repository.MakeACommit("initial");
        fixture.Repository.CreateBranch("develop");
        Commands.Checkout(fixture.Repository, "develop");

        // create release branch
        fixture.Repository.CreateBranch(release450);
        Commands.Checkout(fixture.Repository, release450);
        fixture.AssertFullSemver("4.5.0-beta.0", config);
        fixture.Repository.MakeACommit("blabla");
        Commands.Checkout(fixture.Repository, "develop");
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());
        Commands.Checkout(fixture.Repository, "master");
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());

        // create support branch
        fixture.Repository.CreateBranch(support45);
        Commands.Checkout(fixture.Repository, support45);
        fixture.Repository.ApplyTag(tag450);
        fixture.AssertFullSemver("4.5.0", config);

        // create hotfix branch
        fixture.Repository.CreateBranch(hotfix451);
        Commands.Checkout(fixture.Repository, hotfix451);

        // feature branch from hotfix
        fixture.Repository.CreateBranch(featureBranch);
        Commands.Checkout(fixture.Repository, featureBranch);
        fixture.Repository.MakeACommit("blabla"); // commit 1
        Commands.Checkout(fixture.Repository, hotfix451);
        fixture.Repository.MergeNoFF(featureBranch, Generate.SignatureNow()); // commit 2
        // Deleting the merged feature branch must not change the computed version.
        fixture.Repository.Branches.Remove(featureBranch);
        fixture.AssertFullSemver("4.5.1-beta.2", config);
    }

    /// <summary>
    /// Create a feature branch from a hotfix branch, and merge back, but don't delete it
    /// </summary>
    [Test]
    public void FeatureOnHotfixFeatureBranchNotDeleted()
    {
        var config = new Config
        {
            AssemblyVersioningScheme = AssemblyVersioningScheme.MajorMinorPatchTag,
            VersioningMode = VersioningMode.ContinuousDeployment
        };

        using var fixture = new EmptyRepositoryFixture();
        var release450 = "release/4.5.0";
        var hotfix451 = "hotfix/4.5.1";
        var support45 = "support/4.5";
        var tag450 = "4.5.0";
        var featureBranch = "feature/some-bug-fix";

        fixture.Repository.MakeACommit("initial");
        fixture.Repository.CreateBranch("develop");
        Commands.Checkout(fixture.Repository, "develop");

        // create release branch
        fixture.Repository.CreateBranch(release450);
        Commands.Checkout(fixture.Repository, release450);
        fixture.AssertFullSemver("4.5.0-beta.0", config);
        fixture.Repository.MakeACommit("blabla");
        Commands.Checkout(fixture.Repository, "develop");
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());
        Commands.Checkout(fixture.Repository, "master");
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());

        // create support branch
        fixture.Repository.CreateBranch(support45);
        Commands.Checkout(fixture.Repository, support45);
        fixture.Repository.ApplyTag(tag450);
        fixture.AssertFullSemver("4.5.0", config);

        // create hotfix branch
        fixture.Repository.CreateBranch(hotfix451);
        Commands.Checkout(fixture.Repository, hotfix451);

        // feature branch from hotfix
        fixture.Repository.CreateBranch(featureBranch);
        Commands.Checkout(fixture.Repository, featureBranch);
        fixture.Repository.MakeACommit("blabla"); // commit 1
        Commands.Checkout(fixture.Repository, hotfix451);
        fixture.Repository.MergeNoFF(featureBranch, Generate.SignatureNow()); // commit 2
        fixture.AssertFullSemver("4.5.1-beta.2", config);
    }
}
}
| |
using System;
using System.Collections;
using System.Management.Automation;
using System.Net.Http;
using TfsCmdlets.Extensions;
using TfsCmdlets.HttpClient;
using TfsCmdlets.Services;
using TfsCmdlets.Util;
namespace TfsCmdlets.Cmdlets.RestApi
{
/// <summary>
/// Invoke an Azure DevOps REST API.
/// </summary>
/// <remarks>
/// Invoke-TfsRestApi can automatically parse an example URL from
/// https://docs.microsoft.com/en-us/rest/api/azure/devops/ and replace its various tokens
/// (such as {organization}, {project} and {team}) as long as collection / project / team
/// information are available via either their respective arguments in this command or the
/// corresponding Connect-Tfs* cmdlet. HTTP method and API version are also automatically extracted
/// from the supplied example, when available.
/// </remarks>
/// <example>
/// <code>Invoke-TfsRestApi -Method GET -Path /_apis/projects -ApiVersion 4.1 -Collection DefaultCollection</code>
/// <para>Calls a REST API that lists all team projects in a TFS collection named DefaultCollection</para>
/// </example>
/// <example>
/// <code>Invoke-TfsRestApi 'GET https://extmgmt.dev.azure.com/{organization}/_apis/extensionmanagement/installedextensions?api-version=5.1-preview.1'</code>
/// <para>Calls the API described by an example extracted from the docs.microsoft.com web site.
/// HTTP method, host name and API version are all set based on the supplied values;
/// Tokens {organization}, {project} and {team} are properly replaced with the corresponding
/// values provided by the current connection context (via previous calls to
/// Connect-TfsTeamProjectCollection, Connect-TfsTeamProject and/or Connect-TfsTeam).</para>
/// </example>
/// <example>
/// <code>Invoke-TfsRestApi 'GET https://{instance}/{collection}/_apis/process/processes?api-version=4.1' -Collection http://vsalm:8080/tfs/DefaultCollection</code>
/// <para>Calls an API in a TFS instance, parsing the example provided by the docs.microsoft.com web site.</para>
/// </example>
[Cmdlet(VerbsLifecycle.Invoke, "TfsRestApi")]
public class InvokeRestApi : CmdletBase
{
/// <summary>
/// Specifies the path of the REST API to call. Typically it is the portion of the URL after
/// the name of the collection/organization, i.e. in the URL
/// https://dev.azure.com/{organization}/_apis/projects?api-version=5.1 the path is
/// "/_apis/projects".
/// </summary>
[Parameter(Mandatory = true, Position = 0)]
[ValidateNotNullOrEmpty]
public string Path { get; set; }

/// <summary>
/// Specifies the HTTP method to call the API endpoint. When omitted, defaults to "GET".
/// </summary>
[Parameter()]
public string Method { get; set; } = "GET";

/// <summary>
/// Specifies the request body to send to the API endpoint. Typically contains the JSON payload
/// required by the API.
/// </summary>
[Parameter()]
[Alias("Content")]
public string Body { get; set; }

/// <summary>
/// Specifies the request body content type to send to the API. When omitted, defaults to
/// "application/json".
/// </summary>
[Parameter()]
public string RequestContentType { get; set; } = "application/json";

/// <summary>
/// Specifies the response body content type returned by the API. When omitted, defaults to
/// "application/json".
/// </summary>
[Parameter()]
public string ResponseContentType { get; set; } = "application/json";

/// <summary>
/// Specifies a hashtable with additional HTTP headers to send to the API endpoint.
/// </summary>
[Parameter()]
public Hashtable AdditionalHeaders { get; set; }

/// <summary>
/// Specifies a hashtable with additional query parameters to send to the API endpoint.
/// </summary>
[Parameter()]
public Hashtable QueryParameters { get; set; }

/// <summary>
/// Specifies the desired API version. When omitted, defaults to "4.1".
/// </summary>
[Parameter()]
public string ApiVersion { get; set; } = "4.1";

/// <summary>
/// Specifies an alternate host name for APIs not hosted in "dev.azure.com",
/// e.g. "vsaex.dev.azure.com" or "vssps.dev.azure.com".
/// </summary>
[Parameter()]
public string UseHost { get; set; }

/// <summary>
/// Returns the API response as an unparsed string. If omitted, JSON responses will be
/// parsed, converted and returned as objects (via ConvertFrom-Json).
/// </summary>
[Parameter()]
public SwitchParameter Raw { get; set; }

/// <summary>
/// Returns the System.Threading.Tasks.Task object used to issue the asynchronous call to the API.
/// The caller is responsible for finishing the asynchronous call by e.g. accessing the Result property.
/// </summary>
[Parameter()]
public SwitchParameter AsTask { get; set; }

/// <summary>
/// HELP_PARAM_TEAM
/// </summary>
[Parameter()]
public object Team { get; set; }

/// <summary>
/// HELP_PARAM_PROJECT
/// </summary>
[Parameter()]
public object Project { get; set; }

/// <summary>
/// HELP_PARAM_COLLECTION
/// </summary>
[Parameter()]
public object Collection { get; set; }
/// <summary>
/// Performs execution of the command
/// </summary>
protected override void DoProcessRecord()
{
if (Path.Contains(" "))
{
var tokens = Path.Split(' ');
if (IsHttpMethod(tokens[0]))
{
Method = tokens[0];
Path = Path.Substring(tokens[0].Length+1);
}
}
var tpc = this.GetCollection();
Path = Path.Replace("https://{instance}/{collection}/", "http://tfs/");
if (Uri.TryCreate(Path, UriKind.Absolute, out var uri))
{
var host = uri.Host;
if (host.EndsWith(".dev.azure.com"))
{
UseHost = host;
}
Path = uri.AbsolutePath.Replace("%7Borganization%7D/", "");
if (uri.AbsoluteUri.StartsWith(tpc.Uri.AbsoluteUri))
{
Path = Path.Substring(tpc.Uri.AbsoluteUri.Length);
}
var query = uri.ParseQueryString();
if(query["api-version"] != null)
{
ApiVersion = query["api-version"];
}
}
if (Path.Contains("%7Bproject%7D") || Path.Contains("%7BprojectId%7D"))
{
var (_, tp) = GetCollectionAndProject();
Path = Path
.Replace("%7Bproject%7D", tp.Id.ToString())
.Replace("%7BprojectId%7D", tp.Id.ToString());
this.Log($"Replace token {{project[Id]}} in URL with [{tp.Id}]");
}
if (Path.Contains("%7Bteam%7D") || Path.Contains("%7BteamId%7D"))
{
var (_, _, t) = GetCollectionProjectAndTeam();
Path = Path
.Replace("%7Bteam%7D", t.Id.ToString())
.Replace("%7BteamId%7D", t.Id.ToString());
this.Log($"Replace token {{team}} in URL with [{t.Id}]");
}
this.Log($"Path '{Path}', version '{ApiVersion}'");
if(tpc.IsHosted && !string.IsNullOrEmpty(UseHost))
{
GenericHttpClient.UseHost(UseHost);
}
var client = this.GetService<IRestApiService>();
var task = client.InvokeAsync(tpc, Path, Method, Body,
RequestContentType, ResponseContentType,
AdditionalHeaders.ToDictionary<string, string>(),
QueryParameters.ToDictionary<string, string>(),
ApiVersion);
this.Log($"{Method} {client.Uri.AbsoluteUri}");
if (AsTask)
{
WriteObject(task);
return;
}
var result = task.GetResult("Unknown error when calling REST API");
var responseBody = result.Content.ReadAsStringAsync().GetAwaiter().GetResult();
var responseType = result.Content.Headers.ContentType.MediaType;
WriteObject(!Raw && responseType.Equals("application/json")
? PSJsonConverter.Deserialize(responseBody)
: responseBody);
}
private bool IsHttpMethod(string method)
{
try
{
var m = new HttpMethod(method);
return true;
}
catch
{
return false;
}
}
}
}
| |
using System;
using Lucene.Net.Documents;
namespace Lucene.Net.Index
{
using BinaryDocValuesField = BinaryDocValuesField;
using BinaryDocValuesUpdate = Lucene.Net.Index.DocValuesUpdate.BinaryDocValuesUpdate;
using BytesRef = Lucene.Net.Util.BytesRef;
using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator;
using FixedBitSet = Lucene.Net.Util.FixedBitSet;
using InPlaceMergeSorter = Lucene.Net.Util.InPlaceMergeSorter;
using PackedInts = Lucene.Net.Util.Packed.PackedInts;
using PagedGrowableWriter = Lucene.Net.Util.Packed.PagedGrowableWriter;
using PagedMutable = Lucene.Net.Util.Packed.PagedMutable;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A <seealso cref="DocValuesFieldUpdates"/> which holds updates of documents, of a single
/// <seealso cref="BinaryDocValuesField"/>.
///
/// @lucene.experimental
/// </summary>
internal class BinaryDocValuesFieldUpdates : DocValuesFieldUpdates
{
    /// <summary>
    /// Iterator over the doc/value pairs held by an enclosing
    /// <see cref="BinaryDocValuesFieldUpdates"/>, in document-id order.
    /// </summary>
    internal sealed class Iterator : DocValuesFieldUpdates.Iterator
    {
        internal readonly PagedGrowableWriter Offsets;
        internal readonly int Size;
        internal readonly PagedGrowableWriter Lengths;
        internal readonly PagedMutable Docs;
        internal readonly FixedBitSet DocsWithField;
        internal long Idx = 0; // long so we don't overflow if size == Integer.MAX_VALUE
        internal int Doc_Renamed = -1;
        internal readonly BytesRef Value_Renamed;
        internal int Offset, Length;

        internal Iterator(int size, PagedGrowableWriter offsets, PagedGrowableWriter lengths, PagedMutable docs, BytesRef values, FixedBitSet docsWithField)
        {
            this.Offsets = offsets;
            this.Size = size;
            this.Lengths = lengths;
            this.Docs = docs;
            this.DocsWithField = docsWithField;
            // Clone so the iterator's offset/length adjustments don't disturb the owner's buffer
            Value_Renamed = (BytesRef)values.Clone();
        }

        /// <summary>
        /// Returns the value for the current document, or null when the update
        /// unsets the field (no bit set in DocsWithField for this entry).
        /// </summary>
        public object Value()
        {
            if (Offset == -1)
            {
                return null;
            }
            else
            {
                Value_Renamed.Offset = Offset;
                Value_Renamed.Length = Length;
                return Value_Renamed;
            }
        }

        /// <summary>
        /// Advances to the next distinct document; when a document has several
        /// recorded updates, the last one wins.
        /// </summary>
        public int NextDoc()
        {
            if (Idx >= Size)
            {
                Offset = -1;
                return Doc_Renamed = DocIdSetIterator.NO_MORE_DOCS;
            }
            Doc_Renamed = (int)Docs.Get(Idx);
            ++Idx;
            // skip over duplicate entries for the same document, keeping the last
            while (Idx < Size && Docs.Get(Idx) == Doc_Renamed)
            {
                ++Idx;
            }
            // idx points to the "next" element
            long prevIdx = Idx - 1;
            if (!DocsWithField.Get((int)prevIdx))
            {
                Offset = -1;
            }
            else
            {
                // cannot change 'value' here because nextDoc is called before the
                // value is used, and it's a waste to clone the BytesRef when we
                // obtain the value
                Offset = (int)Offsets.Get(prevIdx);
                Length = (int)Lengths.Get(prevIdx);
            }
            return Doc_Renamed;
        }

        public int Doc()
        {
            return Doc_Renamed;
        }

        /// <summary>
        /// Rewinds the iterator to before the first entry.
        /// </summary>
        public void Reset()
        {
            Doc_Renamed = -1;
            Offset = -1;
            Idx = 0;
        }
    }

    // Marks which entries carry a real value (unset = field removed for that doc)
    private FixedBitSet DocsWithField;
    // Document ids, parallel to Offsets/Lengths
    private PagedMutable Docs;
    // Per-entry offset/length into the shared Values buffer
    private PagedGrowableWriter Offsets, Lengths;
    // All binary values appended back-to-back
    private BytesRef Values;
    // Number of entries recorded so far
    private int Size;

    public BinaryDocValuesFieldUpdates(string field, int maxDoc)
        : base(field, Type_e.BINARY)
    {
        DocsWithField = new FixedBitSet(64);
        Docs = new PagedMutable(1, 1024, PackedInts.BitsRequired(maxDoc - 1), PackedInts.COMPACT);
        Offsets = new PagedGrowableWriter(1, 1024, 1, PackedInts.FAST);
        Lengths = new PagedGrowableWriter(1, 1024, 1, PackedInts.FAST);
        Values = new BytesRef(16); // start small
        Size = 0;
    }

    /// <summary>
    /// Records an update of <paramref name="value"/> for <paramref name="doc"/>;
    /// a null value marks the field as removed for that document.
    /// </summary>
    public override void Add(int doc, object value)
    {
        // TODO: if the Sorter interface changes to take long indexes, we can remove that limitation
        if (Size == int.MaxValue)
        {
            throw new InvalidOperationException("cannot support more than Integer.MAX_VALUE doc/value entries");
        }
        BytesRef val = (BytesRef)value;
        if (val == null)
        {
            val = BinaryDocValuesUpdate.MISSING;
        }
        // grow the structures to have room for more elements
        if (Docs.Size() == Size)
        {
            Docs = Docs.Grow(Size + 1);
            Offsets = Offsets.Grow(Size + 1);
            Lengths = Lengths.Grow(Size + 1);
            DocsWithField = FixedBitSet.EnsureCapacity(DocsWithField, (int)Docs.Size());
        }
        if (val != BinaryDocValuesUpdate.MISSING)
        {
            // only mark the document as having a value in that field if the value wasn't set to null (MISSING)
            DocsWithField.Set(Size);
        }
        Docs.Set(Size, doc);
        Offsets.Set(Size, Values.Length);
        Lengths.Set(Size, val.Length);
        Values.Append(val);
        ++Size;
    }

    internal override DocValuesFieldUpdates.Iterator GetIterator()
    {
        PagedMutable docs = this.Docs;
        PagedGrowableWriter offsets = this.Offsets;
        PagedGrowableWriter lengths = this.Lengths;
        BytesRef values = this.Values;
        FixedBitSet docsWithField = this.DocsWithField;
        // sort the entries by document id so the iterator can walk them in order
        new InPlaceMergeSorterAnonymousInnerClassHelper(this, docs, offsets, lengths, docsWithField).Sort(0, Size);
        return new Iterator(Size, offsets, lengths, docs, values, docsWithField);
    }

    /// <summary>
    /// In-place merge sorter that keeps Docs, Offsets, Lengths and the
    /// DocsWithField bits aligned while sorting by document id.
    /// </summary>
    private class InPlaceMergeSorterAnonymousInnerClassHelper : InPlaceMergeSorter
    {
        private readonly BinaryDocValuesFieldUpdates OuterInstance;
        private PagedMutable Docs;
        private PagedGrowableWriter Offsets;
        private PagedGrowableWriter Lengths;
        private FixedBitSet DocsWithField;

        public InPlaceMergeSorterAnonymousInnerClassHelper(BinaryDocValuesFieldUpdates outerInstance, PagedMutable docs, PagedGrowableWriter offsets, PagedGrowableWriter lengths, FixedBitSet docsWithField)
        {
            this.OuterInstance = outerInstance;
            this.Docs = docs;
            this.Offsets = offsets;
            this.Lengths = lengths;
            this.DocsWithField = docsWithField;
        }

        protected override void Swap(int i, int j)
        {
            long tmpDoc = Docs.Get(j);
            Docs.Set(j, Docs.Get(i));
            Docs.Set(i, tmpDoc);

            long tmpOffset = Offsets.Get(j);
            Offsets.Set(j, Offsets.Get(i));
            Offsets.Set(i, tmpOffset);

            long tmpLength = Lengths.Get(j);
            Lengths.Set(j, Lengths.Get(i));
            Lengths.Set(i, tmpLength);

            // swap the two bits via explicit set/clear (FixedBitSet has no swap)
            bool tmpBool = DocsWithField.Get(j);
            if (DocsWithField.Get(i))
            {
                DocsWithField.Set(j);
            }
            else
            {
                DocsWithField.Clear(j);
            }
            if (tmpBool)
            {
                DocsWithField.Set(i);
            }
            else
            {
                DocsWithField.Clear(i);
            }
        }

        protected override int Compare(int i, int j)
        {
            int x = (int)Docs.Get(i);
            int y = (int)Docs.Get(j);
            return (x < y) ? -1 : ((x == y) ? 0 : 1);
        }
    }

    /// <summary>
    /// Appends all entries of <paramref name="other"/> to this instance,
    /// rebasing its value offsets onto this instance's Values buffer.
    /// </summary>
    public override void Merge(DocValuesFieldUpdates other)
    {
        BinaryDocValuesFieldUpdates otherUpdates = (BinaryDocValuesFieldUpdates)other;
        // BUGFIX: compute the combined size in 64-bit arithmetic. The original
        // "int newSize = Size + otherUpdates.Size" could silently wrap negative
        // on overflow, so the guard below could never fire.
        long newSize = (long)Size + otherUpdates.Size;
        if (newSize > int.MaxValue)
        {
            throw new InvalidOperationException("cannot support more than Integer.MAX_VALUE doc/value entries; size=" + Size + " other.size=" + otherUpdates.Size);
        }
        Docs = Docs.Grow((int)newSize);
        Offsets = Offsets.Grow((int)newSize);
        Lengths = Lengths.Grow((int)newSize);
        DocsWithField = FixedBitSet.EnsureCapacity(DocsWithField, (int)Docs.Size());
        for (int i = 0; i < otherUpdates.Size; i++)
        {
            int doc = (int)otherUpdates.Docs.Get(i);
            if (otherUpdates.DocsWithField.Get(i))
            {
                DocsWithField.Set(Size);
            }
            Docs.Set(Size, doc);
            Offsets.Set(Size, Values.Length + otherUpdates.Offsets.Get(i)); // correct relative offset
            Lengths.Set(Size, otherUpdates.Lengths.Get(i));
            ++Size;
        }
        Values.Append(otherUpdates.Values);
    }

    /// <summary>
    /// Returns true when at least one update has been recorded.
    /// </summary>
    public override bool Any()
    {
        return Size > 0;
    }
}
}
| |
#region Header
/**
* JsonReader.cs
* Stream-like access to JSON text.
*
* The authors disclaim copyright to this source code. For more details, see
* the COPYING file included with this distribution.
**/
#endregion
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace LitJson
{
/// <summary>
/// Kinds of tokens produced by <see cref="JsonReader"/> while scanning JSON text.
/// </summary>
public enum JsonToken
{
// No token has been read yet (the state Read() resets to)
None,
// '{' - start of a JSON object
ObjectStart,
// A member (key) name inside an object
PropertyName,
// '}' - end of a JSON object
ObjectEnd,
// '[' - start of a JSON array
ArrayStart,
// ']' - end of a JSON array
ArrayEnd,
// Numeric value that fits in a 32-bit signed integer
Int,
// Numeric value parsed as a 64-bit (or unsigned 64-bit) integer
Long,
// Numeric value with a fractional part or exponent
Double,
// Quoted string value
String,
// true / false literal
Boolean,
// null literal
Null
}
/// <summary>
/// Stream-like reader that exposes JSON text as a sequence of tokens
/// (see <see cref="JsonToken"/>), driven by a table-based LL(1) parser
/// over the output of the Lexer.
/// </summary>
public class JsonReader
{
#region Fields
// LL(1) parse table shared by all readers: maps a grammar symbol (row)
// to the production to expand for a given lookahead token (column).
// Populated once by the static constructor.
private static IDictionary<int, IDictionary<int, int[]>> parse_table;
// Pending grammar symbols of the pushdown automaton
private Stack<int> automaton_stack;
// Token most recently delivered by the lexer
private int current_input;
// Grammar symbol most recently popped from the automaton stack
private int current_symbol;
private bool end_of_json;
private bool end_of_input;
private Lexer lexer;
// True while the parser is positioned inside a quoted string
private bool parser_in_string;
// Set by ProcessSymbol when a complete token is ready to be returned
private bool parser_return;
private bool read_started;
private TextReader reader;
// True when this instance created the reader and must close it
private bool reader_is_owned;
private bool skip_non_members;
private object token_value;
private JsonToken token;
#endregion
#region Public Properties
// Whether the lexer accepts JavaScript-style comments in the input
public bool AllowComments {
get { return lexer.AllowComments; }
set { lexer.AllowComments = value; }
}
// Whether the lexer accepts single-quoted strings in the input
public bool AllowSingleQuotedStrings {
get { return lexer.AllowSingleQuotedStrings; }
set { lexer.AllowSingleQuotedStrings = value; }
}
// NOTE(review): the backing field is not consulted inside this class;
// presumably read by a consumer (e.g. JsonMapper) - confirm.
public bool SkipNonMembers {
get { return skip_non_members; }
set { skip_non_members = value; }
}
// True once the underlying input has been exhausted (or Close() called)
public bool EndOfInput {
get { return end_of_input; }
}
// True when a complete top-level JSON value has been consumed
public bool EndOfJson {
get { return end_of_json; }
}
// Kind of the token produced by the last successful Read()
public JsonToken Token {
get { return token; }
}
// Value of the last token (string, number, bool or null)
public object Value {
get { return token_value; }
}
#endregion
#region Constructors
// Build the shared parse table before the first instance is used
static JsonReader ()
{
PopulateParseTable ();
}
/// <summary>
/// Creates a reader over an in-memory JSON string; the internal
/// StringReader is owned and closed by this instance.
/// </summary>
public JsonReader (string json_text) :
this (new StringReader (json_text), true)
{
}
/// <summary>
/// Creates a reader over an existing TextReader; the caller retains
/// ownership of the reader.
/// </summary>
public JsonReader (TextReader reader) :
this (reader, false)
{
}
private JsonReader (TextReader reader, bool owned)
{
if (reader == null)
throw new ArgumentNullException ("reader");
parser_in_string = false;
parser_return = false;
read_started = false;
// Seed the automaton: expect a complete JSON text, then end of input
automaton_stack = new Stack<int> ();
automaton_stack.Push ((int) ParserToken.End);
automaton_stack.Push ((int) ParserToken.Text);
lexer = new Lexer (reader);
end_of_input = false;
end_of_json = false;
skip_non_members = true;
this.reader = reader;
reader_is_owned = owned;
}
#endregion
#region Static Methods
/// <summary>
/// Builds the LL(1) parse table for the JSON grammar: one row per
/// non-terminal, one entry per admissible lookahead token.
/// </summary>
private static void PopulateParseTable ()
{
// See section A.2. of the manual for details
parse_table = new Dictionary<int, IDictionary<int, int[]>> ();
// Array -> '[' ArrayPrime
TableAddRow (ParserToken.Array);
TableAddCol (ParserToken.Array, '[',
'[',
(int) ParserToken.ArrayPrime);
// ArrayPrime -> Value ValueRest ']' | ']'
TableAddRow (ParserToken.ArrayPrime);
TableAddCol (ParserToken.ArrayPrime, '"',
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
TableAddCol (ParserToken.ArrayPrime, '[',
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
TableAddCol (ParserToken.ArrayPrime, ']',
']');
TableAddCol (ParserToken.ArrayPrime, '{',
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.Number,
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.True,
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.False,
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
TableAddCol (ParserToken.ArrayPrime, (int) ParserToken.Null,
(int) ParserToken.Value,
(int) ParserToken.ValueRest,
']');
// Object -> '{' ObjectPrime
TableAddRow (ParserToken.Object);
TableAddCol (ParserToken.Object, '{',
'{',
(int) ParserToken.ObjectPrime);
// ObjectPrime -> Pair PairRest '}' | '}'
TableAddRow (ParserToken.ObjectPrime);
TableAddCol (ParserToken.ObjectPrime, '"',
(int) ParserToken.Pair,
(int) ParserToken.PairRest,
'}');
TableAddCol (ParserToken.ObjectPrime, '}',
'}');
// Pair -> String ':' Value
TableAddRow (ParserToken.Pair);
TableAddCol (ParserToken.Pair, '"',
(int) ParserToken.String,
':',
(int) ParserToken.Value);
// PairRest -> ',' Pair PairRest | epsilon
TableAddRow (ParserToken.PairRest);
TableAddCol (ParserToken.PairRest, ',',
',',
(int) ParserToken.Pair,
(int) ParserToken.PairRest);
TableAddCol (ParserToken.PairRest, '}',
(int) ParserToken.Epsilon);
// String -> '"' CharSeq '"'
TableAddRow (ParserToken.String);
TableAddCol (ParserToken.String, '"',
'"',
(int) ParserToken.CharSeq,
'"');
// Text -> Array | Object (a top-level JSON value)
TableAddRow (ParserToken.Text);
TableAddCol (ParserToken.Text, '[',
(int) ParserToken.Array);
TableAddCol (ParserToken.Text, '{',
(int) ParserToken.Object);
// Value -> String | Array | Object | Number | True | False | Null
TableAddRow (ParserToken.Value);
TableAddCol (ParserToken.Value, '"',
(int) ParserToken.String);
TableAddCol (ParserToken.Value, '[',
(int) ParserToken.Array);
TableAddCol (ParserToken.Value, '{',
(int) ParserToken.Object);
TableAddCol (ParserToken.Value, (int) ParserToken.Number,
(int) ParserToken.Number);
TableAddCol (ParserToken.Value, (int) ParserToken.True,
(int) ParserToken.True);
TableAddCol (ParserToken.Value, (int) ParserToken.False,
(int) ParserToken.False);
TableAddCol (ParserToken.Value, (int) ParserToken.Null,
(int) ParserToken.Null);
// ValueRest -> ',' Value ValueRest | epsilon
TableAddRow (ParserToken.ValueRest);
TableAddCol (ParserToken.ValueRest, ',',
',',
(int) ParserToken.Value,
(int) ParserToken.ValueRest);
TableAddCol (ParserToken.ValueRest, ']',
(int) ParserToken.Epsilon);
}
/// <summary>
/// Adds the production <paramref name="symbols"/> for grammar rule
/// <paramref name="row"/> under lookahead token <paramref name="col"/>.
/// </summary>
private static void TableAddCol (ParserToken row, int col,
params int[] symbols)
{
parse_table[(int) row].Add (col, symbols);
}
/// <summary>
/// Creates an empty parse-table row for the given grammar rule.
/// </summary>
private static void TableAddRow (ParserToken rule)
{
parse_table.Add ((int) rule, new Dictionary<int, int[]> ());
}
#endregion
#region Private Methods
/// <summary>
/// Classifies a numeric literal: Double when it contains '.', 'e' or 'E';
/// otherwise the first of Int32/Int64/UInt64 that can represent it
/// (unsigned 64-bit values are still reported as JsonToken.Long).
/// </summary>
private void ProcessNumber (string number)
{
if (number.IndexOf ('.') != -1 ||
number.IndexOf ('e') != -1 ||
number.IndexOf ('E') != -1) {
double n_double;
if (Double.TryParse (number, out n_double)) {
token = JsonToken.Double;
token_value = n_double;
return;
}
}
int n_int32;
if (Int32.TryParse (number, out n_int32)) {
token = JsonToken.Int;
token_value = n_int32;
return;
}
long n_int64;
if (Int64.TryParse (number, out n_int64)) {
token = JsonToken.Long;
token_value = n_int64;
return;
}
ulong n_uint64;
if (UInt64.TryParse(number, out n_uint64))
{
token = JsonToken.Long;
token_value = n_uint64;
return;
}
// Shouldn't happen, but just in case, return something
token = JsonToken.Int;
token_value = 0;
}
/// <summary>
/// Maps the grammar symbol just popped from the automaton stack onto the
/// public token state, setting parser_return once a complete token is
/// ready to be handed to the caller of Read().
/// </summary>
private void ProcessSymbol ()
{
if (current_symbol == '[') {
token = JsonToken.ArrayStart;
parser_return = true;
} else if (current_symbol == ']') {
token = JsonToken.ArrayEnd;
parser_return = true;
} else if (current_symbol == '{') {
token = JsonToken.ObjectStart;
parser_return = true;
} else if (current_symbol == '}') {
token = JsonToken.ObjectEnd;
parser_return = true;
} else if (current_symbol == '"') {
// Closing quote finishes the string; opening quote only marks that
// we are inside one (a Pair may already have set PropertyName)
if (parser_in_string) {
parser_in_string = false;
parser_return = true;
} else {
if (token == JsonToken.None)
token = JsonToken.String;
parser_in_string = true;
}
} else if (current_symbol == (int) ParserToken.CharSeq) {
token_value = lexer.StringValue;
} else if (current_symbol == (int) ParserToken.False) {
token = JsonToken.Boolean;
token_value = false;
parser_return = true;
} else if (current_symbol == (int) ParserToken.Null) {
token = JsonToken.Null;
parser_return = true;
} else if (current_symbol == (int) ParserToken.Number) {
ProcessNumber (lexer.StringValue);
parser_return = true;
} else if (current_symbol == (int) ParserToken.Pair) {
token = JsonToken.PropertyName;
} else if (current_symbol == (int) ParserToken.True) {
token = JsonToken.Boolean;
token_value = true;
parser_return = true;
}
}
/// <summary>
/// Pulls the next token from the lexer into current_input; closes this
/// reader and returns false at end of input.
/// </summary>
private bool ReadToken ()
{
if (end_of_input)
return false;
lexer.NextToken ();
if (lexer.EndOfInput) {
Close ();
return false;
}
current_input = lexer.Token;
return true;
}
#endregion
/// <summary>
/// Marks the reader as finished and closes the underlying TextReader when
/// it is owned by this instance. Idempotent.
/// </summary>
public void Close ()
{
if (end_of_input)
return;
end_of_input = true;
end_of_json = true;
if (reader_is_owned)
reader.Close ();
reader = null;
}
/// <summary>
/// Advances to the next JSON token, updating Token and Value. Returns
/// false once the input is exhausted; throws JsonException on input that
/// does not match the grammar.
/// </summary>
public bool Read ()
{
if (end_of_input)
return false;
// A previous top-level value was completed; reset the automaton so a
// further JSON text in the same stream can be parsed
if (end_of_json) {
end_of_json = false;
automaton_stack.Clear ();
automaton_stack.Push ((int) ParserToken.End);
automaton_stack.Push ((int) ParserToken.Text);
}
parser_in_string = false;
parser_return = false;
token = JsonToken.None;
token_value = null;
if (! read_started) {
read_started = true;
if (! ReadToken ())
return false;
}
int[] entry_symbols;
while (true) {
// ProcessSymbol signalled a complete token: report it to the caller
if (parser_return) {
if (automaton_stack.Peek () == (int) ParserToken.End)
end_of_json = true;
return true;
}
current_symbol = automaton_stack.Pop ();
ProcessSymbol ();
// Terminal matched the lookahead: consume the input token
if (current_symbol == current_input) {
if (! ReadToken ()) {
if (automaton_stack.Peek () != (int) ParserToken.End)
throw new JsonException (
"Input doesn't evaluate to proper JSON text");
if (parser_return)
return true;
return false;
}
continue;
}
// Non-terminal: expand via the parse table; a missing entry means
// the lookahead is not admissible here (malformed JSON)
try {
entry_symbols =
parse_table[current_symbol][current_input];
} catch (KeyNotFoundException e) {
throw new JsonException ((ParserToken) current_input, e);
}
if (entry_symbols[0] == (int) ParserToken.Epsilon)
continue;
// Push the production in reverse so its first symbol is on top
for (int i = entry_symbols.Length - 1; i >= 0; i--)
automaton_stack.Push (entry_symbols[i]);
}
}
}
}
| |
/*
Copyright 2012 Michael Edwards
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//-CRE-
using Glass.Mapper.Pipelines.ConfigurationResolver.Tasks.OnDemandResolver;
using Glass.Mapper.Sc.Configuration;
using Glass.Mapper.Sc.Configuration.Attributes;
using Glass.Mapper.Sc.DataMappers;
using NUnit.Framework;
using Sitecore.Data;
using Sitecore.FakeDb;
namespace Glass.Mapper.Sc.FakeDb.DataMappers
{
[TestFixture]
public class SitecoreIgnoreMapperFixture
{
    #region Method - CanHandle

    /// <summary>
    /// The mapper must report that it can handle an ignore configuration.
    /// </summary>
    [Test]
    public void CanHandle_IgnoreCOnfigurtion_ReturnsTrue()
    {
        //Assign
        var mapper = new SitecoreIgnoreMapper();
        var config = new SitecoreIgnoreConfiguration();

        //Act
        var result = mapper.CanHandle(config, null);

        //Assert
        Assert.IsTrue(result);
    }

    #endregion

    #region Method - MapCmsToProperty

    /// <summary>
    /// Mapping from CMS to property must leave the target property untouched:
    /// the ignore mapper is a no-op in this direction.
    /// </summary>
    [Test]
    public void MapCmsToProperty_DoesNotAlterObject()
    {
        //Assign
        string targetPath = "/sitecore/content/target";
        var templateId = ID.NewID;

        using (Db database = new Db
        {
            new DbTemplate(templateId)
            {
                new DbField("Field")
                {
                    Type = "text"
                }
            },
            new Sitecore.FakeDb.DbItem("Target", ID.NewID, templateId)
            {
                {"Field", ""}
            }
        })
        {
            var fieldValue = "hello world";
            var propertyValue = "goodbye world";

            var item = database.GetItem(targetPath);
            var field = item.Fields["Field"];
            var mapper = new SitecoreIgnoreMapper();

            using (new ItemEditing(item, true))
            {
                field.Value = fieldValue;
            }

            var stub = new StubClass();
            stub.Field = propertyValue;

            var context = new SitecoreDataMappingContext(stub, item, null);

            //Act
            mapper.MapCmsToProperty(context);

            //Assert
            // Expected value first, actual second (NUnit convention)
            Assert.AreEqual(propertyValue, stub.Field);
        }
    }

    /// <summary>
    /// With auto-mapping enabled, a property marked [SitecoreIgnore] must not
    /// be populated while an unmarked one is.
    /// </summary>
    [Test]
    public void MapCmsToProperty_AutoMap()
    {
        //Assign
        string targetPath = "/sitecore/content/target";
        var templateId = ID.NewID;

        using (Db database = new Db
        {
            new DbTemplate(templateId)
            {
                new DbField("Field")
                {
                    Type = "text"
                }
            },
            new Sitecore.FakeDb.DbItem("Target", ID.NewID, templateId)
            {
                {"Field", ""}
            }
        })
        {
            var fieldValue = "hello world";

            var item = database.GetItem(targetPath);
            var field = item.Fields["Field"];

            var context = Context.Create(Utilities.CreateStandardResolver());
            context.Load(new OnDemandLoader<SitecoreTypeConfiguration>(typeof(StubClassNotIgnored)));
            var service = new SitecoreService(item.Database, context);

            using (new ItemEditing(item, true))
            {
                field.Value = fieldValue;
            }

            //Act
            var notIgnored = service.GetItem<StubClassNotIgnored>(targetPath);
            var ignored = service.GetItem<StubClassIgnored>(targetPath);

            //Assert
            Assert.AreEqual(fieldValue, notIgnored.Field);
            Assert.AreEqual(null, ignored.Field);
        }
    }

    #endregion

    #region Method - MapPropertyToCms

    /// <summary>
    /// Mapping from property to CMS must leave the item's field untouched:
    /// the ignore mapper is a no-op in this direction too.
    /// </summary>
    [Test]
    public void MapPropertyToCms_DoesNotAlterObject()
    {
        //Assign
        string targetPath = "/sitecore/content/target";
        var templateId = ID.NewID;

        using (Db database = new Db
        {
            new DbTemplate(templateId)
            {
                new DbField("Field")
                {
                    Type = "text"
                }
            },
            new Sitecore.FakeDb.DbItem("Target", ID.NewID, templateId)
            {
                {"Field", ""}
            }
        })
        {
            var fieldValue = "hello world";
            var propertyValue = "goodbye world";

            var item = database.GetItem(targetPath);
            var field = item.Fields["Field"];
            var mapper = new SitecoreIgnoreMapper();

            using (new ItemEditing(item, true))
            {
                field.Value = fieldValue;
            }

            var stub = new StubClass();
            stub.Field = propertyValue;

            var context = new SitecoreDataMappingContext(stub, item, null);

            //Act
            mapper.MapPropertyToCms(context);

            //Assert
            Assert.AreEqual(fieldValue, item.Fields["Field"].Value);
        }
    }

    #endregion

    #region Stub

    // Plain POCO target used by the manual-mapping tests
    public class StubClass
    {
        public string Field { get; set; }
    }

    // Auto-mapped type: Field should be populated from the item
    [SitecoreType(AutoMap = true)]
    public class StubClassNotIgnored
    {
        public string Field { get; set; }
    }

    // Auto-mapped type with an ignored property: Field must stay null
    [SitecoreType(AutoMap = true)]
    public class StubClassIgnored
    {
        [SitecoreIgnore]
        public string Field { get; set; }
    }

    #endregion
}
}
| |
#region License
/*
* All content copyright Terracotta, Inc., unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
#endregion
using System;
using System.Collections.Generic;
using Quartz.Impl.Matchers;
using Quartz.Spi;
namespace Quartz
{
/// <summary>
/// This is the main interface of a Quartz Scheduler.
/// </summary>
/// <remarks>
/// <para>
/// A <see cref="IScheduler"/> maintains a registry of
/// <see cref="IJobDetail"/>s and <see cref="ITrigger"/>s. Once
/// registered, the <see cref="IScheduler"/> is responsible for executing
/// <see cref="IJob"/> s when their associated <see cref="ITrigger"/> s
/// fire (when their scheduled time arrives).
/// </para>
/// <para>
/// <see cref="IScheduler"/> instances are produced by a
/// <see cref="ISchedulerFactory"/>. A scheduler that has already been
/// created/initialized can be found and used through the same factory that
/// produced it. After a <see cref="IScheduler"/> has been created, it is in
/// "stand-by" mode, and must have its <see cref="IScheduler.Start"/> method
/// called before it will fire any <see cref="IJob"/>s.
/// </para>
/// <para>
/// <see cref="IJob"/> s are to be created by the 'client program', by
/// defining a class that implements the <see cref="IJob"/> interface.
/// <see cref="IJobDetail"/> objects are then created (also by the client) to
/// define individual instances of the <see cref="IJob"/>.
/// <see cref="IJobDetail"/> instances can then be registered with the
/// <see cref="IScheduler"/> via the %IScheduler.ScheduleJob(JobDetail,
/// Trigger)% or %IScheduler.AddJob(JobDetail, bool)% method.
/// </para>
/// <para>
/// <see cref="ITrigger"/> s can then be defined to fire individual
/// <see cref="IJob"/> instances based on given schedules.
/// <see cref="ISimpleTrigger"/> s are most useful for one-time firings, or
/// firing at an exact moment in time, with N repeats with a given delay between
/// them. <see cref="ICronTrigger"/> s allow scheduling based on time of day,
/// day of week, day of month, and month of year.
/// </para>
/// <para>
/// <see cref="IJob"/> s and <see cref="ITrigger"/> s have a name and
/// group associated with them, which should uniquely identify them within a single
/// <see cref="IScheduler"/>. The 'group' feature may be useful for creating
/// logical groupings or categorizations of <see cref="IJob"/>s and
/// <see cref="ITrigger"/>s. If you don't have need for assigning a group to a
/// given <see cref="IJob"/>s or <see cref="ITrigger"/>s, then you can use
/// the <see cref="SchedulerConstants.DefaultGroup"/> constant defined on
/// this interface.
/// </para>
/// <para>
/// Stored <see cref="IJob"/> s can also be 'manually' triggered through the
/// use of the %IScheduler.TriggerJob(string, string)% function.
/// </para>
/// <para>
/// Client programs may also be interested in the 'listener' interfaces that are
/// available from Quartz. The <see cref="IJobListener"/> interface provides
/// notifications of <see cref="IJob"/> executions. The
/// <see cref="ITriggerListener"/> interface provides notifications of
/// <see cref="ITrigger"/> firings. The <see cref="ISchedulerListener"/>
/// interface provides notifications of <see cref="IScheduler"/> events and
/// errors. Listeners can be associated with local schedulers through the
/// <see cref="IListenerManager" /> interface.
/// </para>
/// <para>
/// The setup/configuration of a <see cref="IScheduler"/> instance is very
/// customizable. Please consult the documentation distributed with Quartz.
/// </para>
/// </remarks>
/// <seealso cref="IJob"/>
/// <seealso cref="IJobDetail"/>
/// <seealso cref="ITrigger"/>
/// <seealso cref="IJobListener"/>
/// <seealso cref="ITriggerListener"/>
/// <seealso cref="ISchedulerListener"/>
/// <author>Marko Lahma (.NET)</author>
public interface IScheduler
{
/// <summary>
/// returns true if the given JobGroup
/// is paused
/// </summary>
/// <param name="groupName"></param>
/// <returns></returns>
bool IsJobGroupPaused(string groupName);
/// <summary>
/// returns true if the given TriggerGroup
/// is paused
/// </summary>
/// <param name="groupName"></param>
/// <returns></returns>
bool IsTriggerGroupPaused(string groupName);
/// <summary>
/// Returns the name of the <see cref="IScheduler" />.
/// </summary>
string SchedulerName { get; }
/// <summary>
/// Returns the instance Id of the <see cref="IScheduler" />.
/// </summary>
string SchedulerInstanceId { get; }
/// <summary>
/// Returns the <see cref="SchedulerContext" /> of the <see cref="IScheduler" />.
/// </summary>
SchedulerContext Context { get; }
/// <summary>
/// Reports whether the <see cref="IScheduler" /> is in stand-by mode.
/// </summary>
/// <seealso cref="Standby()" />
/// <seealso cref="Start()" />
bool InStandbyMode { get; }
/// <summary>
/// Reports whether the <see cref="IScheduler" /> has been Shutdown.
/// </summary>
bool IsShutdown { get; }
/// <summary>
/// Get a <see cref="SchedulerMetaData" /> object describing the settings
/// and capabilities of the scheduler instance.
/// </summary>
/// <remarks>
/// Note that the data returned is an 'instantaneous' snap-shot, and that as
/// soon as it's returned, the meta data values may be different.
/// </remarks>
SchedulerMetaData GetMetaData();
/// <summary>
/// Return a list of <see cref="IJobExecutionContext" /> objects that
/// represent all currently executing Jobs in this Scheduler instance.
/// </summary>
/// <remarks>
/// <para>
/// This method is not cluster aware. That is, it will only return Jobs
/// currently executing in this Scheduler instance, not across the entire
/// cluster.
/// </para>
/// <para>
/// Note that the list returned is an 'instantaneous' snap-shot, and that as
/// soon as it's returned, the true list of executing jobs may be different.
/// Also please read the doc associated with <see cref="IJobExecutionContext" />-
/// especially if you're using remoting.
/// </para>
/// </remarks>
/// <seealso cref="IJobExecutionContext" />
IList<IJobExecutionContext> GetCurrentlyExecutingJobs();
/// <summary>
/// Set the <see cref="JobFactory" /> that will be responsible for producing
/// instances of <see cref="IJob" /> classes.
/// </summary>
/// <remarks>
/// JobFactories may be of use to those wishing to have their application
/// produce <see cref="IJob" /> instances via some special mechanism, such as to
/// give the opportunity for dependency injection.
/// </remarks>
/// <seealso cref="IJobFactory" />
IJobFactory JobFactory { set; }
/// <summary>
/// Get a reference to the scheduler's <see cref="IListenerManager" />,
/// through which listeners may be registered.
/// </summary>
/// <returns>the scheduler's <see cref="IListenerManager" /></returns>
/// <seealso cref="ListenerManager" />
/// <seealso cref="IJobListener" />
/// <seealso cref="ITriggerListener" />
/// <seealso cref="ISchedulerListener" />
IListenerManager ListenerManager { get; }
/// <summary>
/// Get the names of all known <see cref="IJobDetail" /> groups.
/// </summary>
IList<string> GetJobGroupNames();
/// <summary>
/// Get the names of all known <see cref="ITrigger" /> groups.
/// </summary>
IList<string> GetTriggerGroupNames();
/// <summary>
/// Get the names of all <see cref="ITrigger" /> groups that are paused.
/// </summary>
Collection.ISet<string> GetPausedTriggerGroups();
/// <summary>
/// Starts the <see cref="IScheduler" />'s threads that fire <see cref="ITrigger" />s.
/// When a scheduler is first created it is in "stand-by" mode, and will not
/// fire triggers. The scheduler can also be put into stand-by mode by
/// calling the <see cref="Standby" /> method.
/// </summary>
/// <remarks>
/// The misfire/recovery process will be started, if it is the initial call
/// to this method on this scheduler instance.
/// </remarks>
/// <seealso cref="StartDelayed(TimeSpan)"/>
/// <seealso cref="Standby"/>
/// <seealso cref="Shutdown(bool)"/>
void Start();
/// <summary>
/// Calls <see cref="Start" /> after the indicated delay.
/// (This call does not block). This can be useful within applications that
/// have initializers that create the scheduler immediately, before the
/// resources needed by the executing jobs have been fully initialized.
/// </summary>
/// <seealso cref="Start"/>
/// <seealso cref="Standby"/>
/// <seealso cref="Shutdown(bool)"/>
void StartDelayed(TimeSpan delay);
/// <summary>
/// Whether the scheduler has been started.
/// </summary>
/// <remarks>
/// Note: This only reflects whether <see cref="Start" /> has ever
/// been called on this Scheduler, so it will return <see langword="true" /> even
/// if the <see cref="IScheduler" /> is currently in standby mode or has been
/// since shutdown.
/// </remarks>
/// <seealso cref="Start" />
/// <seealso cref="IsShutdown" />
/// <seealso cref="InStandbyMode" />
bool IsStarted { get; }
/// <summary>
/// Temporarily halts the <see cref="IScheduler" />'s firing of <see cref="ITrigger" />s.
/// </summary>
/// <remarks>
/// <para>
/// When <see cref="Start" /> is called (to bring the scheduler out of
/// stand-by mode), trigger misfire instructions will NOT be applied
/// during the execution of the <see cref="Start" /> method - any misfires
/// will be detected immediately afterward (by the <see cref="IJobStore" />'s
/// normal process).
/// </para>
/// <para>
/// The scheduler is not destroyed, and can be re-started at any time.
/// </para>
/// </remarks>
/// <seealso cref="Start()"/>
/// <seealso cref="PauseAll()"/>
void Standby();
/// <summary>
/// Halts the <see cref="IScheduler" />'s firing of <see cref="ITrigger" />s,
/// and cleans up all resources associated with the Scheduler. Equivalent to
/// <see cref="Shutdown(bool)" />.
/// </summary>
/// <remarks>
/// The scheduler cannot be re-started.
/// </remarks>
/// <seealso cref="Shutdown(bool)" />
void Shutdown();
/// <summary>
/// Halts the <see cref="IScheduler" />'s firing of <see cref="ITrigger" />s,
/// and cleans up all resources associated with the Scheduler.
/// </summary>
/// <remarks>
/// The scheduler cannot be re-started.
/// </remarks>
/// <param name="waitForJobsToComplete">
/// if <see langword="true" /> the scheduler will not allow this method
/// to return until all currently executing jobs have completed.
/// </param>
/// <seealso cref="Shutdown()" />
void Shutdown(bool waitForJobsToComplete);
/// <summary>
/// Add the given <see cref="IJobDetail" /> to the
/// Scheduler, and associate the given <see cref="ITrigger" /> with
/// it.
/// </summary>
/// <remarks>
/// If the given Trigger does not reference any <see cref="IJob" />, then it
/// will be set to reference the Job passed with it into this method.
/// </remarks>
DateTimeOffset ScheduleJob(IJobDetail jobDetail, ITrigger trigger);
/// <summary>
/// Schedule the given <see cref="ITrigger" /> with the
/// <see cref="IJob" /> identified by the <see cref="ITrigger" />'s settings.
/// </summary>
DateTimeOffset ScheduleJob(ITrigger trigger);
/// <summary>
/// Schedule all of the given jobs with the related set of triggers.
/// </summary>
/// <remarks>
/// <para>If any of the given jobs or triggers already exist (or more
/// specifically, if the keys are not unique) and the replace
/// parameter is not set to true then an exception will be thrown.</para>
/// </remarks>
void ScheduleJobs(IDictionary<IJobDetail, Collection.ISet<ITrigger>> triggersAndJobs, bool replace);
/// <summary>
/// Schedule the given job with the related set of triggers.
/// </summary>
/// <remarks>
/// If any of the given job or triggers already exist (or more
/// specifically, if the keys are not unique) and the replace
/// parameter is not set to true then an exception will be thrown.
/// </remarks>
/// <param name="jobDetail"></param>
/// <param name="triggersForJob"></param>
/// <param name="replace"></param>
void ScheduleJob(IJobDetail jobDetail, Collection.ISet<ITrigger> triggersForJob, bool replace);
/// <summary>
/// Remove the indicated <see cref="ITrigger" /> from the scheduler.
/// <para>If the related job does not have any other triggers, and the job is
/// not durable, then the job will also be deleted.</para>
/// </summary>
bool UnscheduleJob(TriggerKey triggerKey);
/// <summary>
/// Remove all of the indicated <see cref="ITrigger" />s from the scheduler.
/// </summary>
/// <remarks>
/// <para>If the related job does not have any other triggers, and the job is
/// not durable, then the job will also be deleted.</para>
/// Note that while this bulk operation is likely more efficient than
/// invoking <see cref="UnscheduleJob(TriggerKey)" /> several
/// times, it may have the adverse affect of holding data locks for a
/// single long duration of time (rather than lots of small durations
/// of time).
/// </remarks>
bool UnscheduleJobs(IList<TriggerKey> triggerKeys);
/// <summary>
/// Remove (delete) the <see cref="ITrigger" /> with the
/// given key, and store the new given one - which must be associated
/// with the same job (the new trigger must have the job name &amp; group specified)
/// - however, the new trigger need not have the same name as the old trigger.
/// </summary>
/// <param name="triggerKey">The <see cref="ITrigger" /> to be replaced.</param>
/// <param name="newTrigger">
/// The new <see cref="ITrigger" /> to be stored.
/// </param>
/// <returns>
/// <see langword="null" /> if a <see cref="ITrigger" /> with the given
/// name and group was not found and removed from the store (and the
/// new trigger is therefore not stored), otherwise
/// the first fire time of the newly scheduled trigger.
/// </returns>
DateTimeOffset? RescheduleJob(TriggerKey triggerKey, ITrigger newTrigger);
/// <summary>
/// Add the given <see cref="IJob" /> to the Scheduler - with no associated
/// <see cref="ITrigger" />. The <see cref="IJob" /> will be 'dormant' until
/// it is scheduled with a <see cref="ITrigger" />, or <see cref="TriggerJob(Quartz.JobKey)" />
/// is called for it.
/// </summary>
/// <remarks>
/// The <see cref="IJob" /> must by definition be 'durable', if it is not,
/// SchedulerException will be thrown.
/// </remarks>
void AddJob(IJobDetail jobDetail, bool replace);
/// <summary>
/// Delete the identified <see cref="IJob" /> from the Scheduler - and any
/// associated <see cref="ITrigger" />s.
/// </summary>
/// <returns> true if the Job was found and deleted.</returns>
bool DeleteJob(JobKey jobKey);
/// <summary>
/// Delete the identified jobs from the Scheduler - and any
/// associated <see cref="ITrigger" />s.
/// </summary>
/// <remarks>
/// <para>Note that while this bulk operation is likely more efficient than
/// invoking <see cref="DeleteJob(JobKey)" /> several
/// times, it may have the adverse affect of holding data locks for a
/// single long duration of time (rather than lots of small durations
/// of time).</para>
/// </remarks>
/// <returns>
/// true if all of the Jobs were found and deleted, false if
/// one or more were not deleted.
/// </returns>
bool DeleteJobs(IList<JobKey> jobKeys);
/// <summary>
/// Trigger the identified <see cref="IJobDetail" />
/// (Execute it now).
/// </summary>
void TriggerJob(JobKey jobKey);
/// <summary>
/// Trigger the identified <see cref="IJobDetail" /> (Execute it now).
/// </summary>
/// <param name="data">
/// the (possibly <see langword="null" />) JobDataMap to be
/// associated with the trigger that fires the job immediately.
/// </param>
/// <param name="jobKey">
/// The <see cref="JobKey"/> of the <see cref="IJob" /> to be executed.
/// </param>
void TriggerJob(JobKey jobKey, JobDataMap data);
/// <summary>
/// Pause the <see cref="IJobDetail" /> with the given
/// key - by pausing all of its current <see cref="ITrigger" />s.
/// </summary>
void PauseJob(JobKey jobKey);
/// <summary>
/// Pause all of the <see cref="IJobDetail" />s in the
/// matching groups - by pausing all of their <see cref="ITrigger" />s.
/// </summary>
/// <remarks>
/// <para>
/// The Scheduler will "remember" that the groups are paused, and impose the
/// pause on any new jobs that are added to any of those groups until it is resumed.
/// </para>
/// <para>NOTE: There is a limitation that only exactly matched groups
/// can be remembered as paused. For example, if there are pre-existing
/// job in groups "aaa" and "bbb" and a matcher is given to pause
/// groups that start with "a" then the group "aaa" will be remembered
/// as paused and any subsequently added jobs in group "aaa" will be paused,
/// however if a job is added to group "axx" it will not be paused,
/// as "axx" wasn't known at the time the "group starts with a" matcher
/// was applied. HOWEVER, if there are pre-existing groups "aaa" and
/// "bbb" and a matcher is given to pause the group "axx" (with a
/// group equals matcher) then no jobs will be paused, but it will be
/// remembered that group "axx" is paused and later when a job is added
/// in that group, it will become paused.</para>
/// </remarks>
/// <seealso cref="ResumeJobs" />
void PauseJobs(GroupMatcher<JobKey> matcher);
/// <summary>
/// Pause the <see cref="ITrigger" /> with the given key.
/// </summary>
void PauseTrigger(TriggerKey triggerKey);
/// <summary>
/// Pause all of the <see cref="ITrigger" />s in the groups matching.
/// </summary>
/// <remarks>
/// <para>
/// The Scheduler will "remember" all the groups paused, and impose the
/// pause on any new triggers that are added to any of those groups until it is resumed.
/// </para>
/// <para>NOTE: There is a limitation that only exactly matched groups
/// can be remembered as paused. For example, if there are pre-existing
/// triggers in groups "aaa" and "bbb" and a matcher is given to pause
/// groups that start with "a" then the group "aaa" will be remembered as
/// paused and any subsequently added triggers in that group be paused,
/// however if a trigger is added to group "axx" it will not be paused,
/// as "axx" wasn't known at the time the "group starts with a" matcher
/// was applied. HOWEVER, if there are pre-existing groups "aaa" and
/// "bbb" and a matcher is given to pause the group "axx" (with a
/// group equals matcher) then no triggers will be paused, but it will be
/// remembered that group "axx" is paused and later when a trigger is added
/// in that group, it will become paused.</para>
/// </remarks>
/// <seealso cref="ResumeTriggers" />
void PauseTriggers(GroupMatcher<TriggerKey> matcher);
/// <summary>
/// Resume (un-pause) the <see cref="IJobDetail" /> with
/// the given key.
/// </summary>
/// <remarks>
/// If any of the <see cref="IJob" />'s<see cref="ITrigger" /> s missed one
/// or more fire-times, then the <see cref="ITrigger" />'s misfire
/// instruction will be applied.
/// </remarks>
void ResumeJob(JobKey jobKey);
/// <summary>
/// Resume (un-pause) all of the <see cref="IJobDetail" />s
/// in matching groups.
/// </summary>
/// <remarks>
/// If any of the <see cref="IJob" /> s had <see cref="ITrigger" /> s that
/// missed one or more fire-times, then the <see cref="ITrigger" />'s
/// misfire instruction will be applied.
/// </remarks>
/// <seealso cref="PauseJobs" />
void ResumeJobs(GroupMatcher<JobKey> matcher);
/// <summary>
/// Resume (un-pause) the <see cref="ITrigger" /> with the given
/// key.
/// </summary>
/// <remarks>
/// If the <see cref="ITrigger" /> missed one or more fire-times, then the
/// <see cref="ITrigger" />'s misfire instruction will be applied.
/// </remarks>
void ResumeTrigger(TriggerKey triggerKey);
/// <summary>
/// Resume (un-pause) all of the <see cref="ITrigger" />s in matching groups.
/// </summary>
/// <remarks>
/// If any <see cref="ITrigger" /> missed one or more fire-times, then the
/// <see cref="ITrigger" />'s misfire instruction will be applied.
/// </remarks>
/// <seealso cref="PauseTriggers" />
void ResumeTriggers(GroupMatcher<TriggerKey> matcher);
/// <summary>
/// Pause all triggers - similar to calling <see cref="PauseTriggers" />
/// on every group, however, after using this method <see cref="ResumeAll()" />
/// must be called to clear the scheduler's state of 'remembering' that all
/// new triggers will be paused as they are added.
/// </summary>
/// <remarks>
/// When <see cref="ResumeAll()" /> is called (to un-pause), trigger misfire
/// instructions WILL be applied.
/// </remarks>
/// <seealso cref="ResumeAll()" />
/// <seealso cref="PauseTriggers" />
/// <seealso cref="Standby()" />
void PauseAll();
/// <summary>
/// Resume (un-pause) all triggers - similar to calling
/// <see cref="ResumeTriggers" /> on every group.
/// </summary>
/// <remarks>
/// If any <see cref="ITrigger" /> missed one or more fire-times, then the
/// <see cref="ITrigger" />'s misfire instruction will be applied.
/// </remarks>
/// <seealso cref="PauseAll()" />
void ResumeAll();
/// <summary>
/// Get the keys of all the <see cref="IJobDetail" />s in the matching groups.
/// </summary>
Collection.ISet<JobKey> GetJobKeys(GroupMatcher<JobKey> matcher);
/// <summary>
/// Get all <see cref="ITrigger" /> s that are associated with the
/// identified <see cref="IJobDetail" />.
/// </summary>
/// <remarks>
/// The returned Trigger objects will be snap-shots of the actual stored
/// triggers. If you wish to modify a trigger, you must re-store the
/// trigger afterward (e.g. see <see cref="RescheduleJob(TriggerKey, ITrigger)" />).
/// </remarks>
IList<ITrigger> GetTriggersOfJob(JobKey jobKey);
/// <summary>
/// Get the names of all the <see cref="ITrigger" />s in the given
/// groups.
/// </summary>
Collection.ISet<TriggerKey> GetTriggerKeys(GroupMatcher<TriggerKey> matcher);
/// <summary>
/// Get the <see cref="IJobDetail" /> for the <see cref="IJob" />
/// instance with the given key .
/// </summary>
/// <remarks>
/// The returned JobDetail object will be a snap-shot of the actual stored
/// JobDetail. If you wish to modify the JobDetail, you must re-store the
/// JobDetail afterward (e.g. see <see cref="AddJob(IJobDetail, bool)" />).
/// </remarks>
IJobDetail GetJobDetail(JobKey jobKey);
/// <summary>
/// Get the <see cref="ITrigger" /> instance with the given key.
/// </summary>
/// <remarks>
/// The returned Trigger object will be a snap-shot of the actual stored
/// trigger. If you wish to modify the trigger, you must re-store the
/// trigger afterward (e.g. see <see cref="RescheduleJob(TriggerKey, ITrigger)" />).
/// </remarks>
ITrigger GetTrigger(TriggerKey triggerKey);
/// <summary>
/// Get the current state of the identified <see cref="ITrigger" />.
/// </summary>
/// <seealso cref="TriggerState.Normal" />
/// <seealso cref="TriggerState.Paused" />
/// <seealso cref="TriggerState.Complete" />
/// <seealso cref="TriggerState.Blocked" />
/// <seealso cref="TriggerState.Error" />
/// <seealso cref="TriggerState.None" />
TriggerState GetTriggerState(TriggerKey triggerKey);
/// <summary>
/// Add (register) the given <see cref="ICalendar" /> to the Scheduler.
/// </summary>
/// <param name="calName">Name of the calendar.</param>
/// <param name="calendar">The calendar.</param>
/// <param name="replace">if set to <c>true</c> [replace].</param>
/// <param name="updateTriggers">whether or not to update existing triggers that
/// referenced the already existing calendar so that they are 'correct'
/// based on the new trigger.</param>
void AddCalendar(string calName, ICalendar calendar, bool replace, bool updateTriggers);
/// <summary>
/// Delete the identified <see cref="ICalendar" /> from the Scheduler.
/// </summary>
/// <remarks>
/// If removal of the <code>Calendar</code> would result in
/// <see cref="ITrigger" />s pointing to non-existent calendars, then a
/// <see cref="SchedulerException" /> will be thrown.
/// </remarks>
/// <param name="calName">Name of the calendar.</param>
/// <returns>true if the Calendar was found and deleted.</returns>
bool DeleteCalendar(string calName);
/// <summary>
/// Get the <see cref="ICalendar" /> instance with the given name.
/// </summary>
ICalendar GetCalendar(string calName);
/// <summary>
/// Get the names of all registered <see cref="ICalendar" />.
/// </summary>
IList<string> GetCalendarNames();
/// <summary>
/// Request the interruption, within this Scheduler instance, of all
/// currently executing instances of the identified <see cref="IJob" />, which
/// must be an implementor of the <see cref="IInterruptableJob" /> interface.
/// </summary>
/// <remarks>
/// <para>
/// If more than one instance of the identified job is currently executing,
/// the <see cref="IInterruptableJob.Interrupt" /> method will be called on
/// each instance. However, there is a limitation that in the case that
/// <see cref="Interrupt(JobKey)" /> on one instances throws an exception, all
/// remaining instances (that have not yet been interrupted) will not have
/// their <see cref="Interrupt(JobKey)" /> method called.
/// </para>
///
/// <para>
/// If you wish to interrupt a specific instance of a job (when more than
/// one is executing) you can do so by calling
/// <see cref="GetCurrentlyExecutingJobs" /> to obtain a handle
/// to the job instance, and then invoke <see cref="Interrupt(JobKey)" /> on it
/// yourself.
/// </para>
/// <para>
/// This method is not cluster aware. That is, it will only interrupt
/// instances of the identified InterruptableJob currently executing in this
/// Scheduler instance, not across the entire cluster.
/// </para>
/// </remarks>
/// <returns>
/// true is at least one instance of the identified job was found and interrupted.
/// </returns>
/// <seealso cref="IInterruptableJob" />
/// <seealso cref="GetCurrentlyExecutingJobs" />
bool Interrupt(JobKey jobKey);
/// <summary>
/// Request the interruption, within this Scheduler instance, of the
/// identified executing job instance, which
/// must be an implementor of the <see cref="IInterruptableJob" /> interface.
/// </summary>
/// <remarks>
/// This method is not cluster aware. That is, it will only interrupt
/// instances of the identified InterruptableJob currently executing in this
/// Scheduler instance, not across the entire cluster.
/// </remarks>
/// <seealso cref="IInterruptableJob.Interrupt()" />
/// <seealso cref="GetCurrentlyExecutingJobs()" />
/// <seealso cref="IJobExecutionContext.FireInstanceId" />
/// <seealso cref="Interrupt(JobKey)" />
/// <param name="fireInstanceId">
/// the unique identifier of the job instance to be interrupted (see <see cref="IJobExecutionContext.FireInstanceId" />)
/// </param>
/// <returns>true if the identified job instance was found and interrupted.</returns>
bool Interrupt(string fireInstanceId);
/// <summary>
/// Determine whether a <see cref="IJob" /> with the given identifier already
/// exists within the scheduler.
/// </summary>
/// <param name="jobKey">the identifier to check for</param>
/// <returns>true if a Job exists with the given identifier</returns>
bool CheckExists(JobKey jobKey);
/// <summary>
/// Determine whether a <see cref="ITrigger" /> with the given identifier already
/// exists within the scheduler.
/// </summary>
/// <param name="triggerKey">the identifier to check for</param>
/// <returns>true if a Trigger exists with the given identifier</returns>
bool CheckExists(TriggerKey triggerKey);
/// <summary>
/// Clears (deletes!) all scheduling data - all <see cref="IJob"/>s, <see cref="ITrigger" />s
/// and <see cref="ICalendar"/>s.
/// </summary>
void Clear();
}
}
| |
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using Microsoft.Win32;
namespace Meziantou.Framework.Win32;
/// <summary>
/// Defines a file's perceived type based on its extension.
/// </summary>
public sealed class Perceived
{
    // Cache of extension -> perceived type; keys compared case-insensitively.
    // The Dictionary itself is not thread-safe, so every access (reads included)
    // must happen while holding SyncObject.
    private static readonly Dictionary<string, Perceived> s_perceivedTypes = new(StringComparer.OrdinalIgnoreCase);
    private static object SyncObject { get; } = new object();

    private Perceived(string extension, PerceivedType perceivedType, PerceivedTypeSource perceivedTypeSource)
    {
        Extension = extension;
        PerceivedType = perceivedType;
        PerceivedTypeSource = perceivedTypeSource;
    }

    /// <summary>
    /// Registers a built-in set of well-known extensions (mostly developer text
    /// formats, plus a few application binaries) as hard-coded perceived types.
    /// </summary>
    public static void AddDefaultPerceivedTypes()
    {
        AddPerceived(".appxmanifest", PerceivedType.Text);
        AddPerceived(".asax", PerceivedType.Text);
        AddPerceived(".ascx", PerceivedType.Text);
        AddPerceived(".ashx", PerceivedType.Text);
        AddPerceived(".asmx", PerceivedType.Text);
        AddPerceived(".bat", PerceivedType.Text);
        AddPerceived(".class", PerceivedType.Text);
        AddPerceived(".cmd", PerceivedType.Text);
        AddPerceived(".cs", PerceivedType.Text);
        AddPerceived(".cshtml", PerceivedType.Text);
        AddPerceived(".css", PerceivedType.Text);
        AddPerceived(".cfxproj", PerceivedType.Text);
        AddPerceived(".config", PerceivedType.Text);
        AddPerceived(".csproj", PerceivedType.Text);
        AddPerceived(".dll", PerceivedType.Application);
        AddPerceived(".exe", PerceivedType.Application);
        AddPerceived(".htm", PerceivedType.Text);
        AddPerceived(".html", PerceivedType.Text);
        AddPerceived(".iqy", PerceivedType.Text);
        AddPerceived(".js", PerceivedType.Text);
        AddPerceived(".master", PerceivedType.Text);
        AddPerceived(".manifest", PerceivedType.Text);
        AddPerceived(".rdl", PerceivedType.Text);
        AddPerceived(".reg", PerceivedType.Text);
        AddPerceived(".resx", PerceivedType.Text);
        AddPerceived(".rtf", PerceivedType.Text);
        AddPerceived(".rzt", PerceivedType.Text);
        AddPerceived(".sln", PerceivedType.Text);
        AddPerceived(".sql", PerceivedType.Text);
        AddPerceived(".sqlproj", PerceivedType.Text);
        AddPerceived(".snippet", PerceivedType.Text);
        AddPerceived(".svc", PerceivedType.Text);
        AddPerceived(".tpl", PerceivedType.Text);
        AddPerceived(".tplxaml", PerceivedType.Text);
        AddPerceived(".vb", PerceivedType.Text);
        AddPerceived(".vbhtml", PerceivedType.Text);
        AddPerceived(".vbproj", PerceivedType.Text);
        AddPerceived(".vbs", PerceivedType.Text);
        AddPerceived(".vdproj", PerceivedType.Text);
        AddPerceived(".wsdl", PerceivedType.Text);
        AddPerceived(".wxi", PerceivedType.Text);
        AddPerceived(".wxl", PerceivedType.Text);
        AddPerceived(".wxs", PerceivedType.Text);
        AddPerceived(".wixlib", PerceivedType.Text);
        AddPerceived(".xaml", PerceivedType.Text);
        AddPerceived(".xsd", PerceivedType.Text);
        AddPerceived(".xsl", PerceivedType.Text);
        AddPerceived(".xslt", PerceivedType.Text);
    }

    /// <summary>
    /// Adds a perceived instance to the list. An existing entry for the same
    /// extension is overwritten.
    /// </summary>
    /// <param name="extension">The file extension. May not be null.</param>
    /// <param name="type">The perceived type.</param>
    /// <returns>The registered <see cref="Perceived"/> instance.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="extension"/> is null.</exception>
    public static Perceived AddPerceived(string extension, PerceivedType type)
    {
        // Note: the '!!' parameter null-check operator used previously was
        // removed from C# 11 before release; use the throw helper instead.
        ArgumentNullException.ThrowIfNull(extension);
        var perceived = new Perceived(extension, type, PerceivedTypeSource.HardCoded);
        lock (SyncObject)
        {
            s_perceivedTypes[perceived.Extension] = perceived;
        }
        return perceived;
    }

    /// <summary>
    /// Gets the file's extension.
    /// </summary>
    /// <value>The file's extension.</value>
    public string Extension { get; }

    /// <summary>
    /// Indicates the normalized perceived type.
    /// </summary>
    /// <value>The normalized perceived type.</value>
    public PerceivedType PerceivedType { get; }

    /// <summary>
    /// Indicates the source of the perceived type information.
    /// </summary>
    /// <value>the source of the perceived type information.</value>
    public PerceivedTypeSource PerceivedTypeSource { get; }

    /// <summary>
    /// Gets a file's perceived type based on its extension. Results are cached;
    /// unknown extensions are resolved from the registry and then from
    /// AssocGetPerceivedType (Windows only).
    /// </summary>
    /// <param name="fileName">The file name. May not be null.</param>
    /// <returns>An instance of the Perceived type.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="fileName"/> is null.</exception>
    /// <exception cref="ArgumentException"><paramref name="fileName"/> has no extension.</exception>
    /// <exception cref="PlatformNotSupportedException">The extension is not hard-coded and the OS is not Windows.</exception>
    [SupportedOSPlatform("windows")]
    public static Perceived GetPerceivedType(string fileName)
    {
        ArgumentNullException.ThrowIfNull(fileName);
        var extension = Path.GetExtension(fileName);
        // Path.GetExtension never returns null for a non-null argument; it
        // returns an empty string when the name has no extension, so test for
        // both instead of the (dead) null-only check.
        if (string.IsNullOrEmpty(extension))
            throw new ArgumentException("The extension cannot be determined from the file name", nameof(fileName));
        extension = extension.ToUpperInvariant();
        // All dictionary access is serialized: the previous lock-free fast-path
        // read raced with writers mutating the Dictionary under the lock.
        lock (SyncObject)
        {
            if (s_perceivedTypes.TryGetValue(extension, out var ptype))
                return ptype;
            // Hard-coded entries work everywhere; anything else needs the
            // Windows registry / shlwapi.
            if (!IsSupportedPlatform())
                throw new PlatformNotSupportedException("PerceivedType is only supported on Windows");
            var type = PerceivedType.Unknown;
            var source = PerceivedTypeSource.Undefined;
            using (var key = Registry.ClassesRoot.OpenSubKey(extension, writable: false))
            {
                if (key != null)
                {
                    // Prefer the explicit "PerceivedType" value, then fall back
                    // to the media type part of "Content Type" (text/plain -> text).
                    var ct = key.GetStringValue("PerceivedType");
                    if (ct != null)
                    {
                        type = Extensions.GetEnumValue(ct, PerceivedType.Custom);
                        source = PerceivedTypeSource.SoftCoded;
                    }
                    else
                    {
                        ct = key.GetStringValue("Content Type");
                        if (ct != null)
                        {
                            var pos = ct.IndexOf('/', StringComparison.Ordinal);
                            if (pos > 0)
                            {
                                type = Extensions.GetEnumValue(ct[..pos], PerceivedType.Custom);
                                source = PerceivedTypeSource.Mime;
                            }
                        }
                    }
                }
            }
            if (type == PerceivedType.Unknown)
            {
                // Last resort: ask the shell association API.
                var text = IntPtr.Zero;
                type = PerceivedType.Unknown;
                source = PerceivedTypeSource.Undefined;
                var hr = AssocGetPerceivedType(extension, ref type, ref source, ref text);
                if (hr != 0)
                {
                    type = PerceivedType.Unspecified;
                    source = PerceivedTypeSource.Undefined;
                }
            }
            ptype = new Perceived(extension, type, source);
            s_perceivedTypes.Add(extension, ptype);
            return ptype;
        }
    }

    /// <summary>
    /// Returns a <see cref="string"/> that represents the current <see cref="object"/>.
    /// </summary>
    /// <returns>
    /// A <see cref="string"/> that represents the current <see cref="object"/>.
    /// </returns>
    public override string ToString()
    {
        return Extension + ":" + PerceivedType + " (" + PerceivedTypeSource + ")";
    }

    private static bool IsSupportedPlatform()
    {
        return Environment.OSVersion.Platform == PlatformID.Win32NT;
    }

    [DllImport("shlwapi.dll")]
    private static extern int AssocGetPerceivedType(
        [MarshalAs(UnmanagedType.LPWStr)] string pszExt,
        ref PerceivedType ptype,
        ref PerceivedTypeSource pflag,
        ref IntPtr ppszType);
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.Compute.V1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedRoutersClientTest
{
[xunit::FactAttribute]
public void GetRequestObject()
{
// Arrange: strict mock so any call not explicitly set up fails the test.
moq::Mock<Routers.RoutersClient> mockGrpcClient = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetRouterRequest request = new GetRouterRequest
{
Region = "regionedb20d96",
Router = "routerd55c39f3",
Project = "projectaa6ff846",
};
Router expectedResponse = new Router
{
Id = 11672635353343658936UL,
Bgp = new RouterBgp(),
Kind = "kindf7aa39d9",
Name = "name1c9368b0",
Nats = { new RouterNat(), },
Interfaces =
{
new RouterInterface(),
},
CreationTimestamp = "creation_timestamp235e59a1",
Region = "regionedb20d96",
Network = "networkd22ce091",
EncryptedInterconnectRouter = false,
Description = "description2cf9da67",
BgpPeers =
{
new RouterBgpPeer(),
},
SelfLink = "self_link7e87f12d",
};
mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
RoutersClient client = new RoutersClientImpl(mockGrpcClient.Object, null);
// Act: synchronous Get overload taking the request object.
Router response = client.Get(request);
// Assert: the wrapper must return the exact instance produced by the gRPC layer.
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetRequestObjectAsync()
{
// Arrange: strict mock so any call not explicitly set up fails the test.
moq::Mock<Routers.RoutersClient> mockGrpcClient = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetRouterRequest request = new GetRouterRequest
{
Region = "regionedb20d96",
Router = "routerd55c39f3",
Project = "projectaa6ff846",
};
Router expectedResponse = new Router
{
Id = 11672635353343658936UL,
Bgp = new RouterBgp(),
Kind = "kindf7aa39d9",
Name = "name1c9368b0",
Nats = { new RouterNat(), },
Interfaces =
{
new RouterInterface(),
},
CreationTimestamp = "creation_timestamp235e59a1",
Region = "regionedb20d96",
Network = "networkd22ce091",
EncryptedInterconnectRouter = false,
Description = "description2cf9da67",
BgpPeers =
{
new RouterBgpPeer(),
},
SelfLink = "self_link7e87f12d",
};
mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Router>(stt::Task.FromResult(expectedResponse), null, null, null, null));
RoutersClient client = new RoutersClientImpl(mockGrpcClient.Object, null);
// Act + Assert: exercise both async overloads (CallSettings and CancellationToken).
Router responseCallSettings = await client.GetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Router responseCancellationToken = await client.GetAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void Get()
{
// Arrange: strict mock so any call not explicitly set up fails the test.
moq::Mock<Routers.RoutersClient> mockGrpcClient = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetRouterRequest request = new GetRouterRequest
{
Region = "regionedb20d96",
Router = "routerd55c39f3",
Project = "projectaa6ff846",
};
Router expectedResponse = new Router
{
Id = 11672635353343658936UL,
Bgp = new RouterBgp(),
Kind = "kindf7aa39d9",
Name = "name1c9368b0",
Nats = { new RouterNat(), },
Interfaces =
{
new RouterInterface(),
},
CreationTimestamp = "creation_timestamp235e59a1",
Region = "regionedb20d96",
Network = "networkd22ce091",
EncryptedInterconnectRouter = false,
Description = "description2cf9da67",
BgpPeers =
{
new RouterBgpPeer(),
},
SelfLink = "self_link7e87f12d",
};
mockGrpcClient.Setup(x => x.Get(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
RoutersClient client = new RoutersClientImpl(mockGrpcClient.Object, null);
// Act: the flattened overload must build a request equal to `request`
// (the strict mock matches on protobuf value equality).
Router response = client.Get(request.Project, request.Region, request.Router);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetAsync()
{
// Arrange: strict mock so any call not explicitly set up fails the test.
moq::Mock<Routers.RoutersClient> mockGrpcClient = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetRouterRequest request = new GetRouterRequest
{
Region = "regionedb20d96",
Router = "routerd55c39f3",
Project = "projectaa6ff846",
};
Router expectedResponse = new Router
{
Id = 11672635353343658936UL,
Bgp = new RouterBgp(),
Kind = "kindf7aa39d9",
Name = "name1c9368b0",
Nats = { new RouterNat(), },
Interfaces =
{
new RouterInterface(),
},
CreationTimestamp = "creation_timestamp235e59a1",
Region = "regionedb20d96",
Network = "networkd22ce091",
EncryptedInterconnectRouter = false,
Description = "description2cf9da67",
BgpPeers =
{
new RouterBgpPeer(),
},
SelfLink = "self_link7e87f12d",
};
mockGrpcClient.Setup(x => x.GetAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Router>(stt::Task.FromResult(expectedResponse), null, null, null, null));
RoutersClient client = new RoutersClientImpl(mockGrpcClient.Object, null);
// Act + Assert: exercise both flattened async overloads (CallSettings and CancellationToken).
Router responseCallSettings = await client.GetAsync(request.Project, request.Region, request.Router, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Router responseCancellationToken = await client.GetAsync(request.Project, request.Region, request.Router, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetRouterStatusRequestObject()
{
// Arrange: strict mock so any call not explicitly set up fails the test.
moq::Mock<Routers.RoutersClient> mockGrpcClient = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
mockGrpcClient.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
GetRouterStatusRouterRequest request = new GetRouterStatusRouterRequest
{
Region = "regionedb20d96",
Router = "routerd55c39f3",
Project = "projectaa6ff846",
};
RouterStatusResponse expectedResponse = new RouterStatusResponse
{
Kind = "kindf7aa39d9",
Result = new RouterStatus(),
};
mockGrpcClient.Setup(x => x.GetRouterStatus(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
RoutersClient client = new RoutersClientImpl(mockGrpcClient.Object, null);
// Act: synchronous GetRouterStatus overload taking the request object.
RouterStatusResponse response = client.GetRouterStatus(request);
// Assert: the wrapper must return the exact instance produced by the gRPC layer.
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetRouterStatusRequestObjectAsync()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new GetRouterStatusRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
    };
    var expected = new RouterStatusResponse
    {
        Result = new RouterStatus(),
        Kind = "kindf7aa39d9",
    };
    mock.Setup(x => x.GetRouterStatusAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<RouterStatusResponse>(stt::Task.FromResult(expected), null, null, null, null));
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act + Assert: both async overloads must surface the exact response instance.
    RouterStatusResponse viaCallSettings = await client.GetRouterStatusAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    RouterStatusResponse viaCancellationToken = await client.GetRouterStatusAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void GetRouterStatus()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new GetRouterStatusRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
    };
    var expected = new RouterStatusResponse
    {
        Result = new RouterStatus(),
        Kind = "kindf7aa39d9",
    };
    mock.Setup(x => x.GetRouterStatus(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act: the flattened overload must build a request equal to the one set up above.
    RouterStatusResponse actual = client.GetRouterStatus(request.Project, request.Region, request.Router);
    // Assert
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetRouterStatusAsync()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new GetRouterStatusRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
    };
    var expected = new RouterStatusResponse
    {
        Result = new RouterStatus(),
        Kind = "kindf7aa39d9",
    };
    mock.Setup(x => x.GetRouterStatusAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<RouterStatusResponse>(stt::Task.FromResult(expected), null, null, null, null));
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act + Assert: flattened async overloads must surface the exact response instance.
    RouterStatusResponse viaCallSettings = await client.GetRouterStatusAsync(request.Project, request.Region, request.Router, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    RouterStatusResponse viaCancellationToken = await client.GetRouterStatusAsync(request.Project, request.Region, request.Router, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void PreviewRequestObject()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new PreviewRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
        RouterResource = new Router(),
    };
    var expected = new RoutersPreviewResponse
    {
        Resource = new Router(),
    };
    mock.Setup(x => x.Preview(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act
    RoutersPreviewResponse actual = client.Preview(request);
    // Assert: wrapper returns the exact response instance and every Setup was exercised.
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task PreviewRequestObjectAsync()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new PreviewRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
        RouterResource = new Router(),
    };
    var expected = new RoutersPreviewResponse
    {
        Resource = new Router(),
    };
    mock.Setup(x => x.PreviewAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<RoutersPreviewResponse>(stt::Task.FromResult(expected), null, null, null, null));
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act + Assert: both async overloads must surface the exact response instance.
    RoutersPreviewResponse viaCallSettings = await client.PreviewAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    RoutersPreviewResponse viaCancellationToken = await client.PreviewAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public void Preview()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new PreviewRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
        RouterResource = new Router(),
    };
    var expected = new RoutersPreviewResponse
    {
        Resource = new Router(),
    };
    mock.Setup(x => x.Preview(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act: the flattened overload must build a request equal to the one set up above.
    RoutersPreviewResponse actual = client.Preview(request.Project, request.Region, request.Router, request.RouterResource);
    // Assert
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task PreviewAsync()
{
    // Arrange: a strict mock fails the test on any gRPC call without a matching Setup.
    var mock = new moq::Mock<Routers.RoutersClient>(moq::MockBehavior.Strict);
    mock.Setup(x => x.CreateOperationsClientForRegionOperations()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    var request = new PreviewRouterRequest
    {
        Project = "projectaa6ff846",
        Region = "regionedb20d96",
        Router = "routerd55c39f3",
        RouterResource = new Router(),
    };
    var expected = new RoutersPreviewResponse
    {
        Resource = new Router(),
    };
    mock.Setup(x => x.PreviewAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<RoutersPreviewResponse>(stt::Task.FromResult(expected), null, null, null, null));
    RoutersClient client = new RoutersClientImpl(mock.Object, null);
    // Act + Assert: flattened async overloads must surface the exact response instance.
    RoutersPreviewResponse viaCallSettings = await client.PreviewAsync(request.Project, request.Region, request.Router, request.RouterResource, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    RoutersPreviewResponse viaCancellationToken = await client.PreviewAsync(request.Project, request.Region, request.Router, request.RouterResource, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
}
}
| |
// <copyright file="TurnBasedMatch.cs" company="Google Inc.">
// Copyright (C) 2014 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
namespace GooglePlayGames.BasicApi.Multiplayer
{
using System;
using System.Collections.Generic;
using System.Linq;
using GooglePlayGames.OurUtils;
/// <summary>
/// Represents a turn-based match. Instances are read-only snapshots of the
/// match state as reported by the platform layer.
/// </summary>
public class TurnBasedMatch
{
    /// <summary>Lifecycle state of the match.</summary>
    public enum MatchStatus
    {
        Active,
        AutoMatching,
        Cancelled,
        Complete,
        Expired,
        Unknown,
        Deleted
    }

    /// <summary>Turn state of the match from the local player's perspective.</summary>
    public enum MatchTurnStatus
    {
        Complete,
        Invited,
        MyTurn,
        TheirTurn,
        Unknown
    }

    private string mMatchId;
    // Opaque game-defined payload; its meaning is entirely up to the game.
    private byte[] mData;
    private bool mCanRematch;
    private uint mAvailableAutomatchSlots;
    private string mSelfParticipantId;
    private List<Participant> mParticipants;
    private string mPendingParticipantId;
    private MatchTurnStatus mTurnStatus;
    private MatchStatus mMatchStatus;
    private uint mVariant;
    private uint mVersion;

    /// <summary>
    /// Constructs a match snapshot. Internal: instances are created by the
    /// plugin's platform layer, not by game code.
    /// </summary>
    internal TurnBasedMatch(string matchId, byte[] data, bool canRematch,
        string selfParticipantId, List<Participant> participants, uint availableAutomatchSlots,
        string pendingParticipantId, MatchTurnStatus turnStatus, MatchStatus matchStatus,
        uint variant, uint version)
    {
        mMatchId = matchId;
        mData = data;
        mCanRematch = canRematch;
        mSelfParticipantId = selfParticipantId;
        mParticipants = participants;
        // participant list is always sorted!
        mParticipants.Sort();
        mAvailableAutomatchSlots = availableAutomatchSlots;
        mPendingParticipantId = pendingParticipantId;
        mTurnStatus = turnStatus;
        mMatchStatus = matchStatus;
        mVariant = variant;
        mVersion = version;
    }

    /// <summary>Match ID.</summary>
    public string MatchId
    {
        get
        {
            return mMatchId;
        }
    }

    /// <summary>
    /// The data associated with the match. The meaning of this data is defined by the game.
    /// </summary>
    public byte[] Data
    {
        get
        {
            return mData;
        }
    }

    /// <summary>If true, this match can be rematched.</summary>
    public bool CanRematch
    {
        get
        {
            return mCanRematch;
        }
    }

    /// <summary>The participant ID that represents the current player.</summary>
    public string SelfParticipantId
    {
        get
        {
            return mSelfParticipantId;
        }
    }

    /// <summary>The participant that represents the current player in the match.</summary>
    public Participant Self
    {
        get
        {
            return GetParticipant(mSelfParticipantId);
        }
    }

    /// <summary>
    /// Gets a participant by ID.
    /// </summary>
    /// <param name="participantId">The ID to look up.</param>
    /// <returns>The matching participant, or null (with a warning logged) if not found.</returns>
    public Participant GetParticipant(string participantId)
    {
        foreach (Participant p in mParticipants)
        {
            if (p.ParticipantId.Equals(participantId))
            {
                return p;
            }
        }
        Logger.w("Participant not found in turn-based match: " + participantId);
        return null;
    }

    /// <summary>Returns the list of participants. Guaranteed to be sorted (see constructor).</summary>
    public List<Participant> Participants
    {
        get
        {
            return mParticipants;
        }
    }

    /// <summary>Returns the pending participant ID (whose turn it is).</summary>
    public string PendingParticipantId
    {
        get
        {
            return mPendingParticipantId;
        }
    }

    /// <summary>Returns the pending participant (whose turn it is), or null if none.</summary>
    public Participant PendingParticipant
    {
        get
        {
            return mPendingParticipantId == null ? null :
                GetParticipant(mPendingParticipantId);
        }
    }

    /// <summary>Returns the turn status (whether it's my turn).</summary>
    public MatchTurnStatus TurnStatus
    {
        get
        {
            return mTurnStatus;
        }
    }

    /// <summary>Returns the status of the match.</summary>
    public MatchStatus Status
    {
        get
        {
            return mMatchStatus;
        }
    }

    /// <summary>Returns the match variant being played. 0 for default.</summary>
    public uint Variant
    {
        get
        {
            return mVariant;
        }
    }

    /// <summary>Returns the version for the contained match.</summary>
    public uint Version
    {
        get
        {
            return mVersion;
        }
    }

    /// <summary>Returns how many automatch slots are still open in the match.</summary>
    public uint AvailableAutomatchSlots
    {
        get
        {
            return mAvailableAutomatchSlots;
        }
    }

    /// <summary>Debug-friendly dump of all fields (mData renders as the array's type name).</summary>
    public override string ToString()
    {
        return string.Format("[TurnBasedMatch: mMatchId={0}, mData={1}, mCanRematch={2}, " +
            "mSelfParticipantId={3}, mParticipants={4}, mPendingParticipantId={5}, " +
            "mTurnStatus={6}, mMatchStatus={7}, mVariant={8}, mVersion={9}]",
            mMatchId,
            mData,
            mCanRematch,
            mSelfParticipantId,
            string.Join(",", mParticipants.Select(p => p.ToString()).ToArray()),
            mPendingParticipantId,
            mTurnStatus,
            mMatchStatus,
            mVariant,
            mVersion);
    }
}
}
| |
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True
using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;
namespace NetCoreForce.Models
{
///<summary>
/// Consumption Schedule
///<para>SObject Name: ConsumptionSchedule</para>
///<para>Custom Object: False</para>
///<para>Generated model (SF API v50.0); system audit fields are marked
/// non-updateable/non-createable via attributes. Do not hand-edit field mappings.</para>
///</summary>
public class SfConsumptionSchedule : SObject
{
    // Logical SObject type name used when building Salesforce API requests.
    [JsonIgnore]
    public static string SObjectTypeName
    {
        get { return "ConsumptionSchedule"; }
    }

    ///<summary>
    /// Consumption Schedule ID
    /// <para>Name: Id</para>
    /// <para>SF Type: id</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "id")]
    [Updateable(false), Createable(false)]
    public string Id { get; set; }

    ///<summary>
    /// Owner ID
    /// <para>Name: OwnerId</para>
    /// <para>SF Type: reference</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "ownerId")]
    public string OwnerId { get; set; }

    ///<summary>
    /// Deleted
    /// <para>Name: IsDeleted</para>
    /// <para>SF Type: boolean</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "isDeleted")]
    [Updateable(false), Createable(false)]
    public bool? IsDeleted { get; set; }

    ///<summary>
    /// Consumption Schedule Name
    /// <para>Name: Name</para>
    /// <para>SF Type: string</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "name")]
    public string Name { get; set; }

    ///<summary>
    /// Created Date
    /// <para>Name: CreatedDate</para>
    /// <para>SF Type: datetime</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "createdDate")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? CreatedDate { get; set; }

    ///<summary>
    /// Created By ID
    /// <para>Name: CreatedById</para>
    /// <para>SF Type: reference</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "createdById")]
    [Updateable(false), Createable(false)]
    public string CreatedById { get; set; }

    ///<summary>
    /// ReferenceTo: User
    /// <para>RelationshipName: CreatedBy</para>
    ///</summary>
    [JsonProperty(PropertyName = "createdBy")]
    [Updateable(false), Createable(false)]
    public SfUser CreatedBy { get; set; }

    ///<summary>
    /// Last Modified Date
    /// <para>Name: LastModifiedDate</para>
    /// <para>SF Type: datetime</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "lastModifiedDate")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? LastModifiedDate { get; set; }

    ///<summary>
    /// Last Modified By ID
    /// <para>Name: LastModifiedById</para>
    /// <para>SF Type: reference</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "lastModifiedById")]
    [Updateable(false), Createable(false)]
    public string LastModifiedById { get; set; }

    ///<summary>
    /// ReferenceTo: User
    /// <para>RelationshipName: LastModifiedBy</para>
    ///</summary>
    [JsonProperty(PropertyName = "lastModifiedBy")]
    [Updateable(false), Createable(false)]
    public SfUser LastModifiedBy { get; set; }

    ///<summary>
    /// System Modstamp
    /// <para>Name: SystemModstamp</para>
    /// <para>SF Type: datetime</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "systemModstamp")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? SystemModstamp { get; set; }

    ///<summary>
    /// Last Viewed Date
    /// <para>Name: LastViewedDate</para>
    /// <para>SF Type: datetime</para>
    /// <para>Nillable: True</para>
    ///</summary>
    [JsonProperty(PropertyName = "lastViewedDate")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? LastViewedDate { get; set; }

    ///<summary>
    /// Last Referenced Date
    /// <para>Name: LastReferencedDate</para>
    /// <para>SF Type: datetime</para>
    /// <para>Nillable: True</para>
    ///</summary>
    [JsonProperty(PropertyName = "lastReferencedDate")]
    [Updateable(false), Createable(false)]
    public DateTimeOffset? LastReferencedDate { get; set; }

    ///<summary>
    /// Active
    /// <para>Name: IsActive</para>
    /// <para>SF Type: boolean</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "isActive")]
    public bool? IsActive { get; set; }

    ///<summary>
    /// Description
    /// <para>Name: Description</para>
    /// <para>SF Type: textarea</para>
    /// <para>Nillable: True</para>
    ///</summary>
    [JsonProperty(PropertyName = "description")]
    public string Description { get; set; }

    ///<summary>
    /// Billing Term
    /// <para>Name: BillingTerm</para>
    /// <para>SF Type: int</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "billingTerm")]
    public int? BillingTerm { get; set; }

    ///<summary>
    /// Billing Term Unit
    /// <para>Name: BillingTermUnit</para>
    /// <para>SF Type: picklist</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "billingTermUnit")]
    public string BillingTermUnit { get; set; }

    ///<summary>
    /// Type
    /// <para>Name: Type</para>
    /// <para>SF Type: picklist</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "type")]
    public string Type { get; set; }

    ///<summary>
    /// Unit of Measure
    /// <para>Name: UnitOfMeasure</para>
    /// <para>SF Type: picklist</para>
    /// <para>Nillable: True</para>
    ///</summary>
    [JsonProperty(PropertyName = "unitOfMeasure")]
    public string UnitOfMeasure { get; set; }

    ///<summary>
    /// Rating Method
    /// <para>Name: RatingMethod</para>
    /// <para>SF Type: picklist</para>
    /// <para>Nillable: False</para>
    ///</summary>
    [JsonProperty(PropertyName = "ratingMethod")]
    public string RatingMethod { get; set; }

    ///<summary>
    /// Matching Attribute
    /// <para>Name: MatchingAttribute</para>
    /// <para>SF Type: string</para>
    /// <para>Nillable: True</para>
    ///</summary>
    [JsonProperty(PropertyName = "matchingAttribute")]
    public string MatchingAttribute { get; set; }

    ///<summary>
    /// Number of Consumption Rates
    /// <para>Name: NumberOfRates</para>
    /// <para>SF Type: int</para>
    /// <para>Nillable: True</para>
    ///</summary>
    [JsonProperty(PropertyName = "numberOfRates")]
    [Updateable(false), Createable(false)]
    public int? NumberOfRates { get; set; }
}
}
| |
//------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------------------------
namespace Microsoft.Tools.ServiceModel.WsatConfig
{
using System;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
// Property sheet messages (PSM_*): offsets from WM_USER (0x0400). Mirrors prsht.h.
struct PSM
{
    internal const int SETCURSEL = (0x0400 + 101);     // PSM_SETCURSEL: select the active page
    internal const int REMOVEPAGE = (0x0400 + 102);    // PSM_REMOVEPAGE
    internal const int ADDPAGE = (0x0400 + 103);       // PSM_ADDPAGE
    internal const int CHANGED = (0x0400 + 104);       // PSM_CHANGED: page modified (enables Apply)
    internal const int UNCHANGED = (0x0400 + 109);     // PSM_UNCHANGED
    internal const int SETWIZBUTTONS = (0x0400 + 112); // PSM_SETWIZBUTTONS: set wizard button state
}
// Property sheet notification codes (PSN_*), delivered via WM_NOTIFY. Mirrors prsht.h.
struct PSN
{
    internal const int FIRST = unchecked((0 - 200));         // PSN_FIRST
    internal const int LAST = unchecked((0 - 299));          // PSN_LAST
    internal const int SETACTIVE = unchecked((FIRST - 0));   // page about to become active
    internal const int KILLACTIVE = unchecked((FIRST - 1));  // page losing activation (validate here)
    internal const int APPLY = unchecked((FIRST - 2));       // user clicked OK or Apply
    internal const int RESET = unchecked((FIRST - 3));       // user clicked Cancel
    internal const int HELP = unchecked((FIRST - 5));        // user clicked Help
    internal const int WIZBACK = unchecked((FIRST - 6));     // wizard Back button
    internal const int WIZNEXT = unchecked((FIRST - 7));     // wizard Next button
    internal const int WIZFINISH = unchecked((FIRST - 8));   // wizard Finish button
    internal const int QUERYCANCEL = unchecked((FIRST - 9)); // user attempting to cancel
    internal const int GETOBJECT = unchecked((FIRST - 10));  // drag-and-drop object request
}
// Standard window messages (WM_*) handled by the dialog procedures. Mirrors winuser.h.
struct WM
{
    internal const uint INITDIALOG = 0x0110; // WM_INITDIALOG
    internal const uint COMMAND = 0x0111;    // WM_COMMAND
    internal const uint DESTROY = 0x0002;    // WM_DESTROY
    internal const uint NOTIFY = 0x004E;     // WM_NOTIFY (carries NMHDR, e.g. PSN_* codes)
    internal const uint PAINT = 0x000F;      // WM_PAINT
    internal const uint SETFOCUS = 0x0007;   // WM_SETFOCUS
    internal const uint SHOWWINDOW = 0x0018; // WM_SHOWWINDOW
}
// Property sheet page flags (PSP_*) for PropSheetPage.dwFlags. Mirrors prsht.h.
struct PSP
{
    internal const int DEFAULT = 0x00000000;
    internal const int DLGINDIRECT = 0x00000001;  // pResource is an in-memory dialog template
    internal const int USEHICON = 0x00000002;
    internal const int USEICONID = 0x00000004;
    internal const int USETITLE = 0x00000008;     // pszTitle overrides the template caption
    internal const int RTLREADING = 0x00000010;
    internal const int HASHELP = 0x00000020;
    internal const int USEREFPARENT = 0x00000040; // pcRefParent is a valid ref-count pointer
    internal const int USECALLBACK = 0x00000080;  // pfnCallback is invoked on create/destroy
    internal const int PREMATURE = 0x00000400;
    internal const int HIDEHEADER = 0x00000800;
    internal const int USEHEADERTITLE = 0x00001000;
    internal const int USEHEADERSUBTITLE = 0x00002000;
}
// Common COM HRESULT values returned by the interop interfaces below.
struct HRESULT
{
    internal const int S_OK = 0;
    internal const int S_FALSE = 1;
    internal const int E_FAIL = unchecked((int)0x80004005);         // unspecified failure
    internal const int E_NOTIMPL = unchecked((int)0x80004001);      // not implemented
    internal const int E_ACCESSDENIED = unchecked((int)0x80070005); // access denied
}
// Return values for PSN_* notifications (PSNRET_*). Mirrors prsht.h.
struct PSNRET
{
    internal const long NOERROR = 0;              // accept the change / allow the page switch
    internal const long INVALID = 1;              // reject; return to the notifying page
    internal const long INVALID_NOCHANGEPAGE = 2; // reject; stay on the current page
    internal const long MESSAGE_HANDLED = 3;      // notification message was fully handled
}
// Dialog box template styles (DS_*). Mirrors winuser.h.
struct DS
{
    internal const int SETFONT = 0x40;   // unprefixed alias of DS_SETFONT below
    internal const int FIXEDSYS = 0x0008; // unprefixed alias of DS_FIXEDSYS below
    internal const int DS_ABSALIGN = 0x01;
    internal const int DS_SYSMODAL = 0x02;
    internal const int DS_LOCALEDIT = 0x20; /* Edit items get Local storage. */
    internal const int DS_SETFONT = 0x40; /* User specified font for Dlg controls */
    internal const int DS_MODALFRAME = 0x80; /* Can be combined with WS_CAPTION */
    internal const int DS_NOIDLEMSG = 0x100; /* WM_ENTERIDLE message will not be sent */
    internal const int DS_SETFOREGROUND = 0x200; /* not in win3.1 */
    internal const int DS_3DLOOK = 0x0004;
    internal const int DS_FIXEDSYS = 0x0008;
    internal const int DS_NOFAILCREATE = 0x0010;
    internal const int DS_CONTROL = 0x0400;
    internal const int DS_CENTER = 0x0800;
    internal const int DS_CENTERMOUSE = 0x1000;
    internal const int DS_CONTEXTHELP = 0x2000;
    internal const int DS_SHELLFONT = (DS_SETFONT | DS_FIXEDSYS);
    internal const int DS_USEPIXELS = 0x8000;
}
// Managed mirror of the Win32 NMHDR header carried by every WM_NOTIFY message.
[StructLayout(LayoutKind.Sequential)]
struct NMHDR
{
    internal IntPtr hwndFrom; // window sending the notification
    internal UIntPtr idFrom;  // identifier of the sending control
    internal uint code;       // notification code (e.g. a PSN_* value)
}
// Managed mirror of the Win32 PROPSHEETPAGE structure; dwFlags takes PSP_* values.
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Auto)]
struct PropSheetPage
{
    internal int dwSize;   // size of this structure in bytes; must be set before use
    internal int dwFlags;  // combination of PSP_* flags
    internal IntPtr hInstance;
    // This is a union of the following Data items
    // String pszTemplate;
    internal SafeLocalAllocation pResource;
    // This is a union of the following Data items
    // IntPtr hIcon;
    // String pszIcon;
    internal IntPtr hIcon; // This must be IntPtr.Zero or a SafeHandle should be used
    [MarshalAs(UnmanagedType.LPWStr)]
    internal string pszTitle;
    internal DialogProc pfnDlgProc;   // dialog procedure; keep a reference alive while the page exists
    internal IntPtr longParameter;    // lParam passed through to the dialog procedure
    internal PropSheetPageProc pfnCallback;
    internal IntPtr pcRefParent;
    [MarshalAs(UnmanagedType.LPWStr)]
    internal string pszHeaderTitle;
    [MarshalAs(UnmanagedType.LPWStr)]
    internal string pszHeaderSubTitle;
}
// Managed mirror of the Win32 DLGTEMPLATE structure; Pack = 2 matches the native layout.
[StructLayout(LayoutKind.Sequential, Pack = 2, CharSet = CharSet.Auto)]
struct DialogTemplate
{
    internal uint style;           // DS_* / WS_* style bits
    internal uint dwExtendedStyle;
    internal ushort cdit;          // number of dialog items following the template
    internal short x;
    internal short y;
    internal short cx;
    internal short cy;
    // DialogTemplate is a variable-length structure
    // The following 3 fields will be length of 3 sub-arrays and they'll be zeroes in this app
    internal short wMenuResource;
    internal short wWindowClass;
    internal short wTitleArray;
}
/// <summary>Native dialog procedure for a property sheet page (DLGPROC signature).</summary>
[CLSCompliantAttribute(false)]
public delegate bool DialogProc(IntPtr windowDialog, UInt32 message, IntPtr wordParameter, IntPtr longParameter);
/// <summary>Creation/destruction callback for a property sheet page (PropSheetPageProc signature).</summary>
[CLSCompliantAttribute(false)]
public delegate int PropSheetPageProc(IntPtr window, int message, IntPtr longParameter);
/// <summary>
/// COM interop for the MMC IPropertySheetCallback interface: lets a snap-in
/// add or remove property sheet pages. Methods return HRESULTs (PreserveSig).
/// </summary>
[ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("85DE64DD-EF21-11cf-A285-00C04FD8DBE6")]
public interface IPropertySheetCallback
{
    [PreserveSig()]
    int AddPage(SafePropertyPage prop);
    [PreserveSig()]
    int RemovePage(IntPtr prop);
}
/// <summary>
/// COM interop for the MMC IExtendPropertySheet interface: implemented by a
/// snap-in to contribute property pages for a data object. Methods return HRESULTs.
/// </summary>
[ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("85DE64DC-EF21-11cf-A285-00C04FD8DBE6")]
public interface IExtendPropertySheet
{
    [PreserveSig()]
    int CreatePropertyPages(IPropertySheetCallback provider, IntPtr handle, IDataObject dataObject);
    [PreserveSig()]
    int QueryPagesFor(IDataObject dataObject);
}
/// <summary>
/// COM interop for the MMC IExtendPropertySheet2 interface: extends
/// IExtendPropertySheet with wizard watermark/header bitmap support.
/// Methods return HRESULTs (PreserveSig).
/// </summary>
[ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("B7A87232-4A51-11D1-A7EA-00C04FD909DD")]
public interface IExtendPropertySheet2
{
    [PreserveSig()]
    int CreatePropertyPages(IPropertySheetCallback provider, IntPtr handle, IDataObject dataObject);
    [PreserveSig()]
    int QueryPagesFor(IDataObject dataObject);
    [PreserveSig()]
    int GetWatermarks(IDataObject dataObject, ref IntPtr watermark, ref IntPtr header, ref IntPtr palette, ref int stretch);
}
}
| |
namespace PICkit2V2
{
partial class FormMultiWinProgMem
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Releases resources held by the form.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    if (disposing)
    {
        // Dispose designer-owned components only when releasing managed state.
        if (components != null)
        {
            components.Dispose();
        }
    }
    base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle1 = new System.Windows.Forms.DataGridViewCellStyle();
System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle2 = new System.Windows.Forms.DataGridViewCellStyle();
System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle3 = new System.Windows.Forms.DataGridViewCellStyle();
System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle4 = new System.Windows.Forms.DataGridViewCellStyle();
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(FormMultiWinProgMem));
this.dataGridProgramMemory = new System.Windows.Forms.DataGridView();
this.comboBoxProgMemView = new System.Windows.Forms.ComboBox();
this.displayDataSource = new System.Windows.Forms.Label();
this.labelDataSource = new System.Windows.Forms.Label();
this.contextMenuStrip1 = new System.Windows.Forms.ContextMenuStrip(this.components);
this.toolStripMenuItemContextSelectAll = new System.Windows.Forms.ToolStripMenuItem();
this.toolStripMenuItemContextCopy = new System.Windows.Forms.ToolStripMenuItem();
((System.ComponentModel.ISupportInitialize)(this.dataGridProgramMemory)).BeginInit();
this.contextMenuStrip1.SuspendLayout();
this.SuspendLayout();
//
// dataGridProgramMemory
//
this.dataGridProgramMemory.AllowUserToAddRows = false;
this.dataGridProgramMemory.AllowUserToDeleteRows = false;
this.dataGridProgramMemory.AllowUserToResizeColumns = false;
this.dataGridProgramMemory.AllowUserToResizeRows = false;
this.dataGridProgramMemory.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.dataGridProgramMemory.BackgroundColor = System.Drawing.SystemColors.Window;
this.dataGridProgramMemory.CellBorderStyle = System.Windows.Forms.DataGridViewCellBorderStyle.None;
dataGridViewCellStyle1.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;
dataGridViewCellStyle1.BackColor = System.Drawing.SystemColors.Control;
dataGridViewCellStyle1.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
dataGridViewCellStyle1.ForeColor = System.Drawing.SystemColors.WindowText;
dataGridViewCellStyle1.SelectionBackColor = System.Drawing.SystemColors.Highlight;
dataGridViewCellStyle1.SelectionForeColor = System.Drawing.SystemColors.HighlightText;
dataGridViewCellStyle1.WrapMode = System.Windows.Forms.DataGridViewTriState.True;
this.dataGridProgramMemory.ColumnHeadersDefaultCellStyle = dataGridViewCellStyle1;
this.dataGridProgramMemory.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.DisableResizing;
this.dataGridProgramMemory.ColumnHeadersVisible = false;
this.dataGridProgramMemory.ContextMenuStrip = this.contextMenuStrip1;
dataGridViewCellStyle2.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;
dataGridViewCellStyle2.BackColor = System.Drawing.SystemColors.Window;
dataGridViewCellStyle2.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
dataGridViewCellStyle2.ForeColor = System.Drawing.SystemColors.ControlText;
dataGridViewCellStyle2.SelectionBackColor = System.Drawing.SystemColors.Highlight;
dataGridViewCellStyle2.SelectionForeColor = System.Drawing.SystemColors.HighlightText;
dataGridViewCellStyle2.WrapMode = System.Windows.Forms.DataGridViewTriState.False;
this.dataGridProgramMemory.DefaultCellStyle = dataGridViewCellStyle2;
this.dataGridProgramMemory.Enabled = false;
this.dataGridProgramMemory.Location = new System.Drawing.Point(12, 39);
this.dataGridProgramMemory.Name = "dataGridProgramMemory";
dataGridViewCellStyle3.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;
dataGridViewCellStyle3.BackColor = System.Drawing.SystemColors.Control;
dataGridViewCellStyle3.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
dataGridViewCellStyle3.ForeColor = System.Drawing.SystemColors.WindowText;
dataGridViewCellStyle3.SelectionBackColor = System.Drawing.SystemColors.Highlight;
dataGridViewCellStyle3.SelectionForeColor = System.Drawing.SystemColors.HighlightText;
dataGridViewCellStyle3.WrapMode = System.Windows.Forms.DataGridViewTriState.True;
this.dataGridProgramMemory.RowHeadersDefaultCellStyle = dataGridViewCellStyle3;
this.dataGridProgramMemory.RowHeadersVisible = false;
this.dataGridProgramMemory.RowHeadersWidth = 75;
this.dataGridProgramMemory.RowHeadersWidthSizeMode = System.Windows.Forms.DataGridViewRowHeadersWidthSizeMode.DisableResizing;
dataGridViewCellStyle4.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleCenter;
this.dataGridProgramMemory.RowsDefaultCellStyle = dataGridViewCellStyle4;
this.dataGridProgramMemory.RowTemplate.Height = 17;
this.dataGridProgramMemory.ScrollBars = System.Windows.Forms.ScrollBars.Vertical;
this.dataGridProgramMemory.SelectionMode = System.Windows.Forms.DataGridViewSelectionMode.CellSelect;
this.dataGridProgramMemory.Size = new System.Drawing.Size(512, 123);
this.dataGridProgramMemory.TabIndex = 5;
this.dataGridProgramMemory.CellMouseDown += new System.Windows.Forms.DataGridViewCellMouseEventHandler(this.dataGridProgramMemory_CellMouseDown);
this.dataGridProgramMemory.CellEndEdit += new System.Windows.Forms.DataGridViewCellEventHandler(this.progMemEdit);
//
// comboBoxProgMemView
//
this.comboBoxProgMemView.BackColor = System.Drawing.SystemColors.Info;
this.comboBoxProgMemView.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.comboBoxProgMemView.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.comboBoxProgMemView.FormattingEnabled = true;
this.comboBoxProgMemView.Items.AddRange(new object[] {
"Hex Only",
"Word ASCII",
"Byte ASCII"});
this.comboBoxProgMemView.Location = new System.Drawing.Point(12, 11);
this.comboBoxProgMemView.Margin = new System.Windows.Forms.Padding(2);
this.comboBoxProgMemView.Name = "comboBoxProgMemView";
this.comboBoxProgMemView.Size = new System.Drawing.Size(91, 21);
this.comboBoxProgMemView.TabIndex = 6;
this.comboBoxProgMemView.SelectionChangeCommitted += new System.EventHandler(this.comboBoxProgMemView_SelectionChangeCommitted);
//
// displayDataSource
//
this.displayDataSource.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.displayDataSource.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
this.displayDataSource.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.displayDataSource.Location = new System.Drawing.Point(172, 13);
this.displayDataSource.Margin = new System.Windows.Forms.Padding(2, 0, 2, 0);
this.displayDataSource.MinimumSize = new System.Drawing.Size(279, 16);
this.displayDataSource.Name = "displayDataSource";
this.displayDataSource.Size = new System.Drawing.Size(352, 16);
this.displayDataSource.TabIndex = 8;
this.displayDataSource.Text = "None (Empty/Erased)";
this.displayDataSource.UseCompatibleTextRendering = true;
//
// labelDataSource
//
this.labelDataSource.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.labelDataSource.AutoSize = true;
this.labelDataSource.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.labelDataSource.Location = new System.Drawing.Point(119, 14);
this.labelDataSource.Margin = new System.Windows.Forms.Padding(2, 0, 2, 0);
this.labelDataSource.Name = "labelDataSource";
this.labelDataSource.Size = new System.Drawing.Size(51, 13);
this.labelDataSource.TabIndex = 7;
this.labelDataSource.Text = "Source:";
//
// contextMenuStrip1
//
this.contextMenuStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
this.toolStripMenuItemContextSelectAll,
this.toolStripMenuItemContextCopy});
this.contextMenuStrip1.Name = "contextMenuStrip1";
this.contextMenuStrip1.Size = new System.Drawing.Size(164, 48);
//
// toolStripMenuItemContextSelectAll
//
this.toolStripMenuItemContextSelectAll.Name = "toolStripMenuItemContextSelectAll";
this.toolStripMenuItemContextSelectAll.ShortcutKeyDisplayString = "Ctrl-A";
this.toolStripMenuItemContextSelectAll.Size = new System.Drawing.Size(163, 22);
this.toolStripMenuItemContextSelectAll.Text = "Select All";
this.toolStripMenuItemContextSelectAll.Click += new System.EventHandler(this.toolStripMenuItemContextSelectAll_Click);
//
// toolStripMenuItemContextCopy
//
this.toolStripMenuItemContextCopy.Name = "toolStripMenuItemContextCopy";
this.toolStripMenuItemContextCopy.ShortcutKeyDisplayString = "Ctrl-C";
this.toolStripMenuItemContextCopy.Size = new System.Drawing.Size(163, 22);
this.toolStripMenuItemContextCopy.Text = "Copy";
this.toolStripMenuItemContextCopy.Click += new System.EventHandler(this.toolStripMenuItemContextCopy_Click);
//
// FormMultiWinProgMem
//
this.AutoScaleDimensions = new System.Drawing.SizeF(96F, 96F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
this.ClientSize = new System.Drawing.Size(536, 174);
this.Controls.Add(this.displayDataSource);
this.Controls.Add(this.labelDataSource);
this.Controls.Add(this.comboBoxProgMemView);
this.Controls.Add(this.dataGridProgramMemory);
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.MinimumSize = new System.Drawing.Size(200, 110);
this.Name = "FormMultiWinProgMem";
this.SizeGripStyle = System.Windows.Forms.SizeGripStyle.Show;
this.StartPosition = System.Windows.Forms.FormStartPosition.Manual;
this.Text = "PICkit 2 Program Memory";
this.Resize += new System.EventHandler(this.FormMultiWinProgMem_Resize);
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.FormMultiWinProgMem_FormClosing);
this.ResizeEnd += new System.EventHandler(this.FormMultiWinProgMem_ResizeEnd);
((System.ComponentModel.ISupportInitialize)(this.dataGridProgramMemory)).EndInit();
this.contextMenuStrip1.ResumeLayout(false);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
// Designer-generated control fields wired up in InitializeComponent.
private System.Windows.Forms.DataGridView dataGridProgramMemory;      // main hex/ASCII program-memory grid
private System.Windows.Forms.ComboBox comboBoxProgMemView;            // "Hex Only" / "Word ASCII" / "Byte ASCII" view selector
private System.Windows.Forms.Label displayDataSource;                 // shows the current data source (e.g. "None (Empty/Erased)")
private System.Windows.Forms.Label labelDataSource;                   // static "Source:" caption
private System.Windows.Forms.ContextMenuStrip contextMenuStrip1;      // right-click menu for the grid
private System.Windows.Forms.ToolStripMenuItem toolStripMenuItemContextSelectAll;
private System.Windows.Forms.ToolStripMenuItem toolStripMenuItemContextCopy;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics.Contracts;
using System.Collections;
using System.Globalization;
using System.Threading;
namespace System.Text
{
// This class overrides Encoding with the things we need for our NLS Encodings
//
// All of the GetBytes/Chars GetByte/CharCount methods are just wrappers for the pointer
// plus decoder/encoder method that is our real workhorse. Note that this is an internal
// class, so our public classes cannot derive from this class. Because of this, all of the
// GetBytes/Chars GetByte/CharCount wrapper methods are duplicated in all of our public
// encodings, which currently include:
//
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, & UnicodeEncoding
//
// So if you change the wrappers in this class, you must change the wrappers in the other classes
// as well because they should have the same behavior.
internal abstract class EncodingNLS : Encoding
{
    /// <summary>Passes the requested code page through to the base Encoding.</summary>
    protected EncodingNLS(int codePage) : base(codePage)
    {
    }

    // Returns the number of bytes required to encode a range of characters in
    // a character array.
    //
    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    // parent method is safe
    public override unsafe int GetByteCount(char[] chars, int index, int count)
    {
        // Validate input parameters
        if (chars == null)
            throw new ArgumentNullException("chars", SR.ArgumentNull_Array);
        if (index < 0 || count < 0)
            throw new ArgumentOutOfRangeException((index < 0 ? "index" : "count"), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (chars.Length - index < count)
            throw new ArgumentOutOfRangeException("chars", SR.ArgumentOutOfRange_IndexCountBuffer);
        Contract.EndContractBlock();

        // If no input, return 0, avoid fixed empty array problem
        // (fixing a zero-length array yields a null pointer).
        if (count == 0)
            return 0;

        // Just call the pointer version (the pointer+encoder overload,
        // declared elsewhere in this class, is the real workhorse).
        fixed (char* pChars = chars)
            return GetByteCount(pChars + index, count, null);
    }

    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    // parent method is safe
    public override unsafe int GetByteCount(String s)
    {
        // Validate input
        if (s==null)
            throw new ArgumentNullException("s");
        Contract.EndContractBlock();

        // No count == 0 short-circuit needed: fixing a string always yields a
        // valid pointer, even for the empty string.
        fixed (char* pChars = s)
            return GetByteCount(pChars, s.Length, null);
    }

    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    public override unsafe int GetByteCount(char* chars, int count)
    {
        // Validate Parameters
        if (chars == null)
            throw new ArgumentNullException("chars", SR.ArgumentNull_Array);
        if (count < 0)
            throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
        Contract.EndContractBlock();

        // Call it with empty encoder
        return GetByteCount(chars, count, null);
    }

    // Parent method is safe.
    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    public override unsafe int GetBytes(String s, int charIndex, int charCount,
                                        byte[] bytes, int byteIndex)
    {
        if (s == null || bytes == null)
            throw new ArgumentNullException((s == null ? "s" : "bytes"), SR.ArgumentNull_Array);
        if (charIndex < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((charIndex < 0 ? "charIndex" : "charCount"), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (s.Length - charIndex < charCount)
            throw new ArgumentOutOfRangeException("s", SR.ArgumentOutOfRange_IndexCount);
        if (byteIndex < 0 || byteIndex > bytes.Length)
            throw new ArgumentOutOfRangeException("byteIndex", SR.ArgumentOutOfRange_Index);
        Contract.EndContractBlock();

        // byteCount is the remaining room in the output buffer, not its size.
        int byteCount = bytes.Length - byteIndex;

        // Fixed doesn't like empty arrays; substitute a dummy 1-byte array
        // (byteCount stays 0, so nothing is written to it).
        if (bytes.Length == 0)
            bytes = new byte[1];

        fixed (char* pChars = s) fixed (byte* pBytes = &bytes[0])
            return GetBytes(pChars + charIndex, charCount, pBytes + byteIndex, byteCount, null);
    }

    // Encodes a range of characters in a character array into a range of bytes
    // in a byte array. An exception occurs if the byte array is not large
    // enough to hold the complete encoding of the characters. The
    // GetByteCount method can be used to determine the exact number of
    // bytes that will be produced for a given range of characters.
    // Alternatively, the GetMaxByteCount method can be used to
    // determine the maximum number of bytes that will be produced for a given
    // number of characters, regardless of the actual character values.
    //
    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    // parent method is safe
    public override unsafe int GetBytes(char[] chars, int charIndex, int charCount,
                                        byte[] bytes, int byteIndex)
    {
        // Validate parameters
        if (chars == null || bytes == null)
            throw new ArgumentNullException((chars == null ? "chars" : "bytes"), SR.ArgumentNull_Array);
        if (charIndex < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((charIndex < 0 ? "charIndex" : "charCount"), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (chars.Length - charIndex < charCount)
            throw new ArgumentOutOfRangeException("chars", SR.ArgumentOutOfRange_IndexCountBuffer);
        if (byteIndex < 0 || byteIndex > bytes.Length)
            throw new ArgumentOutOfRangeException("byteIndex", SR.ArgumentOutOfRange_Index);
        Contract.EndContractBlock();

        // If nothing to encode return 0, avoid fixed problem
        if (charCount == 0)
            return 0;

        // Just call pointer version
        int byteCount = bytes.Length - byteIndex;

        // Fixed doesn't like empty arrays
        if (bytes.Length == 0)
            bytes = new byte[1];

        fixed (char* pChars = chars) fixed (byte* pBytes = &bytes[0])
            // Remember that byteCount is # to decode, not size of array.
            return GetBytes(pChars + charIndex, charCount, pBytes + byteIndex, byteCount, null);
    }

    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    public override unsafe int GetBytes(char* chars, int charCount, byte* bytes, int byteCount)
    {
        // Validate Parameters
        if (bytes == null || chars == null)
            throw new ArgumentNullException(bytes == null ? "bytes" : "chars", SR.ArgumentNull_Array);
        if (charCount < 0 || byteCount < 0)
            throw new ArgumentOutOfRangeException((charCount < 0 ? "charCount" : "byteCount"), SR.ArgumentOutOfRange_NeedNonNegNum);
        Contract.EndContractBlock();

        return GetBytes(chars, charCount, bytes, byteCount, null);
    }

    // Returns the number of characters produced by decoding a range of bytes
    // in a byte array.
    //
    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    // parent method is safe
    public override unsafe int GetCharCount(byte[] bytes, int index, int count)
    {
        // Validate Parameters
        if (bytes == null)
            throw new ArgumentNullException("bytes", SR.ArgumentNull_Array);
        if (index < 0 || count < 0)
            throw new ArgumentOutOfRangeException((index < 0 ? "index" : "count"), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (bytes.Length - index < count)
            throw new ArgumentOutOfRangeException("bytes", SR.ArgumentOutOfRange_IndexCountBuffer);
        Contract.EndContractBlock();

        // If no input just return 0, fixed doesn't like 0 length arrays
        if (count == 0)
            return 0;

        // Just call pointer version
        fixed (byte* pBytes = bytes)
            return GetCharCount(pBytes + index, count, null);
    }

    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    public override unsafe int GetCharCount(byte* bytes, int count)
    {
        // Validate Parameters
        if (bytes == null)
            throw new ArgumentNullException("bytes", SR.ArgumentNull_Array);
        if (count < 0)
            throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
        Contract.EndContractBlock();

        return GetCharCount(bytes, count, null);
    }

    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    // parent method is safe
    public override unsafe int GetChars(byte[] bytes, int byteIndex, int byteCount,
                                        char[] chars, int charIndex)
    {
        // Validate Parameters
        if (bytes == null || chars == null)
            throw new ArgumentNullException(bytes == null ? "bytes" : "chars", SR.ArgumentNull_Array);
        if (byteIndex < 0 || byteCount < 0)
            throw new ArgumentOutOfRangeException((byteIndex < 0 ? "byteIndex" : "byteCount"), SR.ArgumentOutOfRange_NeedNonNegNum);
        if ( bytes.Length - byteIndex < byteCount)
            throw new ArgumentOutOfRangeException("bytes", SR.ArgumentOutOfRange_IndexCountBuffer);
        if (charIndex < 0 || charIndex > chars.Length)
            throw new ArgumentOutOfRangeException("charIndex", SR.ArgumentOutOfRange_Index);
        Contract.EndContractBlock();

        // If no input, return 0 & avoid fixed problem
        if (byteCount == 0)
            return 0;

        // Just call pointer version
        int charCount = chars.Length - charIndex;

        // Fixed doesn't like empty arrays
        if (chars.Length == 0)
            chars = new char[1];

        fixed (byte* pBytes = bytes) fixed (char* pChars = &chars[0])
            // Remember that charCount is # to decode, not size of array
            return GetChars(pBytes + byteIndex, byteCount, pChars + charIndex, charCount, null);
    }

    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    public unsafe override int GetChars(byte* bytes, int byteCount, char* chars, int charCount)
    {
        // Validate Parameters
        if (bytes == null || chars == null)
            throw new ArgumentNullException(bytes == null ? "bytes" : "chars", SR.ArgumentNull_Array);
        if (charCount < 0 || byteCount < 0)
            throw new ArgumentOutOfRangeException((charCount < 0 ? "charCount" : "byteCount"), SR.ArgumentOutOfRange_NeedNonNegNum);
        Contract.EndContractBlock();

        return GetChars(bytes, byteCount, chars, charCount, null);
    }

    // Returns a string containing the decoded representation of a range of
    // bytes in a byte array.
    //
    // All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
    // So if you fix this, fix the others. Currently those include:
    // EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
    // parent method is safe
    public override unsafe String GetString(byte[] bytes, int index, int count)
    {
        // Validate Parameters
        if (bytes == null)
            throw new ArgumentNullException("bytes", SR.ArgumentNull_Array);
        if (index < 0 || count < 0)
            throw new ArgumentOutOfRangeException((index < 0 ? "index" : "count"), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (bytes.Length - index < count)
            throw new ArgumentOutOfRangeException("bytes", SR.ArgumentOutOfRange_IndexCountBuffer);
        Contract.EndContractBlock();

        // Avoid problems with empty input buffer
        if (count == 0) return String.Empty;

        // Internal String helper: decodes the bytes straight into the
        // newly allocated string using this encoding.
        fixed (byte* pBytes = bytes)
            return String.CreateStringFromEncoding(
                pBytes + index, count, this);
    }

    /// <summary>Returns a stateful decoder wrapping this encoding.</summary>
    public override Decoder GetDecoder()
    {
        return new DecoderNLS(this);
    }

    /// <summary>Returns a stateful encoder wrapping this encoding.</summary>
    public override Encoder GetEncoder()
    {
        return new EncoderNLS(this);
    }
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="CheckBoxRenderer.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Windows.Forms {
using System;
using System.Drawing;
using System.Diagnostics.CodeAnalysis;
using System.Windows.Forms.Internal;
using System.Windows.Forms.VisualStyles;
using Microsoft.Win32;
/// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer"]/*' />
/// <devdoc>
/// <para>
/// This is a rendering class for the CheckBox control. It works downlevel too (obviously
/// without visual styles applied.)
/// </para>
/// </devdoc>
public sealed class CheckBoxRenderer {
    //Make this per-thread, so that different threads can safely use these methods.
    [ThreadStatic]
    private static VisualStyleRenderer visualStyleRenderer = null;
    // Element whose class/part is reused for every state (only the state id varies).
    private static readonly VisualStyleElement CheckBoxElement = VisualStyleElement.Button.CheckBox.UncheckedNormal;
    private static bool renderMatchingApplicationState = true;

    //cannot instantiate
    private CheckBoxRenderer() {
    }

    /// <include file='doc\ButtonRenderer.uex' path='docs/doc[@for="ButtonRenderer.RenderMatchingApplicationState"]/*' />
    /// <devdoc>
    ///    <para>
    ///       If this property is true, then the renderer will use the setting from Application.RenderWithVisualStyles to
    /// determine how to render.
    ///       If this property is false, the renderer will always render with visualstyles.
    ///    </para>
    /// </devdoc>
    public static bool RenderMatchingApplicationState {
        get {
            return renderMatchingApplicationState;
        }
        set {
            renderMatchingApplicationState = value;
        }
    }

    // Effective flag combining RenderMatchingApplicationState with the
    // application-wide visual styles setting.
    private static bool RenderWithVisualStyles {
        get {
            return (!renderMatchingApplicationState || Application.RenderWithVisualStyles);
        }
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.IsBackgroundPartiallyTransparent"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Returns true if the background corresponding to the given state is partially transparent, else false.
    ///    </para>
    /// </devdoc>
    public static bool IsBackgroundPartiallyTransparent(CheckBoxState state) {
        if (RenderWithVisualStyles) {
            InitializeRenderer((int)state);

            return visualStyleRenderer.IsBackgroundPartiallyTransparent();
        }
        else {
            return false; //for downlevel, this is false
        }
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.DrawParentBackground"]/*' />
    /// <devdoc>
    ///    <para>
    ///       This is just a convenience wrapper for VisualStyleRenderer.DrawThemeParentBackground. For downlevel,
    ///       this isn't required and does nothing.
    ///    </para>
    /// </devdoc>
    [
        SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters") // Using Graphics instead of IDeviceContext intentionally
    ]
    public static void DrawParentBackground(Graphics g, Rectangle bounds, Control childControl) {
        if (RenderWithVisualStyles) {
            InitializeRenderer(0);

            visualStyleRenderer.DrawParentBackground(g, bounds, childControl);
        }
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.DrawCheckBox1"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Renders a CheckBox control.
    ///    </para>
    /// </devdoc>
    public static void DrawCheckBox(Graphics g, Point glyphLocation, CheckBoxState state) {
        Rectangle glyphBounds = new Rectangle(glyphLocation, GetGlyphSize(g, state));

        if (RenderWithVisualStyles) {
            InitializeRenderer((int)state);

            visualStyleRenderer.DrawBackground(g, glyphBounds);
        }
        else {
            // Downlevel (classic) rendering via ControlPaint; mixed states
            // need the dedicated mixed drawing routine.
            if (IsMixed(state)) {
                ControlPaint.DrawMixedCheckBox(g, glyphBounds, ConvertToButtonState(state));
            }
            else {
                ControlPaint.DrawCheckBox(g, glyphBounds, ConvertToButtonState(state));
            }
        }
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.DrawCheckBox2"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Renders a CheckBox control.
    ///    </para>
    /// </devdoc>
    public static void DrawCheckBox(Graphics g, Point glyphLocation, Rectangle textBounds, string checkBoxText, Font font, bool focused, CheckBoxState state) {
        DrawCheckBox(g, glyphLocation, textBounds, checkBoxText, font,
                     TextFormatFlags.HorizontalCenter | TextFormatFlags.VerticalCenter | TextFormatFlags.SingleLine,
                     focused, state);
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.DrawCheckBox3"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Renders a CheckBox control.
    ///    </para>
    /// </devdoc>
    public static void DrawCheckBox(Graphics g, Point glyphLocation, Rectangle textBounds, string checkBoxText, Font font, TextFormatFlags flags, bool focused, CheckBoxState state) {
        Rectangle glyphBounds = new Rectangle(glyphLocation, GetGlyphSize(g, state));
        Color textColor;

        if (RenderWithVisualStyles) {
            InitializeRenderer((int)state);

            visualStyleRenderer.DrawBackground(g, glyphBounds);
            textColor = visualStyleRenderer.GetColor(ColorProperty.TextColor);
        }
        else {
            if (IsMixed(state)) {
                ControlPaint.DrawMixedCheckBox(g, glyphBounds, ConvertToButtonState(state));
            }
            else {
                ControlPaint.DrawCheckBox(g, glyphBounds, ConvertToButtonState(state));
            }

            textColor = SystemColors.ControlText;
        }

        TextRenderer.DrawText(g, checkBoxText, font, textBounds, textColor, flags);

        if (focused) {
            ControlPaint.DrawFocusRectangle(g, textBounds);
        }
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.DrawCheckBox4"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Renders a CheckBox control.
    ///    </para>
    /// </devdoc>
    public static void DrawCheckBox(Graphics g, Point glyphLocation, Rectangle textBounds, string checkBoxText, Font font, Image image, Rectangle imageBounds, bool focused, CheckBoxState state) {
        DrawCheckBox(g, glyphLocation, textBounds, checkBoxText, font,
                     TextFormatFlags.HorizontalCenter | TextFormatFlags.VerticalCenter | TextFormatFlags.SingleLine,
                     image, imageBounds, focused, state);
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.DrawCheckBox5"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Renders a CheckBox control.
    ///    </para>
    /// </devdoc>
    public static void DrawCheckBox(Graphics g, Point glyphLocation, Rectangle textBounds, string checkBoxText, Font font, TextFormatFlags flags, Image image, Rectangle imageBounds, bool focused, CheckBoxState state) {
        Rectangle glyphBounds = new Rectangle(glyphLocation, GetGlyphSize(g, state));
        Color textColor;

        if (RenderWithVisualStyles) {
            InitializeRenderer((int)state);

            //Keep this drawing order! It matches default drawing order.
            visualStyleRenderer.DrawImage(g, imageBounds, image);
            visualStyleRenderer.DrawBackground(g, glyphBounds);
            textColor = visualStyleRenderer.GetColor(ColorProperty.TextColor);
        }
        else {
            g.DrawImage(image, imageBounds);
            if (IsMixed(state)) {
                ControlPaint.DrawMixedCheckBox(g, glyphBounds, ConvertToButtonState(state));
            }
            else {
                ControlPaint.DrawCheckBox(g, glyphBounds, ConvertToButtonState(state));
            }

            textColor = SystemColors.ControlText;
        }

        TextRenderer.DrawText(g, checkBoxText, font, textBounds, textColor, flags);

        if (focused) {
            ControlPaint.DrawFocusRectangle(g, textBounds);
        }
    }

    /// <include file='doc\CheckBoxRenderer.uex' path='docs/doc[@for="CheckBoxRenderer.GetGlyphSize"]/*' />
    /// <devdoc>
    ///    <para>
    ///       Returns the size of the CheckBox glyph.
    ///    </para>
    /// </devdoc>
    [
        SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters") // Using Graphics instead of IDeviceContext intentionally
    ]
    public static Size GetGlyphSize(Graphics g, CheckBoxState state) {
        if (RenderWithVisualStyles) {
            InitializeRenderer((int)state);

            return visualStyleRenderer.GetPartSize(g, ThemeSizeType.Draw);
        }

        // Fixed classic-theme glyph size.
        return new Size(13, 13);
    }

    // Maps a CheckBoxState to the ButtonState flags used by the classic
    // ControlPaint drawing path.  UncheckedNormal and UncheckedHot fall
    // through to default and map to ButtonState.Normal.
    internal static ButtonState ConvertToButtonState(CheckBoxState state) {
        switch (state) {

            case CheckBoxState.CheckedNormal:
            case CheckBoxState.CheckedHot:
                return ButtonState.Checked;
            case CheckBoxState.CheckedPressed:
                return (ButtonState.Checked | ButtonState.Pushed);
            case CheckBoxState.CheckedDisabled:
                return (ButtonState.Checked | ButtonState.Inactive);

            case CheckBoxState.UncheckedPressed:
                return ButtonState.Pushed;
            case CheckBoxState.UncheckedDisabled:
                return ButtonState.Inactive;

            //Downlevel mixed drawing works only if ButtonState.Checked is set
            case CheckBoxState.MixedNormal:
            case CheckBoxState.MixedHot:
                return ButtonState.Checked;
            case CheckBoxState.MixedPressed:
                return (ButtonState.Checked | ButtonState.Pushed);
            case CheckBoxState.MixedDisabled:
                return (ButtonState.Checked | ButtonState.Inactive);

            default:
                return ButtonState.Normal;
        }
    }

    // Reconstructs a CheckBoxState from a ButtonState plus the mixed/hot
    // information that ButtonState itself cannot represent.  Pushed takes
    // priority over Inactive, which takes priority over hot.
    internal static CheckBoxState ConvertFromButtonState(ButtonState state, bool isMixed, bool isHot) {
        if (isMixed) {
            if ((state & ButtonState.Pushed) == ButtonState.Pushed) {
                return CheckBoxState.MixedPressed;
            }
            else if ((state & ButtonState.Inactive) == ButtonState.Inactive) {
                return CheckBoxState.MixedDisabled;
            }
            else if (isHot) {
                return CheckBoxState.MixedHot;
            }

            return CheckBoxState.MixedNormal;
        }
        else if ((state & ButtonState.Checked) == ButtonState.Checked) {
            if ((state & ButtonState.Pushed) == ButtonState.Pushed) {
                return CheckBoxState.CheckedPressed;
            }
            else if ((state & ButtonState.Inactive) == ButtonState.Inactive) {
                return CheckBoxState.CheckedDisabled;
            }
            else if (isHot) {
                return CheckBoxState.CheckedHot;
            }

            return CheckBoxState.CheckedNormal;
        }
        else { //unchecked
            if ((state & ButtonState.Pushed) == ButtonState.Pushed) {
                return CheckBoxState.UncheckedPressed;
            }
            else if ((state & ButtonState.Inactive) == ButtonState.Inactive) {
                return CheckBoxState.UncheckedDisabled;
            }
            else if (isHot) {
                return CheckBoxState.UncheckedHot;
            }

            return CheckBoxState.UncheckedNormal;
        }
    }

    // True for the four indeterminate ("mixed") checkbox states.
    private static bool IsMixed(CheckBoxState state) {
        switch (state) {

            case CheckBoxState.MixedNormal:
            case CheckBoxState.MixedHot:
            case CheckBoxState.MixedPressed:
            case CheckBoxState.MixedDisabled:
                return true;

            default:
                return false;
        }
    }

    // Lazily creates, or re-parameterizes, this thread's VisualStyleRenderer
    // for the checkbox element with the requested state id.
    private static void InitializeRenderer(int state) {
        if (visualStyleRenderer == null) {
            visualStyleRenderer = new VisualStyleRenderer(CheckBoxElement.ClassName, CheckBoxElement.Part, state);
        }
        else {
            visualStyleRenderer.SetParameters(CheckBoxElement.ClassName, CheckBoxElement.Part, state);
        }
    }
}
}
| |
using System;
using System.IO;
using System.Linq;
namespace SteamBot
{
public class Log : IDisposable
{
// Severity / category of a log entry.  Declaration order is significant:
// an entry is emitted to a sink only when its level compares >= that
// sink's threshold (see _OutputLine), so later members are "louder".
public enum LogLevel
{
    Debug,
    Info,
    Success,
    Warn,
    CheekyWizard,
    LuckyCat,
    Ban,
    Error,
    MajorError,
    Interface, // if the user needs to input something
    Craft,
    Chat,
    Trade,
    Admin,
    Announcement,
    Message,
    TF2,
    Nothing // not recommended; it basically silences
            // the console output because nothing is
            // greater than it. even if the bot needs
            // input, it won't be shown in the console.
}
// Writer for the on-disk log file; opened in the constructor with AutoFlush on.
protected StreamWriter _FileStream;
// Bot name prefixed to each entry (empty string by default; null means "(System)").
protected string _botName;
// Set by Dispose; _OutputLine becomes a no-op once true.
private bool disposed;
// Minimum level echoed to the console.
public LogLevel OutputLevel;
// Minimum level written to the log file.
public LogLevel FileLogLevel;
// Color the console is restored to after each colored line.
public ConsoleColor DefaultConsoleColor = ConsoleColor.White;
// Whether the bot name is included in the entry prefix (see GetLogBotName).
public bool ShowBotName { get; set; }
// Opens (or creates) the log file under "<StartupPath>\logs" and configures
// the console/file thresholds.
//
// BUG FIX: the directory was created under Application.StartupPath, but the
// file was opened via Path.Combine("logs", logFile) — i.e. relative to the
// *current working directory*.  When the process was launched from any other
// directory, File.AppendText threw DirectoryNotFoundException.  Both now use
// the same, StartupPath-anchored directory.
public Log(string logFile, string botName = "", LogLevel consoleLogLevel = LogLevel.Info, LogLevel fileLogLevel = LogLevel.Info)
{
    string logsDirectory = Path.Combine(System.Windows.Forms.Application.StartupPath, "logs");
    Directory.CreateDirectory(logsDirectory);
    _FileStream = File.AppendText(Path.Combine(logsDirectory, logFile));
    _FileStream.AutoFlush = true; // flush each line so entries survive a crash
    _botName = botName;
    OutputLevel = consoleLogLevel;
    FileLogLevel = fileLogLevel;
    Console.ForegroundColor = DefaultConsoleColor;
    ShowBotName = true;
}
// Finalizer: last-chance cleanup if the caller never called Dispose.
// NOTE(review): Dispose(bool) is defined elsewhere in this class; presumably
// it follows the standard dispose pattern (false = don't touch managed state).
~Log()
{
    Dispose(false);
}
// This outputs a log entry of the level info.
// formatParams are optional String.Format arguments applied to data.
public void Info(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Info, data, formatParams);
}
// This outputs a log entry of the level debug.
// formatParams are optional String.Format arguments applied to data.
public void Debug(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Debug, data, formatParams);
}
// This outputs a log entry of the level success.
// formatParams are optional String.Format arguments applied to data.
public void Success(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Success, data, formatParams);
}
// This outputs a log entry of the level warn.
// formatParams are optional String.Format arguments applied to data.
public void Warn(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Warn, data, formatParams);
}
// This outputs a log entry of the level ban.
// formatParams are optional String.Format arguments applied to data.
public void Ban(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Ban, data, formatParams);
}
// This outputs a log entry of the level error.
// formatParams are optional String.Format arguments applied to data.
public void Error(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Error, data, formatParams);
}
// This outputs a log entry of the level lucky cat.
// (Comment previously said "error" — copy-paste; the call uses LogLevel.LuckyCat.)
public void LuckyCat(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.LuckyCat, data, formatParams);
}
// This outputs a log entry of the level cheeky wizard.
public void CheekyWizard(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.CheekyWizard, data, formatParams);
}
// This outputs a log entry of the level major error.
public void MajorError(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.MajorError, data, formatParams);
}
// This outputs a log entry of the level admin.
public void Admin(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Admin, data, formatParams);
}
// This outputs a log entry of the level chat.
public void Chat(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Chat, data, formatParams);
}
// This outputs a log entry of the level craft.
public void Craft(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Craft, data, formatParams);
}
// This outputs a log entry of the level trade.
public void Trade(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Trade, data, formatParams);
}
// This outputs a log entry of the level announcement.
public void Announcement(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Announcement, data, formatParams);
}
// This outputs a log entry of the level message.
public void Message(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Message, data, formatParams);
}
// This outputs a log entry of the level tf2.
public void TF2(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.TF2, data, formatParams);
}
// This outputs a log entry of the level interface;
// normally, this means that some sort of user interaction
// is required.  (Only shown if OutputLevel is not set above Interface.)
public void Interface(string data, params object[] formatParams)
{
    _OutputLine(LogLevel.Interface, data, formatParams);
}
// Formats one entry ("[<bot> <timestamp>] LEVEL: message") and writes it to
// the log file and/or the console, depending on the configured thresholds.
// No-op after the log has been disposed.
protected void _OutputLine(LogLevel level, string line, params object[] formatParams)
{
    if (disposed)
        return;

    // Expand the caller's format arguments first (only when some were given).
    string message = line;
    if (formatParams != null && formatParams.Any())
    {
        message = String.Format(line, formatParams);
    }

    string timestamp = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");
    string entry = String.Format(
        "[{0}{1}] {2}: {3}",
        GetLogBotName(), timestamp, _LogLevel(level).ToUpper(), message);

    if (level >= FileLogLevel)
    {
        _FileStream.WriteLine(entry);
    }
    if (level >= OutputLevel)
    {
        _OutputLineToConsole(level, entry);
    }
}
// Produces the name prefix used in log entries: "(System) " before a bot is
// attached, the bot's name when ShowBotName is enabled, otherwise nothing.
private string GetLogBotName()
{
    if (_botName == null)
        return "(System) ";

    return ShowBotName ? _botName + " " : "";
}
// Outputs a line to the console, with the correct color
// formatting for the level, restoring the default colour afterwards.
protected void _OutputLineToConsole(LogLevel level, string line)
{
    Console.ForegroundColor = _LogColor(level);
    Console.WriteLine(line);
    Console.ForegroundColor = DefaultConsoleColor;
}
// Maps a LogLevel to the lower-case label shown in log output;
// unknown values map to "undef".
protected string _LogLevel(LogLevel level) => level switch
{
    LogLevel.Info => "info",
    LogLevel.Debug => "debug",
    LogLevel.Success => "success",
    LogLevel.Warn => "warn",
    LogLevel.Ban => "chat: ban",
    LogLevel.CheekyWizard => "cheeky wizard",
    LogLevel.LuckyCat => "lucky cat",
    LogLevel.Error => "error",
    LogLevel.MajorError => "major error",
    LogLevel.Interface => "interface",
    LogLevel.Chat => "chat",
    LogLevel.Craft => "craft",
    LogLevel.Trade => "trade",
    LogLevel.Admin => "admin",
    LogLevel.Announcement => "announcement",
    LogLevel.Message => "message",
    LogLevel.TF2 => "tf2",
    LogLevel.Nothing => "nothing",
    _ => "undef",
};
// Determines the console colour used when printing an entry of the given
// level; levels without an explicit colour use the default console colour.
protected ConsoleColor _LogColor(LogLevel level) => level switch
{
    LogLevel.Debug => ConsoleColor.White,
    LogLevel.Success => ConsoleColor.Green,
    LogLevel.Warn => ConsoleColor.Yellow,
    LogLevel.Ban or LogLevel.Error => ConsoleColor.Red,
    LogLevel.CheekyWizard or LogLevel.LuckyCat or LogLevel.Admin => ConsoleColor.Magenta,
    LogLevel.MajorError => ConsoleColor.DarkRed,
    LogLevel.Interface => ConsoleColor.DarkCyan,
    LogLevel.Chat => ConsoleColor.Cyan,
    LogLevel.Craft => ConsoleColor.DarkGray,
    LogLevel.Announcement or LogLevel.Message => ConsoleColor.DarkGreen,
    LogLevel.Trade => ConsoleColor.Blue,
    LogLevel.TF2 => ConsoleColor.DarkYellow,
    _ => DefaultConsoleColor,
};
// Core of the dispose pattern: releases the log's file stream exactly once.
// Safe to call repeatedly — subsequent calls are no-ops.
private void Dispose(bool disposing)
{
    if (disposed)
        return;
    if (disposing)
        _FileStream.Dispose(); // only managed resources to release here
    disposed = true;
}
// Public entry point of the dispose pattern; suppresses finalization since
// cleanup has already happened.
public void Dispose()
{
    Dispose(true);
    GC.SuppressFinalize(this);
}
}
}
| |
using System;
using System.Configuration;
namespace NuSync.Api.Settings
{
/// <summary>
/// uSync Settings -
///
/// reads the uSync bit of the Web.Config
///
/// <uSync>
/// <Settings>
/// <add
/// read="true" - read the uSync directory on startup
/// write="false" - write the uSync directory on startup
/// attach="true" - attach the events to save on the fly
/// folder="~/uSync/" - place to put files
/// archive="~/uSync.Archive/" - place to archive files
/// versions="true" - store versions at every save
/// />
/// </Settings>
/// </uSync>
///
/// </summary>
public class uSyncSettingsSection : ConfigurationSection
{
    /// <summary>Read the uSync directory on startup.</summary>
    [ConfigurationProperty("read", DefaultValue = "true", IsRequired = false)]
    public Boolean Read
    {
        get => (Boolean)this["read"];
        set => this["read"] = value;
    }

    /// <summary>Write the uSync directory on startup.</summary>
    [ConfigurationProperty("write", DefaultValue = "false", IsRequired = false)]
    public Boolean Write
    {
        get => (Boolean)this["write"];
        set => this["write"] = value;
    }

    /// <summary>Attach the save events so changes are synced on the fly.</summary>
    [ConfigurationProperty("attach", DefaultValue = "true", IsRequired = false)]
    public Boolean Attach
    {
        get => (Boolean)this["attach"];
        set => this["attach"] = value;
    }

    /// <summary>Folder where uSync files are written.</summary>
    [ConfigurationProperty("folder", DefaultValue = "~/uSync/", IsRequired = false)]
    public String Folder
    {
        get => (String)this["folder"];
        set => this["folder"] = value;
    }

    /// <summary>Folder where archived files are kept.</summary>
    [ConfigurationProperty("archive", DefaultValue = "~/uSync.archive/", IsRequired = false)]
    public String Archive
    {
        get => (String)this["archive"];
        set => this["archive"] = value;
    }

    /// <summary>Store a version at every save.</summary>
    [ConfigurationProperty("versions", DefaultValue = "true", IsRequired = false)]
    public Boolean Versions
    {
        get => (Boolean)this["versions"];
        set => this["versions"] = value;
    }

    /// <summary>Maximum number of versions to keep; defaults to 0.</summary>
    [ConfigurationProperty("maxVersions", DefaultValue = 0, IsRequired = false)]
    public int MaxVersions
    {
        get => (int)this["maxVersions"];
        set => this["maxVersions"] = value;
    }

    [ConfigurationProperty("preserve", DefaultValue = "true", IsRequired = false)]
    public Boolean Preserve
    {
        get => (Boolean)this["preserve"];
        set => this["preserve"] = value;
    }

    [ConfigurationProperty("PreservedPreValues")]
    public uSyncPreservedPreValues PreservedPreValues =>
        (uSyncPreservedPreValues)this["PreservedPreValues"];

    // NOTE(review): the config element is "MatchedPreValues" while the property
    // is named MatchPreValues; renaming either would break existing callers or
    // existing config files, so the mismatch is kept as-is.
    [ConfigurationProperty("MatchedPreValues")]
    public uSyncPreservedPreValues MatchPreValues =>
        (uSyncPreservedPreValues)this["MatchedPreValues"];

    [ConfigurationProperty("Elements", IsRequired = false)]
    public uSyncElements Elements => (uSyncElements)this["Elements"];

    [ConfigurationProperty("DocumentTypes", IsRequired = false)]
    public uSyncDocTypeSettings DocTypeSettings => (uSyncDocTypeSettings)this["DocumentTypes"];

    [ConfigurationProperty("DataTypes", IsRequired = false)]
    public uSyncDataTypeSettings DataTypeSettings => (uSyncDataTypeSettings)this["DataTypes"];

    [ConfigurationProperty("watchFolder", DefaultValue = "false", IsRequired = false)]
    public Boolean WatchFolder
    {
        get => (Boolean)this["watchFolder"];
        set => this["watchFolder"] = value;
    }

    [ConfigurationProperty("dontThrowErrors", DefaultValue = "false", IsRequired = false)]
    public Boolean DontThrowErrors
    {
        get => (Boolean)this["dontThrowErrors"];
        set => this["dontThrowErrors"] = value;
    }

    [ConfigurationProperty("quickUpdate", DefaultValue = "false", IsRequired = false)]
    public Boolean QuickUpdates
    {
        get => (Boolean)this["quickUpdate"];
        set => this["quickUpdate"] = value;
    }
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Threading.Tasks;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Effects;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Framework.Graphics.Textures;
using osu.Framework.Input.Events;
using osu.Framework.Localisation;
using osu.Game.Beatmaps;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Graphics.UserInterface;
using osu.Game.Overlays.Music;
using osuTK;
using osuTK.Graphics;
namespace osu.Game.Overlays
{
public class NowPlayingOverlay : OsuFocusedOverlayContainer
{
    private const float player_height = 130;
    private const float transition_length = 800;
    private const float progress_height = 10;
    private const float bottom_black_area_height = 55;

    // Currently displayed beatmap background; replaced (via Expire) whenever
    // the track changes.
    private Drawable background;
    private ProgressBar progressBar;
    private IconButton prevButton;
    private IconButton playButton;
    private IconButton nextButton;
    private IconButton playlistButton;
    private SpriteText title, artist;
    private PlaylistOverlay playlist;
    private Container dragContainer;
    private Container playerContainer;

    /// <summary>
    /// Provide a source for the toolbar height.
    /// </summary>
    public Func<float> GetToolbarHeight;

    [Resolved]
    private MusicController musicController { get; set; }

    [Resolved]
    private Bindable<WorkingBeatmap> beatmap { get; set; }

    [Resolved]
    private OsuColour colours { get; set; }

    public NowPlayingOverlay()
    {
        Width = 400;
        Margin = new MarginPadding(10);
    }

    // Builds the full drawable hierarchy: the playlist overlay below the
    // player panel, the panel's background/title/artist, the transport
    // buttons, and the seekable progress bar.
    [BackgroundDependencyLoader]
    private void load()
    {
        Children = new Drawable[]
        {
            dragContainer = new DragContainer
            {
                Anchor = Anchor.Centre,
                Origin = Anchor.Centre,
                RelativeSizeAxes = Axes.X,
                AutoSizeAxes = Axes.Y,
                Children = new Drawable[]
                {
                    playlist = new PlaylistOverlay
                    {
                        RelativeSizeAxes = Axes.X,
                        Y = player_height + 10,
                    },
                    playerContainer = new Container
                    {
                        RelativeSizeAxes = Axes.X,
                        Height = player_height,
                        Masking = true,
                        CornerRadius = 5,
                        EdgeEffect = new EdgeEffectParameters
                        {
                            Type = EdgeEffectType.Shadow,
                            Colour = Color4.Black.Opacity(40),
                            Radius = 5,
                        },
                        Children = new[]
                        {
                            background = new Background(),
                            title = new OsuSpriteText
                            {
                                Origin = Anchor.BottomCentre,
                                Anchor = Anchor.TopCentre,
                                Position = new Vector2(0, 40),
                                Font = OsuFont.GetFont(size: 25, italics: true),
                                Colour = Color4.White,
                                Text = @"Nothing to play",
                            },
                            artist = new OsuSpriteText
                            {
                                Origin = Anchor.TopCentre,
                                Anchor = Anchor.TopCentre,
                                Position = new Vector2(0, 45),
                                Font = OsuFont.GetFont(size: 15, weight: FontWeight.Bold, italics: true),
                                Colour = Color4.White,
                                Text = @"Nothing to play",
                            },
                            new Container
                            {
                                Padding = new MarginPadding { Bottom = progress_height },
                                Height = bottom_black_area_height,
                                RelativeSizeAxes = Axes.X,
                                Origin = Anchor.BottomCentre,
                                Anchor = Anchor.BottomCentre,
                                Children = new Drawable[]
                                {
                                    new FillFlowContainer<IconButton>
                                    {
                                        AutoSizeAxes = Axes.Both,
                                        Direction = FillDirection.Horizontal,
                                        Spacing = new Vector2(5),
                                        Origin = Anchor.Centre,
                                        Anchor = Anchor.Centre,
                                        Children = new[]
                                        {
                                            prevButton = new MusicIconButton
                                            {
                                                Anchor = Anchor.Centre,
                                                Origin = Anchor.Centre,
                                                Action = () => musicController.PreviousTrack(),
                                                Icon = FontAwesome.Solid.StepBackward,
                                            },
                                            playButton = new MusicIconButton
                                            {
                                                Anchor = Anchor.Centre,
                                                Origin = Anchor.Centre,
                                                Scale = new Vector2(1.4f),
                                                IconScale = new Vector2(1.4f),
                                                Action = () => musicController.TogglePause(),
                                                Icon = FontAwesome.Regular.PlayCircle,
                                            },
                                            nextButton = new MusicIconButton
                                            {
                                                Anchor = Anchor.Centre,
                                                Origin = Anchor.Centre,
                                                Action = () => musicController.NextTrack(),
                                                Icon = FontAwesome.Solid.StepForward,
                                            },
                                        }
                                    },
                                    playlistButton = new MusicIconButton
                                    {
                                        Origin = Anchor.Centre,
                                        Anchor = Anchor.CentreRight,
                                        Position = new Vector2(-bottom_black_area_height / 2, 0),
                                        Icon = FontAwesome.Solid.Bars,
                                        Action = () => playlist.ToggleVisibility(),
                                    },
                                }
                            },
                            progressBar = new HoverableProgressBar
                            {
                                Origin = Anchor.BottomCentre,
                                Anchor = Anchor.BottomCentre,
                                Height = progress_height / 2,
                                FillColour = colours.Yellow,
                                BackgroundColour = colours.YellowDarker.Opacity(0.5f),
                                OnSeek = musicController.SeekTo
                            }
                        },
                    },
                }
            }
        };
    }

    protected override void LoadComplete()
    {
        base.LoadComplete();
        playlist.BeatmapSets.BindTo(musicController.BeatmapSets);
        // Tint the playlist toggle button yellow while the playlist is visible.
        playlist.State.BindValueChanged(s => playlistButton.FadeColour(s.NewValue == Visibility.Visible ? colours.Yellow : Color4.White, 200, Easing.OutQuint), true);
        beatmap.BindDisabledChanged(beatmapDisabledChanged, true);
        musicController.TrackChanged += trackChanged;
        trackChanged(beatmap.Value);
    }

    protected override void PopIn()
    {
        base.PopIn();
        this.FadeIn(transition_length, Easing.OutQuint);
        dragContainer.ScaleTo(1, transition_length, Easing.OutElastic);
    }

    protected override void PopOut()
    {
        base.PopOut();
        this.FadeOut(transition_length, Easing.OutQuint);
        dragContainer.ScaleTo(0.9f, transition_length, Easing.OutQuint);
    }

    protected override void UpdateAfterChildren()
    {
        base.UpdateAfterChildren();
        // Track the drag container's height (it grows when the playlist opens)
        // and keep the overlay below the toolbar.
        Height = dragContainer.Height;
        dragContainer.Padding = new MarginPadding { Top = GetToolbarHeight?.Invoke() ?? 0 };
    }

    protected override void Update()
    {
        base.Update();
        // Run (and clear) any beatmap switch deferred by trackChanged.
        if (pendingBeatmapSwitch != null)
        {
            pendingBeatmapSwitch();
            pendingBeatmapSwitch = null;
        }
        var track = beatmap.Value?.TrackLoaded ?? false ? beatmap.Value.Track : null;
        if (track?.IsDummyDevice == false)
        {
            progressBar.EndTime = track.Length;
            progressBar.CurrentTime = track.CurrentTime;
            playButton.Icon = track.IsRunning ? FontAwesome.Regular.PauseCircle : FontAwesome.Regular.PlayCircle;
        }
        else
        {
            // No real track loaded: show an empty progress bar in play state.
            progressBar.CurrentTime = 0;
            progressBar.EndTime = 1;
            playButton.Icon = FontAwesome.Regular.PlayCircle;
        }
    }

    // Set by trackChanged and invoked/cleared in Update().
    private Action pendingBeatmapSwitch;

    private void trackChanged(WorkingBeatmap beatmap, TrackChangeDirection direction = TrackChangeDirection.None)
    {
        // avoid using scheduler as our scheduler may not be run for a long time, holding references to beatmaps.
        pendingBeatmapSwitch = delegate
        {
            // todo: this can likely be replaced with WorkingBeatmap.GetBeatmapAsync()
            Task.Run(() =>
            {
                if (beatmap?.Beatmap == null) //this is not needed if a placeholder exists
                {
                    title.Text = @"Nothing to play";
                    artist.Text = @"Nothing to play";
                }
                else
                {
                    BeatmapMetadata metadata = beatmap.Metadata;
                    title.Text = new LocalisedString((metadata.TitleUnicode, metadata.Title));
                    artist.Text = new LocalisedString((metadata.ArtistUnicode, metadata.Artist));
                }
            });
            // Slide the new background in from the side matching the change
            // direction, expiring the old one once it has moved out.
            LoadComponentAsync(new Background(beatmap) { Depth = float.MaxValue }, newBackground =>
            {
                switch (direction)
                {
                    case TrackChangeDirection.Next:
                        newBackground.Position = new Vector2(400, 0);
                        newBackground.MoveToX(0, 500, Easing.OutCubic);
                        background.MoveToX(-400, 500, Easing.OutCubic);
                        break;
                    case TrackChangeDirection.Prev:
                        newBackground.Position = new Vector2(-400, 0);
                        newBackground.MoveToX(0, 500, Easing.OutCubic);
                        background.MoveToX(400, 500, Easing.OutCubic);
                        break;
                }
                background.Expire();
                background = newBackground;
                playerContainer.Add(newBackground);
            });
        };
    }

    // Disables all transport controls while the beatmap bindable is locked.
    private void beatmapDisabledChanged(bool disabled)
    {
        if (disabled)
            playlist.Hide();
        playButton.Enabled.Value = !disabled;
        prevButton.Enabled.Value = !disabled;
        nextButton.Enabled.Value = !disabled;
        playlistButton.Enabled.Value = !disabled;
    }

    protected override void Dispose(bool isDisposing)
    {
        base.Dispose(isDisposing);
        // Unsubscribe to avoid the controller keeping this overlay alive.
        if (musicController != null)
            musicController.TrackChanged -= trackChanged;
    }

    // Icon button whose content is fixed-size while the button itself
    // autosizes, so scale animations resize it correctly.
    private class MusicIconButton : IconButton
    {
        public MusicIconButton()
        {
            AutoSizeAxes = Axes.Both;
        }

        [BackgroundDependencyLoader]
        private void load(OsuColour colours)
        {
            HoverColour = colours.YellowDark.Opacity(0.6f);
            FlashColour = colours.Yellow;
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();
            // works with AutoSizeAxes above to make buttons autosize with the scale animation.
            Content.AutoSizeAxes = Axes.None;
            Content.Size = new Vector2(DEFAULT_BUTTON_SIZE);
        }
    }

    // Dimmed beatmap background with a black strip behind the transport area.
    private class Background : BufferedContainer
    {
        private readonly Sprite sprite;
        private readonly WorkingBeatmap beatmap;

        public Background(WorkingBeatmap beatmap = null)
        {
            this.beatmap = beatmap;
            Depth = float.MaxValue;
            RelativeSizeAxes = Axes.Both;
            CacheDrawnFrameBuffer = true;
            Children = new Drawable[]
            {
                sprite = new Sprite
                {
                    RelativeSizeAxes = Axes.Both,
                    Colour = OsuColour.Gray(150),
                    FillMode = FillMode.Fill,
                },
                new Box
                {
                    RelativeSizeAxes = Axes.X,
                    Height = bottom_black_area_height,
                    Origin = Anchor.BottomCentre,
                    Anchor = Anchor.BottomCentre,
                    Colour = Color4.Black.Opacity(0.5f)
                }
            };
        }

        [BackgroundDependencyLoader]
        private void load(TextureStore textures)
        {
            // Fall back to the default background when no beatmap is supplied.
            sprite.Texture = beatmap?.Background ?? textures.Get(@"Backgrounds/bg4");
        }
    }

    private class DragContainer : Container
    {
        protected override bool OnDragStart(DragStartEvent e)
        {
            return true;
        }

        protected override void OnDrag(DragEvent e)
        {
            Vector2 change = e.MousePosition - e.MouseDownPosition;
            // Diminish the drag distance as we go further to simulate "rubber band" feeling.
            change *= change.Length <= 0 ? 0 : MathF.Pow(change.Length, 0.7f) / change.Length;
            this.MoveTo(change);
        }

        protected override void OnDragEnd(DragEndEvent e)
        {
            // Spring back to the resting position.
            this.MoveTo(Vector2.Zero, 800, Easing.OutElastic);
            base.OnDragEnd(e);
        }
    }

    // Progress bar which grows to full height while hovered.
    private class HoverableProgressBar : ProgressBar
    {
        protected override bool OnHover(HoverEvent e)
        {
            this.ResizeHeightTo(progress_height, 500, Easing.OutQuint);
            return base.OnHover(e);
        }

        protected override void OnHoverLost(HoverLostEvent e)
        {
            this.ResizeHeightTo(progress_height / 2, 500, Easing.OutQuint);
            base.OnHoverLost(e);
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclassautoprop.regclassautoprop;
using Xunit;
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclassautoprop.regclassautoprop
{
// Simple reference type used as a property payload in the tests below.
public class MyClass
{
    public int Field = 0;
}
// Simple value type used as a property payload in the tests below.
public struct MyStruct
{
    public int Number;
}
// Simple enum used as a property payload in the tests below.
public enum MyEnum
{
    First = 1,
    Second = 2,
    Third = 3
}
// Exposes auto-properties covering many types and accessor accessibilities
// for the dynamic-binding conformance tests below. The accessor order and the
// access modifiers are part of what is being tested — do not change them.
public class MemberClass
{
    // --- scalar / reference-type properties ---
    public string Property_string { set; get; }
    public MyClass Property_MyClass { get; set; }
    public MyStruct Property_MyStruct { set; get; }
    public MyEnum Property_MyEnum { set; private get; }
    public short Property_short { set; protected get; }
    public ulong Property_ulong { set; protected internal get; }
    public char Property_char { private set; get; }
    public bool Property_bool { protected set; get; }
    public decimal Property_decimal { protected internal set; get; }

    // --- nullable properties ---
    public MyStruct? Property_MyStructNull { set; get; }
    public MyEnum? Property_MyEnumNull { set; private get; }
    public short? Property_shortNull { set; get; }
    public ulong? Property_ulongNull { set; protected internal get; }
    public char? Property_charNull { private set; get; }
    public bool? Property_boolNull { protected set; get; }
    public decimal? Property_decimalNull { protected internal set; get; }

    // --- array properties ---
    public string[] Property_stringArr { set; get; }
    public MyClass[] Property_MyClassArr { set; get; }
    public MyStruct[] Property_MyStructArr { get; set; }
    public MyEnum[] Property_MyEnumArr { set; private get; }
    public short[] Property_shortArr { set; protected get; }
    public ulong[] Property_ulongArr { set; protected internal get; }
    public char[] Property_charArr { private set; get; }
    public bool[] Property_boolArr { protected set; get; }
    public decimal[] Property_decimalArr { protected internal set; get; }

    // --- arrays of nullable elements ---
    public MyStruct?[] Property_MyStructNullArr { set; get; }
    public MyEnum?[] Property_MyEnumNullArr { set; private get; }
    public short?[] Property_shortNullArr { set; protected get; }
    public ulong?[] Property_ulongNullArr { set; protected internal get; }
    public char?[] Property_charNullArr { private set; get; }
    public bool?[] Property_boolNullArr { protected set; get; }
    public decimal?[] Property_decimalNullArr { protected internal set; get; }

    // --- miscellaneous ---
    public float Property_Float { get; set; }
    public float?[] Property_FloatNullArr { get; set; }
    public dynamic Property_Dynamic { get; set; }
    public static string Property_stringStatic { set; get; }

    // Move declarations to the call site
}
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass001.regclass001
{
// <Title> Tests regular class auto property used in generic method body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property from inside generic method bodies.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        Test t1 = new Test();
        int failures = t1.TestGetMethod<long>(1, new MemberClass())
                     + t1.TestSetMethod<Test, string>(string.Empty, new MemberClass());
        return failures == 0 ? 0 : 1;
    }

    public int TestGetMethod<T>(T t, MemberClass mc)
    {
        mc.Property_string = "Test";
        dynamic dy = mc;
        return (string)dy.Property_string != "Test" ? 1 : 0;
    }

    public int TestSetMethod<U, V>(V v, MemberClass mc)
    {
        dynamic dy = mc;
        dy.Property_string = "Test";
        mc = dy; //because we might change the property on a boxed version of it if MemberClass is a struct
        return mc.Property_string != "Test" ? 1 : 0;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass003.regclass003
{
// <Title> Tests regular class auto property used in variable initializer.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property used in a variable initializer.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        string[] bas = { "Test", string.Empty, null };
        MemberClass mc = new MemberClass();
        mc.Property_stringArr = bas;
        dynamic dy = mc;
        string[] loc = dy.Property_stringArr;
        // The getter must return the very same array instance, unmodified.
        bool passed = ReferenceEquals(bas, loc) && loc[0] == "Test" && loc[1] == string.Empty && loc[2] == null;
        return passed ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass004.regclass004
{
// <Title> Tests regular class auto property used in implicitly-typed array initializer.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property in an implicitly-typed array initializer.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        MemberClass mc1 = new MemberClass();
        MemberClass mc2 = new MemberClass();
        mc1.Property_MyStructNull = null;
        mc2.Property_MyStructNull = new MyStruct { Number = 1 };
        dynamic dy1 = mc1;
        dynamic dy2 = mc2;
        var loc = new MyStruct?[]
        {
            (MyStruct?)dy1.Property_MyStructNull,
            (MyStruct?)dy2.Property_MyStructNull
        };
        bool passed = loc.Length == 2 && loc[0] == null && loc[1].Value.Number == 1;
        return passed ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass005.regclass005
{
// <Title> Tests regular class auto property used in operator.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property used with the + operator.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        MemberClass left = new MemberClass();
        MemberClass right = new MemberClass();
        left.Property_string = "a";
        right.Property_string = "b";
        dynamic dyLeft = left;
        dynamic dyRight = right;
        string concatenated = (string)dyLeft.Property_string + (string)dyRight.Property_string;
        return concatenated == "ab" ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass006.regclass006
{
// <Title> Tests regular class auto property used in null coalescing operator.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property with the null-coalescing operator.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        mc.Property_MyStructArr = new[]
        {
            new MyStruct { Number = 0 },
            new MyStruct { Number = 1 }
        };
        dynamic dy = mc;
        // Property_string has not been set yet, so the coalesce kicks in.
        string s1 = ((string)dy.Property_string) ?? string.Empty;
        mc.Property_string = "Test";
        dy = mc;
        MyStruct[] b1 = ((MyStruct[])dy.Property_MyStructArr) ?? (new MyStruct[1]);
        MyStruct[] b2 = ((MyStruct[])dy.Property_MyStructArr) ?? (new MyStruct[1]);
        string s2 = ((string)dy.Property_string) ?? string.Empty;
        bool passed = b1.Length == 2 && s1 == string.Empty && b2.Length == 2 && s2 == "Test";
        return passed ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass007.regclass007
{
// <Title> Tests regular class auto property used in destructor.</Title>
// <Description>
// On IA64 the GC.WaitForPendingFinalizers() does not actually work...
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using System;
using System.Runtime.CompilerServices;
// Finalizer-based test: the exact Collect/WaitForPendingFinalizers sequence
// and the locking around s_field are essential — do not restructure.
public class Test
{
    private static string s_field;
    public static object locker = new object();
    // The finalizer reads the property through dynamic and records the value.
    ~Test()
    {
        lock (locker)
        {
            MemberClass mc = new MemberClass();
            mc.Property_string = "Test";
            dynamic dy = mc;
            s_field = dy.Property_string;
        }
    }
    private static int Verify()
    {
        lock (Test.locker)
        {
            if (Test.s_field != "Test")
            {
                return 1;
            }
        }
        return 0;
    }
    // NoInlining confines the Test instance's lifetime to this method so it is
    // collectible by the time MainMethod calls GC.Collect().
    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void RequireLifetimesEnded()
    {
        Test t = new Test();
        Test.s_field = "Field";
        GC.KeepAlive(t);
    }
    [ConditionalFact(typeof(PlatformDetection), nameof(PlatformDetection.IsPreciseGcSupported))]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        RequireLifetimesEnded();
        GC.Collect();
        GC.WaitForPendingFinalizers();
        // If move the code in Verify() to here, the finalizer will only be executed after exited Main
        return Verify();
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass008.regclass008
{
// <Title> Tests regular class auto property used in extension method body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property from extension method bodies.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        int a1 = 10;
        MyStruct ms1 = a1.TestSetMyStruct();
        MyStruct ms2 = a1.TestGetMyStruct();
        bool passed = ms1.Number == 10 && ms2.Number == 10;
        return passed ? 0 : 1;
    }
}
// Extension methods which set/get MemberClass.Property_MyStruct dynamically.
public static class Extension
{
    public static MyStruct TestSetMyStruct(this int i)
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        dy.Property_MyStruct = new MyStruct { Number = i };
        mc = dy; //because MC might be a struct
        return mc.Property_MyStruct;
    }

    public static MyStruct TestGetMyStruct(this int i)
    {
        MemberClass mc = new MemberClass();
        mc.Property_MyStruct = new MyStruct { Number = i };
        dynamic dy = mc;
        return (MyStruct)dy.Property_MyStruct;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass009.regclass009
{
// <Title> Tests regular class auto property used in variable initializer.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property used in a variable initializer.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        // An unset char auto-property yields default(char).
        char value = (char)dy.Property_char;
        return value == default(char) ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass010.regclass010
{
// <Title> Tests regular class auto property used in array initializer list.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property in an array initializer list.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        bool[] flags =
        {
            (bool)dy.Property_bool,
            true
        };
        bool passed = flags.Length == 2 && flags[0] == false && flags[1] == true;
        return passed ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass014.regclass014
{
// <Title> Tests regular class auto property used in for loop body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// NOTE(review): unlike the sibling tests, DynamicCSharpRunTest here carries no
// [Fact] attribute — presumably deliberately disabled; confirm before re-enabling.
public class Test
{
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        dynamic mc = new MemberClass();
        // Includes the extreme ulong values to exercise the dynamic setter.
        ulong[] array = new ulong[]
        {
            1L, 2L, 3L, ulong.MinValue, ulong.MaxValue
        }
        ;
        for (int i = 0; i < array.Length; i++)
        {
            mc.Property_ulong = array[i];
        }
        // The getter is protected internal, so this access from the same
        // assembly is permitted.
        ulong x = (ulong)mc.Property_ulong;
        return 0;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass015.regclass015
{
// <Title> Tests regular class auto property used in foreach expression.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using System.Collections.Generic;
// Exercises a regular class auto property as a foreach expression source.
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        mc.Property_MyClassArr = new[]
        {
            null,
            new MyClass { Field = -1 }
        };
        dynamic dy = mc;
        List<MyClass> collected = new List<MyClass>();
        foreach (MyClass item in dy.Property_MyClassArr)
        {
            collected.Add(item);
        }
        bool passed = collected.Count == 2 && collected[0] == null && collected[1].Field == -1;
        return passed ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass016.regclass016
{
// <Title> Tests regular class auto property used in while body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises an inherited auto property inside a while-loop body.
public class Test : MemberClass
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        Test mc = new Test();
        dynamic dy = mc;
        short current = 0;
        short previous = 0;
        while (current < 10)
        {
            previous = current;
            dy.Property_shortNull = current;
            // Round-trip through the dynamic property, then advance by one.
            current = (short)((short)dy.Property_shortNull + 1);
            if (current != previous + 1)
                return 1;
        }
        return 0;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass018.regclass018
{
// <Title> Tests regular class auto property used in uncheck expression.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// NOTE(review): DynamicCSharpRunTest lacks the [Fact] attribute found on the
// sibling tests — presumably deliberately disabled; confirm before re-enabling.
public class Test
{
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        ulong result = 1;
        dy.Property_ulongNull = ulong.MaxValue;
        // unchecked: ulong.MaxValue + 1 wraps around to 0, the success value.
        result = unchecked(dy.Property_ulongNull + 1); //0
        return (int)result;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass019.regclass019
{
// <Title> Tests regular class auto property used in static constructor.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property inside a static constructor.
public class Test
{
    private static char? s_charValue = 'a';

    static Test()
    {
        dynamic dy = new MemberClass();
        // Overwrites the field initializer's value with the unset property (null).
        s_charValue = dy.Property_charNull;
    }

    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        return Test.s_charValue == null ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass020.regclass020
{
// <Title> Tests regular class auto property used in variable named dynamic.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a regular class auto property through a variable named "dynamic".
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }

    public static int MainMethod()
    {
        dynamic dynamic = new MemberClass();
        return dynamic.Property_boolNull == null ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass022.regclass022
{
// <Title> Tests regular class auto property used in field initailizer outside of constructor.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
// Exercises a dynamic auto-property access in an instance field initializer.
public class Test
{
    // s_dy is initialized (static initialization) before the instance field
    // initializer below reads it.
    private static dynamic s_dy = new MemberClass();
    // Property_charArr was never set, so this initializer yields null.
    private char[] _result = s_dy.Property_charArr;
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        Test t = new Test();
        if (t._result == null)
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass023.regclass023
{
// <Title> Tests regular class auto property used in static generic method body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        return TestMethod<Test>();
    }
    // Dynamic property assignment inside a static generic method body is
    // the construct under test here.
    private static int TestMethod<T>()
    {
        dynamic dy = new MemberClass();
        dy.Property_MyEnumArr = new MyEnum[0];
        return 0;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass024.regclass024
{
// <Title> Tests regular class auto property used in static generic method body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
// <Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        return TestMethod<int>();
    }
    private static int TestMethod<T>()
    {
        dynamic dy = new MemberClass();
        dy.Property_shortArr = new short[2];
        try
        {
            // The getter is protected, so the dynamic binder is expected
            // to throw an inaccessible-getter RuntimeBinderException here.
            short[] result = dy.Property_shortArr; // protected
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
        {
            if (ErrorVerifier.Verify(ErrorMessageId.InaccessibleGetter, e.Message, "MemberClass.Property_shortArr"))
                return 0;
        }
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass025.regclass025
{
// <Title> Tests regular class auto property used in inside#if, #else block.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    // NOTE(review): no [Fact] attribute here, unlike sibling tests — this
    // method is not discovered by the test runner; confirm intentional.
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        dynamic dy = new MemberClass();
        ulong[] array = null;
        dy.Property_ulongArr = new ulong[]
        {
            0, 1
        }
        ;
#if MS
        array = new ulong[] { (ulong)dy.Property_ulong };
#else
        try
        {
            array = dy.Property_ulongArr;
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
        {
            if (ErrorVerifier.Verify(ErrorMessageId.InaccessibleGetter, e.Message, "MemberClass.Property_ulongArr"))
                return 0;
            else
            {
                System.Console.WriteLine(e);
                return 1;
            }
        }
#endif
        // different case actually
        if (array.Length == 2 && array[0] == 0 && array[1] == 1)
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass026.regclass026
{
// <Title> Tests regular class auto property used in regular method body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        if (new Test().TestMethod())
            return 0;
        return 1;
    }
    // Dynamic property read inside a regular instance method body is the
    // construct under test here.
    private bool TestMethod()
    {
        dynamic dy = new MemberClass();
        bool[] result = dy.Property_boolArr;
        return result == null;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass027.regclass027
{
// <Title> Tests regular class auto property used in using block.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using System.IO;
public class Test
{
    // NOTE(review): no [Fact] attribute here, unlike sibling tests — this
    // method is not discovered by the test runner; confirm intentional.
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        mc.Property_decimalArr = new decimal[]
        {
            1M, 1.1M
        }
        ;
        dynamic dy = mc;
        using (MemoryStream ms = new MemoryStream())
        {
            if (((decimal[])dy.Property_decimalArr)[0] != 1M && ((decimal[])dy.Property_decimalArr)[1] != 1.1M)
                return 1;
        }
        using (MemoryStream ms = new MemoryStream())
        {
            dy.Property_decimalArr = new decimal[]
            {
                10M
            }
            ;
            // Mutates an element of the array returned by the getter; the
            // final check below relies on the getter handing back the
            // stored array reference rather than a copy.
            ((decimal[])dy.Property_decimalArr)[0] = 10.01M;
        }
        if (mc.Property_decimalArr.Length == 1 && mc.Property_decimalArr[0] == 10.01M)
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass028.regclass028
{
// <Title> Tests regular class auto property used in ternary operator expression.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        Test t = new Test();
        // Both halves must return 0 for the combined result to be 0.
        return t.TestGet() + t.TestSet();
    }
    // Exercises the dynamic getter inside a ternary operator expression.
    public int TestGet()
    {
        MemberClass mc = new MemberClass();
        mc.Property_MyStructNullArr = new MyStruct?[]
        {
            null, new MyStruct()
            {
                Number = 10
            }
        }
        ;
        dynamic dy = mc;
        return (int)dy.Property_MyStructNullArr.Length == 2 ? 0 : 1;
    }
    // Exercises the dynamic setter, then reads back through the ternary.
    public int TestSet()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        dy.Property_MyStructNullArr = new MyStruct?[]
        {
            null, new MyStruct()
            {
                Number = 10
            }
        }
        ;
        mc = dy;
        return (int)dy.Property_MyStructNullArr.Length == 2 ? 0 : 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass029.regclass029
{
// <Title> Tests regular class auto property used in null coalescing operator.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        dynamic dy = new MemberClass();
        try
        {
            // The getter is private, so evaluating the left operand of the
            // null-coalescing operator is expected to throw.
            MyEnum?[] result = dy.Property_MyEnumNullArr ?? new MyEnum?[1]; //private, should have exception
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
        {
            if (ErrorVerifier.Verify(ErrorMessageId.InaccessibleGetter, e.Message, "MemberClass.Property_MyEnumNullArr"))
                return 0;
        }
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass030.regclass030
{
// <Title> Tests regular class auto property used in constructor.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    // Result is stashed in a static because the check happens inside the
    // instance constructor, which cannot return a value itself.
    public static int Return;
    public Test()
    {
        dynamic dy = new MemberClass();
        try
        {
            // public for struct
            short?[] result = dy.Property_shortNullArr; //protected, should have exception
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
        {
            if (ErrorVerifier.Verify(ErrorMessageId.InaccessibleGetter, e.Message, "MemberClass.Property_shortNullArr"))
                Test.Return = 0;
            else
                Test.Return = 1;
        }
    }
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        Test t = new Test();
        return Test.Return;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass031.regclass031
{
// <Title> Tests regular class auto property used in null coalescing operator.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    // NOTE(review): no [Fact] attribute here, unlike sibling tests — this
    // method is not discovered by the test runner; confirm intentional.
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        // The getter is expected to yield null here, so ?? picks the fallback.
        ulong?[] result1 = dy.Property_ulongNullArr ?? new ulong?[1];
        if (result1.Length != 1 || dy.Property_ulongNullArr != null)
            return 1;
        dy.Property_ulongNullArr = dy.Property_ulongNullArr ?? new ulong?[0];
        return 0;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass032.regclass032
{
// <Title> Tests regular class auto property used in static variable.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    private static dynamic s_dy = new MemberClass();
    // Static-field initializer that binds a dynamic property access is the
    // construct under test; initializer order (s_dy first) matters here.
    private static char?[] s_result = s_dy.Property_charNullArr;
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        if (s_result == null)
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass034.regclass034
{
// <Title> Tests regular class auto property used in switch section statement.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    // NOTE(review): no [Fact] attribute here, unlike sibling tests — this
    // method is not discovered by the test runner; confirm intentional.
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        int result = int.MaxValue;
        try
        {
            dy.Property_decimalNullArr = new decimal?[]
            {
                int.MinValue
            }
            ;
        }
        catch (Microsoft.CSharp.RuntimeBinder.RuntimeBinderException e)
        {
            if (ErrorVerifier.Verify(ErrorMessageId.InaccessibleSetter, e.Message, "MemberClass.Property_decimalNullArr"))
                result = int.MaxValue;
        }
        switch (result)
        {
            case int.MaxValue:
                try
                {
                    // Expected to throw NullReferenceException when the
                    // getter returns null (setter was inaccessible above).
                    result = (int)((decimal?[])dy.Property_decimalNullArr)[0];
                }
                catch (System.NullReferenceException)
                {
                    result = int.MinValue;
                }
                break;
            default:
                break;
        }
        if (result == int.MinValue)
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass035.regclass035
{
// <Title> Tests regular class auto property used in switch default section statement.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        int result = 4;
        dy.Property_Float = 4;
        switch (result)
        {
            case 4:
                // Only this arm runs; the default arm below places the
                // dynamic getter in a switch default section, which is the
                // construct under test, but is intentionally unreachable.
                dy.Property_Float = float.NaN;
                break;
            default:
                result = (int)dy.Property_Float;
                break;
        }
        if (result == 4)
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass036.regclass036
{
// <Title> Tests regular class auto property used in foreach body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        MemberClass mc = new MemberClass();
        dynamic dy = mc;
        dy.Property_FloatNullArr = new float?[]
        {
            float.Epsilon, float.MaxValue, float.MinValue, float.NaN, float.NegativeInfinity, float.PositiveInfinity
        }
        ;
        // NaN/infinity elements are checked through float.IsNaN /
        // float.IsNegativeInfinity / float.IsPositiveInfinity because a
        // direct == comparison against NaN is always false.
        if (dy.Property_FloatNullArr.Length == 6 && dy.Property_FloatNullArr[0] == float.Epsilon && dy.Property_FloatNullArr[1] == float.MaxValue && dy.Property_FloatNullArr[2] == float.MinValue && float.IsNaN((float)dy.Property_FloatNullArr[3]) && float.IsNegativeInfinity((float)dy.Property_FloatNullArr[4]) && float.IsPositiveInfinity((float)dy.Property_FloatNullArr[5]))
            return 0;
        return 1;
    }
}
//</Code>
}
namespace ManagedTests.DynamicCSharp.Conformance.dynamic.context.property.autoproperty.regclass.regclass037.regclass037
{
// <Title> Tests regular class auto property used in static method body.</Title>
// <Description>
// </Description>
// <RelatedBugs></RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
public class Test
{
    [Fact]
    public static void DynamicCSharpRunTest()
    {
        Assert.Equal(0, MainMethod());
    }
    public static int MainMethod()
    {
        // Static property access assigned into a dynamic local is the
        // construct under test here.
        MemberClass.Property_stringStatic = "Test";
        dynamic dynamic = MemberClass.Property_stringStatic;
        if ((string)dynamic == "Test")
            return 0;
        return 1;
    }
}
//</Code>
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Reflection.Metadata.Cil.Decoder;
using System.Reflection.Metadata.Cil.Visitor;
using System.Text;
namespace System.Reflection.Metadata.Cil
{
/// <summary>
/// Lazily-evaluated wrapper over a metadata <see cref="PropertyDefinition"/>.
/// Name, signature, accessors, custom attributes and the default value are
/// all decoded on first access and cached in the struct's fields.
/// </summary>
public struct CilProperty : ICilVisitable
{
    private PropertyDefinition _propertyDef;
    private CilReaders _readers;
    private string _name;
    private MethodSignature<CilType> _signature;
    private IEnumerable<CilCustomAttribute> _customAttributes;
    private CilMethodDefinition _getter;
    private CilMethodDefinition _setter;
    private CilConstant _defaultValue;
    private bool _isDefaultValueInitialized;
    private bool _isGetterInitialized;
    private bool _isSetterInitialized;
    private PropertyAccessors _accessors;
    private bool _isSignatureInitialized;
    private CilTypeDefinition _typeDefinition;
    private int _token;

    /// <summary>
    /// Creates a property wrapper. Only the cheap data (handles, token,
    /// accessor handles) is stored eagerly; everything else is lazy.
    /// </summary>
    internal static CilProperty Create(PropertyDefinition propertyDef, int token, ref CilReaders readers, CilTypeDefinition typeDefinition)
    {
        CilProperty property = new CilProperty();
        property._typeDefinition = typeDefinition;
        property._propertyDef = propertyDef;
        property._readers = readers;
        property._isSignatureInitialized = false;
        property._isDefaultValueInitialized = false;
        property._isGetterInitialized = false;
        property._isSetterInitialized = false;
        property._token = token;
        property._accessors = propertyDef.GetAccessors();
        return property;
    }

    /// <summary>Metadata token of the property row.</summary>
    internal int Token
    {
        get
        {
            return _token;
        }
    }

    /// <summary>Type definition that declares this property.</summary>
    public CilTypeDefinition DeclaringType
    {
        get
        {
            return _typeDefinition;
        }
    }

    /// <summary>Property name, read from the string heap and cached.</summary>
    public string Name
    {
        get
        {
            return CilDecoder.GetCachedValue(_propertyDef.Name, _readers, ref _name);
        }
    }

    /// <summary>True when the property has a get accessor.</summary>
    public bool HasGetter
    {
        get
        {
            return !_accessors.Getter.IsNil;
        }
    }

    /// <summary>True when the property has a set accessor.</summary>
    public bool HasSetter
    {
        get
        {
            return !_accessors.Setter.IsNil;
        }
    }

    /// <summary>True when the property row carries a default-value constant.</summary>
    public bool HasDefault
    {
        get
        {
            return Attributes.HasFlag(PropertyAttributes.HasDefault);
        }
    }

    /// <summary>
    /// The property's default-value constant.
    /// </summary>
    /// <exception cref="InvalidOperationException">The property has no default value.</exception>
    public CilConstant DefaultValue
    {
        get
        {
            if (!_isDefaultValueInitialized)
            {
                if (!HasDefault)
                {
                    throw new InvalidOperationException("Property doesn't have default value");
                }
                // Decode first, set the flag second: if decoding throws, the
                // next access retries instead of returning an uninitialized
                // cached value (the flag was previously set before decoding).
                _defaultValue = GetDefaultValue();
                _isDefaultValueInitialized = true;
            }
            return _defaultValue;
        }
    }

    /// <summary>
    /// The get accessor's definition; default-valued when <see cref="HasGetter"/> is false.
    /// </summary>
    public CilMethodDefinition Getter
    {
        get
        {
            if (!_isGetterInitialized)
            {
                _isGetterInitialized = true;
                if (HasGetter)
                {
                    _getter = CilMethodDefinition.Create(_accessors.Getter, ref _readers, _typeDefinition);
                }
            }
            return _getter;
        }
    }

    /// <summary>
    /// The set accessor's definition; default-valued when <see cref="HasSetter"/> is false.
    /// </summary>
    public CilMethodDefinition Setter
    {
        get
        {
            if (!_isSetterInitialized)
            {
                _isSetterInitialized = true;
                if (HasSetter)
                {
                    _setter = CilMethodDefinition.Create(_accessors.Setter, ref _readers, _typeDefinition);
                }
            }
            return _setter;
        }
    }

    /// <summary>Decoded method signature of the property (lazily cached).</summary>
    public MethodSignature<CilType> Signature
    {
        get
        {
            if (!_isSignatureInitialized)
            {
                _isSignatureInitialized = true;
                _signature = SignatureDecoder.DecodeMethodSignature(_propertyDef.Signature, _readers.Provider);
            }
            return _signature;
        }
    }

    /// <summary>Custom attributes applied to the property (lazily enumerated, then cached).</summary>
    public IEnumerable<CilCustomAttribute> CustomAttributes
    {
        get
        {
            if (_customAttributes == null)
            {
                _customAttributes = GetCustomAttributes();
            }
            return _customAttributes;
        }
    }

    /// <summary>Raw metadata flags of the property row.</summary>
    public PropertyAttributes Attributes
    {
        get
        {
            return _propertyDef.Attributes;
        }
    }

    public void Accept(ICilVisitor visitor)
    {
        visitor.Visit(this);
    }

    /// <summary>
    /// Formats the property the way ILDasm prints it: attribute keywords,
    /// optional "instance", return type, name and parameter list.
    /// </summary>
    public string GetDecodedSignature()
    {
        string attributes = GetAttributesForSignature();
        StringBuilder signature = new StringBuilder();
        if (Signature.Header.IsInstance)
        {
            signature.Append("instance ");
        }
        signature.Append(Signature.ReturnType);
        // (sic: "DecodeSignatureParamerTypes" is the decoder's actual API name.)
        return string.Format("{0}{1} {2}{3}", attributes, signature.ToString(), Name, CilDecoder.DecodeSignatureParamerTypes(Signature));
    }

    private IEnumerable<CilCustomAttribute> GetCustomAttributes()
    {
        foreach (var handle in _propertyDef.GetCustomAttributes())
        {
            var attribute = _readers.MdReader.GetCustomAttribute(handle);
            yield return new CilCustomAttribute(attribute, ref _readers);
        }
    }

    private string GetAttributesForSignature()
    {
        // A property may carry both flags; emit every one that is present.
        // (The previous early-return dropped "rtspecialname" whenever
        // "specialname" was also set, which diverges from ILDasm output.)
        StringBuilder result = new StringBuilder();
        if (Attributes.HasFlag(PropertyAttributes.SpecialName))
        {
            result.Append("specialname ");
        }
        if (Attributes.HasFlag(PropertyAttributes.RTSpecialName))
        {
            result.Append("rtspecialname ");
        }
        return result.ToString();
    }

    private CilConstant GetDefaultValue()
    {
        Constant constant = _readers.MdReader.GetConstant(_propertyDef.GetDefaultValue());
        return CilConstant.Create(constant, ref _readers);
    }
}
}
| |
using UnityEngine;
using System.Collections.Generic;
using tk2dRuntime.TileMap;
[System.Flags]
public enum tk2dTileFlags {
None = 0x00000000,
FlipX = 0x01000000,
FlipY = 0x02000000,
Rot90 = 0x04000000,
}
[ExecuteInEditMode]
[AddComponentMenu("2D Toolkit/TileMap/TileMap")]
/// <summary>
/// Tile Map
/// </summary>
public class tk2dTileMap : MonoBehaviour, tk2dRuntime.ISpriteCollectionForceBuild
{
/// <summary>
/// This is a link to the editor data object (tk2dTileMapEditorData).
/// It contains presets, and other data which isn't really relevant in game.
/// </summary>
public string editorDataGUID = "";
/// <summary>
/// Tile map data, stores shared parameters for tilemaps
/// </summary>
public tk2dTileMapData data;
/// <summary>
/// Tile map render and collider object
/// </summary>
public GameObject renderData;
/// <summary>
/// The sprite collection used by the tilemap
/// </summary>
[SerializeField]
private tk2dSpriteCollectionData spriteCollection = null;
public tk2dSpriteCollectionData Editor__SpriteCollection
{
get
{
return spriteCollection;
}
set
{
spriteCollection = value;
}
}
public tk2dSpriteCollectionData SpriteCollectionInst
{
get
{
if (spriteCollection != null)
return spriteCollection.inst;
else
return null;
}
}
[SerializeField]
int spriteCollectionKey;
/// <summary>Width of the tilemap</summary>
public int width = 128;
/// <summary>Height of the tilemap</summary>
public int height = 128;
/// <summary>X axis partition size for this tilemap</summary>
public int partitionSizeX = 32;
/// <summary>Y axis partition size for this tilemap</summary>
public int partitionSizeY = 32;
[SerializeField]
Layer[] layers;
[SerializeField]
ColorChannel colorChannel;
[SerializeField]
GameObject prefabsRoot;
[System.Serializable]
public class TilemapPrefabInstance {
public int x, y, layer;
public GameObject instance;
}
[SerializeField]
List<TilemapPrefabInstance> tilePrefabsList = new List<TilemapPrefabInstance>();
[SerializeField]
bool _inEditMode = false;
public bool AllowEdit { get { return _inEditMode; } }
// holds a path to a serialized mesh, uses this to work out dump directory for meshes
public string serializedMeshPath;
void Awake()
{
    // The serialized build key must match the collection's current build
    // key; a mismatch (or a collection needing material instances) means
    // the cached render data is stale and must be rebuilt.
    bool spriteCollectionKeyMatch = true;
    if (SpriteCollectionInst && (SpriteCollectionInst.buildKey != spriteCollectionKey || SpriteCollectionInst.needMaterialInstance)) {
        spriteCollectionKeyMatch = false;
    }
    if (Application.platform == RuntimePlatform.WindowsEditor ||
        Application.platform == RuntimePlatform.OSXEditor)
    {
        if ((Application.isPlaying && _inEditMode == true) || !spriteCollectionKeyMatch)
        {
            // Switched to edit mode while still in edit mode, rebuild
            EndEditMode();
        }
        else {
            // First-time build: data is assigned but nothing rendered yet.
            if (spriteCollection != null && data != null && renderData == null) {
                Build(BuildFlags.ForceBuild);
            }
        }
    }
    else
    {
        // Player build: being left in edit mode is an authoring mistake —
        // recover by ending edit mode, at a significant build cost.
        if (_inEditMode == true)
        {
            Debug.LogError("Tilemap " + name + " is still in edit mode. Please fix." +
                "Building overhead will be significant.");
            EndEditMode();
        }
        else if (!spriteCollectionKeyMatch)
        {
            Build(BuildFlags.ForceBuild);
        }
        else if (spriteCollection != null && data != null && renderData == null) {
            Build(BuildFlags.ForceBuild);
        }
    }
}
#if UNITY_EDITOR
void OnEnable() {
    // After an assembly reload / scene load in the editor, chunks that were
    // built against instanced materials can come back with a null
    // sharedMaterial; detect that and force a rebuild.
    if (spriteCollection != null && data != null && renderData != null
        && SpriteCollectionInst != null && SpriteCollectionInst.needMaterialInstance) {
        bool needBuild = false;
        if (layers != null) {
            foreach (tk2dRuntime.TileMap.Layer layer in layers) {
                if (layer.spriteChannel != null && layer.spriteChannel.chunks != null) {
                    foreach (tk2dRuntime.TileMap.SpriteChunk chunk in layer.spriteChannel.chunks) {
                        if (chunk.gameObject != null && chunk.gameObject.GetComponent<Renderer>() != null) {
                            if (chunk.gameObject.GetComponent<Renderer>().sharedMaterial == null) {
                                // NOTE(review): break exits only the chunk
                                // loop; remaining layers are still scanned.
                                // Harmless, just redundant work.
                                needBuild = true;
                                break;
                            }
                        }
                    }
                }
            }
        }
        if (needBuild) {
            Build(BuildFlags.ForceBuild);
        }
    }
}
#endif
void OnDestroy() {
    // Release per-layer game data before tearing down the render root.
    if (layers != null) {
        foreach (tk2dRuntime.TileMap.Layer layer in layers) {
            layer.DestroyGameData(this);
        }
    }
    if (renderData != null) {
        tk2dUtil.DestroyImmediate(renderData);
    }
}
#if UNITY_EDITOR
void OnDrawGizmos() {
    if (data != null) {
        // Draw a fully transparent cube over the tilemap bounds so the map
        // is click-selectable in the scene view without rendering anything.
        Vector3 p0 = data.tileOrigin;
        Vector3 p1 = new Vector3(p0.x + data.tileSize.x * width, p0.y + data.tileSize.y * height, 0.0f);
        Gizmos.color = Color.clear;
        Gizmos.matrix = transform.localToWorldMatrix;
        Gizmos.DrawCube((p0 + p1) * 0.5f, (p1 - p0));
        // Restore gizmo state for other drawers.
        Gizmos.matrix = Matrix4x4.identity;
        Gizmos.color = Color.white;
    }
}
#endif
[System.Flags]
public enum BuildFlags {
    Default = 0,
    EditMode = 1,   // NOTE(review): not referenced within this file — confirm external usage
    ForceBuild = 2  // rebuild all partitions, not just dirty ones
};
/// <summary>
/// Builds the tilemap. Call this after using the SetTile functions to
/// rebuild the affected partitions. Build only rebuilds affected partitions
/// and is efficient enough to use at runtime if you don't use Unity colliders.
/// Avoid building tilemaps every frame if you use Unity colliders as it will
/// likely be too slow for runtime use.
/// </summary>
public void Build() { Build(BuildFlags.Default); }
/// <summary>
/// Like <see cref="T:Build"/> above, but forces a build of all partitions.
/// </summary>
public void ForceBuild() { Build(BuildFlags.ForceBuild); }
// Destroys every prefab instance spawned under the chunk objects, while
// keeping the renderData object itself alive.
void ClearSpawnedInstances()
{
    if (layers == null) {
        return;
    }
    BuilderUtil.HideTileMapPrefabs(this);
    foreach (Layer layer in layers)
    {
        foreach (var chunk in layer.spriteChannel.chunks)
        {
            if (chunk.gameObject == null) {
                continue;
            }
            // Snapshot the children first; destroying while walking the
            // transform hierarchy would invalidate the iteration.
            Transform chunkTransform = chunk.gameObject.transform;
            List<Transform> spawned = new List<Transform>();
            for (int i = 0; i < chunkTransform.childCount; ++i) {
                spawned.Add(chunkTransform.GetChild(i));
            }
            foreach (Transform child in spawned) {
                tk2dUtil.DestroyImmediate(child.gameObject);
            }
        }
    }
}
// Toggles the prefabs-root container. The UNITY_3_5 branch exists because
// SetActiveRecursively was replaced by SetActive in later Unity versions.
void SetPrefabsRootActive(bool active) {
    if (prefabsRoot != null)
#if UNITY_3_5
        prefabsRoot.SetActiveRecursively(active);
#else
        tk2dUtil.SetActive(prefabsRoot, active);
#endif
}
public void Build(BuildFlags buildFlags)
{
    // NOTE(review): the entire body is wrapped in #if UNITY_EDITOR, making
    // this a no-op in player builds even though the class documentation
    // describes runtime rebuilds — confirm the directive placement is
    // intentional.
#if UNITY_EDITOR
    // Sanitize tilePrefabs input, to avoid branches later
    if (data != null && spriteCollection != null)
    {
        if (data.tilePrefabs == null)
            data.tilePrefabs = new GameObject[SpriteCollectionInst.Count];
        else if (data.tilePrefabs.Length != SpriteCollectionInst.Count)
            System.Array.Resize(ref data.tilePrefabs, SpriteCollectionInst.Count);
        // Fix up data if necessary
        BuilderUtil.InitDataStore(this);
    }
    else
    {
        // Nothing to build without both a data asset and a collection.
        return;
    }
    // Sanitize sprite collection material ids
    if (SpriteCollectionInst)
        SpriteCollectionInst.InitMaterialIds();
    bool forceBuild = (buildFlags & BuildFlags.ForceBuild) != 0;
    // When invalid, everything needs to be rebuilt
    if (SpriteCollectionInst && SpriteCollectionInst.buildKey != spriteCollectionKey)
        forceBuild = true;
    // Remember active layers
    Dictionary<Layer, bool> layersActive = new Dictionary<Layer,bool>();
    if (layers != null)
    {
        for (int layerIdx = 0; layerIdx < layers.Length; ++layerIdx)
        {
            Layer layer = layers[layerIdx];
            if (layer != null && layer.gameObject != null)
            {
#if UNITY_3_5
                layersActive[layer] = layer.gameObject.active;
#else
                layersActive[layer] = layer.gameObject.activeSelf;
#endif
            }
        }
    }
    if (forceBuild) {
        ClearSpawnedInstances();
    }
    BuilderUtil.CreateRenderData(this, _inEditMode, layersActive);
    RenderMeshBuilder.Build(this, _inEditMode, forceBuild);
    if (!_inEditMode)
    {
        // Colliders and prefabs are only produced for the live (non-edit)
        // representation of the map.
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
        tk2dSpriteDefinition def = SpriteCollectionInst.FirstValidDefinition;
        if (def != null && def.physicsEngine == tk2dSpriteDefinition.PhysicsEngine.Physics2D) {
#if !STRIP_PHYSICS_2D
            ColliderBuilder2D.Build(this, forceBuild);
#endif
        }
        else
#endif
        {
#if !STRIP_PHYSICS_3D
            ColliderBuilder3D.Build(this, forceBuild);
#endif
        }
        BuilderUtil.SpawnPrefabs(this, forceBuild);
    }
    // Clear dirty flag on everything
    foreach (var layer in layers)
        layer.ClearDirtyFlag();
    if (colorChannel != null)
        colorChannel.ClearDirtyFlag();
    // Update sprite collection key
    if (SpriteCollectionInst)
        spriteCollectionKey = SpriteCollectionInst.buildKey;
#endif
}
/// <summary>
/// Gets the integer tile coordinate at a world position; the fractional
/// part is discarded. Returns true if the position lies inside the tilemap
/// bounds.
/// </summary>
public bool GetTileAtPosition(Vector3 position, out int x, out int y)
{
    float fracX, fracY;
    bool withinBounds = GetTileFracAtPosition(position, out fracX, out fracY);
    x = (int)fracX;
    y = (int)fracY;
    return withinBounds;
}
/// <summary>
/// Gets the tile coordinate at position. This can be used to obtain tile or color data explicitly from layers
/// The fractional value returned is the fraction into the current tile
/// Returns true if the position is within the tilemap bounds
/// </summary>
public bool GetTileFracAtPosition(Vector3 position, out float x, out float y)
{
    switch (data.tileType)
    {
        case tk2dTileMapData.TileType.Rectangular:
        {
            // Straight affine mapping from local space into tile units.
            Vector3 localPosition = transform.worldToLocalMatrix.MultiplyPoint(position);
            x = (localPosition.x - data.tileOrigin.x) / data.tileSize.x;
            y = (localPosition.y - data.tileOrigin.y) / data.tileSize.y;
            return (x >= 0 && x < width && y >= 0 && y < height);
        }
        case tk2dTileMapData.TileType.Isometric:
        {
            // Degenerate tile size — fall through to the failure return.
            if (data.tileSize.x == 0.0f)
                break;
            // Angle of the diamond edge; used below to decide which
            // neighbouring iso tile a point in a tile corner belongs to.
            float tileAngle = Mathf.Atan2(data.tileSize.y, data.tileSize.x / 2.0f);
            Vector3 localPosition = transform.worldToLocalMatrix.MultiplyPoint(position);
            x = (localPosition.x - data.tileOrigin.x) / data.tileSize.x;
            y = ((localPosition.y - data.tileOrigin.y) / (data.tileSize.y));
            float fy = y * 0.5f;
            int iy = (int)fy;
            float fry = fy - iy;
            float frx = x % 1.0f;
            x = (int)x;
            y = iy * 2;
            if (frx > 0.5f)
            {
                if (fry > 0.5f && Mathf.Atan2(1.0f - fry, (frx - 0.5f) * 2) < tileAngle)
                    y += 1;
                else if (fry < 0.5f && Mathf.Atan2(fry, (frx - 0.5f) * 2) < tileAngle)
                    y -= 1;
            }
            else if (frx < 0.5f)
            {
                // NOTE(review): this arm compares (fry - 0.5f) with '>'
                // while every other arm uses '<' against tileAngle on a
                // non-shifted fry — verify the corner classification is
                // really symmetric with the frx > 0.5f case.
                if (fry > 0.5f && Mathf.Atan2(fry - 0.5f, frx * 2) > tileAngle)
                {
                    y += 1;
                    x -= 1;
                }
                if (fry < 0.5f && Mathf.Atan2(fry, (0.5f - frx) * 2) < tileAngle)
                {
                    y -= 1;
                    x -= 1;
                }
            }
            return (x >= 0 && x < width && y >= 0 && y < height);
        }
    }
    // Unknown tile type or degenerate tile size.
    x = 0.0f;
    y = 0.0f;
    return false;
}
/// <summary>
/// Returns the world-space position of tile (x, y). Isometric maps shift
/// odd rows half a tile to the right; every other tile type uses the plain
/// rectangular mapping.
/// </summary>
public Vector3 GetTilePosition(int x, int y)
{
    Vector3 localPosition;
    if (data.tileType == tk2dTileMapData.TileType.Isometric)
    {
        float oddRowShift = ((y & 1) == 0) ? 0.0f : 0.5f;
        localPosition = new Vector3(
            ((float)x + oddRowShift) * data.tileSize.x + data.tileOrigin.x,
            y * data.tileSize.y + data.tileOrigin.y,
            0);
    }
    else
    {
        localPosition = new Vector3(
            x * data.tileSize.x + data.tileOrigin.x,
            y * data.tileSize.y + data.tileOrigin.y,
            0);
    }
    return transform.localToWorldMatrix.MultiplyPoint(localPosition);
}
/// <summary>
/// Gets the tile id at a world position on the given layer.
/// -1 = invalid layer, out-of-bounds position, or empty tile.
/// </summary>
public int GetTileIdAtPosition(Vector3 position, int layer)
{
    if (layer < 0 || layer >= layers.Length)
        return -1;
    int x, y;
    return GetTileAtPosition(position, out x, out y) ? layers[layer].GetTile(x, y) : -1;
}
/// <summary>
/// Returns the tile info chunk for the tile. Use this to store additional metadata
/// </summary>
public tk2dRuntime.TileMap.TileInfo GetTileInfoForTileId(int tileId)
{
    // The tilemap data asset owns per-tile metadata.
    return this.data.GetTileInfoForSprite(tileId);
}
/// <summary>
/// Returns the bilinearly interpolated color-channel color at a world
/// position. White when no color channel exists; the channel's clear color
/// outside the tilemap bounds or in empty chunks.
/// (Previous summary was a copy-paste from GetTileIdAtPosition.)
/// </summary>
public Color GetInterpolatedColorAtPosition(Vector3 position)
{
    Vector3 localPosition = transform.worldToLocalMatrix.MultiplyPoint(position);
    int x = (int)((localPosition.x - data.tileOrigin.x) / data.tileSize.x);
    int y = (int)((localPosition.y - data.tileOrigin.y) / data.tileSize.y);
    if (colorChannel == null || colorChannel.IsEmpty)
        return Color.white;
    if (x < 0 || x >= width ||
        y < 0 || y >= height)
    {
        return colorChannel.clearColor;
    }
    int offset;
    ColorChunk colorChunk = colorChannel.FindChunkAndCoordinate(x, y, out offset);
    if (colorChunk.Empty)
    {
        return colorChannel.clearColor;
    }
    else
    {
        // Colors appear to be stored per tile corner: a chunk row holds
        // partitionSizeX + 1 entries, so +1 steps one corner right and
        // +colorChunkRowOffset steps one corner row up.
        int colorChunkRowOffset = partitionSizeX + 1;
        Color tileColorx0y0 = colorChunk.colors[offset];
        Color tileColorx1y0 = colorChunk.colors[offset + 1];
        Color tileColorx0y1 = colorChunk.colors[offset + colorChunkRowOffset];
        Color tileColorx1y1 = colorChunk.colors[offset + colorChunkRowOffset + 1];
        // Fractional position of the point inside tile (x, y) drives the
        // bilinear blend below.
        float wx = x * data.tileSize.x + data.tileOrigin.x;
        float wy = y * data.tileSize.y + data.tileOrigin.y;
        float ix = (localPosition.x - wx) / data.tileSize.x;
        float iy = (localPosition.y - wy) / data.tileSize.y;
        Color cy0 = Color.Lerp(tileColorx0y0, tileColorx1y0, ix);
        Color cy1 = Color.Lerp(tileColorx0y1, tileColorx1y1, ix);
        return Color.Lerp(cy0, cy1, iy);
    }
}
// ISpriteCollectionBuilder
public bool UsesSpriteCollection(tk2dSpriteCollectionData spriteCollection)
{
    // Matches either the serialized collection asset or its runtime instance.
    if (this.spriteCollection == null)
    {
        return false;
    }
    return spriteCollection == this.spriteCollection || spriteCollection == this.spriteCollection.inst;
}
// We might need to end edit mode when running in game
public void EndEditMode()
{
    _inEditMode = false;
    // Re-activate the hidden prefab instances before rebuilding, then
    // discard the editor-only prefabs root entirely.
    SetPrefabsRootActive(true);
    Build(BuildFlags.ForceBuild);
    if (prefabsRoot != null) {
        tk2dUtil.DestroyImmediate(prefabsRoot);
        prefabsRoot = null;
    }
}
#if UNITY_EDITOR
public void BeginEditMode()
{
    if (layers == null) {
        // Nothing built yet; just flag edit mode and bail.
        _inEditMode = true;
        return;
    }
    if (!_inEditMode) {
        _inEditMode = true;
        // Destroy all children
        // Only necessary when switching INTO edit mode
        BuilderUtil.HideTileMapPrefabs(this);
        SetPrefabsRootActive(false);
    }
    Build(BuildFlags.ForceBuild);
}
public bool AreSpritesInitialized()
{
    return layers != null;
}
public bool HasColorChannel()
{
    return (colorChannel != null && !colorChannel.IsEmpty);
}
public void CreateColorChannel()
{
    colorChannel = new ColorChannel(width, height, partitionSizeX, partitionSizeY);
    colorChannel.Create();
}
public void DeleteColorChannel()
{
    // NOTE(review): no null check here, unlike HasColorChannel(); calling
    // this before a channel exists would throw — confirm callers guard it.
    colorChannel.Delete();
}
// Clears the tile rectangle [x0..x1] x [y0..y1] (inclusive, clamped to the
// map bounds) on one layer, then compacts the layer incrementally.
public void DeleteSprites(int layerId, int x0, int y0, int x1, int y1)
{
    x0 = Mathf.Clamp(x0, 0, width - 1);
    y0 = Mathf.Clamp(y0, 0, height - 1);
    x1 = Mathf.Clamp(x1, 0, width - 1);
    y1 = Mathf.Clamp(y1, 0, height - 1);
    int numTilesX = x1 - x0 + 1;
    int numTilesY = y1 - y0 + 1;
    var layer = layers[layerId];
    for (int y = 0; y < numTilesY; ++y)
    {
        for (int x = 0; x < numTilesX; ++x)
        {
            layer.SetTile(x0 + x, y0 + y, -1);
        }
    }
    layer.OptimizeIncremental();
}
#endif
// Marks a mesh dirty so the editor serializes changes; no-op in players.
public void TouchMesh(Mesh mesh)
{
#if UNITY_EDITOR
    tk2dUtil.SetDirty(mesh);
#endif
}
public void DestroyMesh(Mesh mesh)
{
#if UNITY_EDITOR
    // Meshes serialized as assets are cleared and removed from the asset
    // database; scene-only meshes are destroyed directly.
    if (UnityEditor.AssetDatabase.GetAssetPath(mesh).Length != 0)
    {
        mesh.Clear();
        UnityEditor.AssetDatabase.DeleteAsset(UnityEditor.AssetDatabase.GetAssetPath(mesh));
    }
    else
    {
        tk2dUtil.DestroyImmediate(mesh);
    }
#else
    tk2dUtil.DestroyImmediate(mesh);
#endif
}
// Read access to the spawned-prefab bookkeeping used by the builder.
public int GetTilePrefabsListCount() {
    return tilePrefabsList.Count;
}
public List<TilemapPrefabInstance> TilePrefabsList {
    get { return tilePrefabsList; }
}
// Unpacks one prefab-instance record into its individual fields.
public void GetTilePrefabsListItem(int index, out int x, out int y, out int layer, out GameObject instance) {
    TilemapPrefabInstance entry = tilePrefabsList[index];
    x = entry.x;
    y = entry.y;
    layer = entry.layer;
    instance = entry.instance;
}
// Rebuilds the prefab-instance list from parallel lists of coordinates,
// layer indices and spawned objects.
public void SetTilePrefabsList(List<int> xs, List<int> ys, List<int> layers, List<GameObject> instances) {
    int count = instances.Count;
    tilePrefabsList = new List<TilemapPrefabInstance>(count);
    for (int idx = 0; idx < count; ++idx) {
        tilePrefabsList.Add(new TilemapPrefabInstance {
            x = xs[idx],
            y = ys[idx],
            layer = layers[idx],
            instance = instances[idx]
        });
    }
}
/// <summary>
/// Gets or sets the layers.
/// </summary>
public Layer[] Layers
{
    get { return layers; }
    set { layers = value; }
}
/// <summary>
/// Gets or sets the color channel. May be null until CreateColorChannel is called.
/// </summary>
public ColorChannel ColorChannel
{
    get { return colorChannel; }
    set { colorChannel = value; }
}
/// <summary>
/// Gets or sets the prefabs root — the container toggled/destroyed when
/// entering and leaving edit mode.
/// </summary>
public GameObject PrefabsRoot
{
    get { return prefabsRoot; }
    set { prefabsRoot = value; }
}
/// <summary>Gets the tile on a layer at x, y</summary>
/// <returns>The tile - either a sprite Id or -1 if the tile is empty.</returns>
public int GetTile(int x, int y, int layer) {
if (layer < 0 || layer >= layers.Length)
return -1;
return layers[layer].GetTile(x, y);
}
/// <summary>Gets the tile flags on a layer at x, y.</summary>
/// <returns>A combination of tk2dTileFlags, or tk2dTileFlags.None when the layer index is out of range.</returns>
public tk2dTileFlags GetTileFlags(int x, int y, int layer) {
    bool layerValid = layer >= 0 && layer < layers.Length;
    return layerValid ? layers[layer].GetTileFlags(x, y) : tk2dTileFlags.None;
}
/// <summary>Sets the tile on a layer at x, y - either a sprite Id or -1 for empty. Out-of-range layers are ignored.</summary>
public void SetTile(int x, int y, int layer, int tile) {
    if (layer >= 0 && layer < layers.Length) {
        layers[layer].SetTile(x, y, tile);
    }
}
/// <summary>Sets the tile flags on a layer at x, y - a combination of tk2dTileFlags. Out-of-range layers are ignored.</summary>
public void SetTileFlags(int x, int y, int layer, tk2dTileFlags flags) {
    if (layer >= 0 && layer < layers.Length) {
        layers[layer].SetTileFlags(x, y, flags);
    }
}
/// <summary>Clears the tile on a layer at x, y. Out-of-range layers are ignored.</summary>
public void ClearTile(int x, int y, int layer) {
    if (layer >= 0 && layer < layers.Length) {
        layers[layer].ClearTile(x, y);
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Windows.Controls;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.ChangeSignature;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.VisualStudio.LanguageServices.Implementation.Utilities;
using Roslyn.Utilities;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.ChangeSignature
{
/// <summary>
/// View model backing the Change Signature dialog. Holds the parameter list as
/// three segments — an optional extension-method 'this' parameter (pinned first),
/// two reorderable groups, and an optional 'params' parameter (pinned last) —
/// and exposes WPF-bindable state (selection, move/remove/restore availability,
/// signature preview, automation text).
/// </summary>
internal class ChangeSignatureDialogViewModel : AbstractNotifyPropertyChanged
{
    private readonly INotificationService _notificationService;
    private readonly ClassificationTypeMap _classificationTypeMap;

    // Snapshot of the signature as it was when the dialog opened; used by
    // IsOkButtonEnabled to detect whether the user actually changed anything.
    private readonly ParameterConfiguration _originalParameterConfiguration;
    private readonly ISymbol _symbol;

    // The extension-method 'this' parameter, if any. It occupies row 0 of the
    // grid and can never be moved or removed.
    private readonly ParameterViewModel _thisParameter;

    // Group 1: parameters without default values; group 2: remaining editable
    // parameters. Reordering is only permitted within a group (see CanMoveUp/Down).
    private List<ParameterViewModel> _parameterGroup1;
    private List<ParameterViewModel> _parameterGroup2;

    // The trailing 'params' array parameter, if any; pinned at the end.
    private readonly ParameterViewModel _paramsParameter;

    // Parameters that must stay disabled in the UI (currently only 'this').
    private HashSet<IParameterSymbol> _disabledParameters = new HashSet<IParameterSymbol>();

    // Pre-rendered display parts for the declaration portion of the signature
    // (everything before the parameter list).
    private ImmutableArray<SymbolDisplayPart> _declarationParts;
    private bool _previewChanges;

    internal ChangeSignatureDialogViewModel(INotificationService notificationService, ParameterConfiguration parameters, ISymbol symbol, ClassificationTypeMap classificationTypeMap)
    {
        _originalParameterConfiguration = parameters;
        _notificationService = notificationService;
        _classificationTypeMap = classificationTypeMap;

        // Start the selection on the first editable row; if a 'this' parameter
        // exists it occupies row 0, so the first editable row is 1.
        int startingSelectedIndex = 0;

        if (parameters.ThisParameter != null)
        {
            startingSelectedIndex++;
            _thisParameter = new ParameterViewModel(this, parameters.ThisParameter);
            _disabledParameters.Add(parameters.ThisParameter);
        }

        if (parameters.ParamsParameter != null)
        {
            _paramsParameter = new ParameterViewModel(this, parameters.ParamsParameter);
        }

        _symbol = symbol;
        _declarationParts = symbol.ToDisplayParts(s_symbolDeclarationDisplayFormat);

        _parameterGroup1 = parameters.ParametersWithoutDefaultValues.Select(p => new ParameterViewModel(this, p)).ToList();
        _parameterGroup2 = parameters.RemainingEditableParameters.Select(p => new ParameterViewModel(this, p)).ToList();

        this.SelectedIndex = startingSelectedIndex;
    }

    /// <summary>Row index the dialog should select on open: 0, or 1 when a 'this' parameter occupies row 0.</summary>
    public int GetStartingSelectionIndex()
    {
        return _thisParameter == null ? 0 : 1;
    }

    /// <summary>Whether the user asked to preview the changes before applying them.</summary>
    public bool PreviewChanges
    {
        get
        {
            return _previewChanges;
        }

        set
        {
            _previewChanges = value;
        }
    }

    /// <summary>True when the currently selected parameter may be removed (not the 'this' row, not already removed).</summary>
    public bool CanRemove
    {
        get
        {
            if (!SelectedIndex.HasValue)
            {
                return false;
            }

            var index = SelectedIndex.Value;

            // Row 0 is the 'this' parameter when present; it is never removable.
            if (index == 0 && _thisParameter != null)
            {
                return false;
            }

            // index = thisParameter == null ? index : index - 1;

            return !AllParameters[index].IsRemoved;
        }
    }

    /// <summary>True when the currently selected parameter was removed and may be restored.</summary>
    public bool CanRestore
    {
        get
        {
            if (!SelectedIndex.HasValue)
            {
                return false;
            }

            var index = SelectedIndex.Value;

            if (index == 0 && _thisParameter != null)
            {
                return false;
            }

            // index = thisParameter == null ? index : index - 1;

            return AllParameters[index].IsRemoved;
        }
    }

    /// <summary>
    /// Marks the selected parameter removed and refreshes every binding that
    /// depends on the parameter list. Assumes a row is selected — presumably the
    /// command is only reachable when CanRemove is true; verify at the call site.
    /// </summary>
    internal void Remove()
    {
        AllParameters[_selectedIndex.Value].IsRemoved = true;

        NotifyPropertyChanged(nameof(AllParameters));
        NotifyPropertyChanged(nameof(SignatureDisplay));
        NotifyPropertyChanged(nameof(SignaturePreviewAutomationText));
        NotifyPropertyChanged(nameof(IsOkButtonEnabled));
        NotifyPropertyChanged(nameof(CanRemove));
        NotifyPropertyChanged(nameof(RemoveAutomationText));
        NotifyPropertyChanged(nameof(CanRestore));
        NotifyPropertyChanged(nameof(RestoreAutomationText));
    }

    /// <summary>Inverse of <see cref="Remove"/>: un-removes the selected parameter and refreshes the same bindings.</summary>
    internal void Restore()
    {
        AllParameters[_selectedIndex.Value].IsRemoved = false;

        NotifyPropertyChanged(nameof(AllParameters));
        NotifyPropertyChanged(nameof(SignatureDisplay));
        NotifyPropertyChanged(nameof(SignaturePreviewAutomationText));
        NotifyPropertyChanged(nameof(IsOkButtonEnabled));
        NotifyPropertyChanged(nameof(CanRemove));
        NotifyPropertyChanged(nameof(RemoveAutomationText));
        NotifyPropertyChanged(nameof(CanRestore));
        NotifyPropertyChanged(nameof(RestoreAutomationText));
    }

    /// <summary>
    /// Builds the resulting configuration from the current UI state: removed
    /// parameters are dropped; a removed 'params' parameter becomes null.
    /// </summary>
    internal ParameterConfiguration GetParameterConfiguration()
    {
        return new ParameterConfiguration(
            _originalParameterConfiguration.ThisParameter,
            _parameterGroup1.Where(p => !p.IsRemoved).Select(p => p.ParameterSymbol).ToList(),
            _parameterGroup2.Where(p => !p.IsRemoved).Select(p => p.ParameterSymbol).ToList(),
            (_paramsParameter == null || _paramsParameter.IsRemoved) ? null : _paramsParameter.ParameterSymbol);
    }

    // Format for the declaration portion of the signature preview (accessibility,
    // modifiers, type, explicit interface, name) — no parameters.
    private static SymbolDisplayFormat s_symbolDeclarationDisplayFormat = new SymbolDisplayFormat(
        genericsOptions: SymbolDisplayGenericsOptions.IncludeTypeParameters,
        miscellaneousOptions: SymbolDisplayMiscellaneousOptions.EscapeKeywordIdentifiers | SymbolDisplayMiscellaneousOptions.UseSpecialTypes,
        extensionMethodStyle: SymbolDisplayExtensionMethodStyle.StaticMethod,
        memberOptions:
            SymbolDisplayMemberOptions.IncludeType |
            SymbolDisplayMemberOptions.IncludeExplicitInterface |
            SymbolDisplayMemberOptions.IncludeAccessibility |
            SymbolDisplayMemberOptions.IncludeModifiers);

    // Format for rendering each individual parameter in the preview and the grid.
    private static SymbolDisplayFormat s_parameterDisplayFormat = new SymbolDisplayFormat(
        genericsOptions: SymbolDisplayGenericsOptions.IncludeTypeParameters,
        miscellaneousOptions: SymbolDisplayMiscellaneousOptions.EscapeKeywordIdentifiers | SymbolDisplayMiscellaneousOptions.UseSpecialTypes,
        parameterOptions:
            SymbolDisplayParameterOptions.IncludeType |
            SymbolDisplayParameterOptions.IncludeParamsRefOut |
            SymbolDisplayParameterOptions.IncludeDefaultValue |
            SymbolDisplayParameterOptions.IncludeExtensionThis |
            SymbolDisplayParameterOptions.IncludeName);

    /// <summary>Classified, read-only WPF TextBlock showing the updated signature preview.</summary>
    public TextBlock SignatureDisplay
    {
        get
        {
            // TODO: Should probably use original syntax & formatting exactly instead of regenerating here
            List<SymbolDisplayPart> displayParts = GetSignatureDisplayParts();

            var textBlock = displayParts.ToTextBlock(_classificationTypeMap);

            foreach (var inline in textBlock.Inlines)
            {
                inline.FontSize = 12;
            }

            textBlock.IsEnabled = false;

            return textBlock;
        }
    }

    /// <summary>Plain-text form of the signature preview for UI automation / screen readers.</summary>
    public string SignaturePreviewAutomationText
    {
        get
        {
            return GetSignatureDisplayParts().Select(sdp => sdp.ToString()).Join(" ");
        }
    }

    // Test hook: the preview text without separator spaces.
    internal string TEST_GetSignatureDisplayText()
    {
        return GetSignatureDisplayParts().Select(p => p.ToString()).Join("");
    }

    // Declaration parts + "(" + comma-separated non-removed parameters + ")".
    private List<SymbolDisplayPart> GetSignatureDisplayParts()
    {
        var displayParts = new List<SymbolDisplayPart>();

        displayParts.AddRange(_declarationParts);
        displayParts.Add(new SymbolDisplayPart(SymbolDisplayPartKind.Punctuation, null, "("));

        bool first = true;

        foreach (var parameter in AllParameters.Where(p => !p.IsRemoved))
        {
            if (!first)
            {
                displayParts.Add(new SymbolDisplayPart(SymbolDisplayPartKind.Punctuation, null, ","));
                displayParts.Add(new SymbolDisplayPart(SymbolDisplayPartKind.Space, null, " "));
            }

            first = false;

            displayParts.AddRange(parameter.ParameterSymbol.ToDisplayParts(s_parameterDisplayFormat));
        }

        displayParts.Add(new SymbolDisplayPart(SymbolDisplayPartKind.Punctuation, null, ")"));
        return displayParts;
    }

    /// <summary>
    /// Flat, UI-ordered view of all parameters: ['this'], group 1, group 2, ['params'].
    /// Rebuilt on every access, so selection indices map directly into this list.
    /// </summary>
    public List<ParameterViewModel> AllParameters
    {
        get
        {
            var list = new List<ParameterViewModel>();
            if (_thisParameter != null)
            {
                list.Add(_thisParameter);
            }

            list.AddRange(_parameterGroup1);
            list.AddRange(_parameterGroup2);

            if (_paramsParameter != null)
            {
                list.Add(_paramsParameter);
            }

            return list;
        }
    }

    /// <summary>True when the selected parameter can move up without leaving its group.</summary>
    public bool CanMoveUp
    {
        get
        {
            if (!SelectedIndex.HasValue)
            {
                return false;
            }

            var index = SelectedIndex.Value;

            // Normalize to an index over group1+group2 (skip the 'this' row).
            index = _thisParameter == null ? index : index - 1;

            // Disallowed: first row overall, first row of group 2 (would cross
            // the group boundary), or the 'params' row.
            if (index <= 0 || index == _parameterGroup1.Count || index >= _parameterGroup1.Count + _parameterGroup2.Count)
            {
                return false;
            }

            return true;
        }
    }

    /// <summary>True when the selected parameter can move down without leaving its group.</summary>
    public bool CanMoveDown
    {
        get
        {
            if (!SelectedIndex.HasValue)
            {
                return false;
            }

            var index = SelectedIndex.Value;
            index = _thisParameter == null ? index : index - 1;

            // Disallowed: the 'this' row, the last row of group 1, or anything
            // at/after the last row of group 2.
            if (index < 0 || index == _parameterGroup1.Count - 1 || index >= _parameterGroup1.Count + _parameterGroup2.Count - 1)
            {
                return false;
            }

            return true;
        }
    }

    internal void MoveUp()
    {
        Debug.Assert(CanMoveUp);

        var index = SelectedIndex.Value;
        index = _thisParameter == null ? index : index - 1;
        Move(index < _parameterGroup1.Count ? _parameterGroup1 : _parameterGroup2, index < _parameterGroup1.Count ? index : index - _parameterGroup1.Count, delta: -1);
    }

    internal void MoveDown()
    {
        Debug.Assert(CanMoveDown);

        var index = SelectedIndex.Value;
        index = _thisParameter == null ? index : index - 1;
        Move(index < _parameterGroup1.Count ? _parameterGroup1 : _parameterGroup2, index < _parameterGroup1.Count ? index : index - _parameterGroup1.Count, delta: 1);
    }

    // Moves list[index] by delta within its group, keeps the same parameter
    // selected, and refreshes the preview bindings.
    private void Move(List<ParameterViewModel> list, int index, int delta)
    {
        var param = list[index];
        list.RemoveAt(index);
        list.Insert(index + delta, param);

        SelectedIndex += delta;

        NotifyPropertyChanged(nameof(AllParameters));
        NotifyPropertyChanged(nameof(SignatureDisplay));
        NotifyPropertyChanged(nameof(SignaturePreviewAutomationText));
        NotifyPropertyChanged(nameof(IsOkButtonEnabled));
    }

    /// <summary>The dialog may commit iff something actually changed.</summary>
    internal bool TrySubmit()
    {
        return IsOkButtonEnabled;
    }

    private bool IsDisabled(ParameterViewModel parameterViewModel)
    {
        return _disabledParameters.Contains(parameterViewModel.ParameterSymbol);
    }

    // The group the current selection falls into, or an empty list for the
    // pinned rows ('this'/'params').
    private IList<ParameterViewModel> GetSelectedGroup()
    {
        var index = SelectedIndex;
        index = _thisParameter == null ? index : index - 1;
        return index < _parameterGroup1.Count ? _parameterGroup1 : index < _parameterGroup1.Count + _parameterGroup2.Count ? _parameterGroup2 : SpecializedCollections.EmptyList<ParameterViewModel>();
    }

    /// <summary>
    /// Enabled when anything differs from the original configuration: a removal,
    /// or a reorder within either group.
    /// </summary>
    public bool IsOkButtonEnabled
    {
        get
        {
            return AllParameters.Any(p => p.IsRemoved) ||
                !_parameterGroup1.Select(p => p.ParameterSymbol).SequenceEqual(_originalParameterConfiguration.ParametersWithoutDefaultValues) ||
                !_parameterGroup2.Select(p => p.ParameterSymbol).SequenceEqual(_originalParameterConfiguration.RemainingEditableParameters);
        }
    }

    private int? _selectedIndex;

    /// <summary>
    /// Currently selected row in AllParameters. A value of -1 (WPF's "no
    /// selection") is normalized to null. Changing it refreshes every
    /// selection-dependent command/automation binding.
    /// </summary>
    public int? SelectedIndex
    {
        get
        {
            return _selectedIndex;
        }

        set
        {
            var newSelectedIndex = value == -1 ? null : value;
            if (newSelectedIndex == _selectedIndex)
            {
                return;
            }

            _selectedIndex = newSelectedIndex;

            NotifyPropertyChanged(nameof(CanMoveUp));
            NotifyPropertyChanged(nameof(MoveUpAutomationText));
            NotifyPropertyChanged(nameof(CanMoveDown));
            NotifyPropertyChanged(nameof(MoveDownAutomationText));
            NotifyPropertyChanged(nameof(CanRemove));
            NotifyPropertyChanged(nameof(RemoveAutomationText));
            NotifyPropertyChanged(nameof(CanRestore));
            NotifyPropertyChanged(nameof(RestoreAutomationText));
        }
    }

    /// <summary>Screen-reader text for the Move Up command ("Move X above Y"); empty when the move is not allowed.</summary>
    public string MoveUpAutomationText
    {
        get
        {
            if (!CanMoveUp)
            {
                return string.Empty;
            }

            return string.Format(ServicesVSResources.Move_0_above_1, AllParameters[SelectedIndex.Value].ParameterAutomationText, AllParameters[SelectedIndex.Value - 1].ParameterAutomationText);
        }
    }

    /// <summary>Screen-reader text for the Move Down command; empty when the move is not allowed.</summary>
    public string MoveDownAutomationText
    {
        get
        {
            if (!CanMoveDown)
            {
                return string.Empty;
            }

            return string.Format(ServicesVSResources.Move_0_below_1, AllParameters[SelectedIndex.Value].ParameterAutomationText, AllParameters[SelectedIndex.Value + 1].ParameterAutomationText);
        }
    }

    /// <summary>Screen-reader text for the Remove command; empty when removal is not allowed.</summary>
    public string RemoveAutomationText
    {
        get
        {
            if (!CanRemove)
            {
                return string.Empty;
            }

            return string.Format(ServicesVSResources.Remove_0, AllParameters[SelectedIndex.Value].ParameterAutomationText);
        }
    }

    /// <summary>Screen-reader text for the Restore command; empty when restore is not allowed.</summary>
    public string RestoreAutomationText
    {
        get
        {
            if (!CanRestore)
            {
                return string.Empty;
            }

            return string.Format(ServicesVSResources.Restore_0, AllParameters[SelectedIndex.Value].ParameterAutomationText);
        }
    }

    /// <summary>
    /// Row view model for a single parameter. Wraps the IParameterSymbol and
    /// derives the display columns (modifier, type, name, default) plus removal
    /// state. Reaches back into the owning dialog view model for shared state.
    /// </summary>
    public class ParameterViewModel
    {
        private readonly IParameterSymbol _parameter;
        private ChangeSignatureDialogViewModel _changeSignatureDialogViewModel;

        public IParameterSymbol ParameterSymbol
        {
            get { return _parameter; }
        }

        public ParameterViewModel(ChangeSignatureDialogViewModel changeSignatureDialogViewModel, IParameterSymbol parameter)
        {
            _changeSignatureDialogViewModel = changeSignatureDialogViewModel;
            _parameter = parameter;
        }

        /// <summary>"Type Name" text used by the automation strings above.</summary>
        public string ParameterAutomationText
        {
            get { return $"{Type} {Parameter}"; }
        }

        /// <summary>Modifier column: "out"/"ref"/"params"/"this", or empty.</summary>
        public string Modifier
        {
            get
            {
                // Todo: support VB
                switch (_parameter.RefKind)
                {
                    case RefKind.Out:
                        return "out";
                    case RefKind.Ref:
                        return "ref";
                }

                if (_parameter.IsParams)
                {
                    return "params";
                }

                // The extension-method receiver renders as "this".
                if (_changeSignatureDialogViewModel._thisParameter != null &&
                    _parameter == _changeSignatureDialogViewModel._thisParameter._parameter)
                {
                    return "this";
                }

                return string.Empty;
            }
        }

        public string Type
        {
            get { return _parameter.Type.ToDisplayString(s_parameterDisplayFormat); }
        }

        public string Parameter
        {
            get { return _parameter.Name; }
        }

        /// <summary>Default-value column: "null", a quoted string, the literal's text, or empty when there is no default.</summary>
        public string Default
        {
            get
            {
                if (!_parameter.HasExplicitDefaultValue)
                {
                    return string.Empty;
                }

                return _parameter.ExplicitDefaultValue == null
                    ? "null"
                    : _parameter.ExplicitDefaultValue is string
                        ? "\"" + _parameter.ExplicitDefaultValue.ToString() + "\""
                        : _parameter.ExplicitDefaultValue.ToString();
            }
        }

        public bool IsDisabled
        {
            get
            {
                return _changeSignatureDialogViewModel.IsDisabled(this);
            }
        }

        /// <summary>
        /// Whether this row draws a separator under it: after 'this', after the
        /// last of group 1 (when more rows follow), and after the last of group 2
        /// (when a 'params' row follows).
        /// </summary>
        public bool NeedsBottomBorder
        {
            get
            {
                if (this == _changeSignatureDialogViewModel._thisParameter)
                {
                    return true;
                }

                if (this == _changeSignatureDialogViewModel._parameterGroup1.LastOrDefault() &&
                    (_changeSignatureDialogViewModel._parameterGroup2.Any() || _changeSignatureDialogViewModel._paramsParameter != null))
                {
                    return true;
                }

                if (this == _changeSignatureDialogViewModel._parameterGroup2.LastOrDefault() &&
                    _changeSignatureDialogViewModel._paramsParameter != null)
                {
                    return true;
                }

                return false;
            }
        }

        private bool _isRemoved;

        public bool IsRemoved
        {
            get
            {
                return _isRemoved;
            }

            set
            {
                _isRemoved = value;
            }
        }
    }
}
}
| |
//
// PrintOperation.cs
//
// Author:
// Stephane Delcroix <stephane@delcroix.org>
//
// Copyright (C) 2008-2009 Novell, Inc.
// Copyright (C) 2008-2009 Stephane Delcroix
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using Cairo;
using System;
using System.Runtime.InteropServices;
using Mono.Unix;
using FSpot.Core;
using FSpot.Widgets;
using FSpot.Imaging;
using Hyena;
namespace FSpot
{
/// <summary>
/// Gtk print operation for a set of photos. A custom settings tab
/// (CustomPrintWidget) controls layout (photos per page, fit mode, borders,
/// crop marks, caption format); OnDrawPage lays photos out on a grid and
/// renders them with Cairo.
/// </summary>
public class PrintOperation : Gtk.PrintOperation
{
    IPhoto [] selected_photos;
    int photos_per_page = 1;
    CustomPrintWidget.FitMode fit = CustomPrintWidget.FitMode.Scaled;
    bool repeat, white_borders, crop_marks;
    // Caption template; filled via String.Format in OnDrawPage with
    // {0}=comment, {1}=name, {2}=date, {3}=time, {4}=tags, {5}=description.
    string print_label_format;
    string comment;

    public PrintOperation (IPhoto [] selected_photos) : base ()
    {
        this.selected_photos = selected_photos;
        CustomTabLabel = Catalog.GetString ("Image Settings");
        // One page per photo until the custom widget changes photos_per_page.
        NPages = selected_photos.Length;
        DefaultPageSetup = Global.PageSetup;
    }

    protected override void OnBeginPrint (Gtk.PrintContext context)
    {
        base.OnBeginPrint (context);
    }

    /// <summary>Creates the custom settings tab and primes this operation from its defaults.</summary>
    protected override Gtk.Widget OnCreateCustomWidget ()
    {
        Gtk.Widget widget = new CustomPrintWidget (this);
        widget.ShowAll ();
        (widget as CustomPrintWidget).Changed += OnCustomWidgetChanged;
        // Apply the widget's initial state immediately so the preview is valid.
        OnCustomWidgetChanged (widget);
        return widget;
    }

    /// <summary>Copies every setting from the custom widget into this operation and recomputes the page count.</summary>
    protected override void OnCustomWidgetApply (Gtk.Widget widget)
    {
        CustomPrintWidget cpw = widget as CustomPrintWidget;
        UseFullPage = cpw.UseFullPage;
        photos_per_page = cpw.PhotosPerPage;
        repeat = cpw.Repeat;
        // Repeat mode prints each photo on its own page; otherwise pack
        // photos_per_page photos per page, rounding up.
        NPages = repeat ? selected_photos.Length :(int) Math.Ceiling (1.0 * selected_photos.Length / photos_per_page);
        fit = cpw.Fitmode;
        white_borders = cpw.WhiteBorders;
        crop_marks = cpw.CropMarks;
        print_label_format = cpw.PrintLabelFormat;
        comment = cpw.CustomText;
    }

    /// <summary>Re-applies the settings and re-renders the small preview image shown in the settings tab.</summary>
    protected void OnCustomWidgetChanged (Gtk.Widget widget)
    {
        OnCustomWidgetApply (widget);

        // 360x254 preview surface — presumably chosen to match the preview
        // widget's aspect; confirm against CustomPrintWidget.
        using (ImageSurface surface = new ImageSurface (Format.ARGB32, 360, 254)) {
            using (Context gr = new Context (surface)) {
                // White background, then the sample image drawn with the
                // currently selected fit/border options.
                gr.Color = new Color (1, 1, 1);
                gr.Rectangle (0, 0, 360, 254);
                gr.Fill ();
                using (Gdk.Pixbuf pixbuf = Gdk.Pixbuf.LoadFromResource ("flower.png")) {
                    DrawImage (gr, pixbuf,0, 0, 360, 254);
                }
            }
            (widget as CustomPrintWidget).PreviewImage.Pixbuf = CreatePixbuf (surface);
        }
    }

    /// <summary>Renders one output page: a ppx-by-ppy grid of photos, optional crop marks, and per-photo captions.</summary>
    protected override void OnDrawPage (Gtk.PrintContext context, int page_nr)
    {
        base.OnDrawPage (context, page_nr);
        Context cr = context.CairoContext;

        int ppx, ppy;
        // Map photos-per-page onto grid columns (ppx) x rows (ppy).
        switch (photos_per_page) {
        default:
        case 1: ppx = ppy =1; break;
        case 2: ppx = 1; ppy = 2; break;
        case 4: ppx = ppy = 2; break;
        case 9: ppx = ppy = 3; break;
        case 12: ppx = 3; ppy = 4; break;
        case 20: ppx = 4; ppy = 5; break;
        case 30: ppx = 5; ppy = 6; break;
        }

        //FIXME: if paper is landscape, swap ppx with ppy

        double w = context.Width / ppx;
        double h = context.Height / ppy;

        // compute picture size using 4800DPI
        double mx=(w / 25.4) * 4800, my=(h / 25.4) * 4800;

        // NOTE: loops deliberately run one past the grid (<=) so crop marks
        // are also drawn on the far right/bottom edges; the 'continue' below
        // skips photo drawing for those extra cells.
        for (int x = 0; x <= ppx; x++) {
            for (int y = 0; y <= ppy; y++) {
                int p_index = repeat ? page_nr : page_nr * photos_per_page + y * ppx + x;
                if (crop_marks)
                    DrawCropMarks (cr, x*w, y*h, w*.1);
                if (x == ppx || y == ppy || p_index >= selected_photos.Length)
                    continue;
                using (var img = ImageFile.Create (selected_photos[p_index].DefaultVersion.Uri))
                {
                    Gdk.Pixbuf pixbuf;
                    try {
                        pixbuf = img.Load ((int) mx, (int) my);
                        // Apply the configured printer color profile, if any.
                        Cms.Profile printer_profile;
                        if (FSpot.ColorManagement.Profiles.TryGetValue (Preferences.Get<string> (Preferences.COLOR_MANAGEMENT_OUTPUT_PROFILE), out printer_profile))
                            FSpot.ColorManagement.ApplyProfile (pixbuf, img.GetProfile (), printer_profile);
                    } catch (Exception e) {
                        Log.Exception ("Unable to load image " + selected_photos[p_index].DefaultVersion.Uri + "\n", e);
                        // If the image is not found load error pixbuf
                        pixbuf = new Gdk.Pixbuf (PixbufUtils.ErrorPixbuf, 0, 0,
                                PixbufUtils.ErrorPixbuf.Width,
                                PixbufUtils.ErrorPixbuf.Height);
                    }

                    //Gdk.Pixbuf pixbuf = img.Load (100, 100);
                    bool rotated = false;
                    // Rotate the photo 90° when its orientation (portrait vs
                    // landscape) disagrees with the grid cell's.
                    if (Math.Sign ((double)pixbuf.Width/pixbuf.Height - 1.0) != Math.Sign (w/h - 1.0)) {
                        Gdk.Pixbuf d_pixbuf = pixbuf.RotateSimple (Gdk.PixbufRotation.Counterclockwise);
                        pixbuf.Dispose ();
                        pixbuf = d_pixbuf;
                        rotated = true;
                    }

                    DrawImage (cr, pixbuf, x * w, y * h, w, h);

                    string tag_string = "";
                    foreach (Tag t in selected_photos[p_index].Tags)
                        tag_string = String.Concat (tag_string, t.Name);

                    string label = String.Format (print_label_format,
                                comment,
                                selected_photos[p_index].Name,
                                selected_photos[p_index].Time.ToLocalTime ().ToShortDateString (),
                                selected_photos[p_index].Time.ToLocalTime ().ToShortTimeString (),
                                tag_string,
                                selected_photos[p_index].Description);

                    DrawComment (context, (x + 1) * w, (rotated ? y : y + 1) * h, (rotated ? w : h) * .025, label, rotated);

                    pixbuf.Dispose ();
                }
            }
        }
    }

    protected override void OnEndPrint (Gtk.PrintContext context)
    {
        base.OnEndPrint (context);
        context.Dispose ();
    }

    protected override void OnRequestPageSetup (Gtk.PrintContext context, int page_nr, Gtk.PageSetup setup)
    {
        base.OnRequestPageSetup (context, page_nr, setup);
    }

    /// <summary>Draws a dashed '+' crop mark of the given length centered at (x, y).</summary>
    private void DrawCropMarks (Context cr, double x, double y, double length)
    {
        cr.Save ();
        cr.Color = new Color (0, 0, 0);
        cr.MoveTo (x - length/2, y);
        cr.LineTo (x + length/2, y);
        cr.MoveTo (x, y - length/2);
        cr.LineTo (x, y + length/2);
        cr.LineWidth = .2;
        cr.SetDash (new double[] {length*.4, length*.2}, 0);
        cr.Stroke ();
        cr.Restore ();
    }

    /// <summary>
    /// Renders the caption, scaled to height h and right/bottom aligned to
    /// (x, y); when the photo was rotated the text is rotated 90° CCW so it
    /// still runs along the photo's edge.
    /// </summary>
    private static void DrawComment (Gtk.PrintContext context, double x, double y, double h, string comment, bool rotated)
    {
        if (comment == null || comment == String.Empty)
            return;

        Context cr = context.CairoContext;
        cr.Save ();
        Pango.Layout layout = context.CreatePangoLayout ();
        Pango.FontDescription desc = Pango.FontDescription.FromString ("sans 14");
        layout.FontDescription = desc;
        layout.SetText (comment);
        int lay_w, lay_h;
        layout.GetPixelSize (out lay_w, out lay_h);
        // Scale so the rendered text height equals h.
        double scale = h/lay_h;
        if (rotated) {
            cr.Translate (x - h, y + lay_w * scale);
            cr.Rotate (- Math.PI / 2);
        }
        else
            cr.Translate (x - lay_w * scale, y - h);
        cr.Scale (scale, scale);
        Pango.CairoHelper.ShowLayout (context.CairoContext, layout);
        cr.Restore ();
    }

    /// <summary>
    /// Draws a pixbuf into the w-by-h cell at (x, y) using the current fit
    /// mode: Zoom fills and crops, Fill stretches, Scaled letterboxes. A 2.5%
    /// white border with a thin black outline is added when enabled.
    /// </summary>
    private void DrawImage (Context cr, Gdk.Pixbuf pixbuf, double x, double y, double w, double h)
    {
        double scalex, scaley;
        switch (fit) {
        case CustomPrintWidget.FitMode.Zoom:
            scalex = scaley = Math.Max (w/pixbuf.Width, h/pixbuf.Height);
            break;
        case CustomPrintWidget.FitMode.Fill:
            scalex = w/pixbuf.Width;
            scaley = h/pixbuf.Height;
            break;
        default:
        case CustomPrintWidget.FitMode.Scaled:
            scalex = scaley = Math.Min (w/pixbuf.Width, h/pixbuf.Height);
            break;
        }

        // Cell size expressed in unscaled pixbuf pixels.
        double rectw = w / scalex;
        double recth = h / scaley;

        cr.Save ();
        if (white_borders)
            cr.Translate (w * .025, h * .025);

        cr.Translate (x, y);

        if (white_borders)
            cr.Scale (.95, .95);

        cr.Scale (scalex, scaley);
        // Clip to the cell and center the pixbuf inside it.
        cr.Rectangle (0, 0, rectw, recth);
        Gdk.CairoHelper.SetSourcePixbuf (cr, pixbuf, (rectw - pixbuf.Width) / 2.0, (recth - pixbuf.Height) / 2.0);
        cr.Fill ();
        if (white_borders) {
            cr.Rectangle (0, 0 ,rectw, recth);
            cr.Color = new Color (0, 0, 0);
            // Divide by the scale so the stroke is ~1 device unit wide.
            cr.LineWidth = 1 / scalex;
            cr.Stroke ();
        }
        cr.Restore ();
    }

    // Native helper from libfspot that wraps a Cairo image surface in a GdkPixbuf.
    [DllImport("libfspot")]
    static extern IntPtr f_pixbuf_from_cairo_surface (IntPtr handle);

    /// <summary>Converts a Cairo surface to a Gdk.Pixbuf via the native libfspot helper (takes ownership of the result).</summary>
    private static Gdk.Pixbuf CreatePixbuf (Surface s)
    {
        IntPtr result = f_pixbuf_from_cairo_surface (s.Handle);
        return (Gdk.Pixbuf) GLib.Object.GetObject (result, true);
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.