context stringlengths 2.52k 185k | gt stringclasses 1 value |
|---|---|
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System.Linq;
namespace NLog
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using Internal.Fakeables;
using NLog.Common;
using NLog.Config;
using NLog.Internal;
using NLog.Targets;
#if SILVERLIGHT
using System.Windows;
#endif
/// <summary>
/// Creates and manages instances of <see cref="T:NLog.Logger" /> objects.
/// </summary>
public class LogFactory : IDisposable
{
#if !SILVERLIGHT
private readonly MultiFileWatcher watcher;
private const int ReconfigAfterFileChangedTimeout = 1000;
#endif
private static IAppDomain currentAppDomain;
private readonly Dictionary<LoggerCacheKey, WeakReference> loggerCache = new Dictionary<LoggerCacheKey, WeakReference>();
private static TimeSpan defaultFlushTimeout = TimeSpan.FromSeconds(15);
#if !SILVERLIGHT
private Timer reloadTimer;
#endif
private LoggingConfiguration config;
private LogLevel globalThreshold = LogLevel.MinLevel;
private bool configLoaded;
private int logsEnabled;
/// <summary>
/// Initializes a new instance of the <see cref="LogFactory" /> class.
/// </summary>
public LogFactory()
{
#if !SILVERLIGHT
    // Watch configuration files on disk so edits trigger an automatic reload
    // (see ConfigFileChanged / ReloadConfigOnTimer).
    this.watcher = new MultiFileWatcher();
    this.watcher.OnChange += this.ConfigFileChanged;
#endif
}
/// <summary>
/// Initializes a new instance of the <see cref="LogFactory" /> class.
/// </summary>
/// <param name="config">The config.</param>
public LogFactory(LoggingConfiguration config)
    : this()
{
    // Assigning via the property (not the field) runs the full setter logic:
    // dump, InitializeAll, reconfiguration of loggers, and file watching.
    this.Configuration = config;
}
/// <summary>
/// Occurs when logging <see cref="Configuration" /> changes.
/// </summary>
public event EventHandler<LoggingConfigurationChangedEventArgs> ConfigurationChanged;
#if !SILVERLIGHT
/// <summary>
/// Occurs when logging <see cref="Configuration" /> gets reloaded.
/// </summary>
public event EventHandler<LoggingConfigurationReloadedEventArgs> ConfigurationReloaded;
#endif
/// <summary>
/// Gets the current <see cref="IAppDomain"/>.
/// </summary>
public static IAppDomain CurrentAppDomain
{
    get
    {
        // Lazily capture the wrapped current AppDomain on first access.
        if (currentAppDomain == null)
        {
            currentAppDomain = AppDomainWrapper.CurrentDomain;
        }

        return currentAppDomain;
    }

    set
    {
        currentAppDomain = value;
    }
}
/// <summary>
/// Gets or sets a value indicating whether exceptions should be thrown.
/// </summary>
/// <value>A value of <c>true</c> if exceptions should be thrown; otherwise, <c>false</c>.</value>
/// <remarks>By default exceptions
/// are not thrown under any circumstances.
/// </remarks>
public bool ThrowExceptions { get; set; }
/// <summary>
/// Gets or sets the current logging configuration.
/// </summary>
public LoggingConfiguration Configuration
{
    get
    {
        lock (this)
        {
            if (this.configLoaded)
            {
                return this.config;
            }

            // Mark loaded up-front so a failed auto-load is not retried on every access.
            this.configLoaded = true;

#if !SILVERLIGHT
            if (this.config == null)
            {
                // try to load default configuration
                this.config = XmlLoggingConfiguration.AppConfig;
            }
#endif
            if (this.config == null)
            {
                // Probe the well-known candidate locations; the first existing file wins.
                foreach (string configFile in GetCandidateFileNames())
                {
#if !SILVERLIGHT && !MONO
                    if (File.Exists(configFile))
                    {
                        InternalLogger.Debug("Attempting to load config from {0}", configFile);
                        this.config = new XmlLoggingConfiguration(configFile);
                        break;
                    }
#elif SILVERLIGHT
                    // Silverlight has no file system access; probe embedded resources instead.
                    Uri configFileUri = new Uri(configFile, UriKind.Relative);
                    if (Application.GetResourceStream(configFileUri) != null)
                    {
                        InternalLogger.Debug("Attempting to load config from {0}", configFile);
                        this.config = new XmlLoggingConfiguration(configFile);
                        break;
                    }
#else
                    if (File.Exists(configFile))
                    {
                        InternalLogger.Debug("Attempting to load config from {0}", configFile);
                        this.config = new XmlLoggingConfiguration(configFile);
                        break;
                    }
#endif
                }
            }

#if !SILVERLIGHT
            if (this.config != null)
            {
                Dump(this.config);
                try
                {
                    // Watch the loaded files so on-disk edits trigger a reload.
                    this.watcher.Watch(this.config.FileNamesToWatch);
                }
                catch (Exception exception)
                {
                    // Best-effort: watching failure disables reload but logging still works.
                    InternalLogger.Warn("Cannot start file watching: {0}. File watching is disabled", exception);
                }
            }
#endif
            if (this.config != null)
            {
                this.config.InitializeAll();
            }

            return this.config;
        }
    }

    set
    {
#if !SILVERLIGHT
        try
        {
            // Stop watching before swapping configurations; re-watching happens below.
            this.watcher.StopWatching();
        }
        catch (Exception exception)
        {
            if (exception.MustBeRethrown())
            {
                throw;
            }

            InternalLogger.Error("Cannot stop file watching: {0}", exception);
        }
#endif
        lock (this)
        {
            LoggingConfiguration oldConfig = this.config;
            if (oldConfig != null)
            {
                InternalLogger.Info("Closing old configuration.");
#if !SILVERLIGHT
                // Flush pending messages before the old targets are closed.
                this.Flush();
#endif
                oldConfig.Close();
            }

            this.config = value;
            this.configLoaded = true;

            if (this.config != null)
            {
                Dump(this.config);
                this.config.InitializeAll();
                // Re-point every cached logger at the new configuration.
                this.ReconfigExistingLoggers(this.config);
#if !SILVERLIGHT
                try
                {
                    this.watcher.Watch(this.config.FileNamesToWatch);
                }
                catch (Exception exception)
                {
                    if (exception.MustBeRethrown())
                    {
                        throw;
                    }

                    InternalLogger.Warn("Cannot start file watching: {0}", exception);
                }
#endif
            }

            // Snapshot the delegate to avoid a race with concurrent unsubscription.
            var configurationChangedDelegate = this.ConfigurationChanged;
            if (configurationChangedDelegate != null)
            {
                configurationChangedDelegate(this, new LoggingConfigurationChangedEventArgs(oldConfig, value));
            }
        }
    }
}
/// <summary>
/// Gets or sets the global log threshold. Log events below this threshold are not logged.
/// </summary>
public LogLevel GlobalThreshold
{
    get
    {
        return this.globalThreshold;
    }

    set
    {
        // NOTE(review): lock(this) is the factory-wide gate used consistently
        // throughout this class; kept here for that reason.
        lock (this)
        {
            this.globalThreshold = value;
            // Re-resolve every logger's target chains so the new threshold
            // takes effect immediately.
            this.ReconfigExistingLoggers();
        }
    }
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
public void Dispose()
{
    // Standard dispose pattern: delegate to Dispose(bool) and suppress finalization.
    this.Dispose(true);
    GC.SuppressFinalize(this);
}
/// <summary>
/// Creates a logger that discards all log messages.
/// </summary>
/// <returns>Null logger instance.</returns>
/// <summary>
/// Creates a logger that discards all log messages.
/// </summary>
/// <returns>Null logger instance.</returns>
public Logger CreateNullLogger()
{
    // A logger configured with an empty target chain for every level writes nothing.
    var levelCount = LogLevel.MaxLevel.Ordinal + 1;
    var emptyTargets = new TargetWithFilterChain[levelCount];
    var nullLogger = new Logger();
    nullLogger.Initialize(string.Empty, new LoggerConfiguration(emptyTargets), this);
    return nullLogger;
}
/// <summary>
/// Gets the logger named after the currently-being-initialized class.
/// </summary>
/// <returns>The logger.</returns>
/// <remarks>This is a slow-running method.
/// Make sure you're not doing this in a loop.</remarks>
[MethodImpl(MethodImplOptions.NoInlining)]
public Logger GetCurrentClassLogger()
{
    // NoInlining guarantees StackFrame(1) resolves the immediate caller.
#if SILVERLIGHT
    var frame = new StackFrame(1);
#else
    // 'false' = skip file/line info; only the calling method is needed.
    var frame = new StackFrame(1, false);
#endif
    // NOTE(review): DeclaringType can be null for some methods (e.g. compiler-
    // generated or module-level ones), which would throw here — confirm callers.
    return this.GetLogger(frame.GetMethod().DeclaringType.FullName);
}
/// <summary>
/// Gets the logger named after the currently-being-initialized class.
/// </summary>
/// <param name="loggerType">The type of the logger to create. The type must inherit from NLog.Logger.</param>
/// <returns>The logger.</returns>
/// <remarks>This is a slow-running method.
/// Make sure you're not doing this in a loop.</remarks>
/// <summary>
/// Gets a logger of the given type, named after the currently-being-initialized class.
/// </summary>
/// <param name="loggerType">The type of the logger to create. The type must inherit from NLog.Logger.</param>
/// <returns>The logger.</returns>
/// <remarks>This is a slow-running method (walks the stack).
/// Make sure you're not doing this in a loop.</remarks>
[MethodImpl(MethodImplOptions.NoInlining)]
public Logger GetCurrentClassLogger(Type loggerType)
{
    // Consistency fix: the preprocessor branches are now ordered the same way
    // as in the parameterless GetCurrentClassLogger() overload.
#if SILVERLIGHT
    var frame = new StackFrame(1);
#else
    // 'false' = skip file/line info; only the calling method is needed.
    var frame = new StackFrame(1, false);
#endif
    // NOTE(review): DeclaringType can be null for some methods — confirm callers.
    return this.GetLogger(frame.GetMethod().DeclaringType.FullName, loggerType);
}
/// <summary>
/// Gets the specified named logger.
/// </summary>
/// <param name="name">Name of the logger.</param>
/// <returns>The logger reference. Multiple calls to <c>GetLogger</c> with the same argument aren't guaranteed to return the same logger reference.</returns>
public Logger GetLogger(string name)
{
    // Cache key is (concrete logger type, name); this overload always uses
    // the default Logger type.
    return this.GetLogger(new LoggerCacheKey(typeof(Logger), name));
}
/// <summary>
/// Gets the specified named logger.
/// </summary>
/// <param name="name">Name of the logger.</param>
/// <param name="loggerType">The type of the logger to create. The type must inherit from NLog.Logger.</param>
/// <returns>The logger reference. Multiple calls to <c>GetLogger</c> with the
/// same argument aren't guaranteed to return the same logger reference.</returns>
public Logger GetLogger(string name, Type loggerType)
{
    // Same name with a different loggerType yields a distinct cache entry.
    return this.GetLogger(new LoggerCacheKey(loggerType, name));
}
/// <summary>
/// Loops through all loggers previously returned by GetLogger
/// and recalculates their target and filter list. Useful after modifying the configuration programmatically
/// to ensure that all loggers have been properly configured.
/// </summary>
public void ReconfigExistingLoggers()
{
    // Re-applies whatever configuration is currently loaded (may be null).
    this.ReconfigExistingLoggers(this.config);
}
#if !SILVERLIGHT
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
public void Flush()
{
    // Default timeout is 15 seconds (see defaultFlushTimeout).
    this.Flush(defaultFlushTimeout);
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="timeout">Maximum time to allow for the flush. Any messages after that time will be discarded.</param>
public void Flush(TimeSpan timeout)
{
    try
    {
        // Bridge the asynchronous Flush(continuation) overload into a blocking call.
        AsyncHelpers.RunSynchronously(cb => this.Flush(cb, timeout));
    }
    catch (Exception e)
    {
        // Honor the ThrowExceptions opt-in; otherwise log to the internal log and swallow.
        if (ThrowExceptions)
        {
            throw;
        }

        InternalLogger.Error(e.ToString());
    }
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="timeoutMilliseconds">Maximum time to allow for the flush. Any messages after that time will be discarded.</param>
public void Flush(int timeoutMilliseconds)
{
    // Convenience overload; converts milliseconds to a TimeSpan.
    this.Flush(TimeSpan.FromMilliseconds(timeoutMilliseconds));
}
#endif
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation.</param>
public void Flush(AsyncContinuation asyncContinuation)
{
    // TimeSpan.MaxValue effectively means "no timeout".
    this.Flush(asyncContinuation, TimeSpan.MaxValue);
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation.</param>
/// <param name="timeoutMilliseconds">Maximum time to allow for the flush. Any messages after that time will be discarded.</param>
public void Flush(AsyncContinuation asyncContinuation, int timeoutMilliseconds)
{
    // Convenience overload; converts milliseconds to a TimeSpan.
    this.Flush(asyncContinuation, TimeSpan.FromMilliseconds(timeoutMilliseconds));
}
/// <summary>
/// Flush any pending log messages (in case of asynchronous targets).
/// </summary>
/// <param name="asyncContinuation">The asynchronous continuation.</param>
/// <param name="timeout">Maximum time to allow for the flush. Any messages after that time will be discarded.</param>
public void Flush(AsyncContinuation asyncContinuation, TimeSpan timeout)
{
    try
    {
        InternalLogger.Trace("LogFactory.Flush({0})", timeout);

        // Note: reading Configuration may trigger the lazy auto-load in the getter.
        var loggingConfiguration = this.Configuration;
        if (loggingConfiguration != null)
        {
            InternalLogger.Trace("Flushing all targets...");
            loggingConfiguration.FlushAllTargets(AsyncHelpers.WithTimeout(asyncContinuation, timeout));
        }
        else
        {
            // Nothing to flush; complete the continuation immediately with no error.
            asyncContinuation(null);
        }
    }
    catch (Exception e)
    {
        // Honor the ThrowExceptions opt-in; otherwise log to the internal log and swallow.
        if (ThrowExceptions)
        {
            throw;
        }

        InternalLogger.Error(e.ToString());
    }
}
/// <summary>Decreases the log enable counter and if it reaches -1
/// the logs are disabled.</summary>
/// <remarks>Logging is enabled if the number of <see cref="EnableLogging"/> calls is greater
/// than or equal to <see cref="DisableLogging"/> calls.</remarks>
/// <returns>An object that implements IDisposable whose Dispose() method
/// reenables logging. To be used with C# <c>using ()</c> statement.</returns>
public IDisposable DisableLogging()
{
    lock (this)
    {
        this.logsEnabled--;
        if (this.logsEnabled == -1)
        {
            // Counter just crossed the enabled/disabled boundary:
            // rebuild logger configurations with empty target chains.
            this.ReconfigExistingLoggers();
        }
    }

    // Disposing the returned LogEnabler calls EnableLogging() once.
    return new LogEnabler(this);
}
/// <summary>Increases the log enable counter and if it reaches 0 the logs are enabled again.</summary>
/// <remarks>Logging is enabled if the number of <see cref="EnableLogging"/> calls is greater
/// than or equal to <see cref="DisableLogging"/> calls.</remarks>
public void EnableLogging()
{
    lock (this)
    {
        this.logsEnabled++;
        if (this.logsEnabled == 0)
        {
            // Counter just crossed back to the enabled state:
            // rebuild logger configurations with real target chains.
            this.ReconfigExistingLoggers();
        }
    }
}
/// <summary>
/// Returns <see langword="true" /> if logging is currently enabled.
/// </summary>
/// <returns>A value of <see langword="true" /> if logging is currently enabled,
/// <see langword="false"/> otherwise.</returns>
/// <remarks>Logging is enabled if the number of <see cref="EnableLogging"/> calls is greater
/// than or equal to <see cref="DisableLogging"/> calls.</remarks>
public bool IsLoggingEnabled()
{
    // logsEnabled starts at 0 (enabled); each DisableLogging decrements,
    // each EnableLogging increments.
    return this.logsEnabled >= 0;
}
#if !SILVERLIGHT
// Timer callback scheduled by ConfigFileChanged; 'state' is the configuration
// that was current when the file-change notification arrived.
internal void ReloadConfigOnTimer(object state)
{
    LoggingConfiguration configurationToReload = (LoggingConfiguration)state;

    InternalLogger.Info("Reloading configuration...");
    lock (this)
    {
        // One-shot timer: dispose it so a later change can schedule a fresh one.
        if (this.reloadTimer != null)
        {
            this.reloadTimer.Dispose();
            this.reloadTimer = null;
        }

        this.watcher.StopWatching();
        try
        {
            // Guard against the configuration having been replaced between
            // the change notification and this callback firing.
            if (this.Configuration != configurationToReload)
            {
                throw new NLogConfigurationException("Config changed in between. Not reloading.");
            }

            LoggingConfiguration newConfig = configurationToReload.Reload();
            if (newConfig != null)
            {
                // The Configuration setter re-arms file watching on success.
                this.Configuration = newConfig;
                if (this.ConfigurationReloaded != null)
                {
                    this.ConfigurationReloaded(this, new LoggingConfigurationReloadedEventArgs(true, null));
                }
            }
            else
            {
                throw new NLogConfigurationException("Configuration.Reload() returned null. Not reloading.");
            }
        }
        catch (Exception exception)
        {
            if (exception.MustBeRethrown())
            {
                throw;
            }

            // Reload failed: keep the old configuration and resume watching its files.
            this.watcher.Watch(configurationToReload.FileNamesToWatch);
            var configurationReloadedDelegate = this.ConfigurationReloaded;
            if (configurationReloadedDelegate != null)
            {
                configurationReloadedDelegate(this, new LoggingConfigurationReloadedEventArgs(false, exception));
            }
        }
    }
}
#endif
/// <summary>
/// Applies <paramref name="configuration"/> to every logger previously handed out
/// by <c>GetLogger</c>, recalculating each logger's target/filter chains.
/// </summary>
/// <param name="configuration">The configuration to apply; may be <c>null</c>,
/// in which case loggers get empty target chains.</param>
internal void ReconfigExistingLoggers(LoggingConfiguration configuration)
{
    if (configuration != null)
    {
        configuration.EnsureInitialized();
    }

    // Fix: loggerCache is guarded by lock(this) everywhere else (see GetLogger);
    // enumerating it unlocked could race with a concurrent GetLogger insertion.
    // The monitor is reentrant, so callers that already hold the lock (e.g. the
    // Configuration setter) are unaffected.
    lock (this)
    {
        // ToList() snapshots the values so SetConfiguration cannot invalidate
        // the enumeration.
        foreach (var loggerWrapper in this.loggerCache.Values.ToList())
        {
            // The cache holds weak references; skip loggers that were collected.
            Logger logger = loggerWrapper.Target as Logger;
            if (logger != null)
            {
                logger.SetConfiguration(this.GetConfigurationForLogger(logger.Name, configuration));
            }
        }
    }
}
// Walks the rule tree and, for each log level, builds a linked chain of targets
// (each with its filters) applying to the logger 'name'. targetsByLevel[i] is
// the chain head for level i; lastTargetsByLevel[i] tracks the chain tail so
// recursive calls can keep appending in rule order.
internal void GetTargetsByLevelForLogger(string name, IList<LoggingRule> rules, TargetWithFilterChain[] targetsByLevel, TargetWithFilterChain[] lastTargetsByLevel)
{
    foreach (LoggingRule rule in rules)
    {
        if (!rule.NameMatches(name))
        {
            continue;
        }

        for (int i = 0; i <= LogLevel.MaxLevel.Ordinal; ++i)
        {
            // Skip levels below the global threshold or disabled by this rule.
            if (i < this.GlobalThreshold.Ordinal || !rule.IsLoggingEnabledForLevel(LogLevel.FromOrdinal(i)))
            {
                continue;
            }

            foreach (Target target in rule.Targets)
            {
                var awf = new TargetWithFilterChain(target, rule.Filters);
                if (lastTargetsByLevel[i] != null)
                {
                    // Append to the existing chain for this level.
                    lastTargetsByLevel[i].NextInChain = awf;
                }
                else
                {
                    // First target for this level: becomes the chain head.
                    targetsByLevel[i] = awf;
                }

                lastTargetsByLevel[i] = awf;
            }
        }

        // Child rules contribute their targets after the parent's.
        this.GetTargetsByLevelForLogger(name, rule.ChildRules, targetsByLevel, lastTargetsByLevel);

        if (rule.Final)
        {
            // A matching 'final' rule stops processing of subsequent sibling rules.
            break;
        }
    }

    for (int i = 0; i <= LogLevel.MaxLevel.Ordinal; ++i)
    {
        TargetWithFilterChain tfc = targetsByLevel[i];
        if (tfc != null)
        {
            // Precompute whether any target in this chain needs stack trace capture.
            tfc.PrecalculateStackTraceUsage();
        }
    }
}
/// <summary>
/// Resolves the per-level target chains for the named logger from the given
/// configuration and wraps them in a <see cref="LoggerConfiguration"/>.
/// </summary>
/// <param name="name">Logger name used for rule matching.</param>
/// <param name="configuration">Configuration to resolve against; may be <c>null</c>.</param>
/// <returns>A LoggerConfiguration whose chains are empty when the configuration
/// is null or logging is globally disabled.</returns>
internal LoggerConfiguration GetConfigurationForLogger(string name, LoggingConfiguration configuration)
{
    TargetWithFilterChain[] targetsByLevel = new TargetWithFilterChain[LogLevel.MaxLevel.Ordinal + 1];
    TargetWithFilterChain[] lastTargetsByLevel = new TargetWithFilterChain[LogLevel.MaxLevel.Ordinal + 1];

    if (configuration != null && this.IsLoggingEnabled())
    {
        this.GetTargetsByLevelForLogger(name, configuration.LoggingRules, targetsByLevel, lastTargetsByLevel);
    }

    // Perf fix: the per-level dump strings were previously built unconditionally
    // on every logger creation; only do the StringBuilder work when debug-level
    // internal logging is actually enabled.
    if (InternalLogger.IsDebugEnabled)
    {
        InternalLogger.Debug("Targets for {0} by level:", name);
        for (int i = 0; i <= LogLevel.MaxLevel.Ordinal; ++i)
        {
            StringBuilder sb = new StringBuilder();
            sb.AppendFormat(CultureInfo.InvariantCulture, "{0} =>", LogLevel.FromOrdinal(i));
            for (TargetWithFilterChain afc = targetsByLevel[i]; afc != null; afc = afc.NextInChain)
            {
                sb.AppendFormat(CultureInfo.InvariantCulture, " {0}", afc.Target.Name);
                if (afc.FilterChain.Count > 0)
                {
                    sb.AppendFormat(CultureInfo.InvariantCulture, " ({0} filters)", afc.FilterChain.Count);
                }
            }

            InternalLogger.Debug(sb.ToString());
        }
    }

    return new LoggerConfiguration(targetsByLevel);
}
/// <summary>
/// Releases unmanaged and - optionally - managed resources.
/// </summary>
/// <param name="disposing">True to release both managed and unmanaged resources; <c>false</c> to release only unmanaged resources.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
#if !SILVERLIGHT
        // Release the file watcher and cancel any pending reload.
        this.watcher.Dispose();

        if (this.reloadTimer != null)
        {
            this.reloadTimer.Dispose();
            this.reloadTimer = null;
        }
#endif
    }
}
// Yields candidate configuration file paths in probe order; the Configuration
// getter loads the first one that exists.
private static IEnumerable<string> GetCandidateFileNames()
{
#if SILVERLIGHT
    // Silverlight: only an embedded NLog.config resource is supported.
    yield return "NLog.config";
#else
    // NLog.config from application directory
    if (CurrentAppDomain.BaseDirectory != null)
    {
        yield return Path.Combine(CurrentAppDomain.BaseDirectory, "NLog.config");
    }

    // current config file with .config renamed to .nlog
    string cf = CurrentAppDomain.ConfigurationFile;
    if (cf != null)
    {
        yield return Path.ChangeExtension(cf, ".nlog");

        // NLog.config from each private-bin probing path of the AppDomain.
        IEnumerable<string> privateBinPaths = CurrentAppDomain.PrivateBinPath;
        if (privateBinPaths != null)
        {
            foreach (var path in privateBinPaths)
            {
                if (path != null)
                {
                    yield return Path.Combine(path, "NLog.config");
                }
            }
        }
    }

    // get path to NLog.dll.nlog only if the assembly is not in the GAC
    var nlogAssembly = typeof(LogFactory).Assembly;
    if (!nlogAssembly.GlobalAssemblyCache)
    {
        if (!string.IsNullOrEmpty(nlogAssembly.Location))
        {
            yield return nlogAssembly.Location + ".nlog";
        }
    }
#endif
}
// Writes a diagnostic dump of the configuration to the internal log.
private static void Dump(LoggingConfiguration config)
{
    // Dumping is purely diagnostic; skip it entirely unless debug-level
    // internal logging is switched on.
    if (InternalLogger.IsDebugEnabled)
    {
        config.Dump();
    }
}
// Core logger lookup: returns the cached logger for this key, or creates,
// initializes and caches a new one. The cache holds weak references, so a
// dead entry is transparently replaced.
private Logger GetLogger(LoggerCacheKey cacheKey)
{
    lock (this)
    {
        WeakReference l;
        if (this.loggerCache.TryGetValue(cacheKey, out l))
        {
            Logger existingLogger = l.Target as Logger;
            if (existingLogger != null)
            {
                // logger in the cache and still referenced
                return existingLogger;
            }
        }

        Logger newLogger;

        if (cacheKey.ConcreteType != null && cacheKey.ConcreteType != typeof(Logger))
        {
            try
            {
                // Custom logger subtype requested: instantiate it reflectively.
                newLogger = (Logger)FactoryHelper.CreateInstance(cacheKey.ConcreteType);
            }
            catch (Exception exception)
            {
                if (exception.MustBeRethrown())
                {
                    throw;
                }

                if (ThrowExceptions)
                {
                    throw;
                }

                InternalLogger.Error("Cannot create instance of specified type. Proceeding with default type instance. Exception : {0}", exception);

                //Creating default instance of logger if instance of specified type cannot be created.
                // The cache key is also downgraded so the fallback is cached consistently.
                cacheKey = new LoggerCacheKey(typeof(Logger), cacheKey.Name);

                newLogger = new Logger();
            }
        }
        else
        {
            newLogger = new Logger();
        }

        if (cacheKey.ConcreteType != null)
        {
            // Resolve target chains for this name against the current configuration.
            newLogger.Initialize(cacheKey.Name, this.GetConfigurationForLogger(cacheKey.Name, this.Configuration), this);
        }

        // Weak reference: the cache must not keep otherwise-unreferenced loggers alive.
        this.loggerCache[cacheKey] = new WeakReference(newLogger);
        return newLogger;
    }
}
#if !SILVERLIGHT
// Handler for MultiFileWatcher.OnChange; debounces bursts of file-change
// notifications into a single deferred reload.
private void ConfigFileChanged(object sender, EventArgs args)
{
    InternalLogger.Info("Configuration file change detected! Reloading in {0}ms...", ReconfigAfterFileChangedTimeout);

    // In the rare cases we may get multiple notifications here,
    // but we need to reload config only once.
    //
    // The trick is to schedule the reload in one second after
    // the last change notification comes in.
    lock (this)
    {
        if (this.reloadTimer == null)
        {
            // First notification: schedule a one-shot reload.
            this.reloadTimer = new Timer(
                this.ReloadConfigOnTimer,
                this.Configuration,
                ReconfigAfterFileChangedTimeout,
                Timeout.Infinite);
        }
        else
        {
            // Subsequent notification: push the pending reload back.
            this.reloadTimer.Change(ReconfigAfterFileChangedTimeout, Timeout.Infinite);
        }
    }
}
#endif
/// <summary>
/// Logger cache key: the (concrete logger type, logger name) pair used to
/// index the weak-reference logger cache.
/// </summary>
internal class LoggerCacheKey
{
    internal LoggerCacheKey(Type loggerConcreteType, string name)
    {
        this.ConcreteType = loggerConcreteType;
        this.Name = name;
    }

    internal Type ConcreteType { get; private set; }

    internal string Name { get; private set; }

    /// <summary>
    /// Serves as a hash function for a particular type.
    /// </summary>
    /// <returns>
    /// A hash code for the current <see cref="T:System.Object"/>.
    /// </returns>
    public override int GetHashCode()
    {
        // Fix: ConcreteType may legitimately be null (GetLogger explicitly checks
        // cacheKey.ConcreteType != null), and Name is not validated anywhere, so
        // guard both before dereferencing to avoid NullReferenceException when
        // the key is hashed by the dictionary. Hash codes for fully-populated
        // keys are unchanged.
        int typeHash = this.ConcreteType != null ? this.ConcreteType.GetHashCode() : 0;
        int nameHash = this.Name != null ? this.Name.GetHashCode() : 0;
        return typeHash ^ nameHash;
    }

    /// <summary>
    /// Determines if two objects are equal in value.
    /// </summary>
    /// <param name="o">Other object to compare to.</param>
    /// <returns>True if objects are equal, false otherwise.</returns>
    public override bool Equals(object o)
    {
        var key = o as LoggerCacheKey;
        if (ReferenceEquals(key, null))
        {
            return false;
        }

        // Null-safe: '==' on Type and string handles null operands correctly.
        return (this.ConcreteType == key.ConcreteType) && (key.Name == this.Name);
    }
}
/// <summary>
/// Enables logging in <see cref="IDisposable.Dispose"/> implementation.
/// Returned by <see cref="LogFactory.DisableLogging"/> so that a
/// <c>using</c> block re-enables logging on exit.
/// </summary>
private class LogEnabler : IDisposable
{
    // Factory whose enable counter will be incremented on Dispose.
    private LogFactory factory;

    /// <summary>
    /// Initializes a new instance of the <see cref="LogEnabler" /> class.
    /// </summary>
    /// <param name="factory">The factory.</param>
    public LogEnabler(LogFactory factory)
    {
        this.factory = factory;
    }

    /// <summary>
    /// Enables logging.
    /// </summary>
    void IDisposable.Dispose()
    {
        this.factory.EnableLogging();
    }
}
}
}
| |
using System;
using System.Linq;
using System.Collections.Generic;
using NUnit.Framework;
using Rothko.Services;
using Rothko.Commands;
using Rothko.Interfaces.Services;
using Rothko.Interfaces.Commands;
using Rothko.Model;
using Moq;
using Rothko.Interfaces.Factories;
using Rothko.Enumerators;
namespace Rothko.Tests.Services
{
[TestFixture]
public class CommandInterpreterServiceTests
{
Mock<ICommandManagerService> _ManagerServiceMock;
CommandInterpreterService _InterpreterService;
Mock<ICommandFactory> _CommandFactoryMock;
[SetUp]
public void SetUp()
{
    // Fresh mocks per test so Moq Verify() counts are isolated between tests.
    _ManagerServiceMock = new Mock<ICommandManagerService>();
    _CommandFactoryMock = new Mock<ICommandFactory>();
    _InterpreterService = new CommandInterpreterService(
        _ManagerServiceMock.Object,
        _CommandFactoryMock.Object);
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerUnselect()
{
    // "Unselect" takes no arguments and should map to the unselect command.
    _InterpreterService.Interpret(CommandType.Unselect.ToString());
    _CommandFactoryMock.Verify(c => c.CreateUnselectCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerSelect()
{
_InterpreterService.Interpret(CommandType.Select.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateSelectCommand(1, 1));
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerWhenSelectShouldPutUpdateUnitsThatCanBeBuilt()
{
_InterpreterService.Interpret(CommandType.Select.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateUpdateUnitsThatCanBeBuiltCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerEndTurn()
{
_InterpreterService.Interpret(CommandType.EndTurn.ToString());
_CommandFactoryMock.Verify(c => c.CreateEndTurnCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerWhenEndTurnShouldPutUpdateFogTiles()
{
_InterpreterService.Interpret(CommandType.EndTurn.ToString());
_CommandFactoryMock.Verify(c => c.CreateUpdateFogTilesCommand());
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerPrepareAttack ()
{
_InterpreterService.Interpret(
CommandType.Attack.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreatePrepareAttackCommand(1, 1));
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerWhenAttackShouldPutUpdateFogTiles ()
{
_InterpreterService.Interpret(
CommandType.Attack.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateUpdateFogTilesCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerAttack()
{
_InterpreterService.Interpret(
CommandType.Attack.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateAttackCommand(1, 1));
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerPrepareMove ()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreatePrepareMoveCommand(1, 1));
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerWhenMoveShouldPutUpdateFogTiles ()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateUpdateFogTilesCommand());
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerSelectUnitForUnloading ()
{
_InterpreterService.Interpret(
CommandType.SelectUnitForUnloading.ToString() + " 1 2");
_CommandFactoryMock.Verify(c => c.CreateSelectUnitForUnloadingCommand(1, 2));
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerChangeCursorPosition ()
{
_InterpreterService.Interpret(
CommandType.ChangeCursorPosition.ToString() + " 1 2");
_CommandFactoryMock.Verify(c => c.CreateChangeCursorPositionCommand(1, 2));
}
[Test]
public void InterpreterShouldPutCorrectCommandTypeInManagerPrepareMoveWithMoreArguments ()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1 2 4");
_CommandFactoryMock.Verify(c => c.CreatePrepareMoveCommand(1, 1), Times.Once());
_CommandFactoryMock.Verify(c => c.CreatePrepareMoveCommand(2, 4), Times.Once());
_CommandFactoryMock.Verify(c => c.CreatePrepareMoveCommand(It.IsAny<int>(), It.IsAny<int>()),
Times.Exactly(2));
}
[Test]
public void InterpreterShouldPutCorrectNumberOfCommandsManager ()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1 2 4");
_CommandFactoryMock.Verify(c => c
.CreatePrepareMoveCommand(It.IsAny<int>(), It.IsAny<int>()), Times.Exactly(2));
_CommandFactoryMock.Verify(c => c
.CreateMoveCommand(It.IsAny<int>(), It.IsAny<int>()), Times.Exactly(2));
_CommandFactoryMock.Verify(c => c
.CreateUpdateFogTilesCommand(), Times.Exactly(2));
_CommandFactoryMock.Verify(c => c
.CreateEndMoveCommand(), Times.Once());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerMove()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateMoveCommand(1, 1));
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerMoveMultipleArguments()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 2 3 4 5 6 7");
_CommandFactoryMock.Verify(c => c.CreateMoveCommand(2, 3), Times.Once());
_CommandFactoryMock.Verify(c => c.CreateMoveCommand(4, 5), Times.Once());
_CommandFactoryMock.Verify(c => c.CreateMoveCommand(6, 7), Times.Once());
_CommandFactoryMock.Verify(c => c.CreateMoveCommand(It.IsAny<int>(), It.IsAny<int>()),
Times.Exactly(3));
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerEndMove()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateEndMoveCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerChangeGameState()
{
_InterpreterService.Interpret(
CommandType.ChangeGameState.ToString() + " UnitSelected");
_CommandFactoryMock.Verify(c => c.CreateChangeGameStateCommand(GameState.UnitSelected));
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerEndMoveMultipleArguments()
{
_InterpreterService.Interpret(
CommandType.Move.ToString() + " 1 1 2 3 4 5 6 7 8 9");
_CommandFactoryMock.Verify(c => c.CreateEndMoveCommand(), Times.Once ());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerEnterCarrierUnit()
{
_InterpreterService.Interpret(CommandType.EnterCarrierUnit.ToString() + " 1 1");
_CommandFactoryMock.Verify(c => c.CreateEnterCarrierUnitCommand(1, 1));
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerCreateUnit()
{
_InterpreterService.Interpret(CommandType.CreateUnit.ToString() + " 1 2");
_CommandFactoryMock.Verify(c => c.CreateCreateUnitCommand(1, 2));
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerCreateUnitShouldPutUnselectCommand()
{
_InterpreterService.Interpret(CommandType.CreateUnit.ToString() + " 1 2");
_CommandFactoryMock.Verify(c => c.CreateUnselectCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerWhenCreateUnitShouldPutUpdateFogTiles()
{
_InterpreterService.Interpret(CommandType.CreateUnit.ToString() + " 1 2");
_CommandFactoryMock.Verify(c => c.CreateUpdateFogTilesCommand());
}
[Test]
public void InterpretShouldPutCorrectCommandTypeInManagerLeaveCarrierUnit()
{
_InterpreterService.Interpret(CommandType.LeaveCarrierUnit.ToString() + " 1 1 1");
_CommandFactoryMock.Verify(c => c.CreateLeaveCarrierUnitCommand(1, 1, 1));
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionJustSelect()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Select.ToString());
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionJustMove()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Move.ToString());
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionSelectWithOneArgument()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Select.ToString() + " 10");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionSelectUnitForUnloadingWithNoArgument()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.SelectUnitForUnloading.ToString());
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionSelectUnitForUnloadingWithOneArgument()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.SelectUnitForUnloading.ToString() + " 10");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionMoveWithOneArgument()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Move.ToString() + " 10");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionMoveWithThreeArguments()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Move.ToString() + " 10 5 2");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionJustAttack()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Attack.ToString());
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionAttackWithOneArgument()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.Attack.ToString() + " 10");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionJustCreateUnit()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.CreateUnit.ToString());
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionCreateUnitWithOneArgument()
{
WhenNotEnoughArgumentsArePassedShouldThrowException(CommandType.CreateUnit.ToString() + " 10");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionJustEnterCarrierUnit()
{
    string command = CommandType.EnterCarrierUnit.ToString();
    WhenNotEnoughArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionChangeGameState()
{
    string command = CommandType.ChangeGameState.ToString();
    WhenNotEnoughArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionEnterCarrierUnitWithOneArgument()
{
    string command = CommandType.EnterCarrierUnit.ToString() + " 10";
    WhenNotEnoughArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionJustLeaveCarrier()
{
    string command = CommandType.LeaveCarrierUnit.ToString();
    WhenNotEnoughArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionLeaveCarrierUnitWithOneArgument()
{
    string command = CommandType.LeaveCarrierUnit.ToString() + " 10";
    WhenNotEnoughArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenNotEnoughArgumentsArePassedShouldThrowExceptionLeaveCarrierUnitWithTwoArguments()
{
    string command = CommandType.LeaveCarrierUnit.ToString() + " 10 10";
    WhenNotEnoughArgumentsArePassedShouldThrowException(command);
}
/// <summary>
/// Shared driver for the "not enough arguments" cases: hands the raw command
/// string to the interpreter. Each caller carries its own [ExpectedException]
/// attribute and expects Interpret to throw.
/// </summary>
public void WhenNotEnoughArgumentsArePassedShouldThrowException(string command)
{
    _InterpreterService.Interpret(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException1()
{
    string command = CommandType.Select.ToString() + " a 10";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException2()
{
    string command = CommandType.Move.ToString() + " gibberish 10";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException3()
{
    string command = CommandType.Attack.ToString() + " gibberish 10";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException4()
{
    string command = CommandType.Select.ToString() + " 10 g1b3r1sh";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException5()
{
    string command = CommandType.Move.ToString() + " 10 123abc";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException6()
{
    string command = CommandType.Attack.ToString() + " 10 123abc";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException7()
{
    string command = CommandType.CreateUnit.ToString() + " 10 123abc";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException8()
{
    // First positional argument is non-numeric.
    string command = CommandType.LeaveCarrierUnit.ToString() + " 10abc 1 1";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException9()
{
    // Second positional argument is non-numeric.
    string command = CommandType.LeaveCarrierUnit.ToString() + " 10 1fd 1";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException10()
{
    // Third positional argument is non-numeric.
    string command = CommandType.LeaveCarrierUnit.ToString() + " 10 1 1xv";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void WhenInvalidArgumentsArePassedShouldThrowException11()
{
    string command = CommandType.ChangeGameState.ToString() + " gibberish";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
/// <summary>
/// Shared driver for the "invalid argument" cases: hands the raw command
/// string to the interpreter. Each caller carries its own [ExpectedException]
/// attribute and expects Interpret to throw.
/// </summary>
public void WhenInvalidArgumentsArePassedShouldThrowException(string command)
{
    _InterpreterService.Interpret(command);
}
[Test]
[ExpectedException(typeof(InvalidOperationException))]
[TestCase("gibberish")]
[TestCase("")]
[TestCase(null)]
// Unknown, empty, and null command strings must all be rejected as an invalid
// operation (unknown command) rather than as bad arguments.
// NOTE(review): [Test] alongside [TestCase] is redundant in NUnit — confirm the
// runner version tolerates the combination.
public void WhenInvalidCommandIsPassedShouldThrowException(string command)
{
    _InterpreterService.Interpret(command);
}
[Test]
[ExpectedException(typeof(InvalidOperationException))]
public void WhenInvalidCommandIsPassedShouldThrowExceptionWithSelectAndInvalidArguments()
{
    // Deliberately no space after "Select": "Selectabcd 10 10" is an unknown
    // command word, so InvalidOperationException (not ArgumentException) is expected.
    string command = CommandType.Select.ToString() + "abcd 10 10";
    WhenInvalidArgumentsArePassedShouldThrowException(command);
}
}
}
| |
using System.Collections.Generic;
using System.Linq;
using System;
using System.Text.RegularExpressions;
using Plivo.Exception;
namespace Plivo.Utilities
{
public class MpcUtils
{
    // Fix: a RegexOptions.Compiled regex that is constructed on every call defeats
    // the purpose of compilation; build each pattern once and reuse it.
    private static readonly Regex UrlRegex = new Regex(
        @"^(?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~:/?#[\]@!\$&'\(\)\*\+,;=.]+$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase);

    private static readonly Regex DateTimeRegex = new Regex(
        @"^(\d{4}\-\d{2}\-\d{2}\ \d{2}\:\d{2}(\:\d{2}(\.\d{1,6})?)?)$",
        RegexOptions.Compiled | RegexOptions.IgnoreCase);

    /// <summary>
    /// Validates that a subaccount id is exactly 20 characters long and starts with "SA".
    /// </summary>
    /// <param name="accountId">Subaccount id to validate.</param>
    /// <exception cref="PlivoValidationException">Thrown when either check fails.</exception>
    public static void ValidSubAccount(string accountId)
    {
        if (accountId.Length != 20)
        {
            throw new PlivoValidationException("Subaccount Id should be of length 20");
        }
        if (accountId.Substring(0, 2) != "SA")
        {
            // Fix: removed a leftover debug Console.WriteLine that echoed part of the id.
            throw new PlivoValidationException("Subaccount Id should start with 'SA' ");
        }
    }

    /// <summary>
    /// Checks that multiple destination values are only supplied for the "agent" role
    /// and that their count stays strictly below <paramref name="limit"/>.
    /// </summary>
    /// <returns>true when the value passes both checks.</returns>
    /// <exception cref="PlivoValidationException">Thrown when either check fails.</exception>
    public static bool ValidMultipleDestinationNos(string paramName, string paramValue, string role, char delimiter, int limit)
    {
        // Fix: split once (the original split twice) and use the array's Length
        // instead of LINQ Count().
        int count = paramValue.Split(delimiter).Length;
        // Fix: culture-independent comparison instead of role.ToLower() != "agent"
        // (avoids the Turkish-I pitfall and an allocation).
        if (count > 1 && !string.Equals(role, "agent", StringComparison.OrdinalIgnoreCase))
        {
            throw new PlivoValidationException("Multiple " + paramName + " values given for role " + role);
        }
        if (count >= limit)
        {
            throw new PlivoValidationException("No of " + paramName + " values provided should be lesser than " +
                                               Convert.ToString(limit));
        }
        return true;
    }

    /// <summary>
    /// Validates that every destination value in the separated list parses as an int.
    /// </summary>
    /// <returns>true when all values are integers.</returns>
    /// <exception cref="PlivoValidationException">Thrown on the first non-integer value.</exception>
    public static bool ValidMultipleDestinationIntegers(string paramName, string paramValue)
    {
        // NOTE(review): '&lt;' is assumed to be Plivo's multi-destination separator
        // for this field — confirm against the API that produces paramValue.
        foreach (string value in paramValue.Split('<'))
        {
            int n;
            if (!int.TryParse(value, out n))
            {
                // Fix: added the missing space between the parameter name and the message.
                throw new PlivoValidationException(paramName + " Destination Values must be of type int");
            }
        }
        return true;
    }

    /// <summary>
    /// Validates an optional string parameter against an optional set of expected values.
    /// </summary>
    /// <returns>true when the value is absent, unconstrained, or among the expected values.</returns>
    /// <exception cref="PlivoValidationException">Missing mandatory value or unexpected value.</exception>
    public static bool ValidParamString(string paramName, string paramValue = null, bool mandatory = false, List<string> expectedValues = null)
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null || expectedValues == null)
        {
            return true;
        }
        if (!expectedValues.Contains(paramValue))
        {
            throw new PlivoValidationException(paramName + ": Expected one of " + string.Join(" ", expectedValues) + " but received " + paramValue + " instead");
        }
        return true;
    }

    /// <summary>
    /// Validates an optional unsigned-int parameter against an optional set of expected values.
    /// </summary>
    /// <returns>true when the value is absent, unconstrained, or among the expected values.</returns>
    /// <exception cref="PlivoValidationException">Missing mandatory value or unexpected value.</exception>
    public static bool ValidParamInt(string paramName, uint? paramValue = null, bool mandatory = false, List<uint?> expectedValues = null)
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null || expectedValues == null)
        {
            return true;
        }
        if (!expectedValues.Contains(paramValue))
        {
            throw new PlivoValidationException(paramName + ": Expected one of " + string.Join(" ", expectedValues) + " but received " + paramValue + " instead");
        }
        return true;
    }

    /// <summary>
    /// Validates each separated value of a multi-value parameter against the expected set,
    /// after normalizing case (lower when <paramref name="makeLowerCase"/>, otherwise upper).
    /// </summary>
    /// <returns>true when every value is expected (or the parameter is absent/unconstrained).</returns>
    /// <exception cref="PlivoValidationException">Missing mandatory value or unexpected value.</exception>
    public static bool MultiValidParam(string paramName, string paramValue, bool mandatory = false, bool makeLowerCase = false, List<string> expectedValues = null, char separator = ',')
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null)
        {
            return true;
        }
        // Fix: the original dereferenced expectedValues unconditionally and threw
        // NullReferenceException when it was null; treat null as "unconstrained",
        // consistent with ValidParamString/ValidParamInt.
        if (expectedValues == null)
        {
            return true;
        }
        paramValue = makeLowerCase ? paramValue.ToLower() : paramValue.ToUpper();
        foreach (string value in paramValue.Split(separator))
        {
            if (!expectedValues.Contains(value.Trim()))
            {
                throw new PlivoValidationException(paramName + ": Expected one of " + string.Join(" ", expectedValues) + " but received " + value + " instead");
            }
        }
        return true;
    }

    /// <summary>
    /// Validates that an optional parameter, when present, looks like a URL.
    /// </summary>
    /// <returns>true when the value is absent or matches the URL pattern.</returns>
    /// <exception cref="PlivoValidationException">Missing mandatory value or malformed URL.</exception>
    public static bool ValidUrl(string paramName, string paramValue, bool mandatory = false)
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null)
        {
            return true;
        }
        if (!UrlRegex.IsMatch(paramValue))
        {
            throw new PlivoValidationException("Invalid URL : Doesn't satisfy the URL format");
        }
        return true;
    }

    /// <summary>
    /// Accepts the value when it matches one of the expected strings (case-insensitively)
    /// or when it is a valid URL.
    /// </summary>
    /// <returns>true when the value is absent, expected, or a valid URL.</returns>
    /// <exception cref="PlivoValidationException">Missing mandatory value or malformed URL.</exception>
    public static bool IsOneAmongStringUrl(string paramName, string paramValue, bool mandatory = false,
        List<string> expectedValues = null)
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null)
        {
            return true;
        }
        // Fix: guard against a null expectedValues list (the original threw
        // NullReferenceException), consistent with the other validators.
        if (expectedValues != null &&
            (expectedValues.Contains(paramValue.ToLower()) || expectedValues.Contains(paramValue.ToUpper())))
        {
            return true;
        }
        // ValidUrl throws on an invalid URL, so the original trailing
        // "neither a valid URL nor in the expected values" branch was unreachable;
        // delegating keeps the observable behavior identical.
        return ValidUrl(paramName, paramValue);
    }

    /// <summary>
    /// Validates that an optional parameter, when present, matches
    /// "yyyy-MM-dd HH:mm[:ss[.ffffff]]".
    /// </summary>
    /// <returns>true when the value is absent or matches the datetime pattern.</returns>
    /// <exception cref="PlivoValidationException">Missing mandatory value or malformed datetime.</exception>
    public static bool ValidDateFormat(string paramName, string paramValue, bool mandatory = false)
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null)
        {
            return true;
        }
        if (!DateTimeRegex.IsMatch(paramValue))
        {
            throw new PlivoValidationException("Invalid Datetime : Doesn't satisfy the datetime format");
        }
        return true;
    }

    /// <summary>
    /// Validates that an optional numeric parameter lies within the given inclusive bounds.
    /// At least one bound must be supplied.
    /// </summary>
    /// <returns>true when the value is absent or within range.</returns>
    /// <exception cref="PlivoValidationException">
    /// Missing mandatory value, out-of-range value, or no bounds supplied.
    /// </exception>
    public static bool ValidRange(string paramName, uint? paramValue, bool mandatory = false, uint? lowerBound = null, uint? upperBound = null)
    {
        if (mandatory && paramValue == null)
        {
            throw new PlivoValidationException(paramName + " is a required parameter");
        }
        if (paramValue == null)
        {
            return true;
        }
        if (lowerBound == null && upperBound == null)
        {
            throw new PlivoValidationException("Any one or both of lower and upper bound should be provided");
        }
        // Simplified: the original re-tested the complement of each failure condition
        // before returning true; once the throw is not taken the value is in range.
        if (lowerBound != null && upperBound != null)
        {
            if (paramValue < lowerBound || paramValue > upperBound)
            {
                throw new PlivoValidationException(paramName + " ranges between " + Convert.ToString(lowerBound) + " and " + Convert.ToString(upperBound));
            }
        }
        else if (lowerBound != null)
        {
            if (paramValue < lowerBound)
            {
                throw new PlivoValidationException(paramName + " should be greater than " +
                                                   Convert.ToString(lowerBound));
            }
        }
        else if (paramValue > upperBound)
        {
            throw new PlivoValidationException(paramName + " should be lesser than " +
                                               Convert.ToString(upperBound));
        }
        return true;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.Sql.Fluent
{
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Update;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Definition;
using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions;
using Microsoft.Azure.Management.Sql.Fluent.SqlDatabase.Definition;
using Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Definition;
using Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.SqlElasticPoolDefinition;
using Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Update;
using Microsoft.Azure.Management.Sql.Fluent.SqlElasticPoolOperations.Definition;
using Microsoft.Azure.Management.Sql.Fluent.SqlElasticPoolOperations.SqlElasticPoolOperationsDefinition;
using Microsoft.Azure.Management.Sql.Fluent.SqlServer.Definition;
using Microsoft.Azure.Management.Sql.Fluent.Models;
using System.Collections.Generic;
using System;
internal partial class SqlElasticPoolImpl
{
/// <summary>Gets the name of the resource group this elastic pool belongs to.</summary>
string Microsoft.Azure.Management.ResourceManager.Fluent.Core.IHasResourceGroup.ResourceGroupName => this.ResourceGroupName();
/// <summary>Guarantees every database in the pool at least the given DTU.</summary>
/// <param name="databaseDtuMin">Minimum DTU for all SQL Azure databases.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithDatabaseDtuMin.WithDatabaseDtuMin(int databaseDtuMin) => this.WithDatabaseDtuMin(databaseDtuMin);
/// <summary>Guarantees every database in the pool at least the given DTU.</summary>
/// <param name="databaseDtuMin">Minimum DTU for all SQL Azure databases.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithDatabaseDtuMin.WithDatabaseDtuMin(int databaseDtuMin) => this.WithDatabaseDtuMin(databaseDtuMin);
/// <summary>Selects the premium edition for the SQL Elastic Pool.</summary>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithPremiumEdition SqlElasticPoolOperations.Definition.IWithEditionBeta.WithPremiumPool() => this.WithPremiumPool();
/// <summary>Selects an edition for the SQL Elastic Pool by name.</summary>
/// <param name="edition">Edition to be set for the elastic pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithEditionBeta.WithEdition(string edition) => this.WithEdition(edition);
/// <summary>Selects the standard edition for the SQL Elastic Pool.</summary>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithStandardEdition SqlElasticPoolOperations.Definition.IWithEditionBeta.WithStandardPool() => this.WithStandardPool();
/// <summary>Selects the basic edition for the SQL Elastic Pool.</summary>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithBasicEdition SqlElasticPoolOperations.Definition.IWithEditionBeta.WithBasicPool() => this.WithBasicPool();
/// <summary>Sets the total shared DTU for the SQL Azure Database Elastic Pool.</summary>
/// <param name="dtu">Total shared DTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithAttach<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithDtu<SqlServer.Definition.IWithCreate>.WithDtu(int dtu) => this.WithDtu(dtu);
/// <summary>Sets the storage limit for the elastic pool, in MB.</summary>
/// <param name="storageMB">Storage limit in MB.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithStorageCapacity.WithStorageCapacity(int storageMB) => this.WithStorageCapacity(storageMB);
/// <summary>Sets the storage limit for the elastic pool, in MB.</summary>
/// <param name="storageMB">Storage limit in MB.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithStorageCapacity.WithStorageCapacity(int storageMB) => this.WithStorageCapacity(storageMB);
/// <summary>Adds an existing database, by name, to the SQL elastic pool.</summary>
/// <param name="databaseName">Name of the existing database to add.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithDatabase.WithExistingDatabase(string databaseName) => this.WithExistingDatabase(databaseName);
/// <summary>Adds an existing database instance to the SQL elastic pool.</summary>
/// <param name="database">Database instance to add.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithDatabase.WithExistingDatabase(ISqlDatabase database) => this.WithExistingDatabase(database);
/// <summary>Begins the definition of a new SQL Database to be added to this server.</summary>
/// <param name="databaseName">The name of the new SQL Database.</param>
/// <return>The first stage of the new SQL Database definition.</return>
SqlDatabase.Definition.IWithExistingDatabaseAfterElasticPool<SqlElasticPoolOperations.Definition.IWithCreate> SqlElasticPoolOperations.Definition.IWithDatabaseBeta.DefineDatabase(string databaseName) => this.DefineDatabase(databaseName);
/// <summary>Creates a new database inside the SQL elastic pool.</summary>
/// <param name="databaseName">Name of the new database.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithDatabase.WithNewDatabase(string databaseName) => this.WithNewDatabase(databaseName);
/// <summary>Adds an existing database, by name, to the SQL elastic pool.</summary>
/// <param name="databaseName">Name of the existing database to add.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithDatabase.WithExistingDatabase(string databaseName) => this.WithExistingDatabase(databaseName);
/// <summary>Adds an existing database instance to the SQL elastic pool.</summary>
/// <param name="database">Database instance to add.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithDatabase.WithExistingDatabase(ISqlDatabase database) => this.WithExistingDatabase(database);
/// <summary>Creates a new database inside the SQL elastic pool.</summary>
/// <param name="databaseName">Name of the new database.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithDatabase.WithNewDatabase(string databaseName) => this.WithNewDatabase(databaseName);
/// <summary>Caps the DTU any single database in the pool can consume.</summary>
/// <param name="databaseDtuMax">Maximum DTU any one SQL Azure Database can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithDatabaseDtuMax.WithDatabaseDtuMax(int databaseDtuMax) => this.WithDatabaseDtuMax(databaseDtuMax);
/// <summary>Caps the DTU any single database in the pool can consume.</summary>
/// <param name="databaseDtuMax">Maximum DTU any one SQL Azure Database can consume.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithDatabaseDtuMax.WithDatabaseDtuMax(int databaseDtuMax) => this.WithDatabaseDtuMax(databaseDtuMax);
/// <summary>Sets the total shared eDTU for the basic-edition elastic pool.</summary>
/// <param name="eDTU">Total shared eDTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithBasicEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithBasicEditionBeta<SqlServer.Definition.IWithCreate>.WithReservedDtu(SqlElasticPoolBasicEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>Guarantees each database in the basic-edition pool a minimum eDTU.</summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithBasicEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithBasicEditionBeta<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMin(SqlElasticPoolBasicMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>Caps the eDTU any database in the basic-edition pool can consume.</summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithBasicEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithBasicEditionBeta<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMax(SqlElasticPoolBasicMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>Sets the total shared eDTU for the premium-edition elastic pool.</summary>
/// <param name="eDTU">Total shared eDTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithPremiumEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithPremiumEditionBeta<SqlServer.Definition.IWithCreate>.WithReservedDtu(SqlElasticPoolPremiumEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>Guarantees each database in the premium-edition pool a minimum eDTU.</summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithPremiumEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithPremiumEditionBeta<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMin(SqlElasticPoolPremiumMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>Sets the storage capacity for the premium-edition elastic pool.</summary>
/// <param name="storageCapacity">Storage capacity for the pool. (The SDK type name
/// "SqlElasticPoolPremiumSorage" is misspelled upstream.)</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithPremiumEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithPremiumEditionBeta<SqlServer.Definition.IWithCreate>.WithStorageCapacity(SqlElasticPoolPremiumSorage storageCapacity) => this.WithStorageCapacity(storageCapacity);
/// <summary>Caps the eDTU any database in the premium-edition pool can consume.</summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithPremiumEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithPremiumEditionBeta<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMax(SqlElasticPoolPremiumMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>Removes a tag from the resource.</summary>
/// <param name="key">The key of the tag to remove.</param>
/// <return>The next stage of the resource update.</return>
Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Update.IUpdate IUpdateWithTags<SqlElasticPool.Update.IUpdate>.WithoutTag(string key) => this.WithoutTag(key);
/// <summary>Replaces the resource's tags with the given map.</summary>
/// <param name="tags">A Map of tags.</param>
/// <return>The next stage of the resource update.</return>
Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Update.IUpdate Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Update.IUpdateWithTags<Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Update.IUpdate>.WithTags(IDictionary<string,string> tags) => this.WithTags(tags);
/// <summary>Adds a single tag to the resource.</summary>
/// <param name="key">The key for the tag.</param>
/// <param name="value">The value for the tag.</param>
/// <return>The next stage of the resource update.</return>
Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Update.IUpdate Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Update.IUpdateWithTags<Microsoft.Azure.Management.Sql.Fluent.SqlElasticPool.Update.IUpdate>.WithTag(string key, string value) => this.WithTag(key, value);
/// <summary>Sets the total shared DTU for the SQL Azure Database Elastic Pool.</summary>
/// <param name="dtu">Total shared DTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithCreate SqlElasticPoolOperations.Definition.IWithDtu.WithDtu(int dtu) => this.WithDtu(dtu);
/// <summary>Sets the total shared DTU for the SQL Azure Database Elastic Pool.</summary>
/// <param name="dtu">Total shared DTU for the pool.</param>
/// <return>The next stage of the update.</return>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithDtu.WithDtu(int dtu) => this.WithDtu(dtu);
/// <summary>Attaches this child definition to the parent resource definition.</summary>
/// <return>The next stage of the parent definition.</return>
SqlServer.Definition.IWithCreate Microsoft.Azure.Management.ResourceManager.Fluent.Core.ChildResource.Definition.IInDefinition<SqlServer.Definition.IWithCreate>.Attach() => this.Attach();
/// <summary>Sets the total shared eDTU for the premium-edition elastic pool.</summary>
/// <param name="eDTU">Total shared eDTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithPremiumEdition SqlElasticPoolOperations.Definition.IWithPremiumEditionBeta.WithReservedDtu(SqlElasticPoolPremiumEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>Guarantees each database in the premium-edition pool a minimum eDTU.</summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithPremiumEdition SqlElasticPoolOperations.Definition.IWithPremiumEditionBeta.WithDatabaseDtuMin(SqlElasticPoolPremiumMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>Sets the storage capacity for the premium-edition elastic pool.</summary>
/// <param name="storageCapacity">Storage capacity for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithPremiumEdition SqlElasticPoolOperations.Definition.IWithPremiumEditionBeta.WithStorageCapacity(SqlElasticPoolPremiumSorage storageCapacity) => this.WithStorageCapacity(storageCapacity);
/// <summary>Caps the eDTU any database in the premium-edition pool can consume.</summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithPremiumEdition SqlElasticPoolOperations.Definition.IWithPremiumEditionBeta.WithDatabaseDtuMax(SqlElasticPoolPremiumMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>Caps the DTU any single database in the pool can consume.</summary>
/// <param name="databaseDtuMax">Maximum DTU any one SQL Azure Database can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPool.Definition.IWithAttach<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithDatabaseDtuMax<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMax(int databaseDtuMax) => this.WithDatabaseDtuMax(databaseDtuMax);
/// <summary>Sets the total shared eDTU for the standard-edition elastic pool.</summary>
/// <param name="eDTU">Total shared eDTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithStandardEdition SqlElasticPoolOperations.Definition.IWithStandardEditionBeta.WithReservedDtu(SqlElasticPoolStandardEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>Guarantees each database in the standard-edition pool a minimum eDTU.</summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithStandardEdition SqlElasticPoolOperations.Definition.IWithStandardEditionBeta.WithDatabaseDtuMin(SqlElasticPoolStandardMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>Sets the storage capacity for the standard-edition elastic pool.</summary>
/// <param name="storageCapacity">Storage capacity for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithStandardEdition SqlElasticPoolOperations.Definition.IWithStandardEditionBeta.WithStorageCapacity(SqlElasticPoolStandardStorage storageCapacity) => this.WithStorageCapacity(storageCapacity);
/// <summary>Caps the eDTU any database in the standard-edition pool can consume.</summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithStandardEdition SqlElasticPoolOperations.Definition.IWithStandardEditionBeta.WithDatabaseDtuMax(SqlElasticPoolStandardMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>Sets the total shared eDTU for the basic-edition elastic pool.</summary>
/// <param name="eDTU">Total shared eDTU for the pool.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithBasicEdition SqlElasticPoolOperations.Definition.IWithBasicEditionBeta.WithReservedDtu(SqlElasticPoolBasicEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>Guarantees each database in the basic-edition pool a minimum eDTU.</summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithBasicEdition SqlElasticPoolOperations.Definition.IWithBasicEditionBeta.WithDatabaseDtuMin(SqlElasticPoolBasicMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>Caps the eDTU any database in the basic-edition pool can consume.</summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <return>The next stage of the definition.</return>
SqlElasticPoolOperations.Definition.IWithBasicEdition SqlElasticPoolOperations.Definition.IWithBasicEditionBeta.WithDatabaseDtuMax(SqlElasticPoolBasicMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>Lists the SQL databases currently in this SQL Elastic Pool.</summary>
/// <return>The information about databases in the elastic pool.</return>
System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabases() => this.ListDatabases();
/// <summary>Gets the resource ID of the parent SQL server.</summary>
string Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ParentId => this.ParentId();
/// <summary>Asynchronously lists the database metric definitions for this SQL Elastic Pool.</summary>
/// <return>A representation of the deferred computation of this call.</return>
async Task<System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.ISqlDatabaseMetricDefinition>> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabaseMetricDefinitionsAsync(CancellationToken cancellationToken) => await this.ListDatabaseMetricDefinitionsAsync(cancellationToken);
/// <summary>Gets the name of the SQL Server that owns this elastic pool.</summary>
string Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.SqlServerName => this.SqlServerName();
/// <summary>
/// Gets the edition of the Azure SQL Elastic Pool.
/// </summary>
string Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.Edition => this.Edition();
/// <summary>
/// Gets the maximum DTU any one SQL Azure database can consume.
/// </summary>
int Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.DatabaseDtuMax => this.DatabaseDtuMax();
/// <summary>
/// Asynchronously lists the database activities for this elastic pool.
/// </summary>
/// <returns>The information about elastic pool database activities.</returns>
async Task<System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.IElasticPoolDatabaseActivity>> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabaseActivitiesAsync(CancellationToken cancellationToken) => await this.ListDatabaseActivitiesAsync(cancellationToken);
/// <summary>
/// Asynchronously lists the activities for this elastic pool.
/// </summary>
/// <returns>A representation of the deferred computation of the information about elastic pool activities.</returns>
async Task<System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.IElasticPoolActivity>> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListActivitiesAsync(CancellationToken cancellationToken) => await this.ListActivitiesAsync(cancellationToken);
/// <summary>
/// Gets the total shared DTU for the SQL Azure Database Elastic Pool.
/// </summary>
int Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.Dtu => this.Dtu();
/// <summary>
/// Gets the name of the region the resource is in.
/// </summary>
string Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.RegionName => this.RegionName();
/// <summary>
/// Gets the storage limit for the SQL Azure Database Elastic Pool in MB.
/// </summary>
int Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.StorageMB => this.StorageMB();
/// <summary>
/// Deletes this SQL Elastic Pool asynchronously from the parent SQL server.
/// </summary>
/// <returns>A representation of the deferred computation of this call.</returns>
async Task Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.DeleteAsync(CancellationToken cancellationToken) => await this.DeleteAsync(cancellationToken);
/// <summary>
/// Lists the activities for this elastic pool.
/// </summary>
/// <returns>The information about elastic pool activities.</returns>
System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.IElasticPoolActivity> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListActivities() => this.ListActivities();
/// <summary>
/// Asynchronously lists the database metrics for this SQL Elastic Pool.
/// </summary>
/// <param name="filter">An OData filter expression that describes a subset of metrics to return.</param>
/// <returns>A representation of the deferred computation of this call.</returns>
async Task<System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.ISqlDatabaseMetric>> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabaseMetricsAsync(string filter, CancellationToken cancellationToken) => await this.ListDatabaseMetricsAsync(filter, cancellationToken);
/// <summary>
/// Lists the database metric definitions for this SQL Elastic Pool.
/// </summary>
/// <returns>The elastic pool's metric definitions.</returns>
System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.ISqlDatabaseMetricDefinition> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabaseMetricDefinitions() => this.ListDatabaseMetricDefinitions();
/// <summary>
/// Gets a specific database in the elastic pool.
/// </summary>
/// <param name="databaseName">Name of the database to look into.</param>
/// <returns>The information about the specific database in the elastic pool.</returns>
Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.GetDatabase(string databaseName) => this.GetDatabase(databaseName);
/// <summary>
/// Gets the storage capacity limit for the SQL Azure Database Elastic Pool in MB.
/// </summary>
int Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.StorageCapacityInMB => this.StorageCapacityInMB();
/// <summary>
/// Deletes this SQL Elastic Pool from the parent SQL server.
/// </summary>
void Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.Delete() => this.Delete();
/// <summary>
/// Gets the creation date of the Azure SQL Elastic Pool.
/// </summary>
System.DateTime Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.CreationDate => this.CreationDate();
/// <summary>
/// Asynchronously lists the SQL databases in this SQL Elastic Pool.
/// </summary>
/// <returns>A representation of the deferred computation of this call.</returns>
async Task<System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase>> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabasesAsync(CancellationToken cancellationToken) => await this.ListDatabasesAsync(cancellationToken);
/// <summary>
/// Adds a new SQL Database to the Elastic Pool.
/// </summary>
/// <param name="databaseName">Name of the database.</param>
/// <returns>The database.</returns>
Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.AddNewDatabase(string databaseName) => this.AddNewDatabase(databaseName);
/// <summary>
/// Gets the minimum DTU all SQL Azure Databases are guaranteed.
/// </summary>
int Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.DatabaseDtuMin => this.DatabaseDtuMin();
/// <summary>
/// Adds an existing SQL Database to the Elastic Pool, looked up by name.
/// </summary>
/// <param name="databaseName">Name of the database.</param>
/// <returns>The database.</returns>
Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.AddExistingDatabase(string databaseName) => this.AddExistingDatabase(databaseName);
/// <summary>
/// Adds an existing SQL Database instance to the Elastic Pool.
/// </summary>
/// <param name="database">The database to be added.</param>
/// <returns>The database.</returns>
Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.AddExistingDatabase(ISqlDatabase database) => this.AddExistingDatabase(database);
/// <summary>
/// Lists the database metrics for this SQL Elastic Pool.
/// </summary>
/// <param name="filter">An OData filter expression that describes a subset of metrics to return.</param>
/// <returns>The elastic pool's database metrics.</returns>
System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.ISqlDatabaseMetric> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabaseMetrics(string filter) => this.ListDatabaseMetrics(filter);
/// <summary>
/// Gets the region the resource is in.
/// </summary>
Microsoft.Azure.Management.ResourceManager.Fluent.Core.Region Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.Region => this.Region();
/// <summary>
/// Removes an existing SQL Database from the Elastic Pool.
/// </summary>
/// <param name="databaseName">Name of the database.</param>
/// <returns>The database.</returns>
Microsoft.Azure.Management.Sql.Fluent.ISqlDatabase Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.RemoveDatabase(string databaseName) => this.RemoveDatabase(databaseName);
/// <summary>
/// Gets the state of the Azure SQL Elastic Pool.
/// </summary>
string Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.State => this.State();
/// <summary>
/// Lists the database activities for this elastic pool.
/// </summary>
/// <returns>The information about elastic pool database activities.</returns>
System.Collections.Generic.IReadOnlyList<Microsoft.Azure.Management.Sql.Fluent.IElasticPoolDatabaseActivity> Microsoft.Azure.Management.Sql.Fluent.ISqlElasticPool.ListDatabaseActivities() => this.ListDatabaseActivities();
/// <summary>
/// Sets the storage limit for the SQL Azure Database Elastic Pool in MB.
/// </summary>
/// <param name="storageMB">Storage limit for the SQL Azure Database Elastic Pool in MB.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithAttach<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithStorageCapacity<SqlServer.Definition.IWithCreate>.WithStorageCapacity(int storageMB) => this.WithStorageCapacity(storageMB);
/// <summary>
/// Sets the total shared eDTU for the SQL Azure Database Elastic Pool.
/// </summary>
/// <param name="eDTU">Total shared eDTU for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithStandardEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithStandardEditionBeta<SqlServer.Definition.IWithCreate>.WithReservedDtu(SqlElasticPoolStandardEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>
/// Sets the minimum number of eDTUs guaranteed to each database in the pool, regardless of its activity.
/// </summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithStandardEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithStandardEditionBeta<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMin(SqlElasticPoolStandardMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>
/// Sets the storage capacity for the SQL Azure Database Elastic Pool.
/// </summary>
/// <param name="storageCapacity">Storage capacity for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithStandardEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithStandardEditionBeta<SqlServer.Definition.IWithCreate>.WithStorageCapacity(SqlElasticPoolStandardStorage storageCapacity) => this.WithStorageCapacity(storageCapacity);
/// <summary>
/// Sets the maximum number of eDTUs a database in the pool can consume.
/// </summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithStandardEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithStandardEditionBeta<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMax(SqlElasticPoolStandardMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>
/// Sets the premium edition for the SQL Elastic Pool.
/// </summary>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithPremiumEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithEditionBeta<SqlServer.Definition.IWithCreate>.WithPremiumPool() => this.WithPremiumPool();
/// <summary>
/// Sets the edition for the SQL Elastic Pool.
/// </summary>
/// <param name="edition">Edition to be set for the elastic pool.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithAttach<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithEditionBeta<SqlServer.Definition.IWithCreate>.WithEdition(string edition) => this.WithEdition(edition);
/// <summary>
/// Sets the standard edition for the SQL Elastic Pool.
/// </summary>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithStandardEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithEditionBeta<SqlServer.Definition.IWithCreate>.WithStandardPool() => this.WithStandardPool();
/// <summary>
/// Sets the basic edition for the SQL Elastic Pool.
/// </summary>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithBasicEdition<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithEditionBeta<SqlServer.Definition.IWithCreate>.WithBasicPool() => this.WithBasicPool();
/// <summary>
/// Sets the parent SQL server name and the resource group it belongs to.
/// </summary>
/// <param name="resourceGroupName">The name of the resource group of the parent SQL server.</param>
/// <param name="sqlServerName">The parent SQL server name.</param>
/// <param name="location">The parent SQL server location.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPoolOperations.Definition.IWithEdition SqlElasticPoolOperations.Definition.IWithSqlServer.WithExistingSqlServer(string resourceGroupName, string sqlServerName, string location) => this.WithExistingSqlServer(resourceGroupName, sqlServerName, location);
/// <summary>
/// Sets the parent SQL server for the new Elastic Pool.
/// </summary>
/// <param name="sqlServer">The parent SQL server.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPoolOperations.Definition.IWithEdition SqlElasticPoolOperations.Definition.IWithSqlServer.WithExistingSqlServer(ISqlServer sqlServer) => this.WithExistingSqlServer(sqlServer);
/// <summary>
/// Begins an update for this resource. This starts the builder pattern used to
/// update top-level resources in Azure; the update is committed by the final
/// Apply() call.
/// </summary>
/// <returns>The stage of the resource update.</returns>
SqlElasticPool.Update.IUpdate Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions.IUpdatable<SqlElasticPool.Update.IUpdate>.Update() => this.Update();
/// <summary>
/// Sets the minimum DTU all SQL Azure Databases are guaranteed.
/// </summary>
/// <param name="databaseDtuMin">Minimum DTU for all SQL Azure databases.</param>
/// <returns>The next stage of the definition.</returns>
SqlElasticPool.Definition.IWithAttach<SqlServer.Definition.IWithCreate> SqlElasticPool.Definition.IWithDatabaseDtuMin<SqlServer.Definition.IWithCreate>.WithDatabaseDtuMin(int databaseDtuMin) => this.WithDatabaseDtuMin(databaseDtuMin);
/// <summary>
/// Sets the total shared eDTU for the SQL Azure Database Elastic Pool (basic tier).
/// </summary>
/// <param name="eDTU">Total shared eDTU for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithReservedDtu(SqlElasticPoolBasicEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>
/// Sets the total shared eDTU for the SQL Azure Database Elastic Pool (standard tier).
/// </summary>
/// <param name="eDTU">Total shared eDTU for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithReservedDtu(SqlElasticPoolStandardEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>
/// Sets the total shared eDTU for the SQL Azure Database Elastic Pool (premium tier).
/// </summary>
/// <param name="eDTU">Total shared eDTU for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithReservedDtu(SqlElasticPoolPremiumEDTUs eDTU) => this.WithReservedDtu(eDTU);
/// <summary>
/// Sets the minimum number of eDTUs guaranteed to each database in the pool (basic tier).
/// </summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithDatabaseDtuMin(SqlElasticPoolBasicMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>
/// Sets the minimum number of eDTUs guaranteed to each database in the pool (standard tier).
/// </summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithDatabaseDtuMin(SqlElasticPoolStandardMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>
/// Sets the minimum number of eDTUs guaranteed to each database in the pool (premium tier).
/// </summary>
/// <param name="eDTU">Minimum eDTU for all SQL Azure databases.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithDatabaseDtuMin(SqlElasticPoolPremiumMinEDTUs eDTU) => this.WithDatabaseDtuMin(eDTU);
/// <summary>
/// Sets the storage capacity for the SQL Azure Database Elastic Pool (standard tier).
/// </summary>
/// <param name="storageCapacity">Storage capacity for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithStorageCapacity(SqlElasticPoolStandardStorage storageCapacity) => this.WithStorageCapacity(storageCapacity);
/// <summary>
/// Sets the storage capacity for the SQL Azure Database Elastic Pool (premium tier).
/// NOTE: the parameter type name "SqlElasticPoolPremiumSorage" is the SDK's own
/// (misspelled) public type and must not be altered here.
/// </summary>
/// <param name="storageCapacity">Storage capacity for the SQL Azure Database Elastic Pool.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithStorageCapacity(SqlElasticPoolPremiumSorage storageCapacity) => this.WithStorageCapacity(storageCapacity);
/// <summary>
/// Sets the maximum number of eDTUs a database in the pool can consume (basic tier).
/// </summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithDatabaseDtuMax(SqlElasticPoolBasicMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>
/// Sets the maximum number of eDTUs a database in the pool can consume (standard tier).
/// </summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithDatabaseDtuMax(SqlElasticPoolStandardMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>
/// Sets the maximum number of eDTUs a database in the pool can consume (premium tier).
/// </summary>
/// <param name="eDTU">Maximum eDTU a database in the pool can consume.</param>
/// <returns>The next stage of the update definition.</returns>
SqlElasticPool.Update.IUpdate SqlElasticPool.Update.IWithReservedDTUAndStorageCapacityBeta.WithDatabaseDtuMax(SqlElasticPoolPremiumMaxEDTUs eDTU) => this.WithDatabaseDtuMax(eDTU);
/// <summary>
/// Specifies tags for the resource as a map.
/// </summary>
/// <param name="tags">A map of tags.</param>
/// <returns>The next stage of the definition.</returns>
Microsoft.Azure.Management.Sql.Fluent.SqlElasticPoolOperations.Definition.IWithCreate Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithTags<Microsoft.Azure.Management.Sql.Fluent.SqlElasticPoolOperations.Definition.IWithCreate>.WithTags(IDictionary<string,string> tags) => this.WithTags(tags);
/// <summary>
/// Adds a single tag to the resource.
/// </summary>
/// <param name="key">The key for the tag.</param>
/// <param name="value">The value for the tag.</param>
/// <returns>The next stage of the definition.</returns>
Microsoft.Azure.Management.Sql.Fluent.SqlElasticPoolOperations.Definition.IWithCreate Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithTags<Microsoft.Azure.Management.Sql.Fluent.SqlElasticPoolOperations.Definition.IWithCreate>.WithTag(string key, string value) => this.WithTag(key, value);
}
}
| |
/*
* Copyright (C) Sony Computer Entertainment America LLC.
* All Rights Reserved.
*/
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using System.Windows.Forms;
using Sce.Atf;
using Sce.Atf.Adaptation;
using Sce.Atf.Applications;
using Sce.Atf.Dom;
using Sce.Sled.Resources;
using Sce.Sled.Shared;
using Sce.Sled.Shared.Document;
using Sce.Sled.Shared.Dom;
using Sce.Sled.Shared.Plugin;
using Sce.Sled.Shared.Services;
using Sce.Sled.SyntaxEditor;
namespace Sce.Sled
{
/// <summary>
/// SledBreakpointService Class
/// </summary>
[Export(typeof(IInitializable))]
[Export(typeof(ISledBreakpointService))]
[Export(typeof(ISledDocumentPlugin))]
[Export(typeof(IContextMenuCommandProvider))]
[Export(typeof(SledBreakpointService))]
[PartCreationPolicy(CreationPolicy.Shared)]
class SledBreakpointService : IInitializable, ISledBreakpointService, ISledDocumentPlugin, IContextMenuCommandProvider, ICommandClient
{
[ImportingConstructor]
/// <summary>
/// Constructs the breakpoint service and registers its two command menus
/// (document context menu and breakpoint window menu) with the command service.
/// Both menus are hidden from the main menu bar (Visible = false below).
/// </summary>
/// <param name="mainForm">Main application form, cached for later use</param>
/// <param name="commandService">Command service used to register menus and commands</param>
public SledBreakpointService(MainForm mainForm, ICommandService commandService)
{
m_mainForm = mainForm;
// Register the context menu and the commands shown when right-clicking in a document
var menuInfoContext =
commandService.RegisterMenu(
Menu.Context,
"Breakpoint Context",
"Breakpoint Context Menu");
commandService.RegisterCommand(
Command.BreakpointAdd,
Menu.Context,
CommandGroup.Context,
Localization.SledBreakpointMenuBreakpointAdd,
Localization.SledBreakpointMenuBreakpointAddComment,
this);
commandService.RegisterCommand(
Command.BreakpointRemove,
Menu.Context,
CommandGroup.Context,
Localization.SledBreakpointMenuBreakpointRemove,
Localization.SledBreakpointMenuBreakpointRemoveComment,
this);
commandService.RegisterCommand(
Command.BreakpointEnable,
Menu.Context,
CommandGroup.Context,
Localization.SledBreakpointMenuBreakpointEnable,
Localization.SledBreakpointMenuBreakpointEnableComment,
this);
commandService.RegisterCommand(
Command.BreakpointDisable,
Menu.Context,
CommandGroup.Context,
Localization.SledBreakpointMenuBreakpointDisable,
Localization.SledBreakpointMenuBreakpointDisableComment,
this);
commandService.RegisterCommand(
Command.BreakpointCondition,
Menu.Context,
CommandGroup.Context,
Localization.SledBreakpointMenuBreakpointCondition,
Localization.SledBreakpointMenuBreakpointConditionComment,
this);
commandService.RegisterCommand(
Command.BreakpointRemoveAll,
Menu.Context,
CommandGroup.Context,
Localization.SledBreakpointMenuBreakpointRemoveAll,
Localization.SledBreakpointMenuBreakpointRemoveAllComment,
this);
// Hide the context menu from the main menu bar; it is shown only via right-click
menuInfoContext.GetMenuItem().Visible = false;
// Register the breakpoint-window menu and its commands
var menuInfoWindow =
commandService.RegisterMenu(
Menu.Window,
"Breakpoint Window",
"Breakpoint Window Menu");
commandService.RegisterCommand(
Command.BreakpointWindowRemove,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowRemove,
Localization.BreakpointWindowRemoveComment,
this);
commandService.RegisterCommand(
Command.BreakpointWindowEnable,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowEnable,
Localization.BreakpointWindowEnableComment,
this);
commandService.RegisterCommand(
Command.BreakpointWindowDisable,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowDisable,
Localization.BreakpointWindowDisableComment,
this);
commandService.RegisterCommand(
Command.BreakpointWindowCondition,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowCondition,
Localization.BreakpointWindowConditionComment,
this);
commandService.RegisterCommand(
Command.BreakpointWindowConditionEnable,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowConditionEnable,
Localization.BreakpointWindowConditionEnableComment,
this);
commandService.RegisterCommand(
Command.BreakpointWindowConditionDisable,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowConditionDisable,
Localization.BreakpointWindowConditionDisableComment,
this);
commandService.RegisterCommand(
Command.BreakpointWindowRemoveAll,
Menu.Window,
CommandGroup.Window,
Localization.BreakpointWindowRemoveAll,
Localization.BreakpointWindowRemoveAllComment,
this);
// Hide the window menu from the main menu bar as well
menuInfoWindow.GetMenuItem().Visible = false;
}
#region IInitializable Interface
/// <summary>
/// Resolves the services this component depends on and subscribes to the
/// document, project and modified-files events it reacts to.
/// </summary>
void IInitializable.Initialize()
{
// Track document open/close to hook breakpoint handling into editors
m_documentService = SledServiceInstance.Get<ISledDocumentService>();
m_documentService.Opened += DocumentServiceOpened;
m_documentService.Closed += DocumentServiceClosed;
// Track project lifecycle and per-file events
m_projectService = SledServiceInstance.Get<ISledProjectService>();
m_projectService.Created += ProjectServiceCreated;
m_projectService.Opened += ProjectServiceOpened;
m_projectService.Closing += ProjectServiceClosing;
m_projectService.FileAdded += ProjectServiceFileAdded;
m_projectService.FileOpened += ProjectServiceFileOpened;
m_projectService.FileRemoving += ProjectServiceFileRemoving;
m_breakpointEditor = SledServiceInstance.Get<SledBreakpointEditor>();
// Reload notifications let breakpoints be preserved across external file changes
m_modifiedFilesFormService = SledServiceInstance.Get<ISledModifiedFilesFormService>();
m_modifiedFilesFormService.FileReloading += ModifiedFilesFormServiceFileReloading;
m_modifiedFilesFormService.FileReloaded += ModifiedFilesFormServiceFileReloaded;
}
#endregion
#region Commands
// Command tags registered with the command service; CanDoCommand/DoCommand
// dispatch on these values.
enum Command
{
// Breakpoint context menu commands (commands shown in SledDocument context menu)
BreakpointAdd, // Command to add a breakpoint to a line
BreakpointRemove, // Command to remove a breakpoint from a line
BreakpointEnable, // Command to enable a breakpoint on a line
BreakpointDisable, // Command to disable a breakpoint on a line
BreakpointCondition, // Command to bring up the breakpoint condition window
BreakpointRemoveAll, // Command to remove all breakpoints from a file
// Breakpoint window commands (commands from the breakpoint window)
BreakpointWindowRemove, // Command to remove a breakpoint from a line
BreakpointWindowEnable, // Command to enable a breakpoint on a line
BreakpointWindowDisable, // Command to disable a breakpoint on a line
BreakpointWindowCondition, // Command to bring up the breakpoint condition window
BreakpointWindowConditionEnable, // Command to enable the condition
BreakpointWindowConditionDisable, // Command to disable the condition
BreakpointWindowRemoveAll, // Command to remove all selected breakpoints
}
// Menu tags used when registering the two hidden menus
enum Menu
{
Context,
Window,
}
// Command group tags; one group per menu
enum CommandGroup
{
Context,
Window,
}
#endregion
#region ICommandClient Interface
/// <summary>
/// ICommandClient: reports whether one of this service's commands can run now.
/// </summary>
/// <param name="commandTag">Command tag supplied by the command service</param>
/// <returns>True when the command is one of ours and is currently available</returns>
public bool CanDoCommand(object commandTag)
{
    // Anything that is not our Command enum belongs to another command client.
    if (!(commandTag is Command))
        return false;

    var command = (Command)commandTag;

    // Available when either the document context menu or the breakpoint
    // window can currently perform it (same short-circuit as checking the
    // context path first and only then the window path).
    return CanDoContextCommand(command) || CanDoWindowCommand(command);
}
/// <summary>
/// Determines availability of the document context-menu breakpoint commands
/// for the line the user right-clicked (m_iContextMenuClickedLine).
/// </summary>
/// <param name="command">Command being queried</param>
/// <returns>True when the command applies to the clicked line of the active document</returns>
private bool CanDoContextCommand(Command command)
{
    // All context commands operate on the active document; bail if none.
    if (!m_documentService.Active)
        return false;

    var bEnabled = false;

    // Grab active document
    var sd = m_documentService.ActiveDocument;

    // Several of the following commands check these
    var bIsValidLine = sd.IsValidLine(m_iContextMenuClickedLine);
    var bBreakpointSet = sd.IsBreakpointSet(m_iContextMenuClickedLine);

    switch (command)
    {
        case Command.BreakpointAdd:
            bEnabled = bIsValidLine && !bBreakpointSet;
            break;

        case Command.BreakpointEnable:
            bEnabled =
                bIsValidLine &&
                bBreakpointSet &&
                !sd.IsBreakpointEnabled(m_iContextMenuClickedLine);
            break;

        case Command.BreakpointDisable:
            // Fix: previously this tested IsBreakpointSet again (always true
            // once bBreakpointSet holds), leaving "Disable" enabled even for
            // already-disabled breakpoints. Mirror the Enable case instead:
            // only offer Disable for a currently-enabled breakpoint.
            bEnabled =
                bIsValidLine &&
                bBreakpointSet &&
                sd.IsBreakpointEnabled(m_iContextMenuClickedLine);
            break;

        case Command.BreakpointRemove:
            bEnabled = bIsValidLine && bBreakpointSet;
            break;

        case Command.BreakpointCondition:
            // Conditions need a language plugin and a project file to attach to
            bEnabled = bIsValidLine && bBreakpointSet && (sd.LanguagePlugin != null) && (sd.SledProjectFile != null);
            break;

        case Command.BreakpointRemoveAll:
            bEnabled = (sd.Editor.GetBreakpoints().Length > 0);
            break;
    }

    return bEnabled;
}
/// <summary>
/// Determines availability of the breakpoint-window commands based on the
/// current window selection (m_windowSelection).
/// </summary>
/// <param name="command">Command being queried</param>
/// <returns>True when the command applies to the current selection</returns>
private bool CanDoWindowCommand(Command command)
{
    var bEnabled = false;

    switch (command)
    {
        // Single-selection commands
        case Command.BreakpointWindowRemove:
        case Command.BreakpointWindowEnable:
        case Command.BreakpointWindowDisable:
        case Command.BreakpointWindowCondition:
            bEnabled = (m_windowSelection.Count == 1);
            break;

        case Command.BreakpointWindowConditionEnable:
        {
            if (m_windowSelection.Count == 1)
            {
                var bp =
                    m_windowSelection[0].As<SledProjectFilesBreakpointType>();

                // Fix: As<>() can return null when the selected node is not a
                // breakpoint; guard before dereferencing to avoid an NRE.
                if ((bp != null) && !string.IsNullOrEmpty(bp.Condition))
                    bEnabled = !bp.ConditionEnabled;
            }
        }
        break;

        case Command.BreakpointWindowConditionDisable:
        {
            if (m_windowSelection.Count == 1)
            {
                var bp =
                    m_windowSelection[0].As<SledProjectFilesBreakpointType>();

                if ((bp != null) && !string.IsNullOrEmpty(bp.Condition))
                    bEnabled = bp.ConditionEnabled;
            }
        }
        break;

        case Command.BreakpointWindowRemoveAll:
            bEnabled = (m_windowSelection.Count > 1);
            break;
    }

    return bEnabled;
}
/// <summary>
/// ICommandClient: executes one of this service's breakpoint commands and
/// then persists project settings.
/// </summary>
/// <param name="commandTag">Command tag supplied by the command service</param>
public void DoCommand(object commandTag)
{
    if (commandTag is Command)
    {
        switch ((Command)commandTag)
        {
            case Command.BreakpointAdd:
                m_documentService.ActiveDocument.Editor.Breakpoint(m_iContextMenuClickedLine, true);
                break;

            case Command.BreakpointRemove:
                m_documentService.ActiveDocument.Editor.Breakpoint(m_iContextMenuClickedLine, false);
                break;

            case Command.BreakpointEnable:
                EnableOrDisable(m_documentService.ActiveDocument, m_iContextMenuClickedLine, true);
                break;

            case Command.BreakpointDisable:
                EnableOrDisable(m_documentService.ActiveDocument, m_iContextMenuClickedLine, false);
                break;

            case Command.BreakpointCondition:
                ShowBreakpointConditionForm(m_documentService.ActiveDocument, m_iContextMenuClickedLine);
                break;

            case Command.BreakpointRemoveAll:
                RemoveBreakpoints(m_documentService.ActiveDocument, m_documentService.ActiveDocument.Editor.GetBreakpoints());
                break;

            case Command.BreakpointWindowRemove:
                RemoveBreakpoint(m_windowSelection[0]);
                break;

            case Command.BreakpointWindowEnable:
                EnableOrDisable(m_windowSelection[0], true);
                break;

            case Command.BreakpointWindowDisable:
                EnableOrDisable(m_windowSelection[0], false);
                break;

            case Command.BreakpointWindowCondition:
                ShowBreakpointConditionForm(m_windowSelection[0]);
                break;

            case Command.BreakpointWindowConditionEnable:
                ConditionEnableOrDisable(m_windowSelection[0], true);
                break;

            case Command.BreakpointWindowConditionDisable:
                ConditionEnableOrDisable(m_windowSelection[0], false);
                break;

            case Command.BreakpointWindowRemoveAll:
            {
                // Fix: snapshot the selection before removing. RemoveBreakpoint
                // can shrink the underlying selection while we iterate, which
                // would invalidate the enumeration mid-loop.
                foreach (var domNode in m_windowSelection.ToList())
                {
                    RemoveBreakpoint(domNode);
                }
            }
            break;
        }

        // Save changes
        m_projectService.SaveSettings();
    }
}
/// <summary>
/// ICommandClient: intentionally a no-op — this service does not adjust
/// per-command UI state here; availability is driven by CanDoCommand.
/// </summary>
/// <param name="commandTag">Command tag supplied by the command service</param>
/// <param name="state">Command state that could be modified (unused)</param>
public void UpdateCommand(object commandTag, CommandState state)
{
}
#endregion
#region ISledBreakpointService Interface
/// <summary>
/// Event fired when a breakpoint has been added
/// </summary>
public event EventHandler<SledBreakpointServiceBreakpointEventArgs> Added;
/// <summary>
/// Event fired when a breakpoint has been silently added
/// (NOTE(review): the distinction from Added is not visible in this chunk —
/// confirm against the firing sites before relying on it)
/// </summary>
public event EventHandler<SledBreakpointServiceBreakpointEventArgs> SilentAdded;
/// <summary>
/// Event fired when a breakpoint is being removed (before removal completes)
/// </summary>
public event EventHandler<SledBreakpointServiceBreakpointEventArgs> Removing;
/// <summary>
/// Event fired when a breakpoint is about to be changed
/// </summary>
public event EventHandler<SledBreakpointServiceBreakpointChangingEventArgs> Changing;
/// <summary>
/// Event fired after a breakpoint has changed
/// </summary>
public event EventHandler<SledBreakpointServiceBreakpointChangingEventArgs> Changed;
/// <summary>
/// Add a breakpoint to a file.
/// Convenience overload: forwards to the condition-aware overload with no
/// condition (null), a condition result of true and
/// bUseFunctionEnvironment set to false.
/// </summary>
/// <param name="file">File to add breakpoint to</param>
/// <param name="lineNumber">Line number</param>
public void AddBreakpoint(SledProjectFilesFileType file, int lineNumber)
{
AddBreakpoint(file, lineNumber, null, true, false);
}
/// <summary>
/// Add a breakpoint to a file supplying a condition.
/// Forwards to the full overload with bUseFunctionEnvironment set to false.
/// </summary>
/// <param name="file">File to add breakpoint to</param>
/// <param name="lineNumber">Line number</param>
/// <param name="condition">Condition</param>
/// <param name="bConditionResult">Whether condition evaluates to true or false</param>
public void AddBreakpoint(SledProjectFilesFileType file, int lineNumber, string condition, bool bConditionResult)
{
AddBreakpoint(file, lineNumber, condition, bConditionResult, false);
}
/// <summary>
/// Add a breakpoint to a file supplying a condition.
/// Forwards to the full overload with conditionEnabled forced to true and
/// discards the created breakpoint instance.
/// </summary>
/// <param name="file">File to add breakpoint to</param>
/// <param name="lineNumber">Line number</param>
/// <param name="condition">Condition</param>
/// <param name="bConditionResult">Whether condition evaluates to true or false</param>
/// <param name="bUseFunctionEnvironment">Whether to use the current function's environment or _G when checking the breakpoint condition (if any)</param>
public void AddBreakpoint(SledProjectFilesFileType file, int lineNumber, string condition, bool bConditionResult, bool bUseFunctionEnvironment)
{
// Out value intentionally unused by this overload
SledProjectFilesBreakpointType breakpoint;
AddBreakpoint(file, lineNumber, condition, bConditionResult, true, bUseFunctionEnvironment, out breakpoint);
}
/// <summary>
/// Add a breakpoint to a file supplying a condition
/// </summary>
/// <param name="file">File to add breakpoint to</param>
/// <param name="lineNumber">Line number</param>
/// <param name="condition">Condition</param>
/// <param name="conditionResult">Whether condition evaluates to true or false</param>
/// <param name="conditionEnabled">Whether the condition is enabled or not</param>
/// <param name="useFunctionEnvironment">Whether to use the current function's environment or _G when checking the breakpoint condition (if any)</param>
/// <param name="breakpoint">The breakpoint if it was added otherwise null</param>
/// <returns>Whether the breakpoint was added or not</returns>
public bool AddBreakpoint(SledProjectFilesFileType file, int lineNumber, string condition, bool conditionResult, bool conditionEnabled, bool useFunctionEnvironment, out SledProjectFilesBreakpointType breakpoint)
{
    breakpoint = null;
    if (file == null)
        return false;
    // Can't add more than one breakpoint per line
    if (IsDuplicate(file, lineNumber))
        return false;
    // Breakpoint in the document (or none if document not open)
    IBreakpoint ibp = null;
    var sd = file.SledDocument;
    if (sd != null)
    {
        // Suppress the editor's breakpoint-added events so this add
        // isn't re-entered through EditorBreakpointAdded
        m_bAddingOrRemoving = true;
        // Add breakpoint to open document
        sd.Editor.Breakpoint(lineNumber, true);
        ibp = sd.Editor.GetBreakpoint(lineNumber);
        m_bAddingOrRemoving = false;
    }
    // Create project style breakpoint
    breakpoint = SledProjectFilesBreakpointType.Create(ibp);
    // set some properties in case the document isn't open and we don't have a real IBreakpoint yet
    if (ibp == null)
    {
        breakpoint.Line = lineNumber;
        breakpoint.Enabled = true;
    }
    // Setup condition if any
    if (!string.IsNullOrEmpty(condition))
    {
        breakpoint.Condition = condition;
        breakpoint.ConditionResult = conditionResult;
        breakpoint.ConditionEnabled = conditionEnabled;
        breakpoint.UseFunctionEnvironment = useFunctionEnvironment;
        // Draw breakpoint indicator in open document breakpoint
        if (ibp != null)
            ibp.Marker = true;
    }
    // Add breakpoint to file finally (fires events through the DOM child-inserted handler)
    file.Breakpoints.Add(breakpoint);
    // NOTE(review): returns false only if Create() yielded null, which the
    // Add above would not tolerate either — effectively always true here
    return breakpoint != null;
}
/// <summary>
/// Remove a breakpoint from a file
/// </summary>
/// <param name="file">File to remove breakpoint from</param>
/// <param name="lineNumber">Line number of the breakpoint to remove</param>
public void RemoveBreakpoint(SledProjectFilesFileType file, int lineNumber)
{
    if ((file == null) || (lineNumber < 0))
        return;
    // Locate the project breakpoint on the requested line, if one exists
    SledProjectFilesBreakpointType match = null;
    foreach (var candidate in file.Breakpoints)
    {
        if (candidate.Line != lineNumber)
            continue;
        match = candidate;
        break;
    }
    // Removing from the collection fires the service's removal events
    if (match != null)
        file.Breakpoints.Remove(match);
}
#endregion
#region ISledDocument Breakpoint Events
// Vetoes or permits an in-editor breakpoint change before it happens.
// e.Cancel is forced true up-front so any early return leaves the change blocked.
private void EditorBreakpointChanging(object sender, BreakpointEventArgs e)
{
    // Set to cancel unless we meet criteria later
    e.Cancel = true;
    // Debug-only sanity check: line numbers are expected to be 1-based
    CheckLineNumber(e.LineNumber);
    var sd = GetSledDocumentFromSender(sender);
    if (sd == null)
        return;
    // Don't allow breakpoints in files that don't have a plugin associated with them
    if (sd.LanguagePlugin == null)
        return;
    // By default allow the breakpoint change to continue
    e.Cancel = false;
    // When adding a breakpoint we need to check with the language plugin to see if it
    // supports the addition of a new breakpoint
    if (e.IsSet)
    {
        // Grab all breakpoint plugins
        var plugins =
            SledServiceInstance.GetAll<ISledBreakpointPlugin>();
        foreach (var listable in plugins)
        {
            // Find plugin that implements the breakpoint plugin
            // (at most one matches the document's own language plugin)
            if (listable != sd.LanguagePlugin)
                continue;
            // Check with plugin to see if breakpoint can be added
            e.Cancel =
                !listable.CanAdd(
                    sd,
                    e.LineNumber,
                    e.LineText,
                    GetNumberOfBreakpointsForLanguagePlugin(sd.LanguagePlugin));
        }
    }
}
// A breakpoint was click-added in an open editor; mirror it into the project.
private void EditorBreakpointAdded(object sender, IBreakpointEventArgs e)
{
    // Ignore editor events generated by this service's own add/remove work
    if (m_bAddingOrRemoving)
        return;
    var sd = GetSledDocumentFromSender(sender);
    // Only documents that belong to the project are tracked
    if ((sd == null) || (sd.SledProjectFile == null))
        return;
    AddBreakpoint(sd, e.Breakpoint);
}
// A breakpoint was click-removed in an open editor; mirror it into the project.
private void EditorBreakpointRemoved(object sender, IBreakpointEventArgs e)
{
    // Ignore editor events generated by this service's own add/remove work
    if (m_bAddingOrRemoving)
        return;
    var sd = GetSledDocumentFromSender(sender);
    // Only documents that belong to the project are tracked
    if ((sd == null) || (sd.SledProjectFile == null))
        return;
    RemoveBreakpoint(sd, e.Breakpoint);
}
// Re-syncs project breakpoint line numbers with their editor counterparts
// after lines were inserted/deleted in an open document, firing the
// Changing/Changed pair for each breakpoint that moved.
private void SledDocumentDocumentLineCountChanged(object sender, SledDocumentLineCountChangedArgs e)
{
    var sd = GetSledDocumentFromSender(sender);
    if (sd == null)
        return;
    // Don't care about files not in the project
    if (sd.SledProjectFile == null)
        return;
    var bChanged = false;
    foreach (var bp in sd.SledProjectFile.Breakpoints)
    {
        // Breakpoints with no editor counterpart can't have moved
        if (bp.Breakpoint == null)
            continue;
        // Only want to process breakpoints that actually moved lines
        if (bp.RawLine == bp.Breakpoint.LineNumber)
            continue;
        bChanged = true;
        // Create event carrying old (RawLine) and new line numbers
        var ea =
            new SledBreakpointServiceBreakpointChangingEventArgs(
                SledBreakpointChangeType.LineNumber,
                bp,
                bp.RawLine,
                bp.Breakpoint.LineNumber);
        // Fire event
        OnBreakpointChanging(ea);
        // Sync up line numbers from IBreakpoint counterpart
        bp.Refresh();
        // Fire event
        OnBreakpointChanged(ea);
    }
    // Save changes
    if (bChanged)
        m_projectService.SaveSettings();
}
// Resolves the ISledDocument behind an event sender. The sender is either
// the document itself or a syntax editor control whose Control.Tag holds
// the document; anything else yields null.
private static ISledDocument GetSledDocumentFromSender(object sender)
{
    var document = sender as ISledDocument;
    if (document != null)
        return document;
    var editorControl = sender as ISyntaxEditorControl;
    if ((editorControl == null) || (editorControl.Control == null))
        return null;
    // A null or foreign Tag simply produces null here
    return editorControl.Control.Tag as ISledDocument;
}
// Totals the breakpoints across every project file owned by the given
// language plugin; used when asking a plugin whether more can be added.
private int GetNumberOfBreakpointsForLanguagePlugin(ISledLanguagePlugin languagePlugin)
{
    var total = 0;
    foreach (var file in m_projectService.AllFiles)
    {
        if (file.LanguagePlugin == languagePlugin)
            total += file.Breakpoints.Count;
    }
    return total;
}
#endregion
#region ISledDocumentService Events
// Hooks editor and document events on a newly opened document so breakpoint
// edits made in the editor are mirrored into the project.
private void DocumentServiceOpened(object sender, SledDocumentServiceEventArgs e)
{
    var document = e.Document;
    if ((document == null) || (document.Editor == null))
        return;
    document.Editor.BreakpointChanging += EditorBreakpointChanging;
    document.Editor.BreakpointAdded += EditorBreakpointAdded;
    document.Editor.BreakpointRemoved += EditorBreakpointRemoved;
    document.DocumentLineCountChanged += SledDocumentDocumentLineCountChanged;
}
// Unhooks everything DocumentServiceOpened attached when a document closes.
private void DocumentServiceClosed(object sender, SledDocumentServiceEventArgs e)
{
    var document = e.Document;
    if ((document == null) || (document.Editor == null))
        return;
    document.Editor.BreakpointChanging -= EditorBreakpointChanging;
    document.Editor.BreakpointAdded -= EditorBreakpointAdded;
    document.Editor.BreakpointRemoved -= EditorBreakpointRemoved;
    document.DocumentLineCountChanged -= SledDocumentDocumentLineCountChanged;
}
#endregion
#region DomNode Events
// Attaches DOM-level handlers to the project so breakpoint attribute edits
// and breakpoint insertions/removals raise this service's events.
private void SubscribeToEvents(SledProjectFilesType project)
{
    project.DomNode.AttributeChanging += CollectionAttributeChanging;
    project.DomNode.AttributeChanged += CollectionAttributeChanged;
    project.DomNode.ChildInserted += CollectionChildInserted;
    project.DomNode.ChildRemoving += CollectionChildRemoving;
}
// Detaches every DOM-level handler attached in SubscribeToEvents when the
// project closes. Previously the AttributeChanging/AttributeChanged handlers
// were never removed, which kept this service referenced by the project's
// DomNode (event-handler leak) after the project was closed.
private void UnsubscribeFromEvents(SledProjectFilesType project)
{
    project.DomNode.AttributeChanging -= CollectionAttributeChanging;
    project.DomNode.AttributeChanged -= CollectionAttributeChanged;
    project.DomNode.ChildInserted -= CollectionChildInserted;
    project.DomNode.ChildRemoving -= CollectionChildRemoving;
}
// Raises the Changing event for an impending edit to one of a breakpoint's
// DOM attributes (enabled, condition enabled/result/text, function
// environment). Runs before the new value is committed; CollectionAttributeChanged
// raises the matching Changed event afterwards.
private void CollectionAttributeChanging(object sender, AttributeEventArgs e)
{
    // Only breakpoint nodes are of interest
    if (e.DomNode.Type != SledSchema.SledProjectFilesBreakpointType.Type)
        return;
    var bp = e.DomNode.As<SledProjectFilesBreakpointType>();
    if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.enabledAttribute)
    {
        // A null old value means the attribute was still at its schema default
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.Enabled
                    : SledBreakpointChangeType.Disabled;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanging(ea);
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.conditionenabledAttribute)
    {
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.ConditionEnabled
                    : SledBreakpointChangeType.ConditionDisabled;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanging(ea);
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.conditionresultAttribute)
    {
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.ConditionResultTrue
                    : SledBreakpointChangeType.ConditionResultFalse;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanging(ea);
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.conditionAttribute)
    {
        // Condition text changed; event carries the old and new strings
        var oldValue = e.OldValue as string;
        var newValue = e.NewValue as string;
        if (string.Compare(oldValue, newValue) != 0)
        {
            const SledBreakpointChangeType changeType =
                SledBreakpointChangeType.Condition;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp, oldValue, newValue);
            // Fire event
            OnBreakpointChanging(ea);
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.usefunctionenvironmentAttribute)
    {
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.UseFunctionEnvironmentTrue
                    : SledBreakpointChangeType.UseFunctionEnvironmentFalse;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanging(ea);
        }
    }
}
// Raises the Changed event after a breakpoint's DOM attribute was committed,
// mirroring the branches of CollectionAttributeChanging, and refreshes the
// owning document's control so breakpoint indicators redraw.
private void CollectionAttributeChanged(object sender, AttributeEventArgs e)
{
    // Only breakpoint nodes are of interest
    if (e.DomNode.Type != SledSchema.SledProjectFilesBreakpointType.Type)
        return;
    var bp = e.DomNode.As<SledProjectFilesBreakpointType>();
    if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.enabledAttribute)
    {
        // A null old value means the attribute was still at its schema default
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.Enabled
                    : SledBreakpointChangeType.Disabled;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanged(ea);
            // Assure open document's breakpoints are drawn correctly
            var sd = bp.File.SledDocument;
            if (sd != null)
                sd.Control.Refresh();
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.conditionenabledAttribute)
    {
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.ConditionEnabled
                    : SledBreakpointChangeType.ConditionDisabled;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanged(ea);
            // Assure open document's breakpoints are drawn correctly
            var sd = bp.File.SledDocument;
            if (sd != null)
                sd.Control.Refresh();
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.conditionresultAttribute)
    {
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.ConditionResultTrue
                    : SledBreakpointChangeType.ConditionResultFalse;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanged(ea);
            // Assure open document's breakpoints are drawn correctly
            var sd = bp.File.SledDocument;
            if (sd != null)
                sd.Control.Refresh();
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.conditionAttribute)
    {
        // Condition text changed; event carries the old and new strings
        var oldValue = e.OldValue as string;
        var newValue = e.NewValue as string;
        if (string.Compare(oldValue, newValue) != 0)
        {
            const SledBreakpointChangeType changeType =
                SledBreakpointChangeType.Condition;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp, oldValue, newValue);
            // Fire event
            OnBreakpointChanged(ea);
            // Assure open document's breakpoints are drawn correctly
            var sd = bp.File.SledDocument;
            if (sd != null)
                sd.Control.Refresh();
        }
    }
    else if (e.AttributeInfo == SledSchema.SledProjectFilesBreakpointType.usefunctionenvironmentAttribute)
    {
        var bOldValue = e.OldValue == null ? (bool)e.AttributeInfo.DefaultValue : (bool)e.OldValue;
        var bNewValue = (bool)e.NewValue;
        if (bOldValue != bNewValue)
        {
            var changeType =
                bNewValue
                    ? SledBreakpointChangeType.UseFunctionEnvironmentTrue
                    : SledBreakpointChangeType.UseFunctionEnvironmentFalse;
            var ea = new SledBreakpointServiceBreakpointChangingEventArgs(changeType, bp);
            // Fire event
            OnBreakpointChanged(ea);
        }
    }
}
// A breakpoint node was inserted into the project DOM; announce it.
private void CollectionChildInserted(object sender, ChildEventArgs e)
{
    if (e.Child.Type != SledSchema.SledProjectFilesBreakpointType.Type)
        return;
    var bp = e.Child.As<SledProjectFilesBreakpointType>();
    // Fire event
    OnBreakpointAdded(new SledBreakpointServiceBreakpointEventArgs(bp));
}
// A breakpoint node is being removed from the project DOM; announce it and,
// unless asked to preserve them, clear the matching editor indicator.
private void CollectionChildRemoving(object sender, ChildEventArgs e)
{
    if (e.Child.Type != SledSchema.SledProjectFilesBreakpointType.Type)
        return;
    var bp = e.Child.As<SledProjectFilesBreakpointType>();
    // Fire event
    OnBreakpointRemoving(new SledBreakpointServiceBreakpointEventArgs(bp));
    // Set while a file is being removed from the project but its
    // document should keep its editor breakpoints
    if (m_bPreserveOpenDocumentBreakpoints)
        return;
    // Try and remove breakpoints from the
    // corresponding open document (if any)
    var sd = bp.File.SledDocument;
    if (sd == null)
        return;
    // Preserve previous value (this handler may run re-entrantly while
    // the flag is already raised by another code path)
    var bValue = m_bAddingOrRemoving;
    // Make sure SledDocument breakpoint events won't fire
    m_bAddingOrRemoving = true;
    // Verify breakpoint on line in open document & remove if so
    if (sd.IsBreakpointSet(bp.Line))
        sd.Editor.Breakpoint(bp.Line, false);
    // Reset to previous value
    m_bAddingOrRemoving = bValue;
}
#endregion
#region ISledProjectService Events
// A project was created: hook its DOM events and surface its breakpoints.
public void ProjectServiceCreated(object sender, SledProjectServiceProjectEventArgs e)
{
    SubscribeToEvents(e.Project);
    AddBreakpoints(e.Project);
}
// A project was opened: hook its DOM events and surface its breakpoints.
public void ProjectServiceOpened(object sender, SledProjectServiceProjectEventArgs e)
{
    SubscribeToEvents(e.Project);
    AddBreakpoints(e.Project);
    //SendLiveConnectProjectBreakpoints(e.Project);
}
// The project is closing: tear down breakpoints and DOM hooks, and clear
// the re-entrancy guard in case a prior operation left it set.
public void ProjectServiceClosing(object sender, SledProjectServiceProjectEventArgs e)
{
    RemoveBreakpoints(e.Project);
    UnsubscribeFromEvents(e.Project);
    m_bAddingOrRemoving = false;
}
// A file joined the project: pull any breakpoints already set in its
// open document (if any) into the project.
private void ProjectServiceFileAdded(object sender, SledProjectServiceFileEventArgs e)
{
    var sd = e.File.SledDocument;
    // If no open document then do nothing
    if (sd == null)
        return;
    // Add breakpoints from the open document to the GUI
    AddBreakpoints(sd, sd.Editor.GetBreakpoints());
}
// A project file was opened in the editor: push its saved breakpoints
// into the freshly opened document without firing the normal Added event.
private void ProjectServiceFileOpened(object sender, SledProjectServiceFileEventArgs e)
{
    // Silently add breakpoints from the project to
    // the newly opened document
    AddBreakpoints(e.File);
}
// A file is being removed from the project: strip its project-side
// breakpoints while leaving any open document's editor indicators intact.
private void ProjectServiceFileRemoving(object sender, SledProjectServiceFileEventArgs e)
{
    // Keep breakpoints in the open document (if its open)
    m_bPreserveOpenDocumentBreakpoints = true;
    try
    {
        // Remove breakpoints in the file from the project
        // but leave the open document (if any) alone
        RemoveBreakpoints(e.File);
    }
    finally
    {
        // Reset the flag even if removal throws, so later DOM removals
        // don't silently preserve editor breakpoints; matches the
        // try/finally discipline used by the other flag toggles here
        m_bPreserveOpenDocumentBreakpoints = false;
    }
}
#endregion
#region ISledModifiedFilesFormService Events
// A file changed on disk and is about to be reloaded: try to track each
// breakpoint to its new line by matching the remembered line text against
// the modified file (scanning up to 128 lines down, then up). Matches that
// moved are staged in m_breakpointsMoving for re-adding after the reload;
// breakpoints that can't be tracked are dropped.
private void ModifiedFilesFormServiceFileReloading(object sender, SledFileWatcherServiceEventArgs e)
{
    m_breakpointsMoving.Clear();
    // Try and track breakpoints
    if ((e.Document == null) ||
        (e.Document.SledProjectFile == null) ||
        (e.Document.SledProjectFile.Breakpoints.Count <= 0))
        return;
    // Read the on-disk (modified) contents through a hidden document
    using (var sd = SledDocument.CreateHidden(e.Document.Uri, null))
    {
        if (sd.Editor == null)
            return;
        try
        {
            sd.Read();
        }
        catch (Exception ex)
        {
            SledOutDevice.OutLine(
                SledMessageType.Error,
                "{0}: Exception reading file for tracking breakpoints after external editor modification: {1}",
                this, ex.Message);
            return;
        }
        var breakpoints = e.Document.SledProjectFile.Breakpoints;
        var bpsToRemove = new List<SledProjectFilesBreakpointType>();
        foreach (var bp in breakpoints)
        {
            // Without remembered line text there is nothing to match against
            var originalFileLineText = bp.LineText;
            if (string.IsNullOrEmpty(originalFileLineText))
                continue;
            const int invalidLine = -1;
            var foundLine = invalidLine;
            var pos = bp.Line;
            // Bound the search so large edits don't trigger long scans
            const int iterationThreshold = 128;
            var iterations = 0;
            // check down (case-insensitive text comparison)
            for (; (pos < sd.Editor.DocumentLineCount) && (foundLine == invalidLine) && (iterations < iterationThreshold); ++pos)
            {
                var modifiedFileLineText = GetLineText(sd.Editor, pos);
                if (string.Compare(originalFileLineText, modifiedFileLineText, true) == 0)
                    foundLine = pos;
                iterations++;
            }
            // check up if not found down
            if (foundLine == invalidLine)
            {
                pos = bp.Line - 1;
                iterations = 0;
                for (; (pos >= 1) && (foundLine == invalidLine) && (iterations < iterationThreshold); --pos)
                {
                    var modifiedFileLineText = GetLineText(sd.Editor, pos);
                    if (string.Compare(originalFileLineText, modifiedFileLineText, true) == 0)
                        foundLine = pos;
                    iterations++;
                }
            }
            if (foundLine != invalidLine)
            {
                // Found the line again; if it moved, stage the breakpoint's
                // details for re-creation after the reload completes
                if (bp.Line != foundLine)
                {
                    var tempBp =
                        new TempBpDetails(
                            bp.File,
                            foundLine,
                            bp.Condition,
                            bp.ConditionResult,
                            bp.ConditionEnabled,
                            bp.UseFunctionEnvironment);
                    m_breakpointsMoving.Add(tempBp);
                    bpsToRemove.Add(bp);
                }
            }
            else
            {
                // Couldn't find the text anywhere nearby; drop the breakpoint
                bpsToRemove.Add(bp);
            }
        }
        // Remove any breakpoints that moved or that we were unable to track
        foreach (var bp in bpsToRemove)
            RemoveBreakpoint(bp.File, bp.Line);
    }
}
// After the reload completes, re-create the breakpoints that were staged
// (at their tracked new lines) by the FileReloading handler, then persist.
private void ModifiedFilesFormServiceFileReloaded(object sender, SledFileWatcherServiceEventArgs e)
{
    if (m_breakpointsMoving.Count <= 0)
        return;
    try
    {
        foreach (var tempBpDetails in m_breakpointsMoving)
        {
            SledProjectFilesBreakpointType breakpoint;
            AddBreakpoint(
                tempBpDetails.File,
                tempBpDetails.Line,
                tempBpDetails.Condition,
                tempBpDetails.ConditionResult,
                tempBpDetails.ConditionEnabled,
                tempBpDetails.UseFunctionEnvironment,
                out breakpoint);
        }
    }
    finally
    {
        // Always clear the staging list, even if an add throws
        m_breakpointsMoving.Clear();
    }
    // Write out any breakpoint changes
    m_projectService.SaveSettings();
}
// Fetches one line of text from the editor; out-of-range lines come back
// as empty strings rather than throwing, which keeps the breakpoint
// tracking scans free of bounds bookkeeping.
private static string GetLineText(ISyntaxEditorControl control, int line)
{
    try
    {
        return control.GetLineText(line);
    }
    catch (ArgumentOutOfRangeException)
    {
        return string.Empty;
    }
}
#endregion
#region ISledDocumentPlugin Interface
/// <summary>
/// Gets context menu command tags for the target SledDocument
/// </summary>
/// <param name="args">Arguments (document, region clicked, line number clicked)</param>
/// <returns>List of context menu command tags for the target SledDocument</returns>
public IList<object> GetPopupCommandTags(SledDocumentContextMenuArgs args)
{
    var commands = new List<object>();
    // Only care about breakpoint region
    if (args.Region != SledDocumentRegions.Breakpoint)
        return commands;
    // Get out if no active document
    if (!m_documentService.Active)
        return commands;
    // Grab active document
    var sd = m_documentService.ActiveDocument;
    // Store line number so the command handlers know where the click landed
    m_iContextMenuClickedLine = args.LineNumber;
    // Add breakpoint commands
    if (sd.IsValidLine(m_iContextMenuClickedLine))
    {
        if (sd.IsBreakpointSet(m_iContextMenuClickedLine))
        {
            commands.Add(Command.BreakpointRemove);
            // Enable/disable and condition editing only make sense for
            // breakpoints tracked by the project
            if (sd.SledProjectFile != null)
            {
                commands.Add(
                    sd.IsBreakpointEnabled(m_iContextMenuClickedLine)
                        ? Command.BreakpointDisable
                        : Command.BreakpointEnable);
                commands.Add(Command.BreakpointCondition);
            }
        }
        else
            commands.Add(Command.BreakpointAdd);
    }
    // Offer remove-all whenever the document has any breakpoints at all
    if (sd.Editor.GetBreakpoints().Length > 0)
        commands.Add(Command.BreakpointRemoveAll);
    return commands;
}
/// <summary>
/// Gets values for hovered over tokens
/// </summary>
/// <param name="args">Arguments (document, token, line number)</param>
/// <returns>List of strings representing possible values for the hovered over token</returns>
/// <remarks>This service contributes nothing to hover tooltips</remarks>
public IList<string> GetMouseHoverOverTokenValues(SledDocumentHoverOverTokenArgs args)
{
    return null;
}
#endregion
#region IContextMenuCommandProvider Interface
// Builds the breakpoint-window context menu for the current selection:
// full edit commands for a single selected breakpoint, remove-all for a
// multi-selection, nothing otherwise.
public IEnumerable<object> GetCommands(object context, object target)
{
    if (target == null)
        return s_emptyObjectList;
    if (!target.Is<SledProjectFilesBreakpointType>())
        return s_emptyObjectList;
    var commands = new List<object>();
    // Copy over selection (DomNodes only) for the command handlers to use
    m_windowSelection.Clear();
    foreach (object item in m_breakpointEditor.Selection)
    {
        if (!item.Is<DomNode>())
            continue;
        m_windowSelection.Add(item.As<DomNode>());
    }
    if (m_windowSelection.Count == 1)
    {
        var bp = m_windowSelection[0].As<SledProjectFilesBreakpointType>();
        commands.Add(Command.BreakpointWindowRemove);
        commands.Add(
            bp.Enabled
                ? Command.BreakpointWindowDisable
                : Command.BreakpointWindowEnable);
        commands.Add(Command.BreakpointWindowCondition);
        // Condition enable/disable only offered once a condition exists
        if (!string.IsNullOrEmpty(bp.Condition))
        {
            commands.Add(
                bp.ConditionEnabled
                    ? Command.BreakpointWindowConditionDisable
                    : Command.BreakpointWindowConditionEnable);
        }
    }
    else if (m_windowSelection.Count > 1)
    {
        commands.Add(Command.BreakpointWindowRemoveAll);
    }
    return commands;
}
#endregion
#region Member Methods
// Surfaces every saved breakpoint in the project after it is created or
// opened: draws indicators in any already-open documents, associates each
// project breakpoint with its editor IBreakpoint, fires Added for each,
// and prunes breakpoints whose lines are no longer valid.
private void AddBreakpoints(SledProjectFilesType project)
{
    try
    {
        // Suppress editor breakpoint events while indicators are created
        m_bAddingOrRemoving = true;
        var bpInvalids = new List<SledProjectFilesBreakpointType>();
        // Go through files in project adding breakpoints to the GUI
        // Also add breakpoint indicators to open documents that
        // correspond to files in the project
        foreach (var file in project.AllFiles)
        {
            var sd = file.SledDocument;
            foreach (var bp in file.Breakpoints)
            {
                var bValid = true;
                if (sd != null)
                {
                    // Verify breakpoint not already set on line
                    // and that the line is valid
                    if (!sd.IsBreakpointSet(bp.Line) &&
                        !sd.IsValidLine(bp.Line))
                    {
                        bValid = false;
                        bpInvalids.Add(bp);
                    }
                }
                // Breakpoint isn't valid so skip to next
                if (!bValid)
                    continue;
                // Breakpoint needs to get added but first lets see
                // if there's an IBreakpoint we can associate it with
                // Get breakpoint from the open document
                IBreakpoint ibp = null;
                if (sd != null)
                {
                    if (sd.IsBreakpointSet(bp.Line))
                    {
                        // Grab existing
                        ibp = sd.Editor.GetBreakpoint(bp.Line);
                    }
                    else
                    {
                        // Create new indicator in the document
                        sd.Editor.Breakpoint(bp.Line, true);
                        if (sd.IsBreakpointSet(bp.Line))
                            ibp = sd.Editor.GetBreakpoint(bp.Line);
                    }
                    // Couldn't set breakpoint somehow?
                    if (ibp == null)
                    {
                        bpInvalids.Add(bp);
                        continue;
                    }
                }
                if (ibp != null)
                {
                    // Copy over values first (project data is authoritative)
                    ibp.Enabled = bp.Enabled;
                    ibp.Marker = bp.ConditionEnabled;
                }
                // Set reference (null when the document isn't open)
                bp.Breakpoint = ibp;
                // Fire event
                OnBreakpointAdded(new SledBreakpointServiceBreakpointEventArgs(bp));
            }
            if (sd != null)
            {
                // Pull in any breakpoints the document had that the
                // project didn't know about, then redraw
                AddBreakpoints(sd, sd.Editor.GetBreakpoints());
                sd.Control.Refresh();
            }
        }
        // Remove invalid breakpoints
        foreach (var bp in bpInvalids)
        {
            bp.File.Breakpoints.Remove(bp);
        }
    }
    finally
    {
        m_bAddingOrRemoving = false;
    }
}
// Clears all breakpoint indicators for a closing project: removes each
// breakpoint from any open document's editor and fires Removing for each.
// The project DOM itself is left alone (the data stays saved).
private void RemoveBreakpoints(SledProjectFilesType project)
{
    try
    {
        // Suppress editor breakpoint events while indicators are cleared
        m_bAddingOrRemoving = true;
        foreach (var file in project.AllFiles)
        {
            var sd = file.SledDocument;
            foreach (var bp in file.Breakpoints)
            {
                // Remove from open document (if any)
                if ((sd != null) && sd.IsBreakpointSet(bp.Line))
                {
                    sd.Editor.Breakpoint(bp.Line, false);
                }
                // Fire event
                OnBreakpointRemoving(new SledBreakpointServiceBreakpointEventArgs(bp));
            }
        }
    }
    finally
    {
        m_bAddingOrRemoving = false;
    }
}
// Pushes a project file's saved breakpoints into its just-opened document,
// firing SilentAdded (not Added) for each, and prunes any whose lines are
// no longer valid in the current file contents.
private void AddBreakpoints(SledProjectFilesFileType file)
{
    // Silently add breakpoints from the project to
    // the newly opened document
    if (m_bAddingOrRemoving)
        return;
    try
    {
        // Suppress editor breakpoint events while indicators are created
        m_bAddingOrRemoving = true;
        var sd = file.SledDocument;
        if (sd == null)
            return;
        var bpInvalids = new List<SledProjectFilesBreakpointType>();
        foreach (var bp in file.Breakpoints)
        {
            // Indicator already present; nothing to do
            if (sd.IsBreakpointSet(bp.Line))
                continue;
            // Line no longer exists in the file; mark for pruning
            if (!sd.IsValidLine(bp.Line))
            {
                bpInvalids.Add(bp);
                continue;
            }
            sd.Editor.Breakpoint(bp.Line, true);
            // Editor refused the breakpoint; mark for pruning
            if (!sd.IsBreakpointSet(bp.Line))
            {
                bpInvalids.Add(bp);
                continue;
            }
            // Associate the editor breakpoint and copy project data onto it
            var ibp = sd.Editor.GetBreakpoint(bp.Line);
            ibp.Enabled = bp.Enabled;
            ibp.Marker = bp.ConditionEnabled;
            bp.Breakpoint = ibp;
            bp.Refresh();
            // Fire event
            OnBreakpointSilentAdded(new SledBreakpointServiceBreakpointEventArgs(bp));
        }
        foreach (var bp in bpInvalids)
        {
            bp.File.Breakpoints.Remove(bp);
        }
        sd.Control.Refresh();
    }
    finally
    {
        m_bAddingOrRemoving = false;
    }
}
// Strips every project-side breakpoint from a file (each removal fires
// events through Collection_Removing) without touching any open document,
// then persists the change if anything was actually removed.
private void RemoveBreakpoints(SledProjectFilesFileType file)
{
    if (file.Breakpoints.Count <= 0)
        return;
    // Pop entries one at a time so each removal raises its own event
    do
    {
        file.Breakpoints.RemoveAt(0);
    }
    while (file.Breakpoints.Count > 0);
    // Something was removed, so write the settings out
    m_projectService.SaveSettings();
}
// Mirrors editor breakpoints into the document's project file, skipping
// any already tracked, then persists. Insertions fire events through
// Collection_Inserted.
private void AddBreakpoints(ISledDocument sd, IEnumerable<IBreakpoint> ibps)
{
    // Add breakpoints from "ibps" to the open document "sd"
    foreach (var ibp in ibps)
    {
        // Check for duplicates
        var bp = FindBreakpointInFile(sd.SledProjectFile, ibp);
        if (bp != null)
            continue;
        // Create new
        bp = SledProjectFilesBreakpointType.Create(ibp);
        // Add breakpoint to GUI (will fire event through Collection_Inserted)
        sd.SledProjectFile.Breakpoints.Add(bp);
    }
    m_projectService.SaveSettings();
}
// Single-breakpoint convenience wrapper.
private void AddBreakpoint(ISledDocument sd, IBreakpoint ibp)
{
    // Called when click-adding a breakpoint in an open document
    AddBreakpoints(sd, new[] { ibp });
}
// Removes the given editor breakpoints from both the document and (when the
// document belongs to the project) the project file, then persists.
private void RemoveBreakpoints(ISledDocument sd, IEnumerable<IBreakpoint> ibps)
{
    // Called when remove-all is selected
    try
    {
        // Suppress editor breakpoint events during the bulk removal
        m_bAddingOrRemoving = true;
        var file = sd.SledProjectFile;
        foreach (var ibp in ibps)
        {
            // Clear the editor indicator first
            sd.Editor.Breakpoint(ibp.LineNumber, false);
            if (file == null)
                continue;
            var bp = FindBreakpointInFile(file, ibp);
            // Remove breakpoint from file (will fire event through Collection_Removing)
            if (file.Breakpoints.Contains(bp))
                file.Breakpoints.Remove(bp);
        }
        // Save changes
        m_projectService.SaveSettings();
    }
    finally
    {
        m_bAddingOrRemoving = false;
    }
}
// Removes a single click-removed editor breakpoint from the project file;
// the editor indicator is already gone by the time this runs.
private void RemoveBreakpoint(ISledDocument sd, IBreakpoint ibp)
{
    // Called when click-removing a breakpoint in an open document
    var file = sd.SledProjectFile;
    if (file == null)
        return;
    var bp = FindBreakpointInFile(file, ibp);
    if (bp == null)
        return;
    // Remove breakpoint from file (will fire event through Collection_Removing)
    if (file.Breakpoints.Contains(bp))
        file.Breakpoints.Remove(bp);
    // Save changes
    m_projectService.SaveSettings();
}
// Removes a breakpoint given its DOM node, as selected in the breakpoint
// window; removal fires events through DomCollection_ChildRemoving.
private static void RemoveBreakpoint(DomNode domNode)
{
    var breakpoint = domNode.As<SledProjectFilesBreakpointType>();
    breakpoint.File.Breakpoints.Remove(breakpoint);
}
// Debug-only sanity check: editor line numbers are expected to be positive
// (1-based); breaks into the debugger on violation. Compiled out of
// release builds by the [Conditional("DEBUG")] attribute.
[System.Diagnostics.Conditional("DEBUG")]
private static void CheckLineNumber(int lineNumber)
{
    if (lineNumber <= 0)
        System.Diagnostics.Debugger.Break();
}
// Finds the project breakpoint whose editor counterpart is the given
// IBreakpoint reference, or null if the file doesn't track it.
private static SledProjectFilesBreakpointType FindBreakpointInFile(SledProjectFilesFileType file, IBreakpoint ibp)
{
    foreach (var candidate in file.Breakpoints)
    {
        if (candidate.Breakpoint == ibp)
            return candidate;
    }
    return null;
}
// Reports whether the file already tracks a breakpoint on the given line
// (at most one breakpoint per line is allowed).
private static bool IsDuplicate(SledProjectFilesFileType file, int lineNumber)
{
    foreach (var candidate in file.Breakpoints)
    {
        if (candidate.Line == lineNumber)
            return true;
    }
    return false;
}
// Enables or disables the project breakpoint on a given editor line.
// Setting bp.Enabled fires events through
// DomCollection_AttributeChanging/DomCollection_AttributeChanged.
private static void EnableOrDisable(ISledDocument sd, int iLine, bool bEnable)
{
    if (sd == null)
        return;
    if (!sd.IsValidLine(iLine))
        return;
    var ibp = sd.Editor.GetBreakpoint(iLine);
    if (ibp == null)
        return;
    var file = sd.SledProjectFile;
    if (file == null)
        return;
    var bp = FindBreakpointInFile(file, ibp);
    if (bp == null)
        return;
    // Make change (will cause events to fire in DomCollection_AttributeChanging/DomCollection_AttributeChanged)
    bp.Enabled = bEnable;
}
// Enables or disables a breakpoint given its DOM node (breakpoint window path).
private static void EnableOrDisable(DomNode domNode, bool bEnable)
{
    var bp =
        domNode.As<SledProjectFilesBreakpointType>();
    // Make change (will cause events to fire in DomCollection_AttributeChanging/DomCollection_AttributeChanged)
    bp.Enabled = bEnable;
}
// Enables or disables a breakpoint's condition given its DOM node.
private static void ConditionEnableOrDisable(DomNode domNode, bool bEnable)
{
    var bp =
        domNode.As<SledProjectFilesBreakpointType>();
    // Make change (will cause events to fire in DomCollection_AttributeChanging/DomCollection_AttributeChanged)
    bp.ConditionEnabled = bEnable;
}
// Shows the condition dialog for the breakpoint on a given line of an open
// document (context-menu path). Silently does nothing if the line, plugin,
// project file, or breakpoint can't be resolved.
private void ShowBreakpointConditionForm(ISledDocument sd, int lineNumber)
{
    if ((sd == null) || !sd.IsValidLine(lineNumber))
        return;
    if ((sd.LanguagePlugin == null) || (sd.SledProjectFile == null))
        return;
    var editorBp = sd.Editor.GetBreakpoint(lineNumber);
    if (editorBp == null)
        return;
    var projectBp = FindBreakpointInFile(sd.SledProjectFile, editorBp);
    if (projectBp == null)
        return;
    ShowBreakpointConditionFormInternal(projectBp);
}
// Shows the condition dialog for a breakpoint identified by its DOM node.
private void ShowBreakpointConditionForm(DomNode domNode)
{
    if ((domNode == null) || !domNode.Is<SledProjectFilesBreakpointType>())
        return;
    var projectBp = domNode.As<SledProjectFilesBreakpointType>();
    if (projectBp == null)
        return;
    ShowBreakpointConditionFormInternal(projectBp);
}
// Runs the modal condition dialog for one breakpoint and writes back only
// the values the user actually changed (each write fires the DOM
// attribute-change events individually).
private void ShowBreakpointConditionFormInternal(SledProjectFilesBreakpointType bp)
{
    using (var form = new SledBreakpointConditionForm())
    {
        // Store values so post-dialog comparisons see the originals
        var condition = bp.Condition;
        var bConditionResult = bp.ConditionResult;
        // An empty condition can never be enabled
        var bConditionEnabled = string.IsNullOrEmpty(condition) ? false : bp.ConditionEnabled;
        var bUseFunctionEnvironment = bp.UseFunctionEnvironment;
        // Setup form
        form.Plugin = bp.File.LanguagePlugin;
        form.SyntaxHighlighter = GetHighlighter(bp);
        form.Condition = condition;
        form.ConditionResult = bConditionResult;
        form.ConditionEnabled = bConditionEnabled;
        form.UseFunctionEnvironment = bUseFunctionEnvironment;
        // Show form
        if (form.ShowDialog(m_mainForm) != DialogResult.OK)
            return;
        // Update if changed
        if (string.Compare(condition, form.Condition) != 0)
            bp.Condition = form.Condition;
        // Update if changed
        if (bConditionResult != form.ConditionResult)
            bp.ConditionResult = form.ConditionResult;
        // Update if changed
        if (bConditionEnabled != form.ConditionEnabled)
            bp.ConditionEnabled = form.ConditionEnabled;
        // Update if changed
        if (bUseFunctionEnvironment != form.UseFunctionEnvironment)
            bp.UseFunctionEnvironment = form.UseFunctionEnvironment;
    }
}
// Resolves the syntax highlighter to use in the condition dialog for the
// breakpoint's file, or null when any link in the chain is missing.
private SledDocumentSyntaxHighlighter GetHighlighter(SledProjectFilesBreakpointType bp)
{
    if (bp == null)
        return null;
    if (bp.File == null)
        return null;
    if (bp.File.Uri == null)
        return null;
    if (m_documentService == null)
        return null;
    var documentClient = m_documentService.GetDocumentClient(bp.File.Uri);
    // NOTE(review): GetDocumentClient presumably may fail to find a client
    // for the URI (TODO confirm); guard so this accessor stays null-safe
    // like every other step above instead of throwing
    return documentClient == null ? null : documentClient.SyntaxHighlighter;
}
// Raises the Added event.
private void OnBreakpointAdded(SledBreakpointServiceBreakpointEventArgs e)
{
    Added.Raise(this, e);
}
// Raises the SilentAdded event (breakpoints re-created from saved data).
private void OnBreakpointSilentAdded(SledBreakpointServiceBreakpointEventArgs e)
{
    SilentAdded.Raise(this, e);
}
// Raises the Changing event (before a breakpoint attribute commits).
private void OnBreakpointChanging(SledBreakpointServiceBreakpointChangingEventArgs e)
{
    Changing.Raise(this, e);
}
// Raises the Changed event (after a breakpoint attribute commits).
private void OnBreakpointChanged(SledBreakpointServiceBreakpointChangingEventArgs e)
{
    Changed.Raise(this, e);
}
// Raises the Removing event.
private void OnBreakpointRemoving(SledBreakpointServiceBreakpointEventArgs e)
{
    Removing.Raise(this, e);
}
#endregion
#region Private Classes
// Immutable snapshot of a breakpoint's settings, used to re-create the
// breakpoint at a new line after a file is reloaded from disk.
private class TempBpDetails
{
    public TempBpDetails(SledProjectFilesFileType file, int line, string condition, bool conditionResult, bool conditionEnabled, bool useFunctionEnvironment)
    {
        File = file;
        Line = line;
        Condition = condition;
        ConditionResult = conditionResult;
        ConditionEnabled = conditionEnabled;
        UseFunctionEnvironment = useFunctionEnvironment;
    }
    // Owning project file
    public SledProjectFilesFileType File { get; private set; }
    // Line the breakpoint should be re-created on (the tracked new line)
    public int Line { get; private set; }
    // Saved condition text (may be null/empty)
    public string Condition { get; private set; }
    public bool ConditionResult { get; private set; }
    public bool ConditionEnabled { get; private set; }
    public bool UseFunctionEnvironment { get; private set; }
}
#endregion
private ISledProjectService m_projectService;
private ISledDocumentService m_documentService;
private SledBreakpointEditor m_breakpointEditor;
private ISledModifiedFilesFormService m_modifiedFilesFormService;
// Re-entrancy guard: true while this service itself is adding/removing
// editor breakpoints, so editor events don't echo back into the project
private bool m_bAddingOrRemoving;
// Line the user right-clicked, captured when the context menu is built
private int m_iContextMenuClickedLine = -1;
// True while removing a file from the project so its open document's
// editor breakpoints are left in place
private bool m_bPreserveOpenDocumentBreakpoints;
private readonly MainForm m_mainForm;
// DomNodes selected in the breakpoint window when its menu was built
private readonly List<DomNode> m_windowSelection =
    new List<DomNode>();
// Breakpoints staged for re-creation across a file reload (see the
// modified-files handlers)
private readonly List<TempBpDetails> m_breakpointsMoving =
    new List<TempBpDetails>();
private static readonly IEnumerable<object> s_emptyObjectList =
    EmptyEnumerable<object>.Instance;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Reflection;
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reflection.Runtime.General;
using System.Reflection.Runtime.TypeInfos;
using Internal.LowLevelLinq;
using Internal.Reflection.Core;
using Internal.Reflection.Augments;
using Internal.Reflection.Core.Execution;
using Internal.Metadata.NativeFormat;
namespace System.Reflection.Runtime.CustomAttributes
{
//
// The Runtime's implementation of CustomAttributeData for normal metadata-based attributes
//
internal sealed class RuntimeNormalCustomAttributeData : RuntimeCustomAttributeData
{
// Captures the reader and resolves the CustomAttribute record. No argument
// parsing happens here; parsing is deferred until arguments are requested.
internal RuntimeNormalCustomAttributeData(MetadataReader reader, CustomAttributeHandle customAttributeHandle)
{
_reader = reader;
_customAttribute = customAttributeHandle.GetCustomAttribute(reader);
}
// Lazily resolved attribute type. Benign race: concurrent callers may each
// resolve the type, but all produce the same result and the field is volatile.
public sealed override Type AttributeType
{
get
{
Type lazyAttributeType = _lazyAttributeType;
if (lazyAttributeType == null)
{
lazyAttributeType = _lazyAttributeType = _customAttribute.GetAttributeTypeHandle(_reader).Resolve(_reader, new TypeContext(null, null));
}
return lazyAttributeType;
}
}
// Name-only rendering of the attribute type; unlike AttributeType, this does
// not require the type to be resolvable at runtime.
internal sealed override String AttributeTypeString
{
get
{
return _customAttribute.GetAttributeTypeHandle(_reader).FormatTypeName(_reader, new TypeContext(null, null));
}
}
//
// If throwIfMissingMetadata is false, returns null rather than throwing a MissingMetadataException.
//
internal sealed override IList<CustomAttributeTypedArgument> GetConstructorArguments(bool throwIfMissingMetadata)
{
// 'index' is captured by the delegate below so it can look up the matching
// constructor parameter type for the fixed argument currently being parsed.
int index = 0;
LowLevelList<Handle> lazyCtorTypeHandles = null;
LowLevelListWithIList<CustomAttributeTypedArgument> customAttributeTypedArguments = new LowLevelListWithIList<CustomAttributeTypedArgument>();
foreach (FixedArgumentHandle fixedArgumentHandle in _customAttribute.FixedArguments)
{
CustomAttributeTypedArgument customAttributeTypedArgument =
ParseFixedArgument(
_reader,
fixedArgumentHandle,
throwIfMissingMetadata,
delegate ()
{
// If we got here, the custom attribute blob lacked type information (this is actually the typical case.) We must fallback to
// parsing the constructor's signature to get the type info.
if (lazyCtorTypeHandles == null)
{
IEnumerable<Handle> parameterTypeSignatureHandles;
HandleType handleType = _customAttribute.Constructor.HandleType;
switch (handleType)
{
case HandleType.QualifiedMethod:
parameterTypeSignatureHandles = _customAttribute.Constructor.ToQualifiedMethodHandle(_reader).GetQualifiedMethod(_reader).Method.GetMethod(_reader).Signature.GetMethodSignature(_reader).Parameters;
break;
case HandleType.MemberReference:
parameterTypeSignatureHandles = _customAttribute.Constructor.ToMemberReferenceHandle(_reader).GetMemberReference(_reader).Signature.ToMethodSignatureHandle(_reader).GetMethodSignature(_reader).Parameters;
break;
default:
throw new BadImageFormatException();
}
LowLevelList<Handle> ctorTypeHandles = new LowLevelList<Handle>(parameterTypeSignatureHandles);
lazyCtorTypeHandles = ctorTypeHandles;
}
Handle typeHandle = lazyCtorTypeHandles[index];
Exception exception = null;
RuntimeTypeInfo argumentType = typeHandle.TryResolve(_reader, new TypeContext(null, null), ref exception);
if (argumentType == null)
{
if (throwIfMissingMetadata)
throw exception;
return null;
}
return argumentType;
}
);
// A null ArgumentType signals "metadata missing" from ParseFixedArgument;
// propagate it as a null result per this method's contract.
if (customAttributeTypedArgument.ArgumentType == null)
{
Debug.Assert(!throwIfMissingMetadata);
return null;
}
customAttributeTypedArguments.Add(customAttributeTypedArgument);
index++;
}
return customAttributeTypedArguments;
}
//
// If throwIfMissingMetadata is false, returns null rather than throwing a MissingMetadataException.
//
internal sealed override IList<CustomAttributeNamedArgument> GetNamedArguments(bool throwIfMissingMetadata)
{
LowLevelListWithIList<CustomAttributeNamedArgument> customAttributeNamedArguments = new LowLevelListWithIList<CustomAttributeNamedArgument>();
foreach (NamedArgumentHandle namedArgumentHandle in _customAttribute.NamedArguments)
{
NamedArgument namedArgument = namedArgumentHandle.GetNamedArgument(_reader);
String memberName = namedArgument.Name.GetString(_reader);
bool isField = (namedArgument.Flags == NamedArgumentMemberKind.Field);
CustomAttributeTypedArgument typedValue =
ParseFixedArgument(
_reader,
namedArgument.Value,
throwIfMissingMetadata,
delegate ()
{
// We got here because the custom attribute blob did not include type information. For named arguments, this is considered illegal metadata
// (ECMA always includes type info for named arguments.)
throw new BadImageFormatException();
}
);
if (typedValue.ArgumentType == null)
{
Debug.Assert(!throwIfMissingMetadata);
return null;
}
customAttributeNamedArguments.Add(ReflectionAugments.CreateCustomAttributeNamedArgument(this.AttributeType, memberName, isField, typedValue));
}
return customAttributeNamedArguments;
}
// Equals/GetHashCode no need to override (they just implement reference equality but desktop never unified these things.)
//
// Helper for parsing custom attribute arguments.
//
// If throwIfMissingMetadata is false, returns default(CustomAttributeTypedArgument) rather than throwing a MissingMetadataException.
//
// getTypeFromConstructor is invoked only when the blob itself carries no type
// info; it may return null to signal missing metadata (when throwIfMissingMetadata is false).
private CustomAttributeTypedArgument ParseFixedArgument(MetadataReader reader, FixedArgumentHandle fixedArgumentHandle, bool throwIfMissingMetadata, Func<RuntimeTypeInfo> getTypeFromConstructor)
{
FixedArgument fixedArgument = fixedArgumentHandle.GetFixedArgument(reader);
RuntimeTypeInfo argumentType = null;
if (fixedArgument.Type.IsNull(reader))
{
argumentType = getTypeFromConstructor();
if (argumentType == null)
{
Debug.Assert(!throwIfMissingMetadata);
return default(CustomAttributeTypedArgument);
}
}
else
{
Exception exception = null;
argumentType = fixedArgument.Type.TryResolve(reader, new TypeContext(null, null), ref exception);
if (argumentType == null)
{
if (throwIfMissingMetadata)
throw exception;
else
return default(CustomAttributeTypedArgument);
}
}
Object value;
Exception e = fixedArgument.Value.TryParseConstantValue(reader, out value);
if (e != null)
{
if (throwIfMissingMetadata)
throw e;
else
return default(CustomAttributeTypedArgument);
}
return WrapInCustomAttributeTypedArgument(value, argumentType);
}
//
// Wrap a custom attribute argument (or an element of an array-typed custom attribute argument) in a CustomAttributeTypeArgument structure
// for insertion into a CustomAttributeData value.
//
private CustomAttributeTypedArgument WrapInCustomAttributeTypedArgument(Object value, Type argumentType)
{
if (argumentType.Equals(typeof(Object)))
{
// If the declared attribute type is System.Object, we must report the type based on the runtime value.
if (value == null)
argumentType = typeof(String); // Why is null reported as System.String? Because that's what the desktop CLR does.
else if (value is Type)
argumentType = typeof(Type); // value.GetType() will not actually be System.Type - rather it will be some internal implementation type. We only want to report it as System.Type.
else
argumentType = value.GetType();
}
// Handle the array case. Strings are IEnumerable too, so exclude them explicitly.
IEnumerable enumerableValue = value as IEnumerable;
if (enumerableValue != null && !(value is String))
{
if (!argumentType.IsArray)
throw new BadImageFormatException();
Type reportedElementType = argumentType.GetElementType();
LowLevelListWithIList<CustomAttributeTypedArgument> elementTypedArguments = new LowLevelListWithIList<CustomAttributeTypedArgument>();
foreach (Object elementValue in enumerableValue)
{
// Recurse: each element is wrapped with the array's declared element type.
CustomAttributeTypedArgument elementTypedArgument = WrapInCustomAttributeTypedArgument(elementValue, reportedElementType);
elementTypedArguments.Add(elementTypedArgument);
}
return new CustomAttributeTypedArgument(argumentType, new ReadOnlyCollection<CustomAttributeTypedArgument>(elementTypedArguments));
}
else
{
return new CustomAttributeTypedArgument(argumentType, value);
}
}
private readonly MetadataReader _reader;
private readonly CustomAttribute _customAttribute;
private volatile Type _lazyAttributeType;
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Reflection;
using NEvilES;
using NEvilES.Pipeline;
using TiteAz.Domain;
using Autofac;
using TiteAz.Common;
using Microsoft.Extensions.Configuration;
using System.IO;
using System.Data;
using NEvilES.DataStore;
using Npgsql;
using System.Data.SqlClient;
namespace TiteAz.SeedData
{
/// <summary>
/// Seed-data console application: drops/recreates the configured database,
/// seeds a few domain events through the command pipeline, and replays them
/// into the read model.
/// </summary>
public static class Program
{
    public static IConfigurationRoot Configuration { get; set; }

    public static void Main(string[] args)
    {
        SetupConfig();
        var dbType = Configuration.GetSection("SeedData").GetSection("DbType").Value;
        var databaseType = dbType == "pgsql" ? DatabaseType.Postgres : DatabaseType.SqlServer;
        var connStrName = Configuration.GetSection("SeedData").GetSection("ConnectionString").Value;
        Console.WriteLine("Seed data.......");

        // Wire up the container: command context, read model, event store and processors.
        var builder = new ContainerBuilder();
        builder.RegisterInstance(new CommandContext.User(Guid.NewGuid())).Named<CommandContext.IUser>("user");
        builder.RegisterType<ReadModel.SqlReadModel>().AsImplementedInterfaces();
        //builder.RegisterType<InMemoryReadModel>().AsImplementedInterfaces().SingleInstance();
        builder.RegisterInstance<IEventTypeLookupStrategy>(new EventTypeLookupStrategy());
        builder.RegisterModule(new EventStoreDatabaseModule(Configuration.GetConnectionString(connStrName), databaseType));
        builder.RegisterModule(new EventProcessorModule(typeof(User).GetTypeInfo().Assembly, typeof(ReadModel.User).GetTypeInfo().Assembly));
        var container = builder.Build();
        container.Resolve<IEventTypeLookupStrategy>().ScanAssemblyOfType(typeof(User));

        HandleDatabaseDropCreate(databaseType, dbType, connStrName);
        SeedData(container);

        // Replay all stored events into the read model.
        using (var scope = container.BeginLifetimeScope())
        {
            ReplayEvents.Replay(container.Resolve<IFactory>(), scope.Resolve<IAccessDataStore>());
        }
        var reader = (ReadModel.SqlReadModel)container.Resolve<IReadFromReadModel>();
        // var x = reader.Get<ReadModel.User>(Guid.Empty);
        //Console.WriteLine("Read Model Document Count {0}", reader.Count());
        Console.WriteLine("Done - Hit any key!");
        Console.ReadKey();
    }

    /// <summary>
    /// Pushes a small set of sample commands (users, a bill, a debt) through the
    /// command processor inside a single lifetime scope/transaction.
    /// </summary>
    private static void SeedData(IContainer container)
    {
        var id = CombGuid.NewGuid();
        using (var scope = container.BeginLifetimeScope())
        {
            scope.Resolve<PipelineTransaction>();
            var processor = scope.Resolve<ICommandProcessor>();
            var craig = new User.NewUser
            {
                StreamId = CombGuid.NewGuid(),
                Details = new User.Details("craig@test.com", "xxx", "Craig", "Gardiner")
            };
            processor.Process(craig);
            var elijah = new User.NewUser
            {
                StreamId = CombGuid.NewGuid(),
                Details = new User.Details("elijah@test.com", "xxx", "Elijah", "Bate")
            };
            processor.Process(elijah);
            var ourBill = new Bill.Created { StreamId = id, Description = "Sunday arvo fun ;)", Amount = 20.35m };
            processor.Process(ourBill);
            var youOweMe = new Debt.YouOweMe(CombGuid.NewGuid(), craig.StreamId, elijah.StreamId, ourBill.StreamId,
                10.1725m);
            processor.Process(youOweMe);
            processor.Process(new Debt.Accept { StreamId = youOweMe.StreamId });
        }
    }

    /// <summary>
    /// Loads appsettings.json plus the Development overrides into <see cref="Configuration"/>.
    /// </summary>
    public static void SetupConfig()
    {
        var builder = new ConfigurationBuilder()
            .SetBasePath(Directory.GetCurrentDirectory())
            .AddJsonFile("appsettings.json", optional: true, reloadOnChange: false)
            .AddJsonFile($"appsettings.Development.json", optional: true, reloadOnChange: false);
        Configuration = builder.Build();
    }

    /// <summary>
    /// Drops (best-effort) and recreates the target database, then runs the
    /// vendor-specific schema script against it.
    /// </summary>
    public static void HandleDatabaseDropCreate(DatabaseType databaseType, string dbType, string connStrName)
    {
        var connStr = new ConnectionString(Configuration.GetConnectionString(connStrName));
        var createSql = string.Empty;
        IDbConnection conn;
        var drop = Configuration.GetSection("DbTypes").GetSection(dbType).GetSection("drop").Value;
        if (databaseType == DatabaseType.Postgres)
        {
            conn = new NpgsqlConnection(string.Format("User ID={0};Password={1};Host={2};Port={3};Database=postgres;",
                connStr.Keys["User ID"], connStr.Keys["Password"], connStr.Keys["Host"], connStr.Keys["Port"]));
            drop = string.Format(drop, connStr.Keys["Database"]);
            createSql = File.ReadAllText(Path.Combine(Directory.GetCurrentDirectory(), "init.pgsql.sql"));
        }
        else if (databaseType == DatabaseType.SqlServer && dbType != "localdb")
        {
            conn = new SqlConnection(string.Format(@"Server={0};Database=Master;User Id={1};Password={2};",
                connStr.Keys["Server"], connStr.Keys["User Id"], connStr.Keys["Password"]));
            drop = string.Format(drop, connStr.Keys["Database"]);
            createSql = File.ReadAllText(Path.Combine(Directory.GetCurrentDirectory(), "init.mssql.sql"));
        }
        else
        {
            conn = new SqlConnection(string.Format(@"Server={0};Database=Master;Integrated Security=true;",
                connStr.Keys["Server"]));
            drop = string.Format(drop, connStr.Keys["Database"]);
            createSql = File.ReadAllText(Path.Combine(Directory.GetCurrentDirectory(), "init.mssql.sql"));
        }
        // FIX: the connection was previously never disposed; wrap it in a using
        // block so it is closed even when the drop/create/schema steps throw.
        using (conn)
        {
            conn.Open();
            using (var cmd = conn.CreateCommand())
            {
                try
                {
                    cmd.CommandText = drop;
                    cmd.ExecuteNonQuery();
                }
                catch (Exception ex)
                {
                    // Dropping a database that doesn't exist yet is expected on first run.
                    System.Console.WriteLine(ex.Message);
                }
                cmd.CommandText = $"Create database {connStr.Keys["Database"]}";
                cmd.ExecuteNonQuery();
            }
            conn.ChangeDatabase(connStr.Keys["Database"]);
            using (var t = conn.BeginTransaction())
            using (var cmd = conn.CreateCommand())
            {
                cmd.Transaction = t;
                cmd.CommandText = createSql;
                cmd.ExecuteNonQuery();
                t.Commit();
            }
        }
    }
}
/// <summary>
/// Thread-safe in-memory read model, keyed by each item's identity Guid.
/// Useful for tests and seeding where no SQL read model is wanted.
/// </summary>
public class InMemoryReadModel : IReadFromReadModel, IWriteReadModel
{
    // Backing store; ConcurrentDictionary gives lock-free reads and safe concurrent writes.
    private readonly ConcurrentDictionary<Guid, object> store = new ConcurrentDictionary<Guid, object>();

    // First insert wins: TryAdd silently ignores an item whose Id already exists.
    public void Insert<T>(T item) where T : class, IHaveIdentity => store.TryAdd(item.Id, item);

    // Upsert semantics: adds the item or overwrites an existing entry.
    public void Update<T>(T item) where T : class, IHaveIdentity => store[item.Id] = item;

    // Throws KeyNotFoundException when the id is absent; InvalidCastException on a type mismatch.
    public T Get<T>(Guid id) where T : IHaveIdentity => (T)store[id];

    public void Clear() => store.Clear();

    public int Count() => store.Count;
}
}
| |
using System;
using System.IO;
using System.Collections.Specialized;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Net;
using System.Net.Cache;
using System.Text;
using System.Web;
using System.Security.Cryptography;
using Newtonsoft.Json;
using System.Security.Cryptography.X509Certificates;
using System.Net.Security;
using SteamKit2;
using System.Threading;
namespace SteamTrade
{
/// <summary>
/// SteamWeb class to create an API endpoint to the Steam Web.
/// </summary>
public class SteamWeb
{
/// <summary>
/// Base steam community domain.
/// </summary>
public const string SteamCommunityDomain = "steamcommunity.com";
/// <summary>
/// Token of steam. Generated after login (by DoLogin or Authenticate).
/// </summary>
public string Token { get; private set; }
/// <summary>
/// Session id of Steam after Login.
/// </summary>
public string SessionId { get; private set; }
/// <summary>
/// Token secure as string. It is generated after the Login.
/// </summary>
public string TokenSecure { get; private set; }
/// <summary>
/// The Accept-Language header when sending all HTTP requests. Default value is determined according to the constructor caller thread's culture.
/// </summary>
public string AcceptLanguageHeader { get { return acceptLanguageHeader; } set { acceptLanguageHeader = value; } }
// Default: "<culture>,en;q=0.8" for English cultures, otherwise
// "<culture>,<two-letter-lang>;q=0.8,en;q=0.6" so English remains a fallback.
private string acceptLanguageHeader = Thread.CurrentThread.CurrentCulture.TwoLetterISOLanguageName == "en" ? Thread.CurrentThread.CurrentCulture.ToString() + ",en;q=0.8" : Thread.CurrentThread.CurrentCulture.ToString() + "," + Thread.CurrentThread.CurrentCulture.TwoLetterISOLanguageName + ";q=0.8,en;q=0.6";
/// <summary>
/// CookieContainer to save all cookies during the Login. Replaced wholesale by the Authenticate overloads.
/// </summary>
private CookieContainer _cookies = new CookieContainer();
/// <summary>
/// Convenience wrapper around <see cref="SteamWeb.Request"/> that reads the whole
/// response body and returns it as a string.
/// </summary>
/// <param name="url">URL of the http request.</param>
/// <param name="method">HTTP verb to use (GET, POST, HEAD, ...).</param>
/// <param name="data">Form/query values to send with the request.</param>
/// <param name="ajax">Whether to mark the request as an ajax request.</param>
/// <param name="referer">Referer header to send; a Steam default is used when empty.</param>
/// <param name="fetchError">If true, response codes other than HTTP 200 will still be returned, rather than throwing exceptions.</param>
/// <returns>The response body as a string; empty when no response stream exists.</returns>
public string Fetch(string url, string method, NameValueCollection data = null, bool ajax = true, string referer = "", bool fetchError = false)
{
    using (HttpWebResponse response = Request(url, method, data, ajax, referer, fetchError))
    using (Stream responseStream = response.GetResponseStream())
    {
        // No stream means nothing to read — report an empty body.
        if (responseStream == null)
            return "";
        using (var reader = new StreamReader(responseStream))
            return reader.ReadToEnd();
    }
}
/// <summary>
/// Custom wrapper for creating a HttpWebRequest, edited for Steam.
/// </summary>
/// <param name="url">Gets information about the URL of the current request.</param>
/// <param name="method">Gets the HTTP data transfer method (such as GET, POST, or HEAD) used by the client.</param>
/// <param name="data">A NameValueCollection including Headers added to the request.</param>
/// <param name="ajax">A bool to define if the http request is an ajax request.</param>
/// <param name="referer">Gets information about the URL of the client's previous request that linked to the current URL.</param>
/// <param name="fetchError">Return response even if its status code is not 200</param>
/// <returns>An instance of a HttpWebResponse object.</returns>
public HttpWebResponse Request(string url, string method, NameValueCollection data = null, bool ajax = true, string referer = "", bool fetchError = false)
{
// Append the data to the URL for GET-requests.
bool isGetMethod = (method.ToLower() == "get");
// URL-encode every key/value pair and join with '&' into a form-encoded string.
string dataString = (data == null ? null : String.Join("&", Array.ConvertAll(data.AllKeys, key =>
// ReSharper disable once UseStringInterpolation
string.Format("{0}={1}", HttpUtility.UrlEncode(key), HttpUtility.UrlEncode(data[key]))
)));
// Example working with C# 6
// string dataString = (data == null ? null : String.Join("&", Array.ConvertAll(data.AllKeys, key => $"{HttpUtility.UrlEncode(key)}={HttpUtility.UrlEncode(data[key])}" )));
// Append the dataString to the url if it is a GET request.
if (isGetMethod && !string.IsNullOrEmpty(dataString))
{
url += (url.Contains("?") ? "&" : "?") + dataString;
}
// Setup the request.
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
request.Method = method;
request.Accept = "application/json, text/javascript;q=0.9, */*;q=0.5";
request.Headers[HttpRequestHeader.AcceptLanguage] = AcceptLanguageHeader;
request.ContentType = "application/x-www-form-urlencoded; charset=UTF-8";
// request.Host is set automatically.
// Browser-like user agent so Steam serves the same responses as to a real browser.
request.UserAgent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.57 Safari/537.36";
request.Referer = string.IsNullOrEmpty(referer) ? "http://steamcommunity.com/trade/1" : referer;
request.Timeout = 50000; // Timeout after 50 seconds.
request.CachePolicy = new HttpRequestCachePolicy(HttpRequestCacheLevel.Revalidate);
request.AutomaticDecompression = DecompressionMethods.Deflate | DecompressionMethods.GZip;
// If the request is an ajax request we need to add various other Headers, defined below.
if (ajax)
{
request.Headers.Add("X-Requested-With", "XMLHttpRequest");
request.Headers.Add("X-Prototype-Version", "1.7");
}
// Cookies
request.CookieContainer = _cookies;
// If the request is a GET request return now the response. If not go on. Because then we need to apply data to the request.
if (isGetMethod || string.IsNullOrEmpty(dataString))
{
return request.GetResponse() as HttpWebResponse;
}
// Write the data to the body for POST and other methods.
byte[] dataBytes = Encoding.UTF8.GetBytes(dataString);
request.ContentLength = dataBytes.Length;
using (Stream requestStream = request.GetRequestStream())
{
requestStream.Write(dataBytes, 0, dataBytes.Length);
}
// Get the response and return it.
try
{
return request.GetResponse() as HttpWebResponse;
}
catch (WebException ex)
{
// This is thrown if the response code is not 200. When fetchError is set,
// hand the error response back to the caller instead of rethrowing.
if (fetchError)
{
var resp = ex.Response as HttpWebResponse;
if (resp != null)
{
return resp;
}
}
throw;
}
}
/// <summary>
/// Executes the login by using the Steam Website.
/// This Method is not used by Steambot repository, but it could be very helpful if you want to build a own Steambot or want to login into steam services like backpack.tf/csgolounge.com.
/// Updated: 10-02-2015.
/// </summary>
/// <param name="username">Your Steam username.</param>
/// <param name="password">Your Steam password.</param>
/// <returns>A bool containing a value, if the login was successful.</returns>
public bool DoLogin(string username, string password)
{
var data = new NameValueCollection {{"username", username}};
// First get the RSA key with which we will encrypt our password.
string response = Fetch("https://steamcommunity.com/login/getrsakey", "POST", data, false);
GetRsaKey rsaJson = JsonConvert.DeserializeObject<GetRsaKey>(response);
// Validate, if we could get the rsa key.
if (!rsaJson.success)
{
return false;
}
// RSA Encryption: build the public key from the hex-encoded modulus/exponent Steam returned.
RSACryptoServiceProvider rsa = new RSACryptoServiceProvider();
RSAParameters rsaParameters = new RSAParameters
{
Exponent = HexToByte(rsaJson.publickey_exp),
Modulus = HexToByte(rsaJson.publickey_mod)
};
rsa.ImportParameters(rsaParameters);
// Encrypt the password and convert it.
byte[] bytePassword = Encoding.ASCII.GetBytes(password);
byte[] encodedPassword = rsa.Encrypt(bytePassword, false);
string encryptedBase64Password = Convert.ToBase64String(encodedPassword);
SteamResult loginJson = null;
CookieCollection cookieCollection;
string steamGuardText = "";
string steamGuardId = "";
// Do this while we need a captcha or need email authentication. Probably you have mistyped the captcha or the SteamGuard code if this comes multiple times.
do
{
Console.WriteLine("SteamWeb: Logging In...");
bool captcha = loginJson != null && loginJson.captcha_needed;
bool steamGuard = loginJson != null && loginJson.emailauth_needed;
string time = Uri.EscapeDataString(rsaJson.timestamp);
string capGid = string.Empty;
// Response does not need to send if captcha is needed or not.
// ReSharper disable once MergeSequentialChecks
if (loginJson != null && loginJson.captcha_gid != null)
{
capGid = Uri.EscapeDataString(loginJson.captcha_gid);
}
data = new NameValueCollection {{"password", encryptedBase64Password}, {"username", username}};
// Captcha Check: open the captcha in a browser and read the solution from the console.
string capText = "";
if (captcha)
{
Console.WriteLine("SteamWeb: Captcha is needed.");
System.Diagnostics.Process.Start("https://steamcommunity.com/public/captcha.php?gid=" + loginJson.captcha_gid);
Console.WriteLine("SteamWeb: Type the captcha:");
string consoleText = Console.ReadLine();
if (!string.IsNullOrEmpty(consoleText))
{
capText = Uri.EscapeDataString(consoleText);
}
}
data.Add("captchagid", captcha ? capGid : "");
data.Add("captcha_text", captcha ? capText : "");
// Captcha end.
// Added Header for two factor code.
data.Add("twofactorcode", "");
// Added Header for remember login. It can also set to true.
data.Add("remember_login", "false");
// SteamGuard check. If SteamGuard is enabled you need to enter it. Care probably you need to wait 7 days to trade.
// For further information about SteamGuard see: https://support.steampowered.com/kb_article.php?ref=4020-ALZM-5519&l=english.
if (steamGuard)
{
Console.WriteLine("SteamWeb: SteamGuard is needed.");
Console.WriteLine("SteamWeb: Type the code:");
string consoleText = Console.ReadLine();
if (!string.IsNullOrEmpty(consoleText))
{
steamGuardText = Uri.EscapeDataString(consoleText);
}
steamGuardId = loginJson.emailsteamid;
// Adding the machine name to the NameValueCollection, because it is requested by steam.
Console.WriteLine("SteamWeb: Type your machine name:");
consoleText = Console.ReadLine();
var machineName = string.IsNullOrEmpty(consoleText) ? "" : Uri.EscapeDataString(consoleText);
data.Add("loginfriendlyname", machineName != "" ? machineName : "defaultSteamBotMachine");
}
data.Add("emailauth", steamGuardText);
data.Add("emailsteamid", steamGuardId);
// SteamGuard end.
// Added unixTimestamp. It is included in the request normally.
var unixTimestamp = (int)(DateTime.UtcNow.Subtract(new DateTime(1970, 1, 1))).TotalSeconds;
// Added three "0"'s because Steam has a weird unix timestamp interpretation.
data.Add("donotcache", unixTimestamp + "000");
data.Add("rsatimestamp", time);
// Sending the actual login.
using(HttpWebResponse webResponse = Request("https://steamcommunity.com/login/dologin/", "POST", data, false))
{
var stream = webResponse.GetResponseStream();
if (stream == null)
{
return false;
}
using (StreamReader reader = new StreamReader(stream))
{
string json = reader.ReadToEnd();
loginJson = JsonConvert.DeserializeObject<SteamResult>(json);
// Keep the cookies from the last response; they become the session on success.
cookieCollection = webResponse.Cookies;
}
}
} while (loginJson.captcha_needed || loginJson.emailauth_needed);
// If the login was successful, we need to enter the cookies to steam.
if (loginJson.success)
{
_cookies = new CookieContainer();
foreach (Cookie cookie in cookieCollection)
{
_cookies.Add(cookie);
}
SubmitCookies(_cookies);
return true;
}
else
{
Console.WriteLine("SteamWeb Error: " + loginJson.message);
return false;
}
}
///<summary>
/// Authenticate using SteamKit2 and ISteamUserAuth.
/// This does the same as SteamWeb.DoLogin(), but without contacting the Steam Website.
/// </summary>
/// <remarks>Should this method stop working, use <see cref="SteamWeb.DoLogin"/> instead.</remarks>
/// <param name="myUniqueId">Unique id obtained during login; becomes the session id (base64-encoded).</param>
/// <param name="client">An instance of a SteamClient.</param>
/// <param name="myLoginKey">Login Key of your account.</param>
/// <returns>A bool, which is true if the login was successful.</returns>
public bool Authenticate(string myUniqueId, SteamClient client, string myLoginKey)
{
Token = TokenSecure = "";
SessionId = Convert.ToBase64String(Encoding.UTF8.GetBytes(myUniqueId));
_cookies = new CookieContainer();
using (dynamic userAuth = WebAPI.GetInterface("ISteamUserAuth"))
{
// Generate an AES session key.
var sessionKey = CryptoHelper.GenerateRandomBlock(32);
// rsa encrypt it with the public key for the universe we're on
byte[] cryptedSessionKey;
using (RSACrypto rsa = new RSACrypto(KeyDictionary.GetPublicKey(client.ConnectedUniverse)))
{
cryptedSessionKey = rsa.Encrypt(sessionKey);
}
// Copy the login key into a fixed 20-byte buffer (zero-padded) as Steam expects.
byte[] loginKey = new byte[20];
Array.Copy(Encoding.ASCII.GetBytes(myLoginKey), loginKey, myLoginKey.Length);
// AES encrypt the loginkey with our session key.
byte[] cryptedLoginKey = CryptoHelper.SymmetricEncrypt(loginKey, sessionKey);
KeyValue authResult;
// Get the Authentication Result.
try
{
authResult = userAuth.AuthenticateUser(
steamid: client.SteamID.ConvertToUInt64(),
sessionkey: HttpUtility.UrlEncode(cryptedSessionKey),
encrypted_loginkey: HttpUtility.UrlEncode(cryptedLoginKey),
method: "POST",
secure: true
);
}
catch (Exception)
{
// Any failure talking to the WebAPI means we are not authenticated.
Token = TokenSecure = null;
return false;
}
Token = authResult["token"].AsString();
TokenSecure = authResult["tokensecure"].AsString();
// Adding cookies to the cookie container.
_cookies.Add(new Cookie("sessionid", SessionId, string.Empty, SteamCommunityDomain));
_cookies.Add(new Cookie("steamLogin", Token, string.Empty, SteamCommunityDomain));
_cookies.Add(new Cookie("steamLoginSecure", TokenSecure, string.Empty, SteamCommunityDomain));
return true;
}
}
/// <summary>
/// Authenticate using an array of cookies from a browser or whatever source, without contacting the server.
/// It is recommended that you call <see cref="VerifyCookies"/> after calling this method to ensure that the cookies are valid.
/// </summary>
/// <param name="cookies">An array of cookies from a browser or whatever source. Must contain sessionid, steamLogin, steamLoginSecure</param>
/// <exception cref="ArgumentException">One of the required cookies(steamLogin, steamLoginSecure, sessionid) is missing.</exception>
public void Authenticate(System.Collections.Generic.IEnumerable<Cookie> cookies)
{
    var cookieContainer = new CookieContainer();
    string token = null;
    string tokenSecure = null;
    string sessionId = null;
    // Pick out the three required cookies while copying everything into the container.
    foreach (var cookie in cookies)
    {
        switch (cookie.Name)
        {
            case "sessionid":
                sessionId = cookie.Value;
                break;
            case "steamLogin":
                token = cookie.Value;
                break;
            case "steamLoginSecure":
                tokenSecure = cookie.Value;
                break;
        }
        cookieContainer.Add(cookie);
    }
    // Validate before mutating any instance state, so a failed call leaves the object untouched.
    if (token == null)
        throw new ArgumentException("Cookie with name \"steamLogin\" is not found.");
    if (tokenSecure == null)
        throw new ArgumentException("Cookie with name \"steamLoginSecure\" is not found.");
    if (sessionId == null)
        throw new ArgumentException("Cookie with name \"sessionid\" is not found.");
    Token = token;
    TokenSecure = tokenSecure;
    SessionId = sessionId;
    _cookies = cookieContainer;
}
/// <summary>
/// Helper method to verify our precious cookies.
/// Performs a HEAD request against the community site; Steam marks an invalid
/// steamLogin cookie with the value "deleted".
/// </summary>
/// <returns>true if cookies are correct; false otherwise</returns>
public bool VerifyCookies()
{
    using (HttpWebResponse response = Request("http://steamcommunity.com/", "HEAD"))
    {
        Cookie loginCookie = response.Cookies["steamLogin"];
        return loginCookie == null || !loginCookie.Value.Equals("deleted");
    }
}
/// <summary>
/// Method to submit cookies to Steam after Login.
/// Sends an empty POST to the community site so the session cookies are registered.
/// </summary>
/// <param name="cookies">Cookiecontainer which contains cookies after the login to Steam.</param>
static void SubmitCookies (CookieContainer cookies)
{
    var request = WebRequest.Create("https://steamcommunity.com/") as HttpWebRequest;
    // A null cast result means we cannot submit anything; silently bail out.
    if (request == null)
    {
        return;
    }
    request.Method = "POST";
    request.ContentType = "application/x-www-form-urlencoded";
    request.CookieContainer = cookies;
    // An explicit zero content-length is required for an empty POST body.
    request.ContentLength = 0;
    request.GetResponse().Close();
}
/// <summary>
/// Converts a hex-encoded string (e.g. "0A1B") to the corresponding byte array.
/// </summary>
/// <param name="hex">Hex string; must have an even number of digits.</param>
/// <returns>The decoded bytes.</returns>
/// <exception cref="ArgumentException">The string has an odd number of digits.</exception>
private byte[] HexToByte(string hex)
{
    if (hex.Length % 2 == 1)
    {
        // FIX: previously threw the bare System.Exception, which is an anti-pattern.
        // ArgumentException is more specific and still satisfies callers catching Exception.
        throw new ArgumentException("The binary key cannot have an odd number of digits", "hex");
    }
    var bytes = new byte[hex.Length / 2];
    for (int i = 0; i < bytes.Length; i++)
    {
        // Parse each two-character pair as a base-16 byte.
        bytes[i] = Convert.ToByte(hex.Substring(i * 2, 2), 16);
    }
    return bytes;
}
/// <summary>
/// Certificate validation callback that accepts every certificate.
/// NOTE(review): this disables TLS certificate validation entirely, which is a
/// security risk if attached to connections carrying credentials.
/// </summary>
/// <param name="sender">An object that contains state information for this validation.</param>
/// <param name="certificate">The certificate used to authenticate the remote party.</param>
/// <param name="chain">The chain of certificate authorities associated with the remote certificate.</param>
/// <param name="policyErrors">One or more errors associated with the remote certificate.</param>
/// <returns>Always true to accept all certificates.</returns>
public bool ValidateRemoteCertificate(object sender, X509Certificate certificate, X509Chain chain, SslPolicyErrors policyErrors)
{
    // Deliberately trust everything, regardless of any reported policy errors.
    return true;
}
}
// JSON Classes
// These classes are used to deserialize response strings from the login:
// Example of a return string: {"success":true,"publickey_mod":"XXXX87144BF5B2CABFEC24E35655FDC5E438D6064E47D33A3531F3AAB195813E316A5D8AAB1D8A71CB7F031F801200377E8399C475C99CBAFAEFF5B24AE3CF64BXXXXB2FDBA3BC3974D6DCF1E760F8030AB5AB40FA8B9D193A8BEB43AA7260482EAD5CE429F718ED06B0C1F7E063FE81D4234188657DB40EEA4FAF8615111CD3E14CAF536CXXXX3C104BE060A342BF0C9F53BAAA2A4747E43349FF0518F8920664F6E6F09FE41D8D79C884F8FD037276DED0D1D1D540A2C2B6639CF97FF5180E3E75224EXXXX56AAA864EEBF9E8B35B80E25B405597219BFD90F3AD9765D81D148B9500F12519F1F96828C12AEF77D948D0DC9FDAF8C7CC73527ADE7C7F0FF33","publickey_exp":"010001","timestamp":"241590850000","steamid":"7656119824534XXXX","token_gid":"c35434c0c07XXXX"}
/// <summary>
/// Class to Deserialize the json response strings of the getResKey request. See: <see cref="SteamWeb.DoLogin"/>
/// Property names intentionally mirror the JSON keys returned by Steam (see the example
/// response in the comment above), hence the lower-case naming covered by the suppression.
/// </summary>
[SuppressMessage("ReSharper", "InconsistentNaming")]
public class GetRsaKey
{
    // True when Steam accepted the request and the key fields below are populated.
    public bool success { get; set; }

    // RSA public key modulus, as a hex string (see "publickey_mod" in the example response).
    public string publickey_mod { get; set; }

    // RSA public key exponent, as a hex string (the example response shows "010001").
    public string publickey_exp { get; set; }

    // Server-issued timestamp string; presumably must be echoed back with the
    // encrypted password during login — TODO confirm against DoLogin.
    public string timestamp { get; set; }
}
// Examples:
// For not accepted SteamResult:
// {"success":false,"requires_twofactor":false,"message":"","emailauth_needed":true,"emaildomain":"gmail.com","emailsteamid":"7656119824534XXXX"}
// For accepted SteamResult:
// {"success":true,"requires_twofactor":false,"login_complete":true,"transfer_url":"https:\/\/store.steampowered.com\/login\/transfer","transfer_parameters":{"steamid":"7656119824534XXXX","token":"XXXXC39589A9XXXXCB60D651EFXXXX85578AXXXX","auth":"XXXXf1d9683eXXXXc76bdc1888XXXX29","remember_login":false,"webcookie":"XXXX4C33779A4265EXXXXC039D3512DA6B889D2F","token_secure":"XXXX63F43AA2CXXXXC703441A312E1B14AC2XXXX"}}
/// <summary>
/// Class to Deserialize the json response strings after the login. See: <see cref="SteamWeb.DoLogin"/>
/// Property names intentionally mirror the JSON keys returned by Steam (see the example
/// responses in the comment above), hence the lower-case naming covered by the suppression.
/// </summary>
[SuppressMessage("ReSharper", "InconsistentNaming")]
public class SteamResult
{
    // True when the login attempt was accepted.
    public bool success { get; set; }

    // Human-readable message from Steam; empty in the example rejection response.
    public string message { get; set; }

    // True when Steam requires a captcha to be solved before retrying.
    public bool captcha_needed { get; set; }

    // Identifier of the captcha challenge to fetch when captcha_needed is true.
    public string captcha_gid { get; set; }

    // True when Steam requires an email auth code (Steam Guard), per the example response.
    public bool emailauth_needed { get; set; }

    // SteamID associated with the email auth challenge.
    public string emailsteamid { get; set; }
}
}
| |
#if (UNITY_3_0 || UNITY_3_0_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4 || UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_5 || UNITY_4_6)
#define UNITY_3_AND_4
#endif
// Copyright 2013 Howling Moon Software. All rights reserved.
// See http://chipmunk2d.net/legal.php for more information.
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using CP = ChipmunkBinding;
using System;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
/// Used with Chipmunk.NearestPointQueryNearest().
public struct ChipmunkNearestPointQueryInfo {
    /// Raw native handle of the nearest shape; use the 'shape' property to get the managed wrapper.
    public IntPtr _shapeHandle;

    /// The nearest point on the shapes's surface to the query point.
    public Vector2 point;

    /// The distance of the query point to the nearest point.
    public float distance;

    /// The normalized direction from the query point to the closest point on the shape's surface.
    public Vector2 gradient;

    /// The shape that was nearest to the query point.
    public ChipmunkShape shape {
        get { return ChipmunkShape._FromHandle(_shapeHandle); }
    }
}
/// Used with Chipmunk.SegmentQueryFirst().
public struct ChipmunkSegmentQueryInfo {
    /// Raw native handle of the struck shape; use the 'shape' property to get the managed wrapper.
    public IntPtr _shapeHandle;

    /// The normalized value (from 0 to 1) between the start and end points where the segment query hit.
    public float t;

    /// The surface normal of the shape where it was struck by the segment query.
    public Vector2 normal;

    /// The shape that was first struck by the segment query.
    public ChipmunkShape shape {
        get { return ChipmunkShape._FromHandle(_shapeHandle); }
    }
}
/// Similar in function to Unity's Physics class, but for working with Chipmunk2D.
public static class Chipmunk {
    // Backing fields for the lazily-created singleton manager.
    // Prefer the 'manager' property over touching these directly.
    public static ChipmunkManager _manager;
    public static ChipmunkInterpolationManager _interpolationManager;

    /// Finds or creates the ChipmunkManager and configures a new space on it,
    /// seeding the space settings (gravity, iterations, contact offset) from Unity's Physics class.
    private static ChipmunkManager CreateManager(){
        ChipmunkManager manager = GetManager();
        // Debug.Log("Configuring new space.");
        if(manager._space != null) Debug.LogError("Space was already set?");

        ChipmunkSpace space = manager._space = new ChipmunkSpace();
        space.gravity = Physics.gravity;
        space.iterations = Physics.solverIterationCount;
        space.collisionSlop = getDefaultContactOffset();
        space.sleepTimeThreshold = 0.5f;

        return manager;
    }

    /// Locates the "ChipmunkManager" game object in the scene, creating it (with a
    /// ChipmunkManager component) if it does not exist yet. Logs an error and breaks
    /// if an object of that name exists without the expected component.
    private static ChipmunkManager GetManager(){
        GameObject go = GameObject.Find("ChipmunkManager");

        ChipmunkManager manager;
        if(go == null){
            // Debug.Log("Creating new ChipmunkManager.");
            go = new GameObject("ChipmunkManager");
            manager = go.AddComponent<ChipmunkManager>();
        } else {
            // Debug.Log("Found existing ChipmunkManager.");
            manager = go.GetComponent<ChipmunkManager>();
            if(manager == null){
                Debug.LogError("A ChipmunkManager game object already exists but does not have a ChipmunkManager component attached.");
                Debug.Break();
            }
        }

        return manager;
    }

    /// The singleton manager, created (together with the interpolation manager) on first access.
    public static ChipmunkManager manager {
        get {
            if(_manager == null){
                _manager = CreateManager();
                _interpolationManager = _manager.gameObject.AddComponent<ChipmunkInterpolationManager>();
            }

            return _manager;
        }
    }

    /// Amount of gravity to use.
    /// Is an alias for Physics.gravity.
    public static Vector2 gravity {
        get { return Physics.gravity; }
        set {
            // Keep the Chipmunk space and Unity's Physics setting in sync.
            manager._space.gravity = value;
            Physics.gravity = value;
        }
    }

    /// Number of iterations to use in the solver.
    /// Is an alias for Physics.solverIterationCount.
    public static int solverIterationCount {
        get { return Physics.solverIterationCount; }
        set {
            manager._space.iterations = value;
            Physics.solverIterationCount = value;
        }
    }

    /// Amount of allowed overlap of physics shapes.
    /// Is an alias for Physics.minPenetrationForPenalty (Physics.defaultContactOffset in Unity 5+).
    public static float minPenetrationForPenalty {
        get { return getDefaultContactOffset(); }
        set {
            manager._space.collisionSlop = value;
            setDefaultContactOffset(value);
        }
    }

    /// Amount of damping to apply to velocity and angularVelocity.
    /// A value of 0.9 for instance means that the body
    /// will have 90% of it's velocity after a second.
    public static float damping {
        get { return manager._space.damping; }
        set { manager._space.damping = value; }
    }

    /// When objects are moving slower than this for longer
    /// than the sleepTimeThreshold they are considered idle.
    /// The default idleSpeedThreshold is set based on the amount of gravity.
    public static float idleSpeedThreshold {
        get { return manager._space.idleSpeedThreshold; }
        set { manager._space.idleSpeedThreshold = value; }
    }

    /// When objects are moving slower than the idleSpeedThreshold for longer
    /// than the sleepTimeThreshold they are considered idle.
    /// The sleepTimeThreshold default is 0.5
    public static float sleepTimeThreshold {
        get { return manager._space.sleepTimeThreshold; }
        set { manager._space.sleepTimeThreshold = value; }
    }

    /// Rate at which overlapping objects are pushed apart.
    /// Defaults to 0.9^60, meaning it will fix 10% of overlap per 1/60th second.
    public static float collisionBias {
        get { return manager._space.collisionBias; }
        set { manager._space.collisionBias = value; }
    }

    /// Unity doesn't provide notification events for when a transform is modified.
    /// Unfortunately that means that you need to call this to let Chipmunk know when you modify a transform.
    public static void UpdatedTransform(GameObject root){
        // Collect each body affected by the transform change exactly once.
        HashSet<ChipmunkBody> bodies = new HashSet<ChipmunkBody>();
        foreach(ChipmunkBinding.Base component in root.GetComponentsInChildren<ChipmunkBinding.Base>()){
            ChipmunkBody affectedBody = component._UpdatedTransform();
            if(affectedBody != null) bodies.Add(affectedBody);
        }

        // Update the mass properties of the bodies.
        foreach(ChipmunkBody body in bodies)
            body._RecalculateMass();
    }

    //MARK: Nearest Point Query

    /// Delegate type to use with nearest point queries.
    /// Parmeters passed are the shape, the nearest point on the surface of that shape, and the distance to the query point.
    public delegate void NearestPointQueryDelegate(ChipmunkShape shape, float dist, Vector2 point);

    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    private static extern void _NearestPointQuery(IntPtr handle, Vector2 point, float maxDist, uint layers, int group, NearestPointQueryDelegate del);

    /// Iterate all the shapes within 'maxDist' of 'point' by calling 'del' for each.
    /// Shapes are filtered using the group and layers in the same way as collisions.
    public static void NearestPointQuery(Vector2 point, float maxDist, uint layers, string group, NearestPointQueryDelegate del){
        IntPtr handle = manager._space._handle;
        _NearestPointQuery(handle, point, maxDist, layers, ChipmunkBinding.InternString(group), del);
    }

    /// Calls NearestPointQuery() with all layers and no group.
    public static void NearestPointQuery(Vector2 point, float maxDist, NearestPointQueryDelegate del){
        NearestPointQuery(point, maxDist, ~(uint)0, "", del);
    }

    [DllImport(CP.IMPORT)] private static extern IntPtr
    cpSpaceNearestPointQueryNearest(IntPtr handle, Vector2 point, float maxDistance, uint layers, int group, out ChipmunkNearestPointQueryInfo info);

    /// Return only information about the nearest shape to the query point.
    /// Shapes are filtered using the group and layers in the same way as collisions.
    public static ChipmunkShape NearestPointQueryNearest(Vector2 point, float maxDistance, uint layers, string group, out ChipmunkNearestPointQueryInfo info){
        IntPtr handle = manager._space._handle;
        return ChipmunkShape._FromHandle(cpSpaceNearestPointQueryNearest(handle, point, maxDistance, layers, ChipmunkBinding.InternString(group), out info));
    }

    /// Calls NearestPointQueryNearest() with all layers and no group.
    public static ChipmunkShape NearestPointQueryNearest(Vector2 point, float maxDistance, out ChipmunkNearestPointQueryInfo info){
        return NearestPointQueryNearest(point, maxDistance, ~(uint)0, "", out info);
    }

    //Mark: Segment Queries

    /// Delegate type to use with segment queries.
    /// Parameters passed are the shape, the fraction along the query segment and the normal of the intersection with the shape.
    public delegate void SegmentQueryDelegate(ChipmunkShape shape, float fraction, Vector2 normal);

    [MethodImplAttribute(MethodImplOptions.InternalCall)]
    private static extern void _SegmentQuery(IntPtr handle, Vector2 start, Vector2 end, uint layers, int group, SegmentQueryDelegate del);

    /// Find all shapes overlapping the segment with the given start and end points.
    /// Shapes are filtered using the group and layers in the same way as collisions.
    public static void SegmentQuery(Vector2 start, Vector2 end, uint layers, string group, SegmentQueryDelegate del){
        IntPtr handle = manager._space._handle;
        _SegmentQuery(handle, start, end, layers, ChipmunkBinding.InternString(group), del);
    }

    /// Calls SegmentQuery() with all layers and no group.
    public static void SegmentQuery(Vector2 start, Vector2 end, SegmentQueryDelegate del){
        SegmentQuery(start, end, ~(uint)0, "", del);
    }

    [DllImport(CP.IMPORT)] private static extern IntPtr
    cpSpaceSegmentQueryFirst(IntPtr handle, Vector2 start, Vector2 end, uint layers, int group, out ChipmunkSegmentQueryInfo info);

    /// Return only the first shape struck by the segment query as it goes from start to end.
    /// Shapes are filtered using the group and layers in the same way as collisions.
    public static ChipmunkShape SegmentQueryFirst(Vector2 start, Vector2 end, uint layers, string group, out ChipmunkSegmentQueryInfo info){
        IntPtr handle = manager._space._handle;
        return ChipmunkShape._FromHandle(cpSpaceSegmentQueryFirst(handle, start, end, layers, ChipmunkBinding.InternString(group), out info));
    }

    /// Calls SegmentQueryFirst() with all layers and no group.
    public static ChipmunkShape SegmentQueryFirst(Vector2 start, Vector2 end, out ChipmunkSegmentQueryInfo info){
        return SegmentQueryFirst(start, end, ~(uint)0, "", out info);
    }

    /// <summary>
    /// Gets the amount of allowed overlap of physics shapes.
    /// Is an alias for Physics.minPenetrationForPenalty (Physics.defaultContactOffset in Unity 5+).
    /// </summary>
    public static float getDefaultContactOffset () {
#if !UNITY_3_AND_4
        return Physics.defaultContactOffset;
#else
        return Physics.minPenetrationForPenalty;
#endif
    }

    /// <summary>
    /// Sets the amount of allowed overlap of physics shapes.
    /// Is an alias for Physics.minPenetrationForPenalty (Physics.defaultContactOffset in Unity 5+).
    /// </summary>
    public static void setDefaultContactOffset (float value) {
#if !UNITY_3_AND_4
        Physics.defaultContactOffset = value;
#else
        Physics.minPenetrationForPenalty = value;
#endif
    }
}
| |
//-----------------------------------------------------------------------------
// <copyright file="Connection.cs" company="WheelMUD Development Team">
// Copyright (c) WheelMUD Development Team. See LICENSE.txt. This file is
// subject to the Microsoft Public License. All other rights reserved.
// </copyright>
// <summary>
// This is the low level connection object that is assigned to a user when they connect.
// Created: August 2006 by Foxedup
// </summary>
//-----------------------------------------------------------------------------
namespace WheelMUD.Server
{
using System;
using System.Diagnostics;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using WheelMUD.Core;
using WheelMUD.Core.Enums;
using WheelMUD.Core.Output;
using WheelMUD.Interfaces;
using WheelMUD.Server.Telnet;
/// <summary>Represents a connection to a client.</summary>
public class Connection : IConnection
{
    /// <summary>The threshold, in characters, beyond which MCCP should be used.</summary>
    private const int MCCPThreshold = 100;

    /// <summary>The socket upon which this connection is based.</summary>
    private readonly Socket socket;

    /// <summary>The hosting system of this connection.</summary>
    private readonly ISubSystem connectionHost;

    /// <summary>How many rows of text the client can handle as a single display page.</summary>
    private int pagingRowLimit;

    /// <summary>Initializes a new instance of the <see cref="Connection"/> class.</summary>
    /// <param name="socket">The socket upon which this connection is to be based.</param>
    /// <param name="connectionHost">The system hosting this connection.</param>
    public Connection(Socket socket, ISubSystem connectionHost)
    {
        this.Buffer = new StringBuilder();
        this.OutputBuffer = new OutputBuffer();
        // One-byte receive buffer: data is read from the socket a byte at a time.
        this.Data = new byte[1];
        this.Terminal = new Terminal();
        this.socket = socket;
        var remoteEndPoint = (IPEndPoint)this.socket.RemoteEndPoint;
        this.CurrentIPAddress = remoteEndPoint.Address;
        this.ID = Guid.NewGuid().ToString();
        this.TelnetCodeHandler = new TelnetCodeHandler(this);

        // @@@ TODO: Paging row size should be dynamic; this WAS called BufferLength in
        // discussion: http://www.wheelmud.net/Forums/tabid/59/aft/1600/Default.aspx
        this.PagingRowLimit = 40;
        this.connectionHost = connectionHost;
    }

    /// <summary>The 'client disconnected' event handler.</summary>
    public event EventHandler<ConnectionArgs> ClientDisconnected;

    /// <summary>The 'data received' event handler.</summary>
    public event EventHandler<ConnectionArgs> DataReceived;

    /// <summary>The 'data sent' event handler.</summary>
    public event EventHandler<ConnectionArgs> DataSent;

    /// <summary>Gets the Terminal Options of this connection.</summary>
    public ITerminal Terminal { get; private set; }

    /// <summary>Gets the ID of this connection.</summary>
    public string ID { get; private set; }

    /// <summary>Gets the IP Address for this connection.</summary>
    public IPAddress CurrentIPAddress { get; private set; }

    /// <summary>Gets the buffer of this connection.</summary>
    public byte[] Data { get; private set; }

    /// <summary>Gets or sets the buffer of data not yet passed as an action.</summary>
    public StringBuilder Buffer { get; set; }

    /// <summary>Gets or sets the number of buffered rows which the connection's client can handle as one page.</summary>
    public int PagingRowLimit
    {
        get
        {
            return this.pagingRowLimit;
        }

        set
        {
            // A value of 0 is treated as "effectively unpaged" (1000 rows per page).
            this.pagingRowLimit = value == 0 ? 1000 : value;
        }
    }

    /// <summary>Gets or sets the last raw input the server received.</summary>
    public string LastRawInput { get; set; }

    /// <summary>Gets the telnet option code handler for this connection.</summary>
    public ITelnetCodeHandler TelnetCodeHandler { get; private set; }

    /// <summary>Gets or sets the last string used to end input from the client.</summary>
    public string LastInputTerminator { get; set; }

    /// <summary>Gets or sets the buffer still waiting to be sent to the connection.</summary>
    public OutputBuffer OutputBuffer { get; set; }

    /// <summary>Disconnects the connection.</summary>
    public void Disconnect()
    {
        this.OnConnectionDisconnect();
    }

    /// <summary>Sends raw bytes to the connection.</summary>
    /// <param name="data">The data to send to the connection.</param>
    public void Send(byte[] data)
    {
        try
        {
            this.socket.BeginSend(data, 0, data.Length, 0, new AsyncCallback(this.OnSendComplete), null);
        }
        catch (SocketException)
        {
            // A failed send means the client is effectively gone; tear down the connection.
            this.OnConnectionDisconnect();
        }
        catch (ObjectDisposedException)
        {
            this.OnConnectionDisconnect();
        }
    }

    /// <summary>Sends string data to the connection.</summary>
    /// <remarks>The data passes through the handlers to be formatted for display.</remarks>
    /// <param name="data">The data to send</param>
    public void Send(string data)
    {
        this.Send(data, false);
    }

    /// <summary>Sends string data to the connection.</summary>
    /// <param name="data">The data to send.</param>
    /// <param name="bypassDataFormatter">Indicates whether the data formatter should be bypassed (for a quicker send).</param>
    public void Send(string data, bool bypassDataFormatter)
    {
        this.Send(data, bypassDataFormatter, false);
    }

    /// <summary>Sends string data to the connection</summary>
    /// <param name="data">data to send.</param>
    /// <param name="bypassDataFormatter">Indicates whether the data formatter should be bypassed (for a quicker send).</param>
    /// <param name="sendAllData">Indicates if paging should be allowed</param>
    public void Send(string data, bool bypassDataFormatter, bool sendAllData)
    {
        if (!bypassDataFormatter)
        {
            data = DataFormatter.FormatData(data, this, sendAllData);
        }

        byte[] bytes;

        // Check for MCCP (its not worth using for short strings as the overhead is quite high).
        if (this.Terminal.UseMCCP && data.Length > MCCPThreshold)
        {
            // Compress the data
            bytes = MCCPHandler.Compress(data);

            // Announce that compressed data follows. The bytes 255 250 86 255 240
            // are the telnet subnegotiation IAC SB COMPRESS2 IAC SE.
            // NOTE(review): MCCP2 normally starts compression once for the whole
            // stream rather than per message — confirm the client side expects this.
            this.Send(new byte[] { 255, 250, 86, 255, 240 });
        }
        else
        {
            // Line below commented out by JFed 11/28/2011. We lose the 8th bit with this encoding, which breaks special characters like ASCII art
            // bytes = ASCIIEncoding.ASCII.GetBytes(data);
            // Encoding using code page 437 (old 8bit default ascii).
            bytes = Encoding.GetEncoding(437).GetBytes(data);
        }

        // Send the data.
        this.Send(bytes);
    }

    /// <summary>Sends data from the output buffer to the client.</summary>
    /// <param name="bufferDirection">Direction to move in the buffer.</param>
    public void ProcessBuffer(BufferDirection bufferDirection)
    {
        if (this.OutputBuffer.HasMoreData)
        {
            string[] output = this.OutputBuffer.GetRows(bufferDirection, this.PagingRowLimit);

            // If data remains after taking this page, the formatted output gets an overflow indicator.
            bool appendOverflow = this.OutputBuffer.HasMoreData;
            string data = BufferHandler.Format(
                output,
                false,
                appendOverflow,
                this.OutputBuffer.CurrentLocation,
                this.OutputBuffer.Length);

            this.Send(data, false, true);
        }
    }

    /// <summary>Asynchronously listens for any incoming data.</summary>
    public void ListenForData()
    {
        try
        {
            // Start receiving any data written by the connected client
            // asynchronously
            this.socket.BeginReceive(
                this.Data,
                0,
                this.Data.Length,
                SocketFlags.None,
                new AsyncCallback(this.OnDataReceived),
                null);
        }
        catch (SocketException)
        {
            this.OnConnectionDisconnect();
        }
    }

    /// <summary>Asynchronous callback when a send completes successfully.</summary>
    /// <param name="asyncResult">The asynchronous result.</param>
    private void OnSendComplete(IAsyncResult asyncResult)
    {
        try
        {
            this.socket.EndSend(asyncResult);

            // Raise our data sent event.
            if (this.DataSent != null)
            {
                this.DataSent(this, new ConnectionArgs(this));
            }
        }
        catch
        {
            // Any failure completing the send (or in a DataSent subscriber) is
            // treated as a dead connection.
            this.OnConnectionDisconnect();
        }
    }

    /// <summary>The callback function invoked when the socket detects any client data was received.</summary>
    /// <param name="asyncResult">The asynchronous result.</param>
    private void OnDataReceived(IAsyncResult asyncResult)
    {
        try
        {
            int iRx;
            if (this.socket.Connected)
            {
                // Complete the BeginReceive() asynchronous call by EndReceive() method
                // which will return the number of characters written to the stream
                // by the client
                iRx = this.socket.EndReceive(asyncResult);

                // If the number of bytes received is 0 then something fishy is going on, so
                // we close the socket.
                if (iRx == 0)
                {
                    this.OnConnectionDisconnect();
                }
                else
                {
                    // Raise the Data Received Event. Signals that some data has arrived.
                    if (this.DataReceived != null)
                    {
                        this.DataReceived(this, new ConnectionArgs(this));
                    }

                    // Continue the waiting for data on the Socket
                    this.ListenForData();
                }
            }
        }
        catch (ObjectDisposedException)
        {
            // If we're shutting down, quietly ignore these exceptions and try to close the connection.
            this.OnConnectionDisconnect();
        }
        catch (ThreadAbortException)
        {
            // If we're shutting down, quietly ignore these exceptions and try to close the connection.
            this.OnConnectionDisconnect();
        }
        catch (Exception ex)
        {
            // In order to isolate connection-specific issues, we're going to trap the exception, log
            // the details, and kill that connection. (Other connections and the game itself should
            // be able to continue through such situations.)
            string ip = this.CurrentIPAddress == null ? "[null]" : this.CurrentIPAddress.ToString();
            string format = "Exception encountered for connection:{0}IP: {1} (ID {2}):{0}{3}";
            string message = string.Format(format, Environment.NewLine, ip, this.ID, ex.ToDeepString());
            this.connectionHost.InformSubscribedSystem(message);

            // If the debugger is attached, we probably want to break now in order to better debug
            // the issue closer to where it occurred; if your debugger broke here you may want to
            // look at the stack trace to see where the exception originated.
            if (Debugger.IsAttached)
            {
                Debugger.Break();
            }

            this.OnConnectionDisconnect();
        }
    }

    /// <summary>Disconnects the sockets and raises the disconnected event.</summary>
    /// <remarks>
    /// NOTE(review): the ClientDisconnected event is only raised when the socket was
    /// still connected on entry; a second call (or an already-closed socket) raises
    /// nothing — confirm subscribers do not depend on always being notified.
    /// </remarks>
    private void OnConnectionDisconnect()
    {
        if (this.socket.Connected)
        {
            this.socket.Shutdown(SocketShutdown.Both);
            this.socket.Close();

            if (this.ClientDisconnected != null)
            {
                this.ClientDisconnected(this, new ConnectionArgs(this));
            }
        }
    }
}
}
| |
/*
Copyright 2018 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using ArcGIS.Core;
using ArcGIS.Core.Data;
using ArcGIS.Desktop.Framework.Threading.Tasks;
namespace SDKExamples
{
/// <summary>
/// Illustrates how to search from a Table.
/// </summary>
///
/// <remarks>
/// <para>
/// While it is true classes that are derived from the <see cref="ArcGIS.Core.CoreObjectsBase"/> super class
/// consumes native resources (e.g., <see cref="ArcGIS.Core.Data.Geodatabase"/> or <see cref="ArcGIS.Core.Data.FeatureClass"/>),
/// you can rest assured that the garbage collector will properly dispose of the unmanaged resources during
/// finalization. However, there are certain workflows that require a <b>deterministic</b> finalization of the
/// <see cref="ArcGIS.Core.Data.Geodatabase"/>. Consider the case of a file geodatabase that needs to be deleted
/// on the fly at a particular moment. Because of the <b>indeterministic</b> nature of garbage collection, we can't
/// count on the garbage collector to dispose of the Geodatabase object, thereby removing the <b>lock(s)</b> at the
/// moment we want. To ensure a deterministic finalization of important native resources such as a
/// <see cref="ArcGIS.Core.Data.Geodatabase"/> or <see cref="ArcGIS.Core.Data.FeatureClass"/>, you should declare
/// and instantiate said objects in a <b>using</b> statement. Alternatively, you can achieve the same result by
/// putting the object inside a try block and then calling Dispose() in a finally block.
/// </para>
/// <para>
/// In general, you should always call Dispose() on the following types of objects:
/// </para>
/// <para>
/// - Those that are derived from <see cref="ArcGIS.Core.Data.Datastore"/> (e.g., <see cref="ArcGIS.Core.Data.Geodatabase"/>).
/// </para>
/// <para>
/// - Those that are derived from <see cref="ArcGIS.Core.Data.Dataset"/> (e.g., <see cref="ArcGIS.Core.Data.Table"/>).
/// </para>
/// <para>
/// - <see cref="ArcGIS.Core.Data.RowCursor"/> and <see cref="ArcGIS.Core.Data.RowBuffer"/>.
/// </para>
/// <para>
/// - <see cref="ArcGIS.Core.Data.Row"/> and <see cref="ArcGIS.Core.Data.Feature"/>.
/// </para>
/// <para>
/// - <see cref="ArcGIS.Core.Data.Selection"/>.
/// </para>
/// <para>
/// - <see cref="ArcGIS.Core.Data.VersionManager"/> and <see cref="ArcGIS.Core.Data.Version"/>.
/// </para>
/// </remarks>
public class TableSearch
{
    /// <summary>
    /// In order to illustrate that Geodatabase calls have to be made on the MCT
    /// </summary>
    /// <returns>A Task that completes when the sample has run on the CIM thread.</returns>
    public async Task TableSearchAsync()
    {
        await QueuedTask.Run(() => MainMethodCode());
    }

    /// <summary>
    /// Sample body: opens the EmployeeInfo table from a file geodatabase and demonstrates
    /// non-recycling and recycling searches, disposing every Row/RowCursor it creates.
    /// Must run on the MCT (see TableSearchAsync).
    /// </summary>
    public void MainMethodCode()
    {
        using (Geodatabase geodatabase = new Geodatabase(new FileGeodatabaseConnectionPath(new Uri(@"C:\Data\LocalGovernment.gdb"))))
        using (Table table = OpenTable(geodatabase, "EmployeeInfo"))
        {
            // If you are not sure if EmployeeInfo Exists...
            if (table == null)
                return;

            // If you want to make sure the field Name exists...
            TableDefinition tableDefinition = table.GetDefinition();
            if (tableDefinition.FindField("COSTCTRN") < 0)
                //This could be a custom exception...
                throw new Exception("The desired Field Name does not exist. Need to investigate why this is missing");

            // ******************** WITHOUT USING RECYCLING ********************

            // Declared up front so the finally block can dispose whatever was populated
            // before any exception occurred.
            List<Row> informationTechnologyEmployees = null;
            List<Row> nullList = null;
            List<Row> possiblyEmptyListOfRows = null;
            List<Row> partiallyPopulatedRows = null;
            List<Row> distinctCombinationRows = null;
            List<Row> orderedRows = null;

            try
            {
                // This should return a list of rows if the Field Name exists.
                informationTechnologyEmployees = GetRowListFor(table, new QueryFilter {
                    WhereClause = "COSTCTRN = 'Information Technology'"
                });

                // This should return a null since EmployeeInfo Table does not have an ADDRESS field.
                nullList = GetRowListFor(table, new QueryFilter {
                    WhereClause = "ADDRESS = 'Something'"
                });

                // This should return an empty list Since there is a mismatch in the case of the requested CostCenter Name and the actual.
                possiblyEmptyListOfRows = GetRowListFor(table, new QueryFilter {
                    WhereClause = "COSTCTRN = 'Water'"
                });

                // This should return a list of Rows with only OBJECTID, KNOWNAS and EMAIL fields populated. Everything else will be null.
                partiallyPopulatedRows = GetRowListFor(table, new QueryFilter {
                    WhereClause = "COSTCTRN = 'Information Technology'",
                    SubFields = "KNOWNAS, EMAIL"
                });

                Row anyRow = partiallyPopulatedRows.First();

                // Keep in mind that the FindField method is provided as a convenience method. It is a costly operation where
                // all the fields are enumerated to find the index. So, you might want to be judicious in using it.
                int knownAsFieldIndex = anyRow.FindField("KNOWNAS");
                int emailFieldIndex = anyRow.FindField("EMAIL");

                foreach (Row partiallyPopulatedRow in partiallyPopulatedRows)
                {
                    //do something with
                    object knownAsValue = partiallyPopulatedRow[knownAsFieldIndex];
                    object emailValue = partiallyPopulatedRow[emailFieldIndex];
                }

                // This should return a list of Rows with name and location of one Elected Official per Wing .
                distinctCombinationRows = GetRowListFor(table, new QueryFilter {
                    WhereClause = "COSTCTRN = 'Elected Officials'",
                    SubFields = "KNOWNAS, LOCATION, WING",
                    PrefixClause = "DISTINCT"
                });

                // This should return a list of Rows ordered by the office numbers of the IT employees.
                orderedRows = GetRowListFor(table, new QueryFilter {
                    WhereClause = "COSTCTRN = 'Information Technology'",
                    SubFields = "KNOWNAS, OFFICE, LOCATION",
                    PostfixClause = "ORDER BY OFFICE"
                });
            }
            finally
            {
                // Rows encapsulate unmanaged resources, so dispose every list we may
                // have populated, even if an exception interrupted the sequence above.
                Dispose(informationTechnologyEmployees);
                Dispose(nullList);
                Dispose(possiblyEmptyListOfRows);
                Dispose(partiallyPopulatedRows);
                Dispose(distinctCombinationRows);
                Dispose(orderedRows);
            }

            //************************ USING RECYCLING *****************************

            using (RowCursor rowCursor = table.Search(new QueryFilter
            {
                WhereClause = "COSTCTRN = 'Elected Officials'",
                SubFields = "KNOWNAS, LOCATION, WING",
                PrefixClause = "DISTINCT"
            }))
            {
                while (rowCursor.MoveNext())
                {
                    // Do something with rowCursor.Current. Also, remember to Dispose of it when done processing.
                }
            }

            // If you use try to assign the rowCursor.Current to Row references...
            using (RowCursor rowCursor = table.Search(new QueryFilter
            {
                WhereClause = "COSTCTRN = 'Elected Officials'",
                SubFields = "KNOWNAS, LOCATION, WING",
                PrefixClause = "DISTINCT"
            }))
            {
                List<Row> rows = new List<Row>();
                Row lastRow = null;

                while (rowCursor.MoveNext())
                {
                    rows.Add(rowCursor.Current);
                    lastRow = rowCursor.Current;
                }

                // After the loop is done, lastRow will point to the last Row that was returned by the enumeration. Each row in the rows
                // list will be pointing to the same Row Object as lastRow, which is the last object that was enumerated by the rowCursor
                // enumerator before moving past the last result, i.e. for each row in rows, the condition row == lastRow will be true.
                // Since Row encapsulates unmanaged resources, it is important to remember to call Dispose() on every entry in the list
                // when the list is no longer in use. Alternatively, do not add the row to the list. Instead, process each of them
                // inside the cursor.
                Dispose(rows);
            }
        }
    }

    /// <summary>
    /// Searches a given Table to return the content of the complete row. Note that this method is not using Recycling
    /// </summary>
    /// <remarks>using ArcGIS.Core.Data expected </remarks>
    /// <note>ReturnValue is typeof List&lt;Row&gt;; null on any search failure (see catch blocks).</note>
    private List<Row> GetRowListFor(Table table, QueryFilter queryFilter)
    {
        List<Row> rows = new List<Row>();

        try
        {
            // 'false' disables recycling so each Row is an independent object the caller owns.
            using (RowCursor rowCursor = table.Search(queryFilter, false))
            {
                while (rowCursor.MoveNext())
                {
                    rows.Add(rowCursor.Current);
                }
            }
        }
        catch (GeodatabaseFieldException fieldException)
        {
            // One of the fields in the where clause might not exist. There are multiple ways this can be handled:
            // 1. You could rethrow the exception to bubble up to the caller after some debug or info logging
            // 2. You could return null to signify that something went wrong. The logs added before return could tell you what went wrong.
            // 3. You could return empty list of rows. This might not be advisable because if there was no exception thrown and the
            //    query returned no results, you would also get an empty list of rows. This might cause problems when
            //    differentiating between a failed Search attempt and a successful search attempt which gave no result.
            // logger.Error(fieldException.Message);
            return null;
        }
        catch (Exception exception)
        {
            // Deliberate best-effort swallow for this sample: any other failure is
            // signaled to the caller as null (the logger call is illustrative only).
            // logger.Error(exception.Message);
            return null;
        }

        return rows;
    }

    /// <summary>
    /// Opens a table and returns reference if it exists
    /// </summary>
    /// <returns>The opened Table, or null when the dataset is missing from the geodatabase.</returns>
    private static Table OpenTable(Geodatabase geodatabase, string tableName)
    {
        Table table;

        try
        {
            table = geodatabase.OpenDataset<Table>(tableName);
        }
        catch (GeodatabaseCatalogDatasetException exception)
        {
            // Dataset does not exist (or cannot be opened); deliberately mapped to null for this sample.
            // logger.Error(exception.Message);
            return null;
        }

        return table;
    }

    /// <summary>
    /// Disposes every non-null CoreObjectsBase in the sequence; safe to call with a null sequence.
    /// </summary>
    private static void Dispose<T>(IEnumerable<T> iterator) where T : CoreObjectsBase
    {
        if (iterator != null)
        {
            foreach (T coreObject in iterator)
            {
                if (coreObject != null)
                    coreObject.Dispose();
            }
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using Moq;
using NuGet.Test.Mocks;
using Xunit;
using Xunit.Extensions;
namespace NuGet.Test
{
public class PackageSourceProviderTest
{
[Fact]
public void LoadPackageSourcesPerformMigrationIfSpecified()
{
    // Arrange: three sources in settings, with "three" marked disabled.
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true)).Returns(
        new[] {
            new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false),
        }
    );

    // disable package "three"
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new[] { new KeyValuePair<string, string>("three", "true" ) });

    // Capture whatever the provider writes back so the migrated set can be asserted on.
    IList<KeyValuePair<string, string>> savedSettingValues = null;
    settings.Setup(s => s.SetValues("packageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback<string, IList<KeyValuePair<string, string>>>((_, savedVals) => { savedSettingValues = savedVals; })
        .Verifiable();

    // Migration map: "one" -> "good", "three" -> "awesome"; the foo/bar entry matches
    // nothing in settings, and "two" has no mapping so it should pass through unchanged.
    var provider = CreatePackageSourceProvider(settings.Object,
        null,
        new Dictionary<PackageSource, PackageSource> {
            { new PackageSource("onesource", "one"), new PackageSource("goodsource", "good") },
            { new PackageSource("foo", "bar"), new PackageSource("foo", "bar") },
            { new PackageSource("threesource", "three"), new PackageSource("awesomesource", "awesome") }
        }
    );

    // Act
    var values = provider.LoadPackageSources().ToList();
    savedSettingValues = savedSettingValues.ToList();

    // Assert: sources are migrated in place, order preserved, and the disabled flag
    // on "three" carries over to its migrated replacement "awesome".
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[0], "good", "goodsource", true);
    AssertPackageSource(values[1], "two", "twosource", true);
    AssertPackageSource(values[2], "awesome", "awesomesource", false);

    // The migrated set must also have been persisted back to settings.
    Assert.Equal(3, savedSettingValues.Count);
    Assert.Equal("good", savedSettingValues[0].Key);
    Assert.Equal("goodsource", savedSettingValues[0].Value);
    Assert.Equal("two", savedSettingValues[1].Key);
    Assert.Equal("twosource", savedSettingValues[1].Value);
    Assert.Equal("awesome", savedSettingValues[2].Key);
    Assert.Equal("awesomesource", savedSettingValues[2].Value);
}
// Verifies that an empty "packageSources" section yields no package sources.
//
// NOTE(fix): the original body declared the local `provider` twice (a CS0128
// compile error) and first arranged three package sources, contradicting both
// the test name and the Assert.Equal(0, ...) expectation — apparently a merge
// artifact. Consolidated to a single empty-settings arrangement.
[Fact]
public void TestNoPackageSourcesAreReturnedIfUserSettingsIsEmpty()
{
    // Arrange: user settings contain no package sources and no disabled entries.
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
            .Returns(new SettingValue[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(0, values.Count);
}
// A null provider-default list must not contribute any sources.
[Fact]
public void LoadPackageSourcesReturnsEmptySequenceIfDefaultPackageSourceIsNull()
{
    // Arrange: empty user settings and no provider defaults at all.
    var settingsMock = new Mock<ISettings>();
    var packageSourceProvider = CreatePackageSourceProvider(settingsMock.Object, providerDefaultSources: null);

    // Act
    var loadedSources = packageSourceProvider.LoadPackageSources();

    // Assert: nothing should be returned.
    Assert.False(loadedSources.Any());
}
// An empty provider-default list must behave the same as a null one.
[Fact]
public void LoadPackageSourcesReturnsEmptySequenceIfDefaultPackageSourceIsEmpty()
{
    // Arrange: empty user settings and a zero-length defaults array.
    var settingsMock = new Mock<ISettings>();
    var packageSourceProvider = CreatePackageSourceProvider(settingsMock.Object, providerDefaultSources: new PackageSource[0]);

    // Act
    var loadedSources = packageSourceProvider.LoadPackageSources();

    // Assert: nothing should be returned.
    Assert.False(loadedSources.Any());
}
// With empty user settings, provider defaults are returned as-is, in order.
[Fact]
public void LoadPackageSourcesReturnsDefaultSourcesIfSpecified()
{
    // Arrange
    var settingsMock = new Mock<ISettings>();
    var defaultSources = new[] { new PackageSource("A"), new PackageSource("B") };
    var packageSourceProvider = CreatePackageSourceProvider(settingsMock.Object, providerDefaultSources: defaultSources);

    // Act
    var loadedSources = packageSourceProvider.LoadPackageSources().ToList();

    // Assert: both defaults come back, preserving their order.
    Assert.Equal(2, loadedSources.Count);
    Assert.Equal("A", loadedSources[0].Source);
    Assert.Equal("B", loadedSources[1].Source);
}
// A migrated source that is also a provider default must stay enabled, while a
// brand-new default added alongside existing user sources is added disabled.
[Fact]
public void LoadPackageSourcesWhereAMigratedSourceIsAlsoADefaultSource()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("AOld", "urlA", false), new SettingValue("userDefinedSource", "userDefinedSourceUrl", false) });
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    var defaultPackageSourceA = new PackageSource("urlA", "ANew");
    var defaultPackageSourceB = new PackageSource("urlB", "B");
    // "AOld" is both migrated (to ANew) and present in the provider defaults.
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: new[] { defaultPackageSourceA, defaultPackageSourceB },
        migratePackageSources: new Dictionary<PackageSource, PackageSource>
        {
            { new PackageSource("urlA", "AOld"), defaultPackageSourceA },
        });
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    // Package Source AOld will be migrated to ANew. B will simply get added
    // Since default source B got added when there are other package sources it will be disabled
    // However, package source ANew must stay enabled
    // PackageSource userDefinedSource is a user package source and is untouched
    Assert.Equal(3, values.Count);
    Assert.Equal("urlA", values[0].Source);
    Assert.Equal("ANew", values[0].Name);
    Assert.True(values[0].IsEnabled);
    Assert.Equal("userDefinedSourceUrl", values[1].Source);
    Assert.Equal("userDefinedSource", values[1].Name);
    Assert.True(values[1].IsEnabled);
    Assert.Equal("urlB", values[2].Source);
    Assert.Equal("B", values[2].Name);
    Assert.False(values[2].IsEnabled);
}
// Round-trips Load -> Save: saving the loaded set must delete and rewrite the
// same three sources and write an empty disabled list. The strict mock fails
// the test on any ISettings call that is not explicitly set up here.
[Fact]
public void CallSaveMethodAndLoadMethodShouldReturnTheSamePackageSet()
{
    // Arrange
    var expectedSources = new[] { new PackageSource("one", "one"), new PackageSource("two", "two"), new PackageSource("three", "three") };
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "one", false),
            new SettingValue("two", "two", false),
            new SettingValue("three", "three", false)
        })
        .Verifiable();
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    // SavePackageSources rewrites all three sections from scratch.
    settings.Setup(s => s.DeleteSection("packageSources")).Returns(true).Verifiable();
    settings.Setup(s => s.DeleteSection("disabledPackageSources")).Returns(true).Verifiable();
    settings.Setup(s => s.DeleteSection("packageSourceCredentials")).Returns(true).Verifiable();
    settings.Setup(s => s.SetValues("packageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // Saved values must match exactly what was loaded.
            Assert.Equal(3, values.Count);
            Assert.Equal("one", values[0].Key);
            Assert.Equal("one", values[0].Value);
            Assert.Equal("two", values[1].Key);
            Assert.Equal("two", values[1].Value);
            Assert.Equal("three", values[2].Key);
            Assert.Equal("three", values[2].Value);
        })
        .Verifiable();
    settings.Setup(s => s.SetValues("disabledPackageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // No source was disabled, so the disabled section is written empty.
            Assert.Empty(values);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var sources = provider.LoadPackageSources().ToList();
    provider.SavePackageSources(sources);
    // Assert
    settings.Verify();
    Assert.Equal(3, sources.Count);
    for (int i = 0; i < sources.Count; i++)
    {
        AssertPackageSource(expectedSources[i], sources[i].Name, sources[i].Source, true);
    }
}
// Machine-wide sources are never written to the user's "packageSources"
// section on save; a disabled machine-wide source is recorded only in
// "disabledPackageSources". All save-side assertions live in the callbacks.
[Fact]
public void WithMachineWideSources()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "one", true),    // isMachineWide: true
            new SettingValue("two", "two", false),
            new SettingValue("three", "three", false)
        });
    settings.Setup(s => s.SetValues("packageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // verifies that only sources "two" and "three" are passed.
            // the machine wide source "one" is not.
            Assert.Equal(2, values.Count);
            Assert.Equal("two", values[0].Key);
            Assert.Equal("two", values[0].Value);
            Assert.Equal("three", values[1].Key);
            Assert.Equal("three", values[1].Value);
        })
        .Verifiable();
    settings.Setup(s => s.SetValues("disabledPackageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // verifies that the machine wide source "one" is passed here
            // since it is disabled.
            Assert.Equal(1, values.Count);
            Assert.Equal("one", values[0].Key);
            Assert.Equal("true", values[0].Value);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var sources = provider.LoadPackageSources().ToList();
    // disable the machine wide source "one", and save the result in provider.
    // (machine-wide sources are ordered after user sources, hence index 2)
    Assert.Equal("one", sources[2].Name);
    sources[2].IsEnabled = false;
    provider.SavePackageSources(sources);
    // Assert
    // all assertions are done inside Callback()'s
}
// Loads a mix of machine-wide and user sources; user sources come first and
// the machine-wide source ("one") is returned last with its flag set.
[Fact]
public void LoadPackageSourcesReturnCorrectDataFromSettings()
{
    // Arrange
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", true),    // isMachineWide: true
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        })
        .Verifiable();
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[0], "two", "twosource", true);
    AssertPackageSource(values[1], "three", "threesource", true);
    // Last argument asserts the machine-wide flag on "one".
    AssertPackageSource(values[2], "one", "onesource", true, true);
}
// A source listed under "disabledPackageSources" is still loaded, but with
// IsEnabled == false; source order is preserved.
[Fact]
public void LoadPackageSourcesReturnCorrectDataFromSettingsWhenSomePackageSourceIsDisabled()
{
    // Arrange
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    // "two" is marked disabled in settings.
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new[] { new KeyValuePair<string, string>("two", "true") });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[0], "one", "onesource", true);
    AssertPackageSource(values[1], "two", "twosource", false);
    AssertPackageSource(values[2], "three", "threesource", true);
}
/// <summary>
/// The following test tests case 1 listed in PackageSourceProvider.SetDefaultPackageSources(...)
/// Case 1. Default Package Source is already present matching both feed source and the feed name
/// </summary>
[Fact]
public void LoadPackageSourcesWhereALoadedSourceMatchesDefaultSourceInNameAndSource()
{
    // Arrange
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false)});
    // Disable package source one
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new[] { new KeyValuePair<string, string>("one", "true") });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    // Configuration defaults declare the exact same name/source pair as user settings.
    string configurationDefaultsFileContent = @"
<configuration>
<packageSources>
<add key='one' value='onesource' />
</packageSources>
</configuration>";
    var mockFileSystem = new MockFileSystem();
    var configurationDefaultsPath = "NuGetDefaults.config";
    mockFileSystem.AddFile(configurationDefaultsPath, configurationDefaultsFileContent);
    ConfigurationDefaults configurationDefaults = new ConfigurationDefaults(mockFileSystem, configurationDefaultsPath);
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null, migratePackageSources: null, configurationDefaultSources: configurationDefaults.DefaultPackageSources);
    // Act
    var values = provider.LoadPackageSources();
    // Assert
    Assert.Equal(1, values.Count());
    // Package source 'one' represents case 1. No real change takes place. IsOfficial will become true though. IsEnabled remains false as it is ISettings
    AssertPackageSource(values.First(), "one", "onesource", false, false, true);
}
/// <summary>
/// The following test tests case 2 listed in PackageSourceProvider.SetDefaultPackageSources(...)
/// Case 2. Default Package Source is already present matching feed source but with a different feed name. DO NOTHING
/// </summary>
[Fact]
public void LoadPackageSourcesWhereALoadedSourceMatchesDefaultSourceInSourceButNotInName()
{
    // Arrange
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("two", "twosource", false) });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    // The configuration default shares the source URL ("twosource") but uses a
    // different feed name ("twodefault"), and is itself disabled in defaults.
    string configurationDefaultsFileContent = @"
<configuration>
<packageSources>
<add key='twodefault' value='twosource' />
</packageSources>
<disabledPackageSources>
<add key='twodefault' value='true' />
</disabledPackageSources>
</configuration>";
    var mockFileSystem = new MockFileSystem();
    var configurationDefaultsPath = "NuGetDefaults.config";
    mockFileSystem.AddFile(configurationDefaultsPath, configurationDefaultsFileContent);
    ConfigurationDefaults configurationDefaults = new ConfigurationDefaults(mockFileSystem, configurationDefaultsPath);
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null, migratePackageSources: null, configurationDefaultSources: configurationDefaults.DefaultPackageSources);
    // Act
    var values = provider.LoadPackageSources();
    // Assert
    Assert.Equal(1, values.Count());
    // Package source 'two' represents case 2. No Change effected. The existing feed will not be official
    AssertPackageSource(values.First(), "two", "twosource", true, false, false);
}
/// <summary>
/// The following test tests case 3 listed in PackageSourceProvider.SetDefaultPackageSources(...)
/// Case 3. Default Package Source is not present, but there is another feed source with the same feed name. Override that feed entirely
/// </summary>
[Fact]
public void LoadPackageSourcesWhereALoadedSourceMatchesDefaultSourceInNameButNotInSource()
{
    // Arrange
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("three", "threesource", false) });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    // Same feed name ("three") but a different source URL in the defaults file.
    string configurationDefaultsFileContent = @"
<configuration>
<packageSources>
<add key='three' value='threedefaultsource' />
</packageSources>
</configuration>";
    var mockFileSystem = new MockFileSystem();
    var configurationDefaultsPath = "NuGetDefaults.config";
    mockFileSystem.AddFile(configurationDefaultsPath, configurationDefaultsFileContent);
    ConfigurationDefaults configurationDefaults = new ConfigurationDefaults(mockFileSystem, configurationDefaultsPath);
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null, migratePackageSources: null, configurationDefaultSources: configurationDefaults.DefaultPackageSources);
    // Act
    var values = provider.LoadPackageSources();
    // Assert
    Assert.Equal(1, values.Count());
    // Package source 'three' represents case 3. Completely overwritten. Noticeably, Feed Source will match Configuration Default settings
    AssertPackageSource(values.First(), "three", "threedefaultsource", true, false, true);
}
/// <summary>
/// The following test tests case 3 listed in PackageSourceProvider.SetDefaultPackageSources(...)
/// Case 4. Default Package Source is not present, simply, add it
/// </summary>
[Fact]
public void LoadPackageSourcesWhereNoLoadedSourceMatchesADefaultSource()
{
    // Arrange
    var settings = new Mock<ISettings>(MockBehavior.Strict);
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new List<SettingValue>());
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    // Default source "four" has no counterpart in user settings at all.
    string configurationDefaultsFileContent = @"
<configuration>
<packageSources>
<add key='four' value='foursource' />
</packageSources>
</configuration>";
    var mockFileSystem = new MockFileSystem();
    var configurationDefaultsPath = "NuGetDefaults.config";
    mockFileSystem.AddFile(configurationDefaultsPath, configurationDefaultsFileContent);
    ConfigurationDefaults configurationDefaults = new ConfigurationDefaults(mockFileSystem, configurationDefaultsPath);
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null, migratePackageSources: null, configurationDefaultSources: configurationDefaults.DefaultPackageSources);
    // Act
    var values = provider.LoadPackageSources();
    // Assert
    Assert.Equal(1, values.Count());
    // Package source 'four' represents case 4. Simply Added to the list increasing the count by 1. ISettings only has 3 package sources. But, LoadPackageSources returns 4
    AssertPackageSource(values.First(), "four", "foursource", true, false, true);
}
// Configuration-file defaults take precedence: when NuGetDefaults.config lists
// sources, the provider-level defaults must be ignored entirely.
[Fact]
public void LoadPackageSourcesDoesNotReturnProviderDefaultsWhenConfigurationDefaultPackageSourcesIsNotEmpty()
{
    // Arrange
    var settings = new Mock<ISettings>().Object;
    string configurationDefaultsFileContent = @"
<configuration>
<packageSources>
<add key='configurationDefaultOne' value='configurationDefaultOneSource' />
<add key='configurationDefaultTwo' value='configurationDefaultTwoSource' />
</packageSources>
</configuration>";
    var mockFileSystem = new MockFileSystem();
    var configurationDefaultsPath = "NuGetDefaults.config";
    mockFileSystem.AddFile(configurationDefaultsPath, configurationDefaultsFileContent);
    ConfigurationDefaults configurationDefaults = new ConfigurationDefaults(mockFileSystem, configurationDefaultsPath);
    var provider = CreatePackageSourceProvider(settings,
        providerDefaultSources: new[] { new PackageSource("providerDefaultA"), new PackageSource("providerDefaultB") },
        migratePackageSources: null,
        configurationDefaultSources: configurationDefaults.DefaultPackageSources);
    // Act
    var values = provider.LoadPackageSources();
    // Assert
    // Only the two configuration defaults are returned; providerDefaultA/B are not.
    Assert.Equal(2, values.Count());
    Assert.Equal("configurationDefaultOneSource", values.First().Source);
    Assert.Equal("configurationDefaultTwoSource", values.Last().Source);
}
// A configuration default that was migrated away (old v2 feed -> nuget.org)
// is still re-added from the defaults file, so both entries appear.
[Fact]
public void LoadPackageSourcesAddsAConfigurationDefaultBackEvenAfterMigration()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new List<SettingValue>() { new SettingValue("NuGet official package source", "https://nuget.org/api/v2", false) });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    // The defaults file still lists the old official feed.
    string configurationDefaultsFileContent = @"
<configuration>
<packageSources>
<add key='NuGet official package source' value='https://nuget.org/api/v2' />
</packageSources>
</configuration>";
    var mockFileSystem = new MockFileSystem();
    var configurationDefaultsPath = "NuGetDefaults.config";
    mockFileSystem.AddFile(configurationDefaultsPath, configurationDefaultsFileContent);
    ConfigurationDefaults configurationDefaults = new ConfigurationDefaults(mockFileSystem, configurationDefaultsPath);
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null,
        migratePackageSources: new Dictionary<PackageSource, PackageSource>
        {
            { new PackageSource("https://nuget.org/api/v2", "NuGet official package source"), new PackageSource("https://www.nuget.org/api/v2", "nuget.org") }
        },
        configurationDefaultSources: configurationDefaults.DefaultPackageSources);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    // Migrated source first, then the configuration default added back.
    Assert.Equal(2, values.Count);
    Assert.Equal("nuget.org", values[0].Name);
    Assert.Equal("https://www.nuget.org/api/v2", values[0].Source);
    Assert.Equal("NuGet official package source", values[1].Name);
    Assert.Equal("https://nuget.org/api/v2", values[1].Source);
}
// When the migration target already exists in settings, migrating must not
// produce a duplicate — both entries collapse into a single nuget.org feed.
[Fact]
public void LoadPackageSourcesDoesNotDuplicateFeedsOnMigration()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new List<SettingValue>() { new SettingValue("NuGet official package source", "https://nuget.org/api/v2", false),
            new SettingValue("nuget.org", "https://www.nuget.org/api/v2", false) });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null,
        migratePackageSources: new Dictionary<PackageSource, PackageSource>
        {
            { new PackageSource("https://nuget.org/api/v2", "NuGet official package source"), new PackageSource("https://www.nuget.org/api/v2", "nuget.org") }
        });
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(1, values.Count);
    Assert.Equal("nuget.org", values[0].Name);
    Assert.Equal("https://www.nuget.org/api/v2", values[0].Source);
}
// Same de-duplication scenario as above, and additionally verifies that the
// collapsed single source is what gets persisted back to settings.
[Fact]
public void LoadPackageSourcesDoesNotDuplicateFeedsOnMigrationAndSavesIt()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new List<SettingValue>() { new SettingValue("NuGet official package source", "https://nuget.org/api/v2", false),
            new SettingValue("nuget.org", "https://www.nuget.org/api/v2", false) });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", It.IsAny<string>())).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.GetValues("disabledPackageSources")).Returns(new KeyValuePair<string, string>[0]);
    settings.Setup(s => s.DeleteSection("packageSources")).Returns(true).Verifiable();
    settings.Setup(s => s.DeleteSection("disabledPackageSources")).Returns(true).Verifiable();
    settings.Setup(s => s.DeleteSection("packageSourceCredentials")).Returns(true).Verifiable();
    settings.Setup(s => s.SetValues("packageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> valuePairs) =>
        {
            // Only the single de-duplicated source may be written back.
            Assert.Equal(1, valuePairs.Count);
            Assert.Equal("nuget.org", valuePairs[0].Key);
            Assert.Equal("https://www.nuget.org/api/v2", valuePairs[0].Value);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(settings.Object, providerDefaultSources: null,
        migratePackageSources: new Dictionary<PackageSource, PackageSource>
        {
            { new PackageSource("https://nuget.org/api/v2", "NuGet official package source"), new PackageSource("https://www.nuget.org/api/v2", "nuget.org") }
        });
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(1, values.Count);
    Assert.Equal("nuget.org", values[0].Name);
    Assert.Equal("https://www.nuget.org/api/v2", values[0].Source);
    settings.Verify();
}
// Disabling a source must write <add key="A" value="true"> under the
// "disabledPackageSources" section — and nothing else (strict mock).
[Fact]
public void DisablePackageSourceAddEntryToSettings()
{
    // Arrange
    var settingsMock = new Mock<ISettings>(MockBehavior.Strict);
    settingsMock.Setup(s => s.SetValue("disabledPackageSources", "A", "true")).Verifiable();
    var packageSourceProvider = CreatePackageSourceProvider(settingsMock.Object);

    // Act
    packageSourceProvider.DisablePackageSource(new PackageSource("source", "A"));

    // Assert
    settingsMock.Verify();
}
// Any non-empty value under "disabledPackageSources" marks the source disabled.
[Fact]
public void IsPackageSourceEnabledReturnsFalseIfTheSourceIsDisabled()
{
    // Arrange
    var settingsMock = new Mock<ISettings>(MockBehavior.Strict);
    settingsMock.Setup(s => s.GetValue("disabledPackageSources", "A")).Returns("sdfds");
    var packageSourceProvider = CreatePackageSourceProvider(settingsMock.Object);

    // Act
    var enabled = packageSourceProvider.IsPackageSourceEnabled(new PackageSource("source", "A"));

    // Assert
    Assert.False(enabled);
}
// A null or empty disabled-entry value means the source is still enabled.
[Theory]
[InlineData((string)null)]
[InlineData("")]
public void IsPackageSourceEnabledReturnsTrueIfTheSourceIsNotDisabled(string returnValue)
{
    // Arrange
    var settingsMock = new Mock<ISettings>(MockBehavior.Strict);
    settingsMock.Setup(s => s.GetValue("disabledPackageSources", "A")).Returns(returnValue);
    var packageSourceProvider = CreatePackageSourceProvider(settingsMock.Object);

    // Act
    var enabled = packageSourceProvider.IsPackageSourceEnabled(new PackageSource("source", "A"));

    // Assert
    Assert.True(enabled);
}
// A credential pair with a null/empty user name or password is incomplete and
// must be ignored — the source loads with no credentials attached.
[Theory]
[InlineData(new object[] { null, "abcd" })]
[InlineData(new object[] { "", "abcd" })]
[InlineData(new object[] { "abcd", null })]
[InlineData(new object[] { "abcd", "" })]
public void LoadPackageSourcesIgnoresInvalidCredentialPairsFromSettings(string userName, string password)
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", "two"))
        .Returns(new [] { new KeyValuePair<string, string>("Username", userName), new KeyValuePair<string, string>("Password", password) });
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Null(values[1].UserName);
    Assert.Null(values[1].Password);
}
// An encrypted "Password" entry is decrypted on load; IsPasswordClearText
// stays false for this storage format.
[Fact]
public void LoadPackageSourcesReadsCredentialPairsFromSettings()
{
    // Arrange
    string encryptedPassword = EncryptionUtility.EncryptString("topsecret");
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", "two"))
        .Returns(new[] { new KeyValuePair<string, string>("Username", "user1"), new KeyValuePair<string, string>("Password", encryptedPassword) });
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Equal("user1", values[1].UserName);
    // The stored ciphertext round-trips back to the original plaintext.
    Assert.Equal("topsecret", values[1].Password);
    Assert.False(values[1].IsPasswordClearText);
}
// A "ClearTextPassword" entry is used verbatim and flags the credential as
// clear text (IsPasswordClearText == true).
[Fact]
public void LoadPackageSourcesReadsClearTextCredentialPairsFromSettings()
{
    // Arrange
    const string clearTextPassword = "topsecret";
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", "two"))
        .Returns(new[] { new KeyValuePair<string, string>("Username", "user1"), new KeyValuePair<string, string>("ClearTextPassword", clearTextPassword) });
    var provider = CreatePackageSourceProvider(settings.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Equal("user1", values[1].UserName);
    Assert.True(values[1].IsPasswordClearText);
    Assert.Equal("topsecret", values[1].Password);
}
// Credentials supplied via NuGetPackageSourceCredentials_<sourceName> are
// parsed case-insensitively and with surrounding whitespace tolerated.
[Theory]
[InlineData("Username=john;Password=johnspassword")]
[InlineData("uSerName=john;PASSWOrD=johnspassword")]
[InlineData(" Username=john; Password=johnspassword ")]
public void LoadPackageSourcesLoadsCredentialPairsFromEnvironmentVariables(string rawCredentials)
{
    // Arrange
    const string userName = "john";
    const string password = "johnspassword";
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    var environment = new Mock<IEnvironmentVariableReader>();
    environment.Setup(e => e.GetEnvironmentVariable("NuGetPackageSourceCredentials_two"))
        .Returns(rawCredentials);
    var provider = CreatePackageSourceProvider(settings.Object, environment:environment.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Equal(userName, values[1].UserName);
    Assert.Equal(password, values[1].Password);
}
// Unparseable environment credentials are silently ignored — the source loads
// with no credentials rather than failing.
[Theory]
[InlineData("uername=john;Password=johnspassword")]
[InlineData(".Username=john;Password=johnspasswordf")]
[InlineData("What is this I don't even")]
public void LoadPackageSourcesIgnoresMalformedCredentialPairsFromEnvironmentVariables(string rawCredentials)
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    var environment = new Mock<IEnvironmentVariableReader>();
    environment.Setup(e => e.GetEnvironmentVariable("NuGetPackageSourceCredentials_two"))
        .Returns(rawCredentials);
    var provider = CreatePackageSourceProvider(settings.Object, environment: environment.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Null(values[1].UserName);
    Assert.Null(values[1].Password);
}
// When both settings and the environment supply credentials for the same
// source, the environment values win.
[Fact]
public void LoadPackageSourcesEnvironmentCredentialsTakePrecedenceOverSettingsCredentials()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", "two"))
        .Returns(new[] { new KeyValuePair<string, string>("Username", "settinguser"), new KeyValuePair<string, string>("ClearTextPassword", "settingpassword") });
    var environment = new Mock<IEnvironmentVariableReader>();
    environment.Setup(e => e.GetEnvironmentVariable("NuGetPackageSourceCredentials_two"))
        .Returns("Username=envirouser;Password=enviropassword");
    var provider = CreatePackageSourceProvider(settings.Object, environment: environment.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Equal("envirouser", values[1].UserName);
    Assert.Equal("enviropassword", values[1].Password);
}
// If the environment variable exists but cannot be parsed, the provider falls
// back to the credentials stored in settings instead of dropping them.
[Fact]
public void LoadPackageSourcesWhenEnvironmentCredentialsAreMalformedFallsbackToSettingsCredentials()
{
    // Arrange
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.GetSettingValues("packageSources", true))
        .Returns(new[] { new SettingValue("one", "onesource", false),
            new SettingValue("two", "twosource", false),
            new SettingValue("three", "threesource", false)
        });
    settings.Setup(s => s.GetNestedValues("packageSourceCredentials", "two"))
        .Returns(new[] { new KeyValuePair<string, string>("Username", "settinguser"), new KeyValuePair<string, string>("ClearTextPassword", "settingpassword") });
    var environment = new Mock<IEnvironmentVariableReader>();
    environment.Setup(e => e.GetEnvironmentVariable("NuGetPackageSourceCredentials_two"))
        .Returns("I for one don't understand environment variables");
    var provider = CreatePackageSourceProvider(settings.Object, environment: environment.Object);
    // Act
    var values = provider.LoadPackageSources().ToList();
    // Assert
    Assert.Equal(3, values.Count);
    AssertPackageSource(values[1], "two", "twosource", true);
    Assert.Equal("settinguser", values[1].UserName);
    Assert.Equal("settingpassword", values[1].Password);
}
// Test that when there are duplicate sources, i.e. sources with the same name,
// then the source specified in one Settings with the highest priority is used.
[Fact]
public void DuplicatePackageSources()
{
    // Arrange: the name "one" appears twice; the later entry should win.
    var mockSettings = new Mock<ISettings>();
    var duplicatedValues = new[]
    {
        new SettingValue("one", "onesource", false),
        new SettingValue("two", "twosource", false),
        new SettingValue("one", "threesource", false)
    };
    mockSettings.Setup(s => s.GetSettingValues("packageSources", true)).Returns(duplicatedValues);
    var provider = CreatePackageSourceProvider(mockSettings.Object);

    // Act
    var loadedSources = provider.LoadPackageSources().ToList();

    // Assert: two distinct sources remain and "one" kept the later value.
    Assert.Equal(2, loadedSources.Count);
    AssertPackageSource(loadedSources[0], "two", "twosource", true);
    AssertPackageSource(loadedSources[1], "one", "threesource", true);
}
[Fact]
public void SavePackageSourcesSaveCorrectDataToSettings()
{
    // Arrange: strict mock, so any ISettings call without a setup fails the test.
    var sources = new[] { new PackageSource("one"), new PackageSource("two"), new PackageSource("three") };
    var mockSettings = new Mock<ISettings>(MockBehavior.Strict);
    mockSettings.Setup(s => s.DeleteSection("packageSources")).Returns(true).Verifiable();
    mockSettings.Setup(s => s.DeleteSection("disabledPackageSources")).Returns(true).Verifiable();
    mockSettings.Setup(s => s.DeleteSection("packageSourceCredentials")).Returns(true).Verifiable();
    mockSettings.Setup(s => s.SetValues("packageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // Each source is written as a name/source pair, in the original order.
            Assert.Equal(3, values.Count);
            var expectedNames = new[] { "one", "two", "three" };
            for (int i = 0; i < expectedNames.Length; i++)
            {
                Assert.Equal(expectedNames[i], values[i].Key);
                Assert.Equal(expectedNames[i], values[i].Value);
            }
        })
        .Verifiable();
    mockSettings.Setup(s => s.SetValues("disabledPackageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // No source is disabled, so that section is written empty.
            Assert.Empty(values);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(mockSettings.Object);

    // Act
    provider.SavePackageSources(sources);

    // Assert
    mockSettings.Verify();
}
[Fact]
public void SavePackageSourcesSaveCorrectDataToSettingsWhenSomePackageSourceIsDisabled()
{
    // Arrange: only "two" is disabled, so only "two" should be written to
    // the disabledPackageSources section.
    var sources = new[] { new PackageSource("one"), new PackageSource("two", "two", isEnabled: false), new PackageSource("three") };
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.DeleteSection("disabledPackageSources")).Returns(true).Verifiable();
    settings.Setup(s => s.SetValues("disabledPackageSources", It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, IList<KeyValuePair<string, string>> values) =>
        {
            // Assert.Single gives a clearer failure message than comparing Count.
            Assert.Single(values);
            Assert.Equal("two", values[0].Key);
            // The persisted flag is compared case-insensitively ("true"/"True").
            Assert.Equal("true", values[0].Value, StringComparer.OrdinalIgnoreCase);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(settings.Object);

    // Act
    provider.SavePackageSources(sources);

    // Assert
    settings.Verify();
}
[Fact]
public void SavePackageSourcesSavesCredentials()
{
    // Arrange: a source with an encrypted password. The stored value must be
    // DPAPI-protected with "NuGet" entropy and round-trip back to "password".
    var entropyBytes = Encoding.UTF8.GetBytes("NuGet");
    var sources = new[] { new PackageSource("one"),
            new PackageSource("twosource", "twoname") { UserName = "User", Password = "password" },
            new PackageSource("three")
        };
    var settings = new Mock<ISettings>();
    settings.Setup(s => s.DeleteSection("packageSources")).Returns(true).Verifiable();
    settings.Setup(s => s.DeleteSection("packageSourceCredentials")).Returns(true).Verifiable();
    settings.Setup(s => s.SetNestedValues("packageSourceCredentials", It.IsAny<string>(), It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, string key, IList<KeyValuePair<string, string>> values) =>
        {
            Assert.Equal("twoname", key);
            Assert.Equal(2, values.Count);
            AssertKVP(new KeyValuePair<string, string>("Username", "User"), values[0]);
            // Fixed: the original asserted the "Password" key twice (redundant
            // duplicate). Assert it once, then verify the value decrypts.
            Assert.Equal("Password", values[1].Key);
            string decryptedPassword = Encoding.UTF8.GetString(
                ProtectedData.Unprotect(Convert.FromBase64String(values[1].Value), entropyBytes, DataProtectionScope.CurrentUser));
            Assert.Equal("password", decryptedPassword);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(settings.Object);

    // Act
    provider.SavePackageSources(sources);

    // Assert
    settings.Verify();
}
[Fact]
public void SavePackageSourcesSavesClearTextCredentials()
{
    // Arrange: a source whose password is flagged as clear text, so it must be
    // stored under the "ClearTextPassword" key without encryption.
    var sources = new[] { new PackageSource("one"),
            new PackageSource("twosource", "twoname") { UserName = "User", Password = "password", IsPasswordClearText = true},
            new PackageSource("three")
        };
    var mockSettings = new Mock<ISettings>();
    mockSettings.Setup(s => s.DeleteSection("packageSources")).Returns(true).Verifiable();
    mockSettings.Setup(s => s.DeleteSection("packageSourceCredentials")).Returns(true).Verifiable();
    mockSettings.Setup(s => s.SetNestedValues("packageSourceCredentials", It.IsAny<string>(), It.IsAny<IList<KeyValuePair<string, string>>>()))
        .Callback((string section, string key, IList<KeyValuePair<string, string>> values) =>
        {
            Assert.Equal("twoname", key);
            Assert.Equal(2, values.Count);
            AssertKVP(new KeyValuePair<string, string>("Username", "User"), values[0]);
            AssertKVP(new KeyValuePair<string, string>("ClearTextPassword", "password"), values[1]);
        })
        .Verifiable();
    var provider = CreatePackageSourceProvider(mockSettings.Object);

    // Act
    provider.SavePackageSources(sources);

    // Assert
    mockSettings.Verify();
}
[Fact]
public void GetAggregateReturnsAggregateRepositoryForAllSources()
{
    // Arrange: two enabled sources, each mapped to its own repository.
    var mockRepositoryA = new Mock<IPackageRepository>();
    var mockRepositoryB = new Mock<IPackageRepository>();
    var mockFactory = new Mock<IPackageRepositoryFactory>();
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("A")))).Returns(mockRepositoryA.Object);
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("B")))).Returns(mockRepositoryB.Object);
    var mockProvider = new Mock<IPackageSourceProvider>();
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(new[] { new PackageSource("A"), new PackageSource("B") });

    // Act
    var aggregate = (AggregateRepository)mockProvider.Object.CreateAggregateRepository(mockFactory.Object, ignoreFailingRepositories: false);

    // Assert: both repositories are present, in source order.
    Assert.Equal(2, aggregate.Repositories.Count());
    Assert.Equal(mockRepositoryA.Object, aggregate.Repositories.First());
    Assert.Equal(mockRepositoryB.Object, aggregate.Repositories.Last());
}
[Fact]
public void GetAggregateSkipsInvalidSources()
{
    // Arrange: source "B" throws on repository creation; with
    // ignoreFailingRepositories it should simply be skipped.
    var mockRepositoryA = new Mock<IPackageRepository>();
    var mockRepositoryC = new Mock<IPackageRepository>();
    var mockFactory = new Mock<IPackageRepositoryFactory>();
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("A")))).Returns(mockRepositoryA.Object);
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("B")))).Throws(new InvalidOperationException());
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("C")))).Returns(mockRepositoryC.Object);
    var mockProvider = new Mock<IPackageSourceProvider>();
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(new[] { new PackageSource("A"), new PackageSource("B"), new PackageSource("C") });

    // Act
    var aggregate = (AggregateRepository)mockProvider.Object.CreateAggregateRepository(mockFactory.Object, ignoreFailingRepositories: true);

    // Assert: only the two working repositories are included.
    Assert.Equal(2, aggregate.Repositories.Count());
    Assert.Equal(mockRepositoryA.Object, aggregate.Repositories.First());
    Assert.Equal(mockRepositoryC.Object, aggregate.Repositories.Last());
}
[Fact]
public void GetAggregateSkipsDisabledSources()
{
    // Arrange: B and C are disabled; C would even throw if created, which
    // proves disabled sources are never handed to the factory.
    var repositoryA = new Mock<IPackageRepository>();
    var repositoryB = new Mock<IPackageRepository>();
    var factory = new Mock<IPackageRepositoryFactory>();
    factory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("A")))).Returns(repositoryA.Object);
    factory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("B")))).Returns(repositoryB.Object);
    factory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("C")))).Throws(new Exception());
    var sources = new Mock<IPackageSourceProvider>();
    sources.Setup(c => c.LoadPackageSources()).Returns(new[] {
            new PackageSource("A"), new PackageSource("B", "B", isEnabled: false), new PackageSource("C", "C", isEnabled: false) });

    // Act
    var repo = (AggregateRepository)sources.Object.CreateAggregateRepository(factory.Object, ignoreFailingRepositories: false);

    // Assert: only the enabled source produced a repository.
    // Assert.Single reports the actual contents on failure, unlike Count() == 1.
    Assert.Single(repo.Repositories);
    Assert.Equal(repositoryA.Object, repo.Repositories.First());
}
[Fact]
public void GetAggregateHandlesInvalidUriSources()
{
    // Arrange: none of these source strings is a usable repository location.
    var factory = PackageRepositoryFactory.Default;
    var sources = new Mock<IPackageSourceProvider>();
    sources.Setup(c => c.LoadPackageSources()).Returns(new[] {
            new PackageSource("Bad 1"),
            new PackageSource(@"x:sjdkfjhsdjhfgjdsgjglhjk"),
            new PackageSource(@"http:\\//")
        });

    // Act
    var repo = (AggregateRepository)sources.Object.CreateAggregateRepository(factory, ignoreFailingRepositories: true);

    // Assert: all invalid sources were skipped, leaving no repositories.
    // Assert.Empty is the idiomatic form of Assert.False(...Any()).
    Assert.Empty(repo.Repositories);
}
[Fact]
public void GetAggregateSetsIgnoreInvalidRepositoryProperty()
{
    // Arrange: no sources are needed — only flag propagation is under test.
    var mockFactory = new Mock<IPackageRepositoryFactory>();
    const bool ignoreFailing = true;
    var mockProvider = new Mock<IPackageSourceProvider>();
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(Enumerable.Empty<PackageSource>());

    // Act
    var aggregate = (AggregateRepository)mockProvider.Object.CreateAggregateRepository(mockFactory.Object, ignoreFailingRepositories: ignoreFailing);

    // Assert: the flag is carried through to the aggregate repository.
    Assert.True(aggregate.IgnoreFailingRepositories);
}
[Fact]
public void GetAggregateWithInvalidSourcesThrows()
{
    // Arrange: "B" fails to create; without ignoreFailingRepositories the
    // failure must propagate to the caller.
    var mockRepositoryA = new Mock<IPackageRepository>();
    var mockRepositoryC = new Mock<IPackageRepository>();
    var mockFactory = new Mock<IPackageRepositoryFactory>();
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("A")))).Returns(mockRepositoryA.Object);
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("B")))).Throws(new InvalidOperationException());
    mockFactory.Setup(c => c.CreateRepository(It.Is<string>(a => a.Equals("C")))).Returns(mockRepositoryC.Object);
    var mockProvider = new Mock<IPackageSourceProvider>();
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(new[] { new PackageSource("A"), new PackageSource("B"), new PackageSource("C") });

    // Act and Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => mockProvider.Object.CreateAggregateRepository(mockFactory.Object, ignoreFailingRepositories: false));
}
[Fact]
public void ResolveSourceLooksUpNameAndSource()
{
    // Arrange
    var mockProvider = new Mock<IPackageSourceProvider>();
    var namedSource = new PackageSource("Source", "SourceName");
    var urlSource = new PackageSource("http://www.test.com", "Baz");
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(new[] { namedSource, urlSource });

    // Act: resolve by source URL, by name, and by the other source's name.
    var bySource = mockProvider.Object.ResolveSource("http://www.test.com");
    var byName = mockProvider.Object.ResolveSource("Baz");
    var byOtherName = mockProvider.Object.ResolveSource("SourceName");

    // Assert: each lookup resolves to the matching source string.
    Assert.Equal(urlSource.Source, bySource);
    Assert.Equal(urlSource.Source, byName);
    Assert.Equal(namedSource.Source, byOtherName);
}
[Fact]
public void ResolveSourceIgnoreDisabledSources()
{
    // Arrange: two of the three sources are disabled; lookups against them
    // should fall through and return the query string unchanged.
    var mockProvider = new Mock<IPackageSourceProvider>();
    var enabledSource = new PackageSource("Source", "SourceName");
    var disabledUrlSource = new PackageSource("http://www.test.com", "Baz", isEnabled: false);
    var disabledNamedSource = new PackageSource("http://www.bing.com", "Foo", isEnabled: false);
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(new[] { enabledSource, disabledUrlSource, disabledNamedSource });

    // Act
    var byDisabledUrl = mockProvider.Object.ResolveSource("http://www.test.com");
    var byDisabledName = mockProvider.Object.ResolveSource("Baz");
    var byOtherDisabledName = mockProvider.Object.ResolveSource("Foo");
    var byEnabledName = mockProvider.Object.ResolveSource("SourceName");

    // Assert: disabled sources are ignored (input echoed back); only the
    // enabled source resolves.
    Assert.Equal("http://www.test.com", byDisabledUrl);
    Assert.Equal("Baz", byDisabledName);
    Assert.Equal("Foo", byOtherDisabledName);
    Assert.Equal("Source", byEnabledName);
}
[Fact]
public void ResolveSourceReturnsOriginalValueIfNotFoundInSources()
{
    // Arrange
    var mockProvider = new Mock<IPackageSourceProvider>();
    var namedSource = new PackageSource("Source", "SourceName");
    var urlSource = new PackageSource("http://www.test.com", "Baz");
    mockProvider.Setup(c => c.LoadPackageSources()).Returns(new[] { namedSource, urlSource });
    var unknownSource = "http://www.does-not-exist.com";

    // Act
    var resolved = mockProvider.Object.ResolveSource(unknownSource);

    // Assert: an unmatched query is returned unchanged.
    Assert.Equal(unknownSource, resolved);
}
// Asserts that a loaded PackageSource has the expected name, source string,
// and flags. Uses Assert.Equal (instead of Assert.True on a comparison) so a
// failure reports the expected and actual values rather than just "False".
private void AssertPackageSource(PackageSource ps, string name, string source, bool isEnabled, bool isMachineWide = false, bool isOfficial = false)
{
    Assert.Equal(name, ps.Name);
    Assert.Equal(source, ps.Source);
    Assert.Equal(isEnabled, ps.IsEnabled);
    Assert.Equal(isMachineWide, ps.IsMachineWide);
    Assert.Equal(isOfficial, ps.IsOfficial);
}
// Builds a PackageSourceProvider for tests, substituting empty mocks for any
// collaborator the caller does not supply.
private IPackageSourceProvider CreatePackageSourceProvider(
    ISettings settings = null,
    IEnumerable<PackageSource> providerDefaultSources = null,
    IDictionary<PackageSource, PackageSource> migratePackageSources = null,
    IEnumerable<PackageSource> configurationDefaultSources = null,
    IEnvironmentVariableReader environment = null)
{
    if (settings == null)
    {
        settings = new Mock<ISettings>().Object;
    }
    if (environment == null)
    {
        environment = new Mock<IEnvironmentVariableReader>().Object;
    }
    return new PackageSourceProvider(settings, providerDefaultSources, migratePackageSources, configurationDefaultSources, environment);
}
// Asserts that two key/value pairs match, comparing key and value separately
// so a mismatch pinpoints which component differs.
private static void AssertKVP(KeyValuePair<string, string> expected, KeyValuePair<string, string> actual)
{
    string expectedKey = expected.Key;
    string expectedValue = expected.Value;
    Assert.Equal(expectedKey, actual.Key);
    Assert.Equal(expectedValue, actual.Value);
}
}
}
| |
namespace iControl {
using System.Xml.Serialization;
using System.Web.Services;
using System.ComponentModel;
using System.Web.Services.Protocols;
using System;
using System.Diagnostics;
/// <remarks/>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Web.Services.WebServiceBindingAttribute(Name="Networking.iSessionRemoteInterfaceV2Binding", Namespace="urn:iControl")]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(NetworkingUuid_128))]
public partial class NetworkingiSessionRemoteInterfaceV2 : iControlInterface {
// Initializes the proxy with a placeholder endpoint; assign Url to the
// address of the real iControl service before invoking any operation.
public NetworkingiSessionRemoteInterfaceV2() {
this.Url = "https://url_to_service";
}
// NOTE(review): wsdl-generated SOAP proxy code — prefer regenerating from the
// WSDL over hand edits. Each operation below follows the standard
// sync / BeginX / EndX asmx client pattern.
//=======================================================================
// Operations
//=======================================================================
//-----------------------------------------------------------------------
// create
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "create" SOAP operation with the given peers, addresses, enabled states, names and server-SSL profiles.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
public void create(
string [] peers,
string [] addresses,
CommonEnabledState [] enabled,
string [] names,
string [] serverssl
) {
this.Invoke("create", new object [] {
peers,
addresses,
enabled,
names,
serverssl});
}
// Asynchronous begin/end pair for "create"; complete with Endcreate.
public System.IAsyncResult Begincreate(string [] peers,string [] addresses,CommonEnabledState [] enabled,string [] names,string [] serverssl, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("create", new object[] {
peers,
addresses,
enabled,
names,
serverssl}, callback, asyncState);
}
public void Endcreate(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// delete_all_peers
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "delete_all_peers" SOAP operation (no arguments).</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
public void delete_all_peers(
) {
this.Invoke("delete_all_peers", new object [0]);
}
// Asynchronous begin/end pair for "delete_all_peers".
public System.IAsyncResult Begindelete_all_peers(System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("delete_all_peers", new object[0], callback, asyncState);
}
public void Enddelete_all_peers(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// delete_peer
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "delete_peer" SOAP operation for the given peers.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
public void delete_peer(
string [] peers
) {
this.Invoke("delete_peer", new object [] {
peers});
}
// Asynchronous begin/end pair for "delete_peer".
public System.IAsyncResult Begindelete_peer(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("delete_peer", new object[] {
peers}, callback, asyncState);
}
public void Enddelete_peer(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// get_UUID
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_UUID" SOAP operation and returns one UUID per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public NetworkingUuid_128 [] get_UUID(
string [] peers
) {
object [] results = this.Invoke("get_UUID", new object [] {
peers});
return ((NetworkingUuid_128 [])(results[0]));
}
// Asynchronous begin/end pair for "get_UUID".
public System.IAsyncResult Beginget_UUID(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_UUID", new object[] {
peers}, callback, asyncState);
}
public NetworkingUuid_128 [] Endget_UUID(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((NetworkingUuid_128 [])(results[0]));
}
//-----------------------------------------------------------------------
// get_address
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_address" SOAP operation and returns one address string per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_address(
string [] peers
) {
object [] results = this.Invoke("get_address", new object [] {
peers});
return ((string [])(results[0]));
}
// Asynchronous begin/end pair for "get_address".
public System.IAsyncResult Beginget_address(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_address", new object[] {
peers}, callback, asyncState);
}
public string [] Endget_address(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_behind_nat_state
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_behind_nat_state" SOAP operation and returns one enabled-state per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public CommonEnabledState [] get_behind_nat_state(
string [] peers
) {
object [] results = this.Invoke("get_behind_nat_state", new object [] {
peers});
return ((CommonEnabledState [])(results[0]));
}
// Asynchronous begin/end pair for "get_behind_nat_state".
public System.IAsyncResult Beginget_behind_nat_state(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_behind_nat_state", new object[] {
peers}, callback, asyncState);
}
public CommonEnabledState [] Endget_behind_nat_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((CommonEnabledState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_dedup_cache
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_dedup_cache" SOAP operation and returns one value per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public long [] get_dedup_cache(
string [] peers
) {
object [] results = this.Invoke("get_dedup_cache", new object [] {
peers});
return ((long [])(results[0]));
}
// Asynchronous begin/end pair for "get_dedup_cache".
public System.IAsyncResult Beginget_dedup_cache(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_dedup_cache", new object[] {
peers}, callback, asyncState);
}
public long [] Endget_dedup_cache(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((long [])(results[0]));
}
//-----------------------------------------------------------------------
// get_description
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_description" SOAP operation and returns one description per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_description(
string [] peers
) {
object [] results = this.Invoke("get_description", new object [] {
peers});
return ((string [])(results[0]));
}
// Asynchronous begin/end pair for "get_description".
public System.IAsyncResult Beginget_description(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_description", new object[] {
peers}, callback, asyncState);
}
public string [] Endget_description(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_internal_forwarding
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_internal_forwarding" SOAP operation and returns one forwarding entry per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public NetworkingiSessionRemoteInterfaceV2InternalForwarding [] get_internal_forwarding(
string [] peers
) {
object [] results = this.Invoke("get_internal_forwarding", new object [] {
peers});
return ((NetworkingiSessionRemoteInterfaceV2InternalForwarding [])(results[0]));
}
// Asynchronous begin/end pair for "get_internal_forwarding".
public System.IAsyncResult Beginget_internal_forwarding(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_internal_forwarding", new object[] {
peers}, callback, asyncState);
}
public NetworkingiSessionRemoteInterfaceV2InternalForwarding [] Endget_internal_forwarding(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((NetworkingiSessionRemoteInterfaceV2InternalForwarding [])(results[0]));
}
//-----------------------------------------------------------------------
// get_list
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_list" SOAP operation and returns the list of configured peers.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_list(
) {
object [] results = this.Invoke("get_list", new object [0]);
return ((string [])(results[0]));
}
// Asynchronous begin/end pair for "get_list".
public System.IAsyncResult Beginget_list(System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_list", new object[0], callback, asyncState);
}
public string [] Endget_list(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_management_address
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_management_address" SOAP operation and returns one address per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_management_address(
string [] peers
) {
object [] results = this.Invoke("get_management_address", new object [] {
peers});
return ((string [])(results[0]));
}
// Asynchronous begin/end pair for "get_management_address".
public System.IAsyncResult Beginget_management_address(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_management_address", new object[] {
peers}, callback, asyncState);
}
public string [] Endget_management_address(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_nat_config_status
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_nat_config_status" SOAP operation and returns one status string per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_nat_config_status(
string [] peers
) {
object [] results = this.Invoke("get_nat_config_status", new object [] {
peers});
return ((string [])(results[0]));
}
// Asynchronous begin/end pair for "get_nat_config_status".
public System.IAsyncResult Beginget_nat_config_status(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_nat_config_status", new object[] {
peers}, callback, asyncState);
}
public string [] Endget_nat_config_status(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_nat_source_address
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_nat_source_address" SOAP operation and returns one entry per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public NetworkingiSessionRemoteInterfaceV2NatSourceAddress [] get_nat_source_address(
string [] peers
) {
object [] results = this.Invoke("get_nat_source_address", new object [] {
peers});
return ((NetworkingiSessionRemoteInterfaceV2NatSourceAddress [])(results[0]));
}
// Asynchronous begin/end pair for "get_nat_source_address".
public System.IAsyncResult Beginget_nat_source_address(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_nat_source_address", new object[] {
peers}, callback, asyncState);
}
public NetworkingiSessionRemoteInterfaceV2NatSourceAddress [] Endget_nat_source_address(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((NetworkingiSessionRemoteInterfaceV2NatSourceAddress [])(results[0]));
}
//-----------------------------------------------------------------------
// get_origin
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_origin" SOAP operation and returns one origin state per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public NetworkingiSessionRemoteInterfaceV2OriginState [] get_origin(
string [] peers
) {
object [] results = this.Invoke("get_origin", new object [] {
peers});
return ((NetworkingiSessionRemoteInterfaceV2OriginState [])(results[0]));
}
// Asynchronous begin/end pair for "get_origin".
public System.IAsyncResult Beginget_origin(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_origin", new object[] {
peers}, callback, asyncState);
}
public NetworkingiSessionRemoteInterfaceV2OriginState [] Endget_origin(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((NetworkingiSessionRemoteInterfaceV2OriginState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_peer_enabled_state
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_peer_enabled_state" SOAP operation and returns one enabled-state per requested peer.</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public CommonEnabledState [] get_peer_enabled_state(
string [] peers
) {
object [] results = this.Invoke("get_peer_enabled_state", new object [] {
peers});
return ((CommonEnabledState [])(results[0]));
}
// Asynchronous begin/end pair for "get_peer_enabled_state".
public System.IAsyncResult Beginget_peer_enabled_state(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_peer_enabled_state", new object[] {
peers}, callback, asyncState);
}
public CommonEnabledState [] Endget_peer_enabled_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((CommonEnabledState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_peer_ip_list
//-----------------------------------------------------------------------
/// <summary>Invokes the remote "get_peer_ip_list" SOAP operation and returns, for each requested peer, an array of IP strings (jagged result).</summary>
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] [] get_peer_ip_list(
string [] peers
) {
object [] results = this.Invoke("get_peer_ip_list", new object [] {
peers});
return ((string [] [])(results[0]));
}
// Asynchronous begin/end pair for "get_peer_ip_list".
public System.IAsyncResult Beginget_peer_ip_list(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_peer_ip_list", new object[] {
peers}, callback, asyncState);
}
public string [] [] Endget_peer_ip_list(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [] [])(results[0]));
}
//-----------------------------------------------------------------------
// get_peer_name
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
// Synchronous SOAP call: returns the name of each named peer.
public string [] get_peer_name(
string [] peers
) {
object [] results = this.Invoke("get_peer_name", new object [] {
peers});
return ((string [])(results[0]));
}
// Begins the async invocation of get_peer_name (APM pattern).
public System.IAsyncResult Beginget_peer_name(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_peer_name", new object[] {
peers}, callback, asyncState);
}
// Completes the async invocation of get_peer_name.
public string [] Endget_peer_name(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_peer_routing_state
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
// Synchronous SOAP call: returns the routing enabled-state of each named peer.
public CommonEnabledState [] get_peer_routing_state(
string [] peers
) {
object [] results = this.Invoke("get_peer_routing_state", new object [] {
peers});
return ((CommonEnabledState [])(results[0]));
}
// Begins the async invocation of get_peer_routing_state (APM pattern).
public System.IAsyncResult Beginget_peer_routing_state(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_peer_routing_state", new object[] {
peers}, callback, asyncState);
}
// Completes the async invocation of get_peer_routing_state.
public CommonEnabledState [] Endget_peer_routing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((CommonEnabledState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_profile_serverssl
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
// Synchronous SOAP call: returns the server-SSL profile name for each named peer.
public string [] get_profile_serverssl(
string [] peers
) {
object [] results = this.Invoke("get_profile_serverssl", new object [] {
peers});
return ((string [])(results[0]));
}
// Begins the async invocation of get_profile_serverssl (APM pattern).
public System.IAsyncResult Beginget_profile_serverssl(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_profile_serverssl", new object[] {
peers}, callback, asyncState);
}
// Completes the async invocation of get_profile_serverssl.
public string [] Endget_profile_serverssl(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_tunnel_port
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
// Synchronous SOAP call: returns the tunnel port number for each named peer.
public long [] get_tunnel_port(
string [] peers
) {
object [] results = this.Invoke("get_tunnel_port", new object [] {
peers});
return ((long [])(results[0]));
}
// Begins the async invocation of get_tunnel_port (APM pattern).
public System.IAsyncResult Beginget_tunnel_port(string [] peers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_tunnel_port", new object[] {
peers}, callback, asyncState);
}
// Completes the async invocation of get_tunnel_port.
public long [] Endget_tunnel_port(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((long [])(results[0]));
}
//-----------------------------------------------------------------------
// get_version
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
// Synchronous SOAP call: returns the interface version string.
public string get_version(
) {
object [] results = this.Invoke("get_version", new object [] {
});
return ((string)(results[0]));
}
// Begins the async invocation of get_version (APM pattern).
public System.IAsyncResult Beginget_version(System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_version", new object[] {
}, callback, asyncState);
}
// Completes the async invocation of get_version.
public string Endget_version(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string)(results[0]));
}
//-----------------------------------------------------------------------
// set_behind_nat_state
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the behind-NAT state for each named peer (parallel arrays).
public void set_behind_nat_state(
string [] peers,
CommonEnabledState [] states
) {
this.Invoke("set_behind_nat_state", new object [] {
peers,
states});
}
// Begins the async invocation of set_behind_nat_state (APM pattern).
public System.IAsyncResult Beginset_behind_nat_state(string [] peers,CommonEnabledState [] states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_behind_nat_state", new object[] {
peers,
states}, callback, asyncState);
}
// Completes the async invocation of set_behind_nat_state (no return value).
public void Endset_behind_nat_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_description
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the description for each named peer (parallel arrays).
public void set_description(
string [] peers,
string [] descriptions
) {
this.Invoke("set_description", new object [] {
peers,
descriptions});
}
// Begins the async invocation of set_description (APM pattern).
public System.IAsyncResult Beginset_description(string [] peers,string [] descriptions, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_description", new object[] {
peers,
descriptions}, callback, asyncState);
}
// Completes the async invocation of set_description (no return value).
public void Endset_description(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_internal_forwarding
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the internal-forwarding mode for each named peer (parallel arrays).
public void set_internal_forwarding(
string [] peers,
NetworkingiSessionRemoteInterfaceV2InternalForwarding [] values
) {
this.Invoke("set_internal_forwarding", new object [] {
peers,
values});
}
// Begins the async invocation of set_internal_forwarding (APM pattern).
public System.IAsyncResult Beginset_internal_forwarding(string [] peers,NetworkingiSessionRemoteInterfaceV2InternalForwarding [] values, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_internal_forwarding", new object[] {
peers,
values}, callback, asyncState);
}
// Completes the async invocation of set_internal_forwarding (no return value).
public void Endset_internal_forwarding(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_management_address
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the management IP address for each named peer (parallel arrays).
public void set_management_address(
string [] peers,
string [] ips
) {
this.Invoke("set_management_address", new object [] {
peers,
ips});
}
// Begins the async invocation of set_management_address (APM pattern).
public System.IAsyncResult Beginset_management_address(string [] peers,string [] ips, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_management_address", new object[] {
peers,
ips}, callback, asyncState);
}
// Completes the async invocation of set_management_address (no return value).
public void Endset_management_address(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_nat_config_status
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the NAT configuration status for each named peer (parallel arrays).
public void set_nat_config_status(
string [] peers,
string [] nat_configs
) {
this.Invoke("set_nat_config_status", new object [] {
peers,
nat_configs});
}
// Begins the async invocation of set_nat_config_status (APM pattern).
public System.IAsyncResult Beginset_nat_config_status(string [] peers,string [] nat_configs, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_nat_config_status", new object[] {
peers,
nat_configs}, callback, asyncState);
}
// Completes the async invocation of set_nat_config_status (no return value).
public void Endset_nat_config_status(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_nat_source_address
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the NAT source-address mode for each named peer (parallel arrays).
public void set_nat_source_address(
string [] peers,
NetworkingiSessionRemoteInterfaceV2NatSourceAddress [] nat_states
) {
this.Invoke("set_nat_source_address", new object [] {
peers,
nat_states});
}
// Begins the async invocation of set_nat_source_address (APM pattern).
public System.IAsyncResult Beginset_nat_source_address(string [] peers,NetworkingiSessionRemoteInterfaceV2NatSourceAddress [] nat_states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_nat_source_address", new object[] {
peers,
nat_states}, callback, asyncState);
}
// Completes the async invocation of set_nat_source_address (no return value).
public void Endset_nat_source_address(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_origin
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the origin state for each named peer (parallel arrays).
public void set_origin(
string [] peers,
NetworkingiSessionRemoteInterfaceV2OriginState [] origins
) {
this.Invoke("set_origin", new object [] {
peers,
origins});
}
// Begins the async invocation of set_origin (APM pattern).
public System.IAsyncResult Beginset_origin(string [] peers,NetworkingiSessionRemoteInterfaceV2OriginState [] origins, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_origin", new object[] {
peers,
origins}, callback, asyncState);
}
// Completes the async invocation of set_origin (no return value).
public void Endset_origin(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_peer_enabled_state
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the enabled state for each named peer (parallel arrays).
public void set_peer_enabled_state(
string [] peers,
CommonEnabledState [] states
) {
this.Invoke("set_peer_enabled_state", new object [] {
peers,
states});
}
// Begins the async invocation of set_peer_enabled_state (APM pattern).
public System.IAsyncResult Beginset_peer_enabled_state(string [] peers,CommonEnabledState [] states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_peer_enabled_state", new object[] {
peers,
states}, callback, asyncState);
}
// Completes the async invocation of set_peer_enabled_state (no return value).
public void Endset_peer_enabled_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_peer_routing_state
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the routing state for each named peer (parallel arrays).
public void set_peer_routing_state(
string [] peers,
CommonEnabledState [] states
) {
this.Invoke("set_peer_routing_state", new object [] {
peers,
states});
}
// Begins the async invocation of set_peer_routing_state (APM pattern).
public System.IAsyncResult Beginset_peer_routing_state(string [] peers,CommonEnabledState [] states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_peer_routing_state", new object[] {
peers,
states}, callback, asyncState);
}
// Completes the async invocation of set_peer_routing_state (no return value).
public void Endset_peer_routing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_profile_serverssl
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the server-SSL profile for each named peer (parallel arrays).
public void set_profile_serverssl(
string [] peers,
string [] serverssl
) {
this.Invoke("set_profile_serverssl", new object [] {
peers,
serverssl});
}
// Begins the async invocation of set_profile_serverssl (APM pattern).
public System.IAsyncResult Beginset_profile_serverssl(string [] peers,string [] serverssl, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_profile_serverssl", new object[] {
peers,
serverssl}, callback, asyncState);
}
// Completes the async invocation of set_profile_serverssl (no return value).
public void Endset_profile_serverssl(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_tunnel_port
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:Networking/iSessionRemoteInterfaceV2",
RequestNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2", ResponseNamespace="urn:iControl:Networking/iSessionRemoteInterfaceV2")]
// Synchronous SOAP call: sets the tunnel port for each named peer (parallel arrays).
public void set_tunnel_port(
string [] peers,
long [] tunnel_ports
) {
this.Invoke("set_tunnel_port", new object [] {
peers,
tunnel_ports});
}
// Begins the async invocation of set_tunnel_port (APM pattern).
public System.IAsyncResult Beginset_tunnel_port(string [] peers,long [] tunnel_ports, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_tunnel_port", new object[] {
peers,
tunnel_ports}, callback, asyncState);
}
// Completes the async invocation of set_tunnel_port (no return value).
public void Endset_tunnel_port(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
}
//=======================================================================
// Enums
//=======================================================================
/// <remarks/>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "Networking.iSessionRemoteInterfaceV2.InternalForwarding", Namespace = "urn:iControl")]
// wsdl-generated SOAP enum (see GeneratedCodeAttribute). Member names are the
// wire values — do not rename or reorder.
public enum NetworkingiSessionRemoteInterfaceV2InternalForwarding
{
WOC_INTERNAL_FORWARDING_UNKNOWN,
WOC_INTERNAL_FORWARDING_DEFAULT,
WOC_INTERNAL_FORWARDING_ENABLED,
WOC_INTERNAL_FORWARDING_DISABLED,
}
/// <remarks/>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "Networking.iSessionRemoteInterfaceV2.NatSourceAddress", Namespace = "urn:iControl")]
// wsdl-generated SOAP enum (see GeneratedCodeAttribute). Member names are the
// wire values — do not rename or reorder.
public enum NetworkingiSessionRemoteInterfaceV2NatSourceAddress
{
WOC_NAT_SOURCE_ADDRESS_NONE,
WOC_NAT_SOURCE_ADDRESS_CLIENT,
WOC_NAT_SOURCE_ADDRESS_WOM,
WOC_NAT_SOURCE_ADDRESS_TUNNEL,
}
/// <remarks/>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "Networking.iSessionRemoteInterfaceV2.OriginState", Namespace = "urn:iControl")]
// wsdl-generated SOAP enum (see GeneratedCodeAttribute). Member names are the
// wire values — do not rename or reorder.
public enum NetworkingiSessionRemoteInterfaceV2OriginState
{
MCP_ORIGIN_DISCOVERED,
MCP_ORIGIN_CONFIGURED,
MCP_ORIGIN_PERSISTABLE,
MCP_ORIGIN_MANUALLY_SAVED,
}
//=======================================================================
// Structs
//=======================================================================
}
| |
using System;
using System.Collections.Generic;
namespace AT_Utils
{
// Wraps a part resource — live (PartResource), proto (ProtoPartResourceSnapshot)
// or raw ConfigNode — behind the common ProtoPartResourceSnapshot interface so
// amounts can be edited uniformly and written back via Sync().
public class ResourceProxy : ProtoPartResourceSnapshot
{
// Backing node; set only by the ConfigNode constructor.
protected ConfigNode valuesRef;
// Original snapshot; set only by the ProtoPartResourceSnapshot constructor.
protected ProtoPartResourceSnapshot protoRef;
// Serializes a snapshot into a fresh RESOURCE node for the base constructor.
static ConfigNode resource_values(ProtoPartResourceSnapshot res)
{
var node = new ConfigNode("RESOURCE");
res.Save(node);
return node;
}
public ResourceProxy(PartResource res) : base(res) {}
public ResourceProxy(ProtoPartResourceSnapshot res)
: base(resource_values(res))
{
// also track the live resource, if the proto part is instantiated
if(res.resourceRef != null)
resourceRef = res.resourceRef;
protoRef = res;
}
public ResourceProxy(ConfigNode node_ref)
: base(node_ref)
{
valuesRef = node_ref;
}
// Pushes amount/maxAmount/flowState back into every backing store that exists
// (live resource, proto snapshot, and/or the original ConfigNode).
public void Sync()
{
if(resourceRef != null)
{
resourceRef.amount = amount;
resourceRef.maxAmount = maxAmount;
resourceRef.flowState = flowState;
}
if(protoRef != null)
{
protoRef.amount = amount;
protoRef.maxAmount = maxAmount;
protoRef.flowState = flowState;
}
if(valuesRef != null)
Save(valuesRef);
}
// Debug representation: which backing references are present plus fill level.
public override string ToString()
{
return Utils.Format(
"(res.ref {}, proto.ref {}, have valuesRef: {}, amount {}/{})",
resourceRef != null? resourceRef.GetHashCode() : 0,
protoRef != null? protoRef.GetHashCode() : 0,
valuesRef != null,
amount,maxAmount
);
}
}
/// <summary>
/// Resource-name to ResourceProxy map for a single part, built from a live
/// Part, a ProtoPartSnapshot, or a saved PART ConfigNode.
/// </summary>
public class PartProxy : Dictionary<string, ResourceProxy>
{
    public PartProxy(Part part)
    {
        foreach(PartResource resource in part.Resources)
        {
            Add(resource.resourceName, new ResourceProxy(resource));
        }
    }

    public PartProxy(ProtoPartSnapshot proto_part)
    {
        foreach(ProtoPartResourceSnapshot snapshot in proto_part.resources)
        {
            Add(snapshot.resourceName, new ResourceProxy(snapshot));
        }
    }

    public PartProxy(ConfigNode part_node)
    {
        foreach(ConfigNode resource_node in part_node.GetNodes("RESOURCE"))
        {
            ResourceProxy proxy = new ResourceProxy(resource_node);
            Add(proxy.resourceName, proxy);
        }
    }
}
// Aggregated view of every resource on a vessel, built from a live ship,
// a ProtoVessel, or a saved vessel ConfigNode. Supports querying totals and
// first-come-first-served transfers.
public class VesselResources
{
public readonly List<PartProxy> Parts = new List<PartProxy>();
// resource name -> every part proxy that carries that resource
public readonly ListDict<string, PartProxy> Resources = new ListDict<string, PartProxy>();
public List<string> resourcesNames { get { return new List<string>(Resources.Keys); } }
// Registers a part proxy and indexes it under each resource it contains.
void add_part_proxy(PartProxy proxy)
{
Parts.Add(proxy);
proxy.ForEach(res => Resources.Add(res.Key, proxy));
}
public VesselResources(IShipconstruct vessel)
{ vessel.Parts.ForEach(p => add_part_proxy(new PartProxy(p))); }
public VesselResources(ProtoVessel proto_vessel)
{ proto_vessel.protoPartSnapshots.ForEach(p => add_part_proxy(new PartProxy(p))); }
public VesselResources(ConfigNode vessel_node)
{
foreach(var part in vessel_node.GetNodes("PART"))
add_part_proxy(new PartProxy(part));
}
/// <summary>
/// Return the vessel's total capacity for the resource.
/// If the vessel has no such resource 0.0 is returned.
/// </summary>
/// <returns>Total resource capacity.</returns>
/// <param name="resource">Resource name.</param>
public double ResourceCapacity(string resource)
{
if(!Resources.ContainsKey(resource)) return 0.0;
double capacity = 0;
Resources[resource].ForEach(p => capacity += p[resource].maxAmount);
return capacity;
}
/// <summary>
/// Return the vessel's total available amount of the resource.
/// If the vessel has no such resource 0.0 is returned.
/// </summary>
/// <returns>Total resource amount.</returns>
/// <param name="resource">Resource name.</param>
public double ResourceAmount(string resource)
{
if(!Resources.ContainsKey(resource)) return 0.0;
double amount = 0;
Resources[resource].ForEach(p => amount += p[resource].amount);
return amount;
}
/// <summary>
/// Transfer a resource into (positive amount) or out of (negative
/// amount) the vessel. No attempt is made to balance the resource
/// across parts: they are filled/emptied on a first-come-first-served
/// basis.
/// If the vessel has no such resource no action is taken.
/// Returns the amount of resource not transfered (0 = all has been
/// transfered).
/// Based on the code from Extraplanetary Launchpads plugin. Resources.cs module.
/// </summary>
/// <returns>The resource.</returns>
/// <param name="resource">Resource.</param>
/// <param name="amount">Amount.</param>
public double TransferResource(string resource, double amount)
{
// NOTE(review): returning 0.0 for an unknown resource reads as "everything
// was transferred" even though nothing happened; callers that use the
// leftover amount might expect 'amount' here — verify against call sites.
if(!Resources.ContainsKey(resource)) return 0.0;
foreach(var part in Resources[resource])
{
var adjust = amount;
var res = part[resource];
if(adjust < 0 && -adjust > res.amount)
// Ensure the resource amount never goes negative
adjust = -res.amount;
else if(adjust > 0 &&
adjust > (res.maxAmount - res.amount))
// ensure the resource amount never exceeds the maximum
adjust = res.maxAmount - res.amount;
// apply the clamped delta, write it back, and carry the remainder on
res.amount += adjust;
res.Sync();
amount -= adjust;
}
return amount;
}
// One line per part that actually carries resources.
public override string ToString()
{
if(Parts.Count == 0)
return "No parts with resources.";
var ret = "";
Parts.ForEach(p => { if(p.Count > 0) ret += Utils.Format("{}\n", p); });
return ret;
}
}
// Mutable bookkeeping record for one resource during a host/target transfer
// session (managed by ResourceManifestList).
public class ResourceManifest
{
// resource name
public string name;
// combined host+target amount available for redistribution
public double pool;
// current amount in the target
public double amount;
// target capacity
public double capacity;
// target amount at session start; the pending transfer is offset-amount
public double offset;
// amount currently in the host
public double host_amount;
// host capacity
public double host_capacity;
// lower bound for 'amount' given the pool and host capacity
public double minAmount;
// upper bound for 'amount' given the pool and target capacity
public double maxAmount;
public override string ToString()
{
return Utils.Format(
"{}: min {}, cur {}, max {}\n" +
"transfer: {}",
name, minAmount, amount, maxAmount,
offset-amount
);
}
}
// Manages a set of ResourceManifests describing a pending resource exchange
// between a host vessel and a target vessel.
public class ResourceManifestList : List<ResourceManifest>
{
// Rebuilds the manifest list for every resource the target carries that the
// host can also store; computes per-resource transfer bounds.
public void NewTransfer(VesselResources host, VesselResources target)
{
Clear();
foreach(var r in target.resourcesNames)
{
// skip resources the host cannot hold at all
if(host.ResourceCapacity(r) <= 0) continue;
var rm = new ResourceManifest();
rm.name = r;
rm.amount = target.ResourceAmount(r);
rm.capacity = target.ResourceCapacity(r);
rm.offset = rm.amount;
rm.host_amount = host.ResourceAmount(r);
rm.host_capacity = host.ResourceCapacity(r);
// bounds: the pool must fit into host+target without overflow
rm.pool = rm.host_amount + rm.offset;
rm.minAmount = Math.Max(0, rm.pool-rm.host_capacity);
rm.maxAmount = Math.Min(rm.pool, rm.capacity);
Add(rm);
}
}
// Refreshes host-side amounts and the derived bounds after the host changed.
public void UpdateHostInfo(VesselResources host)
{
foreach(ResourceManifest r in this)
{
r.host_amount = host.ResourceAmount(r.name);
r.pool = r.host_amount + r.offset;
r.minAmount = Math.Max(0, r.pool-r.host_capacity);
r.maxAmount = Math.Min(r.pool, r.capacity);
}
}
// Executes all pending transfers, reporting the net mass/cost moved out of
// the host; clears the manifest when done.
public void TransferResources(VesselResources host, VesselResources target, out double deltaMass, out double deltaCost)
{
deltaMass = deltaCost = 0;
if(Count == 0) return;
foreach(var r in this)
{
//transfer resource between host and target
// a = leftover pulled from host plus what the host could not supply
var a = host.TransferResource(r.name, r.offset-r.amount);
a = r.amount-r.offset + a;
// push into target; return any overflow back to the host
var b = target.TransferResource(r.name, a);
host.TransferResource(r.name, b);
//update masses
PartResourceDefinition res_def = PartResourceLibrary.Instance.GetDefinition(r.name);
// massless resources contribute neither mass nor cost
if(res_def.density <= 0) continue;
deltaMass += a*res_def.density;
deltaCost += a*res_def.unitCost;
}
Clear();
}
}
}
| |
// Engine r75
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using Server;
using Server.Items;
using Server.Engines.Quests.Haven;
using Server.Engines.Quests.Necro;
namespace Server.Commands
{
/// <summary>
/// Admin command that reads Data/Monsters/overseers/*.cfg decoration lists
/// and places Spawns' Overseer items on every facet.
/// </summary>
public class GenOverseer
{
    // Registers all four command spellings against the same handler.
    public static void Initialize()
    {
        CommandSystem.Register( "GenSeers", AccessLevel.Administrator, new CommandEventHandler( GenOverseer_OnCommand ) );
        CommandSystem.Register( "GenOverseers", AccessLevel.Administrator, new CommandEventHandler( GenOverseer_OnCommand ) );
        CommandSystem.Register( "GenSeer", AccessLevel.Administrator, new CommandEventHandler( GenOverseer_OnCommand ) );
        CommandSystem.Register( "GenOverseer", AccessLevel.Administrator, new CommandEventHandler( GenOverseer_OnCommand ) );
    }

    [Usage( "GenSeers" )]
    // FIX: AliasesAttribute takes params string[]; the original passed a single
    // comma-joined string ("GenSeer, GenOverseer and GenOverseers"), which the
    // help system would display as one nonexistent alias.
    [Aliases( "GenSeer", "GenOverseer", "GenOverseers" )]
    [Description( "Generates Spawns' Overseers around the world." )]
    private static void GenOverseer_OnCommand( CommandEventArgs e )
    {
        m_Mobile = e.Mobile;
        m_Count = 0;

        m_Mobile.SendMessage( "Generating Spawns' Overseers, please wait." );

        Generate( "Data/Monsters/overseers/Trammel", Map.Trammel );
        Generate( "Data/Monsters/overseers/Felucca", Map.Felucca );
        Generate( "Data/Monsters/overseers/Ilshenar", Map.Ilshenar );
        Generate( "Data/Monsters/overseers/Malas", Map.Malas );
        Generate( "Data/Monsters/overseers/Tokuno", Map.Tokuno );
        Generate( "Data/Monsters/overseers/Termur", Map.TerMur );

        m_Mobile.SendMessage( "Spawns' Overseers generation complete. {0} seers were generated.", m_Count );
    }

    // Parses every *.cfg list in 'folder' and generates its items on each of
    // the given maps, accumulating the total in m_Count. Missing folders are
    // silently skipped (not every shard ships every facet's data).
    public static void Generate( string folder, params Map[] maps )
    {
        if ( !Directory.Exists( folder ) )
            return;

        string[] files = Directory.GetFiles( folder, "*.cfg" );

        for ( int i = 0; i < files.Length; ++i )
        {
            ArrayList list = DecorationListSeers.ReadAll( files[i] );

            for ( int j = 0; j < list.Count; ++j )
                m_Count += ((DecorationListSeers)list[j]).Generate( maps );
        }
    }

    // Invoking mobile and running count for the current command execution.
    private static Mobile m_Mobile;
    private static int m_Count;
}
// One parsed .cfg section: an item type + itemID + parameters, plus the list
// of world locations at which to generate it. Specialized variant of RunUO's
// decoration list that also configures SpawnsOverseer items.
public class DecorationListSeers
{
// item type parsed from the section header
private Type m_Type;
// item/graphic ID parsed from the section header
private int m_ItemID;
// "Key=Value" parameters from the parenthesized part of the header
private string[] m_Params;
// DecorationEntrySeers locations that follow the header
private ArrayList m_Entries;
public DecorationListSeers()
{
}
private static Type typeofStatic = typeof( Static );
private static Type typeofLocalizedStatic = typeof( LocalizedStatic );
// Instantiates the configured item and applies the parsed parameters
// (overseer ranges/delays, light, hue, name, amount).
public Item Construct()
{
Item item;
try
{
if ( m_Type == typeofStatic )
{
item = new Static( m_ItemID );
}
else if ( m_Type == typeofLocalizedStatic )
{
// LocalizedStatic needs its label number up front
int labelNumber = 0;
for ( int i = 0; i < m_Params.Length; ++i )
{
if ( m_Params[i].StartsWith( "LabelNumber" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
{
labelNumber = Utility.ToInt32( m_Params[i].Substring( ++indexOf ) );
break;
}
}
}
item = new LocalizedStatic( m_ItemID, labelNumber );
}
else
{
// any other type must expose a parameterless constructor
item = (Item)Activator.CreateInstance( m_Type );
}
}
catch ( Exception e )
{
throw new Exception( String.Format( "Bad type: {0}", m_Type ), e );
}
// overseer-specific parameters
if ( item is Server.Items.SpawnsOverseer )
{
Server.Items.SpawnsOverseer sp = (Server.Items.SpawnsOverseer)item;
for ( int i = 0; i < m_Params.Length; ++i )
{
if ( m_Params[i].StartsWith( "Range" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
sp.Range = Utility.ToInt32( m_Params[i].Substring( ++indexOf ) );
}
else if ( m_Params[i].StartsWith( "InRangeDelay" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
sp.InRangeDelay = Utility.ToInt32( m_Params[i].Substring( ++indexOf ) );
}
else if ( m_Params[i].StartsWith( "OutRangeDelay" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
sp.OutRangeDelay = Utility.ToInt32( m_Params[i].Substring( ++indexOf ) );
}
}
}
// generated decorations are never player-movable
item.Movable = false;
// generic item parameters
for ( int i = 0; i < m_Params.Length; ++i )
{
if ( m_Params[i].StartsWith( "Light" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
item.Light = (LightType)Enum.Parse( typeof( LightType ), m_Params[i].Substring( ++indexOf ), true );
}
else if ( m_Params[i].StartsWith( "Hue" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
{
int hue = Utility.ToInt32( m_Params[i].Substring( ++indexOf ) );
if ( item is DyeTub )
((DyeTub)item).DyedHue = hue;
else
item.Hue = hue;
}
}
else if ( m_Params[i].StartsWith( "Name" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
item.Name = m_Params[i].Substring( ++indexOf );
}
else if ( m_Params[i].StartsWith( "Amount" ) )
{
int indexOf = m_Params[i].IndexOf( '=' );
if ( indexOf >= 0 )
{
// Must suppress stackable warnings
bool wasStackable = item.Stackable;
item.Stackable = true;
item.Amount = Utility.ToInt32( m_Params[i].Substring( ++indexOf ) );
item.Stackable = wasStackable;
}
}
}
return item;
}
// light-source variants scheduled for deletion by FindItem
private static Queue m_DeleteQueue = new Queue();
// Returns true when an equivalent item already exists at (x,y,z) on 'map'.
// For light sources it also deletes same-named variants with the wrong
// light type or item ID so they can be regenerated.
private static bool FindItem( int x, int y, int z, Map map, Item srcItem )
{
int itemID = srcItem.ItemID;
bool res = false;
IPooledEnumerable eable;
if ( (TileData.ItemTable[itemID & TileData.MaxItemValue].Flags & TileFlag.LightSource) != 0 )
{
eable = map.GetItemsInRange( new Point3D( x, y, z ), 0 );
LightType lt = srcItem.Light;
string srcName = srcItem.ItemData.Name;
foreach ( Item item in eable )
{
if ( item.Z == z )
{
if ( item.ItemID == itemID )
{
if ( item.Light != lt )
m_DeleteQueue.Enqueue( item );
else
res = true;
}
else if ( (item.ItemData.Flags & TileFlag.LightSource) != 0 && item.ItemData.Name == srcName )
{
m_DeleteQueue.Enqueue( item );
}
}
}
}
else
{
eable = map.GetItemsInRange( new Point3D( x, y, z ), 0 );
foreach ( Item item in eable )
{
if ( item.Z == z && item.ItemID == itemID )
{
eable.Free();
return true;
}
}
}
eable.Free();
// deletions are deferred until enumeration is finished
while ( m_DeleteQueue.Count > 0 )
((Item)m_DeleteQueue.Dequeue()).Delete();
return res;
}
// Places this list's item at every entry location on every map, skipping
// spots where an equivalent item already exists. Returns the number placed.
public int Generate( Map[] maps )
{
int count = 0;
Item item = null;
for ( int i = 0; i < m_Entries.Count; ++i )
{
DecorationEntrySeers entry = (DecorationEntrySeers)m_Entries[i];
Point3D loc = entry.Location;
string extra = entry.Extra;
for ( int j = 0; j < maps.Length; ++j )
{
// reuse the last constructed item until it is actually placed
if ( item == null )
item = Construct();
if ( item == null )
continue;
if ( FindItem( loc.X, loc.Y, loc.Z, maps[j], item ) )
{
}
else
{
item.MoveToWorld( loc, maps[j] );
++count;
item = null;
}
}
}
// discard a constructed-but-unplaced leftover
if ( item != null )
item.Delete();
return count;
}
// Parses every section in a .cfg file into DecorationListSeers instances.
public static ArrayList ReadAll( string path )
{
using ( StreamReader ip = new StreamReader( path ) )
{
ArrayList list = new ArrayList();
for ( DecorationListSeers v = Read( ip ); v != null; v = Read( ip ) )
list.Add( v );
return list;
}
}
private static string[] m_EmptyParams = new string[0];
// Reads one section: a "Type ItemID (params)" header followed by entry lines,
// terminated by a blank line or EOF. Returns null at end of input.
public static DecorationListSeers Read( StreamReader ip )
{
string line;
// skip blank lines and '#' comments before the header
while ( (line = ip.ReadLine()) != null )
{
line = line.Trim();
if ( line.Length > 0 && !line.StartsWith( "#" ) )
break;
}
if ( string.IsNullOrEmpty( line ) )
return null;
DecorationListSeers list = new DecorationListSeers();
// NOTE(review): the header must contain a space ("Type ItemID ...") or
// Substring(0, -1) below throws — verify against the .cfg format.
int indexOf = line.IndexOf( ' ' );
list.m_Type = ScriptCompiler.FindTypeByName( line.Substring( 0, indexOf++ ), true );
if ( list.m_Type == null )
throw new ArgumentException( String.Format( "Type not found for header: '{0}'", line ) );
line = line.Substring( indexOf );
indexOf = line.IndexOf( '(' );
if ( indexOf >= 0 )
{
// NOTE(review): 'indexOf - 1' assumes a space precedes '(' — verify.
list.m_ItemID = Utility.ToInt32( line.Substring( 0, indexOf - 1 ) );
string parms = line.Substring( ++indexOf );
if ( line.EndsWith( ")" ) )
parms = parms.Substring( 0, parms.Length - 1 );
list.m_Params = parms.Split( ';' );
for ( int i = 0; i < list.m_Params.Length; ++i )
list.m_Params[i] = list.m_Params[i].Trim();
}
else
{
list.m_ItemID = Utility.ToInt32( line );
list.m_Params = m_EmptyParams;
}
// entry lines follow until a blank line; '#' comments are skipped
list.m_Entries = new ArrayList();
while ( (line = ip.ReadLine()) != null )
{
line = line.Trim();
if ( line.Length == 0 )
break;
if ( line.StartsWith( "#" ) )
continue;
list.m_Entries.Add( new DecorationEntrySeers( line ) );
}
return list;
}
}
/// <summary>
/// One location line of a decoration list: "x y z [extra...]".
/// </summary>
public class DecorationEntrySeers
{
    private Point3D m_Location;
    private string m_Extra;

    public Point3D Location{ get{ return m_Location; } }
    public string Extra{ get{ return m_Extra; } }

    public DecorationEntrySeers( string line )
    {
        string xs, ys, zs;

        // consume the three leading coordinate tokens; the rest is 'extra'
        Pop( out xs, ref line );
        Pop( out ys, ref line );
        Pop( out zs, ref line );

        m_Location = new Point3D( Utility.ToInt32( xs ), Utility.ToInt32( ys ), Utility.ToInt32( zs ) );
        m_Extra = line;
    }

    /// <summary>
    /// Splits the first space-separated token off the front of 'line' into
    /// 'v'; when no space remains, the whole line is consumed.
    /// </summary>
    public void Pop( out string v, ref string line )
    {
        int separator = line.IndexOf( ' ' );

        if ( separator < 0 )
        {
            v = line;
            line = "";
        }
        else
        {
            v = line.Substring( 0, separator );
            line = line.Substring( separator + 1 );
        }
    }
}
/// <summary>
/// Admin command that deletes every world-placed Spawns' Overseer on all facets.
/// </summary>
public class RemOverseer
{
    // Registers all four command spellings against the same handler.
    public static void Initialize()
    {
        CommandSystem.Register( "RemOverseers", AccessLevel.Administrator, new CommandEventHandler( RemOverseers_OnCommand ) );
        CommandSystem.Register( "RemSeers", AccessLevel.Administrator, new CommandEventHandler( RemOverseers_OnCommand ) );
        CommandSystem.Register( "RemOverseer", AccessLevel.Administrator, new CommandEventHandler( RemOverseers_OnCommand ) );
        CommandSystem.Register( "RemSeer", AccessLevel.Administrator, new CommandEventHandler( RemOverseers_OnCommand ) );
    }

    [Usage( "RemSeers" )]
    // FIX: AliasesAttribute takes params string[]; the original passed one
    // comma-joined string, which the help system would show as a single alias.
    [Aliases( "RemSeer", "RemOverseer", "RemOverseers" )]
    [Description( "Remove all Overseers in all facets." )]
    public static void RemOverseers_OnCommand( CommandEventArgs e )
    {
        World.Broadcast( 0x35, true, "Overseers are being removed, please wait." );

        // FIX: use UtcNow for elapsed-time measurement — DateTime.Now can jump
        // across DST/clock changes. (Also dropped the unused 'from' local.)
        DateTime startTime = DateTime.UtcNow;

        // Collect first, delete second: deleting while enumerating
        // World.Items.Values would invalidate the enumerator.
        List<Item> toDelete = new List<Item>();

        foreach ( Item item in World.Items.Values )
        {
            // only world-placed overseers; ignore any held in containers
            if ( item is SpawnsOverseer && item.Parent == null )
                toDelete.Add( item );
        }

        foreach ( Item item in toDelete )
            item.Delete();

        DateTime endTime = DateTime.UtcNow;

        // FIX: subject-verb agreement in the broadcast ("have", not "has").
        World.Broadcast( 0x35, true, "{0} Overseers have been removed in {1:F1} seconds.", toDelete.Count, (endTime - startTime).TotalSeconds );
    }
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Timers;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Framework.Statistics;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim
{
/// <summary>
/// Interactive OpenSim region server
/// </summary>
public class OpenSim : OpenSimBase
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
protected string m_startupCommandsFile;
protected string m_shutdownCommandsFile;
protected bool m_gui = false;
protected string m_consoleType = "local";
protected uint m_consolePort = 0;
private string m_timedScript = "disabled";
private Timer m_scriptTimer;
// Constructs the interactive region server; all actual configuration
// handling is deferred to the OpenSimBase base class constructor.
public OpenSim(IConfigSource configSource) : base(configSource)
{
}
// Reads OpenSim-specific settings from the [Startup] and [Network] config
// sections: console type/port, startup/shutdown command scripts, optional
// log file override, the timed script, and async/threadpool options.
protected override void ReadExtraConfigSettings()
{
base.ReadExtraConfigSettings();
IConfig startupConfig = m_config.Source.Configs["Startup"];
IConfig networkConfig = m_config.Source.Configs["Network"];
int stpMaxThreads = 15;
// NOTE: everything below (including the networkConfig read) is skipped
// entirely when there is no [Startup] section, leaving the field defaults.
if (startupConfig != null)
{
m_startupCommandsFile = startupConfig.GetString("startup_console_commands_file", "startup_commands.txt");
m_shutdownCommandsFile = startupConfig.GetString("shutdown_console_commands_file", "shutdown_commands.txt");
// "console" wins over "gui": the gui flag is only consulted when no
// explicit console type was configured.
if (startupConfig.GetString("console", String.Empty) == String.Empty)
m_gui = startupConfig.GetBoolean("gui", false);
else
m_consoleType= startupConfig.GetString("console", String.Empty);
if (networkConfig != null)
m_consolePort = (uint)networkConfig.GetInt("console_port", 0);
m_timedScript = startupConfig.GetString("timer_Script", "disabled");
// Optionally redirect the already-configured log4net file appender to
// a config-specified path; ActivateOptions() applies the new File value.
if (m_logFileAppender != null)
{
if (m_logFileAppender is log4net.Appender.FileAppender)
{
log4net.Appender.FileAppender appender =
(log4net.Appender.FileAppender)m_logFileAppender;
string fileName = startupConfig.GetString("LogFile", String.Empty);
if (fileName != String.Empty)
{
appender.File = fileName;
appender.ActivateOptions();
}
m_log.InfoFormat("[LOGGING]: Logging started to file {0}", appender.File);
}
}
// Select the fire-and-forget strategy for Util (e.g. thread pool flavor).
string asyncCallMethodStr = startupConfig.GetString("async_call_method", String.Empty);
FireAndForgetMethod asyncCallMethod;
if (!String.IsNullOrEmpty(asyncCallMethodStr) && Utils.EnumTryParse<FireAndForgetMethod>(asyncCallMethodStr, out asyncCallMethod))
Util.FireAndForgetMethod = asyncCallMethod;
stpMaxThreads = startupConfig.GetInt("MaxPoolThreads", 15);
}
// SmartThreadPool needs explicit initialisation with its thread cap.
if (Util.FireAndForgetMethod == FireAndForgetMethod.SmartThreadPool)
Util.InitThreadPool(stpMaxThreads);
m_log.Info("[OPENSIM MAIN]: Using async_call_method " + Util.FireAndForgetMethod);
}
/// <summary>
/// Performs initialisation of the scene, such as loading configuration from disk.
/// </summary>
protected override void StartupSpecific()
{
m_log.Info("====================================================================");
m_log.Info("========================= STARTING OPENSIM =========================");
m_log.Info("====================================================================");
m_log.InfoFormat("[OPENSIM MAIN]: Running in {0} mode",
(ConfigurationSettings.Standalone ? "sandbox" : "grid"));
//m_log.InfoFormat("[OPENSIM MAIN]: GC Is Server GC: {0}", GCSettings.IsServerGC.ToString());
// http://msdn.microsoft.com/en-us/library/bb384202.aspx
//GCSettings.LatencyMode = GCLatencyMode.Batch;
//m_log.InfoFormat("[OPENSIM MAIN]: GC Latency Mode: {0}", GCSettings.LatencyMode.ToString());
// Pick the console implementation. The gui flag forces a plain
// CommandConsole; otherwise m_consoleType (from config) decides.
if (m_gui) // Driven by external GUI
m_console = new CommandConsole("Region");
else
{
switch (m_consoleType)
{
case "basic":
m_console = new CommandConsole("Region");
break;
case "rest":
m_console = new RemoteConsole("Region");
((RemoteConsole)m_console).ReadConfig(m_config.Source);
break;
default:
m_console = new LocalConsole("Region");
break;
}
}
// Console must be registered and commands wired up before the base
// startup runs, since base.StartupSpecific() may use the console.
MainConsole.Instance = m_console;
RegisterConsoleCommands();
base.StartupSpecific();
// A REST console serves over HTTP: either piggyback on the main HTTP
// server (port 0) or a dedicated one on the configured console port.
if (m_console is RemoteConsole)
{
if (m_consolePort == 0)
{
((RemoteConsole)m_console).SetServer(m_httpServer);
}
else
{
((RemoteConsole)m_console).SetServer(MainServer.GetHttpServer(m_consolePort));
}
}
//Run Startup Commands
if (String.IsNullOrEmpty(m_startupCommandsFile))
{
m_log.Info("[STARTUP]: No startup command script specified. Moving on...");
}
else
{
RunCommandScript(m_startupCommandsFile);
}
// Start timer script (run a script every xx seconds)
// NOTE(review): interval is hard-coded to 20 minutes (1200*1000 ms)
// regardless of configuration — confirm this is intentional.
if (m_timedScript != "disabled")
{
m_scriptTimer = new Timer();
m_scriptTimer.Enabled = true;
m_scriptTimer.Interval = 1200*1000;
m_scriptTimer.Elapsed += RunAutoTimerScript;
}
// Hook up to the watchdog timer
Watchdog.OnWatchdogTimeout += WatchdogTimeoutHandler;
PrintFileToConsole("startuplogo.txt");
m_log.InfoFormat("[NETWORK]: Using {0} as SYSTEMIP", Util.GetLocalHost().ToString());
// For now, start at the 'root' level by default
if (m_sceneManager.Scenes.Count == 1) // If there is only one region, select it
ChangeSelectedRegion("region",
new string[] {"change", "region", m_sceneManager.Scenes[0].RegionInfo.RegionName});
else
ChangeSelectedRegion("region", new string[] {"change", "region", "root"});
}
/// <summary>
/// Register standard set of region console commands
/// </summary>
private void RegisterConsoleCommands()
{
    // Region-scoped commands. Each registration is:
    // (module, shared, command, help-syntax, help-text, handler).
    m_console.Commands.AddCommand("region", false, "clear assets",
        "clear assets",
        "Clear the asset cache", HandleClearAssets);
    m_console.Commands.AddCommand("region", false, "force update",
        "force update",
        "Force the update of all objects on clients",
        HandleForceUpdate);
    m_console.Commands.AddCommand("region", false, "debug packet",
        "debug packet <level>",
        "Turn on packet debugging", Debug);
    // Fixed typo in the syntax help: "<cripting>" -> "<scripting>"
    // (matches the usage message printed by Debug() itself).
    m_console.Commands.AddCommand("region", false, "debug scene",
        "debug scene <scripting> <collisions> <physics>",
        "Turn on scene debugging", Debug);
    m_console.Commands.AddCommand("region", false, "change region",
        "change region <region name>",
        "Change current console region", ChangeSelectedRegion);
    m_console.Commands.AddCommand("region", false, "save xml",
        "save xml",
        "Save a region's data in XML format", SaveXml);
    m_console.Commands.AddCommand("region", false, "save xml2",
        "save xml2",
        "Save a region's data in XML2 format", SaveXml2);
    m_console.Commands.AddCommand("region", false, "load xml",
        "load xml [-newIDs [<x> <y> <z>]]",
        "Load a region's data from XML format", LoadXml);
    m_console.Commands.AddCommand("region", false, "load xml2",
        "load xml2",
        "Load a region's data from XML2 format", LoadXml2);
    m_console.Commands.AddCommand("region", false, "save prims xml2",
        "save prims xml2 [<prim name> <file name>]",
        "Save named prim to XML2", SavePrimsXml2);
    m_console.Commands.AddCommand("region", false, "load oar",
        "load oar [--merge] <oar name>",
        "Load a region's data from OAR archive", LoadOar);
    m_console.Commands.AddCommand("region", false, "save oar",
        "save oar <oar name>",
        "Save a region's data to an OAR archive",
        "More information on forthcoming options here soon", SaveOar);
    m_console.Commands.AddCommand("region", false, "edit scale",
        "edit scale <name> <x> <y> <z>",
        "Change the scale of a named prim", HandleEditScale);
    m_console.Commands.AddCommand("region", false, "kick user",
        "kick user <first> <last> [message]",
        "Kick a user off the simulator", KickUserCommand);
    m_console.Commands.AddCommand("region", false, "show assets",
        "show assets",
        "Show asset data", HandleShow);
    m_console.Commands.AddCommand("region", false, "show users",
        "show users [full]",
        "Show user data", HandleShow);
    m_console.Commands.AddCommand("region", false, "show connections",
        "show connections",
        "Show connection data", HandleShow);
    m_console.Commands.AddCommand("region", false, "show users full",
        "show users full",
        String.Empty, HandleShow);
    m_console.Commands.AddCommand("region", false, "show modules",
        "show modules",
        "Show module data", HandleShow);
    m_console.Commands.AddCommand("region", false, "show regions",
        "show regions",
        "Show region data", HandleShow);
    m_console.Commands.AddCommand("region", false, "show queues",
        "show queues",
        "Show queue data", HandleShow);
    m_console.Commands.AddCommand("region", false, "show ratings",
        "show ratings",
        "Show rating data", HandleShow);
    m_console.Commands.AddCommand("region", false, "backup",
        "backup",
        "Persist objects to the database now", RunCommand);
    m_console.Commands.AddCommand("region", false, "create region",
        "create region",
        "Create a new region", HandleCreateRegion);
    m_console.Commands.AddCommand("region", false, "restart",
        "restart",
        "Restart all sims in this instance", RunCommand);
    m_console.Commands.AddCommand("region", false, "config set",
        "config set <section> <field> <value>",
        "Set a config option", HandleConfig);
    m_console.Commands.AddCommand("region", false, "config get",
        "config get <section> <field>",
        "Read a config option", HandleConfig);
    m_console.Commands.AddCommand("region", false, "config save",
        "config save",
        "Save current configuration", HandleConfig);
    m_console.Commands.AddCommand("region", false, "command-script",
        "command-script <script>",
        "Run a command script from file", RunCommand);
    m_console.Commands.AddCommand("region", false, "remove-region",
        "remove-region <name>",
        "Remove a region from this simulator", RunCommand);
    m_console.Commands.AddCommand("region", false, "delete-region",
        "delete-region <name>",
        "Delete a region from disk", RunCommand);
    m_console.Commands.AddCommand("region", false, "modules list",
        "modules list",
        "List modules", HandleModules);
    m_console.Commands.AddCommand("region", false, "modules load",
        "modules load <name>",
        "Load a module", HandleModules);
    m_console.Commands.AddCommand("region", false, "modules unload",
        "modules unload <name>",
        "Unload a module", HandleModules);
    m_console.Commands.AddCommand("region", false, "Add-InventoryHost",
        "Add-InventoryHost <host>",
        String.Empty, RunCommand);
    m_console.Commands.AddCommand("region", false, "kill uuid",
        "kill uuid <UUID>",
        "Kill an object by UUID", KillUUID);

    // User-management commands only make sense in standalone mode;
    // in grid mode they live on the user server.
    if (ConfigurationSettings.Standalone)
    {
        m_console.Commands.AddCommand("region", false, "create user",
            "create user [<first> [<last> [<pass> [<x> <y> [<email>]]]]]",
            "Create a new user", HandleCreateUser);
        m_console.Commands.AddCommand("region", false, "reset user password",
            "reset user password [<first> [<last> [<password>]]]",
            "Reset a user password", HandleResetUserPassword);
    }

    // Hypergrid linking commands.
    m_console.Commands.AddCommand("hypergrid", false, "link-mapping", "link-mapping [<x> <y>] <cr>",
        "Set local coordinate to map HG regions to", RunCommand);
    m_console.Commands.AddCommand("hypergrid", false, "link-region",
        "link-region <Xloc> <Yloc> <HostName>:<HttpPort>[:<RemoteRegionName>] <cr>",
        "Link a hypergrid region", RunCommand);
    m_console.Commands.AddCommand("hypergrid", false, "unlink-region",
        "unlink-region <local name> or <HostName>:<HttpPort> <cr>",
        "Unlink a hypergrid region", RunCommand);
}
// Runs the optional shutdown command script before the base class tears
// the server down. Uses IsNullOrEmpty rather than != String.Empty because
// m_shutdownCommandsFile is only assigned when a [Startup] config section
// exists and is otherwise null.
public override void ShutdownSpecific()
{
    if (!String.IsNullOrEmpty(m_shutdownCommandsFile))
    {
        RunCommandScript(m_shutdownCommandsFile);
    }
    base.ShutdownSpecific();
}
/// <summary>
/// Timer to run a specific text file as console commands. Configured in in the main ini file
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
// Timer callback: executes the configured command script on each tick.
// A guard re-checks the "disabled" sentinel in case configuration changed
// after the timer was armed.
private void RunAutoTimerScript(object sender, EventArgs e)
{
    if (m_timedScript == "disabled")
        return;

    RunCommandScript(m_timedScript);
}
// Logs a watchdog timeout for the given thread, including how long ago
// it last checked in. TickCount is masked with Int32.MaxValue to keep
// the value non-negative across wraparound.
private void WatchdogTimeoutHandler(System.Threading.Thread thread, int lastTick)
{
    int millisecondsAgo = (Environment.TickCount & Int32.MaxValue) - lastTick;

    m_log.ErrorFormat("[WATCHDOG]: Timeout detected for thread \"{0}\". ThreadState={1}. Last tick was {2}ms ago",
        thread.Name, thread.ThreadState, millisecondsAgo);
}
#region Console Commands
/// <summary>
/// Kicks users off the region
/// </summary>
/// <param name="module"></param>
/// <param name="cmdparams">name of avatar to kick</param>
private void KickUserCommand(string module, string[] cmdparams)
{
// Tokens: "kick", "user", <first>, <last>, [message...]
if (cmdparams.Length < 4)
return;
// Any tokens past the last name form a custom kick message.
string alert = null;
if (cmdparams.Length > 4)
alert = String.Format("\n{0}\n", String.Join(" ", cmdparams, 4, cmdparams.Length - 4));
IList agents = m_sceneManager.GetCurrentSceneAvatars();
foreach (ScenePresence presence in agents)
{
RegionInfo regionInfo = m_sceneManager.GetRegionInfo(presence.RegionHandle);
// Substring match on both names, case-insensitive — so a partial
// name can kick multiple matching avatars.
if (presence.Firstname.ToLower().Contains(cmdparams[2].ToLower()) &&
presence.Lastname.ToLower().Contains(cmdparams[3].ToLower()))
{
MainConsole.Instance.Output(
String.Format(
"Kicking user: {0,-16}{1,-16}{2,-37} in region: {3,-16}",
presence.Firstname, presence.Lastname, presence.UUID, regionInfo.RegionName));
// kick client...
if (alert != null)
presence.ControllingClient.Kick(alert);
else
presence.ControllingClient.Kick("\nThe OpenSim manager kicked you out.\n");
// ...and close on our side
presence.Scene.IncomingCloseAgent(presence.UUID);
}
}
MainConsole.Instance.Output("");
}
/// <summary>
/// Run an optional startup list of commands
/// </summary>
/// <param name="fileName"></param>
// Executes each non-empty line of the given file as a console command.
// A missing file is silently ignored.
private void RunCommandScript(string fileName)
{
    if (!File.Exists(fileName))
        return;

    m_log.Info("[COMMANDFILE]: Running " + fileName);

    using (StreamReader reader = File.OpenText(fileName))
    {
        for (string command = reader.ReadLine(); command != null; command = reader.ReadLine())
        {
            if (command == String.Empty)
                continue;

            m_log.Info("[COMMANDFILE]: Running '" + command + "'");
            m_console.RunCommand(command);
        }
    }
}
/// <summary>
/// Opens a file and uses it as input to the console command parser.
/// </summary>
/// <param name="fileName">name of file to use as input to the console</param>
// Prints each line of the given file to the log, prefixed with "[!]".
// A missing file is silently ignored. The reader is now wrapped in a
// using block — the original leaked the StreamReader/file handle
// (inconsistent with RunCommandScript, which disposes its reader).
private static void PrintFileToConsole(string fileName)
{
    if (!File.Exists(fileName))
        return;

    using (StreamReader readFile = File.OpenText(fileName))
    {
        string currentLine;
        while ((currentLine = readFile.ReadLine()) != null)
        {
            m_log.Info("[!]" + currentLine);
        }
    }
}
// Handler for "clear assets" — asset-cache clearing is not implemented;
// only reports that fact to the console.
private void HandleClearAssets(string module, string[] args)
{
MainConsole.Instance.Output("Not implemented.");
}
/// <summary>
/// Force resending of all updates to all clients in active region(s)
/// </summary>
/// <param name="module"></param>
/// <param name="args"></param>
private void HandleForceUpdate(string module, string[] args)
{
MainConsole.Instance.Output("Updating all clients");
// Delegates the actual resend to the scene manager for the current scene.
m_sceneManager.ForceCurrentSceneClientUpdate();
}
/// <summary>
/// Edits the scale of a primative with the name specified
/// </summary>
/// <param name="module"></param>
/// <param name="args">0,1, name, x, y, z</param>
// Handler for "edit scale <name> <x> <y> <z>": rescales the named prim
// in the currently selected scene. Expects exactly six tokens
// ("edit", "scale", name, x, y, z).
private void HandleEditScale(string module, string[] args)
{
    if (args.Length != 6)
    {
        MainConsole.Instance.Output("Argument error: edit scale <prim name> <x> <y> <z>");
        return;
    }

    m_sceneManager.HandleEditCommandOnCurrentScene(args);
}
/// <summary>
/// Creates a new region based on the parameters specified. This will ask the user questions on the console
/// </summary>
/// <param name="module"></param>
/// <param name="cmd">0,1,region name, region XML file</param>
// Handler for "create region <region name> <region_file>". Accepts a
// .xml or .ini region definition; relative names are resolved against
// the configured regions directory. The duplicated path-resolution
// logic of the original .xml/.ini branches is now shared — the only
// remaining difference is the RegionInfo overload invoked.
private void HandleCreateRegion(string module, string[] cmd)
{
    if (cmd.Length < 4)
    {
        MainConsole.Instance.Output("Usage: create region <region name> <region_file.ini>");
        return;
    }

    string regionName = cmd[2];
    string regionSpec = cmd[3];

    bool isXml = regionSpec.EndsWith(".xml");
    bool isIni = regionSpec.EndsWith(".ini");

    if (!isXml && !isIni)
    {
        MainConsole.Instance.Output("Usage: create region <region name> <region_file.ini>");
        return;
    }

    // Resolve relative names against the regions directory; absolute
    // and parent-relative specifiers are used as-is.
    string regionsDir = ConfigSource.Source.Configs["Startup"].GetString("regionload_regionsdir", "Regions").Trim();
    string regionFile = String.Format("{0}/{1}", regionsDir, regionSpec);
    if (regionSpec.StartsWith("/") || regionSpec.StartsWith("\\") || regionSpec.StartsWith(".."))
        regionFile = regionSpec;

    IScene scene;
    if (isXml)
        CreateRegion(new RegionInfo(regionName, regionFile, false, ConfigSource.Source), true, out scene);
    else
        // The .ini overload additionally takes the region name as the
        // config section to read within the file.
        CreateRegion(new RegionInfo(regionName, regionFile, false, ConfigSource.Source, regionName), true, out scene);
}
/// <summary>
/// Change and load configuration file data.
/// </summary>
/// <param name="module"></param>
/// <param name="cmd"></param>
// Handler for "config set|get|save". SET merges a new value into the live
// config, GET prints one, SAVE writes the configuration to a file.
private void HandleConfig(string module, string[] cmd)
{
    // Drop the leading "config" token.
    List<string> args = new List<string>(cmd);
    args.RemoveAt(0);
    string[] cmdparams = args.ToArray();
    string n = "CONFIG";

    if (cmdparams.Length > 0)
    {
        switch (cmdparams[0].ToLower())
        {
            case "set":
                if (cmdparams.Length < 4)
                {
                    MainConsole.Instance.Output(String.Format("SYNTAX: {0} SET SECTION KEY VALUE",n));
                    MainConsole.Instance.Output(String.Format("EXAMPLE: {0} SET ScriptEngine.DotNetEngine NumberOfScriptThreads 5",n));
                }
                else
                {
                    // Build a one-section config source and merge it in,
                    // so existing values in the section are preserved.
                    IConfig c;
                    IConfigSource source = new IniConfigSource();
                    c = source.AddConfig(cmdparams[1]);
                    if (c != null)
                    {
                        // The value may span multiple tokens.
                        string _value = String.Join(" ", cmdparams, 3, cmdparams.Length - 3);
                        c.Set(cmdparams[2], _value);
                        m_config.Source.Merge(source);
                        // Fixed format string: the original "{0} {0} {1} {2} {3}"
                        // printed "CONFIG CONFIG section key value" instead of
                        // "CONFIG SET section key value" (cf. the GET branch).
                        MainConsole.Instance.Output(String.Format("{0} SET {1} {2} {3}",n,cmdparams[1],cmdparams[2],_value));
                    }
                }
                break;
            case "get":
                if (cmdparams.Length < 3)
                {
                    MainConsole.Instance.Output(String.Format("SYNTAX: {0} GET SECTION KEY",n));
                    MainConsole.Instance.Output(String.Format("EXAMPLE: {0} GET ScriptEngine.DotNetEngine NumberOfScriptThreads",n));
                }
                else
                {
                    IConfig c = m_config.Source.Configs[cmdparams[1]];
                    if (c == null)
                    {
                        MainConsole.Instance.Output(String.Format("Section \"{0}\" does not exist.",cmdparams[1]));
                        break;
                    }
                    else
                    {
                        MainConsole.Instance.Output(String.Format("{0} GET {1} {2} : {3}",n,cmdparams[1],cmdparams[2],
                            c.GetString(cmdparams[2])));
                    }
                }
                break;
            case "save":
                if (cmdparams.Length < 2)
                {
                    MainConsole.Instance.Output("SYNTAX: " + n + " SAVE FILE");
                    return;
                }
                // Refuse to overwrite the master ini file in place.
                if (Application.iniFilePath == cmdparams[1])
                {
                    MainConsole.Instance.Output("FILE can not be " + Application.iniFilePath);
                    return;
                }
                MainConsole.Instance.Output("Saving configuration file: " + cmdparams[1]);
                m_config.Save(cmdparams[1]);
                break;
        }
    }
}
/// <summary>
/// Load, Unload, and list Region modules in use
/// </summary>
/// <param name="module"></param>
/// <param name="cmd"></param>
private void HandleModules(string module, string[] cmd)
{
// Drop the leading "modules" token.
List<string> args = new List<string>(cmd);
args.RemoveAt(0);
string[] cmdparams = args.ToArray();
if (cmdparams.Length > 0)
{
switch (cmdparams[0].ToLower())
{
case "list":
foreach (IRegionModule irm in m_moduleLoader.GetLoadedSharedModules)
{
MainConsole.Instance.Output(String.Format("Shared region module: {0}", irm.Name));
}
break;
case "unload":
if (cmdparams.Length > 1)
{
// Iterate over an ArrayList copy: UnloadModule presumably
// mutates the loader's module collection, which would
// invalidate a direct enumerator.
foreach (IRegionModule rm in new ArrayList(m_moduleLoader.GetLoadedSharedModules))
{
// Case-insensitive exact name match.
if (rm.Name.ToLower() == cmdparams[1].ToLower())
{
MainConsole.Instance.Output(String.Format("Unloading module: {0}", rm.Name));
m_moduleLoader.UnloadModule(rm);
}
}
}
break;
case "load":
if (cmdparams.Length > 1)
{
// Copy of the scene list for the same reason as above.
foreach (Scene s in new ArrayList(m_sceneManager.Scenes))
{
MainConsole.Instance.Output(String.Format("Loading module: {0}", cmdparams[1]));
m_moduleLoader.LoadRegionModules(cmdparams[1], s);
}
}
break;
}
}
}
/// <summary>
/// Runs commands issued by the server console from the operator
/// </summary>
/// <param name="command">The first argument of the parameter (the command)</param>
/// <param name="cmdparams">Additional arguments passed to the command</param>
public void RunCommand(string module, string[] cmdparams)
{
// First token is the command; the rest are its arguments.
List<string> args = new List<string>(cmdparams);
if (args.Count < 1)
return;
string command = args[0];
args.RemoveAt(0);
cmdparams = args.ToArray();
switch (command)
{
case "command-script":
// Execute each line of the named file as a console command.
if (cmdparams.Length > 0)
{
RunCommandScript(cmdparams[0]);
}
break;
case "backup":
// Persist the current scene's objects to the database now.
m_sceneManager.BackupCurrentScene();
break;
case "remove-region":
// Region name may contain spaces, so re-join the remaining tokens.
string regRemoveName = CombineParams(cmdparams, 0);
Scene removeScene;
if (m_sceneManager.TryGetScene(regRemoveName, out removeScene))
RemoveRegion(removeScene, false);
else
MainConsole.Instance.Output("no region with that name");
break;
case "delete-region":
// Same as remove-region, but the second argument requests that
// the region's stored data be cleaned up as well.
string regDeleteName = CombineParams(cmdparams, 0);
Scene killScene;
if (m_sceneManager.TryGetScene(regDeleteName, out killScene))
RemoveRegion(killScene, true);
else
MainConsole.Instance.Output("no region with that name");
break;
case "restart":
m_sceneManager.RestartCurrentScene();
break;
case "Add-InventoryHost":
if (cmdparams.Length > 0)
{
MainConsole.Instance.Output("Not implemented.");
}
break;
}
}
/// <summary>
/// Change the currently selected region. The selected region is that operated upon by single region commands.
/// </summary>
/// <param name="cmdParams"></param>
// Changes the console's currently selected region ("change region <name>").
// Always reports and re-applies the resulting selection, even when the
// requested region could not be found.
protected void ChangeSelectedRegion(string module, string[] cmdparams)
{
    if (cmdparams.Length <= 2)
    {
        MainConsole.Instance.Output("Usage: change region <region name>");
    }
    else
    {
        // Region names may contain spaces; re-join the remaining tokens.
        string requested = CombineParams(cmdparams, 2);
        if (!m_sceneManager.TrySetCurrentScene(requested))
            MainConsole.Instance.Output(String.Format("Couldn't select region {0}", requested));
    }

    // "root" means no single region is selected.
    string regionName = m_sceneManager.CurrentScene == null
        ? "root"
        : m_sceneManager.CurrentScene.RegionInfo.RegionName;

    MainConsole.Instance.Output(String.Format("Currently selected region is {0}", regionName));
    m_console.DefaultPrompt = String.Format("Region ({0}) ", regionName);
    m_console.ConsoleScene = m_sceneManager.CurrentScene;
}
/// <summary>
/// Execute switch for some of the create commands
/// </summary>
/// <param name="args"></param>
// Console entry point for "create user": only available in standalone
// mode; in grid mode user creation belongs to the user server.
private void HandleCreateUser(string module, string[] cmd)
{
    if (!ConfigurationSettings.Standalone)
    {
        MainConsole.Instance.Output("Create user is not available in grid mode, use the user server.");
        return;
    }

    CreateUser(cmd);
}
/// <summary>
/// Execute switch for some of the reset commands
/// </summary>
/// <param name="args"></param>
// Console entry point for "reset user password": only available in
// standalone mode; in grid mode the user-server owns password resets.
protected void HandleResetUserPassword(string module, string[] cmd)
{
    if (!ConfigurationSettings.Standalone)
    {
        MainConsole.Instance.Output("Reset user password is not available in grid mode, use the user-server.");
        return;
    }

    ResetUserPassword(cmd);
}
/// <summary>
/// Turn on some debugging values for OpenSim.
/// </summary>
/// <param name="args"></param>
// Handler for "debug packet <level>" and
// "debug scene <scripting> <collisions> <physics>".
protected void Debug(string module, string[] args)
{
    if (args.Length == 1)
        return;

    switch (args[1])
    {
        case "packet":
            if (args.Length > 2)
            {
                int newDebug;
                if (int.TryParse(args[2], out newDebug))
                {
                    m_sceneManager.SetDebugPacketLevelOnCurrentScene(newDebug);
                    // Only report the new level on a successful parse; the
                    // original printed it unconditionally, so a failed parse
                    // emitted the error followed by "New packet debug: 0".
                    MainConsole.Instance.Output(String.Format("New packet debug: {0}", newDebug));
                }
                else
                {
                    MainConsole.Instance.Output("packet debug should be 0..255");
                }
            }
            break;
        case "scene":
            if (args.Length == 5)
            {
                if (m_sceneManager.CurrentScene == null)
                {
                    MainConsole.Instance.Output("Please use 'change region <regioname>' first");
                }
                else
                {
                    // The console flags are negated before being handed to
                    // SetSceneCoreDebug, and negated back when echoed below —
                    // presumably "true" on the console means "disable";
                    // TODO(review): confirm against SetSceneCoreDebug.
                    bool scriptingOn = !Convert.ToBoolean(args[2]);
                    bool collisionsOn = !Convert.ToBoolean(args[3]);
                    bool physicsOn = !Convert.ToBoolean(args[4]);
                    m_sceneManager.CurrentScene.SetSceneCoreDebug(scriptingOn, collisionsOn, physicsOn);
                    MainConsole.Instance.Output(
                        String.Format(
                            "Set debug scene scripting = {0}, collisions = {1}, physics = {2}",
                            !scriptingOn, !collisionsOn, !physicsOn));
                }
            }
            else
            {
                MainConsole.Instance.Output("debug scene <scripting> <collisions> <physics> (where inside <> is true/false)");
            }
            break;
        default:
            MainConsole.Instance.Output("Unknown debug");
            break;
    }
}
// see BaseOpenSimServer
/// <summary>
/// Many commands list objects for debugging. Some of the types are listed here
/// </summary>
/// <param name="mod"></param>
/// <param name="cmd"></param>
public override void HandleShow(string mod, string[] cmd)
{
// Let the base server handle its own "show" subcommands first.
base.HandleShow(mod, cmd);
// Drop the leading "show" token; what remains selects the report.
// NOTE(review): if cmd is just ["show"], showParams is empty and
// showParams[0] below throws IndexOutOfRangeException — confirm the
// console never dispatches a bare "show" to this handler.
List<string> args = new List<string>(cmd);
args.RemoveAt(0);
string[] showParams = args.ToArray();
switch (showParams[0])
{
case "assets":
MainConsole.Instance.Output("Not implemented.");
break;
case "users":
// "show users full" includes child agents; plain "show users"
// lists only root avatars.
IList agents;
if (showParams.Length > 1 && showParams[1] == "full")
{
agents = m_sceneManager.GetCurrentScenePresences();
}
else
{
agents = m_sceneManager.GetCurrentSceneAvatars();
}
MainConsole.Instance.Output(String.Format("\nAgents connected: {0}\n", agents.Count));
MainConsole.Instance.Output(
String.Format("{0,-16}{1,-16}{2,-37}{3,-11}{4,-16}{5,-30}", "Firstname", "Lastname",
"Agent ID", "Root/Child", "Region", "Position"));
foreach (ScenePresence presence in agents)
{
RegionInfo regionInfo = m_sceneManager.GetRegionInfo(presence.RegionHandle);
string regionName;
// The presence's region may no longer be known to this instance.
if (regionInfo == null)
{
regionName = "Unresolvable";
}
else
{
regionName = regionInfo.RegionName;
}
MainConsole.Instance.Output(
String.Format(
"{0,-16}{1,-16}{2,-37}{3,-11}{4,-16}{5,-30}",
presence.Firstname,
presence.Lastname,
presence.UUID,
presence.IsChildAgent ? "Child" : "Root",
regionName,
presence.AbsolutePosition.ToString()));
}
MainConsole.Instance.Output(String.Empty);
break;
case "connections":
// One line per connected client across every scene.
System.Text.StringBuilder connections = new System.Text.StringBuilder("Connections:\n");
m_sceneManager.ForEachScene(
delegate(Scene scene)
{
scene.ForEachClient(
delegate(IClientAPI client)
{
connections.AppendFormat("{0}: {1} ({2}) from {3} on circuit {4}\n",
scene.RegionInfo.RegionName, client.Name, client.AgentId, client.RemoteEndPoint, client.CircuitCode);
}, false
);
}
);
MainConsole.Instance.Output(connections.ToString());
break;
case "modules":
MainConsole.Instance.Output("The currently loaded shared modules are:");
foreach (IRegionModule module in m_moduleLoader.GetLoadedSharedModules)
{
MainConsole.Instance.Output("Shared Module: " + module.Name);
}
MainConsole.Instance.Output("");
break;
case "regions":
// Name, grid coordinates, and listen port for every scene.
m_sceneManager.ForEachScene(
delegate(Scene scene)
{
MainConsole.Instance.Output(String.Format(
"Region Name: {0}, Region XLoc: {1}, Region YLoc: {2}, Region Port: {3}",
scene.RegionInfo.RegionName,
scene.RegionInfo.RegionLocX,
scene.RegionInfo.RegionLocY,
scene.RegionInfo.InternalEndPoint.Port));
});
break;
case "queues":
Notice(GetQueuesReport());
break;
case "ratings":
// Maturity 1 = MATURE, 2 = ADULT, anything else = PG.
m_sceneManager.ForEachScene(
delegate(Scene scene)
{
string rating = "";
if (scene.RegionInfo.RegionSettings.Maturity == 1)
{
rating = "MATURE";
}
else if (scene.RegionInfo.RegionSettings.Maturity == 2)
{
rating = "ADULT";
}
else
{
rating = "PG";
}
MainConsole.Instance.Output(String.Format(
"Region Name: {0}, Region Rating {1}",
scene.RegionInfo.RegionName,
rating));
});
break;
}
}
/// <summary>
/// print UDP Queue data for each client
/// </summary>
/// <returns></returns>
// Builds a UDP-queue report for every stats-collecting client in every
// scene. Now uses a StringBuilder instead of the original per-client
// string concatenation (O(n^2) allocation); the emitted text is
// byte-for-byte identical.
// NOTE(review): the column-header line is appended directly after the
// client's name with no intervening newline — preserved as-is to keep
// output unchanged, but it looks like a formatting oversight.
private string GetQueuesReport()
{
    System.Text.StringBuilder report = new System.Text.StringBuilder();

    m_sceneManager.ForEachScene(delegate(Scene scene)
    {
        scene.ForEachClient(delegate(IClientAPI client)
        {
            if (client is IStatsCollector)
            {
                report.Append(client.FirstName);
                report.Append(" ");
                report.Append(client.LastName);

                IStatsCollector stats = (IStatsCollector) client;

                report.AppendFormat("{0,7} {1,7} {2,7} {3,7} {4,7} {5,7} {6,7} {7,7} {8,7} {9,7}\n",
                    "Send",
                    "In",
                    "Out",
                    "Resend",
                    "Land",
                    "Wind",
                    "Cloud",
                    "Task",
                    "Texture",
                    "Asset");
                report.Append(stats.Report());
                report.Append("\n");
            }
        });
    });

    return report.ToString();
}
/// <summary>
/// Create a new user
/// </summary>
/// <param name="cmdparams">string array with parameters: firstname, lastname, password, locationX, locationY, email</param>
protected void CreateUser(string[] cmdparams)
{
string firstName;
string lastName;
string password;
string email;
// Default home location; may be overridden by [StandAlone] config.
uint regX = 1000;
uint regY = 1000;
IConfig standalone;
if ((standalone = m_config.Source.Configs["StandAlone"]) != null)
{
regX = (uint)standalone.GetInt("default_location_x", (int)regX);
regY = (uint)standalone.GetInt("default_location_y", (int)regY);
}
// Tokens: "create", "user", [first, last, pass, x, y, email].
// Any argument not supplied on the command line is prompted for.
if (cmdparams.Length < 3)
firstName = MainConsole.Instance.CmdPrompt("First name", "Default");
else firstName = cmdparams[2];
if (cmdparams.Length < 4)
lastName = MainConsole.Instance.CmdPrompt("Last name", "User");
else lastName = cmdparams[3];
if (cmdparams.Length < 5)
password = MainConsole.Instance.PasswdPrompt("Password");
else password = cmdparams[4];
if (cmdparams.Length < 6)
regX = Convert.ToUInt32(MainConsole.Instance.CmdPrompt("Start Region X", regX.ToString()));
else regX = Convert.ToUInt32(cmdparams[5]);
if (cmdparams.Length < 7)
regY = Convert.ToUInt32(MainConsole.Instance.CmdPrompt("Start Region Y", regY.ToString()));
else regY = Convert.ToUInt32(cmdparams[6]);
if (cmdparams.Length < 8)
email = MainConsole.Instance.CmdPrompt("Email", "");
else email = cmdparams[7];
// Refuse to create a duplicate of an existing profile.
if (null == m_commsManager.UserProfileCacheService.GetUserDetails(firstName, lastName))
{
m_commsManager.UserAdminService.AddUser(firstName, lastName, password, email, regX, regY);
}
else
{
MainConsole.Instance.Output(string.Format("A user with the name {0} {1} already exists!", firstName, lastName));
}
}
/// <summary>
/// Reset a user password.
/// </summary>
/// <param name="cmdparams"></param>
// Resets a user's password. Tokens: "reset", "user", "password",
// [first, last, newPassword]; any argument not supplied on the command
// line is prompted for interactively.
private void ResetUserPassword(string[] cmdparams)
{
    string firstName = cmdparams.Length < 4
        ? MainConsole.Instance.CmdPrompt("First name")
        : cmdparams[3];

    string lastName = cmdparams.Length < 5
        ? MainConsole.Instance.CmdPrompt("Last name")
        : cmdparams[4];

    string newPassword = cmdparams.Length < 6
        ? MainConsole.Instance.PasswdPrompt("New password")
        : cmdparams[5];

    m_commsManager.UserAdminService.ResetUserPassword(firstName, lastName, newPassword);
}
/// <summary>
/// Use XML2 format to serialize data to a file
/// </summary>
/// <param name="module"></param>
/// <param name="cmdparams"></param>
// Handler for "save prims xml2 [<prim name> <file name>]".
// Tokens: "save"(0), "prims"(1), "xml2"(2), [name(3), file(4)] — so both
// optional arguments present means exactly 5 tokens. The original guard
// was Length > 5, which could never match the documented usage and
// silently ignored the user-supplied name and file.
protected void SavePrimsXml2(string module, string[] cmdparams)
{
    if (cmdparams.Length > 4)
    {
        m_sceneManager.SaveNamedPrimsToXml2(cmdparams[3], cmdparams[4]);
    }
    else
    {
        m_sceneManager.SaveNamedPrimsToXml2("Primitive", DEFAULT_PRIM_BACKUP_FILENAME);
    }
}
/// <summary>
/// Use XML format to serialize data to a file
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens; an explicit target filename is expected at index 2.</param>
protected void SaveXml(string module, string[] cmdparams)
{
MainConsole.Instance.Output("PLEASE NOTE, save-xml is DEPRECATED and may be REMOVED soon. If you are using this and there is some reason you can't use save-xml2, please file a mantis detailing the reason.");
// Bug fix: the previous guard was "Length > 0", which is always true (the
// command word itself is a token) and then indexed cmdparams[2] — an
// IndexOutOfRangeException whenever no filename was supplied. Require the
// filename token to actually be present, matching the sibling SaveXml2.
if (cmdparams.Length > 2)
{
m_sceneManager.SaveCurrentSceneToXml(cmdparams[2]);
}
else
{
m_sceneManager.SaveCurrentSceneToXml(DEFAULT_PRIM_BACKUP_FILENAME);
}
}
/// <summary>
/// Loads data and region objects from XML format.
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens: optional "-newUID" flag at index 3
/// and an optional X/Y/Z load offset at indices 4-6.</param>
protected void LoadXml(string module, string[] cmdparams)
{
MainConsole.Instance.Output("PLEASE NOTE, load-xml is DEPRECATED and may be REMOVED soon. If you are using this and there is some reason you can't use load-xml2, please file a mantis detailing the reason.");
Vector3 loadOffset = new Vector3(0, 0, 0);
if (cmdparams.Length > 2)
{
bool generateNewIDS = false;
if (cmdparams.Length > 3)
{
if (cmdparams[3] == "-newUID")
{
generateNewIDS = true;
}
if (cmdparams.Length > 4)
{
// Offsets are parsed with the project-wide Culture.NumberFormatInfo
// (presumably invariant so "1.5" parses regardless of server locale — confirm).
loadOffset.X = (float)Convert.ToDecimal(cmdparams[4], Culture.NumberFormatInfo);
if (cmdparams.Length > 5)
{
loadOffset.Y = (float)Convert.ToDecimal(cmdparams[5], Culture.NumberFormatInfo);
}
if (cmdparams.Length > 6)
{
loadOffset.Z = (float)Convert.ToDecimal(cmdparams[6], Culture.NumberFormatInfo);
}
MainConsole.Instance.Output(String.Format("loadOffsets <X,Y,Z> = <{0},{1},{2}>",loadOffset.X,loadOffset.Y,loadOffset.Z));
}
}
// NOTE(review): cmdparams[0] is normally the command word, not a filename;
// the sibling LoadXml2 reads the filename from cmdparams[2]. Left unchanged
// pending confirmation of the load-xml console tokenization.
try
{
m_sceneManager.LoadCurrentSceneFromXml(cmdparams[0], generateNewIDS, loadOffset);
}
catch (FileNotFoundException)
{
// Robustness fix: a missing file previously escaped as an unhandled
// exception in this branch, while the default-file branch below caught it.
MainConsole.Instance.Output("Specified xml not found. Usage: load-xml <filename>");
}
}
else
{
try
{
m_sceneManager.LoadCurrentSceneFromXml(DEFAULT_PRIM_BACKUP_FILENAME, false, loadOffset);
}
catch (FileNotFoundException)
{
MainConsole.Instance.Output("Default xml not found. Usage: load-xml <filename>");
}
}
}
/// <summary>
/// Serialize region data to XML2Format
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens; optional target filename at index 2.</param>
protected void SaveXml2(string module, string[] cmdparams)
{
// Use the explicit filename when one was supplied, otherwise the default backup file.
string fileName = cmdparams.Length > 2 ? cmdparams[2] : DEFAULT_PRIM_BACKUP_FILENAME;
m_sceneManager.SaveCurrentSceneToXml2(fileName);
}
/// <summary>
/// Load region data from Xml2Format
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens; optional source filename at index 2.</param>
protected void LoadXml2(string module, string[] cmdparams)
{
bool explicitFile = cmdparams.Length > 2;
string fileName = explicitFile ? cmdparams[2] : DEFAULT_PRIM_BACKUP_FILENAME;
try
{
m_sceneManager.LoadCurrentSceneFromXml2(fileName);
}
catch (FileNotFoundException)
{
// Same usage hint either way; only the leading word of the message differs.
MainConsole.Instance.Output(explicitFile
? "Specified xml not found. Usage: load xml2 <filename>"
: "Default xml not found. Usage: load xml2 <filename>");
}
}
/// <summary>
/// Load a whole region from an opensimulator archive.
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens, passed straight through to the archiver.</param>
protected void LoadOar(string module, string[] cmdparams)
{
try
{
m_sceneManager.LoadArchiveToCurrentScene(cmdparams);
}
catch (Exception ex)
{
// Archive failures are reported on the console rather than crashing the command loop.
MainConsole.Instance.Output(ex.Message);
}
}
/// <summary>
/// Save a region to a file, including all the assets needed to restore it.
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens, passed straight through to the archiver.</param>
protected void SaveOar(string module, string[] cmdparams)
{
m_sceneManager.SaveCurrentSceneToArchive(cmdparams);
}
/// <summary>
/// Joins the command tokens from <paramref name="pos"/> onward into a single
/// space-separated string.
/// </summary>
/// <param name="commandParams">All command tokens.</param>
/// <param name="pos">Index of the first token to include.</param>
/// <returns>The joined tokens, or an empty string when pos is at or past the end.</returns>
private static string CombineParams(string[] commandParams, int pos)
{
if (pos >= commandParams.Length)
return String.Empty;
// String.Join replaces the original O(n^2) repeated concatenation and
// makes the trailing-space TrimEnd pass unnecessary.
return String.Join(" ", commandParams, pos, commandParams.Length - pos);
}
/// <summary>
/// Kill an object given its UUID.
/// </summary>
/// <param name="module">Console module name (unused here).</param>
/// <param name="cmdparams">Command tokens; the UUID is expected at index 2.</param>
protected void KillUUID(string module, string[] cmdparams)
{
if (cmdparams.Length > 2)
{
UUID id = UUID.Zero;
SceneObjectGroup grp = null;
Scene sc = null;
if (!UUID.TryParse(cmdparams[2], out id))
{
MainConsole.Instance.Output("[KillUUID]: Error bad UUID format!");
return;
}
// Search every scene for a part with this UUID, remembering its group and scene.
m_sceneManager.ForEachScene(
delegate(Scene scene)
{
SceneObjectPart part = scene.GetSceneObjectPart(id);
if (part == null)
return;
grp = part.ParentGroup;
sc = scene;
});
if (grp == null)
{
MainConsole.Instance.Output(String.Format("[KillUUID]: Given UUID {0} not found!", id));
}
else
{
MainConsole.Instance.Output(String.Format("[KillUUID]: Found UUID {0} in scene {1}", id, sc.RegionInfo.RegionName));
try
{
sc.DeleteSceneObject(grp, false);
}
catch (Exception e)
{
// Bug fix: this used ErrorFormat with the exception text concatenated
// into the format string; any '{' or '}' in the exception message would
// itself throw a FormatException. Error() logs the string verbatim.
m_log.Error("[KillUUID]: Error while removing objects from scene: " + e);
}
}
}
else
{
MainConsole.Instance.Output("[KillUUID]: Usage: kill uuid <UUID>");
}
}
#endregion
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Document = Lucene.Net.Documents.Document;
using FieldSelector = Lucene.Net.Documents.FieldSelector;
using Directory = Lucene.Net.Store.Directory;
namespace Lucene.Net.Index
{
/// <summary>A <code>FilterIndexReader</code> contains another IndexReader, which it
/// uses as its basic source of data, possibly transforming the data along the
/// way or providing additional functionality. The class
/// <code>FilterIndexReader</code> itself simply implements all abstract methods
/// of <code>IndexReader</code> with versions that pass all requests to the
/// contained index reader. Subclasses of <code>FilterIndexReader</code> may
/// further override some of these methods and may also provide additional
/// methods and fields.
/// </summary>
public class FilterIndexReader : IndexReader
{
/// <summary>Base class for filtering {@link TermDocs} implementations. </summary>
public class FilterTermDocs : TermDocs
{
// The wrapped TermDocs. ("in" is a C# keyword, hence the "_Renamed" suffix
// introduced by the Java-to-C# port.)
protected internal TermDocs in_Renamed;
public FilterTermDocs(TermDocs in_Renamed)
{
this.in_Renamed = in_Renamed;
}
// Every member below simply forwards to the wrapped instance; subclasses
// override only the operations they want to transform.
public virtual void Seek(Term term)
{
in_Renamed.Seek(term);
}
public virtual void Seek(TermEnum termEnum)
{
in_Renamed.Seek(termEnum);
}
public virtual int Doc()
{
return in_Renamed.Doc();
}
public virtual int Freq()
{
return in_Renamed.Freq();
}
public virtual bool Next()
{
return in_Renamed.Next();
}
public virtual int Read(int[] docs, int[] freqs)
{
return in_Renamed.Read(docs, freqs);
}
public virtual bool SkipTo(int i)
{
return in_Renamed.SkipTo(i);
}
public virtual void Close()
{
in_Renamed.Close();
}
}
/// <summary>Base class for filtering {@link TermPositions} implementations. </summary>
public class FilterTermPositions : FilterTermDocs, TermPositions
{
public FilterTermPositions(TermPositions in_Renamed) : base(in_Renamed)
{
}
// The base class stores the wrapped object as a TermDocs, so each
// position-specific call downcasts it back to TermPositions. The cast is
// safe: the constructor only accepts a TermPositions.
public virtual int NextPosition()
{
return ((TermPositions) this.in_Renamed).NextPosition();
}
public virtual int GetPayloadLength()
{
return ((TermPositions) this.in_Renamed).GetPayloadLength();
}
public virtual byte[] GetPayload(byte[] data, int offset)
{
return ((TermPositions) this.in_Renamed).GetPayload(data, offset);
}
public virtual bool IsPayloadAvailable()
{
return ((TermPositions) this.in_Renamed).IsPayloadAvailable();
}
}
/// <summary>Base class for filtering {@link TermEnum} implementations. </summary>
public class FilterTermEnum : TermEnum
{
// The wrapped TermEnum that all calls are forwarded to.
protected internal TermEnum in_Renamed;
public FilterTermEnum(TermEnum in_Renamed)
{
this.in_Renamed = in_Renamed;
}
public override bool Next()
{
return in_Renamed.Next();
}
public override Term Term()
{
return in_Renamed.Term();
}
public override int DocFreq()
{
return in_Renamed.DocFreq();
}
public override void Close()
{
in_Renamed.Close();
}
}
// The wrapped reader that every request is delegated to.
protected internal IndexReader in_Renamed;
/// <summary> <p>Construct a FilterIndexReader based on the specified base reader.
/// Directory locking for delete, undeleteAll, and setNorm operations is
/// left to the base reader.</p>
/// <p>Note that base reader is closed if this FilterIndexReader is closed.</p>
/// </summary>
/// <param name="in">specified base reader.
/// </param>
public FilterIndexReader(IndexReader in_Renamed) : base()
{
this.in_Renamed = in_Renamed;
}
public override Directory Directory()
{
return in_Renamed.Directory();
}
public override TermFreqVector[] GetTermFreqVectors(int docNumber)
{
EnsureOpen();
return in_Renamed.GetTermFreqVectors(docNumber);
}
public override TermFreqVector GetTermFreqVector(int docNumber, System.String field)
{
EnsureOpen();
return in_Renamed.GetTermFreqVector(docNumber, field);
}
public override void GetTermFreqVector(int docNumber, System.String field, TermVectorMapper mapper)
{
EnsureOpen();
in_Renamed.GetTermFreqVector(docNumber, field, mapper);
}
public override void GetTermFreqVector(int docNumber, TermVectorMapper mapper)
{
EnsureOpen();
in_Renamed.GetTermFreqVector(docNumber, mapper);
}
public override int NumDocs()
{
// Don't call ensureOpen() here (it could affect performance)
return in_Renamed.NumDocs();
}
public override int MaxDoc()
{
// Don't call ensureOpen() here (it could affect performance)
return in_Renamed.MaxDoc();
}
public override Document Document(int n, FieldSelector fieldSelector)
{
EnsureOpen();
return in_Renamed.Document(n, fieldSelector);
}
public override bool IsDeleted(int n)
{
// Don't call ensureOpen() here (it could affect performance)
return in_Renamed.IsDeleted(n);
}
public override bool HasDeletions()
{
// Don't call ensureOpen() here (it could affect performance)
return in_Renamed.HasDeletions();
}
// The Do* hooks route through the wrapped reader's PUBLIC counterparts
// (UndeleteAll, SetNorm, DeleteDocument, ...) rather than its protected
// hooks, so the inner reader's own locking/bookkeeping still runs.
protected internal override void DoUndeleteAll()
{
in_Renamed.UndeleteAll();
}
public override bool HasNorms(System.String field)
{
EnsureOpen();
return in_Renamed.HasNorms(field);
}
public override byte[] Norms(System.String f)
{
EnsureOpen();
return in_Renamed.Norms(f);
}
public override void Norms(System.String f, byte[] bytes, int offset)
{
EnsureOpen();
in_Renamed.Norms(f, bytes, offset);
}
protected internal override void DoSetNorm(int d, System.String f, byte b)
{
in_Renamed.SetNorm(d, f, b);
}
public override TermEnum Terms()
{
EnsureOpen();
return in_Renamed.Terms();
}
public override TermEnum Terms(Term t)
{
EnsureOpen();
return in_Renamed.Terms(t);
}
public override int DocFreq(Term t)
{
EnsureOpen();
return in_Renamed.DocFreq(t);
}
public override TermDocs TermDocs()
{
EnsureOpen();
return in_Renamed.TermDocs();
}
public override TermPositions TermPositions()
{
EnsureOpen();
return in_Renamed.TermPositions();
}
protected internal override void DoDelete(int n)
{
in_Renamed.DeleteDocument(n);
}
protected internal override void DoCommit()
{
in_Renamed.Commit();
}
// Closing this reader closes the wrapped reader too (see class summary).
protected internal override void DoClose()
{
in_Renamed.Close();
}
public override System.Collections.Generic.ICollection<string> GetFieldNames(IndexReader.FieldOption fieldNames)
{
EnsureOpen();
return in_Renamed.GetFieldNames(fieldNames);
}
public override long GetVersion()
{
EnsureOpen();
return in_Renamed.GetVersion();
}
public override bool IsCurrent()
{
EnsureOpen();
return in_Renamed.IsCurrent();
}
public override bool IsOptimized()
{
EnsureOpen();
return in_Renamed.IsOptimized();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Xml;
using NMock2;
using NUnit.Framework;
using SIL.IO;
using SIL.Lift.Merging;
using SIL.Lift.Parsing;
using SIL.Lift.Tests.Properties;
using SIL.Lift.Validation;
using SIL.TestUtilities;
using Has = NMock2.Has;
using Is = NMock2.Is;
namespace SIL.Lift.Tests.Parsing
{
[TestFixture]
public class ParserTests
{
private ILexiconMerger<DummyBase, Dummy, Dummy, Dummy> _merger;
private LiftParser<DummyBase, Dummy, Dummy, Dummy> _parser;
private XmlDocument _doc;
public StringBuilder _results;
private Mockery _mocks;
private List<LiftParser<DummyBase, Dummy, Dummy, Dummy>.ErrorArgs> _parsingWarnings;
[SetUp]
public void Setup()
{
// Fresh DOM, mockery, merger mock and parser for every test; parser warnings
// are captured in _parsingWarnings via the ParsingWarning event.
//_parsingErrors = new List<Exception>();
_doc = new XmlDocument();
//_doc.DocumentElement.SetAttribute("xmlns:flex", "http://fieldworks.sil.org");
_mocks = new Mockery();
_merger = _mocks.NewMock<ILexiconMerger<DummyBase, Dummy, Dummy, Dummy>>();
_parser = new LiftParser<DummyBase, Dummy, Dummy, Dummy>(_merger);
_parsingWarnings = new List<LiftParser<DummyBase, Dummy, Dummy, Dummy>.ErrorArgs>();
_parser.ParsingWarning += OnParsingWarning;
}
// Event sink: records each parser warning so tests can assert on the count.
void OnParsingWarning(object sender, LiftParser<DummyBase, Dummy, Dummy, Dummy>.ErrorArgs e)
{
_parsingWarnings.Add(e);
}
[TearDown]
public void TearDown()
{
// Intentionally empty: no per-test cleanup is needed; mock expectations
// are verified inside each test body.
}
[Test]
public void ReadLiftFile_SuppliedChangeDetector_SkipsUnchangedEntries()
{
// With a change report installed, an unchanged entry must not reach the
// merger (no expectations set), while a changed one must be merged.
_parser.ChangeReport = new DummyChangeReport();
_doc.LoadXml("<entry id='old'/>");
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
ExpectGetOrMakeEntry(new ExtensibleMatcher("changed"));
ExpectFinishEntry();
_doc.LoadXml("<entry id='changed'/>");
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
[Test]
public void ReadLiftFile_OldVersion_ThrowsLiftFormatException()
{
// A lift file declaring version 0.0 must be rejected with LiftFormatException.
using (TempFile f = new TempFile(string.Format("<lift version='{0}'></lift>", /*Validator.LiftVersion*/ "0.0")))
{
Assert.Throws<LiftFormatException>(() =>_parser.ReadLiftFile(f.Path));
}
}
[Test]
public void ReadLiftFile_CurrentVersion_Happy()
{
// A file declaring the current LIFT version must parse without throwing.
using (TempFile f = new TempFile(string.Format("<lift version='{0}'></lift>", Validator.LiftVersion)))
{
_parser.ReadLiftFile(f.Path);
}
}
[Test]
public void MultipleFormsInOneLangAreCombined()
{
// Two forms sharing lang='x' are joined with "; "; other languages unaffected.
_doc.LoadXml("<foobar><form lang='x'><text>one</text></form><form lang='z'><text>zzzz</text></form><form lang='x'><text>two</text></form></foobar>");
LiftMultiText t = _parser.ReadMultiText(_doc.FirstChild);
Assert.AreEqual("one; two", t["x"].Text);
Assert.AreEqual("zzzz", t["z"].Text);
}
[Test]
public void SpanContentsIncludedInForm()
{
// Span text is flattened into the form's text, and the span's class,
// character index and length are recorded on the parsed multitext.
_doc.LoadXml("<foobar><form lang='x'><text>one <span class='emphasis'>inner text</span> node</text></form></foobar>");
LiftMultiText t = _parser.ReadMultiText(_doc.FirstChild);
Assert.AreEqual("one inner text node", t["x"].Text);
Assert.AreEqual(1, t["x"].Spans.Count);
Assert.AreEqual("emphasis", t["x"].Spans[0].Class);
Assert.AreEqual(null, t["x"].Spans[0].Lang);
Assert.AreEqual(null, t["x"].Spans[0].LinkURL);
Assert.AreEqual(4, t["x"].Spans[0].Index);
Assert.AreEqual(10, t["x"].Spans[0].Length);
}
[Test]
public void MultiTextWithTwoInternalSpans()
{
// Two sibling spans: each keeps its own class/lang/href and offsets.
// Note that isolated whitespace tends to be swallowed up and ignored when reading XML files.
// Thus, a single space between two span elements must be represented by a character entity.
_doc.LoadXml("<foobar><form lang='x'><text>one <span class='emphasis'>inner text</span>&#32;<span class='vernacular' lang='y' href='this is a test'>node</span></text></form></foobar>");
LiftMultiText t = _parser.ReadMultiText(_doc.FirstChild);
Assert.AreEqual("one inner text node", t["x"].Text);
Assert.AreEqual(2, t["x"].Spans.Count);
Assert.AreEqual("emphasis", t["x"].Spans[0].Class);
Assert.AreEqual(null, t["x"].Spans[0].Lang);
Assert.AreEqual(null, t["x"].Spans[0].LinkURL);
Assert.AreEqual(4, t["x"].Spans[0].Index);
Assert.AreEqual(10, t["x"].Spans[0].Length);
Assert.AreEqual("vernacular", t["x"].Spans[1].Class);
Assert.AreEqual("y", t["x"].Spans[1].Lang);
Assert.AreEqual("this is a test", t["x"].Spans[1].LinkURL);
Assert.AreEqual(15, t["x"].Spans[1].Index);
Assert.AreEqual(4, t["x"].Spans[1].Length);
}
[Test]
public void MultiTextWithNestedSpan()
{
// A span inside a span: the inner one is attached to the outer span's Spans
// list (not to the form), with its index relative to the flattened text.
_doc.LoadXml("<foobar><form lang='x'><text>one <span class='emphasis'>inner <span class='vernacular' lang='y'>text</span></span> node</text></form></foobar>");
var t = _parser.ReadMultiText(_doc.FirstChild);
var tx = t["x"];
Assert.IsNotNull(tx);
Assert.AreEqual("one inner text node", tx.Text);
Assert.AreEqual(1, tx.Spans.Count);
var span = tx.Spans[0];
Assert.IsNotNull(span);
Assert.AreEqual("emphasis", span.Class);
Assert.AreEqual(null, span.Lang);
Assert.AreEqual(null, span.LinkURL);
Assert.AreEqual(4, span.Index);
Assert.AreEqual(10, span.Length);
Assert.AreEqual(1, span.Spans.Count);
var subspan = span.Spans[0];
Assert.IsNotNull(subspan);
Assert.AreEqual("vernacular", subspan.Class);
Assert.AreEqual("y", subspan.Lang);
Assert.AreEqual(null, subspan.LinkURL);
Assert.AreEqual(10, subspan.Index);
Assert.AreEqual(4, subspan.Length);
Assert.AreEqual(0, subspan.Spans.Count);
}
[Test]
public void FirstValueOfSimpleMultiText()
{
// Build a two-language multitext and confirm FirstValue yields the first
// key/value pair that was added.
var t = new LiftMultiText();
var first = new LiftString();
first.Text = "1";
t.Add("x", first);
var second = new LiftString();
second.Text = "2";
t.Add("y", second);
Assert.AreEqual("x", t.FirstValue.Key);
Assert.AreEqual("1", t.FirstValue.Value.Text);
}
[Test]
public void EmptyLiftOk()
{
// An empty lift element must parse without any GetOrMakeEntry calls.
SimpleCheckGetOrMakeEntry_InsertVersion("<lift V />", 0);
}
[Test]
public void EntryMissingIdNotFatal()
{
// An entry with no id attribute still produces exactly one GetOrMakeEntry call.
SimpleCheckGetOrMakeEntry_InsertVersion("<lift V><entry/></lift>", 1);
}
[Test]
public void EmptyEntriesOk()
{
// Two empty entries => two GetOrMakeEntry calls, no errors.
SimpleCheckGetOrMakeEntry_InsertVersion("<lift V><entry/><entry/></lift>", 2);
}
[Test]
public void NotifyOfDeletedEntry()
{
// An entry carrying dateDeleted must be routed to EntryWasDeleted, not merged.
DateTime now = DateTime.UtcNow;
string when = now.ToString(Extensible.LiftTimeFormatWithUTC);
ExpectEntryWasDeleted(); //todo expect more!
_doc.LoadXml(String.Format("<entry dateDeleted='{0}'/>", when));
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
/// <summary>
/// Loads the given LIFT content (after version substitution) from a temp file
/// and expects the merger's GetOrMakeEntry to be called exactly
/// <paramref name="times"/> times.
/// </summary>
private void SimpleCheckGetOrMakeEntry_InsertVersion(string content, int times)
{
content = InsertVersion(content);
_doc.LoadXml(content);
using (_mocks.Ordered)
{
Expect.Exactly(times).On(_merger)
.Method("GetOrMakeEntry")
.WithAnyArguments()
.Will(Return.Value(null));
}
// Bug fix: this previously wrote string.Format(content) with no arguments,
// which throws FormatException as soon as the test content contains a
// literal '{' or '}'. The content is already final here — write it as-is.
using (TempFile f = new TempFile(content))
{
_parser.ReadLiftFile(f.Path);
}
_mocks.VerifyAllExpectationsHaveBeenMet();
}
// Substitutes the real LIFT version attribute for the "<lift V" placeholder
// used by the inline test data.
private static string InsertVersion(string content)
{
string versionedOpenTag = string.Format("<lift version='{0}' ", Validator.LiftVersion);
return content.Replace("<lift V", versionedOpenTag);
}
[Test]
public void EntryWithGuid()
{
// The guid attribute alone is enough to identify the entry to the merger.
Guid g = Guid.NewGuid();
// ExpectMergeInLexemeForm(Is.Anything);
ParseEntryAndCheck(string.Format("<entry guid=\"{0}\" />", g),
new ExtensibleMatcher(g));
}
[Test]
public void EntryWithId()
{
// The id attribute is passed through verbatim, punctuation included.
ParseEntryAndCheck("<entry id='-foo-' />", new ExtensibleMatcher("-foo-"));
}
// Parses one entry element and verifies that GetOrMakeEntry was called with
// an Extensible matching the given matcher, followed by FinishEntry.
private void ParseEntryAndCheck(string content, Matcher extensibleMatcher)
{
ExpectGetOrMakeEntry(extensibleMatcher);
ExpectFinishEntry();
_doc.LoadXml(content);
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
// Parses one entry element and verifies all previously queued expectations.
// Callers are responsible for setting up their own GetOrMakeEntry expectation.
private void ParseEntryAndCheck(string content)
{
ExpectFinishEntry();
_doc.LoadXml(content);
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
// Expects exactly one GetOrMakeEntry whose Extensible matches, with 0 as the
// second argument; the merger returns a fresh Dummy entry.
private void ExpectGetOrMakeEntry(Matcher extensibleMatcher)
{
Expect.Exactly(1).On(_merger)
.Method("GetOrMakeEntry")
//.With(Is.Anything)
.With(extensibleMatcher, Is.EqualTo(0))
.Will(Return.Value(new Dummy()));
}
// Shorthand: expect a single entry creation with no constraints on arguments.
private void ExpectEmptyEntry()
{
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
}
// Expects exactly one GetOrMakeEntry call with any arguments, returning a Dummy.
private void ExpectGetOrMakeEntry()
{
Expect.Exactly(1).On(_merger)
.Method("GetOrMakeEntry")
.Will(Return.Value(new Dummy()));
}
// Expects exactly one GetOrMakeSense call, returning a Dummy sense.
private void ExpectGetOrMakeSense()
{
Expect.Exactly(1).On(_merger)
.Method("GetOrMakeSense")
.Will(Return.Value(new Dummy()));
}
// Expects MergeInGrammaticalInfo with the given part-of-speech value and a
// trait list satisfying the supplied matcher.
private void ExpectMergeInGrammi(string value, Matcher traitListMatcher)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInGrammaticalInfo")
.With(Is.Anything, Is.EqualTo(value), traitListMatcher);
}
// Expects exactly one GetOrMakeExample call, returning a Dummy example.
private void ExpectGetOrMakeExample()
{
Expect.Exactly(1).On(_merger)
.Method("GetOrMakeExample")
.Will(Return.Value(new Dummy()));
}
// Expects MergeInLexemeForm with a multitext satisfying the supplied matcher.
private void ExpectMergeInLexemeForm(Matcher matcher)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInLexemeForm")
.With(Is.Anything,matcher);
}
//private void ExpectMergeInCitationForm(Matcher matcher)
//{
// Expect.Exactly(1).On(_merger)
// .Method("MergeInCitationForm")
// .With(Is.Anything, matcher);
//}
// Expects the parser to signal entry completion exactly once.
private void ExpectFinishEntry()
{
Expect.Exactly(1).On(_merger)
.Method("FinishEntry");
}
//private void ExpectMergeGloss()
//{
// Expect.Exactly(1).On(_merger)
// .Method("MergeInGloss");
//}
//private void ExpectMergeDefinition()
//{
// Expect.Exactly(1).On(_merger)
// .Method("MergeInDefinition");
//}
// Expects MergeInField with matchers for the field tag, creation/modification
// dates, multitext content and trait list, in that order.
private void ExpectMergeInField(Matcher tagMatcher, Matcher dateCreatedMatcher, Matcher dateModifiedMatcher, Matcher multiTextMatcher, Matcher traitsMatcher)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInField").With(Is.Anything, tagMatcher,
dateCreatedMatcher, dateModifiedMatcher, multiTextMatcher, traitsMatcher);
// .Method("MergeInField").With(matchers);
}
// private void ExpectMergeInField(params object[] matchers)
// {
// Expect.Exactly(1).On(_merger)
// .Method("MergeInField").With(Is.Anything, Is.Anything, Is.Anything, Is.Anything, Is.Anything);
// // .Method("MergeInField").With(matchers);
// }
// Expects MergeInTrait with a trait satisfying the supplied matcher.
private void ExpectMergeInTrait(Matcher traitMatcher)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInTrait")
.With(Is.Anything, traitMatcher);
}
// Expects MergeInRelation with the exact relation type and target id.
private void ExpectMergeInRelation(string relationType, string targetId)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInRelation")
.With(Is.Anything, Is.EqualTo(relationType), Is.EqualTo(targetId), Is.Anything);
}
// Expects MergeInPicture for this href with a null caption.
private void ExpectMergeInPicture(string href)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInPicture")
.With(Is.Anything, Is.EqualTo(href), Is.Null);
}
// Expects MergeInPicture for this href with a non-null caption.
private void ExpectMergeInPictureWithCaption(string href)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInPicture")
.With(Is.Anything, Is.EqualTo(href), Is.NotNull);
}
// Expects MergeInMedia for this href; the caption is compared via its
// ToString form (the "lang=text|" serialization used throughout these tests).
private void ExpectMergeInMediaWithCaption(string href, string caption)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInMedia")
.With(Is.Anything, Is.EqualTo(href), Has.ToString(Is.EqualTo(caption)));
}
// Expects the merger to be told exactly once that an entry was deleted.
private void ExpectEntryWasDeleted()
{
Expect.Exactly(1).On(_merger)
.Method("EntryWasDeleted");
//todo expect more!
}
// Expects MergeInNote whose content serializes to the given "lang=text|" value.
private void ExpectMergeInNote(string value)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInNote")
.With(Is.Anything, Is.Anything/*todo type*/, Has.ToString(Is.EqualTo(value)), Is.Anything);
}
// Expects MergeInNote with the exact note type; content is unconstrained.
private void ExpectTypedMergeInNote(string type)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInNote")
.With(Is.Anything, Is.EqualTo(type), Is.Anything, Is.Anything);
}
[Test]
public void EntryWithoutId()
{
// An entry with neither id nor guid still yields a (blank) Extensible.
// ExpectMergeInLexemeForm(Is.Anything);
ParseEntryAndCheck("<entry/>", new ExtensibleMatcher());
}
[Test]
public void EntryWithReadableIdPlusGuid()
{
// Both id and guid supplied: the Extensible must carry both.
// ExpectMergeInLexemeForm(Is.Anything);
Guid g = Guid.NewGuid();
// string s = String.Format("<lift xmlns:flex='http://fieldworks.sil.org'><entry id='-foo' flex:guid='{0}'/></lift>", g);
//
// _doc.LoadXml(s);
// _parser.ReadFile(_doc);
//
// string s = String.Format("<entry xmlns:flex='http://fieldworks.sil.org' id='-foo' flex:guid='{0}'/>", g);
string s = String.Format("<entry id='-foo' guid='{0}'/>", g);
ParseEntryAndCheck(s, new ExtensibleMatcher("-foo", g));
}
[Test]
public void FormMissingLangGeneratesNonFatalError()
{
// A form with no lang attribute raises a warning (via ParsingWarning) but
// parsing continues and the entry is still created.
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ParseEntryAndCheck("<entry><lexical-unit><form/></lexical-unit></entry>");
Assert.AreEqual(1, _parsingWarnings.Count);
}
[Test]
public void EmptyFormOk()
{
// A form with a lang but no text still triggers MergeInLexemeForm, in order.
using (_mocks.Ordered)
{
ExpectGetOrMakeEntry(/*";;;"*/);
ExpectMergeInLexemeForm(Is.Anything);
}
ParseEntryAndCheck("<entry><lexical-unit><form lang='x'/></lexical-unit></entry>");
}
// [Test]
// public void SpacesTrimmedFromLexicalUnit()
// {
// ExpectGetOrMakeEntry();
// ExpectMultiTextMergeIn("LexemeForm", Has.Property("Count", Is.EqualTo(2)));
// // ExpectMergeInCitationForm(Is.Anything);
// string content ="<entry><lexical-unit><form lang='x'><text> hello </text></form></lexical-unit></entry>";
// ExpectFinishEntry();
// _doc.LoadXml(content);
// Dummy d = _parser.ReadEntry(_doc.FirstChild);
// d
// }
[Test]
public void EntryWithLexicalUnit()
{
// Two forms in two languages arrive in one MergeInLexemeForm call (Count == 2).
ExpectGetOrMakeEntry();
ExpectMultiTextMergeIn("LexemeForm", Has.Property("Count", Is.EqualTo(2)));
// ExpectMergeInCitationForm(Is.Anything);
ParseEntryAndCheck("<entry><lexical-unit><form lang='x'><text>hello</text></form><form lang='y'><text>bye</text></form></lexical-unit></entry>");
// ParseEntryAndCheck("<entry><lexical-unit><form lang='x'><text>hello</text></form><form lang='y'>bye</form></lexical-unit></entry>", "GetOrMakeEntry(;;;)MergeInLexemeForm(m,x=hello|y=bye|)");
}
[Test]
public void EntryWithCitationForm()
{
// A citation element routes to MergeInCitationForm with both language forms.
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ExpectMultiTextMergeIn("CitationForm", Has.Property("Count", Is.EqualTo(2)));
ParseEntryAndCheck("<entry><citation><form lang='x'><text>hello</text></form><form lang='y'><text>bye</text></form></citation></entry>");
}
[Test]
public void EntryWithPronunciation()
{
// Pronunciation form content is serialized as "lang=text|" for comparison.
ExpectGetOrMakeEntry();
ExpectMergeInPronunciation("en__IPA=ai|");
ParseEntryAndCheck("<entry><pronunciation><form lang='en__IPA'><text>ai</text></form></pronunciation></entry>");
}
[Test]
public void EntryWithPronunciationWithFields()
{
// A field nested inside pronunciation: the pronunciation itself is empty and
// the field arrives as a separate MergeInField with one multitext form.
ExpectGetOrMakeEntry();
ExpectMergeInPronunciation("");
ExpectMergeInField(
Is.EqualTo("cvPattern"),
Is.EqualTo(default(DateTime)),
Is.EqualTo(default(DateTime)),
Has.Property("Count", Is.EqualTo(1)),//multitext
Has.Property("Count", Is.EqualTo(0))//traits
);
ParseEntryAndCheck(@"<entry><pronunciation>
<field type='cvPattern'>
<form lang='en'>
<text>acvpattern</text>
</form>
</field>
</pronunciation></entry>");
}
[Test]
public void EntryWithPronunciationWithMedia()
{
// A media element inside pronunciation maps to MergeInMedia with its label
// serialized as the caption.
ExpectGetOrMakeEntry();
ExpectMergeInPronunciation("en__IPA=ai|");
ExpectMergeInMediaWithCaption("blah.mp3", "en=This is a test|");
ParseEntryAndCheck("<entry><pronunciation><form lang='en__IPA'><text>ai</text></form><media href='blah.mp3'><label><form lang='en'><text>This is a test</text></form></label></media></pronunciation></entry>");
}
// Expects MergeInPronunciation whose content serializes to the given value;
// returns a Dummy so media/field children can be merged into it.
private void ExpectMergeInPronunciation(string value)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInPronunciation")
.With(Is.Anything, Has.ToString(Is.EqualTo(value)), Is.Anything)
.Will(Return.Value(new Dummy()));
}
[Test]
public void EntryWithVariant()
{
// A variant element maps to MergeInVariant with its "lang=text|" serialization.
ExpectGetOrMakeEntry();
ExpectMergeInVariant("en=-d|");
ParseEntryAndCheck("<entry><variant><form lang='en'><text>-d</text></form></variant></entry>");
}
// Expects MergeInVariant whose content serializes to the given value;
// returns a Dummy so children can be merged into it.
private void ExpectMergeInVariant(string value)
{
Expect.Exactly(1).On(_merger)
.Method("MergeInVariant")
.With(Is.Anything, Has.ToString(Is.EqualTo(value)), Is.Anything)
.Will(Return.Value(new Dummy()));
}
// private void ExpectEmptyMultiTextMergeIn(string MultiTextPropertyName)
//{
// Expect.Exactly(1).On(_merger)
// .Method("MergeIn" + MultiTextPropertyName)
// .With(Is.Anything, Has.Property("Count",Is.EqualTo(0)));
//}
// Expects MergeIn<property> (e.g. MergeInGloss) whose multitext serializes to
// the given "lang=text|" value.
private void ExpectValueOfMergeIn(string MultiTextPropertyName, string value)
{
Expect.Exactly(1).On(_merger)
.Method("MergeIn" + MultiTextPropertyName)
.With(Is.Anything, Has.ToString(Is.EqualTo(value)));
}
// Expects one MergeInTranslationForm call: a null type must be passed through
// as null, otherwise both type and value are compared via their ToString form.
private void ExpectValueOfMergeInTranslationForm(string type, string value)
{
Matcher typeMatcher = type == null ? (Matcher) Is.Null : Has.ToString(Is.EqualTo(type));
Expect.Exactly(1).On(_merger)
.Method("MergeInTranslationForm")
.With(Is.Anything, typeMatcher, Has.ToString(Is.EqualTo(value)), Is.Anything);
}
// private void ExpectMultiTextMergeIn(string MultiTextPropertyName, Matcher matcher)
// {
// Expect.Exactly(1).On(_merger)
// .Method("MergeIn" + MultiTextPropertyName)
// .With(Is.Anything, Has.Property("Traits", matcher));
// }
// Expects MergeIn<property> with a multitext satisfying the supplied matcher.
private void ExpectMultiTextMergeIn(string MultiTextPropertyName, Matcher multiTextMatcher)
{
Expect.Exactly(1).On(_merger)
.Method("MergeIn" + MultiTextPropertyName)
.With(Is.Anything, multiTextMatcher);
}
[Test]
public void NonLiftDateError()
{
// Three non-LIFT date formats: each parses the entry but raises one warning.
TryDateFormat("last tuesday");
TryDateFormat("2005-01-01T01:11:11");
TryDateFormat("1/2/2003");
Assert.AreEqual(3, _parsingWarnings.Count);
}
// Parses an entry whose dateCreated uses the given string; warnings accumulate
// in _parsingWarnings for the caller to assert on.
private void TryDateFormat(string created)
{
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ParseEntryAndCheck(
string.Format("<entry id='foo' dateCreated='{0}'></entry>", created));
}
[Test]
public void DateWithoutTimeOk()
{
// A date-only dateCreated value is accepted without any warning.
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ParseEntryAndCheck("<entry id='foo' dateCreated='2005-01-01'></entry>");
Assert.AreEqual(0, _parsingWarnings.Count);
}
[Test]
public void EntryWithDates()
{
// Offset timestamps (+07:00) must be normalized to UTC in the Extensible.
string createdIn = "2003-08-07T08:42:42+07:00";
string modIn = "2005-01-01T01:11:11+07:00";
DateTime createdOut = new DateTime(2003, 08, 07, 01, 42, 42, DateTimeKind.Utc);//"2003-08-07T01:42:42Z" has to be UTC (in - 7 hours)
DateTime modOut = new DateTime(2004, 12, 31, 18, 11, 11, DateTimeKind.Utc); //"2004-12-31T18:11:11Z" has to be UTC (in - 7 hours)
ExpectGetOrMakeEntry(new ExtensibleMatcher("foo", createdOut, modOut));
// ExpectEmptyMultiTextMergeIn("LexemeForm");
ParseEntryAndCheck(
string.Format("<entry id='foo' dateCreated='{0}' dateModified='{1}'></entry>", createdIn, modIn));
}
[Test]
public void EntryWithNote()
{
// A note on the entry level maps to MergeInNote.
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ExpectMergeInNote("x=hello|");
ParseEntryAndCheck(string.Format("<entry><note><form lang='x'><text>hello</text></form></note></entry>"));
}
[Test]
public void EntryWithTwoNotes()
{
// Two typed notes produce two MergeInNote calls, one per type.
ExpectGetOrMakeEntry();
ExpectTypedMergeInNote("typeone");
ExpectTypedMergeInNote("typetwo");
ParseEntryAndCheck(string.Format("<entry><note type='typeone'><form lang='x'><text>one</text></form></note><note type='typetwo'><form lang='x'><text>two</text></form></note></entry>"));
}
[Test]
public void EntryWithSense()
{
// An empty sense element still triggers GetOrMakeSense.
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
// ExpectMergeGloss();
// ExpectMergeDefinition();
ParseEntryAndCheck(string.Format("<entry><sense></sense></entry>"));
}
[Test]
public void SenseWithGloss()
{
// A gloss inside a sense routes to MergeInGloss.
ExpectGetOrMakeEntry();
// ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
ExpectValueOfMergeIn("Gloss","x=hello|");
// ExpectMergeDefinition();
ParseEntryAndCheck(string.Format("<entry><sense><gloss lang='x'><text>hello</text></gloss></sense></entry>"));
}
[Test]
public void LexicalUnitWithAnnotation()
{
// An annotation on a lexical-unit form is preserved on the merged multitext.
ExpectGetOrMakeEntry();
ExpectMergeInLexemeForm(new LiftMultiTextAnnotationMatcher(1, "x", "flag", "1", null, default(DateTime)));
ParseEntryAndCheck(string.Format("<entry><lexical-unit><form lang='x'><text>blah blah</text><annotation name='flag' value='1'/></form></lexical-unit></entry>"));
}
[Test]
public void DefinitionWithAnnotation()
{
// An annotation with who/when attributes on a definition form is preserved,
// with the timestamp normalized to UTC.
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
string when= new DateTime(2000,1,1).ToUniversalTime().ToString(Extensible.LiftTimeFormatWithUTC);
ExpectMultiTextMergeIn("Definition", new LiftMultiTextAnnotationMatcher(1, "x", "flag", "1", "john", DateTime.Parse(when).ToUniversalTime()));
ParseEntryAndCheck(string.Format(@"
<entry>
<sense>
<definition>
<form lang='z'>
<text>hello</text>
<annotation name='flag' value='1' who='john' when='{0}'>
<form lang='x'>
<text>blah blah</text>
</form>
</annotation>
</form>
</definition></sense></entry>", when));
}
[Test]
public void SenseWithTraitWithAnnotations()
{
// A trait carrying two annotations merges as one trait with annotation count 2.
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
string when= new DateTime(2000,1,1).ToUniversalTime().ToString(Extensible.LiftTimeFormatWithUTC);
ExpectMergeInTrait(new TraitMatcher("dummy", "blah", 2));
//ExpectMergeDefinition();
ParseEntryAndCheck(string.Format(@"
<entry>
<sense>
<trait name='dummy' value ='blah'>
<annotation name='first'/>
<annotation name='second'/>
</trait></sense></entry>", when));
}
[Test]
public void GrammiWithTwoTraits()
{
// grammatical-info with two child traits: MergeInGrammaticalInfo receives the
// value plus a trait list of Count == 2.
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
//ExpectMultiTextMergeIn("Gloss", Is.Anything);
//ExpectMergeDefinition();
ExpectMergeInGrammi("x", Has.Property("Count", Is.EqualTo(2)));
ParseEntryAndCheck(string.Format("<entry><sense><grammatical-info value='x'><trait name='one' value='1'/><trait name='two' value='2'/></grammatical-info></sense></entry>"));
}
[Test]
public void GlossWithTwoLanguages()
{
// Glosses in two languages arrive in one MergeInGloss call.
ExpectGetOrMakeEntry();
ExpectGetOrMakeSense();
ExpectValueOfMergeIn("Gloss", "x=hello|y=bye|");
ParseEntryAndCheck(string.Format("<entry><sense><gloss lang='x'><text>hello</text></gloss><gloss lang='y'><text>bye</text></gloss></sense></entry>"));
}
[Test]
public void GlossWithTwoFormsInSameLanguageAreCombined()
{
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
ExpectValueOfMergeIn("Gloss", "x=hello; bye|");
//ExpectMergeDefinition();
ParseEntryAndCheck(string.Format("<entry><sense><gloss lang='x'><text>hello</text></gloss><gloss lang='x'><text>bye</text></gloss></sense></entry>"));
}
[Test]
public void SenseWithDefintition()
{
ExpectEmptyEntry();
ExpectGetOrMakeSense();
//ExpectMergeGloss();
ExpectValueOfMergeIn("Definition", "x=hello|");
ParseEntryAndCheck(string.Format("<entry><sense><definition><form lang='x'><text>hello</text></form></definition></sense></entry>"));
}
[Test]
public void SenseWithNote()
{
ExpectEmptyEntry();
ExpectGetOrMakeSense();
//ExpectMergeGloss();
//ExpectMergeDefinition();
ExpectMergeInNote("x=hello|");
ParseEntryAndCheck(string.Format("<entry><sense><note><form lang='x'><text>hello</text></form></note></sense></entry>"));
}
[Test]
public void FieldOnEntries()
{
// An entry-level <field> reaches MergeInField: tag "color", default dates,
// a multitext with two forms, and an empty second collection
// (presumably the field's traits — TODO confirm against IMerger).
ExpectEmptyEntry();
ExpectMergeInField(
Is.EqualTo("color"),
Is.EqualTo(default(DateTime)),
Is.EqualTo(default(DateTime)),
Has.Property("Count", Is.EqualTo(2)),
Has.Property("Count", Is.EqualTo(0))
);
ParseEntryAndCheck(
"<entry><field type='color'><form lang='en'><text>red</text></form><form lang='es'><text>roco</text></form></field></entry>");
}
[Test]
public void FieldOnSenses()
{
// Same expectation as FieldOnEntries, but the <field> hangs off a sense.
ExpectEmptyEntry();
ExpectGetOrMakeSense();
ExpectMergeInField(
Is.EqualTo("color"),
Is.EqualTo(default(DateTime)),
Is.EqualTo(default(DateTime)),
Has.Property("Count", Is.EqualTo(2)),
Has.Property("Count", Is.EqualTo(0))
);
ParseEntryAndCheck(
"<entry><sense><field type='color'><form lang='en'><text>red</text></form><form lang='es'><text>roco</text></form></field></sense></entry>");
}
[Test]
public void FieldOnExamples()
{
// Same expectation again, one level deeper: <field> inside an <example>.
ExpectEmptyEntry();
ExpectGetOrMakeSense();
ExpectGetOrMakeExample();
ExpectMergeInField(
Is.EqualTo("color"),
Is.EqualTo(default(DateTime)),
Is.EqualTo(default(DateTime)),
Has.Property("Count", Is.EqualTo(2)),
Has.Property("Count", Is.EqualTo(0))
);
ParseEntryAndCheck(
"<entry><sense><example><field type='color'><form lang='en'><text>red</text></form><form lang='es'><text>roco</text></form></field></example></sense></entry>");
}
[Test]
public void MultipleFieldsOnEntries()
{
// Two sibling <field> elements each get their own MergeInField call:
// "color" with two forms, then "special" with one form.
ExpectEmptyEntry();
ExpectMergeInField(
Is.EqualTo("color"),
Is.EqualTo(default(DateTime)),
Is.EqualTo(default(DateTime)),
Has.Property("Count", Is.EqualTo(2)),
Has.Property("Count", Is.EqualTo(0))
);
ExpectMergeInField(
Is.EqualTo("special"),
Is.EqualTo(default(DateTime)),
Is.EqualTo(default(DateTime)),
Has.Property("Count", Is.EqualTo(1)),
Has.Property("Count", Is.EqualTo(0))
);
ParseEntryAndCheck(
"<entry><field type='color'><form lang='en'><text>red</text></form><form lang='es'><text>roco</text></form></field><field type='special'><form lang='en'><text>free</text></form></field></entry>");
}
[Test]
public void DatesOnFields()
{
// dateCreated/dateModified attributes on a <field> are parsed back to the
// original DateTimes; the (empty) multitext content is irrelevant here.
ExpectEmptyEntry();
DateTime creat = new DateTime(2000,1,1).ToUniversalTime();
string createdTime = creat.ToString(Extensible.LiftTimeFormatWithUTC);
DateTime mod = new DateTime(2000, 1, 2).ToUniversalTime();
string modifiedTime = mod.ToString(Extensible.LiftTimeFormatWithUTC);
ExpectMergeInField(
Is.EqualTo("color"),
Is.EqualTo(creat),
Is.EqualTo(mod),
Is.Anything,
Has.Property("Count", Is.EqualTo(0))
);
// String.Format is genuinely needed here: the timestamps are substituted in.
ParseEntryAndCheck(String.Format("<entry><field type='color' dateCreated='{0}' dateModified='{1}' ></field></entry>",
createdTime,
modifiedTime));
}
[Test]
public void TraitsOnEntries()
{
    // Two differently-named entry-level traits each produce a MergeInTrait call.
    ExpectEmptyEntry();
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("color")), Has.Property("Value", Is.EqualTo("red"))));
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("shape")), Has.Property("Value", Is.EqualTo("square"))));
    // Fixed literals below; the no-op string.Format() wrappers were removed.
    ParseEntryAndCheck("<entry><trait name='color' value='red'/><trait name='shape' value='square'/></entry>");
}

[Test]
public void TraitsOnEntries_MultipleOfSameType_Okay()
{
    // Duplicate trait names are allowed: both 'color' traits come through.
    ExpectEmptyEntry();
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("color")), Has.Property("Value", Is.EqualTo("red"))));
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("color")), Has.Property("Value", Is.EqualTo("blue"))));
    ParseEntryAndCheck("<entry><trait name='color' value='red'/><trait name='color' value='blue'/></entry>");
}

[Test]
public void TraitsOnSenses()
{
    // Traits attached to a sense are reported the same way as entry traits.
    ExpectEmptyEntry();
    ExpectGetOrMakeSense();
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("color")), Has.Property("Value", Is.EqualTo("red"))));
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("shape")), Has.Property("Value", Is.EqualTo("square"))));
    ParseEntryAndCheck("<entry><sense><trait name='color' value='red'/><trait name='shape' value='square'/></sense></entry>");
}

[Test]
public void TraitsOnExamples()
{
    // Traits attached to an example are reported the same way again.
    ExpectEmptyEntry();
    ExpectGetOrMakeSense();
    ExpectGetOrMakeExample();
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("color")), Has.Property("Value", Is.EqualTo("red"))));
    ExpectMergeInTrait(new NMock2.Matchers.AndMatcher(
        Has.Property("Name", Is.EqualTo("shape")), Has.Property("Value", Is.EqualTo("square"))));
    ParseEntryAndCheck("<entry><sense><example><trait name='color' value='red'/><trait name='shape' value='square'/></example></sense></entry>");
}
[Test]
public void SenseWithGrammi()
{
// <grammatical-info> on a sense is forwarded to MergeInGrammi with its 'value'.
ExpectEmptyEntry();
ExpectGetOrMakeSense();
//ExpectMergeGloss();
//ExpectMergeDefinition();
ExpectMergeInGrammi("blue", Is.Anything);
ParseEntryAndCheck("<entry><sense><grammatical-info value='blue'/></sense></entry>");
}
[Test]
public void SenseWithExample()
{
    // An <example> under a sense creates an example object whose form
    // multitext is merged under the "ExampleForm" key.
    ExpectGetOrMakeEntry();
    ExpectGetOrMakeSense();
    ExpectGetOrMakeExample();
    ExpectValueOfMergeIn("ExampleForm", "x=hello|");
    // Fixed literals below; the no-op string.Format() wrappers were removed.
    ParseEntryAndCheck("<entry><sense><example><form lang='x'><text>hello</text></form></example></sense></entry>");
}

[Test]
public void SenseWithRelation()
{
    // A <relation> on a sense is reported with its type and ref target.
    ExpectGetOrMakeEntry();
    ExpectGetOrMakeSense();
    ExpectMergeInRelation("synonym", "one");
    ParseEntryAndCheck("<entry><sense><relation type=\"synonym\" ref=\"one\" /></sense></entry>");
}

[Test]
public void SenseWithPicture()
{
    // An <illustration> href is forwarded to MergeInPicture.
    ExpectGetOrMakeEntry();
    ExpectGetOrMakeSense();
    ExpectMergeInPicture("bird.jpg");
    ParseEntryAndCheck("<entry><sense><illustration href=\"bird.jpg\" /></sense></entry>");
}

[Test]
public void SenseWithPictureAndCaption()
{
    // An <illustration> with a <label> yields a picture plus its caption.
    ExpectGetOrMakeEntry();
    ExpectGetOrMakeSense();
    ExpectMergeInPictureWithCaption("bird.jpg");
    ParseEntryAndCheck("<entry><sense><illustration href=\"bird.jpg\" ><label><form lang='en'><text>bird</text></form></label></illustration></sense></entry>");
}
[Test]
public void ExampleWithTranslation()
{
// A typed <translation> inside an example merges its multitext under the
// translation type ("Free Translation").
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
//ExpectMergeGloss();
//ExpectMergeDefinition();
ExpectGetOrMakeExample();
// ExpectValueOfMergeIn("ExampleForm", "");
ExpectValueOfMergeInTranslationForm("Free Translation", "x=hello|");
ParseEntryAndCheck("<entry><sense><example><translation type='Free Translation'><form lang='x'><text>hello</text></form></translation></example></sense></entry>");
// "GetOrMakeEntry(;;;)GetOrMakeSense(m,)GetOrMakeExample(m,)MergeInTranslationForm(m,x=hello|)");
}
[Test]
public void ExampleWithSource()
{
// An untyped <translation> (type reported as null) plus the example's
// 'source' attribute, which is merged under the "Source" key.
ExpectGetOrMakeEntry();
//ExpectMergeInLexemeForm(Is.Anything);
ExpectGetOrMakeSense();
//ExpectMergeGloss();
//ExpectMergeDefinition();
ExpectGetOrMakeExample();
// ExpectValueOfMergeIn("ExampleForm", "");
ExpectValueOfMergeInTranslationForm(null, "x=hello|");
ExpectValueOfMergeIn("Source", "test");
ParseEntryAndCheck("<entry><sense><example source='test'><translation><form lang='x'><text>hello</text></form></translation></example></sense></entry>");
// "GetOrMakeEntry(;;;)GetOrMakeSense(m,)GetOrMakeExample(m,)MergeInTranslationForm(m,x=hello|)");
}
[Test]
public void ExampleWithNote()
{
    // A <note> inside an <example> is routed to MergeInNote on the example.
    ExpectEmptyEntry();
    ExpectGetOrMakeSense();
    ExpectGetOrMakeExample();
    ExpectMergeInNote("x=hello|");
    // Fixed literal; the no-op string.Format() wrapper was removed.
    ParseEntryAndCheck("<entry><sense><example><note><form lang='x'><text>hello</text></form></note></example></sense></entry>");
}
// The following smoke tests feed minimal LIFT documents (InsertVersion replaces
// the 'V' token with a valid version attribute) through the parser and assert
// how many times the merger is invoked, in this argument order:
// SimpleCheckWithHeader(xml, rangeElementCount, fieldCount, entryCount).
[Test]
public void EmptyHeaderOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header/><entry/></lift>"), 0, 0, 1);
}
[Test]
public void EmptyHeaderNoEntriesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header/></lift>"), 0,0,0);
}
[Test]
public void EmptyFieldsOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><fields/></header><entry/></lift>"), 0,0,1);
}
[Test]
public void EmptyFieldsNoEntriesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><fields/></header></lift>"), 0, 0, 0);
}
[Test]
public void EmptyFieldOk()
{
// A <field> inside <fields> counts as one field definition even when empty.
SimpleCheckWithHeader(InsertVersion("<lift V><header><fields><field tag='custom'/></fields></header><entry/></lift>"), 0,1,1);
}
[Test]
public void TwoFields()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><fields><field tag='special'/><field tag='custom'></field></fields></header><entry/></lift>"), 0, 2, 1);
}
[Test]
public void EmptyFieldNoEntriesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><fields><field tag='custom'/></fields></header></lift>"), 0, 1, 0);
}
[Test]
public void EmptyRangesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><ranges/></header><entry/></lift>"), 0,0,1);
}
[Test]
public void EmptyRangesNoEntriesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><ranges/></header></lift>"), 0, 0, 0);
}
[Test]
public void EmptyRangeOk()
{
// An empty <range> yields no range ELEMENTS, hence rangeElementCount == 0.
SimpleCheckWithHeader(InsertVersion("<lift V><header><ranges><range/></ranges></header><entry/></lift>"), 0,0,1);
}
[Test]
public void EmptyRangeNoEntriesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><ranges><range/></ranges></header></lift>"), 0, 0, 0);
}
[Test]
public void EmptyLiftHeaderSectionsFieldsBeforeRangesOk()
{
// Header section order must not matter: fields-then-ranges ...
SimpleCheckWithHeader(InsertVersion("<lift V><header><fields/><ranges/></header><entry/></lift>"), 0, 0, 1);
}
[Test]
public void EmptyLiftHeaderSectionsOk()
{
// ... and ranges-then-fields both parse cleanly.
SimpleCheckWithHeader(InsertVersion("<lift V><header><ranges/><fields/></header><entry/></lift>"), 0, 0, 1);
}
[Test]
public void EmptyLiftHeaderSectionsNoEntriesOk()
{
SimpleCheckWithHeader(InsertVersion("<lift V><header><ranges/><fields/></header></lift>"), 0, 0, 0);
}
[Test]
public void SimpleRangeElement()
{
    // A fully-populated <range-element> (label, abbrev, description) is handed
    // to ProcessRangeElement with each multitext decoded, plus the element's
    // raw re-serialized XML as the final argument.
    string content = InsertVersion("<lift V><header><ranges><range id='dialect'><range-element id='en'><label><form lang='en'><text>English</text></form></label><abbrev><form lang='en'><text>Eng</text></form></abbrev><description><form lang='en'><text>Standard English</text></form></description></range-element></range></ranges></header><entry/></lift>");
    Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
        .With(Is.EqualTo("dialect"), Is.EqualTo("en"), Is.Null, Is.Null,
        Is.EqualTo(new LiftMultiText("en", "Standard English")),
        Is.EqualTo(new LiftMultiText("en", "English")),
        Is.EqualTo(new LiftMultiText("en", "Eng")),
        Is.EqualTo("<range-element id=\"en\"><label><form lang=\"en\"><text>English</text></form></label><abbrev><form lang=\"en\"><text>Eng</text></form></abbrev><description><form lang=\"en\"><text>Standard English</text></form></description></range-element>"));
    ExpectGetOrMakeEntry();
    ExpectFinishEntry();
    // FIX: write the XML verbatim. The previous string.Format(content) call was
    // a no-op for this content but throws FormatException on any '{' or '}'.
    using (TempFile f = new TempFile(content))
    {
        _parser.ReadLiftFile(f.Path);
    }
    _mocks.VerifyAllExpectationsHaveBeenMet();
}
/// <summary>
/// Parses <paramref name="content"/> (a complete LIFT document) and verifies
/// how many times the merger is asked to process range elements, field
/// definitions and entries, in any order.
/// </summary>
private void SimpleCheckWithHeader(string content, int rangeElementCount, int fieldCount, int entryCount)
{
    using (_mocks.Unordered)
    {
        Expect.Exactly(rangeElementCount).On(_merger).Method("ProcessRangeElement")
            .WithAnyArguments();
        Expect.Exactly(fieldCount).On(_merger).Method("ProcessFieldDefinition")
            .WithAnyArguments();
        Expect.Exactly(entryCount).On(_merger).Method("GetOrMakeEntry").WithAnyArguments().Will(Return.Value(null));
    }
    // FIX: write the XML verbatim. The previous string.Format(content) call was
    // a no-op for current callers but would throw FormatException if the XML
    // ever contained '{' or '}'.
    using (TempFile f = new TempFile(content))
    {
        _parser.ReadLiftFile(f.Path);
    }
    _mocks.VerifyAllExpectationsHaveBeenMet();
}
[Test]
public void GetNumberOfEntriesInFile_0Entries_Returns0()
{
// The estimator scans the file for entry tags; an empty document yields zero.
using(TempFile f = new TempFile( "<lift></lift>"))
{
int count = LiftParser<DummyBase, Dummy, Dummy, Dummy>.GetEstimatedNumberOfEntriesInFile(f.Path);
Assert.AreEqual(0, count);
}
}
[Test]
public void GetNumberOfEntriesInFile_3Entries_Returns3()
{
// Counts expanded (<entry></entry>) and self-closed (<entry/>) forms alike,
// including entries separated by a line break.
string path = Path.GetTempFileName();
try
{
File.WriteAllText(path, @"<lift><entry></entry>
<entry id='foo'/><entry/></lift>");
int count = LiftParser<DummyBase, Dummy, Dummy, Dummy>.GetEstimatedNumberOfEntriesInFile(path);
Assert.AreEqual(3, count);
}
finally
{
File.Delete(path);
}
}
[Test]
public void SimpleFieldDefinition()
{
// A header <field> definition is reported through ProcessFieldDefinition with
// its tag and its description multitext.
string content = "<field tag='tone'><form lang='en'><text>the tone information for a pronunciation</text></form></field>";
Expect.Exactly(1).On(_merger).Method("ProcessFieldDefinition")
.With(Is.EqualTo("tone"), Is.EqualTo(new LiftMultiText("en", "the tone information for a pronunciation")));
_doc.LoadXml(content);
_parser.ReadFieldDefinition(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
[Test]
public void SimpleEtymology()
{
// <etymology> carries source/type attributes, a form, a gloss, and a nested
// <field>; the expectations are ORDERED: etymology first, then its field.
string content = "<entry><etymology source='Greek' type='borrowed'><form lang='bam'><text>alphabeta</text></form><gloss lang='en'><text>letters</text></gloss><field type='comment'><form lang='en'><text>this etymology is nonsense</text></form></field></etymology></entry>";
_doc.LoadXml(content);
using (_mocks.Ordered)
{
ExpectGetOrMakeEntry();
Expect.Exactly(1).On(_merger).Method("MergeInEtymology")
.With(Is.Anything, Is.EqualTo("Greek"), Is.EqualTo("borrowed"),
Is.EqualTo(new LiftMultiText("bam", "alphabeta")),
Is.EqualTo(new LiftMultiText("en", "letters")), Is.Anything)
.Will(Return.Value(new Dummy()));
Expect.Exactly(1).On(_merger).Method("MergeInField")
.With(Is.Anything, Is.EqualTo("comment"), Is.EqualTo(DateTime.MinValue), Is.EqualTo(DateTime.MinValue),
Is.EqualTo(new LiftMultiText("en", "this etymology is nonsense")), Is.Anything);
ExpectFinishEntry();
}
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
[Test]
public void SimpleReversal()
{
// A flat <reversal> with no type or parent: both nullable args are null.
string content = "<entry><sense><reversal><form lang='en'><text>sorghum</text></form></reversal></sense></entry>";
_doc.LoadXml(content);
using (_mocks.Ordered)
{
ExpectGetOrMakeEntry();
ExpectGetOrMakeSense();
Expect.Exactly(1).On(_merger).Method("MergeInReversal")
.With(Is.Anything, Is.Null, Is.EqualTo(new LiftMultiText("en", "sorghum")), Is.Null, Is.Anything);
ExpectFinishEntry();
}
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
[Test]
public void NestedReversal()
{
// A <reversal> with a <main> child: the parent ("fruit") is created first via
// GetOrMakeParentReversal, then the child ("apple") is merged under it.
string content = "<entry><sense><reversal type='test'><form lang='en'><text>apple</text></form><main><form lang='en'><text>fruit</text></form></main></reversal></sense></entry>";
_doc.LoadXml(content);
using (_mocks.Ordered)
{
ExpectGetOrMakeEntry();
ExpectGetOrMakeSense();
Expect.Exactly(1).On(_merger).Method("GetOrMakeParentReversal")
.With(Is.Null, Is.EqualTo(new LiftMultiText("en", "fruit")), Is.EqualTo("test"));
Expect.Exactly(1).On(_merger).Method("MergeInReversal")
.With(Is.Anything, Is.Null, Is.EqualTo(new LiftMultiText("en", "apple")), Is.EqualTo("test"), Is.Anything);
ExpectFinishEntry();
}
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
[Test]
public void ReadSubSense()
{
// A <subsense> gets its own object via GetOrMakeSubsense, and its gloss is
// merged into that object rather than into the outer sense.
string content = "<entry><sense><gloss lang='en'><text>destroy</text></gloss><subsense><gloss lang='en'><text>unmake</text></gloss></subsense></sense></entry>";
_doc.LoadXml(content);
using (_mocks.Ordered)
{
ExpectGetOrMakeEntry();
ExpectGetOrMakeSense();
Expect.Exactly(1).On(_merger).Method("MergeInGloss")
.With(Is.NotNull, Is.EqualTo(new LiftMultiText("en", "destroy")));
Expect.Exactly(1).On(_merger).Method("GetOrMakeSubsense")
.Will(Return.Value(new Dummy()));
Expect.Exactly(1).On(_merger).Method("MergeInGloss")
.With(Is.Anything, Is.EqualTo(new LiftMultiText("en", "unmake")));
ExpectFinishEntry();
}
_parser.ReadEntry(_doc.FirstChild);
_mocks.VerifyAllExpectationsHaveBeenMet();
}
/// <summary>
/// NMock matcher that compares strings for equality while treating CRLF and LF
/// line endings as equivalent on both the expected and the actual side.
/// </summary>
private class NewLineAgnosticEqualMatcher: NMock2.Matcher
{
    private readonly string _expected;

    public NewLineAgnosticEqualMatcher(string expected)
    {
        // Normalize once up front; Matches() normalizes each candidate.
        _expected = expected.Replace("\r\n", "\n");
    }

    public override void DescribeTo(TextWriter writer)
    {
        writer.Write("equal to ");
        writer.Write(_expected);
    }

    public override bool Matches(object o)
    {
        // Non-strings never match.
        var candidate = o as string;
        return candidate != null && _expected.Equals(candidate.Replace("\r\n", "\n"));
    }
}
/// <summary>
/// Convenience factory for a matcher that compares strings while ignoring
/// CRLF-vs-LF differences, keeping expectations portable across platforms.
/// </summary>
public static Matcher IsEqualToIgnoreNl(string expected)
{
return new NewLineAgnosticEqualMatcher(expected);
}
[Test]
public void ReadExternalLiftFile()
{
// End-to-end test: a real LIFT file and its companion .lift-ranges file are
// materialized from embedded resources and parsed from disk. Two ranges come
// from the lift file itself, the grammatical-info elements from the external
// ranges file, then the field definitions, then the single 'bird' entry.
const string NewLine = "\n";
using (_mocks.Ordered) // Ordered may be too strong if parse details change.
{
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("etymology"), Is.EqualTo("borrowed"), Is.Null, Is.Null,
Is.EqualTo(new LiftMultiText("en", "The word is borrowed from another language")),
Is.EqualTo(new LiftMultiText("en", "borrowed")),
Is.EqualTo(new LiftMultiText()),
IsEqualToIgnoreNl("<range-element id=\"borrowed\">" + NewLine +
" <label>" + NewLine +
" <form lang=\"en\"><text>borrowed</text></form>" + NewLine +
" </label>" + NewLine +
" <description>" + NewLine +
" <form lang=\"en\"><text>The word is borrowed from another language</text></form>" + NewLine +
" </description>" + NewLine +
" </range-element>"));
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("etymology"), Is.EqualTo("proto"), Is.Null, Is.Null,
Is.EqualTo(new LiftMultiText("en", "The proto form of the word in another language")),
Is.EqualTo(new LiftMultiText("en", "proto")),
Is.EqualTo(new LiftMultiText()),
IsEqualToIgnoreNl("<range-element id=\"proto\">" + NewLine +
" <label>" + NewLine +
" <form lang=\"en\"><text>proto</text></form>" + NewLine +
" </label>" + NewLine +
" <description>" + NewLine +
" <form lang=\"en\"><text>The proto form of the word in another language</text></form>" + NewLine +
" </description>" + NewLine +
" </range-element>"));
// The following range elements are from an external range file.
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("grammatical-info"), Is.EqualTo("Adverb"), Is.NotNull, Is.Null,
Is.EqualTo(new LiftMultiText("en", "modify verbs")),
Is.EqualTo(new LiftMultiText("en", "Adverb")),
Is.EqualTo(new LiftMultiText("en", "adv")),
IsEqualToIgnoreNl("<range-element guid=\"c528ee72-31a5-423d-833d-0c8454f345d3\" id=\"Adverb\">" + NewLine +
" <label><form lang=\"en\"><text>Adverb</text></form></label>" + NewLine +
" <abbrev><form lang=\"en\"><text>adv</text></form></abbrev>" + NewLine +
" <description><form lang=\"en\"><text>modify verbs</text></form></description>" + NewLine +
" </range-element>"));
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("grammatical-info"), Is.EqualTo("Noun"), Is.NotNull, Is.Null,
Is.EqualTo(new LiftMultiText("en", "substantives and nominals")),
Is.EqualTo(new LiftMultiText("en", "Noun")),
Is.EqualTo(new LiftMultiText("en", "n")),
IsEqualToIgnoreNl("<range-element guid=\"0fae9a91-36c0-429f-9a31-fbef1292da6a\" id=\"Noun\">" + NewLine +
" <label><form lang=\"en\"><text>Noun</text></form></label>" + NewLine +
" <abbrev><form lang=\"en\"><text>n</text></form></abbrev>" + NewLine +
" <description><form lang=\"en\"><text>substantives and nominals</text></form></description>" + NewLine +
" </range-element>"));
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("grammatical-info"), Is.EqualTo("Verb"), Is.NotNull, Is.Null,
Is.EqualTo(new LiftMultiText("en", "signal events and actions")),
Is.EqualTo(new LiftMultiText("en", "Verb")),
Is.EqualTo(new LiftMultiText("en", "v")),
IsEqualToIgnoreNl("<range-element guid=\"4812abf3-31e5-450c-a15f-a830dfc7f223\" id=\"Verb\">" + NewLine +
" <label><form lang=\"en\"><text>Verb</text></form></label>" + NewLine +
" <abbrev><form lang=\"en\"><text>v</text></form></abbrev>" + NewLine +
" <description><form lang=\"en\"><text>signal events and actions</text></form></description>" + NewLine +
" </range-element>"));
Expect.Exactly(1).On(_merger).Method("ProcessFieldDefinition")
.With(Is.EqualTo("cv-pattern"),
Is.EqualTo(new LiftMultiText("en", "the syllable pattern for a pronunciation")));
Expect.Exactly(1).On(_merger).Method("ProcessFieldDefinition")
.With(Is.EqualTo("tone"),
Is.EqualTo(new LiftMultiText("en", "the tone information for a pronunciation")));
Expect.Exactly(1).On(_merger).Method("ProcessFieldDefinition")
.With(Is.EqualTo("import-residue"),
Is.EqualTo(new LiftMultiText("en", "residue left over from importing")));
Expect.Exactly(1).On(_merger).Method("ProcessFieldDefinition")
.With(Is.EqualTo("literal-meaning"),
Is.EqualTo(new LiftMultiText("en", "literal meaning of an entry")));
// The lift file contains a single entry: 'bird', with fixed guid and dates.
ExpectGetOrMakeEntry(new ExtensibleMatcher("bird_6db30a98-530e-4614-86d4-237f6984db71",
new Guid("6db30a98-530e-4614-86d4-237f6984db71"),
new DateTime(2008, 3, 31, 8, 4, 9, DateTimeKind.Utc),
new DateTime(2008, 3, 31, 8, 4, 9, DateTimeKind.Utc)));
Expect.Exactly(1).On(_merger).Method("MergeInLexemeForm")
.With(Is.Anything, Is.EqualTo(new LiftMultiText("x-rtl", "bird")));
ExpectGetOrMakeSense();
Expect.Exactly(1).On(_merger).Method("MergeInGrammaticalInfo")
.With(Is.Anything, Is.EqualTo("Noun"), Is.NotNull);
Expect.Exactly(1).On(_merger).Method("MergeInGloss")
.With(Is.Anything, Is.EqualTo(new LiftMultiText("en", "bird")));
Expect.Exactly(1).On(_merger).Method("MergeInTrait")
.With(Is.Anything, Is.EqualTo(new Trait("morph-type", "stem")));
Expect.Exactly(1).On(_merger).Method("MergeInTrait")
.With(Is.Anything, Is.EqualTo(new Trait("entry-type", "Main Entry")));
ExpectFinishEntry();
}
// Both files are written into the current directory: ReadLiftFile is given a
// relative name, and the .lift-ranges file must sit next to the .lift file
// for its href reference to resolve.
var cwd = Environment.CurrentDirectory;
var liftFilePath = Path.Combine(cwd, "test20080407.lift");
var liftRangesFilePath = Path.Combine(cwd, "test20080407.lift-ranges");
try
{
File.WriteAllBytes(liftFilePath, Resources.test20080407_lift);
File.WriteAllBytes(liftRangesFilePath, Resources.test20080407_lift_ranges);
_parser.ReadLiftFile("test20080407.lift");
_mocks.VerifyAllExpectationsHaveBeenMet();
}
finally
{
RobustFile.Delete(liftFilePath);
RobustFile.Delete(liftRangesFilePath);
}
}
[Test]
public void ReadLiftRangesFileWithEmptyRange()
{
// Ranges referenced via href are loaded from the external .lift-ranges file.
// The 'empty' range contributes no ProcessRangeElement calls; the 'list'
// range contributes its two elements, in document order.
using (_mocks.Ordered)
{
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("list"), Is.EqualTo("1"), Is.Null, Is.Null,
Is.EqualTo(new LiftMultiText()),
Is.EqualTo(new LiftMultiText("en", "first")),
Is.EqualTo(new LiftMultiText("en", "1st")),
IsEqualToIgnoreNl("<range-element id=\"1\"><label><form lang=\"en\"><text>first</text></form></label><abbrev><form lang=\"en\"><text>1st</text></form></abbrev></range-element>"));
Expect.Exactly(1).On(_merger).Method("ProcessRangeElement")
.With(Is.EqualTo("list"), Is.EqualTo("2"), Is.Null, Is.Null,
Is.EqualTo(new LiftMultiText()),
Is.EqualTo(new LiftMultiText("en", "second")),
Is.EqualTo(new LiftMultiText("en", "2nd")),
IsEqualToIgnoreNl("<range-element id=\"2\"><label><form lang=\"en\"><text>second</text></form></label><abbrev><form lang=\"en\"><text>2nd</text></form></abbrev></range-element>"));
}
// {0} = LIFT version, {1} = path of the temporary ranges file (as file:// URI).
using (TempFile r = new TempFile("<lift-ranges>" +
"<range id=\"empty\"/>" +
"<range id=\"list\">" +
"<range-element id=\"1\"><label><form lang=\"en\"><text>first</text></form></label><abbrev><form lang=\"en\"><text>1st</text></form></abbrev></range-element>" +
"<range-element id=\"2\"><label><form lang=\"en\"><text>second</text></form></label><abbrev><form lang=\"en\"><text>2nd</text></form></abbrev></range-element>" +
"</range>" +
"</lift-ranges>"))
using (TempFile f = new TempFile(String.Format("<lift version='{0}'><header><ranges><range href=\"file://{1}\" id=\"empty\"/><range href=\"file://{1}\" id=\"list\"/></ranges></header></lift>",
Validator.LiftVersion, r.Path)))
{
_parser.ReadLiftFile(f.Path);
}
_mocks.VerifyAllExpectationsHaveBeenMet();
}
/*
*
/// <summary>
/// when I wrote the flex exporter, lift did not yet implement semantic domain
/// </summary>
[Test, Ignore("Not yet implemented in WeSay")]
public void SemanticDomainTraitIsBroughtInCorrectly()
{
_doc.LoadXml("<trait range=\"semantic-domain\" value=\"6.5.1.1\"/>");
//TODO _importer.ReadTrait(_doc.SelectSingleNode("wrap"));
}
/// <summary>
/// when I wrote the flex exporter, lift did not yet implement part of speech
/// </summary>
[Test, Ignore("Not yet implemented in WeSay")]
public void GrammiWithTextLabel()
{
_doc.LoadXml("<sense><grammi type=\"conc\"/></sense>");
//TODO _importer.ReadSense(_doc.SelectSingleNode("sense"));
}
/// <summary>
/// when I wrote the flex exporter, lift did not yet implement part of speech
/// </summary>
[Test, Ignore("Not yet implemented in WeSay")]
public void GrammiWithEmptyLabel()
{
_doc.LoadXml("<sense><grammi type=\"\"/></sense>");
//TODO _importer.ReadSense(_doc.SelectSingleNode("sense"));
}
* */
//private void ParseAndCheck(string content, string expectedResults)
//{
// _doc.LoadXml(content);
// _parser.ReadFile(_doc);
// Assert.AreEqual(expectedResults, _results.ToString());
//}
// private void ParseEntryAndCheck(string content, string expectedResults)
// {
// _doc.LoadXml(content);
// _parser.ReadEntry(_doc.FirstChild);
// Assert.AreEqual(expectedResults, _results.ToString());
// }
}
// Minimal stand-in types for the parser's generic type parameters; the mocked
// merger returns instances of these where real model objects would be created.
public class DummyBase
{
}
public class Dummy : DummyBase
{
// "m" is the shorthand used historically in the expectation strings
// (see the commented-out "GetOrMakeSense(m,)..." traces in the tests above).
public override string ToString()
{
return "m";
}
}
/// <summary>
/// Trivial ILiftChangeDetector: pretends a change cache exists after Reset()
/// and not after ClearCache(), and always hands back a DummyChangeReport.
/// </summary>
public class DummyLiftChangeDetector : ILiftChangeDetector
{
    // Pretend-cache flag: set by Reset(), cleared by ClearCache().
    private bool _cacheIsFresh;

    public DummyLiftChangeDetector()
    {
        Reset();
    }

    public void Reset()
    {
        _cacheIsFresh = true;
    }

    public void ClearCache()
    {
        _cacheIsFresh = false;
    }

    public bool CanProvideChangeRecord
    {
        get { return _cacheIsFresh; }
    }

    public ILiftChangeReport GetChangeReport(IProgress progress)
    {
        return new DummyChangeReport();
    }
}
/// <summary>
/// Canned ILiftChangeReport: change types are keyed on well-known entry ids;
/// anything unrecognised (including "old") reports as unchanged.
/// </summary>
class DummyChangeReport : ILiftChangeReport
{
    public LiftChangeReport.ChangeType GetChangeType(string entryId)
    {
        if (entryId == "new")
        {
            return LiftChangeReport.ChangeType.New;
        }
        if (entryId == "changed")
        {
            return LiftChangeReport.ChangeType.Editted;
        }
        // "old" and any other id are both reported as unchanged.
        return LiftChangeReport.ChangeType.None;
    }

    public IList<string> IdsOfDeletedEntries
    {
        get { throw new System.NotImplementedException(); }
    }
}
}
| |
/*
*
* Created by Dimitris Tavlikos
* Website: http://software.tavlikos.com
* Contact: dimitris@tavlikos.com
*
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
namespace DTavlikos
{
/// <summary>
/// A Dictionary&lt;TKey,TValue&gt; that tolerates Add/Remove calls while it is being
/// enumerated with foreach: GetEnumerator walks a snapshot of the pairs, and
/// removals requested during enumeration are deferred until it completes.
/// NOTE(review): not thread-safe, and nested/concurrent enumerations of the same
/// instance share one flag and one removal queue — run one foreach at a time.
/// NOTE(review): breaking out of a foreach early leaves EnumerationRunning set
/// and deferred removals unapplied (Dispose does not clean up) — pre-existing
/// behavior, kept as-is.
/// </summary>
/// <typeparam name="TKey">The type of the keys</typeparam>
/// <typeparam name="TValue">The type of the values</typeparam>
[Serializable()]
public class FlexibleDictionary<TKey, TValue> : Dictionary<TKey, TValue>
{
    #region Constructors

    /// <summary>Creates an empty dictionary with the default key comparer.</summary>
    public FlexibleDictionary()
        : base(0, null)
    {
    }

    /// <summary>Creates a dictionary containing a copy of <paramref name="dictionary"/>.</summary>
    public FlexibleDictionary(IDictionary<TKey, TValue> dictionary)
        : base(dictionary, null)
    {
    }

    /// <summary>
    /// Creates a dictionary containing a copy of <paramref name="dictionary"/>,
    /// presized to its count and using <paramref name="comparer"/> for key equality.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="dictionary"/> is null</exception>
    public FlexibleDictionary(IDictionary<TKey, TValue> dictionary, IEqualityComparer<TKey> comparer)
        : base((dictionary != null) ? dictionary.Count : 0, comparer)
    {
        if (dictionary == null)
        {
            throw new ArgumentNullException("dictionary", "The IDictionary parameter must not be null!");
        }
        foreach (KeyValuePair<TKey, TValue> eachItem in dictionary)
        {
            this.Add(eachItem.Key, eachItem.Value);
        }
    }

    /// <summary>Creates an empty dictionary using <paramref name="comparer"/> for key equality.</summary>
    public FlexibleDictionary(IEqualityComparer<TKey> comparer)
        : base(0, comparer)
    {
    }

    /// <summary>Creates an empty dictionary presized to <paramref name="capacity"/>.</summary>
    public FlexibleDictionary(int capacity)
        : base(capacity, null)
    {
    }

    /// <summary>Creates an empty presized dictionary with an explicit comparer.</summary>
    public FlexibleDictionary(int capacity, IEqualityComparer<TKey> comparer)
        : base(capacity, comparer)
    {
    }

    /// <summary>Deserialization constructor.</summary>
    protected FlexibleDictionary(SerializationInfo info, StreamingContext context)
        : base(info, context)
    {
    }

    #endregion

    /// <summary>
    /// Returns an enumerator over a snapshot of the current pairs, so the
    /// dictionary itself may be safely modified while enumeration is running.
    /// </summary>
    public new IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
    {
        this.pEnumerationRunning = true;
        return new FlexibleDictionaryEnumerator<TKey, TValue>(this, this.ToList<KeyValuePair<TKey, TValue>>());
    }

    private int pNewCount = -1;

    /// <summary>
    /// The number of items added so far during the current foreach iteration,
    /// or -1 outside of an iteration.
    /// </summary>
    public int NewCount
    {
        get { return this.pNewCount; }
        protected set { this.pNewCount = value; }
    }

    [NonSerialized()]
    private bool pEnumerationRunning;

    /// <summary>True while the dictionary is being enumerated.</summary>
    public bool EnumerationRunning
    {
        get { return this.pEnumerationRunning; }
    }

    // Keys whose removal was requested during enumeration; flushed by the
    // enumerator when it runs off the end of its snapshot.
    [NonSerialized()]
    private List<TKey> itemsToRemove = new List<TKey>();

    /// <summary>
    /// Removes an item. Safe to call during a foreach enumeration: the removal
    /// is queued and applied when the enumeration completes.
    /// </summary>
    /// <param name="key">The key of the item to remove</param>
    /// <returns>True if the item was removed (or queued for removal), otherwise false</returns>
    public new bool Remove(TKey key)
    {
        if (!this.pEnumerationRunning)
        {
            return base.Remove(key);
        }
        // Original guard kept: once as many removals are queued as there are
        // items, further removal requests are refused.
        if (this.itemsToRemove.Count == this.Count)
        {
            return false;
        }
        if (this.ContainsKey(key))
        {
            this.itemsToRemove.Add(key);
            return true;
        }
        return false;
    }

    /// <summary>
    /// Walks a snapshot list of pairs; when it runs off the end it clears the
    /// owner's enumeration flag, applies deferred removals and resets NewCount.
    /// </summary>
    /// <typeparam name="T1">The type of the keys</typeparam>
    /// <typeparam name="T2">The type of the values</typeparam>
    private class FlexibleDictionaryEnumerator<T1, T2> : IEnumerator<KeyValuePair<T1, T2>>
    {
        private int index;
        private int itemCount;                             // snapshot size, fixed at creation
        private FlexibleDictionary<T1, T2> dictionary;
        private List<KeyValuePair<T1, T2>> pairListCache;  // the snapshot being walked
        private KeyValuePair<T1, T2> pCurrent;

        /// <param name="dictionary">The dictionary which will be enumerated</param>
        /// <param name="pairListCache">The snapshot of KeyValuePairs to walk</param>
        public FlexibleDictionaryEnumerator(FlexibleDictionary<T1, T2> dictionary, List<KeyValuePair<T1, T2>> pairListCache)
        {
            this.dictionary = dictionary;
            this.itemCount = this.dictionary.Count;
            this.pCurrent = new KeyValuePair<T1, T2>();
            this.index = 0;
            this.pairListCache = pairListCache;
        }

        #region IEnumerator<KeyValuePair<T1,T2>> Members

        /// <summary>The pair at the current position of the snapshot.</summary>
        public KeyValuePair<T1, T2> Current
        {
            get { return this.pCurrent; }
        }

        #endregion

        #region IEnumerator Members

        // FIX: previously returned null, violating the non-generic IEnumerator
        // contract; now returns the (boxed) current pair.
        object IEnumerator.Current
        {
            get { return this.pCurrent; }
        }

        /// <summary>
        /// Advances to the next snapshot item; returns false after the last one.
        /// </summary>
        public bool MoveNext()
        {
            if (this.index < this.itemCount)
            {
                // Expose how many items were added since enumeration began.
                this.dictionary.NewCount = this.dictionary.Count - this.itemCount;
                this.pCurrent = this.pairListCache[this.index++];
                return true;
            }
            this.index = this.dictionary.Count + 1;
            this.pCurrent = new KeyValuePair<T1, T2>();
            // Clear the flag FIRST so the removals below hit base.Remove
            // instead of being re-queued.
            this.dictionary.pEnumerationRunning = false;
            if (this.dictionary.itemsToRemove.Count > 0)
            {
                foreach (T1 eachItem in this.dictionary.itemsToRemove)
                {
                    this.dictionary.Remove(eachItem);
                }
                this.dictionary.itemsToRemove.Clear();
            }
            // FIX: restore the documented "-1 outside an iteration" contract
            // (previously NewCount kept its last in-loop value forever).
            this.dictionary.NewCount = -1;
            return false;
        }

        /// <summary>Rewinds to the start of the snapshot.</summary>
        public void Reset()
        {
            // FIX: previously also zeroed itemCount, which made a reset
            // enumerator terminate immediately; the snapshot size is fixed.
            this.index = 0;
            this.pCurrent = new KeyValuePair<T1, T2>();
        }

        #endregion

        #region IDisposable Members

        public void Dispose()
        {
        }

        #endregion
    }
}
/// <summary>
/// Class containing method extensions for FlexibleDictionary
/// </summary>
public static class FlexibleDictionaryExtensions
{
    /// <summary>
    /// Converts an IEnumerable collection to a FlexibleDictionary
    /// </summary>
    /// <typeparam name="TSource">The element type of the collection</typeparam>
    /// <typeparam name="TKey">The type of the key of the FlexibleDictionary</typeparam>
    /// <typeparam name="TValue">The type of the value of the FlexibleDictionary</typeparam>
    /// <param name="source">The IEnumerable collection to convert</param>
    /// <param name="keySelector">Function for determining the key of the FlexibleDictionary</param>
    /// <param name="valueSelector">Function for determining the value of the collection</param>
    /// <returns>A FlexibleDictionary containing one entry per element of <paramref name="source"/></returns>
    /// <exception cref="ArgumentNullException">If source, keySelector or valueSelector is null</exception>
    public static FlexibleDictionary<TKey, TValue> ToFlexibleDictionary<TSource, TKey, TValue>(this IEnumerable<TSource> source,
                                                                                               Func<TSource, TKey> keySelector,
                                                                                               Func<TSource, TValue> valueSelector)
    {
        // Fail fast with the offending parameter name instead of an opaque
        // NullReferenceException from inside the loop.
        if (source == null)
        {
            throw new ArgumentNullException("source");
        }
        if (keySelector == null)
        {
            throw new ArgumentNullException("keySelector");
        }
        if (valueSelector == null)
        {
            throw new ArgumentNullException("valueSelector");
        }
        FlexibleDictionary<TKey, TValue> toReturn = new FlexibleDictionary<TKey, TValue>();
        foreach (TSource eachItem in source)
        {
            toReturn.Add(keySelector(eachItem), valueSelector(eachItem));
        }
        return toReturn;
    }
}
}
| |
using System;
using System.Linq;
using System.Collections.ObjectModel;
using System.Reflection;
using System.Collections.Generic;
namespace System.Management.Automation
{
/// <summary>
/// Represents a .NET method (with all of its overloads) exposed to PowerShell.
/// Overload selection first tries an exact signature match, then falls back to
/// matching with conversions, optional parameters and "params" arrays.
/// </summary>
public class PSMethod : PSMethodInfo
{
    private Type _classType;
    private object _instance;
    private MethodInfo[] _overloads;

    /// <summary>
    /// Lazily computed public overloads on the target type with this method's name,
    /// filtered to instance or static members to match how this PSMethod was created.
    /// </summary>
    private MethodInfo[] Overloads
    {
        get
        {
            if (_overloads == null)
            {
                var flags = BindingFlags.Public;
                flags |= IsInstance ? BindingFlags.Instance : BindingFlags.Static;
                _overloads = (from method in _classType.GetMethods(flags)
                              where method.Name.Equals(Name)
                              select method).ToArray();
            }
            return _overloads;
        }
    }

    internal PSMethod(string methodName, Type classType, object owner, bool isInstance)
        : base()
    {
        Name = methodName;
        _classType = classType;
        _instance = owner;
        IsInstance = isInstance;
    }

    public override Collection<string> OverloadDefinitions
    {
        get
        {
            throw new NotImplementedException();
        }
    }

    /// <summary>
    /// Invokes the best-matching overload with the supplied arguments.
    /// </summary>
    /// <param name="arguments">The raw arguments to pass to the method</param>
    /// <returns>Whatever the invoked method returns</returns>
    /// <exception cref="MethodInvocationException">Wraps any exception thrown by the target method.</exception>
    public override object Invoke(params object[] arguments)
    {
        object[] newArgs;
        var methodInfo = FindBestMethod(arguments, out newArgs);
        try
        {
            return methodInfo.Invoke(_instance, newArgs);
        }
        catch (TargetInvocationException e)
        {
            var msg = e.InnerException == null ? "Error invoking method '" + methodInfo.ToString() + "'"
                                               : e.InnerException.Message;
            throw new MethodInvocationException(msg, e.InnerException);
        }
    }

    public override PSMemberInfo Copy()
    {
        return new PSMethod(Name, _classType, _instance, IsInstance);
    }

    /// <summary>
    /// Checks whether <paramref name="method"/> can be invoked with <paramref name="arguments"/>.
    /// On success, <paramref name="newArguments"/> holds one entry per parameter: converted
    /// arguments, defaults for omitted optional parameters, and a packed "params" array.
    /// </summary>
    private bool MethodFitsArgs(MethodInfo method, object[] arguments, out object[] newArguments)
    {
        var numArgs = arguments.Length;
        var paras = method.GetParameters();
        var numParams = paras.Length;
        newArguments = new object[numParams];
        // Number of leading arguments that map one-to-one onto parameters.
        var minCommon = numArgs;
        if (numArgs < numParams)
        {
            // Fewer arguments than parameters: every trailing parameter must be
            // optional (filled with its default) or a "params" array (filled with
            // an empty array) for the method to fit.
            for (int i = numArgs; i < numParams; i++)
            {
                var curParam = paras[i];
                if (curParam.IsOptional)
                {
                    // Bug fix: the default belongs at parameter slot i, not i - numArgs.
                    // The old index misplaced the defaults into the leading slots (where
                    // they were later overwritten) and left the trailing slots null.
                    newArguments[i] = curParam.DefaultValue;
                }
                else if (IsParamsParameter(curParam))
                {
                    newArguments[i] = Array.CreateInstance(curParam.ParameterType.GetElementType(), 0);
                }
                else
                {
                    return false;
                }
            }
        }
        else if (numArgs > numParams)
        {
            // More arguments than parameters: the last parameter must be a "params"
            // array that absorbs all excess arguments.
            if (numParams == 0)
            {
                return false;
            }
            var lastParam = paras[numParams - 1];
            if (!IsParamsParameter(lastParam))
            {
                return false;
            }
            var paramsType = lastParam.ParameterType.GetElementType();
            var paramsArray = Array.CreateInstance(paramsType, numArgs - numParams + 1);
            for (int i = numParams - 1; i < numArgs; i++)
            {
                object converted;
                if (!LanguagePrimitives.TryConvertTo(arguments[i], paramsType, out converted))
                {
                    return false;
                }
                paramsArray.SetValue(converted, i - numParams + 1);
            }
            newArguments[numParams - 1] = paramsArray;
            minCommon = numParams - 1;
        }
        // Convert the arguments that map directly onto parameters.
        for (int i = 0; i < minCommon; i++)
        {
            object converted;
            if (!LanguagePrimitives.TryConvertTo(arguments[i], paras[i].ParameterType, out converted))
            {
                return false;
            }
            newArguments[i] = converted;
        }
        return true;
    }

    /// <summary>
    /// True if the parameter is declared with the "params" keyword.
    /// </summary>
    private bool IsParamsParameter(ParameterInfo info)
    {
        return info.GetCustomAttributes(typeof(ParamArrayAttribute), true).Any();
    }

    /// <summary>
    /// Finds the single overload that fits the arguments, preferring an exact
    /// signature match; throws when no overload or more than one overload matches.
    /// </summary>
    private MethodInfo FindBestMethod(object[] arguments, out object[] newArguments)
    {
        var candidates = new List<Tuple<MethodInfo, object[]>>();
        // try direct match first (only possible when no argument is null,
        // because GetMethod needs the concrete argument types)
        if (!arguments.Contains(null))
        {
            var argTypes = (from arg in arguments
                            select arg.GetType()).ToArray();
            var methodInfo = _classType.GetMethod(Name, argTypes);
            if (methodInfo != null && methodInfo.IsPublic && methodInfo.IsStatic != IsInstance)
            {
                // this doesn't mean that we got all arguments for methodInfo, e.g. we'd have a
                // methodInfo for a method that takes an optional argument although argTypes doesn't
                // include it. As old mono versions don't support invoking with optional arguments
                // we need to take care of it
                object[] fittingArgs;
                if (MethodFitsArgs(methodInfo, arguments, out fittingArgs))
                {
                    newArguments = fittingArgs;
                    return methodInfo;
                }
                // if we weren't successful (shouldn't be the case, but who knows), we try all overloads
            }
        }
        // then check methods with conversion, optional parameters or "params" parameter
        foreach (var method in Overloads)
        {
            object[] fittingArgs;
            if (MethodFitsArgs(method, arguments, out fittingArgs))
            {
                candidates.Add(new Tuple<MethodInfo, object[]>(method, fittingArgs));
            }
        }
        if (candidates.Count < 1)
        {
            throw new PSArgumentException("The method (or none of its overloads) takes the given arguments!");
        }
        else if (candidates.Count > 1)
        {
            var candidateStrings = String.Join(Environment.NewLine, from cand in candidates
                                                                    select MethodWithParametersToString(cand.Item1));
            throw new PSArgumentException("Multiple overloaded functions match the given parameters: "
                + Environment.NewLine + candidateStrings);
        }
        else
        {
            var tuple = candidates[0];
            newArguments = tuple.Item2;
            return tuple.Item1;
        }
    }

    /// <summary>
    /// Formats a method as "Name(ParamType1, ParamType2)" for error messages.
    /// </summary>
    private string MethodWithParametersToString(MethodInfo method)
    {
        string paras = String.Join(", ", from param in method.GetParameters()
                                         select param.ParameterType.Name);
        return String.Format("{0}({1})", method.Name, paras);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Security.Principal;
using Xunit;
namespace System.Security.AccessControl.Tests
{
/// <summary>
/// Tests for SystemAcl.Purge: removing all explicit (non-inherited) audit ACEs
/// that belong to a given SecurityIdentifier.
/// </summary>
public partial class SystemAcl_Purge
{
/// <summary>
/// Exercises Purge across the main ACE shapes: empty ACL, explicit/inherited
/// CommonAces for matching and non-matching SIDs, and CustomAce/CompoundAce/ObjectAce
/// entries. AceFlags byte values encode flag combinations (199 = all flags except
/// InheritOnly and Inherited; 215 = all flags except InheritOnly; 207 = all flags
/// except Inherited).
/// </summary>
[Fact]
public static void BasicValidationTestCases()
{
bool isContainer = false;
bool isDS = false;
RawAcl rawAcl = null;
SystemAcl systemAcl = null;
int aceCount = 0;
SecurityIdentifier sid = null;
GenericAce gAce = null;
byte revision = 0;
int capacity = 0;
//CustomAce constructor parameters
AceType aceType = AceType.AccessAllowed;
AceFlags aceFlag = AceFlags.None;
byte[] opaque = null;
//CompoundAce constructor additional parameters
int accessMask = 0;
CompoundAceType compoundAceType = CompoundAceType.Impersonation;
string sidStr = "BG";
//CommonAce constructor additional parameters
AceQualifier aceQualifier = 0;
//ObjectAce constructor additional parameters
ObjectAceFlags objectAceFlag = 0;
Guid objectAceType;
Guid inheritedObjectAceType;
//case 1, no Ace
revision = 127;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
isContainer = true;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
aceCount = 0;
sidStr = "BG";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
Assert.True(TestPurge(systemAcl, sid, aceCount));
//case 2, only have 1 explicit Ace of the sid
revision = 0;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
sidStr = "BG";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
//199 has all aceflags but inheritedonly and inherited
gAce = new CommonAce((AceFlags)199, AceQualifier.SystemAudit, 1, sid, false, null);
rawAcl.InsertAce(0, gAce);
isContainer = false;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
//the explicit ACE matches the purged SID, so nothing should remain
aceCount = 0;
Assert.True(TestPurge(systemAcl, sid, aceCount));
//case 3, only have 1 explicit Ace of different sid
revision = 0;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
//199 has all aceflags but inheritedonly and inherited
sidStr = "BG";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
gAce = new CommonAce((AceFlags)199, AceQualifier.SystemAudit, 1, sid, false, null);
rawAcl.InsertAce(0, gAce);
isContainer = false;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
//purging a different SID ("BA") must leave the "BG" ACE in place
aceCount = 1;
sidStr = "BA";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
Assert.True(TestPurge(systemAcl, sid, aceCount));
//case 4, only have 1 inherited Ace of the sid
revision = 0;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
sidStr = "BG";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
//215 has all aceflags but inheritedonly
gAce = new CommonAce((AceFlags)215, AceQualifier.SystemAudit, 1, sid, false, null);
rawAcl.InsertAce(0, gAce);
isContainer = false;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
//Purge only removes explicit ACEs, so the inherited ACE survives
aceCount = 1;
Assert.True(TestPurge(systemAcl, sid, aceCount));
//case 5, have one explicit Ace and one inherited Ace of the sid
revision = 255;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
sidStr = "BG";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
//199 has all aceflags but inheritedonly and inherited
gAce = new CommonAce((AceFlags)199, AceQualifier.SystemAudit, 1, sid, false, null);
rawAcl.InsertAce(0, gAce);
//215 has all aceflags but inheritedonly
gAce = new CommonAce((AceFlags)215, AceQualifier.SystemAudit, 2, sid, false, null);
rawAcl.InsertAce(1, gAce);
isContainer = true;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
//only the inherited ACE should remain after the purge
aceCount = 1;
Assert.True(TestPurge(systemAcl, sid, aceCount));
//case 6, have two explicit Aces of the sid
revision = 255;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
sidStr = "BG";
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid(sidStr));
gAce = new CommonAce(AceFlags.FailedAccess, AceQualifier.SystemAudit, 1, sid, false, null);
rawAcl.InsertAce(0, gAce);
gAce = new CommonAce(AceFlags.SuccessfulAccess, AceQualifier.SystemAudit, 2, sid, false, null);
rawAcl.InsertAce(0, gAce);
isContainer = true;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
aceCount = 0;
Assert.True(TestPurge(systemAcl, sid, 0));
//case 7, 1 explicit CustomAce
Assert.Throws<InvalidOperationException>(() =>
{
revision = 127;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
aceType = AceType.MaxDefinedAceType + 1;
//199 has all aceflags but inheritedonly and inherited
aceFlag = (AceFlags)199;
opaque = null;
gAce = new CustomAce(aceType, aceFlag, opaque);
rawAcl.InsertAce(0, gAce);
isContainer = false;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid("BG"));
aceCount = 1;
//After Mark changes design to make ACL with any CustomAce, CompoundAce uncanonical and
//forbid the modification on uncanonical ACL, this case will throw InvalidOperationException
TestPurge(systemAcl, sid, aceCount);
});
//case 8, 1 explicit CompoundAce
Assert.Throws<InvalidOperationException>(() =>
{
revision = 127;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
//207 has all AceFlags but inherited
aceFlag = (AceFlags)207;
accessMask = 1;
compoundAceType = CompoundAceType.Impersonation;
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid("BG"));
gAce = new CompoundAce(aceFlag, accessMask, compoundAceType, sid);
rawAcl.InsertAce(0, gAce);
isContainer = true;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
aceCount = 0;
//After Mark changes design to make ACL with any CustomAce, CompoundAce uncanonical and
//forbid the modification on uncanonical ACL, this case will throw InvalidOperationException
TestPurge(systemAcl, sid, aceCount);
});
//case 9, 1 explicit ObjectAce
revision = 127;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
sid = new SecurityIdentifier(Utils.TranslateStringConstFormatSidToStandardFormatSid("BG"));
//207 has all AceFlags but inherited
aceFlag = (AceFlags)207;
aceQualifier = AceQualifier.SystemAudit;
accessMask = 1;
objectAceFlag = ObjectAceFlags.ObjectAceTypePresent | ObjectAceFlags.InheritedObjectAceTypePresent;
objectAceType = new Guid("11111111-1111-1111-1111-111111111111");
inheritedObjectAceType = new Guid("22222222-2222-2222-2222-222222222222");
gAce = new ObjectAce(aceFlag, aceQualifier, accessMask, sid, objectAceFlag, objectAceType, inheritedObjectAceType, false, null);
rawAcl.InsertAce(0, gAce);
isContainer = true;
isDS = true;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
aceCount = 0;
Assert.True(TestPurge(systemAcl, sid, aceCount));
}
/// <summary>
/// Error-path cases for Purge (currently only: a null SID must throw
/// ArgumentNullException).
/// </summary>
[Fact]
public static void AdditionalTestCases()
{
bool isContainer = false;
bool isDS = false;
RawAcl rawAcl = null;
SystemAcl systemAcl = null;
byte revision = 0;
int capacity = 0;
//case 1, null Sid
Assert.Throws<ArgumentNullException>(() =>
{
revision = 127;
capacity = 1;
rawAcl = new RawAcl(revision, capacity);
isContainer = true;
isDS = false;
systemAcl = new SystemAcl(isContainer, isDS, rawAcl);
systemAcl.Purge(null);
});
}
/// <summary>
/// Purges <paramref name="sid"/> from <paramref name="systemAcl"/> and verifies
/// that exactly <paramref name="aceCount"/> ACEs remain and that no remaining
/// non-inherited KnownAce still belongs to the purged SID.
/// </summary>
/// <param name="systemAcl">The ACL to purge</param>
/// <param name="sid">The SID whose explicit ACEs should be removed</param>
/// <param name="aceCount">The expected number of ACEs left after the purge</param>
/// <returns>True when the post-purge state matches expectations</returns>
private static bool TestPurge(SystemAcl systemAcl, SecurityIdentifier sid, int aceCount)
{
KnownAce ace = null;
systemAcl.Purge(sid);
if (aceCount != systemAcl.Count)
return false;
for (int i = 0; i < systemAcl.Count; i++)
{
ace = systemAcl[i] as KnownAce;
if (ace != null && ((ace.AceFlags & AceFlags.Inherited) == 0))
{
if (ace.SecurityIdentifier == sid)
return false;
}
}
return true;
}
}
}
| |
/*
THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
PARTICULAR PURPOSE.
This is sample code and is freely distributable.
*/
using System;
using System.Collections;
using System.Drawing;
using System.Drawing.Imaging;
namespace ImageManipulation
{
/// <summary>
/// Quantize using an Octree
/// </summary>
public class OctreeQuantizer : Quantizer
{
/// <summary>
/// Construct the octree quantizer
/// </summary>
/// <remarks>
/// The Octree quantizer is a two pass algorithm. The initial pass sets up the octree,
/// the second pass quantizes a color based on the nodes in the tree
/// </remarks>
/// <param name="maxColors">The maximum number of colors to return</param>
/// <param name="maxColorBits">The number of significant bits</param>
/// <exception cref="ArgumentOutOfRangeException">If maxColors is over 255 or maxColorBits is outside 1..8</exception>
public OctreeQuantizer ( int maxColors , int maxColorBits ) : base ( false )
{
if ( maxColors > 255 )
throw new ArgumentOutOfRangeException ( "maxColors" , maxColors , "The number of colors should be less than 256" ) ;
if ( ( maxColorBits < 1 ) | ( maxColorBits > 8 ) )
throw new ArgumentOutOfRangeException ( "maxColorBits" , maxColorBits , "This should be between 1 and 8" ) ;
// Construct the octree
_octree = new Octree ( maxColorBits ) ;
_maxColors = maxColors ;
}
/// <summary>
/// Process the pixel in the first pass of the algorithm
/// </summary>
/// <param name="pixel">The pixel to quantize</param>
/// <remarks>
/// This function need only be overridden if your quantize algorithm needs two passes,
/// such as an Octree quantizer.
/// </remarks>
protected override void InitialQuantizePixel ( Color32 pixel )
{
// Add the color to the octree
_octree.AddColor ( pixel ) ;
}
/// <summary>
/// Override this to process the pixel in the second pass of the algorithm
/// </summary>
/// <param name="pixel">The pixel to quantize</param>
/// <returns>The quantized value</returns>
protected override byte QuantizePixel ( Color32 pixel )
{
byte paletteIndex = (byte)_maxColors ; // The color at [_maxColors] is set to transparent
// Get the palette index if this non-transparent
if ( pixel.Alpha > 0 )
paletteIndex = (byte)_octree.GetPaletteIndex ( pixel ) ;
return paletteIndex ;
}
/// <summary>
/// Retrieve the palette for the quantized image
/// </summary>
/// <param name="original">Any old palette, this is overwritten</param>
/// <returns>The new color palette</returns>
protected override ColorPalette GetPalette ( ColorPalette original )
{
// First off convert the octree to _maxColors colors
// (reduced to at most _maxColors - 1 leaves so slot [_maxColors] stays free for transparency)
ArrayList palette = _octree.Palletize ( _maxColors - 1 ) ;
// Then convert the palette based on those colors
// NOTE(review): palette.Count may be smaller than _maxColors - 1; entries between
// palette.Count and _maxColors - 1 keep whatever the incoming palette held.
for ( int index = 0 ; index < palette.Count ; index++ )
original.Entries[index] = (Color)palette[index] ;
// Add the transparent color
original.Entries[_maxColors] = Color.FromArgb ( 0 , 0 , 0 , 0 ) ;
return original ;
}
/// <summary>
/// Stores the tree
/// </summary>
private Octree _octree ;
/// <summary>
/// Maximum allowed number of colors in the output palette
/// </summary>
private int _maxColors ;
/// <summary>
/// Class which does the actual quantization
/// </summary>
private class Octree
{
/// <summary>
/// Construct the octree
/// </summary>
/// <param name="maxColorBits">The maximum number of significant bits in the image</param>
public Octree ( int maxColorBits )
{
_maxColorBits = maxColorBits ;
_leafCount = 0 ;
_reducibleNodes = new OctreeNode[9] ;
_root = new OctreeNode ( 0 , _maxColorBits , this ) ;
_previousColor = 0 ;
_previousNode = null ;
}
/// <summary>
/// Add a given color value to the octree
/// </summary>
/// <param name="pixel">The pixel color to add</param>
public void AddColor ( Color32 pixel )
{
// Check if this request is for the same color as the last
if ( _previousColor == pixel.ARGB )
{
// If so, check if I have a previous node setup. This will only occur if the first color in the image
// happens to be black, with an alpha component of zero.
if ( null == _previousNode )
{
_previousColor = pixel.ARGB ;
_root.AddColor ( pixel , _maxColorBits , 0 , this ) ;
}
else
// Just update the previous node
_previousNode.Increment ( pixel ) ;
}
else
{
_previousColor = pixel.ARGB ;
_root.AddColor ( pixel , _maxColorBits , 0 , this ) ;
}
}
/// <summary>
/// Reduce the depth of the tree
/// </summary>
/// <remarks>
/// NOTE(review): assumes at least one reducible node exists; if the list were empty
/// this would dereference a null node — confirm callers only invoke while Leaves > 0.
/// </remarks>
public void Reduce ( )
{
int index ;
// Find the deepest level containing at least one reducible node
for ( index = _maxColorBits - 1 ; ( index > 0 ) && ( null == _reducibleNodes[index] ) ; index-- ) ;
// Reduce the node most recently added to the list at level 'index'
OctreeNode node = _reducibleNodes[index] ;
_reducibleNodes[index] = node.NextReducible ;
// Decrement the leaf count after reducing the node
_leafCount -= node.Reduce ( ) ;
// And just in case I've reduced the last color to be added, and the next color to
// be added is the same, invalidate the previousNode...
_previousNode = null ;
}
/// <summary>
/// Get/Set the number of leaves in the tree
/// </summary>
public int Leaves
{
get { return _leafCount ; }
set { _leafCount = value ; }
}
/// <summary>
/// Return the array of reducible nodes
/// </summary>
protected OctreeNode[] ReducibleNodes
{
get { return _reducibleNodes ; }
}
/// <summary>
/// Keep track of the previous node that was quantized
/// </summary>
/// <param name="node">The node last quantized</param>
protected void TrackPrevious ( OctreeNode node )
{
_previousNode = node ;
}
/// <summary>
/// Convert the nodes in the octree to a palette with a maximum of colorCount colors
/// </summary>
/// <param name="colorCount">The maximum number of colors</param>
/// <returns>An arraylist with the palettized colors</returns>
public ArrayList Palletize ( int colorCount )
{
// Reduce the tree until it has few enough leaves to fit in the palette
while ( Leaves > colorCount )
Reduce ( ) ;
// Now palettize the nodes
ArrayList palette = new ArrayList ( Leaves ) ;
int paletteIndex = 0 ;
_root.ConstructPalette ( palette , ref paletteIndex ) ;
// And return the palette
return palette ;
}
/// <summary>
/// Get the palette index for the passed color
/// </summary>
/// <param name="pixel">The pixel color to look up</param>
/// <returns>The index of the closest palette entry</returns>
public int GetPaletteIndex ( Color32 pixel )
{
return _root.GetPaletteIndex ( pixel , 0 ) ;
}
/// <summary>
/// Mask used when getting the appropriate pixels for a given node
/// (one bit per tree level, most significant bit first)
/// </summary>
private static int[] mask = new int[8] { 0x80 , 0x40 , 0x20 , 0x10 , 0x08 , 0x04 , 0x02 , 0x01 } ;
/// <summary>
/// The root of the octree
/// </summary>
private OctreeNode _root ;
/// <summary>
/// Number of leaves in the tree
/// </summary>
private int _leafCount ;
/// <summary>
/// Array of reducible nodes
/// </summary>
private OctreeNode[] _reducibleNodes ;
/// <summary>
/// Maximum number of significant bits in the image
/// </summary>
private int _maxColorBits ;
/// <summary>
/// Store the last node quantized
/// </summary>
private OctreeNode _previousNode ;
/// <summary>
/// Cache the previous color quantized
/// </summary>
private int _previousColor ;
/// <summary>
/// Class which encapsulates each node in the tree
/// </summary>
protected class OctreeNode
{
/// <summary>
/// Construct the node
/// </summary>
/// <param name="level">The level in the tree = 0 - 7</param>
/// <param name="colorBits">The number of significant color bits in the image</param>
/// <param name="octree">The tree to which this node belongs</param>
public OctreeNode ( int level , int colorBits , Octree octree )
{
// Construct the new node
_leaf = ( level == colorBits ) ;
_red = _green = _blue = 0 ;
_pixelCount = 0 ;
// If a leaf, increment the leaf count
if ( _leaf )
{
octree.Leaves++ ;
_nextReducible = null ;
_children = null ;
}
else
{
// Otherwise add this to the reducible nodes
_nextReducible = octree.ReducibleNodes[level] ;
octree.ReducibleNodes[level] = this ;
_children = new OctreeNode[8] ;
}
}
/// <summary>
/// Add a color into the tree
/// </summary>
/// <param name="pixel">The color</param>
/// <param name="colorBits">The number of significant color bits</param>
/// <param name="level">The level in the tree</param>
/// <param name="octree">The tree to which this node belongs</param>
public void AddColor ( Color32 pixel , int colorBits , int level , Octree octree )
{
// Update the color information if this is a leaf
if ( _leaf )
{
Increment ( pixel ) ;
// Setup the previous node
octree.TrackPrevious ( this ) ;
}
else
{
// Go to the next level down in the tree:
// extract bit 'level' of each of R, G and B and combine them into a
// 3-bit child index (red -> bit 2, green -> bit 1, blue -> bit 0)
int shift = 7 - level ;
int index = ( ( pixel.Red & mask[level] ) >> ( shift - 2 ) ) |
( ( pixel.Green & mask[level] ) >> ( shift - 1 ) ) |
( ( pixel.Blue & mask[level] ) >> ( shift ) ) ;
OctreeNode child = _children[index] ;
if ( null == child )
{
// Create a new child node & store in the array
child = new OctreeNode ( level + 1 , colorBits , octree ) ;
_children[index] = child ;
}
// Add the color to the child node
child.AddColor ( pixel , colorBits , level + 1 , octree ) ;
}
}
/// <summary>
/// Get/Set the next reducible node
/// </summary>
public OctreeNode NextReducible
{
get { return _nextReducible ; }
set { _nextReducible = value ; }
}
/// <summary>
/// Return the child nodes
/// </summary>
public OctreeNode[] Children
{
get { return _children ; }
}
/// <summary>
/// Reduce this node by removing all of its children
/// </summary>
/// <returns>The number of leaves removed</returns>
public int Reduce ( )
{
_red = _green = _blue = 0 ;
int children = 0 ;
// Loop through all children and add their information to this node
for ( int index = 0 ; index < 8 ; index++ )
{
if ( null != _children[index] )
{
_red += _children[index]._red ;
_green += _children[index]._green ;
_blue += _children[index]._blue ;
_pixelCount += _children[index]._pixelCount ;
++children ;
_children[index] = null ;
}
}
// Now change this to a leaf node
_leaf = true ;
// Return the number of nodes to decrement the leaf count by
// (the children become one merged leaf, hence children - 1)
return ( children - 1 ) ;
}
/// <summary>
/// Traverse the tree, building up the color palette
/// </summary>
/// <param name="palette">The palette</param>
/// <param name="paletteIndex">The current palette index</param>
public void ConstructPalette ( ArrayList palette , ref int paletteIndex )
{
if ( _leaf )
{
// Consume the next palette index
_paletteIndex = paletteIndex++ ;
// And set the color of the palette entry to the mean color of all
// pixels accumulated in this leaf
palette.Add ( Color.FromArgb ( _red / _pixelCount , _green / _pixelCount , _blue / _pixelCount ) ) ;
}
else
{
// Loop through children looking for leaves
for ( int index = 0 ; index < 8 ; index++ )
{
if ( null != _children[index] )
_children[index].ConstructPalette ( palette , ref paletteIndex ) ;
}
}
}
/// <summary>
/// Return the palette index for the passed color
/// </summary>
public int GetPaletteIndex ( Color32 pixel , int level )
{
int paletteIndex = _paletteIndex ;
if ( !_leaf )
{
// Same child-index computation as AddColor; a color can only be looked
// up along a path that was populated during the first pass
int shift = 7 - level ;
int index = ( ( pixel.Red & mask[level] ) >> ( shift - 2 ) ) |
( ( pixel.Green & mask[level] ) >> ( shift - 1 ) ) |
( ( pixel.Blue & mask[level] ) >> ( shift ) ) ;
if ( null != _children[index] )
paletteIndex = _children[index].GetPaletteIndex ( pixel , level + 1 ) ;
else
throw new Exception ( "Didn't expect this!" ) ;
}
return paletteIndex ;
}
/// <summary>
/// Increment the pixel count and add to the color information
/// </summary>
public void Increment ( Color32 pixel )
{
_pixelCount++ ;
_red += pixel.Red ;
_green += pixel.Green ;
_blue += pixel.Blue ;
}
/// <summary>
/// Flag indicating that this is a leaf node
/// </summary>
private bool _leaf ;
/// <summary>
/// Number of pixels in this node
/// </summary>
private int _pixelCount ;
/// <summary>
/// Red component
/// </summary>
private int _red ;
/// <summary>
/// Green Component
/// </summary>
private int _green ;
/// <summary>
/// Blue component
/// </summary>
private int _blue ;
/// <summary>
/// Pointers to any child nodes
/// </summary>
private OctreeNode[] _children ;
/// <summary>
/// Pointer to next reducible node
/// </summary>
private OctreeNode _nextReducible ;
/// <summary>
/// The index of this node in the palette
/// </summary>
private int _paletteIndex ;
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Editor;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Implementation.GoToDefinition;
using Microsoft.CodeAnalysis.Editor.Undo;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.GeneratedCodeRecognition;
using Microsoft.VisualStudio.LanguageServices.Implementation;
using Microsoft.VisualStudio.LanguageServices.Implementation.CodeModel;
using Microsoft.VisualStudio.LanguageServices.Implementation.Interop;
using Microsoft.VisualStudio.LanguageServices.Implementation.Library.ObjectBrowser.Lists;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem;
using Microsoft.VisualStudio.Shell;
using Roslyn.Utilities;
namespace Microsoft.VisualStudio.LanguageServices
{
[Export(typeof(VisualStudioWorkspace))]
[Export(typeof(VisualStudioWorkspaceImpl))]
internal class RoslynVisualStudioWorkspace : VisualStudioWorkspaceImpl
{
// MEF-imported presenters/providers used by TryGoToDefinition and TryFindAllReferences.
private readonly IEnumerable<Lazy<INavigableItemsPresenter>> _navigableItemsPresenters;
private readonly IEnumerable<Lazy<IReferencedSymbolsPresenter>> _referencedSymbolsPresenters;
private readonly IEnumerable<Lazy<INavigableDefinitionProvider>> _externalDefinitionProviders;
/// <summary>
/// MEF constructor. Registers this instance as the primary workspace and wires
/// up the standard Visual Studio workspace services before storing the imports.
/// </summary>
[ImportingConstructor]
private RoslynVisualStudioWorkspace(
SVsServiceProvider serviceProvider,
SaveEventsService saveEventsService,
[ImportMany] IEnumerable<Lazy<INavigableItemsPresenter>> navigableItemsPresenters,
[ImportMany] IEnumerable<Lazy<IReferencedSymbolsPresenter>> referencedSymbolsPresenters,
[ImportMany] IEnumerable<Lazy<INavigableDefinitionProvider>> externalDefinitionProviders)
: base(
serviceProvider,
backgroundWork: WorkspaceBackgroundWork.ParseAndCompile)
{
// Register before initialization so other components can find the primary workspace.
PrimaryWorkspace.Register(this);
InitializeStandardVisualStudioWorkspace(serviceProvider, saveEventsService);
_navigableItemsPresenters = navigableItemsPresenters;
_referencedSymbolsPresenters = referencedSymbolsPresenters;
_externalDefinitionProviders = externalDefinitionProviders;
}
/// <summary>
/// Returns the legacy EnvDTE file code model for the given document, or null when
/// the owning project cannot (or does not know how to) create one for the file.
/// </summary>
/// <exception cref="ArgumentNullException">If documentId is null.</exception>
/// <exception cref="ArgumentException">If the id does not belong to this workspace.</exception>
public override EnvDTE.FileCodeModel GetFileCodeModel(DocumentId documentId)
{
    if (documentId == null)
    {
        throw new ArgumentNullException(nameof(documentId));
    }
    var hostProject = ProjectTracker.GetProject(documentId.ProjectId);
    if (hostProject == null)
    {
        throw new ArgumentException(ServicesVSResources.DocumentIdNotFromWorkspace, nameof(documentId));
    }
    var hostDocument = hostProject.GetDocumentOrAdditionalDocument(documentId);
    if (hostDocument == null)
    {
        throw new ArgumentException(ServicesVSResources.DocumentIdNotFromWorkspace, nameof(documentId));
    }
    // Only projects that expose a code model can produce a FileCodeModel.
    var codeModelProvider = hostProject as IProjectCodeModelProvider;
    if (codeModelProvider == null)
    {
        return null;
    }
    var projectCodeModel = codeModelProvider.ProjectCodeModel;
    return projectCodeModel.CanCreateFileCodeModelThroughProject(hostDocument.FilePath)
        ? (EnvDTE.FileCodeModel)projectCodeModel.CreateFileCodeModelThroughProject(hostDocument.FilePath)
        : null;
}
/// <summary>
/// Notifies the project's code model cache that a source file is being renamed.
/// Returns false (without throwing) when any link in the chain — document id,
/// project, document, code model provider or cache — is missing.
/// </summary>
internal override bool RenameFileCodeModelInstance(DocumentId documentId, string newFilePath)
{
    if (documentId == null)
    {
        return false;
    }
    var hostProject = ProjectTracker.GetProject(documentId.ProjectId);
    var hostDocument = hostProject?.GetDocumentOrAdditionalDocument(documentId);
    if (hostDocument == null)
    {
        return false;
    }
    var provider = hostProject as IProjectCodeModelProvider;
    var codeModelCache = provider?.ProjectCodeModel.GetCodeModelCache();
    if (codeModelCache == null)
    {
        return false;
    }
    codeModelCache.OnSourceFileRenaming(hostDocument.FilePath, newFilePath);
    return true;
}
/// <summary>
/// Opens an invisible editor for the document with the given id by resolving the
/// host document and delegating to the host-document overload.
/// </summary>
internal override IInvisibleEditor OpenInvisibleEditor(DocumentId documentId)
{
    return OpenInvisibleEditor(GetHostDocument(documentId));
}
/// <summary>
/// Opens an invisible editor over the host document. The file is saved only when a
/// global undo transaction is currently open, and undo is disabled only for generated
/// code documents that are part of the current solution.
/// </summary>
internal override IInvisibleEditor OpenInvisibleEditor(IVisualStudioHostDocument hostDocument)
{
    // We need to ensure the file is saved, only if a global undo transaction is open.
    var undoService = this.Services.GetService<IGlobalUndoService>();
    var saveRequired = undoService.IsGlobalTransactionOpen(this);
    // Undo stays enabled for "additional documents" and for documents we can't find.
    var disableUndo = false;
    if (saveRequired && this.CurrentSolution.ContainsDocument(hostDocument.Id))
    {
        // Disable undo on generated documents.
        var recognitionService = this.Services.GetService<IGeneratedCodeRecognitionService>();
        disableUndo = recognitionService.IsGeneratedCode(this.CurrentSolution.GetDocument(hostDocument.Id));
    }
    return new InvisibleEditor(ServiceProvider, hostDocument.FilePath, saveRequired, disableUndo);
}
/// <summary>
/// Maps <paramref name="symbol"/> from the (possibly stale) <paramref name="project"/>
/// snapshot onto the workspace's current solution. Returns false when the project
/// no longer exists or the symbol cannot be resolved in the current compilation.
/// </summary>
private static bool TryResolveSymbol(ISymbol symbol, Project project, CancellationToken cancellationToken, out ISymbol resolvedSymbol, out Project resolvedProject)
{
    resolvedSymbol = null;
    resolvedProject = null;
    var currentProject = project.Solution.Workspace.CurrentSolution.GetProject(project.Id);
    if (currentProject == null)
    {
        return false;
    }
    // NOTE(review): originalCompilation is never read afterwards; presumably the
    // call exists for its side effect of realizing the original compilation so
    // the SymbolKey below can be created/resolved reliably — TODO confirm.
    var originalCompilation = project.GetCompilationAsync(cancellationToken).WaitAndGetResult(cancellationToken);
    var symbolId = SymbolKey.Create(symbol, cancellationToken);
    var currentCompilation = currentProject.GetCompilationAsync(cancellationToken).WaitAndGetResult(cancellationToken);
    var symbolInfo = symbolId.Resolve(currentCompilation, cancellationToken: cancellationToken);
    if (symbolInfo.Symbol == null)
    {
        return false;
    }
    resolvedSymbol = symbolInfo.Symbol;
    resolvedProject = currentProject;
    return true;
}
public override bool TryGoToDefinition(ISymbol symbol, Project project, CancellationToken cancellationToken)
{
    // Without a presenter there is nowhere to show the result.
    if (!_navigableItemsPresenters.Any())
    {
        return false;
    }

    ISymbol resolvedSymbol;
    Project resolvedProject;
    return TryResolveSymbol(symbol, project, cancellationToken, out resolvedSymbol, out resolvedProject)
        && GoToDefinitionHelpers.TryGoToDefinition(
            resolvedSymbol, resolvedProject, _externalDefinitionProviders, _navigableItemsPresenters, cancellationToken: cancellationToken);
}
/// <summary>
/// Finds all references to <paramref name="symbol"/> and displays them in the
/// registered presenters. Returns false when no presenter is registered or the
/// symbol cannot be resolved against the current solution.
/// </summary>
public override bool TryFindAllReferences(ISymbol symbol, Project project, CancellationToken cancellationToken)
{
    if (!_referencedSymbolsPresenters.Any())
    {
        return false;
    }

    ISymbol searchSymbol;
    Project searchProject;
    if (!TryResolveSymbol(symbol, project, cancellationToken, out searchSymbol, out searchProject))
    {
        return false;
    }

    var searchSolution = searchProject.Solution;

    // ToList() never returns null, so the previous "result != null" guard was
    // dead code; display the (possibly empty) result set unconditionally.
    var result = SymbolFinder
        .FindReferencesAsync(searchSymbol, searchSolution, cancellationToken)
        .WaitAndGetResult(cancellationToken).ToList();

    DisplayReferencedSymbols(searchSolution, result);
    return true;
}
public override void DisplayReferencedSymbols(Solution solution, IEnumerable<ReferencedSymbol> referencedSymbols)
{
    // Broadcast the result set to every registered presenter.
    foreach (var presenterReference in _referencedSymbolsPresenters)
    {
        presenterReference.Value.DisplayResult(solution, referencedSymbols);
    }
}
/// <summary>
/// Maps a symbol list item to an EnvDTE code element suitable for use as a
/// browse object. Returns null when any required piece — compilation, source
/// location, project, code model service, or a keyable syntax node — is
/// unavailable.
/// </summary>
internal override object GetBrowseObject(SymbolListItem symbolListItem)
{
    var compilation = symbolListItem.GetCompilation(this);
    if (compilation == null)
    {
        return null;
    }
    var symbol = symbolListItem.ResolveSymbol(compilation);
    // Only symbols declared in source (not metadata) can have a code element.
    var sourceLocation = symbol.Locations.Where(l => l.IsInSource).FirstOrDefault();
    if (sourceLocation == null)
    {
        return null;
    }
    var projectId = symbolListItem.ProjectId;
    if (projectId == null)
    {
        return null;
    }
    var project = this.CurrentSolution.GetProject(projectId);
    if (project == null)
    {
        return null;
    }
    var codeModelService = project.LanguageServices.GetService<ICodeModelService>();
    if (codeModelService == null)
    {
        return null;
    }
    var tree = sourceLocation.SourceTree;
    var document = project.GetDocument(tree);
    var vsFileCodeModel = this.GetFileCodeModel(document.Id);
    var fileCodeModel = ComAggregate.GetManagedObject<FileCodeModel>(vsFileCodeModel);
    if (fileCodeModel != null)
    {
        var syntaxNode = tree.GetRoot().FindNode(sourceLocation.SourceSpan);
        // Walk up the tree until we reach a node the code model can key on
        // (TryGetNodeKey returns an empty key for nodes it cannot track).
        while (syntaxNode != null)
        {
            if (!codeModelService.TryGetNodeKey(syntaxNode).IsEmpty)
            {
                break;
            }
            syntaxNode = syntaxNode.Parent;
        }
        if (syntaxNode != null)
        {
            var codeElement = fileCodeModel.GetOrCreateCodeElement<EnvDTE.CodeElement>(syntaxNode);
            if (codeElement != null)
            {
                return codeElement;
            }
        }
    }
    return null;
}
}
}
| |
/********************************************************************
The Multiverse Platform is made available under the MIT License.
Copyright (c) 2012 The Multiverse Foundation
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
*********************************************************************/
using System;
using System.Collections;
using System.Collections.Specialized;
using System.Collections.Generic;
using System.Xml;
using System.IO;
using System.Text;
using Vector2 = Axiom.MathLib.Vector2;
using Axiom.Core;
namespace Axiom.SceneManagers.Multiverse
{
// A named rectangular region within a larger image, in pixel coordinates.
public struct ImageRect
{
    public string name;
    public int left;
    public int right;
    public int top;
    public int bottom;

    public ImageRect(string name, int left, int right, int top, int bottom)
    {
        this.name = name;
        this.left = left;
        this.right = right;
        this.top = top;
        this.bottom = bottom;
    }

    // Pixel width of the rectangle.
    public int Width
    {
        get { return right - left; }
    }

    // Pixel height of the rectangle.
    public int Height
    {
        get { return bottom - top; }
    }

    // Returns coordinates of the start of the image in the
    // floating point range 0..1, and the size as a fraction of
    // the width and height.
    public void UnitCoordStartAndSize(int totalWidth, int totalHeight,
                                      out Vector2 start, out Vector2 size)
    {
        // Size of one pixel in unit (0..1) coordinates.
        float texelWidth = 1.0f / (float)totalWidth;
        float texelHeight = 1.0f / (float)totalHeight;

        // Both start and size carry a half-pixel offset — presumably for
        // texel-center sampling; preserved exactly as the original computed it.
        start = new Vector2((left + 0.5f) * texelWidth,
                            (top + 0.5f) * texelHeight);
        size = new Vector2(((Width - 1) + 0.5f) * texelWidth,
                           ((Height - 1) + 0.5f) * texelHeight);
    }
}
// A set of named rectangular regions ("image rects") within a single image,
// loadable from an MVImageSet XML description.
public class MVImageSet
{
    // Width and Height in pixels
    public int width;
    public int height;

    // A list of regions of the image
    public List<ImageRect> imageRects;

    public MVImageSet()
    {
        width = 0;
        height = 0;
        imageRects = new List<ImageRect>();
    }

    // Returns coordinates of the start of the image in the
    // floating point range 0..1, and the size as a fraction of
    // the width and height. The return value is true if the
    // named image was found; false otherwise.
    public bool FindImageStartAndSize (string imageName,
                                       out Vector2 start, out Vector2 size)
    {
        foreach(ImageRect rect in imageRects)
        {
            if (rect.name == imageName)
            {
                rect.UnitCoordStartAndSize(width, height, out start, out size);
                return true;
            }
        }
        // Not found: out parameters must still be assigned.
        start = new Vector2(0f, 0f);
        size = new Vector2(0f, 0f);
        return false;
    }

    public void AddImageRect(ImageRect rect)
    {
        imageRects.Add(rect);
    }

    // Loads an image set description from the named common resource.
    // Returns null if the resource cannot be found or the root element
    // is not "MVImageSet".
    public static MVImageSet FindMVImageSet(string fileName)
    {
        Stream stream = null;
        try {
            stream = ResourceManager.FindCommonResourceData(fileName);
        } catch (AxiomException) {
            return null;
        }
        // Fix: dispose the stream and the XmlReader; previously both were
        // leaked on every call.
        using (stream)
        {
            XmlReaderSettings settings = new XmlReaderSettings();
            settings.IgnoreWhitespace = true;
            using (XmlReader r = XmlReader.Create(stream, settings))
            {
                r.Read();
                if (r.Name == "MVImageSet")
                {
                    MVImageSet imageSet = new MVImageSet();
                    imageSet.FromXML(r);
                    return imageSet;
                }
            }
        }
        return null;
    }

    // Serializes this image set (dimensions plus all rectangles) as XML.
    public void ToXML(XmlTextWriter w)
    {
        w.WriteStartElement("MVImageSet");
        w.WriteElementString("width", width.ToString());
        w.WriteElementString("height", height.ToString());
        foreach (ImageRect r in imageRects)
        {
            w.WriteStartElement("ImageRect");
            w.WriteAttributeString("name", r.name);
            w.WriteAttributeString("left", r.left.ToString());
            w.WriteAttributeString("right", r.right.ToString());
            w.WriteAttributeString("top", r.top.ToString());
            w.WriteAttributeString("bottom", r.bottom.ToString());
            w.WriteEndElement();
        }
        w.WriteEndElement();
    }

    // Populates this object from the reader, which must be positioned on the
    // MVImageSet root element.
    private void FromXML(XmlReader r)
    {
        while (r.Read())
        {
            // look for the start of an element
            if (r.NodeType == XmlNodeType.Element)
            {
                // parse that element
                ParseElement(r);
            }
            else if (r.NodeType == XmlNodeType.EndElement)
            {
                // if we found an end element, it means we are at the end of the terrain description
                return;
            }
        }
    }

    // Parses one child element (width, height, or ImageRect); unknown
    // elements fall through to the end-element check below.
    protected void ParseElement(XmlReader r)
    {
        bool readEnd = true;

        // set the field in this object based on the element we just read
        switch (r.Name)
        {
            case "width":
                // read the value
                r.Read();
                if (r.NodeType != XmlNodeType.Text)
                    return;
                width = int.Parse(r.Value);
                break;
            case "height":
                // read the value
                r.Read();
                if (r.NodeType != XmlNodeType.Text)
                    return;
                height = int.Parse(r.Value);
                break;
            case "ImageRect":
                string name = "";
                int left = 0;
                int right = 0;
                int top = 0;
                int bottom = height;
                for (int i = 0; i < r.AttributeCount; i++)
                {
                    r.MoveToAttribute(i);

                    // set the field in this object based on the element we just read
                    switch (r.Name)
                    {
                        case "name":
                            name = r.Value;
                            break;
                        case "left":
                            left = int.Parse(r.Value);
                            break;
                        case "right":
                            right = int.Parse(r.Value);
                            break;
                        case "top":
                            top = int.Parse(r.Value);
                            break;
                        case "bottom":
                            bottom = int.Parse(r.Value);
                            break;
                    }
                }
                r.MoveToElement(); //Moves the reader back to the element node.
                AddImageRect(new ImageRect(name, left, right, top, bottom));
                readEnd = false;
                break;
        }

        if (readEnd)
        {
            // error out if we dont see an end element here
            r.Read();
            if (r.NodeType != XmlNodeType.EndElement)
                return;
        }
    }
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
#if FEATURE_CORE_DLR
using System.Linq.Expressions;
#else
using Microsoft.Scripting.Ast;
#endif
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic;
using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;
using System.Text;
namespace Microsoft.Scripting {
[DebuggerDisplay("{_path ?? \"<anonymous>\"}")]
public sealed class SourceUnit {
    private readonly SourceCodeKind _kind;
    private readonly string _path;
    private readonly LanguageContext _language;
    private readonly TextContentProvider _contentProvider;

    // SourceUnit is serializable => updated parse result is transmitted
    // back to the host unless the unit is passed by-ref
    private ScriptCodeParseResult? _parseResult;

    // Optional mapping from physical to host-visible line numbers;
    // see SetLineMapping/MapLine.
    private KeyValuePair<int, int>[] _lineMap;

    /// <summary>
    /// Identification of the source unit. Assigned by the host.
    /// The format and semantics is host dependent (could be a path on file system or URL).
    /// Empty string for anonymous source units.
    /// </summary>
    public string Path {
        get { return _path; }
    }

    /// <summary>
    /// True when the host assigned a path; false for anonymous units.
    /// </summary>
    public bool HasPath {
        get { return _path != null; }
    }

    /// <summary>
    /// The kind of code this unit contains.
    /// </summary>
    public SourceCodeKind Kind {
        get { return _kind; }
    }

    /// <summary>
    /// Debug symbol document for this unit, or null for anonymous units.
    /// </summary>
    public SymbolDocumentInfo Document {
        get {
            // _path is valid to be null. In that case we cannot create a valid SymbolDocumentInfo.
            return _path == null ? null : Expression.SymbolDocument(_path, _language.LanguageGuid, _language.VendorGuid);
        }
    }

    /// <summary>
    /// LanguageContext of the language of the unit.
    /// </summary>
    public LanguageContext LanguageContext {
        get { return _language; }
    }

    /// <summary>
    /// Parses the unit with the language's default compiler options and reports
    /// whether the code is complete/incomplete/invalid.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
    public ScriptCodeParseResult GetCodeProperties() {
        return GetCodeProperties(_language.GetCompilerOptions());
    }

    /// <summary>
    /// Parses the unit with the given options. Errors are discarded
    /// (ErrorSink.Null); compilation is invoked only for its side effect of
    /// updating <see cref="CodeProperties"/>.
    /// </summary>
    public ScriptCodeParseResult GetCodeProperties(CompilerOptions options) {
        ContractUtils.RequiresNotNull(options, "options");
        _language.CompileSourceCode(this, options, ErrorSink.Null);
        return _parseResult ?? ScriptCodeParseResult.Complete;
    }

    /// <summary>
    /// Result of the most recent parse, if any. Set by the language's compiler.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1721:PropertyNamesShouldNotMatchGetMethods")] // TODO: fix
    public ScriptCodeParseResult? CodeProperties {
        get { return _parseResult; }
        set { _parseResult = value; }
    }

    /// <summary>
    /// Creates a source unit for the given language, backed by the given
    /// content provider. <paramref name="path"/> may be null (anonymous unit).
    /// </summary>
    public SourceUnit(LanguageContext context, TextContentProvider contentProvider, string path, SourceCodeKind kind) {
        Assert.NotNull(context, contentProvider);
        Debug.Assert(context.CanCreateSourceCode);

        _language = context;
        _contentProvider = contentProvider;
        _kind = kind;
        _path = path;
    }

    /// <summary>
    /// Opens a fresh reader over the unit's content.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
    public SourceCodeReader GetReader() {
        return _contentProvider.GetReader();
    }

    /// <summary>
    /// Reads specified range of lines (or less) from the source unit.
    /// Line numbers starts with 1.
    /// </summary>
    public string[] GetCodeLines(int start, int count) {
        ContractUtils.Requires(start > 0, "start");
        ContractUtils.Requires(count > 0, "count");

        List<string> result = new List<string>(count);

        using (SourceCodeReader reader = GetReader()) {
            // Skip ahead to the requested start line, then collect up to
            // count lines (fewer if the source ends first).
            reader.SeekLine(start);
            while (count > 0) {
                string line = reader.ReadLine();
                if (line == null) break;
                result.Add(line);
                count--;
            }
        }

        return result.ToArray();
    }

    /// <summary>
    /// Reads a single line (1-based); returns null when past the end.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
    public string GetCodeLine(int line) {
        string[] lines = GetCodeLines(line, 1);
        return (lines.Length > 0) ? lines[0] : null;
    }

    /// <summary>
    /// Reads the entire content of the unit as a string.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
    public string GetCode() {
        using (SourceCodeReader reader = GetReader()) {
            return reader.ReadToEnd();
        }
    }

    #region Line/File mapping

    /// <summary>
    /// Builds a SourceLocation whose line number has been mapped via the
    /// current line map (if any).
    /// </summary>
    public SourceLocation MakeLocation(int index, int line, int column) {
        return new SourceLocation(index, MapLine(line), column);
    }
    public SourceLocation MakeLocation(SourceLocation loc) {
        return new SourceLocation(loc.Index, MapLine(loc.Line), loc.Column);
    }

    /// <summary>
    /// Maps a physical line number through the line map set by
    /// <see cref="SetLineMapping"/>; identity when no map is set.
    /// </summary>
    public int MapLine(int line) {
        if (_lineMap != null) {
            int match = BinarySearch(_lineMap, line);
            int delta = line - _lineMap[match].Key;
            line = _lineMap[match].Value + delta;
            if (line < 1) {
                line = 1; // this is the minimum value
            }
        }

        return line;
    }

    /// <summary>
    /// True when a line map has been installed.
    /// </summary>
    public bool HasLineMapping {
        get {
            return _lineMap != null;
        }
    }

    // Finds the index of the map entry governing the given line: the entry
    // with the greatest key less than or equal to line (clamped to 0).
    private static int BinarySearch<T>(KeyValuePair<int, T>[] array, int line) {
        int match = Array.BinarySearch(array, new KeyValuePair<int, T>(line, default(T)), new KeyComparer<T>());
        if (match < 0) {
            // If we couldn't find an exact match for this line number, get the nearest
            // matching line number less than this one
            match = ~match - 1;

            // If our index = -1, it means that this line is before any line numbers that
            // we know about. If that's the case, use the first entry in the list
            if (match == -1) {
                match = 0;
            }
        }
        return match;
    }

    // Compares line-map entries by their physical line number (key) only.
    private class KeyComparer<T1> : IComparer<KeyValuePair<int, T1>> {
        public int Compare(KeyValuePair<int, T1> x, KeyValuePair<int, T1> y) {
            return x.Key - y.Key;
        }
    }

    #endregion

    #region Parsing, Compilation, Execution

    /// <summary>
    /// True when debug symbols should be emitted: the unit has a path and the
    /// domain is in debug mode.
    /// </summary>
    public bool EmitDebugSymbols {
        get {
            return HasPath && LanguageContext.DomainManager.Configuration.DebugMode;
        }
    }

    public ScriptCode Compile() {
        return Compile(ErrorSink.Default);
    }

    public ScriptCode Compile(ErrorSink errorSink) {
        return Compile(_language.GetCompilerOptions(), errorSink);
    }

    /// <summary>
    /// Errors are reported to the specified sink.
    /// Returns <c>null</c> if the parser cannot compile the code due to error(s).
    /// </summary>
    public ScriptCode Compile(CompilerOptions options, ErrorSink errorSink) {
        ContractUtils.RequiresNotNull(errorSink, "errorSink");
        ContractUtils.RequiresNotNull(options, "options");

        return _language.CompileSourceCode(this, options, errorSink);
    }

    /// <summary>
    /// Executes against a specified scope.
    /// </summary>
    public object Execute(Scope scope) {
        return Execute(scope, ErrorSink.Default);
    }

    /// <summary>
    /// Executes against a specified scope and reports errors to the given error sink.
    /// </summary>
    public object Execute(Scope scope, ErrorSink errorSink) {
        ContractUtils.RequiresNotNull(scope, "scope");

        ScriptCode compiledCode = Compile(_language.GetCompilerOptions(scope), errorSink);

        if (compiledCode == null) {
            throw new SyntaxErrorException();
        }

        return compiledCode.Run(scope);
    }

    /// <summary>
    /// Executes in a new scope created by the language.
    /// </summary>
    // NOTE(review): Compile() returns null when the code fails to parse (see
    // Compile(CompilerOptions, ErrorSink)); this overload and the two below
    // would then throw NullReferenceException rather than SyntaxErrorException
    // as Execute(Scope, ErrorSink) does — confirm this asymmetry is intended.
    public object Execute() {
        return Compile().Run();
    }

    /// <summary>
    /// Executes in a new scope created by the language.
    /// </summary>
    public object Execute(ErrorSink errorSink) {
        return Compile(errorSink).Run();
    }

    /// <summary>
    /// Executes in a new scope created by the language.
    /// </summary>
    public object Execute(CompilerOptions options, ErrorSink errorSink) {
        return Compile(options, errorSink).Run();
    }

    /// <summary>
    /// Runs the unit as a program via the language and returns its exit code.
    /// </summary>
    public int ExecuteProgram() {
        return _language.ExecuteProgram(this);
    }

    #endregion

    /// <summary>
    /// Installs (or clears, when null/empty) the physical-to-logical line map
    /// consulted by <see cref="MapLine"/>.
    /// </summary>
    public void SetLineMapping(KeyValuePair<int, int>[] lineMap) {
        // implementation detail: so we don't have to check for null and empty
        _lineMap = (lineMap == null || lineMap.Length == 0) ? null : lineMap;
    }
}
}
| |
/********************************************************************************************
Copyright (c) Microsoft Corporation
All rights reserved.
Microsoft Public License:
This license governs use of the accompanying software. If you use the software, you
accept this license. If you do not accept the license, do not use the software.
1. Definitions
The terms "reproduce," "reproduction," "derivative works," and "distribution" have the
same meaning here as under U.S. copyright law.
A "contribution" is the original software, or any additions or changes to the software.
A "contributor" is any person that distributes its contribution under this license.
"Licensed patents" are a contributor's patent claims that read directly on its contribution.
2. Grant of Rights
(A) Copyright Grant- Subject to the terms of this license, including the license conditions
and limitations in section 3, each contributor grants you a non-exclusive, worldwide,
royalty-free copyright license to reproduce its contribution, prepare derivative works of
its contribution, and distribute its contribution or any derivative works that you create.
(B) Patent Grant- Subject to the terms of this license, including the license conditions
and limitations in section 3, each contributor grants you a non-exclusive, worldwide,
royalty-free license under its licensed patents to make, have made, use, sell, offer for
sale, import, and/or otherwise dispose of its contribution in the software or derivative
works of the contribution in the software.
3. Conditions and Limitations
(A) No Trademark License- This license does not grant you rights to use any contributors'
name, logo, or trademarks.
(B) If you bring a patent claim against any contributor over patents that you claim are
infringed by the software, your patent license from such contributor to the software ends
automatically.
(C) If you distribute any portion of the software, you must retain all copyright, patent,
trademark, and attribution notices that are present in the software.
(D) If you distribute any portion of the software in source code form, you may do so only
under this license by including a complete copy of this license with your distribution.
If you distribute any portion of the software in compiled or object code form, you may only
do so under a license that complies with this license.
(E) The software is licensed "as-is." You bear the risk of using it. The contributors give
no express warranties, guarantees or conditions. You may have additional consumer rights
under your local laws which this license cannot change. To the extent permitted under your
local laws, the contributors exclude the implied warranties of merchantability, fitness for
a particular purpose and non-infringement.
********************************************************************************************/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
using MSBuild = Microsoft.Build.Evaluation;
using MSBuildExecution = Microsoft.Build.Execution;
namespace Microsoft.VisualStudio.Project
{
/// <summary>
/// Allows projects to group outputs according to usage.
/// Implements IVsOutputGroup2 over the MSBuild output-group items produced by
/// the "AllProjectOutputGroups" target.
/// </summary>
[CLSCompliant(false), ComVisible(true)]
public class OutputGroup : IVsOutputGroup2
{
    #region fields
    private ProjectConfig projectCfg;
    private ProjectNode project;

    private List<Output> outputs = new List<Output>();
    private Output keyOutput;
    private string name;
    private string targetName;
    #endregion

    #region properties
    /// <summary>
    /// Get the project configuration object associated with this output group
    /// </summary>
    [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Cfg")]
    protected ProjectConfig ProjectCfg
    {
        get { return projectCfg; }
    }

    /// <summary>
    /// Get the project object that produces this output group.
    /// </summary>
    protected ProjectNode Project
    {
        get { return project; }
    }

    /// <summary>
    /// Gets the msbuild target name which is assciated to the outputgroup.
    /// ProjectNode defines a static collection of output group names and their associated MsBuild target
    /// </summary>
    protected string TargetName
    {
        get { return targetName; }
    }
    #endregion

    #region ctors

    /// <summary>
    /// Constructor for IVSOutputGroup2 implementation
    /// </summary>
    /// <param name="outputName">Name of the output group. See VS_OUTPUTGROUP_CNAME_Build in vsshell.idl for the list of standard values</param>
    /// <param name="msBuildTargetName">MSBuild target name</param>
    /// <param name="projectManager">Project that produce this output</param>
    /// <param name="configuration">Configuration that produce this output</param>
    public OutputGroup(string outputName, string msBuildTargetName, ProjectNode projectManager, ProjectConfig configuration)
    {
        if(outputName == null)
            throw new ArgumentNullException("outputName");
        if(msBuildTargetName == null)
            throw new ArgumentNullException("msBuildTargetName"); // Fix: previously reported "outputName"
        if(projectManager == null)
            throw new ArgumentNullException("projectManager");
        if(configuration == null)
            throw new ArgumentNullException("configuration");

        name = outputName;
        targetName = msBuildTargetName;
        project = projectManager;
        projectCfg = configuration;
    }
    #endregion

    #region virtual methods
    /// <summary>
    /// Rebuilds the output list (and key output) for the current configuration
    /// by running the AllProjectOutputGroups target and reading its items.
    /// </summary>
    protected virtual void Refresh()
    {
        // Let MSBuild know which configuration we are working with
        project.SetConfiguration(projectCfg.ConfigName);

        // Generate dependencies if such a task exist
        const string generateDependencyList = "AllProjectOutputGroups";
        if(project.BuildProject.Targets.ContainsKey(generateDependencyList))
        {
            bool succeeded = false;
            project.BuildTarget(generateDependencyList, out succeeded);
            Debug.Assert(succeeded, "Failed to build target: " + generateDependencyList);
        }

        // Rebuild the content of our list of output
        string outputType = this.targetName + "Output";
        this.outputs.Clear();
        foreach (MSBuildExecution.ProjectItemInstance assembly in project.CurrentConfig.GetItems(outputType))
        {
            Output output = new Output(project, assembly);
            this.outputs.Add(output);

            // See if it is our key output
            if(String.Compare(assembly.GetMetadataValue("IsKeyOutput"), true.ToString(), StringComparison.OrdinalIgnoreCase) == 0)
                keyOutput = output;
        }

        project.SetCurrentConfiguration();

        // Now that the group is built we have to check if it is invalidated by a property
        // change on the project.
        // Fix: detach first so repeated Refresh() calls (e.g. from get_Outputs)
        // do not accumulate duplicate handlers; removing a handler that is not
        // attached is a no-op.
        project.OnProjectPropertyChanged -= new EventHandler<ProjectPropertyChangedArgs>(OnProjectPropertyChanged);
        project.OnProjectPropertyChanged += new EventHandler<ProjectPropertyChangedArgs>(OnProjectPropertyChanged);
    }

    public virtual void InvalidateGroup()
    {
        // Set keyOutput to null so that a refresh will be performed the next time
        // a property getter is called.
        if(null != keyOutput)
        {
            // Once the group is invalidated there is no more reason to listen for events.
            project.OnProjectPropertyChanged -= new EventHandler<ProjectPropertyChangedArgs>(OnProjectPropertyChanged);
        }
        keyOutput = null;
    }
    #endregion

    #region event handlers
    private void OnProjectPropertyChanged(object sender, ProjectPropertyChangedArgs args)
    {
        // In theory here we should decide if we have to invalidate the group according with the kind of property
        // that is changed.
        InvalidateGroup();
    }
    #endregion

    #region IVsOutputGroup2 Members

    public virtual int get_CanonicalName(out string pbstrCanonicalName)
    {
        pbstrCanonicalName = this.name;
        return VSConstants.S_OK;
    }

    public virtual int get_DeployDependencies(uint celt, IVsDeployDependency[] rgpdpd, uint[] pcActual)
    {
        return VSConstants.E_NOTIMPL;
    }

    public virtual int get_Description(out string pbstrDescription)
    {
        pbstrDescription = null;

        string description;
        int hr = this.get_CanonicalName(out description);
        if(ErrorHandler.Succeeded(hr))
            pbstrDescription = this.Project.GetOutputGroupDescription(description);
        return hr;
    }

    public virtual int get_DisplayName(out string pbstrDisplayName)
    {
        pbstrDisplayName = null;

        string displayName;
        int hr = this.get_CanonicalName(out displayName);
        if(ErrorHandler.Succeeded(hr))
            pbstrDisplayName = this.Project.GetOutputGroupDisplayName(displayName);
        return hr;
    }

    public virtual int get_KeyOutput(out string pbstrCanonicalName)
    {
        pbstrCanonicalName = null;
        if(keyOutput == null)
            Refresh();
        if(keyOutput == null)
        {
            pbstrCanonicalName = String.Empty;
            return VSConstants.S_FALSE;
        }
        return keyOutput.get_CanonicalName(out pbstrCanonicalName);
    }

    public virtual int get_KeyOutputObject(out IVsOutput2 ppKeyOutput)
    {
        if(keyOutput == null)
            Refresh();
        ppKeyOutput = keyOutput;
        if(ppKeyOutput == null)
            return VSConstants.S_FALSE;
        return VSConstants.S_OK;
    }

    public virtual int get_Outputs(uint celt, IVsOutput2[] rgpcfg, uint[] pcActual)
    {
        // Ensure that we are refreshed. This is somewhat of a hack that enables project to
        // project reference scenarios to work. Normally, output groups are populated as part
        // of build. However, in the project to project reference case, what ends up happening
        // is that the referencing projects requests the referenced project's output group
        // before a build is done on the referenced project.
        //
        // Furthermore, the project auto toolbox manager requires output groups to be populated
        // on project reopen as well...
        //
        // In the end, this is probably the right thing to do, though -- as it keeps the output
        // groups always up to date.
        Refresh();

        // See if only the caller only wants to know the count
        if(celt == 0 || rgpcfg == null)
        {
            if(pcActual != null && pcActual.Length > 0)
                pcActual[0] = (uint)outputs.Count;
            return VSConstants.S_OK;
        }

        // Fill the array with our outputs
        uint count = 0;
        foreach(Output output in outputs)
        {
            if(rgpcfg.Length > count && celt > count && output != null)
            {
                rgpcfg[count] = output;
                ++count;
            }
        }

        if(pcActual != null && pcActual.Length > 0)
            pcActual[0] = count;

        // If the number asked for does not match the number returned, return S_FALSE
        return (count == celt) ? VSConstants.S_OK : VSConstants.S_FALSE;
    }

    public virtual int get_ProjectCfg(out IVsProjectCfg2 ppIVsProjectCfg2)
    {
        ppIVsProjectCfg2 = (IVsProjectCfg2)this.projectCfg;
        return VSConstants.S_OK;
    }

    public virtual int get_Property(string pszProperty, out object pvar)
    {
        pvar = project.GetProjectProperty(pszProperty);
        return VSConstants.S_OK;
    }

    #endregion
}
}
| |
/*
* MindTouch Dream - a distributed REST framework
* Copyright (C) 2006-2011 MindTouch, Inc.
* www.mindtouch.com oss@mindtouch.com
*
* For community documentation and downloads visit wiki.developer.mindtouch.com;
* please review the licensing section.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Data;
using MindTouch.Collections;
using MindTouch.Dream;
using MindTouch.Xml;
namespace MindTouch.Data {
/// <summary>
/// Index information entity. Plain data holder; fields are populated by
/// <see cref="MysqlDocStoreManager"/> from the index lookup table.
/// </summary>
public class IndexInfo {

    /// <summary>
    /// Index name.
    /// </summary>
    public string Name;

    /// <summary>
    /// Database table name backing this index.
    /// </summary>
    public string Table;

    /// <summary>
    /// Indexed XPath expression.
    /// </summary>
    public string XPath;
}
/// <summary>
/// Provides an indexing service for use by a <see cref="MysqlDocStore"/> instances.
/// </summary>
public interface IMysqlDocStoreIndexer {

    //--- Properties ---

    /// <summary>
    /// Get the collection configuration.
    /// </summary>
    XDoc Config { get; }

    /// <summary>
    /// Get the collection name.
    /// </summary>
    string Name { get; }

    //--- Methods ---

    /// <summary>
    /// Queue a document for indexing.
    /// </summary>
    /// <param name="id">Primary key of document.</param>
    /// <param name="revision">Revision of document.</param>
    /// <param name="doc">Document to be indexed.</param>
    void QueueUpdate(int id, int revision, XDoc doc);

    /// <summary>
    /// Queue a document for removal from all indicies.
    /// </summary>
    /// <param name="id">Primary key of document.</param>
    void QueueDelete(int id);

    /// <summary>
    /// Get the index information for a key.
    /// </summary>
    /// <param name="keyName">Name of index key.</param>
    /// <returns>Index information, or null when no index exists for the key.</returns>
    IndexInfo GetIndexInfo(string keyName);
}
/// <summary>
/// Provides an implemenation of <see cref="IMysqlDocStoreIndexer"/>
/// </summary>
public class MysqlDocStoreManager : IMysqlDocStoreIndexer {
//--- Types ---
// A single pending index operation: the index to update plus the document
// (id, revision, content) it applies to. Immutable once constructed.
private class WorkItem {
    public readonly IndexInfo Index;
    public readonly int Id;
    public readonly int Revision;
    public readonly XDoc Doc;

    public WorkItem(IndexInfo index, int id, int revision, XDoc doc) {
        Index = index;
        Id = id;
        Revision = revision;
        Doc = doc;
    }
}
//--- Class Fields ---
private readonly IDataCatalog _catalog;
private readonly XDoc _config;
private readonly string _name;
private readonly string _indexLookupTable;
private readonly IDictionary<string, string> _namespaceMap = new Dictionary<string, string>();
private readonly ProcessingQueue<WorkItem> _processingQueue;
private Dictionary<string, IndexInfo> _indicies = new Dictionary<string, IndexInfo>();
//--- Constructors ---
/// <summary>
/// Create a new storage manager. Creates the storage and index-lookup tables
/// if they do not already exist and loads the current index definitions.
/// </summary>
/// <param name="catalog">Database catalog to use.</param>
/// <param name="config">Collection and index configuration.</param>
/// <exception cref="ArgumentException">Thrown when the configuration lacks a
/// collection name or <paramref name="catalog"/> is null.</exception>
public MysqlDocStoreManager(IDataCatalog catalog, XDoc config) {
    _catalog = catalog;
    _config = config;

    // Fix: validate the raw configured name. The original check ran against
    // _name AFTER the "docstore_" prefix had been prepended, so it could
    // never detect a missing name.
    string configuredName = _config["name"].AsText;
    if(string.IsNullOrEmpty(configuredName)) {
        throw new ArgumentException("Missing name for store table");
    }
    if(_catalog == null) {
        throw new ArgumentException("Missing DataCatalog");
    }
    _name = "docstore_" + configuredName;
    _indexLookupTable = _name + "_indicies";
    _namespaceMap.Add(new KeyValuePair<string, string>("docstore", "mindtouch.dream.docstore"));
    foreach(XDoc doc in _config["namespaces/namespace"]) {
        _namespaceMap.Add(new KeyValuePair<string, string>(doc["@prefix"].AsText, doc["@urn"].AsText));
    }
    _processingQueue = new ProcessingQueue<WorkItem>(Update, 5);

    // create storage & index lookup tables if required
    _catalog.NewQuery(string.Format(@"
CREATE TABLE IF NOT EXISTS {0} (
    id int primary key auto_increment not null,
    revision int not null default 1,
    doc_id varchar(255) unique not null,
    doc text not null )", _name))
        .Execute();
    _catalog.NewQuery(string.Format(@"
CREATE TABLE IF NOT EXISTS {0} (
    idx_name varchar(255) primary key not null,
    idx_xpath text not null )", _indexLookupTable))
        .Execute();
    RefreshIndicies();
}
//--- Properties ---
/// <summary>
/// Get the collection configuration (as passed to the constructor).
/// </summary>
public XDoc Config { get { return _config; } }
/// <summary>
/// Get the collection name (the configured name prefixed with "docstore_").
/// </summary>
public string Name { get { return _name; } }
/// <summary>
/// Get all Indicies defined for this indexer. Re-reads the index definitions
/// before returning a snapshot copy of the current values.
/// </summary>
public IEnumerable<IndexInfo> Indicies {
    get {
        RefreshIndicies();
        return new List<IndexInfo>(_indicies.Values);
    }
}
//--- Methods ---
/// <summary>
/// Manually add an index.
/// </summary>
/// <param name="keyName">Name of the index.</param>
/// <param name="xpath">XPath expression to index.</param>
public void AddIndex(string keyName, string xpath) {
// make sure index doesn't already exist
if(_indicies.ContainsKey(keyName)) {
// the cached definition may be stale; re-read before deciding
RefreshIndicies();
if(_indicies.ContainsKey(keyName)) {
return;
}
}
// TODO: need to sanity check keyName
// NOTE(review): keyName is concatenated into table names and DDL below;
// presumably it is trusted/validated by the caller -- confirm.
IndexInfo info = new IndexInfo();
info.Name = keyName;
info.Table = _name + "_idx_" + keyName;
info.XPath = xpath;
try {
// register the definition, then create the per-index value table
_catalog.NewQuery(string.Format("INSERT INTO {0} VALUES (?KEY, ?XPATH)", _indexLookupTable))
.With("KEY", keyName)
.With("XPATH", xpath)
.Execute();
_catalog.NewQuery(string.Format(@"
CREATE TABLE {0} (
ref_id int not null,
ref_revision int not null,
idx_value varchar(255),
key(ref_id),
key(idx_value(40)) );", info.Table))
.Execute();
} catch(Exception e) {
// Note: need to do this by reflection magic, because Dream doesn't take DB dependencies at the
// dll level
if(StringUtil.EqualsInvariant(e.GetType().ToString(), "MySql.Data.MySqlClient.MySqlException")) {
try {
int errorNumber = (int)e.GetType().GetProperty("Number").GetValue(e, null);
// trap for duplicate key or existing table collisions
// (1062 = ER_DUP_ENTRY, 1050 = ER_TABLE_EXISTS_ERROR): a concurrent
// caller already created this index, so there is nothing left to do
if(errorNumber == 1062 || errorNumber == 1050) {
return;
}
} catch { }
}
throw;
}
// cache the new definition and index every document already in the store
_indicies[keyName] = info;
BuildIndex(info);
}
/// <summary>
/// Modify an existing index.
/// </summary>
/// <param name="keyName">Name of existing index.</param>
/// <param name="xpath">New XPath expression.</param>
public void ChangeIndex(string keyName, string xpath) {
// make sure index exists
IndexInfo info = GetIndexInfo(keyName);
if(info == null) {
// unknown key: a "change" degenerates into an add
AddIndex(keyName, xpath);
return;
}
// persist the new xpath, refresh the cached definitions, then re-index everything
_catalog.NewQuery(string.Format("UPDATE {0} SET idx_xpath = ?XPATH where idx_name = ?KEY", _indexLookupTable))
.With("KEY", keyName)
.With("XPATH", xpath)
.Execute();
RefreshIndicies();
RebuildIndex(keyName);
}
/// <summary>
/// Drop an index.
/// </summary>
/// <param name="keyName">Name of the index.</param>
public void RemoveIndex(string keyName) {
    // make sure index exists (refreshes the local cache on a miss)
    IndexInfo info = GetIndexInfo(keyName);
    if(info == null) {
        return;
    }
    _catalog.NewQuery(string.Format("DELETE FROM {0} WHERE idx_name = ?KEY; DROP TABLE {1};", _indexLookupTable, info.Table))
        .With("KEY", info.Name)
        .Execute();

    // BUGFIX: evict the definition from the local cache as well, so that
    // GetIndexInfo()/QueueUpdate() do not keep using the removed index until
    // the next RefreshIndicies() happens to run.
    _indicies.Remove(keyName);
}
/// <summary>
/// Rebuild all values in an index.
/// </summary>
/// <param name="keyName">Name of the index.</param>
public void RebuildIndex(string keyName) {
// make sure index exists
IndexInfo info = GetIndexInfo(keyName);
if(info == null) {
throw new ArgumentException(string.Format("No index exists for key '{0}'", keyName));
}
// wipe all existing index rows, then queue a re-index of every stored document
_catalog.NewQuery(string.Format("TRUNCATE TABLE {0}", info.Table)).Execute();
BuildIndex(info);
}
/// <summary>
/// Queue a document for indexing.
/// </summary>
/// <param name="id">Primary key of document.</param>
/// <param name="revision">Revision of document.</param>
/// <param name="doc">Document to be indexed.</param>
public void QueueUpdate(int id, int revision, XDoc doc) {
// register namespace prefixes so the index XPath expressions can resolve them
Map(doc);
// queue one work item per index; the Indicies getter refreshes definitions first
foreach(IndexInfo index in Indicies) {
// TODO (arnec): what to do when enqueue fails...
_processingQueue.TryEnqueue(new WorkItem(index, id, revision, doc));
}
}
/// <summary>
/// Queue a document for removal from all indicies.
/// </summary>
/// <param name="id">Primary key of document.</param>
public void QueueDelete(int id) {
foreach(IndexInfo index in Indicies) {
// a null document marks the work item as a delete (see Update)
// TODO (arnec): what to do when enqueue fails...
_processingQueue.TryEnqueue(new WorkItem(index, id, 0, null));
}
}
/// <summary>
/// Get the index information for a key.
/// </summary>
/// <param name="keyName">Name of index key.</param>
/// <returns>The index definition, or null if no such index exists.</returns>
public IndexInfo GetIndexInfo(string keyName) {
    IndexInfo info;
    if(_indicies.TryGetValue(keyName, out info)) {
        return info;
    }
    // cache miss: re-load the definitions from the database and try once more
    RefreshIndicies();
    return _indicies.TryGetValue(keyName, out info) ? info : null;
}
/// <summary>
/// Drop the entire data store.
/// </summary>
/// <param name="catalog">Database catalog to enumerate and drop tables from.</param>
/// <param name="name">Base name of the data store.</param>
public static void DropDataStore(IDataCatalog catalog, string name) {

    // collect the matching table names in a list instead of building a
    // comma-separated string by repeated concatenation
    List<string> tables = new List<string>();
    catalog.NewQuery("SHOW TABLES LIKE ?PREFIX")
        .With("PREFIX", name + "_store%")
        .Execute(delegate(IDataReader reader) {
            while(reader.Read()) {
                tables.Add(reader.GetString(0));
            }
        });
    if(tables.Count > 0) {
        // Note: the table names come from the database itself, not from user input
        catalog.NewQuery(string.Format("DROP TABLE IF EXISTS {0}", string.Join(", ", tables.ToArray()))).Execute();
    }
}
// Re-read every index definition from the lookup table, building a fresh
// dictionary and swapping it in with a single assignment so readers never
// observe a half-populated cache.
private void RefreshIndicies() {
lock(_indicies) {
Dictionary<string, IndexInfo> indicies = new Dictionary<string, IndexInfo>();
_catalog.NewQuery(string.Format(@"SELECT idx_name, idx_xpath FROM {0}", _indexLookupTable))
.Execute(delegate(IDataReader reader) {
while(reader.Read()) {
IndexInfo index = new IndexInfo();
index.Name = reader.GetString(0);
index.Table = _name + "_idx_" + index.Name;
index.XPath = reader.GetString(1);
indicies.Add(index.Name, index);
}
});
// NOTE(review): the lock is taken on the *old* dictionary instance while the
// field is replaced below, so two concurrent refreshes may not exclude each
// other -- confirm whether that is acceptable here.
_indicies = indicies;
}
}
// Queue an index update for every document currently in the store, so a new
// or rebuilt index covers pre-existing data.
private void BuildIndex(IndexInfo info) {
_catalog.NewQuery(string.Format("SELECT id, revision, doc FROM {0}", _name)).Execute(delegate(IDataReader reader) {
while(reader.Read()) {
int id = reader.GetInt32(0);
int revision = reader.GetInt32(1);
XDoc doc = XDocFactory.From(reader.GetString(2), MimeType.TEXT_XML);
QueueSingleIndexUpdate(info, id, revision, doc);
}
});
}
// Apply the namespace map to the document and queue it for (re-)indexing
// against a single index.
private void QueueSingleIndexUpdate(IndexInfo index, int id, int revision, XDoc doc) {
Map(doc);
// TODO (arnec): what to do when enqueue fails...
_processingQueue.TryEnqueue(new WorkItem(index, id, revision, doc));
}
// Processing-queue callback: applies one queued work item to its index table.
private void Update(WorkItem workItem) {
if(workItem.Doc == null) {
// delete: a null document means remove every index row for this document
_catalog.NewQuery(string.Format("DELETE FROM {0} WHERE ref_id = ?REFID", workItem.Index.Table))
.With("REFID", workItem.Id)
.Execute();
} else {
// index update: insert one row per non-empty value matched by the index xpath
foreach(XDoc x in workItem.Doc[workItem.Index.XPath]) {
string value = x.AsText;
if(string.IsNullOrEmpty(value)) {
continue;
}
_catalog.NewQuery(string.Format("INSERT INTO {0} VALUES (?REFID,?REVISION, ?VALUE)", workItem.Index.Table))
.With("REFID", workItem.Id)
.With("REVISION", workItem.Revision)
.With("VALUE", value)
.Execute();
}
// remove old entries, i.e. rows left over from earlier revisions of this document
_catalog.NewQuery(string.Format("DELETE FROM {0} WHERE ref_id = ?REFID AND ref_revision < ?REVISION", workItem.Index.Table))
.With("REFID", workItem.Id)
.With("REVISION", workItem.Revision)
.Execute();
}
}
// Register every configured namespace prefix on the document so that the
// index XPath expressions can resolve namespaced nodes.
private void Map(XDoc doc) {
    IDictionary<string, string> map = _namespaceMap;
    if(map == null) {
        return;
    }
    foreach(string prefix in map.Keys) {
        doc.UsePrefix(prefix, map[prefix]);
    }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.RecoveryServices.Backup
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Azure.Management.RecoveryServices;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// BackupResourceVaultConfigsOperations operations.
/// </summary>
internal partial class BackupResourceVaultConfigsOperations : IServiceOperations<RecoveryServicesBackupClient>, IBackupResourceVaultConfigsOperations
{
/// <summary>
/// Initializes a new instance of the BackupResourceVaultConfigsOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal BackupResourceVaultConfigsOperations(RecoveryServicesBackupClient client)
{
// guard: every operation on this class goes through the parent service client
if (client == null)
{
throw new System.ArgumentNullException("client");
}
Client = client;
}
/// <summary>
/// Gets a reference to the RecoveryServicesBackupClient this operations
/// instance was created from.
/// </summary>
public RecoveryServicesBackupClient Client { get; private set; }
/// <summary>
/// Fetches resource vault config.
/// </summary>
/// <param name='vaultName'>
/// The name of the recovery services vault.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group where the recovery services vault is
/// present.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// AutoRest-generated GET of the vault's backup config resource. Validates
// parameters, builds the ARM URL, sends the request, and deserializes the
// 200 response body; any other status is surfaced as a CloudException.
public async Task<AzureOperationResponse<BackupResourceVaultConfigResource>> GetWithHttpMessagesAsync(string vaultName, string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// validate required parameters up front
if (vaultName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "vaultName");
}
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// service API version pinned by the code generator for this operation
string apiVersion = "2016-12-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("vaultName", vaultName);
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
}
// Construct URL: fill the route template with URL-escaped parameter values
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupconfig/vaultconfig").ToString();
_url = _url.Replace("{vaultName}", System.Uri.EscapeDataString(vaultName));
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers (per-request id, language, then caller-supplied overrides)
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request (GET has no body)
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// anything other than 200 is an error: wrap it in a CloudException with a
// best-effort parse of the service's CloudError body
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<BackupResourceVaultConfigResource>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<BackupResourceVaultConfigResource>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Updates vault security config.
/// </summary>
/// <param name='vaultName'>
/// The name of the recovery services vault.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group where the recovery services vault is
/// present.
/// </param>
/// <param name='parameters'>
/// resource config request
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// AutoRest-generated PATCH of the vault's backup config resource. Serializes
// the supplied parameters as the JSON request body; 200 and 204 are accepted
// as success, and a 200 body is deserialized into the result.
public async Task<AzureOperationResponse<BackupResourceVaultConfigResource>> UpdateWithHttpMessagesAsync(string vaultName, string resourceGroupName, BackupResourceVaultConfigResource parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// validate required parameters up front
if (vaultName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "vaultName");
}
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
// service API version pinned by the code generator for this operation
string apiVersion = "2016-12-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("vaultName", vaultName);
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Update", tracingParameters);
}
// Construct URL: fill the route template with URL-escaped parameter values
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupconfig/vaultconfig").ToString();
_url = _url.Replace("{vaultName}", System.Uri.EscapeDataString(vaultName));
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PATCH");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers (per-request id, language, then caller-supplied overrides)
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request: parameters become the JSON request body
string _requestContent = null;
if(parameters != null)
{
_requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// anything other than 200/204 is an error: wrap it in a CloudException with
// a best-effort parse of the service's CloudError body
if ((int)_statusCode != 200 && (int)_statusCode != 204)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<BackupResourceVaultConfigResource>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response (204 has no body, so only 200 is deserialized)
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<BackupResourceVaultConfigResource>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2008-2010 Ricardo Quesada
Copyright (c) 2011 Zynga Inc.
Copyright (c) 2011-2012 openxlive.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
using System;
using Microsoft.Xna.Framework;
namespace CocosSharp
{
/** CCLayerGradient is a subclass of CCLayerColor that draws gradients across
the background.
All features from CCLayerColor are valid, plus the following new features:
- direction
- final color
- interpolation mode
Color is interpolated between the startColor and endColor along the given
vector (starting at the origin, ending at the terminus). If no vector is
supplied, it defaults to (0, -1) -- a fade from top to bottom.
If 'compressedInterpolation' is disabled, you will not see either the start or end color for
non-cardinal vectors; a smooth gradient implying both end points will be still
be drawn, however.
If ' compressedInterpolation' is enabled (default mode) you will see both the start and end colors of the gradient.
@since v0.99.5
*/
public class CCLayerGradient : CCLayerColor
{
// Whether or not the interpolation will be compressed in order to display all the colors of the gradient both in canonical and non canonical vectors
bool compressedInterpolation;
// opacity at the gradient's terminus (0-255)
byte endOpacity;
// opacity at the gradient's origin (0-255)
byte startOpacity;
// direction along which the color is interpolated
CCPoint alongVector;
// color at the gradient's terminus (the start color is the layer's base color)
CCColor3B endColor;
#region Properties
// Color at the gradient's origin. Stored in the base layer color; every
// setter below recomputes the vertex colors via UpdateColor().
public CCColor3B StartColor
{
get { return RealColor; }
set
{
base.Color = value;
UpdateColor();
}
}
public CCColor3B EndColor
{
get { return endColor; }
set
{
endColor = value;
UpdateColor();
}
}
public byte StartOpacity
{
get { return startOpacity; }
set
{
startOpacity = value;
UpdateColor();
}
}
public byte EndOpacity
{
get { return endOpacity; }
set
{
endOpacity = value;
UpdateColor();
}
}
public CCPoint Vector
{
get { return alongVector; }
set
{
alongVector = value;
UpdateColor();
}
}
public bool IsCompressedInterpolation
{
get { return compressedInterpolation; }
set
{
compressedInterpolation = value;
UpdateColor();
}
}
#endregion Properties
#region Constructors
/// <summary>
/// Creates a full-screen CCLayer with a gradient between start and end.
/// </summary>
public CCLayerGradient (CCColor4B start, CCColor4B end) : this(start, end, new CCPoint(0, -1))
{
}
// Default: fully opaque black-to-black, fading top to bottom.
public CCLayerGradient() : this(new CCColor4B(0, 0, 0, 255), new CCColor4B(0, 0, 0, 255))
{
}
// NOTE(review): this overload never assigns alongVector, so UpdateColor()
// returns early (zero-length vector) until Vector is set -- confirm intended.
public CCLayerGradient(byte startOpacity, byte endOpacity) : base()
{
StartOpacity = startOpacity;
EndOpacity = endOpacity;
}
/// <summary>
/// Creates a full-screen CCLayer with a gradient between start and end in the direction of v.
/// </summary>
public CCLayerGradient (CCColor4B start, CCColor4B end, CCPoint gradientDirection)
: base(new CCColor4B(start.R, start.G, start.B, 255))
{
EndColor = new CCColor3B(end.R, end.G, end.B);
StartOpacity = start.A;
EndOpacity = end.A;
IsCompressedInterpolation = true;
alongVector = gradientDirection;
UpdateColor();
}
#endregion Constructors
// Recomputes the four corner vertex colors from the start/end colors,
// opacities, and the gradient direction vector.
public override void UpdateColor()
{
base.UpdateColor();
float h = alongVector.Length;
// degenerate (zero-length) direction: nothing to interpolate
if (h == 0)
return;
double c = Math.Sqrt(2.0);
// u = normalized gradient direction
var u = new CCPoint(alongVector.X / h, alongVector.Y / h);
// Compressed Interpolation mode
if (IsCompressedInterpolation)
{
// rescale u so both end colors are reached even for non-cardinal vectors
float h2 = 1 / (Math.Abs(u.X) + Math.Abs(u.Y));
u = u * (h2 * (float) c);
}
// fold the layer's displayed opacity into both gradient end points
float opacityf = DisplayedOpacity / 255.0f;
var S = new CCColor4B
{
R = DisplayedColor.R,
G = DisplayedColor.G,
B = DisplayedColor.B,
A = (byte) (StartOpacity * opacityf)
};
var E = new CCColor4B
{
R = EndColor.R,
G = EndColor.G,
B = EndColor.B,
A = (byte) (EndOpacity * opacityf)
};
// Each corner blends S (start) and E (end); (c +/- u.X +/- u.Y) / (2c)
// maps the corner's projection onto u into the [0, 1] blend weight.
// (-1, -1)
SquareVertices[0].Color = new Color(
(byte) (E.R + (S.R - E.R) * ((c + u.X + u.Y) / (2.0f * c))),
(byte) (E.G + (S.G - E.G) * ((c + u.X + u.Y) / (2.0f * c))),
(byte) (E.B + (S.B - E.B) * ((c + u.X + u.Y) / (2.0f * c))),
(byte) (E.A + (S.A - E.A) * ((c + u.X + u.Y) / (2.0f * c)))
);
// (1, -1)
SquareVertices[1].Color = new Color(
(byte) (E.R + (S.R - E.R) * ((c - u.X + u.Y) / (2.0f * c))),
(byte) (E.G + (S.G - E.G) * ((c - u.X + u.Y) / (2.0f * c))),
(byte) (E.B + (S.B - E.B) * ((c - u.X + u.Y) / (2.0f * c))),
(byte) (E.A + (S.A - E.A) * ((c - u.X + u.Y) / (2.0f * c)))
);
// (-1, 1)
SquareVertices[2].Color = new Color(
(byte) (E.R + (S.R - E.R) * ((c + u.X - u.Y) / (2.0f * c))),
(byte) (E.G + (S.G - E.G) * ((c + u.X - u.Y) / (2.0f * c))),
(byte) (E.B + (S.B - E.B) * ((c + u.X - u.Y) / (2.0f * c))),
(byte) (E.A + (S.A - E.A) * ((c + u.X - u.Y) / (2.0f * c)))
);
// (1, 1)
SquareVertices[3].Color = new Color(
(byte) (E.R + (S.R - E.R) * ((c - u.X - u.Y) / (2.0f * c))),
(byte) (E.G + (S.G - E.G) * ((c - u.X - u.Y) / (2.0f * c))),
(byte) (E.B + (S.B - E.B) * ((c - u.X - u.Y) / (2.0f * c))),
(byte) (E.A + (S.A - E.A) * ((c - u.X - u.Y) / (2.0f * c)))
);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using System.Runtime.ExceptionServices;
namespace System.Linq.Expressions.Interpreter
{
#if FEATURE_MAKE_RUN_METHODS
internal static partial class DelegateHelpers
{
// Func<...> takes up to 16 parameters plus a return type
private const int MaximumArity = 17;
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1506:AvoidExcessiveClassCoupling")]
// Maps a signature (parameter types followed by the return type) onto the
// matching predefined Action<...> or Func<...> delegate type.
internal static Type MakeDelegate(Type[] types)
{
Debug.Assert(types != null && types.Length > 0);
// Can only used predefined delegates if we have no byref types and
// the arity is small enough to fit in Func<...> or Action<...>
if (types.Length > MaximumArity || types.Any(t => t.IsByRef))
{
throw ContractUtils.Unreachable;
}
// the last element of types is the return type
Type returnType = types[types.Length - 1];
if (returnType == typeof(void))
{
// void return: drop the trailing void and pick the matching Action<...>
Array.Resize(ref types, types.Length - 1);
switch (types.Length)
{
case 0: return typeof(Action);
case 1: return typeof(Action<>).MakeGenericType(types);
case 2: return typeof(Action<,>).MakeGenericType(types);
case 3: return typeof(Action<,,>).MakeGenericType(types);
case 4: return typeof(Action<,,,>).MakeGenericType(types);
case 5: return typeof(Action<,,,,>).MakeGenericType(types);
case 6: return typeof(Action<,,,,,>).MakeGenericType(types);
case 7: return typeof(Action<,,,,,,>).MakeGenericType(types);
case 8: return typeof(Action<,,,,,,,>).MakeGenericType(types);
case 9: return typeof(Action<,,,,,,,,>).MakeGenericType(types);
case 10: return typeof(Action<,,,,,,,,,>).MakeGenericType(types);
case 11: return typeof(Action<,,,,,,,,,,>).MakeGenericType(types);
case 12: return typeof(Action<,,,,,,,,,,,>).MakeGenericType(types);
case 13: return typeof(Action<,,,,,,,,,,,,>).MakeGenericType(types);
case 14: return typeof(Action<,,,,,,,,,,,,,>).MakeGenericType(types);
case 15: return typeof(Action<,,,,,,,,,,,,,,>).MakeGenericType(types);
case 16: return typeof(Action<,,,,,,,,,,,,,,,>).MakeGenericType(types);
}
}
else
{
// non-void: pick the matching Func<...>; the return type stays as the
// final generic argument
switch (types.Length)
{
case 1: return typeof(Func<>).MakeGenericType(types);
case 2: return typeof(Func<,>).MakeGenericType(types);
case 3: return typeof(Func<,,>).MakeGenericType(types);
case 4: return typeof(Func<,,,>).MakeGenericType(types);
case 5: return typeof(Func<,,,,>).MakeGenericType(types);
case 6: return typeof(Func<,,,,,>).MakeGenericType(types);
case 7: return typeof(Func<,,,,,,>).MakeGenericType(types);
case 8: return typeof(Func<,,,,,,,>).MakeGenericType(types);
case 9: return typeof(Func<,,,,,,,,>).MakeGenericType(types);
case 10: return typeof(Func<,,,,,,,,,>).MakeGenericType(types);
case 11: return typeof(Func<,,,,,,,,,,>).MakeGenericType(types);
case 12: return typeof(Func<,,,,,,,,,,,>).MakeGenericType(types);
case 13: return typeof(Func<,,,,,,,,,,,,>).MakeGenericType(types);
case 14: return typeof(Func<,,,,,,,,,,,,,>).MakeGenericType(types);
case 15: return typeof(Func<,,,,,,,,,,,,,,>).MakeGenericType(types);
case 16: return typeof(Func<,,,,,,,,,,,,,,,>).MakeGenericType(types);
case 17: return typeof(Func<,,,,,,,,,,,,,,,,>).MakeGenericType(types);
}
}
// unreachable: every arity <= MaximumArity is handled above
throw ContractUtils.Unreachable;
}
}
#endif
internal static class ScriptingRuntimeHelpers
{
// Boxes an int, reusing the cached boxes for the common values -1..3 to
// avoid repeated allocations.
public static object Int32ToObject(int i)
{
switch (i)
{
case -1:
return Utils.BoxedIntM1;
case 0:
return Utils.BoxedInt0;
case 1:
return Utils.BoxedInt1;
case 2:
return Utils.BoxedInt2;
case 3:
return Utils.BoxedInt3;
}
return i;
}
// Returns the cached boxed default value for a primitive (or enum) type, or
// null for reference types (including DBNull) and anything unrecognized.
internal static object GetPrimitiveDefaultValue(Type type)
{
object result;
switch (type.GetTypeCode())
{
// these cases 'break' so that an enum with this underlying type can be
// converted to the enum's own boxed default below
case TypeCode.Boolean:
result = Utils.BoxedFalse;
break;
case TypeCode.SByte:
result = Utils.BoxedDefaultSByte;
break;
case TypeCode.Byte:
result = Utils.BoxedDefaultByte;
break;
case TypeCode.Char:
result = Utils.BoxedDefaultChar;
break;
case TypeCode.Int16:
result = Utils.BoxedDefaultInt16;
break;
case TypeCode.Int32:
result = Utils.BoxedInt0;
break;
case TypeCode.Int64:
result = Utils.BoxedDefaultInt64;
break;
case TypeCode.UInt16:
result = Utils.BoxedDefaultUInt16;
break;
case TypeCode.UInt32:
result = Utils.BoxedDefaultUInt32;
break;
case TypeCode.UInt64:
result = Utils.BoxedDefaultUInt64;
break;
// the remaining types return directly: they skip the enum wrap-up below
case TypeCode.Single:
return Utils.BoxedDefaultSingle;
case TypeCode.Double:
return Utils.BoxedDefaultDouble;
case TypeCode.DateTime:
return Utils.BoxedDefaultDateTime;
case TypeCode.Decimal:
return Utils.BoxedDefaultDecimal;
default:
// Also covers DBNull which is a class.
return null;
}
// wrap the boxed underlying-type default in the enum type itself
if (type.IsEnum)
{
result = Enum.ToObject(type, result);
}
return result;
}
}
/// <summary>
/// Utilities for re-throwing exceptions without losing diagnostic information.
/// </summary>
internal static class ExceptionHelpers
{
    /// <summary>
    /// Re-throws the inner exception of a <see cref="TargetInvocationException"/>
    /// via <see cref="ExceptionDispatchInfo"/>, preserving the original stack
    /// trace so the user sees the real failure point instead of the reflection wrapper.
    /// </summary>
    public static void UnwrapAndRethrow(TargetInvocationException exception) =>
        ExceptionDispatchInfo.Throw(exception.InnerException);
}
/// <summary>
/// A hybrid dictionary which compares based upon object identity.
/// Small key counts are held in a flat array (up to <c>ArraySize</c> live entries);
/// once the array fills up the contents are promoted into a Dictionary.
/// Key comparison in array mode uses the == operator on TKey, which for a
/// class-constrained type parameter is reference equality.
/// NOTE(review): after promotion, Dictionary uses the default equality comparer
/// (Object.Equals), which differs from reference identity if TKey overrides
/// Equals/GetHashCode — confirm keys used with this type do not override them.
/// </summary>
internal class HybridReferenceDictionary<TKey, TValue> where TKey : class
{
// Flat storage used while small; a default slot (null Key) marks a free entry.
private KeyValuePair<TKey, TValue>[] _keysAndValues;
// Promoted storage; once non-null, _keysAndValues is no longer used.
private Dictionary<TKey, TValue> _dict;
// Capacity of the flat array before promotion to a Dictionary.
private const int ArraySize = 10;
// Looks up a key, returning false (and a default value) when absent.
public bool TryGetValue(TKey key, out TValue value)
{
Debug.Assert(key != null);
if (_dict != null)
{
// Promoted: delegate to the real dictionary.
return _dict.TryGetValue(key, out value);
}
else if (_keysAndValues != null)
{
// Linear scan; free slots have a null Key and can never match a non-null key.
for (int i = 0; i < _keysAndValues.Length; i++)
{
if (_keysAndValues[i].Key == key)
{
value = _keysAndValues[i].Value;
return true;
}
}
}
value = default(TValue);
return false;
}
// Removes a key if present; silently does nothing when absent.
public void Remove(TKey key)
{
Debug.Assert(key != null);
if (_dict != null)
{
_dict.Remove(key);
}
else if (_keysAndValues != null)
{
for (int i = 0; i < _keysAndValues.Length; i++)
{
if (_keysAndValues[i].Key == key)
{
// Freeing a slot = overwriting it with a default (null-key) pair.
_keysAndValues[i] = new KeyValuePair<TKey, TValue>();
return;
}
}
}
}
// Membership test; reads _keysAndValues through a local so a concurrent
// clear of the field cannot cause a null dereference mid-scan.
public bool ContainsKey(TKey key)
{
Debug.Assert(key != null);
if (_dict != null)
{
return _dict.ContainsKey(key);
}
KeyValuePair<TKey, TValue>[] keysAndValues = _keysAndValues;
if (keysAndValues != null)
{
for (int i = 0; i < keysAndValues.Length; i++)
{
if (keysAndValues[i].Key == key)
{
return true;
}
}
}
return false;
}
// Enumerates live entries regardless of which storage mode is active.
public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
{
if (_dict != null)
{
return _dict.GetEnumerator();
}
return GetEnumeratorWorker();
}
// Iterator over the flat array; free (null-key) slots are skipped.
private IEnumerator<KeyValuePair<TKey, TValue>> GetEnumeratorWorker()
{
if (_keysAndValues != null)
{
for (int i = 0; i < _keysAndValues.Length; i++)
{
if (_keysAndValues[i].Key != null)
{
yield return _keysAndValues[i];
}
}
}
}
// Indexer: get throws KeyNotFoundException when absent; set adds or overwrites.
public TValue this[TKey key]
{
get
{
Debug.Assert(key != null);
TValue res;
if (TryGetValue(key, out res))
{
return res;
}
throw new KeyNotFoundException();
}
set
{
Debug.Assert(key != null);
if (_dict != null)
{
_dict[key] = value;
}
else
{
// index tracks a free slot in the flat array; -1 means none found.
int index;
if (_keysAndValues != null)
{
index = -1;
for (int i = 0; i < _keysAndValues.Length; i++)
{
if (_keysAndValues[i].Key == key)
{
// Existing key: overwrite in place and we are done.
_keysAndValues[i] = new KeyValuePair<TKey, TValue>(key, value);
return;
}
else if (_keysAndValues[i].Key == null)
{
index = i;
}
}
}
else
{
// First insertion: lazily allocate the flat array.
_keysAndValues = new KeyValuePair<TKey, TValue>[ArraySize];
index = 0;
}
if (index != -1)
{
_keysAndValues[index] = new KeyValuePair<TKey, TValue>(key, value);
}
else
{
// No free slot: promote to a Dictionary. The array is full of live
// entries here (index == -1 implies no null keys), so every Key is valid.
_dict = new Dictionary<TKey, TValue>();
for (int i = 0; i < _keysAndValues.Length; i++)
{
_dict[_keysAndValues[i].Key] = _keysAndValues[i].Value;
}
_keysAndValues = null;
_dict[key] = value;
}
}
}
}
}
/// <summary>
/// Lightweight debug-build assertion helpers.
/// </summary>
internal static class Assert
{
    /// <summary>
    /// Asserts that <paramref name="var"/> is not null. Because of
    /// <see cref="ConditionalAttribute"/>, call sites are compiled away
    /// entirely outside of DEBUG builds.
    /// </summary>
    [Conditional("DEBUG")]
    public static void NotNull(object var) => Debug.Assert(var != null);
}
}
| |
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
// The Original Code is from a code project example:
// http://www.codeproject.com/KB/recipes/fortunevoronoi.aspx
// which is protected under the Code Project Open License
// http://www.codeproject.com/info/cpol10.aspx
//
namespace DotSpatial.NTSExtension.Voronoi
{
/// <summary>
/// Contains static methods and parameters that organize the major elements of applying the Fortune linesweep methods.
/// </summary>
public abstract class Fortune
{
#region Fields
/// <summary>
/// Represents an infinite vector location, used to terminate unbounded edges during cleanup.
/// </summary>
// ReSharper disable once InconsistentNaming
public static readonly Vector2 VVInfinite = new Vector2(double.PositiveInfinity, double.PositiveInfinity);
/// <summary>
/// The default definition of a coordinate that uses double.NaN to clarify that no value has yet been assigned to this vector.
/// (The historical misspelling "VVUnkown" is kept because the field is public API.)
/// </summary>
// ReSharper disable once InconsistentNaming
public static readonly Vector2 VVUnkown = new Vector2(double.NaN, double.NaN);
/// <summary>
/// Gets or sets a value indicating whether the cleanup method should be called. This is unnecessary, for
/// the mapwindow implementation and will in fact cause the implementation to break
/// because infinities and other bad values start showing up.
/// </summary>
public static bool DoCleanup { get; set; }
#endregion
#region Methods
/// <summary>
/// Calculates the voronoi graph, but specifies a tolerance below which values should be considered equal.
/// Note both the tolerance and cleanup flag are stored in static (process-wide) state.
/// </summary>
/// <param name="vertices">The original points to use during the calculation.</param>
/// <param name="tolerance">A double value that controls the test for equality.</param>
/// <param name="cleanup">This is for Ben's code and should be passed as true if cleanup should be done.</param>
/// <returns>A VoronoiGraph structure containing the output geometries.</returns>
public static VoronoiGraph ComputeVoronoiGraph(double[] vertices, double tolerance, bool cleanup)
{
Vector2.Tolerance = tolerance;
DoCleanup = cleanup;
return ComputeVoronoiGraph(vertices);
}
/// <summary>
/// Calculates a list of edges and junction vertices by using the specified points.
/// This defaults to not using any tolerance for determining if points are equal,
/// and will not use the cleanup algorithm, which breaks the HandleBoundaries
/// method in the Voronoi class.
/// </summary>
/// <param name="vertices">The original points to use during the calculation, stored as interleaved x,y pairs.</param>
/// <returns>A VoronoiGraph structure containing the output geometries.</returns>
public static VoronoiGraph ComputeVoronoiGraph(double[] vertices)
{
// SortedDictionary doubles as a priority queue: pq.First() yields the minimum event.
SortedDictionary<VEvent, VEvent> pq = new SortedDictionary<VEvent, VEvent>();
// Circle events still considered live, keyed by the beach-line arc they would remove.
Dictionary<VDataNode, VCircleEvent> currentCircles = new Dictionary<VDataNode, VCircleEvent>();
VoronoiGraph vg = new VoronoiGraph();
VNode rootNode = null;
// Seed the queue with one site (data) event per input point.
for (int i = 0; i < vertices.Length / 2; i++)
{
VDataEvent e = new VDataEvent(new Vector2(vertices, i * 2));
// Duplicate input points produce equal events; skip them rather than throw.
if (pq.ContainsKey(e)) continue;
pq.Add(e, e);
}
// Fortune sweep: repeatedly process the lowest pending event until none remain.
while (pq.Count > 0)
{
VEvent ve = pq.First().Key;
pq.Remove(ve);
VDataNode[] circleCheckList = new VDataNode[] { };
if (ve is VDataEvent)
{
// Site event: the beach line is updated around the new site.
rootNode = VNode.ProcessDataEvent(ve as VDataEvent, rootNode, vg, ve.Y, out circleCheckList);
}
else if (ve is VCircleEvent)
{
currentCircles.Remove(((VCircleEvent)ve).NodeN);
// Circle events can be invalidated after queuing; stale ones are skipped.
if (!((VCircleEvent)ve).Valid) continue;
rootNode = VNode.ProcessCircleEvent(ve as VCircleEvent, rootNode, vg, out circleCheckList);
}
else if (ve != null)
{
throw new Exception("Got event of type " + ve.GetType() + "!");
}
// Re-examine each arc affected by the structural change for new circle events.
foreach (VDataNode vd in circleCheckList)
{
if (currentCircles.ContainsKey(vd))
{
// Any previously scheduled circle event for this arc is now stale.
currentCircles[vd].Valid = false;
currentCircles.Remove(vd);
}
if (ve == null) continue;
VCircleEvent vce = VNode.CircleCheckDataNode(vd, ve.Y);
if (vce == null) continue;
pq.Add(vce, vce);
currentCircles[vd] = vce;
}
if (!(ve is VDataEvent)) continue;
// A new site strictly inside an existing circle event's circle invalidates that
// event; the 1e-10 slop keeps points lying (numerically) on the circle valid.
Vector2 dp = ((VDataEvent)ve).DataPoint;
foreach (VCircleEvent vce in currentCircles.Values)
{
if (MathTools.Dist(dp.X, dp.Y, vce.Center.X, vce.Center.Y) < vce.Y - vce.Center.Y && Math.Abs(MathTools.Dist(dp.X, dp.Y, vce.Center.X, vce.Center.Y) - (vce.Y - vce.Center.Y)) > 1e-10) vce.Valid = false;
}
}
// This is where the MapWindow version should exit since it uses the HandleBoundaries
// function instead. The following code is needed for Benjamin Ditter's original process to work.
if (!DoCleanup) return vg;
VNode.CleanUpTree(rootNode);
// Terminate any unfinished edges at infinity and normalize left/right data ordering.
foreach (VoronoiEdge ve in vg.Edges)
{
if (ve.Done) continue;
if (ve.VVertexB != VVUnkown) continue;
ve.AddVertex(VVInfinite);
if (Math.Abs(ve.LeftData.Y - ve.RightData.Y) < 1e-10 && ve.LeftData.X < ve.RightData.X)
{
Vector2 t = ve.LeftData;
ve.LeftData = ve.RightData;
ve.RightData = t;
}
}
// Collect zero-length ("minute") finite edges and unify their endpoints across
// all other edges so rounding noise cannot open holes in the diagram.
ArrayList minuteEdges = new ArrayList();
foreach (VoronoiEdge ve in vg.Edges)
{
if (ve.IsPartlyInfinite || !ve.VVertexA.Equals(ve.VVertexB)) continue;
minuteEdges.Add(ve);
// prevent rounding errors from expanding to holes
foreach (VoronoiEdge ve2 in vg.Edges)
{
if (ve2.VVertexA.Equals(ve.VVertexA)) ve2.VVertexA = ve.VVertexA;
if (ve2.VVertexB.Equals(ve.VVertexA)) ve2.VVertexB = ve.VVertexA;
}
}
foreach (VoronoiEdge ve in minuteEdges)
{
vg.Edges.Remove(ve);
}
return vg;
}
/// <summary>
/// Applies an optional cleanup method needed by Benjamin Ditter for laser data calculations.
/// This is not used by the MapWindow calculations.
/// </summary>
/// <param name="vg">The output voronoi graph created in the Compute Voronoi Graph section.</param>
/// <param name="minLeftRightDist">A minimum left to right distance.</param>
/// <returns>The Voronoi Graph after it has been filtered.</returns>
public static VoronoiGraph FilterVg(VoronoiGraph vg, double minLeftRightDist)
{
VoronoiGraph vgErg = new VoronoiGraph();
// Keep only edges whose generating sites are sufficiently far apart.
foreach (VoronoiEdge ve in vg.Edges)
{
if (ve.LeftData.Distance(ve.RightData) >= minLeftRightDist) vgErg.Edges.Add(ve);
}
// Rebuild the vertex set from the surviving edges.
foreach (VoronoiEdge ve in vgErg.Edges)
{
vgErg.Vertices.Add(ve.VVertexA);
vgErg.Vertices.Add(ve.VVertexB);
}
return vgErg;
}
/// <summary>
/// Gets the center of the circle through the three given points, computed as
/// the intersection of the perpendicular bisectors of chords AC and BC.
/// </summary>
/// <param name="a">First circle point.</param>
/// <param name="b">Second circle point.</param>
/// <param name="c">Third circle point.</param>
/// <returns>The center vector.</returns>
/// <exception cref="ArgumentException">Thrown if at least 2 of the points are the same.</exception>
internal static Vector2 CircumCircleCenter(Vector2 a, Vector2 b, Vector2 c)
{
if (a == b || b == c || a == c) throw new ArgumentException("Need three different points!");
// (tx, ty) is the midpoint of chord AC; (vx, vy) the midpoint of chord BC.
double tx = (a.X + c.X) / 2;
double ty = (a.Y + c.Y) / 2;
double vx = (b.X + c.X) / 2;
double vy = (b.Y + c.Y) / 2;
// (ux, uy) / (wx, wy) are direction vectors of the two perpendicular bisectors.
// The exact == comparisons guard the divisions below against a zero denominator.
double ux, uy, wx, wy;
if (a.X == c.X)
{
// AC is vertical, so its bisector is horizontal.
ux = 1;
uy = 0;
}
else
{
ux = (c.Y - a.Y) / (a.X - c.X);
uy = 1;
}
if (b.X == c.X)
{
wx = -1;
wy = 0;
}
else
{
wx = (b.Y - c.Y) / (b.X - c.X);
wy = -1;
}
// Solve t + alpha*u = v + beta*w for alpha; the center is then t + alpha*u.
double alpha = ((wy * (vx - tx)) - (wx * (vy - ty))) / ((ux * wy) - (wx * uy));
return new Vector2(tx + (alpha * ux), ty + (alpha * uy));
}
/// <summary>
/// Performs a parabolic cut with the given values: the x coordinate where the
/// two parabolas with foci (x1,y1) and (x2,y2) and directrix y=ys intersect.
/// </summary>
/// <param name="x1">First x value.</param>
/// <param name="y1">First y value.</param>
/// <param name="x2">Second x value.</param>
/// <param name="y2">Second y value.</param>
/// <param name="ys">The sweep-line (directrix) y position.</param>
/// <returns>The resulting x value.</returns>
/// <exception cref="ArgumentException">Thrown if the two points are the same.</exception>
internal static double ParabolicCut(double x1, double y1, double x2, double y2, double ys)
{
if (x1 == x2 && y1 == y2)
{
throw new ArgumentException("Identical datapoints are not allowed!");
}
// Degenerate positions: a focus exactly on the sweep line degenerates to a vertical ray.
if (y1 == ys && y2 == ys) return (x1 + x2) / 2;
if (y1 == ys) return x1;
if (y2 == ys) return x2;
// a1/a2 are the leading coefficients of the two parabolas in focus/directrix form.
double a1 = 1 / (2 * (y1 - ys));
double a2 = 1 / (2 * (y2 - ys));
// Equal coefficients (same y) intersect exactly halfway between the foci.
if (a1 == a2) return (x1 + x2) / 2;
// Quadratic solution for the two intersection points, then ordered xs1 <= xs2.
double root = Math.Sqrt((-8 * a1 * x1 * a2 * x2) - (2 * a1 * y1) + (2 * a1 * y2) + (4 * a1 * a2 * x2 * x2) + (2 * a2 * y1) + (4 * a2 * a1 * x1 * x1) - (2 * a2 * y2));
double xs1 = 0.5 / ((2 * a1) - (2 * a2)) * ((4 * a1 * x1) - (4 * a2 * x2) + (2 * root));
double xs2 = 0.5 / ((2 * a1) - (2 * a2)) * ((4 * a1 * x1) - (4 * a2 * x2) - (2 * root));
if (xs1 > xs2)
{
double h = xs1;
xs1 = xs2;
xs2 = h;
}
// Choose the breakpoint relevant to the arc ordering on the beach line.
return y1 >= y2 ? xs2 : xs1;
}
#endregion
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
#if UNITY
using UnityEngine;
#endif
#if WINDOWS_WSA || WINDOWS_UWP
using Windows.System.Threading;
#else
using System.Threading;
#endif
namespace Foundation.Tasks
{
/// <summary>
/// Describes the state of an AsyncTask.
/// </summary>
public enum TaskStatus
{
/// <summary>
/// The task is still working (or has not yet finished).
/// </summary>
Pending,
/// <summary>
/// An exception was thrown, or the task was otherwise stopped early.
/// </summary>
Faulted,
/// <summary>
/// Completed without error.
/// </summary>
Success,
}
/// <summary>
/// Execution strategy for the Task: which thread (or coroutine) runs the work.
/// </summary>
public enum TaskStrategy
{
/// <summary>
/// Dispatches the task to a background (thread pool) thread.
/// </summary>
BackgroundThread,
/// <summary>
/// Dispatches the task to the main thread.
/// </summary>
MainThread,
/// <summary>
/// Runs the task synchronously on the current thread.
/// </summary>
CurrentThread,
/// <summary>
/// Runs the task as a coroutine.
/// </summary>
Coroutine,
/// <summary>
/// Does nothing; the task is completed externally via Complete(). For custom tasks.
/// </summary>
Custom,
}
/// <summary>
/// A task encapsulates future work that may be waited on.
/// - Support running actions in background threads
/// - Supports running coroutines with return results
/// - Use the WaitForRoutine method to wait for the task in a coroutine
/// </summary>
/// <example>
/// <code>
/// var task = Task.Run(() =>
/// {
/// //Debug.Log does not work in
/// Debug.Log("Sleeping...");
/// Task.Delay(2000);
/// Debug.Log("Slept");
/// });
/// // wait for it
/// yield return task;
///
/// // check exceptions
/// if(task.IsFaulted)
/// Debug.LogException(task.Exception)
///</code>
///</example>
public partial class AsyncTask :
#if UNITY_5
CustomYieldInstruction,
#endif
IDisposable
{
#region options
/// <summary>
/// Forces use of a single thread (current thread) for debugging;
/// BackgroundThread tasks then run synchronously.
/// </summary>
public static bool DisableMultiThread = false;
/// <summary>
/// When true, exceptions caught during Execute are also logged (Unity builds only).
/// </summary>
public static bool LogErrors = false;
#endregion
#region properties
/// <summary>
/// Run execution path
/// </summary>
public TaskStrategy Strategy;
/// <summary>
/// Error captured when the task faults; null on success.
/// </summary>
public Exception Exception { get; set; }
/// <summary>
/// Run State
/// </summary>
public TaskStatus Status { get; set; }
#if UNITY_5
/// <summary>
/// Custom Yield: Unity keeps yielding on this instruction until the task completes.
/// </summary>
public override bool keepWaiting
{
get { return !IsCompleted; }
}
#endif
// True while the task has not finished (still Pending).
public bool IsRunning
{
get { return Status == TaskStatus.Pending; }
}
// True once the task has a terminal status AND all registered continuations
// have run — awaiters keep waiting until continuations complete.
public bool IsCompleted
{
get { return (Status == TaskStatus.Success || Status == TaskStatus.Faulted) && !HasContinuations; }
}
public bool IsFaulted
{
get { return Status == TaskStatus.Faulted; }
}
public bool IsSuccess
{
get { return Status == TaskStatus.Success; }
}
// Set by AddContinue; cleared by OnTaskComplete after continuations have been invoked.
public bool HasContinuations { get; protected set; }
#endregion
#region private
// NOTE(review): _status appears unused in this part of the class (Status is an
// auto-property). The class is partial, so confirm no other part relies on it
// before removing.
protected TaskStatus _status;
// Work to run for action-based tasks; null for coroutine/custom tasks.
protected Action _action;
// Coroutine body for Coroutine-strategy tasks.
protected IEnumerator _routine;
// Continuation delegates registered via AddContinue; each is invoked with this task.
List<Delegate> _completeList;
#endregion
#region constructor
static AsyncTask()
{
#if UNITY
TaskManager.ConfirmInit();
#endif
}
/// <summary>
/// Creates a new task
/// </summary>
public AsyncTask()
{
}
/// <summary>
/// Creates a new task with the given execution strategy.
/// </summary>
public AsyncTask(TaskStrategy mode)
{
Strategy = mode;
}
/// <summary>
/// Creates a new Task in a Faulted state
/// </summary>
/// <param name="ex">The exception that represents the fault.</param>
public AsyncTask(Exception ex)
{
Exception = ex;
Strategy = TaskStrategy.Custom;
Status = TaskStatus.Faulted;
}
/// <summary>
/// Creates a new background task
/// </summary>
/// <param name="action">The work to run on a background thread.</param>
public AsyncTask(Action action)
{
_action = action;
Strategy = TaskStrategy.BackgroundThread;
}
/// <summary>
/// Creates a new Task
/// </summary>
/// <param name="action">The work to run.</param>
/// <param name="mode">The execution strategy; Coroutine is invalid for actions.</param>
public AsyncTask(Action action, TaskStrategy mode)
: this()
{
if (mode == TaskStrategy.Coroutine)
throw new ArgumentException("Action tasks may not be coroutines");
_action = action;
Strategy = mode;
}
/// <summary>
/// Creates a new Coroutine Task
/// </summary>
/// <param name="action">The coroutine body; must not be null.</param>
public AsyncTask(IEnumerator action)
: this()
{
if (action == null)
throw new ArgumentNullException("action");
_routine = action;
Strategy = TaskStrategy.Coroutine;
}
#endregion
#region Private
// Runs the action (if any), then marks success and fires continuations.
protected virtual void Execute()
{
try
{
if (_action != null)
{
_action();
}
// Success is recorded before continuations run; a continuation that throws
// will flip the task to Faulted in the catch below.
Status = TaskStatus.Success;
OnTaskComplete();
}
catch (Exception ex)
{
Exception = ex;
Status = TaskStatus.Faulted;
#if UNITY
if (LogErrors)
Debug.LogException(ex);
#endif
}
}
// The #if below splits the method declaration: the WinRT and classic ThreadPool
// variants share the single closing brace after the #endif.
#if WINDOWS_WSA || WINDOWS_UWP
protected async void RunOnBackgroundThread()
{
Status = TaskStatus.Pending;
await ThreadPool.RunAsync(o => Execute());
#else
protected void RunOnBackgroundThread()
{
Status = TaskStatus.Pending;
ThreadPool.QueueUserWorkItem(state => Execute());
#endif
}
// Runs the work synchronously on the calling thread.
protected void RunOnCurrentThread()
{
Status = TaskStatus.Pending;
Execute();
}
#if UNITY
protected void RunOnMainThread()
{
Status = TaskStatus.Pending;
TaskManager.RunOnMainThread(Execute);
}
protected void RunAsCoroutine()
{
Status = TaskStatus.Pending;
TaskManager.StartRoutine(new TaskManager.CoroutineCommand
{
Coroutine = _routine,
OnComplete = OnRoutineComplete
});
}
#endif
// Invokes and clears all registered continuations, passing this task to each.
protected virtual void OnTaskComplete()
{
if (_completeList != null)
{
foreach (var d in _completeList)
{
if (d != null)
d.DynamicInvoke(this);
}
_completeList = null;
}
HasContinuations = false;
}
// Coroutine completion callback; only transitions if nothing (e.g. a fault)
// has already moved the task out of Pending.
protected void OnRoutineComplete()
{
if (Status == TaskStatus.Pending)
{
Status = TaskStatus.Success;
OnTaskComplete();
}
}
#endregion
#region public methods
/// <summary>
/// Runs complete logic, for custom tasks: null marks success, non-null marks a fault.
/// Continuations are fired in both cases.
/// </summary>
public virtual void Complete(Exception ex = null)
{
if (ex == null)
{
Exception = null;
Status = TaskStatus.Success;
OnTaskComplete();
}
else
{
Exception = ex;
Status = TaskStatus.Faulted;
OnTaskComplete();
}
}
/// <summary>
/// Executes the task according to its Strategy.
/// </summary>
public virtual void Start()
{
Status = TaskStatus.Pending;
switch (Strategy)
{
case TaskStrategy.Custom:
// Custom tasks are completed externally via Complete().
break;
#if UNITY
case TaskStrategy.Coroutine:
RunAsCoroutine();
break;
#endif
case TaskStrategy.BackgroundThread:
// DisableMultiThread forces synchronous execution for debugging.
if (DisableMultiThread)
RunOnCurrentThread();
else
RunOnBackgroundThread();
break;
case TaskStrategy.CurrentThread:
RunOnCurrentThread();
break;
#if UNITY
case TaskStrategy.MainThread:
RunOnMainThread();
break;
#endif
}
}
// NOTE(review): Dispose resets Status to Pending rather than marking the task
// disposed; a disposed task should not be reused — confirm this is intended.
public virtual void Dispose()
{
Status = TaskStatus.Pending;
Exception = null;
_action = null;
_routine = null;
_completeList = null;
HasContinuations = false;
}
/// <summary>
/// Registers a continuation delegate to be invoked (with this task as its
/// argument) when the task completes.
/// </summary>
public void AddContinue(Delegate action)
{
HasContinuations = true;
if (_completeList == null)
{
_completeList = new List<Delegate>();
}
_completeList.Add(action);
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using InfluxDB.Net.Models;
using InfluxDB.Net.Contracts;
using InfluxDB.Net.Enums;
using InfluxDB.Net.Infrastructure.Influx;
using InfluxDB.Net.Infrastructure.Configuration;
namespace InfluxDB.Net.Client
{
/// <summary>
/// An IInfluxDbClient decorator that pings the server once at construction time
/// and then forwards every call to the client implementation matching the
/// reported server version.
/// </summary>
internal class InfluxDbClientAutoVersion : IInfluxDbClient
{
// The version-specific client all members delegate to, chosen in the constructor.
private readonly IInfluxDbClient _influxDbClient;
public InfluxDbClientAutoVersion(InfluxDbClientConfiguration influxDbClientConfiguration)
{
// A provisional base client is created solely to issue the version-discovery ping.
_influxDbClient = new InfluxDbClientBase(influxDbClientConfiguration);
var errorHandlers = new List<ApiResponseErrorHandlingDelegate>();
//NOTE: Only performs ping when the client is connected. (Do not use multiple connections with the "Client Auto Version" setting.)
// NOTE(review): blocking on .Result is sync-over-async; it can deadlock where a
// synchronization context is present (UI frameworks, classic ASP.NET) — consider
// an async factory method instead.
var result = _influxDbClient.Ping(errorHandlers).Result;
var databaseVersion = result.Body;
// NOTE(review): 1.1.x is routed to the 0.13 client — presumably the wire formats
// are compatible; confirm against the supported-version matrix.
if (databaseVersion.StartsWith("1.1."))
{
_influxDbClient = new InfluxDbClientV013x(influxDbClientConfiguration);
}
else if (databaseVersion.StartsWith("0.13."))
{
_influxDbClient = new InfluxDbClientV013x(influxDbClientConfiguration);
}
else if (databaseVersion.StartsWith("0.12."))
{
_influxDbClient = new InfluxDbClientV012x(influxDbClientConfiguration);
}
else if (databaseVersion.StartsWith("0.11."))
{
_influxDbClient = new InfluxDbClientV011x(influxDbClientConfiguration);
}
else if (databaseVersion.StartsWith("0.10."))
{
_influxDbClient = new InfluxDbClientV010x(influxDbClientConfiguration);
}
else if (databaseVersion.StartsWith("0.9."))
{
// NOTE(review): only these exact patch versions are recognized; any other 0.9.x
// keeps the provisional InfluxDbClientBase created above — verify that is intended.
switch (databaseVersion)
{
case "0.9.2":
_influxDbClient = new InfluxDbClientV092(influxDbClientConfiguration);
break;
case "0.9.5":
_influxDbClient = new InfluxDbClientV092(influxDbClientConfiguration);
break;
case "0.9.6":
_influxDbClient = new InfluxDbClientV092(influxDbClientConfiguration);
break;
}
}
else
{
// Any unrecognized version string falls back to the generic client.
_influxDbClient = new InfluxDbClientV0x(influxDbClientConfiguration);
}
}
// Every member below is a pure passthrough to the version-specific client.
#region Database
public async Task<InfluxDbApiResponse> CreateDatabase(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, Database database)
{
return await _influxDbClient.CreateDatabase(errorHandlers, database);
}
public async Task<InfluxDbApiResponse> DropDatabase(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string name)
{
return await _influxDbClient.DropDatabase(errorHandlers, name);
}
public async Task<InfluxDbApiResponse> ShowDatabases(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.ShowDatabases(errorHandlers);
}
#endregion Database
#region Basic Querying
public async Task<InfluxDbApiWriteResponse> Write(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, WriteRequest request, string timePrecision)
{
return await _influxDbClient.Write(errorHandlers, request, timePrecision);
}
public async Task<InfluxDbApiResponse> Query(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string name, string query)
{
return await _influxDbClient.Query(errorHandlers, name, query);
}
public async Task<InfluxDbApiResponse> Query(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string name, List<string> queries)
{
return await _influxDbClient.Query(errorHandlers, name, queries);
}
#endregion Basic Querying
#region Continuous Queries
public async Task<InfluxDbApiResponse> GetContinuousQueries(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database)
{
return await _influxDbClient.GetContinuousQueries(errorHandlers, database);
}
public async Task<InfluxDbApiResponse> DeleteContinuousQuery(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, int id)
{
return await _influxDbClient.DeleteContinuousQuery(errorHandlers, database, id);
}
#endregion Continuous Queries
#region Series
public async Task<InfluxDbApiResponse> DropSeries(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, string name)
{
return await _influxDbClient.DropSeries(errorHandlers, database, name);
}
#endregion Series
#region Clustering
public async Task<InfluxDbApiResponse> CreateClusterAdmin(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, User user)
{
return await _influxDbClient.CreateClusterAdmin(errorHandlers, user);
}
public async Task<InfluxDbApiResponse> DeleteClusterAdmin(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string name)
{
return await _influxDbClient.DeleteClusterAdmin(errorHandlers, name);
}
public async Task<InfluxDbApiResponse> DescribeClusterAdmins(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.DescribeClusterAdmins(errorHandlers);
}
public async Task<InfluxDbApiResponse> UpdateClusterAdmin(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, User user, string name)
{
return await _influxDbClient.UpdateClusterAdmin(errorHandlers, user, name);
}
#endregion Clustering
#region Sharding
public async Task<InfluxDbApiResponse> GetShardSpaces(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.GetShardSpaces(errorHandlers);
}
public async Task<InfluxDbApiResponse> DropShardSpace(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, string name)
{
return await _influxDbClient.DropShardSpace(errorHandlers, database, name);
}
public async Task<InfluxDbApiResponse> CreateShardSpace(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, ShardSpace shardSpace)
{
return await _influxDbClient.CreateShardSpace(errorHandlers, database, shardSpace);
}
#endregion Sharding
#region Users
public async Task<InfluxDbApiResponse> CreateDatabaseUser(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, User user)
{
return await _influxDbClient.CreateDatabaseUser(errorHandlers, database, user);
}
public async Task<InfluxDbApiResponse> DeleteDatabaseUser(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, string name)
{
return await _influxDbClient.DeleteDatabaseUser(errorHandlers, database, name);
}
public async Task<InfluxDbApiResponse> DescribeDatabaseUsers(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database)
{
return await _influxDbClient.DescribeDatabaseUsers(errorHandlers, database);
}
public async Task<InfluxDbApiResponse> UpdateDatabaseUser(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, User user, string name)
{
return await _influxDbClient.UpdateDatabaseUser(errorHandlers, database, user, name);
}
public async Task<InfluxDbApiResponse> AuthenticateDatabaseUser(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string database, string user, string password)
{
return await _influxDbClient.AuthenticateDatabaseUser(errorHandlers, database, user, password);
}
#endregion Users
#region Other
public async Task<InfluxDbApiResponse> Ping(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.Ping(errorHandlers);
}
public async Task<InfluxDbApiResponse> ForceRaftCompaction(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.ForceRaftCompaction(errorHandlers);
}
public async Task<InfluxDbApiResponse> Interfaces(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.Interfaces(errorHandlers);
}
public async Task<InfluxDbApiResponse> Sync(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.Sync(errorHandlers);
}
public async Task<InfluxDbApiResponse> ListServers(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers)
{
return await _influxDbClient.ListServers(errorHandlers);
}
public async Task<InfluxDbApiResponse> RemoveServers(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, int id)
{
return await _influxDbClient.RemoveServers(errorHandlers, id);
}
public async Task<InfluxDbApiResponse> AlterRetentionPolicy(IEnumerable<ApiResponseErrorHandlingDelegate> errorHandlers, string policyName, string dbName, string duration, int replication)
{
return await _influxDbClient.AlterRetentionPolicy(errorHandlers, policyName, dbName, duration, replication);
}
// Synchronous accessors exposed by the underlying client.
public IFormatter GetFormatter()
{
return _influxDbClient.GetFormatter();
}
public InfluxVersion GetVersion()
{
return _influxDbClient.GetVersion();
}
#endregion Other
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;
using GlmSharp.Swizzle;
// ReSharper disable InconsistentNaming
namespace GlmSharp
{
/// <summary>
/// A matrix of type Half with 2 columns and 3 rows.
/// </summary>
[Serializable]
[StructLayout(LayoutKind.Sequential)]
public struct hmat2x3 : IEnumerable<Half>, IEquatable<hmat2x3>
{
#region Fields
/// <summary>
/// Column 0, Rows 0
/// </summary>
public Half m00;
/// <summary>
/// Column 0, Rows 1
/// </summary>
public Half m01;
/// <summary>
/// Column 0, Rows 2
/// </summary>
public Half m02;
/// <summary>
/// Column 1, Rows 0
/// </summary>
public Half m10;
/// <summary>
/// Column 1, Rows 1
/// </summary>
public Half m11;
/// <summary>
/// Column 1, Rows 2
/// </summary>
public Half m12;
#endregion
#region Constructors
/// <summary>
/// Component-wise constructor
/// </summary>
public hmat2x3(Half m00, Half m01, Half m02, Half m10, Half m11, Half m12)
{
this.m00 = m00;
this.m01 = m01;
this.m02 = m02;
this.m10 = m10;
this.m11 = m11;
this.m12 = m12;
}
/// <summary>
/// Constructs this matrix from a hmat2. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat2 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = Half.Zero;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat3x2. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat3x2 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = Half.Zero;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat4x2. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat4x2 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = Half.Zero;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a hmat2x3. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat2x3 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = m.m02;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = m.m12;
}
/// <summary>
/// Constructs this matrix from a hmat3. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat3 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = m.m02;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = m.m12;
}
/// <summary>
/// Constructs this matrix from a hmat4x3. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat4x3 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = m.m02;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = m.m12;
}
/// <summary>
/// Constructs this matrix from a hmat2x4. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat2x4 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = m.m02;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = m.m12;
}
/// <summary>
/// Constructs this matrix from a hmat3x4. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat3x4 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = m.m02;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = m.m12;
}
/// <summary>
/// Constructs this matrix from a hmat4. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hmat4 m)
{
this.m00 = m.m00;
this.m01 = m.m01;
this.m02 = m.m02;
this.m10 = m.m10;
this.m11 = m.m11;
this.m12 = m.m12;
}
/// <summary>
/// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hvec2 c0, hvec2 c1)
{
this.m00 = c0.x;
this.m01 = c0.y;
this.m02 = Half.Zero;
this.m10 = c1.x;
this.m11 = c1.y;
this.m12 = Half.Zero;
}
/// <summary>
/// Constructs this matrix from a series of column vectors. Non-overwritten fields are from an Identity matrix.
/// </summary>
public hmat2x3(hvec3 c0, hvec3 c1)
{
this.m00 = c0.x;
this.m01 = c0.y;
this.m02 = c0.z;
this.m10 = c1.x;
this.m11 = c1.y;
this.m12 = c1.z;
}
#endregion
#region Properties
// Field naming is m<column><row>: Column0 = (m00, m01, m02) and Column1 = (m10, m11, m12),
// i.e. storage is column-major.
/// <summary>
/// Creates a 2D array with all values (address: Values[column, row])
/// </summary>
public Half[,] Values => new[,] { { m00, m01, m02 }, { m10, m11, m12 } };
/// <summary>
/// Creates a 1D array with all values (internal column-major order)
/// </summary>
public Half[] Values1D => new[] { m00, m01, m02, m10, m11, m12 };
/// <summary>
/// Gets or sets the column nr 0
/// </summary>
public hvec3 Column0
{
get
{
return new hvec3(m00, m01, m02);
}
set
{
m00 = value.x;
m01 = value.y;
m02 = value.z;
}
}
/// <summary>
/// Gets or sets the column nr 1
/// </summary>
public hvec3 Column1
{
get
{
return new hvec3(m10, m11, m12);
}
set
{
m10 = value.x;
m11 = value.y;
m12 = value.z;
}
}
/// <summary>
/// Gets or sets the row nr 0 (one component per column)
/// </summary>
public hvec2 Row0
{
get
{
return new hvec2(m00, m10);
}
set
{
m00 = value.x;
m10 = value.y;
}
}
/// <summary>
/// Gets or sets the row nr 1 (one component per column)
/// </summary>
public hvec2 Row1
{
get
{
return new hvec2(m01, m11);
}
set
{
m01 = value.x;
m11 = value.y;
}
}
/// <summary>
/// Gets or sets the row nr 2 (one component per column)
/// </summary>
public hvec2 Row2
{
get
{
return new hvec2(m02, m12);
}
set
{
m02 = value.x;
m12 = value.y;
}
}
#endregion
#region Static Properties
// All predefined matrices below are getter-only auto-properties, so each is
// allocated exactly once at type initialization and shared by all callers.
/// <summary>
/// Predefined all-zero matrix
/// </summary>
public static hmat2x3 Zero { get; } = new hmat2x3(Half.Zero, Half.Zero, Half.Zero, Half.Zero, Half.Zero, Half.Zero);
/// <summary>
/// Predefined all-ones matrix
/// </summary>
public static hmat2x3 Ones { get; } = new hmat2x3(Half.One, Half.One, Half.One, Half.One, Half.One, Half.One);
/// <summary>
/// Predefined identity matrix (ones on the main diagonal m00, m11; zero elsewhere)
/// </summary>
public static hmat2x3 Identity { get; } = new hmat2x3(Half.One, Half.Zero, Half.Zero, Half.Zero, Half.One, Half.Zero);
/// <summary>
/// Predefined all-MaxValue matrix
/// </summary>
public static hmat2x3 AllMaxValue { get; } = new hmat2x3(Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue, Half.MaxValue);
/// <summary>
/// Predefined diagonal-MaxValue matrix (MaxValue on m00, m11; zero elsewhere)
/// </summary>
public static hmat2x3 DiagonalMaxValue { get; } = new hmat2x3(Half.MaxValue, Half.Zero, Half.Zero, Half.Zero, Half.MaxValue, Half.Zero);
/// <summary>
/// Predefined all-MinValue matrix
/// </summary>
public static hmat2x3 AllMinValue { get; } = new hmat2x3(Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue, Half.MinValue);
/// <summary>
/// Predefined diagonal-MinValue matrix
/// </summary>
public static hmat2x3 DiagonalMinValue { get; } = new hmat2x3(Half.MinValue, Half.Zero, Half.Zero, Half.Zero, Half.MinValue, Half.Zero);
/// <summary>
/// Predefined all-Epsilon matrix
/// </summary>
public static hmat2x3 AllEpsilon { get; } = new hmat2x3(Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon, Half.Epsilon);
/// <summary>
/// Predefined diagonal-Epsilon matrix
/// </summary>
public static hmat2x3 DiagonalEpsilon { get; } = new hmat2x3(Half.Epsilon, Half.Zero, Half.Zero, Half.Zero, Half.Epsilon, Half.Zero);
/// <summary>
/// Predefined all-NaN matrix
/// </summary>
public static hmat2x3 AllNaN { get; } = new hmat2x3(Half.NaN, Half.NaN, Half.NaN, Half.NaN, Half.NaN, Half.NaN);
/// <summary>
/// Predefined diagonal-NaN matrix
/// </summary>
public static hmat2x3 DiagonalNaN { get; } = new hmat2x3(Half.NaN, Half.Zero, Half.Zero, Half.Zero, Half.NaN, Half.Zero);
/// <summary>
/// Predefined all-NegativeInfinity matrix
/// </summary>
public static hmat2x3 AllNegativeInfinity { get; } = new hmat2x3(Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity, Half.NegativeInfinity);
/// <summary>
/// Predefined diagonal-NegativeInfinity matrix
/// </summary>
public static hmat2x3 DiagonalNegativeInfinity { get; } = new hmat2x3(Half.NegativeInfinity, Half.Zero, Half.Zero, Half.Zero, Half.NegativeInfinity, Half.Zero);
/// <summary>
/// Predefined all-PositiveInfinity matrix
/// </summary>
public static hmat2x3 AllPositiveInfinity { get; } = new hmat2x3(Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity, Half.PositiveInfinity);
/// <summary>
/// Predefined diagonal-PositiveInfinity matrix
/// </summary>
public static hmat2x3 DiagonalPositiveInfinity { get; } = new hmat2x3(Half.PositiveInfinity, Half.Zero, Half.Zero, Half.Zero, Half.PositiveInfinity, Half.Zero);
#endregion
#region Functions
/// <summary>
/// Returns an enumerator that iterates through all fields in column-major order
/// (m00, m01, m02, then m10, m11, m12) — the same order as Values1D and the flat indexer.
/// </summary>
public IEnumerator<Half> GetEnumerator()
{
yield return m00;
yield return m01;
yield return m02;
yield return m10;
yield return m11;
yield return m12;
}
/// <summary>
/// Returns an enumerator that iterates through all fields (non-generic IEnumerable support).
/// </summary>
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
#endregion
/// <summary>
/// Returns the number of Fields (2 columns x 3 rows = 6).
/// </summary>
public int Count => 6;
/// <summary>
/// Gets/Sets a specific indexed component (a bit slower than direct access).
/// </summary>
/// <param name="fieldIndex">Flat field index in column-major order, 0..5.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="fieldIndex"/> is outside [0, 5].</exception>
public Half this[int fieldIndex]
{
    get
    {
        switch (fieldIndex)
        {
            case 0: return m00;
            case 1: return m01;
            case 2: return m02;
            case 3: return m10;
            case 4: return m11;
            case 5: return m12;
            default: throw new ArgumentOutOfRangeException(nameof(fieldIndex));
        }
    }
    set
    {
        switch (fieldIndex)
        {
            case 0: this.m00 = value; break;
            case 1: this.m01 = value; break;
            case 2: this.m02 = value; break;
            case 3: this.m10 = value; break;
            case 4: this.m11 = value; break;
            case 5: this.m12 = value; break;
            default: throw new ArgumentOutOfRangeException(nameof(fieldIndex));
        }
    }
}
/// <summary>
/// Gets/Sets a specific 2D-indexed component (a bit slower than direct access).
/// </summary>
/// <param name="col">Column index, 0..1.</param>
/// <param name="row">Row index, 0..2.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="col"/> or <paramref name="row"/> is out of range.</exception>
public Half this[int col, int row]
{
    get
    {
        // Validate both axes individually; the flat index alone would silently
        // alias out-of-range cells (e.g. [0, 4] would otherwise read m11).
        if (col < 0 || col > 1) throw new ArgumentOutOfRangeException(nameof(col));
        if (row < 0 || row > 2) throw new ArgumentOutOfRangeException(nameof(row));
        return this[col * 3 + row];
    }
    set
    {
        if (col < 0 || col > 1) throw new ArgumentOutOfRangeException(nameof(col));
        if (row < 0 || row > 2) throw new ArgumentOutOfRangeException(nameof(row));
        this[col * 3 + row] = value;
    }
}
/// <summary>
/// Returns true iff this equals rhs component-wise (uses Half.Equals per field).
/// </summary>
public bool Equals(hmat2x3 rhs) => (((m00.Equals(rhs.m00) && m01.Equals(rhs.m01)) && m02.Equals(rhs.m02)) && ((m10.Equals(rhs.m10) && m11.Equals(rhs.m11)) && m12.Equals(rhs.m12)));
/// <summary>
/// Returns true iff this equals rhs type- and component-wise.
/// </summary>
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj)) return false;
// Any non-hmat2x3 argument (including other matrix types) compares unequal.
return obj is hmat2x3 && Equals((hmat2x3) obj);
}
/// <summary>
/// Returns true iff this equals rhs component-wise.
/// </summary>
public static bool operator ==(hmat2x3 lhs, hmat2x3 rhs) => lhs.Equals(rhs);
/// <summary>
/// Returns true iff this does not equal rhs (component-wise).
/// </summary>
public static bool operator !=(hmat2x3 lhs, hmat2x3 rhs) => !lhs.Equals(rhs);
/// <summary>
/// Returns a hash code for this instance.
/// </summary>
public override int GetHashCode()
{
unchecked
{
// Standard multiply-by-prime (397) hash combiner over all six fields,
// folded in the same column-major order the fields are stored in.
return ((((((((((m00.GetHashCode()) * 397) ^ m01.GetHashCode()) * 397) ^ m02.GetHashCode()) * 397) ^ m10.GetHashCode()) * 397) ^ m11.GetHashCode()) * 397) ^ m12.GetHashCode();
}
}
/// <summary>
/// Returns a transposed version of this matrix (3 columns x 2 rows).
/// </summary>
public hmat3x2 Transposed => new hmat3x2(m00, m10, m01, m11, m02, m12);
/// <summary>
/// Returns the minimal component of this matrix.
/// </summary>
public Half MinElement => Half.Min(Half.Min(Half.Min(Half.Min(Half.Min(m00, m01), m02), m10), m11), m12);
/// <summary>
/// Returns the maximal component of this matrix.
/// </summary>
public Half MaxElement => Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(m00, m01), m02), m10), m11), m12);
/// <summary>
/// Returns the euclidean (Frobenius) length of this matrix.
/// Note: Length, Norm and Norm2 all evaluate the identical expression.
/// </summary>
public float Length => (float)Math.Sqrt((((m00*m00 + m01*m01) + m02*m02) + ((m10*m10 + m11*m11) + m12*m12)));
/// <summary>
/// Returns the squared euclidean length of this matrix (avoids the square root).
/// </summary>
public float LengthSqr => (((m00*m00 + m01*m01) + m02*m02) + ((m10*m10 + m11*m11) + m12*m12));
/// <summary>
/// Returns the sum of all fields.
/// </summary>
public Half Sum => (((m00 + m01) + m02) + ((m10 + m11) + m12));
/// <summary>
/// Returns the euclidean norm of this matrix (same value as Length).
/// </summary>
public float Norm => (float)Math.Sqrt((((m00*m00 + m01*m01) + m02*m02) + ((m10*m10 + m11*m11) + m12*m12)));
/// <summary>
/// Returns the one-norm of this matrix (sum of absolute values of all fields).
/// </summary>
public float Norm1 => (((Half.Abs(m00) + Half.Abs(m01)) + Half.Abs(m02)) + ((Half.Abs(m10) + Half.Abs(m11)) + Half.Abs(m12)));
/// <summary>
/// Returns the two-norm of this matrix (same value as Length).
/// </summary>
public float Norm2 => (float)Math.Sqrt((((m00*m00 + m01*m01) + m02*m02) + ((m10*m10 + m11*m11) + m12*m12)));
/// <summary>
/// Returns the max-norm of this matrix (largest absolute field value).
/// </summary>
public Half NormMax => Half.Max(Half.Max(Half.Max(Half.Max(Half.Max(Half.Abs(m00), Half.Abs(m01)), Half.Abs(m02)), Half.Abs(m10)), Half.Abs(m11)), Half.Abs(m12));
/// <summary>
/// Returns the p-norm of this matrix.
/// NOTE(review): divides by p (1 / p), so presumably callers must pass p != 0 — confirm.
/// </summary>
public double NormP(double p) => Math.Pow((((Math.Pow((double)Half.Abs(m00), p) + Math.Pow((double)Half.Abs(m01), p)) + Math.Pow((double)Half.Abs(m02), p)) + ((Math.Pow((double)Half.Abs(m10), p) + Math.Pow((double)Half.Abs(m11), p)) + Math.Pow((double)Half.Abs(m12), p))), 1 / p);
// Matrix products below follow the column-major convention: result column j is
// lhs multiplied by rhs's column j, expanded field-by-field.
/// <summary>
/// Executes a matrix-matrix-multiplication hmat2x3 * hmat2 -> hmat2x3.
/// </summary>
public static hmat2x3 operator*(hmat2x3 lhs, hmat2 rhs) => new hmat2x3((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01), (lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01), (lhs.m02 * rhs.m00 + lhs.m12 * rhs.m01), (lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11), (lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11), (lhs.m02 * rhs.m10 + lhs.m12 * rhs.m11));
/// <summary>
/// Executes a matrix-matrix-multiplication hmat2x3 * hmat3x2 -> hmat3.
/// </summary>
public static hmat3 operator*(hmat2x3 lhs, hmat3x2 rhs) => new hmat3((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01), (lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01), (lhs.m02 * rhs.m00 + lhs.m12 * rhs.m01), (lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11), (lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11), (lhs.m02 * rhs.m10 + lhs.m12 * rhs.m11), (lhs.m00 * rhs.m20 + lhs.m10 * rhs.m21), (lhs.m01 * rhs.m20 + lhs.m11 * rhs.m21), (lhs.m02 * rhs.m20 + lhs.m12 * rhs.m21));
/// <summary>
/// Executes a matrix-matrix-multiplication hmat2x3 * hmat4x2 -> hmat4x3.
/// </summary>
public static hmat4x3 operator*(hmat2x3 lhs, hmat4x2 rhs) => new hmat4x3((lhs.m00 * rhs.m00 + lhs.m10 * rhs.m01), (lhs.m01 * rhs.m00 + lhs.m11 * rhs.m01), (lhs.m02 * rhs.m00 + lhs.m12 * rhs.m01), (lhs.m00 * rhs.m10 + lhs.m10 * rhs.m11), (lhs.m01 * rhs.m10 + lhs.m11 * rhs.m11), (lhs.m02 * rhs.m10 + lhs.m12 * rhs.m11), (lhs.m00 * rhs.m20 + lhs.m10 * rhs.m21), (lhs.m01 * rhs.m20 + lhs.m11 * rhs.m21), (lhs.m02 * rhs.m20 + lhs.m12 * rhs.m21), (lhs.m00 * rhs.m30 + lhs.m10 * rhs.m31), (lhs.m01 * rhs.m30 + lhs.m11 * rhs.m31), (lhs.m02 * rhs.m30 + lhs.m12 * rhs.m31));
/// <summary>
/// Executes a matrix-vector-multiplication (2D vector in, 3D vector out).
/// </summary>
public static hvec3 operator*(hmat2x3 m, hvec2 v) => new hvec3((m.m00 * v.x + m.m10 * v.y), (m.m01 * v.x + m.m11 * v.y), (m.m02 * v.x + m.m12 * v.y));
/// <summary>
/// Executes a component-wise * (multiply).
/// </summary>
public static hmat2x3 CompMul(hmat2x3 A, hmat2x3 B) => new hmat2x3(A.m00 * B.m00, A.m01 * B.m01, A.m02 * B.m02, A.m10 * B.m10, A.m11 * B.m11, A.m12 * B.m12);
/// <summary>
/// Executes a component-wise / (divide).
/// </summary>
public static hmat2x3 CompDiv(hmat2x3 A, hmat2x3 B) => new hmat2x3(A.m00 / B.m00, A.m01 / B.m01, A.m02 / B.m02, A.m10 / B.m10, A.m11 / B.m11, A.m12 / B.m12);
/// <summary>
/// Executes a component-wise + (add).
/// </summary>
public static hmat2x3 CompAdd(hmat2x3 A, hmat2x3 B) => new hmat2x3(A.m00 + B.m00, A.m01 + B.m01, A.m02 + B.m02, A.m10 + B.m10, A.m11 + B.m11, A.m12 + B.m12);
/// <summary>
/// Executes a component-wise - (subtract).
/// </summary>
public static hmat2x3 CompSub(hmat2x3 A, hmat2x3 B) => new hmat2x3(A.m00 - B.m00, A.m01 - B.m01, A.m02 - B.m02, A.m10 - B.m10, A.m11 - B.m11, A.m12 - B.m12);
/// <summary>
/// Executes a component-wise + (add).
/// </summary>
public static hmat2x3 operator+(hmat2x3 lhs, hmat2x3 rhs) => new hmat2x3(lhs.m00 + rhs.m00, lhs.m01 + rhs.m01, lhs.m02 + rhs.m02, lhs.m10 + rhs.m10, lhs.m11 + rhs.m11, lhs.m12 + rhs.m12);
/// <summary>
/// Executes a component-wise + (add) with a scalar.
/// </summary>
public static hmat2x3 operator+(hmat2x3 lhs, Half rhs) => new hmat2x3(lhs.m00 + rhs, lhs.m01 + rhs, lhs.m02 + rhs, lhs.m10 + rhs, lhs.m11 + rhs, lhs.m12 + rhs);
/// <summary>
/// Executes a component-wise + (add) with a scalar.
/// </summary>
public static hmat2x3 operator+(Half lhs, hmat2x3 rhs) => new hmat2x3(lhs + rhs.m00, lhs + rhs.m01, lhs + rhs.m02, lhs + rhs.m10, lhs + rhs.m11, lhs + rhs.m12);
/// <summary>
/// Executes a component-wise - (subtract).
/// </summary>
public static hmat2x3 operator-(hmat2x3 lhs, hmat2x3 rhs) => new hmat2x3(lhs.m00 - rhs.m00, lhs.m01 - rhs.m01, lhs.m02 - rhs.m02, lhs.m10 - rhs.m10, lhs.m11 - rhs.m11, lhs.m12 - rhs.m12);
/// <summary>
/// Executes a component-wise - (subtract) with a scalar.
/// </summary>
public static hmat2x3 operator-(hmat2x3 lhs, Half rhs) => new hmat2x3(lhs.m00 - rhs, lhs.m01 - rhs, lhs.m02 - rhs, lhs.m10 - rhs, lhs.m11 - rhs, lhs.m12 - rhs);
/// <summary>
/// Executes a component-wise - (subtract) with a scalar.
/// </summary>
public static hmat2x3 operator-(Half lhs, hmat2x3 rhs) => new hmat2x3(lhs - rhs.m00, lhs - rhs.m01, lhs - rhs.m02, lhs - rhs.m10, lhs - rhs.m11, lhs - rhs.m12);
/// <summary>
/// Executes a component-wise / (divide) with a scalar.
/// </summary>
public static hmat2x3 operator/(hmat2x3 lhs, Half rhs) => new hmat2x3(lhs.m00 / rhs, lhs.m01 / rhs, lhs.m02 / rhs, lhs.m10 / rhs, lhs.m11 / rhs, lhs.m12 / rhs);
/// <summary>
/// Executes a component-wise / (divide) with a scalar.
/// </summary>
public static hmat2x3 operator/(Half lhs, hmat2x3 rhs) => new hmat2x3(lhs / rhs.m00, lhs / rhs.m01, lhs / rhs.m02, lhs / rhs.m10, lhs / rhs.m11, lhs / rhs.m12);
/// <summary>
/// Executes a component-wise * (multiply) with a scalar.
/// </summary>
public static hmat2x3 operator*(hmat2x3 lhs, Half rhs) => new hmat2x3(lhs.m00 * rhs, lhs.m01 * rhs, lhs.m02 * rhs, lhs.m10 * rhs, lhs.m11 * rhs, lhs.m12 * rhs);
/// <summary>
/// Executes a component-wise * (multiply) with a scalar.
/// </summary>
public static hmat2x3 operator*(Half lhs, hmat2x3 rhs) => new hmat2x3(lhs * rhs.m00, lhs * rhs.m01, lhs * rhs.m02, lhs * rhs.m10, lhs * rhs.m11, lhs * rhs.m12);
// The comparison operators below return a bmat2x3 mask (one bool per component),
// not a single bool — unlike == and != which compare whole matrices.
/// <summary>
/// Executes a component-wise lesser-than comparison.
/// </summary>
public static bmat2x3 operator<(hmat2x3 lhs, hmat2x3 rhs) => new bmat2x3(lhs.m00 < rhs.m00, lhs.m01 < rhs.m01, lhs.m02 < rhs.m02, lhs.m10 < rhs.m10, lhs.m11 < rhs.m11, lhs.m12 < rhs.m12);
/// <summary>
/// Executes a component-wise lesser-than comparison with a scalar.
/// </summary>
public static bmat2x3 operator<(hmat2x3 lhs, Half rhs) => new bmat2x3(lhs.m00 < rhs, lhs.m01 < rhs, lhs.m02 < rhs, lhs.m10 < rhs, lhs.m11 < rhs, lhs.m12 < rhs);
/// <summary>
/// Executes a component-wise lesser-than comparison with a scalar.
/// </summary>
public static bmat2x3 operator<(Half lhs, hmat2x3 rhs) => new bmat2x3(lhs < rhs.m00, lhs < rhs.m01, lhs < rhs.m02, lhs < rhs.m10, lhs < rhs.m11, lhs < rhs.m12);
/// <summary>
/// Executes a component-wise lesser-or-equal comparison.
/// </summary>
public static bmat2x3 operator<=(hmat2x3 lhs, hmat2x3 rhs) => new bmat2x3(lhs.m00 <= rhs.m00, lhs.m01 <= rhs.m01, lhs.m02 <= rhs.m02, lhs.m10 <= rhs.m10, lhs.m11 <= rhs.m11, lhs.m12 <= rhs.m12);
/// <summary>
/// Executes a component-wise lesser-or-equal comparison with a scalar.
/// </summary>
public static bmat2x3 operator<=(hmat2x3 lhs, Half rhs) => new bmat2x3(lhs.m00 <= rhs, lhs.m01 <= rhs, lhs.m02 <= rhs, lhs.m10 <= rhs, lhs.m11 <= rhs, lhs.m12 <= rhs);
/// <summary>
/// Executes a component-wise lesser-or-equal comparison with a scalar.
/// </summary>
public static bmat2x3 operator<=(Half lhs, hmat2x3 rhs) => new bmat2x3(lhs <= rhs.m00, lhs <= rhs.m01, lhs <= rhs.m02, lhs <= rhs.m10, lhs <= rhs.m11, lhs <= rhs.m12);
/// <summary>
/// Executes a component-wise greater-than comparison.
/// </summary>
public static bmat2x3 operator>(hmat2x3 lhs, hmat2x3 rhs) => new bmat2x3(lhs.m00 > rhs.m00, lhs.m01 > rhs.m01, lhs.m02 > rhs.m02, lhs.m10 > rhs.m10, lhs.m11 > rhs.m11, lhs.m12 > rhs.m12);
/// <summary>
/// Executes a component-wise greater-than comparison with a scalar.
/// </summary>
public static bmat2x3 operator>(hmat2x3 lhs, Half rhs) => new bmat2x3(lhs.m00 > rhs, lhs.m01 > rhs, lhs.m02 > rhs, lhs.m10 > rhs, lhs.m11 > rhs, lhs.m12 > rhs);
/// <summary>
/// Executes a component-wise greater-than comparison with a scalar.
/// </summary>
public static bmat2x3 operator>(Half lhs, hmat2x3 rhs) => new bmat2x3(lhs > rhs.m00, lhs > rhs.m01, lhs > rhs.m02, lhs > rhs.m10, lhs > rhs.m11, lhs > rhs.m12);
/// <summary>
/// Executes a component-wise greater-or-equal comparison.
/// </summary>
public static bmat2x3 operator>=(hmat2x3 lhs, hmat2x3 rhs) => new bmat2x3(lhs.m00 >= rhs.m00, lhs.m01 >= rhs.m01, lhs.m02 >= rhs.m02, lhs.m10 >= rhs.m10, lhs.m11 >= rhs.m11, lhs.m12 >= rhs.m12);
/// <summary>
/// Executes a component-wise greater-or-equal comparison with a scalar.
/// </summary>
public static bmat2x3 operator>=(hmat2x3 lhs, Half rhs) => new bmat2x3(lhs.m00 >= rhs, lhs.m01 >= rhs, lhs.m02 >= rhs, lhs.m10 >= rhs, lhs.m11 >= rhs, lhs.m12 >= rhs);
/// <summary>
/// Executes a component-wise greater-or-equal comparison with a scalar.
/// </summary>
public static bmat2x3 operator>=(Half lhs, hmat2x3 rhs) => new bmat2x3(lhs >= rhs.m00, lhs >= rhs.m01, lhs >= rhs.m02, lhs >= rhs.m10, lhs >= rhs.m11, lhs >= rhs.m12);
}
}
| |
// ****************************************************************
// This is free software licensed under the NUnit license. You
// may obtain a copy of the license as well as information regarding
// copyright ownership at http://nunit.org/?p=license&r=2.4.
// ****************************************************************
using System;
using System.IO;
using System.Collections;
using System.Reflection;
using NUnit.Core.Extensibility;
namespace NUnit.Core.Builders
{
/// <summary>
/// Builds a TestSuite from an assembly: the whole assembly, a single fixture
/// identified by name, or all fixtures within a named namespace.
/// </summary>
public class TestAssemblyBuilder
{
    #region Instance Fields
    /// <summary>
    /// The assembly loaded by the most recent Build call, or null
    /// </summary>
    Assembly assembly;

    /// <summary>
    /// Our LegacySuite builder, which is only used when a
    /// fixture has been passed by name on the command line.
    /// </summary>
    ISuiteBuilder legacySuiteBuilder;

    /// <summary>
    /// Backing field for AssemblyInfo, created lazily
    /// </summary>
    private TestAssemblyInfo assemblyInfo = null;
    #endregion

    #region Properties
    public Assembly Assembly
    {
        get { return assembly; }
    }

    /// <summary>
    /// Lazily computed information about the loaded assembly: its local path,
    /// its image runtime version and the test frameworks it references.
    /// </summary>
    public TestAssemblyInfo AssemblyInfo
    {
        get
        {
            if (assemblyInfo == null && assembly != null)
            {
                // CodeBase is a URI; convert it to a plain local path first.
                string path = new Uri(assembly.GetName().CodeBase).LocalPath;
                AssemblyReader rdr = new AssemblyReader(path);
                // ImageRuntimeVersion is of the form "vX.Y..." - strip the leading 'v'.
                Version runtimeVersion = new Version(rdr.ImageRuntimeVersion.Substring(1));
                IList frameworks = CoreExtensions.Host.TestFrameworks.GetReferencedFrameworks(assembly);
                assemblyInfo = new TestAssemblyInfo(path, runtimeVersion, frameworks);
            }

            return assemblyInfo;
        }
    }
    #endregion

    #region Constructor
    public TestAssemblyBuilder()
    {
        // TODO: Keeping this separate till we can make
        // it work in all situations.
        legacySuiteBuilder = new NUnit.Core.Builders.LegacySuiteBuilder();
    }
    #endregion

    #region Build Methods
    /// <summary>
    /// Builds the tests identified by testName in the given assembly: a single
    /// fixture when testName names a type, otherwise all fixtures in that
    /// namespace. Returns null if the assembly cannot be loaded or nothing matches.
    /// </summary>
    public Test Build(string assemblyName, string testName, bool autoSuites)
    {
        if (string.IsNullOrEmpty(testName))
            return Build(assemblyName, autoSuites);

        this.assembly = Load(assemblyName);
        if (assembly == null) return null;

        // If provided test name is actually a fixture,
        // just build and return that!
        Type testType = assembly.GetType(testName);
        if (testType != null)
            return BuildSingleFixture(testType);

        // Assume that testName is a namespace and get all fixtures in it
        IList fixtures = GetFixtures(assembly, testName);
        if (fixtures.Count > 0)
            return BuildTestAssembly(assemblyName, fixtures, autoSuites);

        return null;
    }

    /// <summary>
    /// Builds a suite containing every fixture in the assembly,
    /// or null if the assembly cannot be loaded.
    /// </summary>
    public TestSuite Build(string assemblyName, bool autoSuites)
    {
        this.assembly = Load(assemblyName);
        if (this.assembly == null) return null;

        IList fixtures = GetFixtures(assembly, null);
        return BuildTestAssembly(assemblyName, fixtures, autoSuites);
    }

    private TestSuite BuildTestAssembly(string assemblyName, IList fixtures, bool autoSuites)
    {
        TestSuite testAssembly = new TestSuite(assemblyName);

        if (autoSuites)
        {
            // Arrange the fixtures into a hierarchy of namespace suites.
            NamespaceTreeBuilder treeBuilder =
                new NamespaceTreeBuilder(testAssembly);
            treeBuilder.Add(fixtures);
            testAssembly = treeBuilder.RootSuite;
        }
        else
        {
            foreach (TestSuite fixture in fixtures)
            {
                if (fixture is SetUpFixture)
                {
                    // SetUpFixtures only make sense within a namespace hierarchy.
                    fixture.RunState = RunState.NotRunnable;
                    fixture.IgnoreReason = "SetUpFixture cannot be used when loading tests as a flat list of fixtures";
                }

                testAssembly.Add(fixture);
            }
        }

        if (fixtures.Count == 0)
        {
            testAssembly.RunState = RunState.NotRunnable;
            testAssembly.IgnoreReason = "Has no TestFixtures";
        }

        NUnitFramework.ApplyCommonAttributes(assembly, testAssembly);

        // TODO: Make this an option? Add Option to sort assemblies as well?
        testAssembly.Sort();

        return testAssembly;
    }
    #endregion

    #region Helper Methods
    private Assembly Load(string path)
    {
        // Change currentDirectory in case assembly references unmanaged dlls
        using (new DirectorySwapper(Path.GetDirectoryName(path)))
        {
            // Throws if this isn't a managed assembly or if it was built
            // with a later version of the same assembly.
            AssemblyName.GetAssemblyName(Path.GetFileName(path));

            // TODO: Figure out why we can't load using the assembly name
            // in all cases. Might be a problem with the tests themselves.
            Assembly assembly = Assembly.Load(Path.GetFileNameWithoutExtension(path));

            if (assembly != null)
                CoreExtensions.Host.InstallAdhocExtensions(assembly);

            return assembly;
        }
    }

    /// <summary>
    /// Returns the buildable fixtures in the assembly, optionally restricted
    /// to the namespace ns.
    /// </summary>
    private IList GetFixtures(Assembly assembly, string ns)
    {
        ArrayList fixtures = new ArrayList();

        IList testTypes = GetCandidateFixtureTypes(assembly, ns);
        foreach (Type testType in testTypes)
        {
            if (TestFixtureBuilder.CanBuildFrom(testType))
                fixtures.Add(TestFixtureBuilder.BuildFrom(testType));
        }

        return fixtures;
    }

    private Test BuildSingleFixture(Type testType)
    {
        // The only place we currently allow legacy suites
        if (legacySuiteBuilder.CanBuildFrom(testType))
            return legacySuiteBuilder.BuildFrom(testType);

        return TestFixtureBuilder.BuildFrom(testType);
    }

    /// <summary>
    /// Returns all types in the assembly when ns is null or empty; otherwise
    /// only those whose full name starts with "ns.".
    /// </summary>
    private IList GetCandidateFixtureTypes(Assembly assembly, string ns)
    {
        IList types = assembly.GetTypes();
        if (string.IsNullOrEmpty(ns) || types.Count == 0)
            return types;

        string prefix = ns + ".";

        ArrayList result = new ArrayList();
        foreach (Type type in types)
        {
            if (type.FullName.StartsWith(prefix))
                result.Add(type);
        }

        return result;
    }
    #endregion
}
}
| |
//
// ServiceStack.OrmLite: Light-weight POCO ORM for .NET and Mono
//
// Authors:
// Demis Bellot (demis.bellot@gmail.com)
//
// Copyright 2013 Service Stack LLC. All Rights Reserved.
//
// Licensed under the same terms of ServiceStack.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using ServiceStack.Data;
using ServiceStack.Logging;
namespace ServiceStack.OrmLite
{
public static class OrmLiteWriteCommandExtensions
{
// Shared logger for every write-command extension in this class.
private static readonly ILog Log = LogManager.GetLogger(typeof(OrmLiteWriteCommandExtensions));
// Central debug-log hook; callers guard with Log.IsDebugEnabled to skip the call entirely.
private static void LogDebug(string fmt)
{
Log.Debug(fmt);
}
/// <summary>
/// Creates (or, when overwrite is true, recreates) one table per given type.
/// </summary>
internal static void CreateTables(this IDbCommand dbCmd, bool overwrite, params Type[] tableTypes)
{
    for (var i = 0; i < tableTypes.Length; i++)
    {
        CreateTable(dbCmd, overwrite, tableTypes[i]);
    }
}
/// <summary>
/// Creates the table mapped to <typeparamref name="T"/>; when overwrite is true
/// an existing table is dropped and recreated.
/// </summary>
internal static void CreateTable<T>(this IDbCommand dbCmd, bool overwrite = false)
{
    CreateTable(dbCmd, overwrite, typeof(T));
}
// Creates the table for modelType, honoring the model's Pre/Post DDL hooks.
// Flow: (1) optionally drop an existing table, (2) create table + post-create SQL,
// (3) create indexes, (4) create sequences. "Already exists" errors from concurrent
// creation are logged and ignored rather than propagated.
internal static void CreateTable(this IDbCommand dbCmd, bool overwrite, Type modelType)
{
var modelDef = modelType.GetModelDefinition();
var dialectProvider = dbCmd.GetDialectProvider();
var tableName = dialectProvider.NamingStrategy.GetTableName(modelDef);
var tableExists = dialectProvider.DoesTableExist(dbCmd, tableName);
if (overwrite && tableExists)
{
if (modelDef.PreDropTableSql != null)
{
ExecuteSql(dbCmd, modelDef.PreDropTableSql);
}
DropTable(dbCmd, modelDef);
var postDropTableSql = dialectProvider.ToPostDropTableStatement(modelDef);
if (postDropTableSql != null)
{
ExecuteSql(dbCmd, postDropTableSql);
}
if (modelDef.PostDropTableSql != null)
{
ExecuteSql(dbCmd, modelDef.PostDropTableSql);
}
// The table is gone now, so the creation branch below must run.
tableExists = false;
}
try
{
if (!tableExists)
{
if (modelDef.PreCreateTableSql != null)
{
ExecuteSql(dbCmd, modelDef.PreCreateTableSql);
}
ExecuteSql(dbCmd, dialectProvider.ToCreateTableStatement(modelType));
var postCreateTableSql = dialectProvider.ToPostCreateTableStatement(modelDef);
if (postCreateTableSql != null)
{
ExecuteSql(dbCmd, postCreateTableSql);
}
if (modelDef.PostCreateTableSql != null)
{
ExecuteSql(dbCmd, modelDef.PostCreateTableSql);
}
var sqlIndexes = dialectProvider.ToCreateIndexStatements(modelType);
foreach (var sqlIndex in sqlIndexes)
{
try
{
dbCmd.ExecuteSql(sqlIndex);
}
catch (Exception exIndex)
{
// A pre-existing index is not an error; keep creating the rest.
if (IgnoreAlreadyExistsError(exIndex))
{
Log.DebugFormat("Ignoring existing index '{0}': {1}", sqlIndex, exIndex.Message);
continue;
}
throw;
}
}
var sequenceList = dialectProvider.SequenceList(modelType);
if (sequenceList.Count > 0)
{
// Provider enumerated the sequences explicitly: create only the missing ones.
foreach (var seq in sequenceList)
{
if (dialectProvider.DoesSequenceExist(dbCmd, seq) == false)
{
var seqSql = dialectProvider.ToCreateSequenceStatement(modelType, seq);
dbCmd.ExecuteSql(seqSql);
}
}
}
else
{
// Fall back to running every create-sequence statement and ignoring duplicates.
var sequences = dialectProvider.ToCreateSequenceStatements(modelType);
foreach (var seq in sequences)
{
try
{
dbCmd.ExecuteSql(seq);
}
catch (Exception ex)
{
if (IgnoreAlreadyExistsGeneratorError(ex))
{
Log.DebugFormat("Ignoring existing generator '{0}': {1}", seq, ex.Message);
continue;
}
throw;
}
}
}
}
}
catch (Exception ex)
{
// Another connection may have created the table first; treat that as success.
if (IgnoreAlreadyExistsError(ex))
{
Log.DebugFormat("Ignoring existing table '{0}': {1}", modelDef.ModelName, ex.Message);
return;
}
throw;
}
}
/// <summary>
/// Drops the table mapped to <typeparamref name="T"/>, if it exists.
/// </summary>
internal static void DropTable<T>(this IDbCommand dbCmd)
{
    var modelDef = ModelDefinition<T>.Definition;
    DropTable(dbCmd, modelDef);
}
/// <summary>
/// Drops the table mapped to <paramref name="modelType"/>, if it exists.
/// </summary>
internal static void DropTable(this IDbCommand dbCmd, Type modelType)
{
    var modelDef = modelType.GetModelDefinition();
    DropTable(dbCmd, modelDef);
}
/// <summary>
/// Drops the table for each given type, in order, skipping tables that do not exist.
/// </summary>
internal static void DropTables(this IDbCommand dbCmd, params Type[] tableTypes)
{
    foreach (var tableType in tableTypes)
    {
        DropTable(dbCmd, tableType.GetModelDefinition());
    }
}
// Drops a single table (plus its foreign keys), running the model's
// Pre/Post drop SQL hooks. Failures are logged at debug level and rethrown.
private static void DropTable(IDbCommand dbCmd, ModelDefinition modelDef)
{
try
{
var dialectProvider = dbCmd.GetDialectProvider();
var tableName = dialectProvider.NamingStrategy.GetTableName(modelDef);
// Only attempt the drop when the table actually exists.
if (dialectProvider.DoesTableExist(dbCmd, tableName))
{
if (modelDef.PreDropTableSql != null)
{
ExecuteSql(dbCmd, modelDef.PreDropTableSql);
}
// Drop FK constraints first so the table itself can be dropped.
var dropTableFks = dialectProvider.GetDropForeignKeyConstraints(modelDef);
if (!string.IsNullOrEmpty(dropTableFks))
{
dbCmd.ExecuteSql(dropTableFks);
}
dbCmd.ExecuteSql("DROP TABLE " + dialectProvider.GetQuotedTableName(modelDef));
if (modelDef.PostDropTableSql != null)
{
ExecuteSql(dbCmd, modelDef.PostDropTableSql);
}
}
}
catch (Exception ex)
{
Log.DebugFormat("Could not drop table '{0}': {1}", modelDef.ModelName, ex.Message);
throw;
}
}
// Returns the SQL most recently assigned to this command, i.e. the last
// statement prepared or executed through these extensions.
internal static string LastSql(this IDbCommand dbCmd)
{
return dbCmd.CommandText;
}
/// <summary>
/// Assigns sql to the command and executes it as a non-query, logging the
/// statement at debug level. A configured ResultsFilter (used for testing)
/// intercepts the execution when present.
/// </summary>
internal static int ExecuteSql(this IDbCommand dbCmd, string sql)
{
    if (Log.IsDebugEnabled)
        LogDebug(sql);

    dbCmd.CommandText = sql;

    var resultsFilter = OrmLiteConfig.ResultsFilter;
    return resultsFilter != null
        ? resultsFilter.ExecuteSql(dbCmd)
        : dbCmd.ExecuteNonQuery();
}
/// <summary>
/// Returns true when the exception is an "object already exists" DDL error
/// (Sqlite: "already exists"; SQL Server: "There is already an object named").
/// </summary>
private static bool IgnoreAlreadyExistsError(Exception ex)
{
    var message = ex.Message;
    return message.Contains("already exists")
        || message.Contains("There is already an object named");
}
/// <summary>
/// Returns true when the exception is Firebird's "generator already exists" error.
/// </summary>
private static bool IgnoreAlreadyExistsGeneratorError(Exception ex)
{
    return ex.Message.Contains("attempt to store duplicate value");
}
/// <summary>
/// Populates objWithProperties from the current data-reader row using every
/// field of <typeparamref name="T"/> and no column-index cache.
/// </summary>
public static T PopulateWithSqlReader<T>(this T objWithProperties, IOrmLiteDialectProvider dialectProvider, IDataReader dataReader)
{
    return PopulateWithSqlReader(
        objWithProperties,
        dialectProvider,
        dataReader,
        ModelDefinition<T>.Definition.AllFieldDefinitionsArray,
        null);
}
/// <summary>
/// Returns the zero-based ordinal of the column mapped to <paramref name="fieldName"/>
/// by the naming strategy, or NotFound (-1) when the result set has no such column.
/// </summary>
public static int GetColumnIndex(this IDataReader dataReader, IOrmLiteDialectProvider dialectProvider, string fieldName)
{
    try
    {
        return dataReader.GetOrdinal(dialectProvider.NamingStrategy.GetColumnName(fieldName));
    }
    catch (IndexOutOfRangeException)
    {
        // Some ADO.NET providers throw for unknown columns instead of returning -1.
        return NotFound;
    }
}
/// <summary>
/// Resolves the reader column for <paramref name="fieldDef"/>: first via the naming
/// strategy, then by heuristic guessing, then falling back from the alias to the
/// original field name. Returns NotFound (-1) when nothing matches.
/// </summary>
internal static int FindColumnIndex(this IDataReader dataReader, IOrmLiteDialectProvider dialectProvider, FieldDefinition fieldDef)
{
    var index = dataReader.GetColumnIndex(dialectProvider, fieldDef.FieldName);
    if (index == NotFound)
    {
        index = TryGuessColumnIndex(fieldDef.FieldName, dataReader);
    }
    // Try fallback to original field name when overriden by alias
    if (index == NotFound && fieldDef.Alias != null && !OrmLiteConfig.DisableColumnGuessFallback)
    {
        index = dataReader.GetColumnIndex(dialectProvider, fieldDef.Name);
        if (index == NotFound)
        {
            index = TryGuessColumnIndex(fieldDef.Name, dataReader);
        }
    }
    return index;
}
// Sentinel returned when a column cannot be located in the data reader.
private const int NotFound = -1;
// Populates objWithProperties from the current reader row, one field at a time.
// indexCache (keyed by fieldDef.Name) memoizes column lookups across rows; pass null
// to resolve on every call. NOTE: any exception is logged and swallowed, so callers
// may receive a partially populated object rather than an error.
public static T PopulateWithSqlReader<T>(this T objWithProperties, IOrmLiteDialectProvider dialectProvider, IDataReader dataReader, FieldDefinition[] fieldDefs, Dictionary<string, int> indexCache)
{
try
{
foreach (var fieldDef in fieldDefs)
{
int index;
if (indexCache != null)
{
if (!indexCache.TryGetValue(fieldDef.Name, out index))
{
index = FindColumnIndex(dataReader, dialectProvider, fieldDef);
// Cache even NotFound (-1) so missing columns are only searched once.
indexCache.Add(fieldDef.Name, index);
}
}
else
{
index = FindColumnIndex(dataReader, dialectProvider, fieldDef);
}
dialectProvider.SetDbValue(fieldDef, dataReader, index, objWithProperties);
}
}
catch (Exception ex)
{
Log.Error(ex);
}
return objWithProperties;
}
// Despite the name, this regex matches the characters that are NOT allowed in a
// property name ([^0-9a-zA-Z_]); it is used to strip them from DB column names.
private static readonly Regex AllowedPropertyCharsRegex = new Regex(@"[^0-9a-zA-Z_]",
RegexOptions.Compiled | RegexOptions.CultureInvariant);
// Heuristically matches a C# field name against the reader's column names,
// tolerating underscores, special characters and DB-side prefixes. Linear scan
// over all columns; returns the first match or NotFound (-1). All comparisons
// are case-insensitive ordinal. Disabled entirely via OrmLiteConfig.DisableColumnGuessFallback.
private static int TryGuessColumnIndex(string fieldName, IDataReader dataReader)
{
if (OrmLiteConfig.DisableColumnGuessFallback)
return NotFound;
var fieldCount = dataReader.FieldCount;
for (var i = 0; i < fieldCount; i++)
{
var dbFieldName = dataReader.GetName(i);
// First guess: Maybe the DB field has underscores? (most common)
// e.g. CustomerId (C#) vs customer_id (DB)
var dbFieldNameWithNoUnderscores = dbFieldName.Replace("_", "");
if (string.Compare(fieldName, dbFieldNameWithNoUnderscores, StringComparison.OrdinalIgnoreCase) == 0)
{
return i;
}
// Next guess: Maybe the DB field has special characters?
// e.g. Quantity (C#) vs quantity% (DB)
var dbFieldNameSanitized = AllowedPropertyCharsRegex.Replace(dbFieldName, string.Empty);
if (string.Compare(fieldName, dbFieldNameSanitized, StringComparison.OrdinalIgnoreCase) == 0)
{
return i;
}
// Next guess: Maybe the DB field has special characters *and* has underscores?
// e.g. Quantity (C#) vs quantity_% (DB)
if (string.Compare(fieldName, dbFieldNameSanitized.Replace("_", string.Empty), StringComparison.OrdinalIgnoreCase) == 0)
{
return i;
}
// Next guess: Maybe the DB field has some prefix that we don't have in our C# field?
// e.g. CustomerId (C#) vs t130CustomerId (DB)
if (dbFieldName.EndsWith(fieldName, StringComparison.OrdinalIgnoreCase))
{
return i;
}
// Next guess: Maybe the DB field has some prefix that we don't have in our C# field *and* has underscores?
// e.g. CustomerId (C#) vs t130_CustomerId (DB)
if (dbFieldNameWithNoUnderscores.EndsWith(fieldName, StringComparison.OrdinalIgnoreCase))
{
return i;
}
// Next guess: Maybe the DB field has some prefix that we don't have in our C# field *and* has special characters?
// e.g. CustomerId (C#) vs t130#CustomerId (DB)
if (dbFieldNameSanitized.EndsWith(fieldName, StringComparison.OrdinalIgnoreCase))
{
return i;
}
// Next guess: Maybe the DB field has some prefix that we don't have in our C# field *and* has underscores *and* has special characters?
// e.g. CustomerId (C#) vs t130#Customer_I#d (DB)
if (dbFieldNameSanitized.Replace("_", "").EndsWith(fieldName, StringComparison.OrdinalIgnoreCase))
{
return i;
}
// Cater for Naming Strategies like PostgreSQL that has lower_underscore names
if (dbFieldNameSanitized.Replace("_", "").EndsWith(fieldName.Replace("_", ""), StringComparison.OrdinalIgnoreCase))
{
return i;
}
}
return NotFound;
}
/// <summary>
/// Updates a single row of <typeparamref name="T"/> with a parameterized statement,
/// applying the global <c>OrmLiteConfig.UpdateFilter</c> first.
/// </summary>
/// <returns>The number of rows affected (0 when there is nothing to update).</returns>
/// <exception cref="OptimisticConcurrencyException">
/// Thrown when the model has a row-version column and no row was updated.
/// </exception>
internal static int Update<T>(this IDbCommand dbCmd, T obj)
{
    var updateFilter = OrmLiteConfig.UpdateFilter;
    if (updateFilter != null)
        updateFilter(dbCmd, obj);

    var dialect = dbCmd.GetDialectProvider();
    var usesRowVersion = dialect.PrepareParameterizedUpdateStatement<T>(dbCmd);

    // No updatable fields => the provider emitted no SQL; treat as a no-op.
    if (string.IsNullOrEmpty(dbCmd.CommandText))
        return 0;

    dialect.SetParameterValues<T>(dbCmd, obj);

    var affected = dbCmd.ExecNonQuery();
    if (usesRowVersion && affected == 0)
        throw new OptimisticConcurrencyException();

    return affected;
}
/// <summary>Updates each given row inside one transaction; see <c>UpdateAll</c>.</summary>
internal static int Update<T>(this IDbCommand dbCmd, params T[] objs)
{
    return UpdateAll(dbCmd, objs);
}
/// <summary>
/// Updates every row in <paramref name="objs"/> with a single prepared statement.
/// When the command has no ambient transaction, one is started here, committed on
/// success and disposed either way.
/// </summary>
/// <returns>The total number of rows affected.</returns>
/// <exception cref="OptimisticConcurrencyException">
/// Thrown when the model has a row-version column and an update touches no rows.
/// </exception>
internal static int UpdateAll<T>(this IDbCommand dbCmd, IEnumerable<T> objs)
{
    var total = 0;
    IDbTransaction ownedTrans = null;
    try
    {
        // Only start (and own) a transaction when the caller hasn't supplied one.
        if (dbCmd.Transaction == null)
            dbCmd.Transaction = ownedTrans = dbCmd.Connection.BeginTransaction();

        var dialect = dbCmd.GetDialectProvider();
        var usesRowVersion = dialect.PrepareParameterizedUpdateStatement<T>(dbCmd);
        if (string.IsNullOrEmpty(dbCmd.CommandText))
            return 0;

        foreach (var row in objs)
        {
            var updateFilter = OrmLiteConfig.UpdateFilter;
            if (updateFilter != null)
                updateFilter(dbCmd, row);

            dialect.SetParameterValues<T>(dbCmd, row);

            var affected = dbCmd.ExecNonQuery();
            if (usesRowVersion && affected == 0)
                throw new OptimisticConcurrencyException();

            total += affected;
        }

        if (ownedTrans != null)
            ownedTrans.Commit();
    }
    finally
    {
        if (ownedTrans != null)
            ownedTrans.Dispose();
    }
    return total;
}
// Executes the prepared command and enforces optimistic-concurrency semantics:
// a row-versioned statement that touched no rows means the row was changed or
// deleted since it was last read.
private static int AssertRowsUpdated(IDbCommand dbCmd, bool hadRowVersion)
{
    var affected = dbCmd.ExecNonQuery();
    if (affected == 0 && hadRowVersion)
        throw new OptimisticConcurrencyException();
    return affected;
}
/// <summary>Deletes rows matching all fields of <paramref name="anonType"/>.</summary>
internal static int Delete<T>(this IDbCommand dbCmd, object anonType)
{
    var dialect = dbCmd.GetDialectProvider();
    var usesRowVersion = dialect.PrepareParameterizedDeleteStatement<T>(dbCmd, anonType.AllFields<T>());
    dialect.SetParameterValues<T>(dbCmd, anonType);
    return AssertRowsUpdated(dbCmd, usesRowVersion);
}
/// <summary>Deletes rows matching only the non-default fields of <paramref name="filter"/>.</summary>
internal static int DeleteNonDefaults<T>(this IDbCommand dbCmd, T filter)
{
    var dialect = dbCmd.GetDialectProvider();
    var usesRowVersion = dialect.PrepareParameterizedDeleteStatement<T>(dbCmd, filter.NonDefaultFields<T>());
    dialect.SetParameterValues<T>(dbCmd, filter);
    return AssertRowsUpdated(dbCmd, usesRowVersion);
}
/// <summary>Deletes each given row (matched on all fields) inside one transaction.</summary>
internal static int Delete<T>(this IDbCommand dbCmd, params object[] objs)
{
    return objs.Length == 0
        ? 0
        : DeleteAll<T>(dbCmd, objs[0].AllFields<T>(), objs);
}
/// <summary>Deletes each given row (matched on its non-default fields) inside one transaction.</summary>
internal static int DeleteNonDefaults<T>(this IDbCommand dbCmd, params T[] filters)
{
    return filters.Length == 0
        ? 0
        : DeleteAll<T>(dbCmd, filters[0].NonDefaultFields<T>(), filters.Map(x => (object)x));
}
// Core of the multi-row delete: prepares one parameterized DELETE over
// deleteFields, then executes it once per object. Starts (and owns) a
// transaction only when the command has none; commits on success, disposes
// the owned transaction either way.
private static int DeleteAll<T>(IDbCommand dbCmd, ICollection<string> deleteFields, IEnumerable<object> objs)
{
    var total = 0;
    IDbTransaction ownedTrans = null;
    try
    {
        if (dbCmd.Transaction == null)
            dbCmd.Transaction = ownedTrans = dbCmd.Connection.BeginTransaction();

        var dialect = dbCmd.GetDialectProvider();
        dialect.PrepareParameterizedDeleteStatement<T>(dbCmd, deleteFields);

        foreach (var row in objs)
        {
            dialect.SetParameterValues<T>(dbCmd, row);
            total += dbCmd.ExecNonQuery();
        }

        if (ownedTrans != null)
            ownedTrans.Commit();
    }
    finally
    {
        if (ownedTrans != null)
            ownedTrans.Dispose();
    }
    return total;
}
/// <summary>Deletes the row whose primary key equals <paramref name="id"/>.</summary>
internal static int DeleteById<T>(this IDbCommand dbCmd, object id)
{
    return dbCmd.ExecuteSql(DeleteByIdSql<T>(dbCmd, id));
}
/// <summary>
/// Builds a parameterized "DELETE ... WHERE pk = @p" statement and registers the
/// id parameter on <paramref name="dbCmd"/>. Note: does not clear existing parameters.
/// </summary>
internal static string DeleteByIdSql<T>(this IDbCommand dbCmd, object id)
{
    var dialect = dbCmd.GetDialectProvider();
    var modelDef = ModelDefinition<T>.Definition;
    var paramName = dialect.GetParam();

    var idParam = dbCmd.CreateParameter();
    idParam.ParameterName = paramName;
    idParam.Value = id;
    dbCmd.Parameters.Add(idParam);

    return string.Format("DELETE FROM {0} WHERE {1} = {2}",
        dialect.GetQuotedTableName(modelDef),
        dialect.GetQuotedColumnName(modelDef.PrimaryKey.FieldName),
        paramName);
}
/// <summary>
/// Deletes the row by primary key only when its row version still matches.
/// </summary>
/// <exception cref="OptimisticConcurrencyException">
/// Thrown when no row matched, i.e. it was modified or deleted since last read.
/// </exception>
internal static void DeleteById<T>(this IDbCommand dbCmd, object id, ulong rowVersion)
{
    var sql = DeleteByIdSql<T>(dbCmd, id, rowVersion);
    if (dbCmd.ExecuteSql(sql) == 0)
        throw new OptimisticConcurrencyException("The row was modified or deleted since the last read");
}
/// <summary>
/// Builds "DELETE ... WHERE pk = @p AND rowversion = @rowVersion", clearing any
/// previous parameters and registering both new ones on <paramref name="dbCmd"/>.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when <typeparamref name="T"/> has no row-version column.
/// </exception>
internal static string DeleteByIdSql<T>(this IDbCommand dbCmd, object id, ulong rowVersion)
{
    var dialect = dbCmd.GetDialectProvider();
    var modelDef = ModelDefinition<T>.Definition;

    dbCmd.Parameters.Clear();

    var idParam = dbCmd.CreateParameter();
    idParam.ParameterName = dialect.GetParam();
    idParam.Value = id;
    dbCmd.Parameters.Add(idParam);

    var versionField = modelDef.RowVersion;
    if (versionField == null)
        throw new InvalidOperationException(
            "Cannot use DeleteById with rowVersion for model type without a row version column");

    var versionParam = dbCmd.CreateParameter();
    versionParam.ParameterName = dialect.GetParam("rowVersion");
    versionParam.Value = rowVersion;
    dbCmd.Parameters.Add(versionParam);

    return string.Format("DELETE FROM {0} WHERE {1} = {2} AND {3} = {4}",
        dialect.GetQuotedTableName(modelDef),
        dialect.GetQuotedColumnName(modelDef.PrimaryKey.FieldName),
        idParam.ParameterName,
        dialect.GetQuotedColumnName(versionField.FieldName),
        versionParam.ParameterName);
}
/// <summary>Deletes all rows whose primary key is in <paramref name="idValues"/>.</summary>
/// <returns>Rows affected; 0 when the id list is empty.</returns>
internal static int DeleteByIds<T>(this IDbCommand dbCmd, IEnumerable idValues)
{
    var sqlIn = idValues.GetIdsInSql();
    return sqlIn == null
        ? 0
        : dbCmd.ExecuteSql(GetDeleteByIdsSql<T>(sqlIn, dbCmd.GetDialectProvider()));
}
/// <summary>
/// Builds "DELETE ... WHERE pk IN (...)". <paramref name="sqlIn"/> must already be
/// a safely-escaped id list (as produced by GetIdsInSql).
/// </summary>
internal static string GetDeleteByIdsSql<T>(string sqlIn, IOrmLiteDialectProvider dialectProvider)
{
    var modelDef = ModelDefinition<T>.Definition;
    return string.Format("DELETE FROM {0} WHERE {1} IN ({2})",
        dialectProvider.GetQuotedTableName(modelDef),
        dialectProvider.GetQuotedColumnName(modelDef.PrimaryKey.FieldName),
        sqlIn);
}
/// <summary>Deletes every row of <typeparamref name="T"/>'s table.</summary>
internal static int DeleteAll<T>(this IDbCommand dbCmd)
{
    return DeleteAll(dbCmd, typeof(T));
}
/// <summary>Deletes every row of <paramref name="tableType"/>'s table (unfiltered DELETE).</summary>
internal static int DeleteAll(this IDbCommand dbCmd, Type tableType)
{
    var sql = dbCmd.GetDialectProvider().ToDeleteStatement(tableType, null);
    return dbCmd.ExecuteSql(sql);
}
/// <summary>Deletes rows of <typeparamref name="T"/> matching a formatted SQL filter.</summary>
internal static int DeleteFmt<T>(this IDbCommand dbCmd, string sqlFilter, params object[] filterParams)
{
    return DeleteFmt(dbCmd, typeof(T), sqlFilter, filterParams);
}
/// <summary>Deletes rows of <paramref name="tableType"/> matching a formatted SQL filter.</summary>
internal static int DeleteFmt(this IDbCommand dbCmd, Type tableType, string sqlFilter, params object[] filterParams)
{
    var sql = dbCmd.GetDialectProvider().ToDeleteStatement(tableType, sqlFilter, filterParams);
    return dbCmd.ExecuteSql(sql);
}
/// <summary>
/// Inserts one row, applying the global <c>OrmLiteConfig.InsertFilter</c> first.
/// </summary>
/// <param name="selectIdentity">
/// When true, returns the newly generated identity value; otherwise the
/// rows-affected count.
/// </param>
internal static long Insert<T>(this IDbCommand dbCmd, T obj, bool selectIdentity = false)
{
    var insertFilter = OrmLiteConfig.InsertFilter;
    if (insertFilter != null)
        insertFilter(dbCmd, obj);

    var dialect = dbCmd.GetDialectProvider();
    dialect.PrepareParameterizedInsertStatement<T>(dbCmd);
    dialect.SetParameterValues<T>(dbCmd, obj);

    return selectIdentity
        ? dialect.InsertAndGetLastInsertId<T>(dbCmd)
        : dbCmd.ExecNonQuery();
}
/// <summary>Inserts each given row inside one transaction; see <c>InsertAll</c>.</summary>
internal static void Insert<T>(this IDbCommand dbCmd, params T[] objs)
{
    InsertAll(dbCmd, objs);
}
/// <summary>
/// Inserts every row in <paramref name="objs"/> with a single prepared statement,
/// applying the global InsertFilter to each row. Starts (and owns) a transaction
/// only when the command has none; commits on success, disposes it either way.
/// </summary>
internal static void InsertAll<T>(this IDbCommand dbCmd, IEnumerable<T> objs)
{
    IDbTransaction ownedTrans = null;
    try
    {
        if (dbCmd.Transaction == null)
            dbCmd.Transaction = ownedTrans = dbCmd.Connection.BeginTransaction();

        var dialect = dbCmd.GetDialectProvider();
        dialect.PrepareParameterizedInsertStatement<T>(dbCmd);

        foreach (var row in objs)
        {
            var insertFilter = OrmLiteConfig.InsertFilter;
            if (insertFilter != null)
                insertFilter(dbCmd, row);

            dialect.SetParameterValues<T>(dbCmd, row);
            dbCmd.ExecNonQuery();
        }

        if (ownedTrans != null)
            ownedTrans.Commit();
    }
    finally
    {
        if (ownedTrans != null)
            ownedTrans.Dispose();
    }
}
/// <summary>Saves (insert-or-update) each given row; see <c>SaveAll</c>.</summary>
internal static int Save<T>(this IDbCommand dbCmd, params T[] objs)
{
    return SaveAll(dbCmd, objs);
}
/// <summary>
/// Insert-or-update: inserts <paramref name="obj"/> when no row with its id exists
/// (populating an auto-increment primary key back onto the object), otherwise
/// updates the existing row. Refreshes the object's row-version field when the
/// model has one.
/// </summary>
/// <returns>true when a new row was inserted; false when an existing row was updated.</returns>
internal static bool Save<T>(this IDbCommand dbCmd, T obj)
{
    var id = obj.GetId();
    var existing = id != null ? dbCmd.SingleById<T>(id) : default(T);
    var modelDef = typeof(T).GetModelDefinition();

    if (!Equals(existing, default(T)))
    {
        // Row already present: update in place.
        dbCmd.Update(obj);
        if (modelDef.RowVersion != null)
            modelDef.RowVersion.SetValueFn(obj, dbCmd.GetRowVersion(modelDef, id));
        return false;
    }

    if (modelDef.HasAutoIncrementId)
    {
        var dialect = dbCmd.GetDialectProvider();
        var newId = dbCmd.Insert(obj, selectIdentity: true);
        // Convert the DB identity to the PK's declared type before writing it back.
        var typedId = dialect.ConvertDbValue(newId, modelDef.PrimaryKey.FieldType);
        modelDef.PrimaryKey.SetValueFn(obj, typedId);
        id = newId;
    }
    else
    {
        dbCmd.Insert(obj);
    }

    if (modelDef.RowVersion != null)
        modelDef.RowVersion.SetValueFn(obj, dbCmd.GetRowVersion(modelDef, id));
    return true;
}
/// <summary>
/// Insert-or-update for a batch: rows whose id matches an existing DB row are
/// updated; all others are inserted (auto-increment PKs are written back onto
/// the row objects). Runs inside one transaction (owned here only if the
/// command had none) and refreshes row-version fields when the model has one.
/// </summary>
/// <returns>The number of rows that were inserted (updates are not counted).</returns>
internal static int SaveAll<T>(this IDbCommand dbCmd, IEnumerable<T> objs)
{
// Materialize once: the sequence is enumerated several times below.
var saveRows = objs.ToList();
var firstRow = saveRows.FirstOrDefault();
// Empty batch (or first element is default) => nothing to do.
if (Equals(firstRow, default(T))) return 0;
var firstRowId = firstRow.GetId();
// The "unset" id value for this id type (e.g. 0 for int ids), used to
// distinguish rows that already have a real id from brand-new ones.
var defaultIdValue = firstRowId != null ? firstRowId.GetType().GetDefaultValue() : null;
// Map of id -> row for every row that carries a non-default id.
var idMap = defaultIdValue != null
? saveRows.Where(x => !defaultIdValue.Equals(x.GetId())).ToSafeDictionary(x => x.GetId())
: saveRows.Where(x => x.GetId() != null).ToSafeDictionary(x => x.GetId());
// One round trip: fetch all rows that already exist for those ids.
var existingRowsMap = dbCmd.SelectByIds<T>(idMap.Keys).ToDictionary(x => x.GetId());
var modelDef = typeof(T).GetModelDefinition();
var rowsAdded = 0;
IDbTransaction dbTrans = null;
// Only start (and own) a transaction when the caller hasn't supplied one.
if (dbCmd.Transaction == null)
dbCmd.Transaction = dbTrans = dbCmd.Connection.BeginTransaction();
try
{
foreach (var row in saveRows)
{
var id = row.GetId();
// NOTE(review): 'id != defaultIdValue' compares boxed objects by
// reference, so for value-type ids it is almost always true and the
// ContainsKey lookup decides update-vs-insert — confirm intended.
if (id != defaultIdValue && existingRowsMap.ContainsKey(id))
{
if (OrmLiteConfig.UpdateFilter != null)
OrmLiteConfig.UpdateFilter(dbCmd, row);
dbCmd.Update(row);
}
else
{
if (modelDef.HasAutoIncrementId)
{
var dialectProvider = dbCmd.GetDialectProvider();
// Insert and capture the generated identity, converting it to
// the PK's declared type before writing it back onto the row.
var newId = dbCmd.Insert(row, selectIdentity: true);
var safeId = dialectProvider.ConvertDbValue(newId, modelDef.PrimaryKey.FieldType);
modelDef.PrimaryKey.SetValueFn(row, safeId);
id = newId;
}
else
{
if (OrmLiteConfig.InsertFilter != null)
OrmLiteConfig.InsertFilter(dbCmd, row);
dbCmd.Insert(row);
}
rowsAdded++;
}
// Re-read the row version so the in-memory object stays current.
if (modelDef.RowVersion != null)
modelDef.RowVersion.SetValueFn(row, dbCmd.GetRowVersion(modelDef, id));
}
if (dbTrans != null)
dbTrans.Commit();
}
finally
{
if (dbTrans != null)
dbTrans.Dispose();
}
return rowsAdded;
}
/// <summary>
/// Saves every field of <paramref name="instance"/> marked as a reference:
/// IList&lt;Ref&gt; fields are saved as child collections (each child's FK set to
/// this instance's PK), single-object fields are saved as 1:1 references,
/// including self-referencing FKs where the instance stores the ref row's PK.
/// </summary>
internal static void SaveAllReferences<T>(this IDbCommand dbCmd, T instance)
{
var modelDef = ModelDefinition<T>.Definition;
var pkValue = modelDef.PrimaryKey.GetValue(instance);
var fieldDefs = modelDef.AllFieldDefinitionsArray.Where(x => x.IsReference);
foreach (var fieldDef in fieldDefs)
{
// Collection reference (IList<Ref>) => 1:many child rows.
var listInterface = fieldDef.FieldType.GetTypeWithGenericInterfaceOf(typeof(IList<>));
if (listInterface != null)
{
var refType = listInterface.GenericTypeArguments()[0];
var refModelDef = refType.GetModelDefinition();
var refField = modelDef.GetRefFieldDef(refModelDef, refType);
var results = (IEnumerable)fieldDef.GetValue(instance);
if (results != null)
{
// Stamp each child's FK with this instance's PK before saving.
foreach (var oRef in results)
{
refField.SetValueFn(oRef, pkValue);
}
dbCmd.CreateTypedApi(refType).SaveAll(results);
}
}
else
{
// Single-object reference => 1:1 row.
var refType = fieldDef.FieldType;
var refModelDef = refType.GetModelDefinition();
// refSelf is non-null when THIS table holds the FK to the ref table
// (self-referencing style), rather than the ref table pointing back.
var refSelf = modelDef.GetSelfRefFieldDefIfExists(refModelDef, fieldDef);
var result = fieldDef.GetValue(instance);
var refField = refSelf == null
? modelDef.GetRefFieldDef(refModelDef, refType)
: modelDef.GetRefFieldDefIfExists(refModelDef);
if (result != null)
{
if (refField != null)
refField.SetValueFn(result, pkValue);
dbCmd.CreateTypedApi(refType).Save(result);
//Save Self Table.RefTableId PK
if (refSelf != null)
{
// Write the saved ref row's PK back onto this instance and persist it.
var refPkValue = refModelDef.PrimaryKey.GetValue(result);
refSelf.SetValueFn(instance, refPkValue);
dbCmd.Update(instance);
}
}
}
}
}
/// <summary>
/// Saves the given <typeparamref name="TRef"/> rows as references of
/// <paramref name="instance"/>: stamps each ref's FK with the instance's PK,
/// saves them all, then (for self-referencing FKs) writes each saved ref's PK
/// back onto the instance and updates it.
/// </summary>
internal static void SaveReferences<T, TRef>(this IDbCommand dbCmd, T instance, params TRef[] refs)
{
    var modelDef = ModelDefinition<T>.Definition;
    var pkValue = modelDef.PrimaryKey.GetValue(instance);
    var refType = typeof(TRef);
    var refModelDef = ModelDefinition<TRef>.Definition;
    var refSelf = modelDef.GetSelfRefFieldDefIfExists(refModelDef, null);

    if (refs.Length > 0)
    {
        // The FK field lookup depends only on the model definitions, not on the
        // individual rows, so resolve it once instead of per element (the
        // original recomputed it inside the loop).
        var refField = refSelf == null
            ? modelDef.GetRefFieldDef(refModelDef, refType)
            : modelDef.GetRefFieldDefIfExists(refModelDef);

        if (refField != null)
        {
            foreach (var oRef in refs)
                refField.SetValueFn(oRef, pkValue);
        }
    }

    dbCmd.SaveAll(refs);

    //Save Self Table.RefTableId PK
    if (refSelf != null)
    {
        foreach (var oRef in refs)
        {
            var refPkValue = refModelDef.PrimaryKey.GetValue(oRef);
            refSelf.SetValueFn(instance, refPkValue);
            dbCmd.Update(instance);
        }
    }
}
// Procedures
/// <summary>Prepares and executes the stored procedure mapped to <paramref name="obj"/>.</summary>
internal static void ExecuteProcedure<T>(this IDbCommand dbCmd, T obj)
{
    dbCmd.GetDialectProvider().PrepareStoredProcedureStatement(dbCmd, obj);
    dbCmd.ExecuteNonQuery();
}
/// <summary>Reads the current row-version value for the row with the given id.</summary>
internal static ulong GetRowVersion(this IDbCommand dbCmd, ModelDefinition modelDef, object id)
{
    return dbCmd.Scalar<ulong>(RowVersionSql(dbCmd, modelDef, id));
}
/// <summary>
/// Builds "SELECT rowversion FROM table WHERE pk = @p", clearing any previous
/// parameters and registering the id parameter on <paramref name="dbCmd"/>.
/// </summary>
internal static string RowVersionSql(this IDbCommand dbCmd, ModelDefinition modelDef, object id)
{
    var dialect = dbCmd.GetDialectProvider();
    var paramName = dialect.GetParam();

    dbCmd.Parameters.Clear();
    var idParam = dbCmd.CreateParameter();
    idParam.ParameterName = paramName;
    idParam.Value = id;
    dbCmd.Parameters.Add(idParam);

    return string.Format("SELECT {0} FROM {1} WHERE {2} = {3}",
        dialect.GetRowVersionColumnName(modelDef.RowVersion),
        dialect.GetQuotedTableName(modelDef),
        dialect.GetQuotedColumnName(modelDef.PrimaryKey.FieldName),
        paramName);
}
}
}
| |
/********************************************************************************************
Copyright (c) Microsoft Corporation
All rights reserved.
Microsoft Public License:
This license governs use of the accompanying software. If you use the software, you
accept this license. If you do not accept the license, do not use the software.
1. Definitions
The terms "reproduce," "reproduction," "derivative works," and "distribution" have the
same meaning here as under U.S. copyright law.
A "contribution" is the original software, or any additions or changes to the software.
A "contributor" is any person that distributes its contribution under this license.
"Licensed patents" are a contributor's patent claims that read directly on its contribution.
2. Grant of Rights
(A) Copyright Grant- Subject to the terms of this license, including the license conditions
and limitations in section 3, each contributor grants you a non-exclusive, worldwide,
royalty-free copyright license to reproduce its contribution, prepare derivative works of
its contribution, and distribute its contribution or any derivative works that you create.
(B) Patent Grant- Subject to the terms of this license, including the license conditions
and limitations in section 3, each contributor grants you a non-exclusive, worldwide,
royalty-free license under its licensed patents to make, have made, use, sell, offer for
sale, import, and/or otherwise dispose of its contribution in the software or derivative
works of the contribution in the software.
3. Conditions and Limitations
(A) No Trademark License- This license does not grant you rights to use any contributors'
name, logo, or trademarks.
(B) If you bring a patent claim against any contributor over patents that you claim are
infringed by the software, your patent license from such contributor to the software ends
automatically.
(C) If you distribute any portion of the software, you must retain all copyright, patent,
trademark, and attribution notices that are present in the software.
(D) If you distribute any portion of the software in source code form, you may do so only
under this license by including a complete copy of this license with your distribution.
If you distribute any portion of the software in compiled or object code form, you may only
do so under a license that complies with this license.
(E) The software is licensed "as-is." You bear the risk of using it. The contributors give
no express warranties, guarantees or conditions. You may have additional consumer rights
under your local laws which this license cannot change. To the extent permitted under your
local laws, the contributors exclude the implied warranties of merchantability, fitness for
a particular purpose and non-infringement.
********************************************************************************************/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Text;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
using MSBuild = Microsoft.Build.Evaluation;
using OleConstants = Microsoft.VisualStudio.OLE.Interop.Constants;
using VsCommands = Microsoft.VisualStudio.VSConstants.VSStd97CmdID;
using VsCommands2K = Microsoft.VisualStudio.VSConstants.VSStd2KCmdID;
namespace Microsoft.VisualStudio.Project
{
/// <summary>
/// The hierarchy node that hosts the "References" virtual folder of a project
/// and implements IReferenceContainer for enumerating and adding references
/// (assembly, COM and project-to-project).
/// </summary>
[CLSCompliant(false), ComVisible(true)]
public class ReferenceContainerNode : HierarchyNode, IReferenceContainer
{
    #region fields
    internal const string ReferencesNodeVirtualName = "References";
    #endregion

    #region ctor
    public ReferenceContainerNode(ProjectNode root)
        : base(root)
    {
        this.VirtualNodeName = ReferencesNodeVirtualName;
        this.ExcludeNodeFromScc = true;
    }
    #endregion

    #region Properties
    // MSBuild item types this container knows how to materialize as nodes.
    private static string[] supportedReferenceTypes = new string[] {
        ProjectFileConstants.ProjectReference,
        ProjectFileConstants.Reference,
        ProjectFileConstants.COMReference
    };

    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1819:PropertiesShouldNotReturnArrays")]
    protected virtual string[] SupportedReferenceTypes
    {
        get { return supportedReferenceTypes; }
    }
    #endregion

    #region overridden properties
    public override int SortPriority
    {
        get
        {
            return DefaultSortOrderNode.ReferenceContainerNode;
        }
    }

    public override int MenuCommandId
    {
        get { return VsMenus.IDM_VS_CTXT_REFERENCEROOT; }
    }

    public override Guid ItemTypeGuid
    {
        get { return VSConstants.GUID_ItemType_VirtualFolder; }
    }

    public override string Url
    {
        get { return this.VirtualNodeName; }
    }

    public override string Caption
    {
        get
        {
            return SR.GetString(SR.ReferencesNodeName, CultureInfo.CurrentUICulture);
        }
    }

    private Automation.OAReferences references;
    internal override object Object
    {
        get
        {
            // Lazily created automation wrapper; cached for the node's lifetime.
            if(null == references)
            {
                references = new Automation.OAReferences(this);
            }
            return references;
        }
    }
    #endregion

    #region overridden methods
    /// <summary>
    /// Returns an instance of the automation object for ReferenceContainerNode
    /// </summary>
    /// <returns>An instance of the Automation.OAReferenceFolderItem type if succeeded</returns>
    public override object GetAutomationObject()
    {
        if(this.ProjectMgr == null || this.ProjectMgr.IsClosed)
        {
            return null;
        }

        return new Automation.OAReferenceFolderItem(this.ProjectMgr.GetAutomationObject() as Automation.OAProject, this);
    }

    /// <summary>
    /// Disable inline editing of Caption of a ReferenceContainerNode
    /// </summary>
    /// <returns>null</returns>
    public override string GetEditLabel()
    {
        return null;
    }

    public override object GetIconHandle(bool open)
    {
        return this.ProjectMgr.ImageHandler.GetIconHandle(open ? (int)ProjectNode.ImageName.OpenReferenceFolder : (int)ProjectNode.ImageName.ReferenceFolder);
    }

    /// <summary>
    /// References node cannot be dragged.
    /// </summary>
    /// <returns>null (nothing to put on the clipboard).</returns>
    protected internal override StringBuilder PrepareSelectedNodesForClipBoard()
    {
        return null;
    }

    /// <summary>
    /// Not supported.
    /// </summary>
    protected override int ExcludeFromProject()
    {
        return (int)OleConstants.OLECMDERR_E_NOTSUPPORTED;
    }

    protected override int QueryStatusOnNode(Guid cmdGroup, uint cmd, IntPtr pCmdText, ref QueryStatusResult result)
    {
        if(cmdGroup == VsMenus.guidStandardCommandSet97)
        {
            switch((VsCommands)cmd)
            {
                case VsCommands.AddNewItem:
                case VsCommands.AddExistingItem:
                    result |= QueryStatusResult.SUPPORTED | QueryStatusResult.ENABLED;
                    return VSConstants.S_OK;
            }
        }
        else if(cmdGroup == VsMenus.guidStandardCommandSet2K)
        {
            if((VsCommands2K)cmd == VsCommands2K.ADDREFERENCE)
            {
                result |= QueryStatusResult.SUPPORTED | QueryStatusResult.ENABLED;
                return VSConstants.S_OK;
            }
        }
        else
        {
            return (int)OleConstants.OLECMDERR_E_UNKNOWNGROUP;
        }
        return base.QueryStatusOnNode(cmdGroup, cmd, pCmdText, ref result);
    }

    protected override int ExecCommandOnNode(Guid cmdGroup, uint cmd, uint nCmdexecopt, IntPtr pvaIn, IntPtr pvaOut)
    {
        if(cmdGroup == VsMenus.guidStandardCommandSet2K)
        {
            switch((VsCommands2K)cmd)
            {
                case VsCommands2K.ADDREFERENCE:
                    return this.ProjectMgr.AddProjectReference();
                case VsCommands2K.ADDWEBREFERENCE:
                    return this.ProjectMgr.AddWebReference();
            }
        }
        return base.ExecCommandOnNode(cmdGroup, cmd, nCmdexecopt, pvaIn, pvaOut);
    }

    protected override bool CanDeleteItem(__VSDELETEITEMOPERATION deleteOperation)
    {
        return false;
    }

    /// <summary>
    /// Defines whether this node is valid node for painting the references icon.
    /// </summary>
    /// <returns></returns>
    protected override bool CanShowDefaultIcon()
    {
        return !String.IsNullOrEmpty(this.VirtualNodeName);
    }
    #endregion

    #region IReferenceContainer
    public IList<ReferenceNode> EnumReferences()
    {
        List<ReferenceNode> refs = new List<ReferenceNode>();
        for(HierarchyNode node = this.FirstChild; node != null; node = node.NextSibling)
        {
            ReferenceNode refNode = node as ReferenceNode;
            if(refNode != null)
            {
                refs.Add(refNode);
            }
        }

        return refs;
    }

    /// <summary>
    /// Adds references to this container from a MSBuild project.
    /// </summary>
    public void LoadReferencesFromBuildProject(MSBuild.Project buildProject)
    {
        foreach(string referenceType in SupportedReferenceTypes)
        {
            IEnumerable<MSBuild.ProjectItem> referencesGroup = this.ProjectMgr.BuildProject.GetItems(referenceType);

            bool isAssemblyReference = referenceType == ProjectFileConstants.Reference;
            // If the project was loaded for browsing we should still create the nodes but as not resolved.
            if(isAssemblyReference && this.ProjectMgr.Build(MsBuildTarget.ResolveAssemblyReferences) != MSBuildResult.Successful)
            {
                continue;
            }

            foreach (MSBuild.ProjectItem item in referencesGroup)
            {
                ProjectElement element = new ProjectElement(this.ProjectMgr, item, false);

                ReferenceNode node = CreateReferenceNode(referenceType, element);

                if(node != null)
                {
                    // Make sure that we do not want to add the item twice to the ui hierarchy
                    // We are using here the UI representation of the Node namely the Caption to find that out, in order to
                    // avoid different representation problems.
                    // Example :<Reference Include="EnvDTE80, Version=8.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
                    //          <Reference Include="EnvDTE80" />
                    bool found = false;

                    for(HierarchyNode n = this.FirstChild; n != null && !found; n = n.NextSibling)
                    {
                        if(String.Compare(n.Caption, node.Caption, StringComparison.OrdinalIgnoreCase) == 0)
                        {
                            found = true;
                        }
                    }

                    if(!found)
                    {
                        this.AddChild(node);
                    }
                }
            }
        }
    }

    /// <summary>
    /// Adds a reference to this container using the selector data structure to identify it.
    /// </summary>
    /// <param name="selectorData">data describing selected component</param>
    /// <param name="wrapperTool">optional wrapper tool for COM references</param>
    /// <returns>Reference in case a valid reference node has been created. Otherwise null</returns>
    public ReferenceNode AddReferenceFromSelectorData(VSCOMPONENTSELECTORDATA selectorData, string wrapperTool = null)
    {
        //Make sure we can edit the project file
        if(!this.ProjectMgr.QueryEditProjectFile(false))
        {
            throw Marshal.GetExceptionForHR(VSConstants.OLE_E_PROMPTSAVECANCELLED);
        }

        //Create the reference node
        ReferenceNode node = null;
        try
        {
            node = CreateReferenceNode(selectorData, wrapperTool);
        }
        catch(ArgumentException)
        {
            // Some selector data was not valid.
        }

        //Add the reference node to the project if we have a valid reference node
        if(node != null)
        {
            // Does such a reference already exist in the project?
            // BUGFIX: this check used to run before the null guard, so invalid
            // selector data (node == null) caused a NullReferenceException.
            ReferenceNode existingNode;
            if(node.IsAlreadyAdded(out existingNode))
            {
                return existingNode;
            }

            // This call will find if the reference is in the project and, in this case
            // will not add it again, so the parent node will not be set.
            node.AddReference();
            if(null == node.Parent)
            {
                // The reference was not added, so we can not return this item because it
                // is not inside the project.
                return null;
            }
        }

        return node;
    }
    #endregion

    #region virtual methods
    protected virtual ReferenceNode CreateReferenceNode(string referenceType, ProjectElement element)
    {
        ReferenceNode node = null;
        if(referenceType == ProjectFileConstants.COMReference)
        {
            node = this.CreateComReferenceNode(element);
        }
        else if(referenceType == ProjectFileConstants.Reference)
        {
            node = this.CreateAssemblyReferenceNode(element);
        }
        else if(referenceType == ProjectFileConstants.ProjectReference)
        {
            node = this.CreateProjectReferenceNode(element);
        }

        return node;
    }

    protected virtual ReferenceNode CreateReferenceNode(VSCOMPONENTSELECTORDATA selectorData, string wrapperTool = null)
    {
        ReferenceNode node = null;
        switch(selectorData.type)
        {
            case VSCOMPONENTTYPE.VSCOMPONENTTYPE_Project:
                node = this.CreateProjectReferenceNode(selectorData);
                break;
            case VSCOMPONENTTYPE.VSCOMPONENTTYPE_File:
            // This is the case for managed assembly
            case VSCOMPONENTTYPE.VSCOMPONENTTYPE_ComPlus:
                node = this.CreateFileComponent(selectorData, wrapperTool);
                break;
            case VSCOMPONENTTYPE.VSCOMPONENTTYPE_Com2:
                node = this.CreateComReferenceNode(selectorData, wrapperTool);
                break;
        }

        return node;
    }
    #endregion

    #region Helper functions to add references
    /// <summary>
    /// Creates a project reference node given an existing project element.
    /// </summary>
    protected virtual ProjectReferenceNode CreateProjectReferenceNode(ProjectElement element)
    {
        return new ProjectReferenceNode(this.ProjectMgr, element);
    }

    /// <summary>
    /// Create a Project to Project reference given a VSCOMPONENTSELECTORDATA structure
    /// </summary>
    protected virtual ProjectReferenceNode CreateProjectReferenceNode(VSCOMPONENTSELECTORDATA selectorData)
    {
        return new ProjectReferenceNode(this.ProjectMgr, selectorData.bstrTitle, selectorData.bstrFile, selectorData.bstrProjRef);
    }

    /// <summary>
    /// Creates an assembly or com reference node given a selector data.
    /// </summary>
    protected virtual ReferenceNode CreateFileComponent(VSCOMPONENTSELECTORDATA selectorData, string wrapperTool = null)
    {
        if(null == selectorData.bstrFile)
        {
            throw new ArgumentNullException("selectorData");
        }

        // We have a path to a file, it could be anything
        // First see if it is a managed assembly
        bool tryToCreateAnAssemblyReference = true;
        if(File.Exists(selectorData.bstrFile))
        {
            try
            {
                // We should not load the assembly in the current appdomain.
                // If we do not do it like that and we load the assembly in the current appdomain then the assembly cannot be unloaded again.
                // The following problems might arise in that case.
                // 1. Assume that a user is extending the MPF and his project is creating a managed assembly dll.
                // 2. The user opens VS and creates a project and builds it.
                // 3. Then the user opens VS creates another project and adds a reference to the previously built assembly. This will load the assembly in the appdomain had we been using Assembly.ReflectionOnlyLoadFrom.
                // 4. Then he goes back to the first project modifies it an builds it. A build error is issued that the assembly is used.

                // GetAssemblyName is assured not to load the assembly.
                tryToCreateAnAssemblyReference = (AssemblyName.GetAssemblyName(selectorData.bstrFile) != null);
            }
            catch(BadImageFormatException)
            {
                // We have found the file and it is not a .NET assembly; no need to try to
                // load it again.
                tryToCreateAnAssemblyReference = false;
            }
            catch(FileLoadException)
            {
                // We must still try to load from here because this exception is thrown if we want
                // to add the same assembly reference from different locations.
                tryToCreateAnAssemblyReference = true;
            }
        }

        ReferenceNode node = null;

        if(tryToCreateAnAssemblyReference)
        {
            // This might be a candidate for an assembly reference node. Try to load it.
            // CreateAssemblyReferenceNode will suppress BadImageFormatException if the node cannot be created.
            node = this.CreateAssemblyReferenceNode(selectorData.bstrFile);
        }

        // If no node has been created try to create a com reference node.
        if(node == null)
        {
            if(!File.Exists(selectorData.bstrFile))
            {
                return null;
            }
            node = this.CreateComReferenceNode(selectorData, wrapperTool);
        }

        return node;
    }

    /// <summary>
    /// Creates an assembly reference node from a project element.
    /// Returns null (after tracing) when the element cannot be resolved.
    /// </summary>
    protected virtual AssemblyReferenceNode CreateAssemblyReferenceNode(ProjectElement element)
    {
        AssemblyReferenceNode node = null;
        try
        {
            node = new AssemblyReferenceNode(this.ProjectMgr, element);
        }
        catch(ArgumentNullException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(FileNotFoundException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(BadImageFormatException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(FileLoadException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(System.Security.SecurityException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }

        return node;
    }

    /// <summary>
    /// Creates an assembly reference node from a file path.
    /// Returns null (after tracing) when the file cannot be resolved as an assembly.
    /// </summary>
    protected virtual AssemblyReferenceNode CreateAssemblyReferenceNode(string fileName)
    {
        AssemblyReferenceNode node = null;
        try
        {
            node = new AssemblyReferenceNode(this.ProjectMgr, fileName);
        }
        catch(ArgumentNullException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(FileNotFoundException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(BadImageFormatException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(FileLoadException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }
        catch(System.Security.SecurityException e)
        {
            Trace.WriteLine("Exception : " + e.Message);
        }

        return node;
    }

    /// <summary>
    /// Creates a com reference node from the project element.
    /// </summary>
    protected virtual ComReferenceNode CreateComReferenceNode(ProjectElement reference)
    {
        return new ComReferenceNode(this.ProjectMgr, reference);
    }

    /// <summary>
    /// Creates a com reference node from a selector data.
    /// </summary>
    protected virtual ComReferenceNode CreateComReferenceNode(Microsoft.VisualStudio.Shell.Interop.VSCOMPONENTSELECTORDATA selectorData, string wrapperTool = null)
    {
        ComReferenceNode node = new ComReferenceNode(this.ProjectMgr, selectorData);
        return node;
    }
    #endregion
}
}
| |
namespace Azure.Quantum
{
// Client options for the Azure Quantum job client: pins the REST API version
// sent on the wire. NOTE(review): this looks like an auto-generated public API
// surface listing (empty bodies are intentional stubs) — confirm before editing.
public partial class QuantumJobClientOptions : Azure.Core.ClientOptions
{
// Most recent service version known to this library.
public const Azure.Quantum.QuantumJobClientOptions.ServiceVersion LatestVersion = Azure.Quantum.QuantumJobClientOptions.ServiceVersion.V1Preview;
// Defaults to the latest (and currently only) service version.
public QuantumJobClientOptions(Azure.Quantum.QuantumJobClientOptions.ServiceVersion version = Azure.Quantum.QuantumJobClientOptions.ServiceVersion.V1Preview) { }
// Supported service API versions.
public enum ServiceVersion
{
V1Preview = 1,
}
}
}
// Generated public API surface for the Azure Quantum Jobs client (stub bodies).
namespace Azure.Quantum.Jobs
{
    // Client for submitting, querying, and cancelling Azure Quantum jobs.
    public partial class QuantumJobClient
    {
        protected QuantumJobClient() { }
        public QuantumJobClient(string subscriptionId, string resourceGroupName, string workspaceName, string location, Azure.Core.TokenCredential credential = null, Azure.Quantum.QuantumJobClientOptions options = null) { }
        // Job lifecycle: cancel / create / get, each with a sync and async form.
        public virtual Azure.Response CancelJob(string jobId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual System.Threading.Tasks.Task<Azure.Response> CancelJobAsync(string jobId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.Response<Azure.Quantum.Jobs.Models.JobDetails> CreateJob(string jobId, Azure.Quantum.Jobs.Models.JobDetails job, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Quantum.Jobs.Models.JobDetails>> CreateJobAsync(string jobId, Azure.Quantum.Jobs.Models.JobDetails job, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.Response<Azure.Quantum.Jobs.Models.JobDetails> GetJob(string jobId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Quantum.Jobs.Models.JobDetails>> GetJobAsync(string jobId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        // Paged listings of jobs, provider status, and quotas.
        public virtual Azure.Pageable<Azure.Quantum.Jobs.Models.JobDetails> GetJobs(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.AsyncPageable<Azure.Quantum.Jobs.Models.JobDetails> GetJobsAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.Pageable<Azure.Quantum.Jobs.Models.ProviderStatus> GetProviderStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.AsyncPageable<Azure.Quantum.Jobs.Models.ProviderStatus> GetProviderStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.Pageable<Azure.Quantum.Jobs.Models.QuantumJobQuota> GetQuotas(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual Azure.AsyncPageable<Azure.Quantum.Jobs.Models.QuantumJobQuota> GetQuotasAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        // SAS URI for uploading/downloading job data blobs.
        public virtual Azure.Response<Azure.Quantum.Jobs.Models.SasUriResponse> GetStorageSasUri(Azure.Quantum.Jobs.Models.BlobDetails blobDetails, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
        public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Quantum.Jobs.Models.SasUriResponse>> GetStorageSasUriAsync(Azure.Quantum.Jobs.Models.BlobDetails blobDetails, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
    }
}
// Generated public API surface for the Azure Quantum Jobs model types (stub bodies).
namespace Azure.Quantum.Jobs.Models
{
    // Identifies a storage blob (container plus optional blob name) for SAS requests.
    public partial class BlobDetails
    {
        public BlobDetails(string containerName) { }
        public string BlobName { get { throw null; } set { } }
        public string ContainerName { get { throw null; } }
    }
    // Extensible "string enum": scope of a quota dimension (subscription vs workspace).
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct DimensionScope : System.IEquatable<Azure.Quantum.Jobs.Models.DimensionScope>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public DimensionScope(string value) { throw null; }
        public static Azure.Quantum.Jobs.Models.DimensionScope Subscription { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.DimensionScope Workspace { get { throw null; } }
        public bool Equals(Azure.Quantum.Jobs.Models.DimensionScope other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Quantum.Jobs.Models.DimensionScope left, Azure.Quantum.Jobs.Models.DimensionScope right) { throw null; }
        public static implicit operator Azure.Quantum.Jobs.Models.DimensionScope (string value) { throw null; }
        public static bool operator !=(Azure.Quantum.Jobs.Models.DimensionScope left, Azure.Quantum.Jobs.Models.DimensionScope right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Error code/message pair returned for a failed job (output-only model).
    public partial class ErrorData
    {
        internal ErrorData() { }
        public string Code { get { throw null; } }
        public string Message { get { throw null; } }
    }
    // Full description of a quantum job; settable members are inputs, getter-only
    // members are populated by the service.
    public partial class JobDetails
    {
        public JobDetails(string containerUri, string inputDataFormat, string providerId, string target) { }
        public System.DateTimeOffset? BeginExecutionTime { get { throw null; } }
        public System.DateTimeOffset? CancellationTime { get { throw null; } }
        public string ContainerUri { get { throw null; } set { } }
        public System.DateTimeOffset? CreationTime { get { throw null; } }
        public System.DateTimeOffset? EndExecutionTime { get { throw null; } }
        public Azure.Quantum.Jobs.Models.ErrorData ErrorData { get { throw null; } }
        public string Id { get { throw null; } set { } }
        public string InputDataFormat { get { throw null; } set { } }
        public string InputDataUri { get { throw null; } set { } }
        public object InputParams { get { throw null; } set { } }
        public System.Collections.Generic.IDictionary<string, string> Metadata { get { throw null; } set { } }
        public string Name { get { throw null; } set { } }
        public string OutputDataFormat { get { throw null; } set { } }
        public string OutputDataUri { get { throw null; } set { } }
        public string ProviderId { get { throw null; } set { } }
        public Azure.Quantum.Jobs.Models.JobStatus? Status { get { throw null; } }
        public string Target { get { throw null; } set { } }
    }
    // Extensible "string enum": lifecycle state of a job.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct JobStatus : System.IEquatable<Azure.Quantum.Jobs.Models.JobStatus>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public JobStatus(string value) { throw null; }
        public static Azure.Quantum.Jobs.Models.JobStatus Cancelled { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.JobStatus Executing { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.JobStatus Failed { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.JobStatus Succeeded { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.JobStatus Waiting { get { throw null; } }
        public bool Equals(Azure.Quantum.Jobs.Models.JobStatus other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Quantum.Jobs.Models.JobStatus left, Azure.Quantum.Jobs.Models.JobStatus right) { throw null; }
        public static implicit operator Azure.Quantum.Jobs.Models.JobStatus (string value) { throw null; }
        public static bool operator !=(Azure.Quantum.Jobs.Models.JobStatus left, Azure.Quantum.Jobs.Models.JobStatus right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Extensible "string enum": billing meter period for a quota.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct MeterPeriod : System.IEquatable<Azure.Quantum.Jobs.Models.MeterPeriod>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public MeterPeriod(string value) { throw null; }
        public static Azure.Quantum.Jobs.Models.MeterPeriod Monthly { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.MeterPeriod None { get { throw null; } }
        public bool Equals(Azure.Quantum.Jobs.Models.MeterPeriod other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Quantum.Jobs.Models.MeterPeriod left, Azure.Quantum.Jobs.Models.MeterPeriod right) { throw null; }
        public static implicit operator Azure.Quantum.Jobs.Models.MeterPeriod (string value) { throw null; }
        public static bool operator !=(Azure.Quantum.Jobs.Models.MeterPeriod left, Azure.Quantum.Jobs.Models.MeterPeriod right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Extensible "string enum": availability of a provider.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct ProviderAvailability : System.IEquatable<Azure.Quantum.Jobs.Models.ProviderAvailability>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public ProviderAvailability(string value) { throw null; }
        public static Azure.Quantum.Jobs.Models.ProviderAvailability Available { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.ProviderAvailability Degraded { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.ProviderAvailability Unavailable { get { throw null; } }
        public bool Equals(Azure.Quantum.Jobs.Models.ProviderAvailability other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Quantum.Jobs.Models.ProviderAvailability left, Azure.Quantum.Jobs.Models.ProviderAvailability right) { throw null; }
        public static implicit operator Azure.Quantum.Jobs.Models.ProviderAvailability (string value) { throw null; }
        public static bool operator !=(Azure.Quantum.Jobs.Models.ProviderAvailability left, Azure.Quantum.Jobs.Models.ProviderAvailability right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Current availability of a provider and its execution targets (output-only).
    public partial class ProviderStatus
    {
        internal ProviderStatus() { }
        public Azure.Quantum.Jobs.Models.ProviderAvailability? CurrentAvailability { get { throw null; } }
        public string Id { get { throw null; } }
        public System.Collections.Generic.IReadOnlyList<Azure.Quantum.Jobs.Models.TargetStatus> Targets { get { throw null; } }
    }
    // Quota usage/limits for one dimension of a provider (output-only).
    public partial class QuantumJobQuota
    {
        internal QuantumJobQuota() { }
        public string Dimension { get { throw null; } }
        public float? Holds { get { throw null; } }
        public float? Limit { get { throw null; } }
        public Azure.Quantum.Jobs.Models.MeterPeriod? Period { get { throw null; } }
        public string ProviderId { get { throw null; } }
        public Azure.Quantum.Jobs.Models.DimensionScope? Scope { get { throw null; } }
        public float? Utilization { get { throw null; } }
    }
    // Factory for constructing output-only models (e.g. for mocking in tests).
    public static partial class QuantumModelFactory
    {
        public static Azure.Quantum.Jobs.Models.ErrorData ErrorData(string code = null, string message = null) { throw null; }
        public static Azure.Quantum.Jobs.Models.JobDetails JobDetails(string id = null, string name = null, string containerUri = null, string inputDataUri = null, string inputDataFormat = null, object inputParams = null, string providerId = null, string target = null, System.Collections.Generic.IDictionary<string, string> metadata = null, string outputDataUri = null, string outputDataFormat = null, Azure.Quantum.Jobs.Models.JobStatus? status = default(Azure.Quantum.Jobs.Models.JobStatus?), System.DateTimeOffset? creationTime = default(System.DateTimeOffset?), System.DateTimeOffset? beginExecutionTime = default(System.DateTimeOffset?), System.DateTimeOffset? endExecutionTime = default(System.DateTimeOffset?), System.DateTimeOffset? cancellationTime = default(System.DateTimeOffset?), Azure.Quantum.Jobs.Models.ErrorData errorData = null) { throw null; }
        public static Azure.Quantum.Jobs.Models.ProviderStatus ProviderStatus(string id = null, Azure.Quantum.Jobs.Models.ProviderAvailability? currentAvailability = default(Azure.Quantum.Jobs.Models.ProviderAvailability?), System.Collections.Generic.IEnumerable<Azure.Quantum.Jobs.Models.TargetStatus> targets = null) { throw null; }
        public static Azure.Quantum.Jobs.Models.QuantumJobQuota QuantumJobQuota(string dimension = null, Azure.Quantum.Jobs.Models.DimensionScope? scope = default(Azure.Quantum.Jobs.Models.DimensionScope?), string providerId = null, float? utilization = default(float?), float? holds = default(float?), float? limit = default(float?), Azure.Quantum.Jobs.Models.MeterPeriod? period = default(Azure.Quantum.Jobs.Models.MeterPeriod?)) { throw null; }
        public static Azure.Quantum.Jobs.Models.SasUriResponse SasUriResponse(string sasUri = null) { throw null; }
        public static Azure.Quantum.Jobs.Models.TargetStatus TargetStatus(string id = null, Azure.Quantum.Jobs.Models.TargetAvailability? currentAvailability = default(Azure.Quantum.Jobs.Models.TargetAvailability?), long? averageQueueTime = default(long?), string statusPage = null) { throw null; }
    }
    // SAS URI returned by GetStorageSasUri (output-only).
    public partial class SasUriResponse
    {
        internal SasUriResponse() { }
        public string SasUri { get { throw null; } }
    }
    // Extensible "string enum": availability of an execution target.
    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
    public readonly partial struct TargetAvailability : System.IEquatable<Azure.Quantum.Jobs.Models.TargetAvailability>
    {
        private readonly object _dummy;
        private readonly int _dummyPrimitive;
        public TargetAvailability(string value) { throw null; }
        public static Azure.Quantum.Jobs.Models.TargetAvailability Available { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.TargetAvailability Degraded { get { throw null; } }
        public static Azure.Quantum.Jobs.Models.TargetAvailability Unavailable { get { throw null; } }
        public bool Equals(Azure.Quantum.Jobs.Models.TargetAvailability other) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override bool Equals(object obj) { throw null; }
        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
        public override int GetHashCode() { throw null; }
        public static bool operator ==(Azure.Quantum.Jobs.Models.TargetAvailability left, Azure.Quantum.Jobs.Models.TargetAvailability right) { throw null; }
        public static implicit operator Azure.Quantum.Jobs.Models.TargetAvailability (string value) { throw null; }
        public static bool operator !=(Azure.Quantum.Jobs.Models.TargetAvailability left, Azure.Quantum.Jobs.Models.TargetAvailability right) { throw null; }
        public override string ToString() { throw null; }
    }
    // Status of a single execution target within a provider (output-only).
    public partial class TargetStatus
    {
        internal TargetStatus() { }
        public long? AverageQueueTime { get { throw null; } }
        public Azure.Quantum.Jobs.Models.TargetAvailability? CurrentAvailability { get { throw null; } }
        public string Id { get { throw null; } }
        public string StatusPage { get { throw null; } }
    }
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Net;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using Microsoft.Protocols.TestTools.StackSdk.FileAccessService.Cifs;
namespace Microsoft.Protocols.TestTools.StackSdk.FileAccessService.Smb
{
/// <summary>
/// The context (state machine) of the SMB client runtime. Tracks connections,
/// sessions, signing state, and the endpoint-to-connection-id mapping, and
/// updates that state as request/response packets flow through the transport.
/// </summary>
public class SmbClientContext : CifsClientContext
{
    #region Properties for sdk

    /// <summary>
    /// the smb client that owns this context
    /// </summary>
    private SmbClient smbClient;

    /// <summary>
    /// the security package name, as read from config
    /// </summary>
    private SmbSecurityPackage securityPackage;

    /// <summary>
    /// maps an IPEndPoint to its connection id; ids are assigned sequentially on first use
    /// </summary>
    private Dictionary<IPEndPoint, int> endpointToConnectionIDMap = new Dictionary<IPEndPoint, int>();

    /// <summary>
    /// the security package name, as read from config
    /// </summary>
    public SmbSecurityPackage SecurityPackage
    {
        get
        {
            return this.securityPackage;
        }
    }

    /// <summary>
    /// set the security package
    /// </summary>
    /// <param name="smbSecurityPackage">the new value of security package</param>
    internal void SetSecurityPackage(SmbSecurityPackage smbSecurityPackage)
    {
        this.securityPackage = smbSecurityPackage;
    }

    #endregion

    #region Constructor

    /// <summary>
    /// default constructor.
    /// </summary>
    /// <param name="smbClient">the smb client this context belongs to</param>
    internal SmbClientContext(SmbClient smbClient)
        : base()
    {
        this.smbClient = smbClient;
    }

    #endregion

    #region Generate The Map Between IpEndPoint with ConnectionId

    /// <summary>
    /// Get the connection id for an endpoint. Unknown endpoints are registered
    /// and assigned the next sequential id; null yields -1.
    /// </summary>
    /// <param name="endpoint">the endpoint to look up</param>
    internal int GetConnectionID(IPEndPoint endpoint)
    {
        // the map is shared across threads, so all access is serialized on it
        lock (endpointToConnectionIDMap)
        {
            if (endpoint == null)
            {
                return -1;
            }
            if (!endpointToConnectionIDMap.ContainsKey(endpoint))
            {
                // mark the endpoint with its index (ids are allocated in insertion order)
                endpointToConnectionIDMap[endpoint] = endpointToConnectionIDMap.Count;
            }
            return endpointToConnectionIDMap[endpoint];
        }
    }

    #endregion

    #region Overrided Behavior

    /// <summary>
    /// this function will be invoked every time a packet is sent or received. all logics about
    /// Client' states will be implemented here.
    /// </summary>
    /// <param name="connection">the connection object.</param>
    /// <param name="packet">the sent or received packet in stack transport.</param>
    protected override void UpdateRoleContext(CifsClientPerConnection connection, SmbPacket packet)
    {
        // Do nothing if no connection is found or the packet is not SmbPacket:
        if (connection == null || packet == null)
        {
            return;
        }
        // request packet:
        if (packet.PacketType == SmbPacketType.BatchedRequest
            || packet.PacketType == SmbPacketType.SingleRequest)
        {
            RequestPacketUpdateRoleContext(connection, packet, false);
        }
        // response packet: try the SMB-specific update first; if the packet is not
        // one of the specially-handled commands, fall back to the base (CIFS) update.
        else if (packet.PacketType == SmbPacketType.BatchedResponse
            || packet.PacketType == SmbPacketType.SingleResponse)
        {
            if (!this.SmbUpdateContextWithResponsePacket(connection as SmbClientConnection, packet))
            {
                SmbPacket request = this.GetOutstandingRequest(connection.ConnectionId, (ulong)packet.SmbHeader.Mid);
                ResponsePacketUpdateRoleContext(connection, request, packet);
            }
            ResponsePacketUpdateRoleContextRegular(connection, packet);
        }
        else
        {
            // Do nothing if neither request nor response.
            // No exception is thrown here because UpdateRoleContext is not responsible for checking the
            // invalidation of the packet.
        }
    }

    /// <summary>
    /// update the context, using the single response packet.
    /// if this method failed to update, return false, the base method must invoked.
    /// Only SMB_COM_NEGOTIATE and SMB_COM_SESSION_SETUP_ANDX responses with extended
    /// security are handled here; everything else returns false.
    /// </summary>
    /// <param name="connection">the connection of client.</param>
    /// <param name="response">the response packet.</param>
    [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
    private bool SmbUpdateContextWithResponsePacket(SmbClientConnection connection, SmbPacket response)
    {
        if (response == null)
        {
            return false;
        }
        int connectionId = connection.ConnectionId;
        SmbHeader smbHeader = response.SmbHeader;
        // only process the response packet.
        if (response.PacketType != SmbPacketType.BatchedResponse
            && response.PacketType != SmbPacketType.SingleResponse)
        {
            return false;
        }
        // packet status
        SmbStatus packetStatus = (SmbStatus)smbHeader.Status;
        // filter error packet; MORE_PROCESSING_REQUIRED (multi-leg auth) and
        // BUFFER_OVERFLOW are treated as non-fatal and still update the context.
        if (packetStatus != SmbStatus.STATUS_SUCCESS &&
            packetStatus != SmbStatus.STATUS_MORE_PROCESSING_REQUIRED &&
            packetStatus != SmbStatus.STATUS_BUFFER_OVERFLOW)
        {
            return false;
        }
        // process each special command
        switch (smbHeader.Command)
        {
            #region Negotiate Response
            case SmbCommand.SMB_COM_NEGOTIATE:
                // implicit ntlm, decode using cifs sdk.
                if (!smbClient.Capability.IsSupportsExtendedSecurity)
                {
                    return false;
                }
                // down cast to negotiate response packet.
                // NOTE(review): 'as' cast is not null-checked before use — presumably
                // the decoder guarantees the packet type for this command; confirm.
                SmbNegotiateResponsePacket negotiate = response as SmbNegotiateResponsePacket;
                // set negotiate flag
                connection.NegotiateSent = true;
                #region update security mode
                SecurityModes securityModes = negotiate.SmbParameters.SecurityMode;
                // ENABLED takes precedence over REQUIRED in this mapping
                if (SecurityModes.NEGOTIATE_SECURITY_SIGNATURES_ENABLED
                    == (securityModes & SecurityModes.NEGOTIATE_SECURITY_SIGNATURES_ENABLED))
                {
                    connection.ServerSigningState = SignState.ENABLED;
                }
                else if (SecurityModes.NEGOTIATE_SECURITY_SIGNATURES_REQUIRED
                    == (securityModes & SecurityModes.NEGOTIATE_SECURITY_SIGNATURES_REQUIRED))
                {
                    connection.ServerSigningState = SignState.REQUIRED;
                }
                else
                {
                    connection.ServerSigningState = SignState.DISABLED;
                }
                // user-level vs share-level security
                if (SecurityModes.NEGOTIATE_USER_SECURITY
                    == (securityModes & SecurityModes.NEGOTIATE_USER_SECURITY))
                {
                    connection.UsesSharePasswords = false;
                }
                else
                {
                    connection.UsesSharePasswords = true;
                }
                if (SecurityModes.NEGOTIATE_ENCRYPT_PASSWORDS
                    == (securityModes & SecurityModes.NEGOTIATE_ENCRYPT_PASSWORDS))
                {
                    connection.IsClientEncryptPasswords = true;
                }
                else
                {
                    connection.IsClientEncryptPasswords = false;
                }
                // update IsSignActive using the combination of the client's
                // MessageSigningPolicy and the connection's ServerSigningState
                smbClient.Context.UpdateSigningActive(connection);
                #endregion
                #region update server capabilities
                connection.ServerCapabilities = (Capabilities)negotiate.SmbParameters.Capabilities;
                if (Capabilities.CAP_INFOLEVEL_PASSTHRU
                    == (connection.ServerCapabilities & Capabilities.CAP_INFOLEVEL_PASSTHRU))
                {
                    smbClient.Capability.IsUsePathThrough = true;
                }
                #endregion
                #region update maxbuffersize
                connection.MaxBufferSize = negotiate.SmbParameters.MaxBufferSize;
                #endregion
                this.AddOrUpdateConnection(connection);
                break;
            #endregion
            #region Session Setup Response
            case SmbCommand.SMB_COM_SESSION_SETUP_ANDX:
                // implicit ntlm, decode using cifs sdk.
                if (!smbClient.Capability.IsSupportsExtendedSecurity)
                {
                    return false;
                }
                // the session to operate on.
                SmbClientSession session = null;
                // down-cast the packet
                SmbSessionSetupAndxResponsePacket packet = response as SmbSessionSetupAndxResponsePacket;
                // if session exists, use it (copy-construct so the stored one is replaced below).
                if (this.GetSession(connectionId, smbHeader.Uid) != null)
                {
                    session = new SmbClientSession(this.GetSession(connectionId, smbHeader.Uid));
                }
                else
                {
                    session = new SmbClientSession();
                }
                // if success, update context and session key.
                if (packetStatus == SmbStatus.STATUS_SUCCESS)
                {
                    // if spnego, the SessionKey is null and the SecurityBlob from server contains data;
                    // in this situation, need to initialize the SecurityBlob of server to generate the SessionKey
                    if (connection.GssApi.SessionKey == null
                        && packet.SecurityBlob != null && packet.SecurityBlob.Length > 0)
                    {
                        connection.GssApi.Initialize(packet.SecurityBlob);
                    }
                    // get session key and store in the context
                    session.SessionKey = connection.GssApi.SessionKey;
                    // reset the gss api of connection (authentication is complete)
                    connection.GssApi = null;
                    // reset the securityblob when success
                    packet.SecurityBlob = null;
                }
                // update the security blob from server (null after success, per above)
                connection.SecurityBlob = packet.SecurityBlob;
                this.AddOrUpdateConnection(connection);
                // update session
                session.SessionUid = smbHeader.Uid;
                session.ConnectionId = connectionId;
                this.AddOrUpdateSession(session);
                break;
            #endregion
            default:
                // not a specially-handled command; let the caller fall back to the base update.
                return false;
        }
        return true;
    }

    #endregion

    #region Common Methods

    /// <summary>
    /// get the current connection. Returns null unless exactly one connection exists.
    /// </summary>
    public SmbClientConnection Connection
    {
        get
        {
            // get all connections
            ReadOnlyCollection<CifsClientPerConnection> connections = GetConnections();
            // only meaningful when there is a single connection
            if (connections == null || connections.Count != 1)
            {
                return null;
            }
            return connections[0] as SmbClientConnection;
        }
    }

    /// <summary>
    /// get the session in current connection
    /// </summary>
    /// <param name="sessionUid">the id of session</param>
    /// <returns>the session object, or null when there is no current connection</returns>
    public SmbClientSession GetSession(ushort sessionUid)
    {
        SmbClientConnection connection = Connection;
        if (connection == null)
        {
            return null;
        }
        return new SmbClientSession(GetSession(connection.ConnectionId, sessionUid));
    }

    /// <summary>
    /// get the treeconnect in current connection
    /// </summary>
    /// <param name="tid">the id of treeconnect</param>
    /// <returns>the treeconnect object, or null when not found</returns>
    public SmbClientTreeConnect GetTreeConnect(ushort tid)
    {
        SmbClientConnection connection = Connection;
        if (connection == null)
        {
            return null;
        }
        // linear scan over the connection's tree connects
        ReadOnlyCollection<CifsClientPerTreeConnect> treeconnects = GetTreeConnects(connection.ConnectionId);
        foreach (CifsClientPerTreeConnect treeconnect in treeconnects)
        {
            if (treeconnect.TreeId == tid)
            {
                return new SmbClientTreeConnect(GetSession((ushort)treeconnect.SessionId), treeconnect);
            }
        }
        return null;
    }

    /// <summary>
    /// get the file in current connection
    /// </summary>
    /// <param name="fid">the id of file</param>
    /// <returns>the file object, or null when not found</returns>
    public SmbClientOpen GetOpenFile(ushort fid)
    {
        SmbClientConnection connection = Connection;
        if (connection == null)
        {
            return null;
        }
        // linear scan over the connection's open files
        ReadOnlyCollection<CifsClientPerOpenFile> files = GetOpenFiles(connection.ConnectionId);
        foreach (CifsClientPerOpenFile file in files)
        {
            if (file.FileHandle == fid)
            {
                return new SmbClientOpen(file);
            }
        }
        return null;
    }

    #endregion

    #region Update the Signing State of context

    /// <summary>
    /// update the connection IsSigningActive: combination of the client's MessageSigningPolicy and the
    /// connection's ServerSigningState.
    /// </summary>
    /// <param name="connection">the target connection to update</param>
    internal void UpdateSigningActive(SmbClientConnection connection)
    {
        if (connection == null)
        {
            return;
        }
        // dispatch on the client-side policy; each helper merges in the server state
        switch (smbClient.Capability.ClientSignState)
        {
            case SignState.REQUIRED:
                MergeClientRequiredState(connection);
                break;
            case SignState.ENABLED:
                MergeClientEnableState(connection);
                break;
            case SignState.DISABLED_UNLESS_REQUIRED:
                MergeClientDisalbedUnlessRequiredState(connection);
                break;
            case SignState.DISABLED:
                MergeClientDisabledState(connection);
                break;
            default:
                break;
        }
    }

    #endregion

    #region Private Properties & Methods

    /// <summary>
    /// merge the server sign state when client is disabled.
    /// NOTE(review): when the server REQUIRES signing, IsSigningActive is left
    /// unchanged (no assignment, no exception despite the original exception tag)
    /// — presumably the caller detects this incompatibility elsewhere; confirm.
    /// </summary>
    /// <param name="connection">the target connection to update</param>
    private void MergeClientDisabledState(SmbClientConnection connection)
    {
        switch (connection.ServerSigningState)
        {
            case SignState.REQUIRED:
                break;
            case SignState.ENABLED:
            case SignState.DISABLED_UNLESS_REQUIRED:
            case SignState.DISABLED:
                connection.IsSigningActive = false;
                break;
            default:
                break;
        }
    }

    /// <summary>
    /// merge the server sign state when client is disabled-unless-required.
    /// Signing becomes active only when the server requires it.
    /// </summary>
    /// <param name="connection">the target connection to update</param>
    private void MergeClientDisalbedUnlessRequiredState(SmbClientConnection connection)
    {
        switch (connection.ServerSigningState)
        {
            case SignState.REQUIRED:
                connection.IsSigningActive = true;
                break;
            case SignState.ENABLED:
            case SignState.DISABLED_UNLESS_REQUIRED:
            case SignState.DISABLED:
                connection.IsSigningActive = false;
                break;
        }
    }

    /// <summary>
    /// merge the server sign state when client is enabled.
    /// Signing is active when the server requires or enables it.
    /// </summary>
    /// <param name="connection">the target connection to update</param>
    private void MergeClientEnableState(SmbClientConnection connection)
    {
        switch (connection.ServerSigningState)
        {
            case SignState.REQUIRED:
            case SignState.ENABLED:
                connection.IsSigningActive = true;
                break;
            case SignState.DISABLED_UNLESS_REQUIRED:
            case SignState.DISABLED:
                connection.IsSigningActive = false;
                break;
            default:
                break;
        }
    }

    /// <summary>
    /// merge the server sign state when client is required.
    /// NOTE(review): when the server has signing DISABLED, IsSigningActive is left
    /// unchanged — presumably an incompatible configuration handled elsewhere; confirm.
    /// </summary>
    /// <param name="connection">the target connection to update</param>
    private void MergeClientRequiredState(SmbClientConnection connection)
    {
        switch (connection.ServerSigningState)
        {
            case SignState.REQUIRED:
            case SignState.ENABLED:
            case SignState.DISABLED_UNLESS_REQUIRED:
                connection.IsSigningActive = true;
                break;
            case SignState.DISABLED:
                break;
            default:
                break;
        }
    }

    #endregion
}
}
| |
#region Copyright notice and license
// Copyright 2015, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.Runtime.InteropServices;
using System.Threading;
using Grpc.Core.Utils;
namespace Grpc.Core.Internal
{
/// <summary>
/// gpr_timespec from grpc/support/time.h
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct Timespec
{
// Nanoseconds in one second.
const long NanosPerSecond = 1000 * 1000 * 1000;
// One DateTime tick is 100 nanoseconds.
const long NanosPerTick = 100;
// DateTime ticks per second.
const long TicksPerSecond = NanosPerSecond / NanosPerTick;

static readonly NativeMethods Native = NativeMethods.Get();
// Unix epoch in UTC; basis for DateTime <-> Timespec conversions.
static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);

/// <summary>
/// Creates a realtime-clock timespec from seconds and nanoseconds.
/// </summary>
public Timespec(long tv_sec, int tv_nsec) : this(tv_sec, tv_nsec, GPRClockType.Realtime)
{
}

/// <summary>
/// Creates a timespec with an explicit clock type.
/// </summary>
public Timespec(long tv_sec, int tv_nsec, GPRClockType clock_type)
{
    this.tv_sec = tv_sec;
    this.tv_nsec = tv_nsec;
    this.clock_type = clock_type;
}

// Field order mirrors the native gpr_timespec (LayoutKind.Sequential on the struct).
private long tv_sec;
private int tv_nsec;
private GPRClockType clock_type;

/// <summary>
/// Timespec a long time in the future (native sentinel value).
/// </summary>
public static Timespec InfFuture
{
    get
    {
        return Native.gprsharp_inf_future(GPRClockType.Realtime);
    }
}

/// <summary>
/// Timespec a long time in the past (native sentinel value).
/// </summary>
public static Timespec InfPast
{
    get
    {
        return Native.gprsharp_inf_past(GPRClockType.Realtime);
    }
}

/// <summary>
/// Return Timespec representing the current time (realtime clock).
/// </summary>
public static Timespec Now
{
    get
    {
        return Native.gprsharp_now(GPRClockType.Realtime);
    }
}

/// <summary>
/// Seconds since unix epoch.
/// </summary>
public long TimevalSeconds
{
    get
    {
        return tv_sec;
    }
}

/// <summary>
/// The nanoseconds part of timeval.
/// </summary>
public int TimevalNanos
{
    get
    {
        return tv_nsec;
    }
}

/// <summary>
/// Converts the timespec to desired clock type (delegates to native code).
/// </summary>
public Timespec ToClockType(GPRClockType targetClock)
{
    return Native.gprsharp_convert_clock_type(this, targetClock);
}
/// <summary>
/// Converts Timespec to DateTime.
/// Timespec needs to be of type GPRClockType.Realtime and needs to represent a legal value.
/// DateTime has lower resolution (100ns), so rounding can occur.
/// Values are always rounded up to the nearest DateTime value in the future.
///
/// For Timespec.InfFuture or if timespec is after the largest representable DateTime, DateTime.MaxValue is returned.
/// For Timespec.InfPast or if timespec is before the lowest representable DateTime, DateTime.MinValue is returned.
///
/// Unless DateTime.MaxValue or DateTime.MinValue is returned, the resulting DateTime is always in UTC
/// (DateTimeKind.Utc)
/// </summary>
public DateTime ToDateTime()
{
    GrpcPreconditions.CheckState(tv_nsec >= 0 && tv_nsec < NanosPerSecond);
    GrpcPreconditions.CheckState(clock_type == GPRClockType.Realtime);

    // Sentinel values map straight to the extreme DateTime values.
    if (this.Equals(InfFuture))
    {
        return DateTime.MaxValue;
    }
    if (this.Equals(InfPast))
    {
        return DateTime.MinValue;
    }

    try
    {
        // Round the nanosecond part up to whole 100ns ticks.
        long nanoTicks = tv_nsec / NanosPerTick;
        if (tv_nsec % NanosPerTick != 0)
        {
            nanoTicks++;
        }
        long totalTicks = checked(tv_sec * TicksPerSecond + nanoTicks);
        return UnixEpoch.AddTicks(totalTicks);
    }
    catch (OverflowException)
    {
        // Total tick count does not fit into a long.
        return tv_sec > 0 ? DateTime.MaxValue : DateTime.MinValue;
    }
    catch (ArgumentOutOfRangeException)
    {
        // Tick count is outside DateTime's representable range.
        return tv_sec > 0 ? DateTime.MaxValue : DateTime.MinValue;
    }
}
/// <summary>
/// Converts DateTime to Timespec.
/// DateTime has to be in UTC (DateTimeKind.Utc) unless it's DateTime.MaxValue or DateTime.MinValue.
/// For DateTime.MaxValue or a date time after the largest representable Timespec, Timespec.InfFuture is returned.
/// For DateTime.MinValue or a date time before the lowest representable Timespec, Timespec.InfPast is returned.
/// </summary>
/// <returns>The corresponding Timespec (clock type Realtime).</returns>
/// <param name="dateTime">Date time to convert.</param>
public static Timespec FromDateTime(DateTime dateTime)
{
    if (dateTime == DateTime.MaxValue)
    {
        return Timespec.InfFuture;
    }
    if (dateTime == DateTime.MinValue)
    {
        return Timespec.InfPast;
    }
    GrpcPreconditions.CheckArgument(dateTime.Kind == DateTimeKind.Utc, "dateTime needs of kind DateTimeKind.Utc or be equal to DateTime.MaxValue or DateTime.MinValue.");
    try
    {
        TimeSpan timeSpan = dateTime - UnixEpoch;
        long ticks = timeSpan.Ticks;
        long seconds = ticks / TicksPerSecond;
        int nanos = (int)((ticks % TicksPerSecond) * NanosPerTick);
        if (nanos < 0)
        {
            // correct the result based on C# modulo semantics for negative dividend
            seconds--;
            nanos += (int)NanosPerSecond;
        }
        return new Timespec(seconds, nanos);
    }
    catch (ArgumentOutOfRangeException)
    {
        // dateTime - UnixEpoch is out of TimeSpan's representable range
        return dateTime > UnixEpoch ? Timespec.InfFuture : Timespec.InfPast;
    }
}
/// <summary>
/// Gets current timestamp using <c>GPRClockType.Precise</c>.
/// Only available internally because core needs to be compiled with
/// GRPC_TIMERS_RDTSC support for this to use RDTSC.
/// </summary>
internal static Timespec PreciseNow => Native.gprsharp_now(GPRClockType.Precise);

/// <summary>
/// Size in bytes of the native timespec struct, as reported by the native library.
/// </summary>
internal static int NativeSize => Native.gprsharp_sizeof_timespec();
}
}
| |
////////////////////////////////////////////////////////////////////////////////
// StickyWindows
//
// Copyright (c) 2004 Corneliu I. Tusnea
//
// This software is provided 'as-is', without any express or implied warranty.
// In no event will the author be held liable for any damages arising from
// the use of this software.
// Permission to use, copy, modify, distribute and sell this software for any
// purpose is hereby granted without fee, provided that the above copyright
// notice appear in all copies and that both that copyright notice and this
// permission notice appear in supporting documentation.
//
// Notice: Check CodeProject for details about using this class
//
//////////////////////////////////////////////////////////////////////////////////
using System;
using System.Collections;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Security.Permissions;
using System.Windows.Forms;
using Blue.Private.Win32Imports;
#region Blue.Win32Imports
namespace Blue.Private.Win32Imports
{
/// <summary>
/// Win32 is just a placeholder for some Win32 imported definitions
/// </summary>
/// <summary>
/// Win32 is just a placeholder for some Win32 imported definitions
/// </summary>
internal static class UnsafeNativeMethods
{
    // NOTE(review): user32 exports are stdcall; CallingConvention.Cdecl is kept
    // as-is from the original source — confirm before changing it.
    [DllImport("user32.dll", CharSet = CharSet.Ansi, CallingConvention = CallingConvention.Cdecl)]
    private static extern short GetAsyncKeyState(int vKey);

    [DllImport("user32.dll", CharSet = CharSet.Ansi, CallingConvention = CallingConvention.Cdecl)]
    internal static extern IntPtr GetDesktopWindow();

    /// <summary>
    /// VK is just a placeholder for VK (VirtualKey) general definitions
    /// </summary>
    public class VK
    {
        public const int VK_SHIFT = 0x10;
        public const int VK_CONTROL = 0x11;
        public const int VK_MENU = 0x12;
        public const int VK_ESCAPE = 0x1B;

        /// <summary>
        /// Returns true while the given virtual key is currently held down.
        /// </summary>
        public static bool IsKeyPressed(int KeyCode)
        {
            // Bit 15 (0x8000) of the GetAsyncKeyState return value is the
            // "key is currently down" flag. The previous code tested bit 11
            // (0x0800) and inverted the result, so it never reflected the
            // actual key state.
            return (GetAsyncKeyState(KeyCode) & 0x8000) != 0;
        }
    }

    /// <summary>
    /// WM is just a placeholder class for WM (WindowMessage) definitions
    /// </summary>
    public class WM
    {
        public const int WM_MOUSEMOVE = 0x0200;
        public const int WM_NCMOUSEMOVE = 0x00A0;
        public const int WM_NCLBUTTONDOWN = 0x00A1;
        public const int WM_NCLBUTTONUP = 0x00A2;
        public const int WM_NCLBUTTONDBLCLK = 0x00A3;
        public const int WM_LBUTTONDOWN = 0x0201;
        public const int WM_LBUTTONUP = 0x0202;
        public const int WM_KEYDOWN = 0x0100;
    }

    /// <summary>
    /// HT is just a placeholder for HT (HitTest) definitions
    /// </summary>
    public class HT
    {
        public const int HTERROR = (-2);
        public const int HTTRANSPARENT = (-1);
        public const int HTNOWHERE = 0;
        public const int HTCLIENT = 1;
        public const int HTCAPTION = 2;
        public const int HTSYSMENU = 3;
        public const int HTGROWBOX = 4;
        public const int HTSIZE = HTGROWBOX;
        public const int HTMENU = 5;
        public const int HTHSCROLL = 6;
        public const int HTVSCROLL = 7;
        public const int HTMINBUTTON = 8;
        public const int HTMAXBUTTON = 9;
        public const int HTLEFT = 10;
        public const int HTRIGHT = 11;
        public const int HTTOP = 12;
        public const int HTTOPLEFT = 13;
        public const int HTTOPRIGHT = 14;
        public const int HTBOTTOM = 15;
        public const int HTBOTTOMLEFT = 16;
        public const int HTBOTTOMRIGHT = 17;
        public const int HTBORDER = 18;
        public const int HTREDUCE = HTMINBUTTON;
        public const int HTZOOM = HTMAXBUTTON;
        public const int HTSIZEFIRST = HTLEFT;
        public const int HTSIZELAST = HTBOTTOMRIGHT;
        public const int HTOBJECT = 19;
        public const int HTCLOSE = 20;
        public const int HTHELP = 21;
    }

    /// <summary>
    /// Helpers to split a 32-bit message parameter into its 16-bit halves.
    /// </summary>
    public class Bit
    {
        /// <summary>High 16 bits of <paramref name="iValue"/>, as 0..0xFFFF.</summary>
        public static int HiWord(int iValue)
        {
            return ((iValue >> 16) & 0xFFFF);
        }

        /// <summary>Low 16 bits of <paramref name="iValue"/>, as 0..0xFFFF.</summary>
        public static int LoWord(int iValue)
        {
            return (iValue & 0xFFFF);
        }
    }
}
}
#endregion
namespace Blue.Windows
{
/// <summary>
/// A windows that Sticks to other windows of the same type when moved or resized.
/// You get a nice way of organizing multiple top-level windows.
/// Quite similar with WinAmp 2.x style of sticking the windows
/// </summary>
public class StickyWindow : NativeWindow
{
/// <summary>
/// Global List of registered StickyWindows
/// </summary>
private static readonly ArrayList GlobalStickyWindows = new ArrayList();

// public properties
// Distance (pixels) at which sticking kicks in. The StickGap property
// documents "Default value = 20"; the field was initialized to 10, which
// contradicted that documentation — restored to 20.
private static int _stickGap = 20; // distance to stick

public event EventHandler ResizeEnded;
public event EventHandler MoveEnded;
#region StickyWindow Constructor
/// <summary>
/// Make the form Sticky
/// </summary>
/// <param name="form">Form to be made sticky</param>
public StickyWindow(Form form)
{
    _resizingForm = false;
    _movingForm = false;
    _originalForm = form;
    _formRect = Rectangle.Empty;
    _formOffsetRect = Rectangle.Empty;
    _formOffsetPoint = Point.Empty;
    _offsetPoint = Point.Empty;
    _mousePoint = Point.Empty;
    // All sticky behaviors are enabled by default.
    StickOnMove = true;
    StickOnResize = true;
    StickToScreen = true;
    StickToOther = true;
    // Pre-build the three message processors once; _messageProcessor is
    // swapped between them as move/resize operations start and end.
    _defaultMessageProcessor = DefaultMsgProcessor;
    _moveMessageProcessor = MoveMsgProcessor;
    _resizeMessageProcessor = ResizeMsgProcessor;
    _messageProcessor = _defaultMessageProcessor;
    // Subclass the form's window so WndProc sees its messages. This also
    // triggers OnHandleChange, which registers the form in the global list.
    AssignHandle(_originalForm.Handle);
}
#endregion
#region OnHandleChange
[PermissionSet(SecurityAction.Demand, Name = "FullTrust")]
protected override void OnHandleChange()
{
    // Register the form in the global list while a handle is assigned,
    // unregister it when the handle is released.
    // Compare against IntPtr.Zero instead of casting to int: the explicit
    // IntPtr-to-int conversion throws OverflowException for 64-bit handle
    // values that do not fit in 32 bits.
    if (Handle != IntPtr.Zero)
    {
        GlobalStickyWindows.Add(_originalForm);
    }
    else
    {
        GlobalStickyWindows.Remove(_originalForm);
    }
}
#endregion
#region WndProc
[PermissionSet(SecurityAction.Demand, Name = "FullTrust")]
protected override void WndProc(ref Message m)
{
    // Give the currently active processor first crack at the message;
    // anything it does not handle falls through to default processing.
    bool handled = _messageProcessor(ref m);
    if (!handled)
    {
        base.WndProc(ref m);
    }
}
#endregion
#region DefaultMsgProcessor
/// <summary>
/// Processes messages during normal operations (while the form is not resized or moved)
/// </summary>
/// <param name="m">Message to inspect</param>
/// <returns>True when the message was fully handled here</returns>
private bool DefaultMsgProcessor(ref Message m)
{
    switch (m.Msg)
    {
        case UnsafeNativeMethods.WM.WM_NCLBUTTONDOWN:
        {
            _originalForm.Activate();
            // Go through Int64 and truncate explicitly: a direct (int) cast of
            // an IntPtr throws OverflowException in 64-bit processes when the
            // value has high bits set (e.g. sign-extended coordinates).
            int lParam = unchecked((int)m.LParam.ToInt64());
            _mousePoint.X = (short)UnsafeNativeMethods.Bit.LoWord(lParam);
            _mousePoint.Y = (short)UnsafeNativeMethods.Bit.HiWord(lParam);
            int hitTest = unchecked((int)m.WParam.ToInt64());
            if (OnNCLButtonDown(hitTest, _mousePoint))
            {
                // Non-zero result tells Windows the click was consumed.
                m.Result = (IntPtr)((_resizingForm || _movingForm) ? 1 : 0);
                return true;
            }
            break;
        }
    }
    return false;
}
#endregion
#region OnNCLButtonDown
/// <summary>
/// Checks where the click was in the NC area and starts move or resize operation
/// </summary>
/// <param name="iHitTest">Hit-test code from the message's WParam</param>
/// <param name="point">Click point taken from the message's LParam</param>
/// <returns>True when a sticky move/resize was started (message consumed)</returns>
private bool OnNCLButtonDown(int iHitTest, Point point)
{
    _offsetPoint = point;

    // Caption click: a request to move the form.
    if (iHitTest == UnsafeNativeMethods.HT.HTCAPTION)
    {
        if (!StickOnMove)
            return false; // leave default processing
        var pointInApp = _originalForm.PointToClient(Cursor.Position);
        _offsetPoint.Offset(pointInApp.X, pointInApp.Y);
        StartMove();
        return true;
    }

    // Border/corner clicks: requests to resize.
    switch (iHitTest)
    {
        case UnsafeNativeMethods.HT.HTTOPLEFT:
            return StartResize(ResizeDir.Top | ResizeDir.Left);
        case UnsafeNativeMethods.HT.HTTOP:
            return StartResize(ResizeDir.Top);
        case UnsafeNativeMethods.HT.HTTOPRIGHT:
            return StartResize(ResizeDir.Top | ResizeDir.Right);
        case UnsafeNativeMethods.HT.HTRIGHT:
            return StartResize(ResizeDir.Right);
        case UnsafeNativeMethods.HT.HTBOTTOMRIGHT:
            return StartResize(ResizeDir.Bottom | ResizeDir.Right);
        case UnsafeNativeMethods.HT.HTBOTTOM:
            return StartResize(ResizeDir.Bottom);
        case UnsafeNativeMethods.HT.HTBOTTOMLEFT:
            return StartResize(ResizeDir.Bottom | ResizeDir.Left);
        case UnsafeNativeMethods.HT.HTLEFT:
            return StartResize(ResizeDir.Left);
        default:
            return false;
    }
}
#endregion
#region Utilities
/// <summary>
/// Clamps <paramref name="iP1"/> into [<paramref name="iM1"/>, <paramref name="iM2"/>].
/// The lower bound is checked first, so it wins if the bounds are inverted.
/// </summary>
private int NormalizeInside(int iP1, int iM1, int iM2)
{
    return iP1 <= iM1 ? iM1 : (iP1 >= iM2 ? iM2 : iP1);
}
#endregion
#region Cancel
/// <summary>
/// Aborts/ends any move or resize in progress: releases the mouse capture,
/// clears the state flags and restores the default message processor.
/// Statement order matters: releasing capture can synchronously pump
/// messages back through WndProc.
/// </summary>
private void Cancel()
{
    _originalForm.Capture = false;
    _movingForm = false;
    _resizingForm = false;
    _messageProcessor = _defaultMessageProcessor;
}
#endregion
#region ResizeDir
// Direction(s) of an in-progress resize; flags combine for corner
// resizes (e.g. Top | Left).
[Flags]
private enum ResizeDir
{
    Top = 2,
    Bottom = 4,
    Left = 8,
    Right = 16
};
#endregion
#region Message Processor
// Internal Message Processor: returns true when the message was handled
// and default processing must be skipped.
private delegate bool ProcessMessage(ref Message m);
// Currently active processor (default / move / resize).
private ProcessMessage _messageProcessor;
// Messages processors based on type, built once in the constructor.
private readonly ProcessMessage _defaultMessageProcessor;
private readonly ProcessMessage _moveMessageProcessor;
private readonly ProcessMessage _resizeMessageProcessor;
#endregion
#region Internal properties
// Move stuff
private bool _movingForm; // true while a sticky move is in progress
private Point _formOffsetPoint; // calculated offset rect to be added !! (min distances in all directions!!)
private Point _offsetPoint; // primary offset
// Resize stuff
private bool _resizingForm; // true while a sticky resize is in progress
private ResizeDir _resizeDirection; // edge(s) being dragged
private Rectangle _formOffsetRect; // calculated rect to fix the size
private Point _mousePoint; // mouse position
// General Stuff
private readonly Form _originalForm; // the form
private Rectangle _formRect; // form bounds
private Rectangle _formOriginalRect; // bounds before last operation started (restored on ESC)
#endregion
#region Public operations and properties
/// <summary>
/// Distance in pixels between two forms or a form and the screen where the sticking should start
/// Default value = 20
/// </summary>
/// <remarks>
/// NOTE(review): the backing field is static, so setting StickGap on one
/// StickyWindow changes the gap for every sticky window — confirm this is intended.
/// </remarks>
public int StickGap
{
    get { return _stickGap; }
    set { _stickGap = value; }
}
/// <summary>
/// Allow the form to stick while resizing
/// Default value = true
/// </summary>
public bool StickOnResize { get; set; }
/// <summary>
/// Allow the form to stick while moving
/// Default value = true
/// </summary>
public bool StickOnMove { get; set; }
/// <summary>
/// Allow sticking to Screen Margins
/// Default value = true
/// </summary>
public bool StickToScreen { get; set; }
/// <summary>
/// Allow sticking to other StickWindows
/// Default value = true
/// </summary>
public bool StickToOther { get; set; }
/// <summary>
/// Register a new form as an external reference form.
/// All Sticky windows will try to stick to the external references
/// Use this to register your MainFrame so the child windows try to stick to it, when your MainFrame is NOT a sticky
/// window
/// </summary>
/// <param name="frmExternal">External window that will be used as reference</param>
public static void RegisterExternalReferenceForm(Form frmExternal)
{
    GlobalStickyWindows.Add(frmExternal);
}
/// <summary>
/// Unregister a form from the external references.
/// <see cref="RegisterExternalReferenceForm" />
/// </summary>
/// <param name="frmExternal">External window that was used as reference</param>
public static void UnregisterExternalReferenceForm(Form frmExternal)
{
    GlobalStickyWindows.Remove(frmExternal);
}
#endregion
#region ResizeOperations
/// <summary>
/// Begins a sticky resize in the given direction(s): saves the current
/// bounds, captures the mouse and switches to the resize message processor.
/// </summary>
/// <param name="resDir">Edge(s) being dragged.</param>
/// <returns>True when the resize was started (message consumed).</returns>
private bool StartResize(ResizeDir resDir)
{
    // Sticky resizing disabled: let Windows do its default resize.
    if (!StickOnResize)
        return false;

    _resizeDirection = resDir;
    _formRect = _originalForm.Bounds;
    _formOriginalRect = _originalForm.Bounds; // remembered so ESC can restore it
    if (!_originalForm.Capture) // start capturing messages
        _originalForm.Capture = true;
    _messageProcessor = _resizeMessageProcessor;
    return true; // we own the message from here on
}
/// <summary>
/// Processes messages while a sticky resize is in progress.
/// Always returns false so the form also sees the message.
/// </summary>
private bool ResizeMsgProcessor(ref Message m)
{
    // Mouse capture was lost: abort the resize.
    if (!_originalForm.Capture)
    {
        Cancel();
        return false;
    }
    switch (m.Msg)
    {
        case UnsafeNativeMethods.WM.WM_LBUTTONUP:
        {
            // ok, resize finished !!!
            EndResize();
            break;
        }
        case UnsafeNativeMethods.WM.WM_MOUSEMOVE:
        {
            // Go through Int64 and truncate explicitly: a direct (int) cast of
            // an IntPtr throws OverflowException in 64-bit processes when the
            // value has high bits set.
            int lParam = unchecked((int)m.LParam.ToInt64());
            _mousePoint.X = (short)UnsafeNativeMethods.Bit.LoWord(lParam);
            _mousePoint.Y = (short)UnsafeNativeMethods.Bit.HiWord(lParam);
            Resize(_mousePoint);
            break;
        }
        case UnsafeNativeMethods.WM.WM_KEYDOWN:
        {
            if (m.WParam.ToInt64() == UnsafeNativeMethods.VK.VK_ESCAPE)
            {
                _originalForm.Bounds = _formOriginalRect; // set back old size
                Cancel();
            }
            break;
        }
    }
    return false;
}
/// <summary>
/// Completes a resize operation: releases capture/state via Cancel and
/// raises the ResizeEnded event.
/// </summary>
private void EndResize()
{
    Cancel();
    OnResizeEnded();
}
#endregion
#region Resize Computing
/// <summary>
/// Stretches the form toward point <paramref name="p"/> along the edges in
/// _resizeDirection, snaps the moving edge(s) to the screen working area
/// and to other registered sticky windows, clamps the result to the form's
/// Min/Max sizes and applies the new bounds.
/// </summary>
/// <param name="p">Mouse position in form-client coordinates.</param>
private void Resize(Point p)
{
    p = _originalForm.PointToScreen(p);
    var activeScr = Screen.FromPoint(p);
    _formRect = _originalForm.Bounds;
    // Keep the opposite edges fixed while X/Y and Width/Height change.
    var iRight = _formRect.Right;
    var iBottom = _formRect.Bottom;
    // no normalize required
    // first strech the window to the new position
    if ((_resizeDirection & ResizeDir.Left) == ResizeDir.Left)
    {
        _formRect.Width = _formRect.X - p.X + _formRect.Width;
        _formRect.X = iRight - _formRect.Width;
    }
    if ((_resizeDirection & ResizeDir.Right) == ResizeDir.Right)
        _formRect.Width = p.X - _formRect.Left;
    if ((_resizeDirection & ResizeDir.Top) == ResizeDir.Top)
    {
        _formRect.Height = _formRect.Height - p.Y + _formRect.Top;
        _formRect.Y = iBottom - _formRect.Height;
    }
    if ((_resizeDirection & ResizeDir.Bottom) == ResizeDir.Bottom)
        _formRect.Height = p.Y - _formRect.Top;
    // this is the real new position
    // now, try to snap it to different objects (first to screen)
    // CARE !!! We use "Width" and "Height" as Bottom & Right!! (C++ style)
    // Seed each offset with a sentinel (_stickGap + 1) meaning "no snap found".
    _formOffsetRect.X = _stickGap + 1;
    _formOffsetRect.Y = _stickGap + 1;
    _formOffsetRect.Height = 0;
    _formOffsetRect.Width = 0;
    if (StickToScreen)
        Resize_Stick(activeScr.WorkingArea, false);
    if (StickToOther)
    {
        // now try to stick to other forms
        foreach (Form sw in GlobalStickyWindows)
        {
            if (sw != _originalForm)
                Resize_Stick(sw.Bounds, true);
        }
    }
    // Fix (clear) the values that were not updated to stick
    if (_formOffsetRect.X == _stickGap + 1)
        _formOffsetRect.X = 0;
    if (_formOffsetRect.Width == _stickGap + 1)
        _formOffsetRect.Width = 0;
    if (_formOffsetRect.Y == _stickGap + 1)
        _formOffsetRect.Y = 0;
    if (_formOffsetRect.Height == _stickGap + 1)
        _formOffsetRect.Height = 0;
    // compute the new form size
    if ((_resizeDirection & ResizeDir.Left) == ResizeDir.Left)
    {
        // left resize requires special handling of X & Width acording to MinSize and MinWindowTrackSize
        var iNewWidth = _formRect.Width + _formOffsetRect.Width + _formOffsetRect.X;
        if (_originalForm.MaximumSize.Width != 0)
            iNewWidth = Math.Min(iNewWidth, _originalForm.MaximumSize.Width);
        iNewWidth = Math.Min(iNewWidth, SystemInformation.MaxWindowTrackSize.Width);
        iNewWidth = Math.Max(iNewWidth, _originalForm.MinimumSize.Width);
        iNewWidth = Math.Max(iNewWidth, SystemInformation.MinWindowTrackSize.Width);
        _formRect.X = iRight - iNewWidth;
        _formRect.Width = iNewWidth;
    }
    else
    {
        // other resizes
        _formRect.Width += _formOffsetRect.Width + _formOffsetRect.X;
    }
    if ((_resizeDirection & ResizeDir.Top) == ResizeDir.Top)
    {
        // top resize: same clamping, keeping the bottom edge fixed
        var iNewHeight = _formRect.Height + _formOffsetRect.Height + _formOffsetRect.Y;
        if (_originalForm.MaximumSize.Height != 0)
            iNewHeight = Math.Min(iNewHeight, _originalForm.MaximumSize.Height);
        iNewHeight = Math.Min(iNewHeight, SystemInformation.MaxWindowTrackSize.Height);
        iNewHeight = Math.Max(iNewHeight, _originalForm.MinimumSize.Height);
        iNewHeight = Math.Max(iNewHeight, SystemInformation.MinWindowTrackSize.Height);
        _formRect.Y = iBottom - iNewHeight;
        _formRect.Height = iNewHeight;
    }
    else
    {
        // all other resizing are fine
        _formRect.Height += _formOffsetRect.Height + _formOffsetRect.Y;
    }
    // Done !!
    _originalForm.Bounds = _formRect;
}
/// <summary>
/// Tries to snap the edge(s) being resized to the edges of
/// <paramref name="toRect"/>, recording the displacement in _formOffsetRect
/// when it is smaller than any found so far.
/// </summary>
/// <param name="toRect">Rect to try to snap to.</param>
/// <param name="bInsideStick">
/// When true, also allow snapping an edge to the opposite edge of toRect
/// (callers pass true for other windows, false for the screen working area).
/// </param>
private void Resize_Stick(Rectangle toRect, bool bInsideStick)
{
    // Only consider vertical snapping when the rects overlap horizontally (within the gap).
    if (_formRect.Right >= (toRect.Left - _stickGap) && _formRect.Left <= (toRect.Right + _stickGap))
    {
        if ((_resizeDirection & ResizeDir.Top) == ResizeDir.Top)
        {
            if (Math.Abs(_formRect.Top - toRect.Bottom) <= Math.Abs(_formOffsetRect.Top) && bInsideStick)
                _formOffsetRect.Y = _formRect.Top - toRect.Bottom; // snap top to bottom
            else if (Math.Abs(_formRect.Top - toRect.Top) <= Math.Abs(_formOffsetRect.Top))
                _formOffsetRect.Y = _formRect.Top - toRect.Top; // snap top to top
        }
        if ((_resizeDirection & ResizeDir.Bottom) == ResizeDir.Bottom)
        {
            if (Math.Abs(_formRect.Bottom - toRect.Top) <= Math.Abs(_formOffsetRect.Bottom) && bInsideStick)
                _formOffsetRect.Height = toRect.Top - _formRect.Bottom; // snap Bottom to top
            else if (Math.Abs(_formRect.Bottom - toRect.Bottom) <= Math.Abs(_formOffsetRect.Bottom))
                _formOffsetRect.Height = toRect.Bottom - _formRect.Bottom; // snap bottom to bottom
        }
    }
    // Only consider horizontal snapping when the rects overlap vertically (within the gap).
    if (_formRect.Bottom >= (toRect.Top - _stickGap) && _formRect.Top <= (toRect.Bottom + _stickGap))
    {
        if ((_resizeDirection & ResizeDir.Right) == ResizeDir.Right)
        {
            if (Math.Abs(_formRect.Right - toRect.Left) <= Math.Abs(_formOffsetRect.Right) && bInsideStick)
                _formOffsetRect.Width = toRect.Left - _formRect.Right; // Stick right to left
            else if (Math.Abs(_formRect.Right - toRect.Right) <= Math.Abs(_formOffsetRect.Right))
                _formOffsetRect.Width = toRect.Right - _formRect.Right; // Stick right to right
        }
        if ((_resizeDirection & ResizeDir.Left) == ResizeDir.Left)
        {
            if (Math.Abs(_formRect.Left - toRect.Right) <= Math.Abs(_formOffsetRect.Left) && bInsideStick)
                _formOffsetRect.X = _formRect.Left - toRect.Right; // Stick left to right
            else if (Math.Abs(_formRect.Left - toRect.Left) <= Math.Abs(_formOffsetRect.Left))
                _formOffsetRect.X = _formRect.Left - toRect.Left; // Stick left to left
        }
    }
}
#endregion
#region Move Operations
/// <summary>
/// Begins a sticky move: saves the current bounds (ESC restores them),
/// captures the mouse and switches to the move message processor.
/// </summary>
private void StartMove()
{
    _formOriginalRect = _originalForm.Bounds; // save original position
    _formRect = _originalForm.Bounds;
    if (!_originalForm.Capture) // start capturing messages
        _originalForm.Capture = true;
    _messageProcessor = _moveMessageProcessor;
}
/// <summary>
/// Processes messages while a sticky move is in progress.
/// Always returns false so the form also sees the message.
/// </summary>
private bool MoveMsgProcessor(ref Message m)
{
    // internal message loop: mouse capture was lost, abort the move.
    if (!_originalForm.Capture)
    {
        Cancel();
        return false;
    }
    switch (m.Msg)
    {
        case UnsafeNativeMethods.WM.WM_LBUTTONUP:
        {
            // ok, move finished !!!
            EndMove();
            break;
        }
        case UnsafeNativeMethods.WM.WM_MOUSEMOVE:
        {
            // Go through Int64 and truncate explicitly: a direct (int) cast of
            // an IntPtr throws OverflowException in 64-bit processes when the
            // value has high bits set.
            int lParam = unchecked((int)m.LParam.ToInt64());
            _mousePoint.X = (short)UnsafeNativeMethods.Bit.LoWord(lParam);
            _mousePoint.Y = (short)UnsafeNativeMethods.Bit.HiWord(lParam);
            Move(_mousePoint);
            break;
        }
        case UnsafeNativeMethods.WM.WM_KEYDOWN:
        {
            if (m.WParam.ToInt64() == UnsafeNativeMethods.VK.VK_ESCAPE)
            {
                _originalForm.Bounds = _formOriginalRect; // set back old size
                Cancel();
            }
            break;
        }
    }
    return false;
}
/// <summary>
/// Completes a move operation: releases capture/state via Cancel and
/// raises the MoveEnded event.
/// </summary>
private void EndMove()
{
    Cancel();
    OnMoveEnded();
}
#endregion
#region Move Computing
/// <summary>
/// Moves the form so its drag point follows <paramref name="p"/>, snapping
/// the bounds to the screen working area and to other registered sticky
/// windows before applying them.
/// </summary>
/// <param name="p">Mouse position in form-client coordinates.</param>
private void Move(Point p)
{
    p = _originalForm.PointToScreen(p);
    var activeScr = Screen.FromPoint(p); // get the screen from the point !!
    if (!activeScr.WorkingArea.Contains(p))
    {
        p.X = NormalizeInside(p.X, activeScr.WorkingArea.Left, activeScr.WorkingArea.Right);
        p.Y = NormalizeInside(p.Y, activeScr.WorkingArea.Top, activeScr.WorkingArea.Bottom);
    }
    p.Offset(-_offsetPoint.X, -_offsetPoint.Y);
    // p is the exact location of the frame - so we can play with it
    // to detect the new position acording to different bounds
    _formRect.Location = p; // this is the new positon of the form
    _formOffsetPoint.X = _stickGap + 1; // (more than) maximum gaps
    _formOffsetPoint.Y = _stickGap + 1;
    if (StickToScreen)
        Move_Stick(activeScr.WorkingArea, false);
    // Now try to snap to other windows
    if (StickToOther)
    {
        foreach (Form sw in GlobalStickyWindows)
        {
            if (sw != _originalForm)
                Move_Stick(sw.Bounds, true);
        }
    }
    // The sentinel value (_stickGap + 1) means "no snap found": clear it.
    if (_formOffsetPoint.X == _stickGap + 1)
        _formOffsetPoint.X = 0;
    if (_formOffsetPoint.Y == _stickGap + 1)
        _formOffsetPoint.Y = 0;
    _formRect.Offset(_formOffsetPoint);
    _originalForm.Bounds = _formRect;
}
/// <summary>
/// Tries to snap the (already positioned) _formRect to <paramref name="toRect"/>,
/// keeping in _formOffsetPoint the smallest displacement found so far.
/// </summary>
/// <param name="toRect">Rect to try to snap to</param>
/// <param name="bInsideStick">Allow snapping on the inside (eg: window to screen)</param>
/// <remarks>
/// NOTE(review): callers pass bInsideStick = false for the screen and true for
/// other windows, so the example in the parameter doc looks inverted — confirm.
/// </remarks>
private void Move_Stick(Rectangle toRect, bool bInsideStick)
{
    // compare distance from toRect to formRect
    // and then with the found distances, compare the most closed position
    if (_formRect.Bottom >= (toRect.Top - _stickGap) && _formRect.Top <= (toRect.Bottom + _stickGap))
    {
        if (bInsideStick)
        {
            if ((Math.Abs(_formRect.Left - toRect.Right) <= Math.Abs(_formOffsetPoint.X)))
            {
                // left 2 right
                _formOffsetPoint.X = toRect.Right - _formRect.Left;
            }
            if ((Math.Abs(_formRect.Left + _formRect.Width - toRect.Left) <= Math.Abs(_formOffsetPoint.X)))
            {
                // right 2 left
                _formOffsetPoint.X = toRect.Left - _formRect.Width - _formRect.Left;
            }
        }
        if (Math.Abs(_formRect.Left - toRect.Left) <= Math.Abs(_formOffsetPoint.X))
        {
            // snap left 2 left
            _formOffsetPoint.X = toRect.Left - _formRect.Left;
        }
        if (Math.Abs(_formRect.Left + _formRect.Width - toRect.Left - toRect.Width) <= Math.Abs(_formOffsetPoint.X))
        {
            // snap right 2 right
            _formOffsetPoint.X = toRect.Left + toRect.Width - _formRect.Width - _formRect.Left;
        }
    }
    if (_formRect.Right >= (toRect.Left - _stickGap) && _formRect.Left <= (toRect.Right + _stickGap))
    {
        if (bInsideStick)
        {
            if (Math.Abs(_formRect.Top - toRect.Bottom) <= Math.Abs(_formOffsetPoint.Y))
            {
                // Stick Top to Bottom
                _formOffsetPoint.Y = toRect.Bottom - _formRect.Top;
            }
            if (Math.Abs(_formRect.Top + _formRect.Height - toRect.Top) <= Math.Abs(_formOffsetPoint.Y))
            {
                // snap Bottom to Top
                _formOffsetPoint.Y = toRect.Top - _formRect.Height - _formRect.Top;
            }
        }
        // try to snap top 2 top also
        if (Math.Abs(_formRect.Top - toRect.Top) <= Math.Abs(_formOffsetPoint.Y))
        {
            // top 2 top
            _formOffsetPoint.Y = toRect.Top - _formRect.Top;
        }
        if (Math.Abs(_formRect.Top + _formRect.Height - toRect.Top - toRect.Height) <= Math.Abs(_formOffsetPoint.Y))
        {
            // bottom 2 bottom
            _formOffsetPoint.Y = toRect.Top + toRect.Height - _formRect.Height - _formRect.Top;
        }
    }
}
#endregion
/// <summary>Raises <see cref="ResizeEnded"/> after a sticky resize completes.</summary>
protected virtual void OnResizeEnded()
{
    var handler = ResizeEnded;
    if (handler != null)
        handler(this, EventArgs.Empty);
}

/// <summary>Raises <see cref="MoveEnded"/> after a sticky move completes.</summary>
protected virtual void OnMoveEnded()
{
    var handler = MoveEnded;
    if (handler != null)
        handler(this, EventArgs.Empty);
}
}
}
| |
using CSemVer;
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
namespace SimpleGitVersion
{
/// <summary>
/// Immutable object that exposes simplified information by wrapping a more complex <see cref="RepositoryInfo"/>.
/// The <see cref="LoadFromPath"/> also handles the read of the RepositoryInfo.xml that may exist
/// at the root of the solution directory (the Repository.xml file creates a <see cref="RepositoryInfoOptions"/> that
/// configures the analysis).
/// </summary>
public sealed partial class SimpleRepositoryInfo
{
/// <summary>
/// Gets the <see cref="RepositoryInfo"/> onto which this simplified representation is built.
/// Never null.
/// </summary>
public RepositoryInfo Info { get; private set; }
/// <summary>
/// Gets whether a release can be produced from the current commit point.
/// It is either a release or a CI build (see <see cref="IsValidRelease"/> and <see cref="IsValidCIBuild"/>).
/// </summary>
public bool IsValid => IsValidCIBuild || IsValidRelease;
/// <summary>
/// Gets whether this is a valid, normal, release (not a CI build).
/// </summary>
public bool IsValidRelease { get; private set; }
/// <summary>
/// Gets whether this is a valid CI build.
/// </summary>
public bool IsValidCIBuild { get; private set; }
/// <summary>
/// Gets the major version.
/// When <see cref="IsValid"/> is false, it is 0.
/// </summary>
public int Major { get; private set; }
/// <summary>
/// Gets the minor version.
/// When <see cref="IsValid"/> is false, it is 0.
/// </summary>
public int Minor { get; private set; }
/// <summary>
/// Gets the patch version.
/// When <see cref="IsValid"/> is false, it is 0.
/// </summary>
public int Patch { get; private set; }
/// <summary>
/// Gets the standard pre release name among <see cref="CSVersion.StandardPrereleaseNames"/>.
/// <see cref="string.Empty"/> when this is not a pre release version or <see cref="IsValid"/> is false.
/// </summary>
public string PreReleaseName { get; private set; }
/// <summary>
/// Gets the pre-release number (between 0 and 99).
/// Meaningful only if <see cref="PreReleaseName"/> is not empty. Always 0 otherwise.
/// </summary>
public int PreReleaseNumber { get; private set; }
/// <summary>
/// Gets the pre-release fix number between 1 and 99.
/// When <see cref="IsValid"/> is false or if it is not a pre-release fix, it is 0.
/// </summary>
public int PreReleaseFix { get; private set; }
/// <summary>
/// Gets the "<see cref="Major"/>.<see cref="Minor"/>" as a string: this is the component version (the AssemblyVersion).
/// </summary>
public string MajorMinor { get; private set; }
/// <summary>
/// Gets the "<see cref="Major"/>.<see cref="Minor"/>.<see cref="Patch"/>" as a string.
/// </summary>
public string MajorMinorPatch { get; private set; }
/// <summary>
/// Gets the 'Major.Minor.Build.Revision' windows file version to use.
/// When <see cref="IsValid"/> is false, it is '0.0.0.0' (<see cref="InformationalVersion.ZeroFileVersion"/>).
/// When it is a release the last part (Revision) is even and it is odd for CI builds.
/// </summary>
public string FileVersion { get; private set; }
/// <summary>
/// Gets the ordered version.
/// When <see cref="IsValid"/> it is greater than 0.
/// </summary>
public long OrderedVersion { get; private set; }
/// <summary>
/// Gets the Sha of the current commit.
/// </summary>
public string CommitSha { get; private set; }
/// <summary>
/// Gets the UTC date and time of the current commit.
/// </summary>
public DateTime CommitDateUtc { get; private set; }
/// <summary>
/// Gets the version in <see cref="CSVersionFormat.Normalized"/> format.
/// When <see cref="IsValid"/> is false, it contains the error message (the first error line) so that
/// any attempt to use this to actually package something will fail.
/// </summary>
public string SafeSemVersion { get; private set; }
/// <summary>
/// Gets the NuGet version to use.
/// When <see cref="IsValid"/> is false, it contains the error message (the first error line) so that
/// any attempt to use this to actually package something will fail.
/// </summary>
public string SafeNuGetVersion { get; private set; }
/// <summary>
/// Gets the original tag on the current commit point.
/// When <see cref="IsValid"/> is false or if there is no tag (ie. we are on a CI build), it is null.
/// </summary>
public string OriginalTagText { get; private set; }
/// <summary>
/// Creates a new <see cref="SimpleRepositoryInfo"/> based on a path (that can be below the folder with the '.git' sub folder).
/// </summary>
/// <param name="path">The path to lookup.</param>
/// <param name="logger">Logger that will be used.</param>
/// <param name="optionsChecker">
/// Optional action that accepts the logger, a boolean that is true if a RepositoryInfo.xml has been
/// found, and the <see cref="RepositoryInfoOptions"/> that will be used.
/// </param>
/// <returns>An immutable SimpleRepositoryInfo instance.</returns>
public static SimpleRepositoryInfo LoadFromPath( ILogger logger, string path, Action<ILogger,bool,RepositoryInfoOptions> optionsChecker = null )
{
    if( logger == null ) throw new ArgumentNullException( nameof( logger ) );
    // Reads the optional RepositoryInfo.xml at the repository root and lets
    // the optional checker inspect the resulting options.
    Func<string,RepositoryInfoOptions> readOptions = gitPath =>
    {
        string optionFile = Path.Combine( gitPath, "RepositoryInfo.xml" );
        bool fileExists = File.Exists( optionFile );
        RepositoryInfoOptions options = fileExists
                                            ? RepositoryInfoOptions.Read( optionFile )
                                            : new RepositoryInfoOptions();
        optionsChecker?.Invoke( logger, fileExists, options );
        return options;
    };
    RepositoryInfo info = RepositoryInfo.LoadFromPath( path, readOptions );
    return new SimpleRepositoryInfo( logger, info );
}
/// <summary>
/// Initializes a new <see cref="SimpleRepositoryInfo"/> based on a (more complex) <see cref="Info"/>.
/// </summary>
/// <param name="logger">Logger that will be used.</param>
/// <param name="info">The repository information to wrap.</param>
public SimpleRepositoryInfo( ILogger logger, RepositoryInfo info )
{
    if( logger == null ) throw new ArgumentNullException( nameof( logger ) );
    if( info == null ) throw new ArgumentNullException( nameof( info ) );
    Info = info;
    // When info carries an error, HandleRepositoryInfoError logs it and sets
    // the invalid values; nothing more to compute in that case.
    if( !HandleRepositoryInfoError( logger, info ) )
    {
        CommitSha = info.CommitSha;
        CommitDateUtc = info.CommitDateUtc;
        var t = info.ValidReleaseTag;
        if( info.IsDirty && !info.Options.IgnoreDirtyWorkingFolder )
        {
            SetInvalidValuesAndLog( logger, "Working folder has non committed changes.", false );
            logger.Info( info.IsDirtyExplanations );
        }
        else
        {
            Debug.Assert( info.PossibleVersions != null );
            if( info.IsDirty )
            {
                logger.Warn( "Working folder is Dirty! Checking this has been disabled since RepositoryInfoOptions.IgnoreDirtyWorkingFolder is true." );
                logger.Warn( info.IsDirtyExplanations );
            }
            // Trace what was found on/below the commit to help diagnose versioning.
            var basic = info.CommitInfo.BasicInfo;
            if( basic == null )
            {
                logger.Trace( $"No version information found." );
            }
            else
            {
                var tag = basic.UnfilteredThisCommit?.ThisTag;
                logger.Trace( tag != null ? $"Tag: {tag}" : "No tag found on commit itself." );
                var bestContent = info.BetterExistingVersion;
                if( bestContent != null ) logger.Trace( $"Better version found: {bestContent.ThisTag}, sha: {bestContent.CommitSha}" );
                var baseTag = basic.BestCommitBelow?.ThisTag;
                logger.Trace( baseTag != null ? $"Base tag below: {baseTag}" : "No base tag found below." );
            }
            // Will be replaced by SetInvalidValuesAndLog if needed.
            SafeNuGetVersion = info.FinalNuGetVersion.NormalizedTextWithBuildMetaData;
            SafeSemVersion = info.FinalSemVersion.NormalizedTextWithBuildMetaData;
            if( info.CIRelease != null )
            {
                IsValidCIBuild = true;
                if( !info.CIRelease.IsZeroTimed )
                {
                    // CI build numbers derive from the base tag; true marks the FileVersion as CI.
                    SetNumericalVersionValues( info.CIRelease.BaseTag, true );
                }
                else
                {
                    // Zero-timed CI build: version comes directly from the build version.
                    Major = info.CIRelease.BuildVersion.Major;
                    Minor = info.CIRelease.BuildVersion.Minor;
                    Patch = info.CIRelease.BuildVersion.Patch;
                    PreReleaseName = String.Empty;
                    PreReleaseNumber = 0;
                    PreReleaseFix = 0;
                    FileVersion = InformationalVersion.ZeroFileVersion;
                    OrderedVersion = 0;
                }
                logger.Info( $"CI release: '{SafeNuGetVersion}'." );
                LogValidVersions( logger, info );
            }
            else
            {
                if( t == null )
                {
                    SetInvalidValuesAndLog( logger, "No valid release tag found on the commit.", false );
                    LogValidVersions( logger, info );
                }
                else
                {
                    IsValidRelease = true;
                    OriginalTagText = t.ParsedText;
                    SetNumericalVersionValues( t, false );
                    logger.Info( $"Release: '{SafeNuGetVersion}'." );
                }
            }
        }
    }
    // Derived strings are always computed, even for invalid results ("0.0").
    MajorMinor = $"{Major}.{Minor}";
    MajorMinorPatch = $"{MajorMinor}.{Patch}";
}
/// <summary>
/// Logs the possible version(s), mentioning the OnlyPatch/SingleMajor
/// options currently in effect.
/// </summary>
void LogValidVersions( ILogger logger, RepositoryInfo info )
{
    // Build the " (OnlyPatch, SingleMajor = X)" suffix; null when no option applies.
    string opt = null;
    if( info.Options.OnlyPatch ) opt = "OnlyPatch";
    if( info.Options.SingleMajor.HasValue )
    {
        opt = opt == null
                ? "SingleMajor = " + info.Options.SingleMajor.ToString()
                : opt + ", SingleMajor = " + info.Options.SingleMajor.ToString();
    }
    if( opt != null ) opt = " (" + opt + ")";
    logger.Info( info.PossibleVersions.Count == 0
                    ? $"No possible versions {opt}."
                    : $"Possible version(s) {opt}: {string.Join( ", ", info.PossibleVersions )}" );
}
/// <summary>
/// Copies the numerical components of a release tag into the corresponding
/// version properties of this instance.
/// </summary>
/// <param name="t">The parsed version tag to copy from.</param>
/// <param name="isCIBuild">True to format the file version as a CI build.</param>
void SetNumericalVersionValues( CSVersion t, bool isCIBuild )
{
    // Core numeric components.
    Major = t.Major;
    Minor = t.Minor;
    Patch = t.Patch;
    OrderedVersion = t.OrderedVersion;
    // Prerelease components (empty/zero for a stable release tag).
    PreReleaseName = t.PrereleaseName;
    PreReleaseNumber = t.PrereleaseNumber;
    PreReleaseFix = t.PrereleasePatch;
    // The textual file version depends on whether this is a CI build.
    FileVersion = t.ToStringFileVersion( isCIBuild );
}
/// <summary>
/// Logs the repository error, if any, and switches this instance to its
/// invalid-values state.
/// </summary>
/// <returns>True when an error was present and handled; false otherwise.</returns>
bool HandleRepositoryInfoError( ILogger logger, RepositoryInfo info )
{
    var error = info.Error;
    if( error == null ) return false;
    logger.Error( error );
    // Only the first line of a potentially multi-line error becomes the version text.
    int eol = error.IndexOfAny( new[] { '\r', '\n' } );
    SetInvalidValues( eol < 0 ? error : error.Substring( 0, eol ) );
    return true;
}
/// <summary>
/// Logs the reason (as a warning or an informational message) and, unless the
/// invalid values have already been applied, resets all version properties.
/// </summary>
void SetInvalidValuesAndLog( ILogger logger, string reason, bool isWarning )
{
    if( isWarning )
    {
        logger.Warn( reason );
    }
    else
    {
        logger.Info( reason );
    }
    if( InvalidValuesAlreadySet ) return;
    SetInvalidValues( reason );
}
// FileVersion is assigned by both SetNumericalVersionValues and SetInvalidValues,
// so a non-null value means the version properties have already been settled.
bool InvalidValuesAlreadySet => FileVersion != null;
/// <summary>
/// Resets every version component to its zero/empty state and exposes the
/// failure reason through both textual version properties.
/// </summary>
/// <param name="reason">Explanation shown instead of a real version string.</param>
void SetInvalidValues( string reason )
{
    SafeSemVersion = reason;
    SafeNuGetVersion = reason;
    Major = 0;
    Minor = 0;
    Patch = 0;
    PreReleaseName = string.Empty;
    PreReleaseNumber = 0;
    PreReleaseFix = 0;
    OrderedVersion = 0;
    FileVersion = InformationalVersion.ZeroFileVersion;
}
}
}
| |
using BulletSharp;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
namespace DemoFramework.FileLoaders
{
/// <summary>
/// A convex brush from the BSP brush lump. Read as three consecutive Int32
/// values; Marshal.SizeOf(typeof(BspBrush)) sizes the lump, so do not change
/// the member layout.
/// </summary>
public struct BspBrush
{
    /// <summary>Index of this brush's first side in <see cref="BspLoader.BrushSides"/>.</summary>
    public int FirstSide { get; set; }
    /// <summary>Number of consecutive sides belonging to this brush.</summary>
    public int NumSides { get; set; }
    /// <summary>Index into the shader list.</summary>
    public int ShaderNum { get; set; }
}
/// <summary>
/// One side of a brush. Marshal.SizeOf(typeof(BspBrushSide)) sizes the IBSP
/// lump, so do not change the member layout.
/// </summary>
public struct BspBrushSide
{
    /// <summary>Plane index into <see cref="BspLoader.Planes"/>.</summary>
    public int PlaneNum { get; set; }
    /// <summary>Shader index (texinfo for VBSP files).</summary>
    public int ShaderNum { get; set; }
}
/// <summary>
/// A key/value entity parsed from the BSP entities lump (spawn points,
/// lights, triggers, ...).
/// </summary>
[DebuggerDisplay("ClassName: {ClassName}")]
public class BspEntity
{
    /// <summary>The entity's "classname" value; used as its dictionary key.</summary>
    public string ClassName { get; set; }
    /// <summary>The entity's "origin" value, parsed into a vector.</summary>
    public Vector3 Origin { get; set; }
    /// <summary>All remaining key/value pairs of the entity.</summary>
    public Dictionary<string, string> KeyValues { get; set; }
    public BspEntity()
    {
        KeyValues = new Dictionary<string, string>();
    }
}
/// <summary>
/// A leaf node of the BSP tree. For IBSP every field is read as an Int32 and
/// Marshal.SizeOf(typeof(BspLeaf)) sizes the lump, so do not change the member
/// layout; for VBSP the 16-bit file fields are widened while reading.
/// </summary>
public struct BspLeaf
{
    public int Cluster;
    public int Area;
    // Bounding box: Y/Z are swapped and Z negated when read (see BspLoader.LoadBspFile).
    public Vector3 Min;
    public Vector3 Max;
    public int FirstLeafFace;
    public int NumLeafFaces;
    public int FirstLeafBrush;
    public int NumLeafBrushes;
}
/// <summary>
/// A lump directory entry from the BSP header: where a lump starts in the
/// file and how many bytes it occupies.
/// </summary>
[DebuggerDisplay("Offset: {Offset}, Length: {Length}")]
public struct BspLump
{
    /// <summary>Byte offset of the lump from the start of the file.</summary>
    public int Offset;
    /// <summary>Lump size in bytes.</summary>
    public int Length;
}
/// <summary>
/// A splitting plane as stored in the plane lump (normal vector plus distance).
/// Marshal.SizeOf(typeof(BspPlane)) sizes the lump, so do not change the
/// member layout.
/// </summary>
public struct BspPlane
{
    public Vector3 Normal;
    public float Distance;
}
/// <summary>
/// Content flags stored per shader in the BSP file. Only the bits this demo
/// framework uses are listed; the file format defines additional ones.
/// </summary>
[Flags]
public enum ContentFlags
{
    /// <summary>No content flags set. A [Flags] enum should name its zero value (CA1008).</summary>
    None = 0,
    Solid = 1,
    AreaPortal = 0x8000,
    MonsterClip = 0x20000,
    Detail = 0x8000000
}
/// <summary>
/// A shader record from the IBSP shader lump: a 64-byte name followed by
/// surface and content flags. Only populated for IBSP files.
/// </summary>
public class BspShader
{
    /// <summary>Shader name (trailing NUL padding is stripped at load time).</summary>
    public string Shader;
    public int SurfaceFlags;
    public ContentFlags ContentFlags;
}
/// <summary>
/// Lump directory indices for Quake 3 "IBSP" files (17 lumps).
/// The member order mirrors the on-disk directory and must not change.
/// </summary>
public enum IBspLumpType
{
    Entities = 0,
    Shaders,
    Planes,
    Nodes,
    Leaves,
    LeafFaces,
    LeafBrushes,
    Models,
    Brushes,
    BrushSides,
    Vertices,
    MeshIndices,
    Faces,
    Lightmaps,
    LightVols,
    VisData
}
/// <summary>
/// Lump directory indices for Source engine "VBSP" files (64 lumps).
/// The member order mirrors the on-disk directory and must not change.
/// </summary>
public enum VBspLumpType
{
    Entities = 0,
    Planes,
    Texdata,
    Vertexes,
    Visibility,
    Nodes,
    Texinfo,
    Faces,
    Lighting,
    Occlusion,
    Leafs,
    Unused1,
    Edges,
    Surfedges,
    Models,
    Worldlights,
    LeafFaces,
    LeafBrushes,
    Brushes,
    BrushSides,
    Area,
    AreaPortals,
    Portals,
    Clusters,
    PortalVerts,
    ClusterPortals,
    Dispinfo,
    OriginalFaces,
    Unused2,
    PhysCollide,
    VertNormals,
    VertNormalIndices,
    DispLightmapAlphas,
    DispVerts,
    DispLightmapSamplePos,
    GameLump,
    LeafWaterData,
    Primitives,
    PrimVerts,
    PrimIndices,
    Pakfile,
    ClipPortalVerts,
    Cubemaps,
    TexdataStringData,
    TexdataStringTable,
    Overlays,
    LeafMinDistToWater,
    FaceMacroTextureInfo,
    DispTris,
    PhysCollideSurface,
    Unused3,
    Unused4,
    Unused5,
    LightingHDR,
    WorldlightsHDR,
    LeaflightHDR1,
    LeaflightHDR2
}
/// <summary>
/// Loads the collision-relevant lumps (brushes, brush sides, entities, leaves,
/// leaf brushes, planes and shaders) from Quake 3 "IBSP" (version 0x2E) and
/// Source "VBSP" (version 0x14) map files.
/// </summary>
public sealed class BspLoader
{
    public BspBrush[] Brushes { get; set; }
    public BspBrushSide[] BrushSides { get; set; }
    public Dictionary<string, BspEntity> Entities { get; set; }
    public BspLeaf[] Leaves { get; set; }
    public int[] LeafBrushes { get; set; }
    public BspPlane[] Planes { get; set; }
    /// <summary>Shader records; only populated for IBSP files.</summary>
    public List<BspShader> Shaders { get; set; }
    /// <summary>True when the loaded file is a Source engine "VBSP" file.</summary>
    public bool IsVbsp { get; private set; }

    /// <summary>
    /// Opens the given file and loads it.
    /// </summary>
    /// <returns>False when the file is not a supported BSP format.</returns>
    public bool LoadBspFile(string filename)
    {
        // Fix: the FileStream was previously never disposed, leaking the file handle.
        using (var stream = new FileStream(filename, FileMode.Open, FileAccess.Read))
        {
            return LoadBspFile(stream);
        }
    }

    /// <summary>
    /// Loads a BSP file from the given stream, which must be seekable.
    /// The stream is left open; the caller keeps ownership.
    /// </summary>
    /// <returns>False when the magic or version is not a supported BSP format.</returns>
    public bool LoadBspFile(Stream buffer)
    {
        BinaryReader reader = new BinaryReader(buffer);
        // Header: 4-byte magic identifier followed by a 32-bit version.
        string id = Encoding.ASCII.GetString(reader.ReadBytes(4), 0, 4);
        int version = reader.ReadInt32();
        int nHeaderLumps;
        if (id == "IBSP")
        {
            if (version != 0x2E)
            {
                return false;
            }
            nHeaderLumps = 17;
        }
        else if (id == "VBSP")
        {
            if (version != 0x14)
            {
                return false;
            }
            nHeaderLumps = 64;
            IsVbsp = true;
        }
        else
        {
            return false;
        }
        BspLump[] lumps = ReadLumpDirectory(reader, nHeaderLumps);
        ReadBrushes(reader, lumps);
        ReadBrushSides(reader, lumps);
        ReadEntities(reader, lumps);
        ReadLeaves(reader, lumps);
        ReadLeafBrushes(reader, lumps);
        ReadPlanes(reader, lumps);
        if (!IsVbsp)
        {
            ReadShaders(reader, lumps);
        }
        return true;
    }

    // Reads the lump directory that follows the header. VBSP directory entries
    // carry two extra int32 fields (lump format version and ident code) that are skipped.
    private BspLump[] ReadLumpDirectory(BinaryReader reader, int nHeaderLumps)
    {
        BspLump[] lumps = new BspLump[nHeaderLumps];
        for (int i = 0; i < lumps.Length; i++)
        {
            lumps[i].Offset = reader.ReadInt32();
            lumps[i].Length = reader.ReadInt32();
            if (IsVbsp)
            {
                reader.ReadInt32(); // lump format version
                reader.ReadInt32(); // lump ident code
            }
        }
        return lumps;
    }

    // Selects the directory entry for a logical lump in the current format.
    private BspLump GetLump(BspLump[] lumps, VBspLumpType vbspLump, IBspLumpType ibspLump)
    {
        return lumps[IsVbsp ? (int)vbspLump : (int)ibspLump];
    }

    // Reads the brush lump: three int32 values per brush in both formats.
    private void ReadBrushes(BinaryReader reader, BspLump[] lumps)
    {
        BspLump lump = GetLump(lumps, VBspLumpType.Brushes, IBspLumpType.Brushes);
        reader.BaseStream.Position = lump.Offset;
        int count = lump.Length / Marshal.SizeOf(typeof(BspBrush));
        Brushes = new BspBrush[count];
        for (int i = 0; i < count; i++)
        {
            Brushes[i].FirstSide = reader.ReadInt32();
            Brushes[i].NumSides = reader.ReadInt32();
            Brushes[i].ShaderNum = reader.ReadInt32();
        }
    }

    // Reads the brush-side lump. VBSP stores 16-bit fields (plane, texinfo,
    // dispinfo/shader, bevel); IBSP stores two int32 fields.
    private void ReadBrushSides(BinaryReader reader, BspLump[] lumps)
    {
        BspLump lump = GetLump(lumps, VBspLumpType.BrushSides, IBspLumpType.BrushSides);
        reader.BaseStream.Position = lump.Offset;
        int count = lump.Length / Marshal.SizeOf(typeof(BspBrushSide));
        BrushSides = new BspBrushSide[count];
        for (int i = 0; i < count; i++)
        {
            if (IsVbsp)
            {
                BrushSides[i].PlaneNum = reader.ReadUInt16();
                reader.ReadInt16(); // texinfo
                BrushSides[i].ShaderNum = reader.ReadInt16();
                reader.ReadInt16(); // bevel
            }
            else
            {
                BrushSides[i].PlaneNum = reader.ReadInt32();
                BrushSides[i].ShaderNum = reader.ReadInt32();
            }
        }
    }

    // Parses the plain-text entity lump: blocks of "key" "value" lines wrapped in { }.
    // The entities lump is index 0 in both IBSP and VBSP layouts.
    private void ReadEntities(BinaryReader reader, BspLump[] lumps)
    {
        Entities = new Dictionary<string, BspEntity>();
        BspLump lump = GetLump(lumps, VBspLumpType.Entities, IBspLumpType.Entities);
        reader.BaseStream.Position = lump.Offset;
        // Fix: Stream.Read may return fewer bytes than requested; ReadBytes loops until done.
        byte[] entityBytes = reader.ReadBytes(lump.Length);
        string[] entityStrings = Encoding.ASCII.GetString(entityBytes).Split('\n');
        BspEntity bspEntity = null;
        foreach (string entity in entityStrings)
        {
            switch (entity)
            {
                case "\0":
                    continue;
                case "{":
                    bspEntity = new BspEntity();
                    break;
                case "}":
                    // Fix: maps routinely contain several entities with the same
                    // classname ("light", spawn points, ...); Dictionary.Add threw
                    // on the second occurrence. Keep the first one instead, and
                    // skip malformed blocks without a classname.
                    if (bspEntity != null && bspEntity.ClassName != null
                        && !Entities.ContainsKey(bspEntity.ClassName))
                    {
                        Entities.Add(bspEntity.ClassName, bspEntity);
                    }
                    break;
                default:
                    ParseEntityKeyValue(bspEntity, entity);
                    break;
            }
        }
    }

    // Parses a single  "key" "value"  line into the given entity.
    private static void ParseEntityKeyValue(BspEntity bspEntity, string entity)
    {
        string[] keyValue = entity.Trim('\"').Split(new[] { "\" \"" }, 2, StringSplitOptions.None);
        switch (keyValue[0])
        {
            case "classname":
                bspEntity.ClassName = keyValue[1];
                break;
            case "origin":
                string[] originStrings = keyValue[1].Split(' ');
                bspEntity.Origin = new Vector3(
                    float.Parse(originStrings[0], CultureInfo.InvariantCulture),
                    float.Parse(originStrings[1], CultureInfo.InvariantCulture),
                    float.Parse(originStrings[2], CultureInfo.InvariantCulture));
                break;
            default:
                if (!bspEntity.KeyValues.ContainsKey(keyValue[0]))
                {
                    bspEntity.KeyValues.Add(keyValue[0], keyValue.Length == 1 ? "" : keyValue[1]);
                }
                break;
        }
    }

    // Reads the leaf lump. VBSP leaves are 32 bytes with 16-bit fields; IBSP
    // leaves are all-int32 and sized via Marshal.SizeOf. The bounding box is
    // converted on the fly: Y and Z swapped, Z negated.
    private void ReadLeaves(BinaryReader reader, BspLump[] lumps)
    {
        BspLump lump = GetLump(lumps, VBspLumpType.Leafs, IBspLumpType.Leaves);
        reader.BaseStream.Position = lump.Offset;
        int count = lump.Length / (IsVbsp ? 32 : Marshal.SizeOf(typeof(BspLeaf)));
        Leaves = new BspLeaf[count];
        for (int i = 0; i < count; i++)
        {
            if (IsVbsp)
            {
                reader.ReadInt32(); // contents
                Leaves[i].Cluster = reader.ReadInt16();
                Leaves[i].Area = reader.ReadInt16();
                //Swap Y and Z; invert Z
                Leaves[i].Min.X = reader.ReadInt16();
                Leaves[i].Min.Z = -reader.ReadInt16();
                Leaves[i].Min.Y = reader.ReadInt16();
                //Swap Y and Z; invert Z
                Leaves[i].Max.X = reader.ReadInt16();
                Leaves[i].Max.Z = -reader.ReadInt16();
                Leaves[i].Max.Y = reader.ReadInt16();
                Leaves[i].FirstLeafFace = reader.ReadUInt16();
                Leaves[i].NumLeafFaces = reader.ReadUInt16();
                Leaves[i].FirstLeafBrush = reader.ReadUInt16();
                Leaves[i].NumLeafBrushes = reader.ReadUInt16();
                reader.ReadInt16(); // leafWaterDataID
                reader.ReadInt16(); // padding
            }
            else
            {
                Leaves[i].Cluster = reader.ReadInt32();
                Leaves[i].Area = reader.ReadInt32();
                //Swap Y and Z; invert Z
                Leaves[i].Min.X = reader.ReadInt32();
                Leaves[i].Min.Z = -reader.ReadInt32();
                Leaves[i].Min.Y = reader.ReadInt32();
                //Swap Y and Z; invert Z
                Leaves[i].Max.X = reader.ReadInt32();
                Leaves[i].Max.Z = -reader.ReadInt32();
                Leaves[i].Max.Y = reader.ReadInt32();
                Leaves[i].FirstLeafFace = reader.ReadInt32();
                Leaves[i].NumLeafFaces = reader.ReadInt32();
                Leaves[i].FirstLeafBrush = reader.ReadInt32();
                Leaves[i].NumLeafBrushes = reader.ReadInt32();
            }
        }
    }

    // Reads the leaf-brush index lump: int16 per entry for VBSP, int32 for IBSP.
    private void ReadLeafBrushes(BinaryReader reader, BspLump[] lumps)
    {
        BspLump lump = GetLump(lumps, VBspLumpType.LeafBrushes, IBspLumpType.LeafBrushes);
        reader.BaseStream.Position = lump.Offset;
        int count = lump.Length / (IsVbsp ? sizeof(short) : sizeof(int));
        LeafBrushes = new int[count];
        for (int i = 0; i < count; i++)
        {
            LeafBrushes[i] = IsVbsp ? reader.ReadInt16() : reader.ReadInt32();
        }
    }

    // Reads the plane lump; VBSP planes carry an extra int32 "type" field.
    private void ReadPlanes(BinaryReader reader, BspLump[] lumps)
    {
        BspLump lump = GetLump(lumps, VBspLumpType.Planes, IBspLumpType.Planes);
        reader.BaseStream.Position = lump.Offset;
        int planeSize = Marshal.SizeOf(typeof(BspPlane));
        int count = lump.Length / (IsVbsp ? planeSize + sizeof(int) : planeSize);
        Planes = new BspPlane[count];
        for (int i = 0; i < count; i++)
        {
            Planes[i].Normal.X = reader.ReadSingle();
            Planes[i].Normal.Y = reader.ReadSingle();
            Planes[i].Normal.Z = reader.ReadSingle();
            Planes[i].Distance = reader.ReadSingle();
            if (IsVbsp)
            {
                reader.ReadInt32(); // type
            }
        }
    }

    // Reads the IBSP shader lump: 64-byte NUL-padded name plus two int32 flag fields.
    private void ReadShaders(BinaryReader reader, BspLump[] lumps)
    {
        Shaders = new List<BspShader>();
        BspLump lump = lumps[(int)IBspLumpType.Shaders];
        reader.BaseStream.Position = lump.Offset;
        int count = lump.Length / (64 + 2 * sizeof(int));
        byte[] shaderBytes = new byte[64];
        for (int i = 0; i < count; i++)
        {
            BspShader shader = new BspShader();
            reader.Read(shaderBytes, 0, 64);
            shader.Shader = Encoding.ASCII.GetString(shaderBytes, 0, Array.IndexOf(shaderBytes, (byte)0));
            shader.SurfaceFlags = reader.ReadInt32();
            shader.ContentFlags = (ContentFlags)reader.ReadInt32();
            Shaders.Add(shader);
        }
    }
}
}
| |
/*
* Copyright (C) 2009 JavaRosa ,Copyright (C) 2014 Simbacode
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
using org.javarosa.core.model.utils;
using org.javarosa.core.services.locale;
using org.javarosa.core.util.externalizable;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
namespace org.javarosa.core.model
{
/** The definition of a group in a form or questionaire.
*
* @author Acellam Guy , Daniel Kayiwa
*
*/
/// <summary>
/// The definition of a group (or repeat) in a form or questionnaire.
/// Ported from JavaRosa; Java-style accessors are kept alongside C# properties
/// for compatibility.
/// </summary>
/// <remarks>
/// The field order in <see cref="readExternal"/> and <see cref="writeExternal"/>
/// defines the wire format and must stay in sync — do not reorder.
/// </remarks>
public class GroupDef : IFormElement, Localizable
{
    private List<IFormElement> children; /** A list of questions on a group. */
    private Boolean repeat; /** True if this is a "repeat", false if it is a "group" */
    private int id; /** The group number. */
    private IDataReference binding; /** reference to a location in the model to store data in */
    private String labelInnerText;
    private String appearanceAttr;
    private String textID;
    //custom phrasings for repeats
    public String chooseCaption;
    public String addCaption;
    public String delCaption;
    public String doneCaption;
    public String addEmptyCaption;
    public String doneEmptyCaption;
    public String entryHeader;
    public String delHeader;
    public String mainHeader;
    // Registered FormElementStateListener instances (non-generic for port fidelity).
    ArrayList observers;
    // When true the UI must not offer add/remove of repetitions.
    public Boolean noAddRemove = false;
    // Optional reference whose value fixes the number of repetitions.
    public IDataReference count = null;
    /// <summary>Creates an empty, non-repeating group with a null id.</summary>
    public GroupDef()
        : this(Constants.NULL_ID, null, false)
    {
    }
    /// <summary>Creates a group with the given id, children and repeat flag.</summary>
    public GroupDef(int id, List<IFormElement> children, Boolean repeat)
    {
        ID = id;
        Children = children;
        Repeat = repeat;
        observers = new ArrayList();
    }
    /// <summary>The group number.</summary>
    virtual public int ID
    {
        get
        {
            return id;
        }
        set
        {
            this.id = value;
        }
    }
    /// <summary>Reference to the location in the model where data is stored.</summary>
    virtual public IDataReference Bind
    {
        get
        {
            return binding;
        }
        set
        {
            this.binding = value;
        }
    }
    /// <summary>
    /// Child elements of this group. Assigning null installs an empty list,
    /// so the getter never returns null after construction.
    /// </summary>
    virtual public List<IFormElement> Children
    {
        get
        {
            return children;
        }
        set
        {
            this.children = (value == null ? new List<IFormElement>() : value);
        }
    }
    /// <summary>Appends a child element to this group.</summary>
    public void addChild(IFormElement fe)
    {
        children.Add(fe);
    }
    /// <summary>
    /// Returns the i-th child, or null when the index is out of range
    /// (or the children list is null).
    /// </summary>
    public IFormElement getChild(int i)
    {
        if (children == null || i >= children.Count)
        {
            return null;
        }
        else
        {
            return (IFormElement)children[i];
        }
    }
    /**
     * @return true if this represents a <repeat> element
     */
    public Boolean Repeat
    {
        get { return repeat; }
        set { this.repeat = value; }
    }
    public String LabelInnerText
    {
        get { return labelInnerText; }
        set { labelInnerText = value; }
    }
    public String AppearanceAttr
    {
        get { return appearanceAttr; }
        set{ this.appearanceAttr = value;}
    }
    // Java-style setter kept for compatibility with ported callers.
    public void setAppearanceAttr(String appearanceAttr)
    {
        this.appearanceAttr = appearanceAttr;
    }
    /// <summary>Propagates a locale change to every child element.</summary>
    public void localeChanged(String locale, Localizer localizer)
    {
        for (IEnumerator e = children.GetEnumerator(); e.MoveNext(); )
        {
            ((IFormElement)e.Current).localeChanged(locale, localizer);
        }
    }
    public IDataReference CountReference
    {
        get{return count;}
    }
    public override String ToString()
    {
        return "<group>";
    }
    /*
     * (non-Javadoc)
     * @see org.javarosa.core.model.IFormElement#getDeepChildCount()
     */
    public int DeepChildCount
    {
        get
        {
            // Sums the deep counts of all children; the group itself is not counted.
            int total = 0;
            IEnumerator e = children.GetEnumerator();
            while (e.MoveNext())
            {
                total += ((IFormElement)e.Current).DeepChildCount;
            }
            return total;
        }
    }
    /** Reads a group definition object from the supplied stream. */
    // NOTE: field order defines the wire format; keep in sync with writeExternal.
    public void readExternal(BinaryReader dis, PrototypeFactory pf)
    {
        ID = ExtUtil.readInt(dis);
        setAppearanceAttr((String)ExtUtil.read(dis, new ExtWrapNullable(typeof(String)), pf));
        Bind = (IDataReference)ExtUtil.read(dis, new ExtWrapTagged(), pf);
        TextID = (String)ExtUtil.read(dis, new ExtWrapNullable(typeof(String)), pf);
        LabelInnerText = ((String)ExtUtil.read(dis, new ExtWrapNullable(typeof(String)), pf));
        Repeat = (ExtUtil.readBool(dis));
        Children = (List<IFormElement>)ExtUtil.read(dis, new ExtWrapListPoly(), pf);
        noAddRemove = ExtUtil.readBool(dis);
        count = (IDataReference)ExtUtil.read(dis, new ExtWrapNullable(new ExtWrapTagged()), pf);
        chooseCaption = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        addCaption = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        delCaption = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        doneCaption = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        addEmptyCaption = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        doneEmptyCaption = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        entryHeader = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        delHeader = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
        mainHeader = ExtUtil.nullIfEmpty(ExtUtil.readString(dis));
    }
    /** Write the group definition object to the supplied stream. */
    // NOTE: field order defines the wire format; keep in sync with readExternal.
    public void writeExternal(BinaryWriter dos)
    {
        ExtUtil.writeNumeric(dos, ID);
        ExtUtil.write(dos, new ExtWrapNullable(AppearanceAttr));
        ExtUtil.write(dos, new ExtWrapTagged(Bind));
        ExtUtil.write(dos, new ExtWrapNullable(TextID));
        ExtUtil.write(dos, new ExtWrapNullable(LabelInnerText));
        ExtUtil.writeBool(dos, Repeat);
        ExtUtil.write(dos, new ExtWrapListPoly(Children));
        ExtUtil.writeBool(dos, noAddRemove);
        ExtUtil.write(dos, new ExtWrapNullable(count != null ? new ExtWrapTagged(count) : null));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(chooseCaption));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(addCaption));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(delCaption));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(doneCaption));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(addEmptyCaption));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(doneEmptyCaption));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(entryHeader));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(delHeader));
        ExtUtil.writeString(dos, ExtUtil.emptyIfNull(mainHeader));
    }
    /// <summary>Registers a state observer (no-op if already registered).</summary>
    public void registerStateObserver(FormElementStateListener qsl)
    {
        if (!observers.Contains(qsl))
        {
            observers.Add(qsl);
        }
    }
    public void unregisterStateObserver(FormElementStateListener qsl)
    {
        observers.Remove(qsl);
    }
    /// <summary>
    /// Localization text id. A ";form" modifier suffix is not supported here
    /// and is stripped with a warning on assignment.
    /// </summary>
    virtual public System.String TextID
    {
        get
        {
            return textID;
        }
        set
        {
            if (value == null)
            {
                this.textID = null;
                return;
            }
            if (DateUtils.stringContains(value, ";"))
            {
                System.Console.Error.WriteLine("Warning: TextID contains ;form modifier:: \"" + value.Substring(value.IndexOf(";")) + "\"... will be stripped.");
                value = value.Substring(0, (value.IndexOf(";")) - (0)); //trim away the form specifier
            }
            this.textID = value;
        }
    }
}
| |
// ReSharper disable once CheckNamespace
namespace Fluent
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Input;
using System.Windows.Media;
using Fluent.Internal;
/// <summary>
/// Represent panel with ribbon tab items.
/// It is automatically adjusting size of tabs
/// </summary>
public class RibbonTabsContainer : Panel, IScrollInfo
{
    /// <summary>
    /// Initializes a new instance of the <see cref="RibbonTabsContainer"/> class.
    /// </summary>
    public RibbonTabsContainer()
    {
        this.Focusable = false;
    }
    static RibbonTabsContainer()
    {
        KeyboardNavigation.TabNavigationProperty.OverrideMetadata(typeof(RibbonTabsContainer), new FrameworkPropertyMetadata(KeyboardNavigationMode.Once));
        KeyboardNavigation.DirectionalNavigationProperty.OverrideMetadata(typeof(RibbonTabsContainer), new FrameworkPropertyMetadata(KeyboardNavigationMode.Cycle));
    }
    #region Layout Overridings
    /// <inheritdoc />
    protected override Size MeasureOverride(Size availableSize)
    {
        if (this.InternalChildren.Count == 0)
        {
            return base.MeasureOverride(availableSize);
        }
        var desiredSize = this.MeasureChildrenDesiredSize(availableSize);
        // Step 1. If all tabs already fit, just return.
        if (availableSize.Width >= desiredSize.Width
            || DoubleUtil.AreClose(availableSize.Width, desiredSize.Width))
        {
            // Hide separator lines between tabs
            this.UpdateSeparators(false, false);
            this.VerifyScrollData(availableSize.Width, desiredSize.Width);
            return desiredSize;
        }
        // Size reduction:
        // - calculate the overflow width
        // - get all visible tabs ordered by:
        //   - non context tabs first
        //   - largest tabs first
        // - then loop over all tabs and reduce their size in steps
        //   - during each tabs reduction check if it's still the largest tab
        //   - if it's still the largest tab reduce its size further till there is no larger tab left
        var overflowWidth = desiredSize.Width - availableSize.Width;
        var visibleTabs = this.InternalChildren.Cast<RibbonTabItem>()
                              .Where(x => x.Visibility != Visibility.Collapsed)
                              .OrderBy(x => x.IsContextual)
                              .ToList();
        // did we change the size of any contextual tabs?
        var contextualTabsSizeChanged = false;
        // step size for reducing the size of tabs
        const int sizeChangeStepSize = 4;
        // loop while we got overflow left (still need to reduce more) and all tabs are larger than the minimum size
        while (overflowWidth > 0
               && AreAnyTabsAboveMinimumSize(visibleTabs))
        {
            var tabsChangedInSize = 0;
            foreach (var tab in visibleTabs.OrderByDescending(x => x.DesiredSize.Width))
            {
                var widthBeforeMeasure = tab.DesiredSize.Width;
                // ignore tabs that are smaller or equal to the minimum size
                if (widthBeforeMeasure < MinimumRegularTabWidth
                    || DoubleUtil.AreClose(widthBeforeMeasure, MinimumRegularTabWidth))
                {
                    continue;
                }
                var wasLargestTab = IsLargestTab(visibleTabs, tab.DesiredSize.Width, tab.IsContextual);
                // measure with reduced size, but at least the minimum size
                tab.Measure(new Size(Math.Max(MinimumRegularTabWidth, tab.DesiredSize.Width - sizeChangeStepSize), tab.DesiredSize.Height));
                // calculate diff of measure before and after possible reduction
                var widthDifference = widthBeforeMeasure - tab.DesiredSize.Width;
                var didWidthChange = widthDifference > 0;
                // count as changed if diff is greater than zero
                tabsChangedInSize += didWidthChange
                                         ? 1
                                         : 0;
                // was it a changed contextual tab?
                if (tab.IsContextual
                    && didWidthChange)
                {
                    contextualTabsSizeChanged = true;
                }
                // reduce remaining overflow width
                overflowWidth -= widthDifference;
                // break if no overflow width is left
                if (overflowWidth <= 0)
                {
                    break;
                }
                // if the current tab was the largest tab break to reduce its size further
                if (wasLargestTab
                    && didWidthChange)
                {
                    break;
                }
            }
            // break if no tabs changed their size
            if (tabsChangedInSize == 0)
            {
                break;
            }
        }
        desiredSize = this.GetChildrenDesiredSize();
        // Add separator lines between
        // tabs to assist readability
        this.UpdateSeparators(true, contextualTabsSizeChanged || AreAnyTabsAboveMinimumSize(visibleTabs) == false);
        this.VerifyScrollData(availableSize.Width, desiredSize.Width);
        return desiredSize;
    }
    // True when at least one tab can still be shrunk.
    private static bool AreAnyTabsAboveMinimumSize(List<RibbonTabItem> tabs)
    {
        return tabs.Any(item => item.DesiredSize.Width > MinimumRegularTabWidth);
    }
    // True when no other tab of the same kind (contextual/regular) is wider.
    private static bool IsLargestTab(List<RibbonTabItem> tabs, double width, bool isContextual)
    {
        return tabs.Count > 1 && tabs.Any(x => x.IsContextual == isContextual && x.DesiredSize.Width > width) == false;
    }
    // Measures every child against availableSize and sums the resulting desired widths.
    private Size MeasureChildrenDesiredSize(Size availableSize)
    {
        double width = 0;
        double height = 0;
        foreach (UIElement? child in this.InternalChildren)
        {
            if (child is null)
            {
                continue;
            }
            child.Measure(availableSize);
            width += child.DesiredSize.Width;
            height = Math.Max(height, child.DesiredSize.Height);
        }
        return new Size(width, height);
    }
    // Sums the already-measured desired sizes without re-measuring.
    private Size GetChildrenDesiredSize()
    {
        double width = 0;
        double height = 0;
        foreach (UIElement? child in this.InternalChildren)
        {
            if (child is null)
            {
                continue;
            }
            width += child.DesiredSize.Width;
            height = Math.Max(height, child.DesiredSize.Height);
        }
        return new Size(width, height);
    }
    /// <inheritdoc />
    protected override Size ArrangeOverride(Size finalSize)
    {
        // Lay tabs out left-to-right, shifted by the current scroll offset;
        // grouped (contextual) tabs are arranged last.
        var finalRect = new Rect(finalSize)
        {
            X = -this.HorizontalOffset
        };
        var orderedChildren = this.InternalChildren.OfType<RibbonTabItem>()
                                  .OrderBy(x => x.Group is not null);
        foreach (var item in orderedChildren)
        {
            finalRect.Width = item.DesiredSize.Width;
            finalRect.Height = Math.Max(finalSize.Height, item.DesiredSize.Height);
            item.Arrange(finalRect);
            finalRect.X += item.DesiredSize.Width;
        }
        return finalSize;
    }
    /// <summary>
    /// Updates separator visibility
    /// </summary>
    /// <param name="regularTabs">If this parameter true, regular tabs will have separators</param>
    /// <param name="contextualTabs">If this parameter true, contextual tabs will have separators</param>
    private void UpdateSeparators(bool regularTabs, bool contextualTabs)
    {
        foreach (RibbonTabItem? tab in this.Children)
        {
            if (tab is null)
            {
                continue;
            }
            if (tab.IsContextual)
            {
                if (tab.IsSeparatorVisible != contextualTabs)
                {
                    tab.IsSeparatorVisible = contextualTabs;
                }
            }
            else if (tab.IsSeparatorVisible != regularTabs)
            {
                tab.IsSeparatorVisible = regularTabs;
            }
        }
    }
    #endregion
    #region IScrollInfo Members
    /// <inheritdoc />
    public ScrollViewer? ScrollOwner
    {
        get { return this.ScrollData.ScrollOwner; }
        set { this.ScrollData.ScrollOwner = value; }
    }
    /// <inheritdoc />
    public void SetHorizontalOffset(double offset)
    {
        var newValue = CoerceOffset(ValidateInputOffset(offset, nameof(this.HorizontalOffset)), this.ScrollData.ExtentWidth, this.ScrollData.ViewportWidth);
        if (DoubleUtil.AreClose(this.ScrollData.OffsetX, newValue) == false)
        {
            this.ScrollData.OffsetX = newValue;
            this.InvalidateMeasure();
            this.ScrollOwner?.InvalidateScrollInfo();
        }
    }
    /// <inheritdoc />
    public double ExtentWidth => this.ScrollData.ExtentWidth;
    /// <inheritdoc />
    public double HorizontalOffset => this.ScrollData.OffsetX;
    /// <inheritdoc />
    public double ViewportWidth => this.ScrollData.ViewportWidth;
    /// <inheritdoc />
    public void LineLeft()
    {
        this.SetHorizontalOffset(this.HorizontalOffset - 16.0);
    }
    /// <inheritdoc />
    public void LineRight()
    {
        this.SetHorizontalOffset(this.HorizontalOffset + 16.0);
    }
    /// <inheritdoc />
    public Rect MakeVisible(Visual visual, Rect rectangle)
    {
        // We can only work on visuals that are us or children.
        // An empty rect has no size or position. We can't meaningfully use it.
        if (rectangle.IsEmpty
            || visual is null
            || ReferenceEquals(visual, this)
            || this.IsAncestorOf(visual) == false)
        {
            return Rect.Empty;
        }
        // Compute the child's rect relative to (0,0) in our coordinate space.
        var childTransform = visual.TransformToAncestor(this);
        rectangle = childTransform.TransformBounds(rectangle);
        // Initialize the viewport
        var viewport = new Rect(this.HorizontalOffset, rectangle.Top, this.ViewportWidth, rectangle.Height);
        rectangle.X += viewport.X;
        // Compute the offsets required to minimally scroll the child maximally into view.
        var minX = ComputeScrollOffsetWithMinimalScroll(viewport.Left, viewport.Right, rectangle.Left, rectangle.Right);
        // We have computed the scrolling offsets; scroll to them.
        this.SetHorizontalOffset(minX);
        // Compute the visible rectangle of the child relative to the viewport.
        viewport.X = minX;
        rectangle.Intersect(viewport);
        rectangle.X -= viewport.X;
        // Return the rectangle
        return rectangle;
    }
    private static double ComputeScrollOffsetWithMinimalScroll(
        double topView,
        double bottomView,
        double topChild,
        double bottomChild)
    {
        // # CHILD POSITION       CHILD SIZE      SCROLL      REMEDY
        // 1 Above viewport       <= viewport     Down        Align top edge of child & viewport
        // 2 Above viewport       > viewport      Down        Align bottom edge of child & viewport
        // 3 Below viewport       <= viewport     Up          Align bottom edge of child & viewport
        // 4 Below viewport       > viewport      Up          Align top edge of child & viewport
        // 5 Entirely within viewport             NA          No scroll.
        // 6 Spanning viewport                    NA          No scroll.
        //
        // Note: "Above viewport" = childTop above viewportTop, childBottom above viewportBottom
        // "Below viewport" = childTop below viewportTop, childBottom below viewportBottom
        // These child thus may overlap with the viewport, but will scroll the same direction
        /*bool fAbove = DoubleUtil.LessThan(topChild, topView) && DoubleUtil.LessThan(bottomChild, bottomView);
        bool fBelow = DoubleUtil.GreaterThan(bottomChild, bottomView) && DoubleUtil.GreaterThan(topChild, topView);*/
        var fAbove = (topChild < topView) && (bottomChild < bottomView);
        var fBelow = (bottomChild > bottomView) && (topChild > topView);
        var fLarger = bottomChild - topChild > bottomView - topView;
        // Handle Cases: 1 & 4 above
        if ((fAbove && !fLarger)
            || (fBelow && fLarger))
        {
            return topChild;
        }
        // Handle Cases: 2 & 3 above
        if (fAbove || fBelow)
        {
            return bottomChild - (bottomView - topView);
        }
        // Handle cases: 5 & 6 above.
        return topView;
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void MouseWheelDown()
    {
    }
    /// <inheritdoc />
    public void MouseWheelLeft()
    {
        this.SetHorizontalOffset(this.HorizontalOffset - 16);
    }
    /// <inheritdoc />
    public void MouseWheelRight()
    {
        this.SetHorizontalOffset(this.HorizontalOffset + 16);
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void MouseWheelUp()
    {
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void LineDown()
    {
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void LineUp()
    {
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void PageDown()
    {
    }
    /// <inheritdoc />
    public void PageLeft()
    {
        this.SetHorizontalOffset(this.HorizontalOffset - this.ViewportWidth);
    }
    /// <inheritdoc />
    public void PageRight()
    {
        this.SetHorizontalOffset(this.HorizontalOffset + this.ViewportWidth);
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void PageUp()
    {
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public void SetVerticalOffset(double offset)
    {
    }
    /// <inheritdoc />
    public bool CanVerticallyScroll
    {
        get => false;
        set { }
    }
    /// <inheritdoc />
    public bool CanHorizontallyScroll
    {
        get => true;
        set { }
    }
    /// <summary>
    /// Not implemented
    /// </summary>
    public double ExtentHeight => 0.0;
    /// <summary>
    /// Not implemented
    /// </summary>
    public double VerticalOffset => 0.0;
    /// <summary>
    /// Not implemented
    /// </summary>
    public double ViewportHeight => 0.0;
    // Gets scroll data info (lazily created)
    private ScrollData ScrollData => this.scrollData ?? (this.scrollData = new ScrollData());
    // Scroll data info
    private ScrollData? scrollData;
    // Tabs are never shrunk below this width during measure.
    private const double MinimumRegularTabWidth = 30D;
    // Validates input offset
    private static double ValidateInputOffset(double offset, string parameterName)
    {
        if (double.IsNaN(offset))
        {
            throw new ArgumentOutOfRangeException(parameterName);
        }
        return Math.Max(0.0, offset);
    }
    // Verifies scrolling data using the passed viewport and extent as newly computed values.
    // Checks the X/Y offset and coerces them into the range [0, Extent - ViewportSize]
    // If extent, viewport, or the newly coerced offsets are different than the existing offset,
    // caches are updated and InvalidateScrollInfo() is called.
    private void VerifyScrollData(double viewportWidth, double extentWidth)
    {
        var isValid = true;
        if (double.IsInfinity(viewportWidth))
        {
            viewportWidth = extentWidth;
        }
        var offsetX = CoerceOffset(this.ScrollData.OffsetX, extentWidth, viewportWidth);
        isValid &= DoubleUtil.AreClose(viewportWidth, this.ScrollData.ViewportWidth);
        isValid &= DoubleUtil.AreClose(extentWidth, this.ScrollData.ExtentWidth);
        isValid &= DoubleUtil.AreClose(this.ScrollData.OffsetX, offsetX);
        this.ScrollData.ViewportWidth = viewportWidth;
        // Prevent flickering by only using extentWidth if it's at least 2 larger than viewportWidth
        if (viewportWidth + 2 < extentWidth)
        {
            this.ScrollData.ExtentWidth = extentWidth;
        }
        else
        {
            // Otherwise we show the scrollbar if all tabs are at their minimum width or smaller
            // but do this early (if extent + 2 is equal or larger than the viewport, or they are equal)
            if (extentWidth + 2 >= viewportWidth
                || DoubleUtil.AreClose(extentWidth, viewportWidth))
            {
                var visibleTabs = this.InternalChildren.Cast<RibbonTabItem>().Where(item => item.Visibility != Visibility.Collapsed).ToList();
                var newExtentWidth = viewportWidth;
                if (visibleTabs.Any()
                    && visibleTabs.All(item => DoubleUtil.AreClose(item.DesiredSize.Width, MinimumRegularTabWidth) || item.DesiredSize.Width < MinimumRegularTabWidth))
                {
                    if (DoubleUtil.AreClose(newExtentWidth, viewportWidth))
                    {
                        newExtentWidth += 1;
                    }
                    this.ScrollData.ExtentWidth = newExtentWidth;
                }
                else
                {
                    this.ScrollData.ExtentWidth = viewportWidth;
                }
            }
            else
            {
                this.ScrollData.ExtentWidth = viewportWidth;
            }
        }
        this.ScrollData.OffsetX = offsetX;
        if (isValid == false)
        {
            this.ScrollOwner?.InvalidateScrollInfo();
        }
    }
    // Returns an offset coerced into the [0, Extent - Viewport] range.
    private static double CoerceOffset(double offset, double extent, double viewport)
    {
        if (offset > extent - viewport)
        {
            offset = extent - viewport;
        }
        if (offset < 0)
        {
            offset = 0;
        }
        return offset;
    }
    #endregion
}
#region ScrollData
/// <summary>
/// Helper class to hold scrolling data.
/// This class exists to reduce working set when SCP is delegating to another implementation of ISI.
/// Standard "extra pointer always for less data sometimes" cache savings model:
/// </summary>
internal class ScrollData
{
    /// <summary>
    /// Scroll viewer
    /// </summary>
    internal ScrollViewer? ScrollOwner { get; set; }
    /// <summary>
    /// Horizontal scroll offset
    /// </summary>
    internal double OffsetX { get; set; }
    /// <summary>
    /// ViewportSize is computed from our FinalSize, but may be in different units.
    /// </summary>
    internal double ViewportWidth { get; set; }
    /// <summary>
    /// Extent is the total size of our content.
    /// </summary>
    internal double ExtentWidth { get; set; }
}
#endregion ScrollData
}
| |
using System;
using System.Threading;
using LiteNetLib;
using LiteNetLib.Utils;
namespace LibSample
{
class EchoMessagesTest
{
private static int _messagesReceivedCount = 0;
// Client-side listener: on connect it sends five numbered messages over each
// of the four delivery channels (plus one oversized payload to exercise
// fragmentation) and logs everything echoed back by the server.
private class ClientListener : INetEventListener
{
    public void OnPeerConnected(NetPeer peer)
    {
        Console.WriteLine("[Client] connected to: {0}:{1}", peer.EndPoint.Host, peer.EndPoint.Port);
        var writer = new NetDataWriter();
        // The channel index doubles as the "type" value put into each message,
        // matching the original send order exactly.
        var channels = new[]
        {
            SendOptions.ReliableUnordered,
            SendOptions.ReliableOrdered,
            SendOptions.Sequenced,
            SendOptions.Unreliable
        };
        for (int i = 0; i < 5; i++)
        {
            for (int type = 0; type < channels.Length; type++)
            {
                writer.Reset();
                writer.Put(type);
                writer.Put(i);
                peer.Send(writer, channels[type]);
            }
        }
        //And test fragment
        var fragmentPayload = new byte[13218];
        fragmentPayload[0] = 192;
        fragmentPayload[13217] = 31;
        peer.Send(fragmentPayload, SendOptions.ReliableOrdered);
    }
    public void OnPeerDisconnected(NetPeer peer, DisconnectInfo disconnectInfo)
    {
        Console.WriteLine("[Client] disconnected: " + disconnectInfo.Reason);
    }
    public void OnNetworkError(NetEndPoint endPoint, int socketErrorCode)
    {
        Console.WriteLine("[Client] error! " + socketErrorCode);
    }
    public void OnNetworkReceive(NetPeer peer, NetDataReader reader)
    {
        // 13218 bytes is the exact size of the fragment-test payload.
        if (reader.AvailableBytes == 13218)
        {
            Console.WriteLine("[{0}] TestFrag: {1}, {2}", peer.NetManager.LocalPort, reader.Data[0], reader.Data[13217]);
            return;
        }
        int type = reader.GetInt();
        int num = reader.GetInt();
        _messagesReceivedCount++;
        Console.WriteLine("[{0}] CNT: {1}, TYPE: {2}, NUM: {3}", peer.NetManager.LocalPort, _messagesReceivedCount, type, num);
    }
    public void OnNetworkReceiveUnconnected(NetEndPoint remoteEndPoint, NetDataReader reader, UnconnectedMessageType messageType)
    {
    }
    public void OnNetworkLatencyUpdate(NetPeer peer, int latency)
    {
    }
}
private class ServerListener : INetEventListener
{
public NetManager Server;
public void OnPeerConnected(NetPeer peer)
{
Console.WriteLine("[Server] Peer connected: " + peer.EndPoint);
var peers = Server.GetPeers();
foreach (var netPeer in peers)
{
Console.WriteLine("ConnectedPeersList: id={0}, ep={1}", netPeer.ConnectId, netPeer.EndPoint);
}
}
public void OnPeerDisconnected(NetPeer peer, DisconnectInfo disconnectInfo)
{
Console.WriteLine("[Server] Peer disconnected: " + peer.EndPoint + ", reason: " + disconnectInfo.Reason);
}
public void OnNetworkError(NetEndPoint endPoint, int socketErrorCode)
{
Console.WriteLine("[Server] error: " + socketErrorCode);
}
public void OnNetworkReceive(NetPeer peer, NetDataReader reader)
{
//echo
peer.Send(reader.Data, SendOptions.ReliableUnordered);
//fragment log
if (reader.AvailableBytes == 13218)
{
Console.WriteLine("[Server] TestFrag: {0}, {1}", reader.Data[0], reader.Data[13217]);
}
}
public void OnNetworkReceiveUnconnected(NetEndPoint remoteEndPoint, NetDataReader reader, UnconnectedMessageType messageType)
{
Console.WriteLine("[Server] ReceiveUnconnected: {0}", reader.GetString(100));
}
public void OnNetworkLatencyUpdate(NetPeer peer, int latency)
{
}
}
private ClientListener _clientListener;
private ServerListener _serverListener;
public void Run()
{
//Server
_serverListener = new ServerListener();
NetManager server = new NetManager(_serverListener, 2, "myapp1");
//server.ReuseAddress = true;
if (!server.Start(9050))
{
Console.WriteLine("Server start failed");
Console.ReadKey();
return;
}
_serverListener.Server = server;
//Client
_clientListener = new ClientListener();
NetManager client1 = new NetManager(_clientListener, "myapp1");
//client1.SimulateLatency = true;
client1.SimulationMaxLatency = 1500;
client1.MergeEnabled = true;
if (!client1.Start())
{
Console.WriteLine("Client1 start failed");
return;
}
client1.Connect("127.0.0.1", 9050);
NetManager client2 = new NetManager(_clientListener, "myapp1");
//client2.SimulateLatency = true;
client2.SimulationMaxLatency = 1500;
client2.Start();
client2.Connect("::1", 9050);
while (!Console.KeyAvailable)
{
client1.PollEvents();
client2.PollEvents();
server.PollEvents();
Thread.Sleep(15);
}
client1.Stop();
client2.Stop();
server.Stop();
Console.ReadKey();
Console.WriteLine("ServStats:\n BytesReceived: {0}\n PacketsReceived: {1}\n BytesSent: {2}\n PacketsSent: {3}",
server.BytesReceived,
server.PacketsReceived,
server.BytesSent,
server.PacketsSent);
Console.WriteLine("Client1Stats:\n BytesReceived: {0}\n PacketsReceived: {1}\n BytesSent: {2}\n PacketsSent: {3}",
client1.BytesReceived,
client1.PacketsReceived,
client1.BytesSent,
client1.PacketsSent);
Console.WriteLine("Client2Stats:\n BytesReceived: {0}\n PacketsReceived: {1}\n BytesSent: {2}\n PacketsSent: {3}",
client2.BytesReceived,
client2.PacketsReceived,
client2.BytesSent,
client2.PacketsSent);
Console.WriteLine("Press any key to exit");
Console.ReadKey();
}
}
}
| |
// Copyright 2016 Mark Raasveldt
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.Drawing;
using System.Windows.Forms.DataVisualization;
using System.Windows.Forms.DataVisualization.Charting;
namespace Tibialyzer {
// Selects which combat statistic a DamageChart instance visualizes.
enum DamageChartType { DamageDealt, DamageTaken, HealingDone };
// Notification window that renders per-player damage/healing statistics,
// either as a pie chart or as a detail list, with an optional per-target
// filter. The chart type (dealt/taken/healing) is fixed at construction.
class DamageChart : NotificationForm {
    private TransparentChart mChart;
    // Per-player results currently being displayed; refreshed by UpdateDamage().
    public Dictionary<string, DamageResult> dps;
    // true = pie chart view, false = detail list view.
    public bool graph = true;
    private Label detailsButton;
    private ComboBox targetBox;
    // Creature-name filter ("all", "creature", or anything else = players only).
    public string filter = "";
    // Currently selected target creature, or null/"all" for everything.
    private string target = null;
    private Label chartTitle;
    private DamageChartType chartType;

    // Builds the form and picks a palette + title matching the chart type.
    public DamageChart(DamageChartType chartType) {
        InitializeComponent();
        this.chartType = chartType;
        switch (chartType) {
            case DamageChartType.DamageDealt:
                this.mChart.Palette = System.Windows.Forms.DataVisualization.Charting.ChartColorPalette.BrightPastel;
                chartTitle.Text = "Damage Dealt"; break;
            case DamageChartType.DamageTaken:
                this.mChart.Palette = System.Windows.Forms.DataVisualization.Charting.ChartColorPalette.Chocolate;
                chartTitle.Text = "Damage Taken"; break;
            case DamageChartType.HealingDone:
                this.mChart.Palette = System.Windows.Forms.DataVisualization.Charting.ChartColorPalette.EarthTones;
                chartTitle.Text = "Healing Done"; break;
        }
        this.Name = "Tibialyzer (Damage Form)";
        this.Text = "Tibialyzer (Damage Form)";
    }

    // Designer-generated control setup; do not edit by hand beyond layout.
    private void InitializeComponent() {
        System.Windows.Forms.DataVisualization.Charting.ChartArea chartArea1 = new System.Windows.Forms.DataVisualization.Charting.ChartArea();
        System.Windows.Forms.DataVisualization.Charting.Legend legend1 = new System.Windows.Forms.DataVisualization.Charting.Legend();
        System.Windows.Forms.DataVisualization.Charting.Series series1 = new System.Windows.Forms.DataVisualization.Charting.Series();
        System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(DamageChart));
        this.targetBox = new System.Windows.Forms.ComboBox();
        this.detailsButton = new System.Windows.Forms.Label();
        this.mChart = new Tibialyzer.TransparentChart();
        this.chartTitle = new System.Windows.Forms.Label();
        ((System.ComponentModel.ISupportInitialize)(this.mChart)).BeginInit();
        this.SuspendLayout();
        //
        // targetBox
        //
        this.targetBox.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
        this.targetBox.FormattingEnabled = true;
        this.targetBox.Location = new System.Drawing.Point(75, 4);
        this.targetBox.Name = "targetBox";
        this.targetBox.Size = new System.Drawing.Size(96, 21);
        this.targetBox.TabIndex = 4;
        this.targetBox.SelectedIndexChanged += new System.EventHandler(this.targetBox_SelectedIndexChanged);
        //
        // detailsButton
        //
        this.detailsButton.BackColor = System.Drawing.Color.Transparent;
        this.detailsButton.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
        this.detailsButton.Font = new System.Drawing.Font("Microsoft Sans Serif", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
        this.detailsButton.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(191)))), ((int)(((byte)(191)))), ((int)(((byte)(191)))));
        this.detailsButton.Location = new System.Drawing.Point(12, 291);
        this.detailsButton.Name = "detailsButton";
        this.detailsButton.Padding = new System.Windows.Forms.Padding(2);
        this.detailsButton.Size = new System.Drawing.Size(96, 21);
        this.detailsButton.TabIndex = 3;
        this.detailsButton.Text = "Details";
        this.detailsButton.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
        this.detailsButton.Click += new System.EventHandler(this.detailsButton_Click);
        //
        // mChart
        //
        this.mChart.BackColor = System.Drawing.Color.Transparent;
        chartArea1.BackColor = System.Drawing.Color.Transparent;
        chartArea1.Name = "ChartArea1";
        this.mChart.ChartAreas.Add(chartArea1);
        legend1.Alignment = System.Drawing.StringAlignment.Far;
        legend1.BackColor = System.Drawing.Color.Transparent;
        legend1.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(191)))), ((int)(((byte)(191)))), ((int)(((byte)(191)))));
        legend1.Name = "Legend1";
        this.mChart.Legends.Add(legend1);
        this.mChart.Location = new System.Drawing.Point(0, -1);
        this.mChart.Name = "mChart";
        this.mChart.Palette = System.Windows.Forms.DataVisualization.Charting.ChartColorPalette.EarthTones;
        series1.BorderColor = System.Drawing.Color.Transparent;
        series1.ChartArea = "ChartArea1";
        series1.ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.Pie;
        series1.Font = new System.Drawing.Font("Microsoft Sans Serif", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
        series1.LabelBackColor = System.Drawing.Color.Transparent;
        series1.LabelBorderColor = System.Drawing.Color.FromArgb(((int)(((byte)(191)))), ((int)(((byte)(191)))), ((int)(((byte)(191)))));
        series1.LabelBorderDashStyle = System.Windows.Forms.DataVisualization.Charting.ChartDashStyle.NotSet;
        series1.LabelBorderWidth = 0;
        series1.LabelForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(32)))), ((int)(((byte)(32)))), ((int)(((byte)(32)))));
        series1.Legend = "Legend1";
        series1.Name = "Series1";
        this.mChart.Series.Add(series1);
        this.mChart.Size = new System.Drawing.Size(448, 320);
        this.mChart.TabIndex = 0;
        this.mChart.Text = "Damage Chart";
        //
        // chartTitle
        //
        this.chartTitle.AutoSize = true;
        this.chartTitle.BackColor = System.Drawing.Color.Transparent;
        this.chartTitle.Font = new System.Drawing.Font("Microsoft Sans Serif", 9.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
        this.chartTitle.ForeColor = System.Drawing.Color.FromArgb(((int)(((byte)(191)))), ((int)(((byte)(191)))), ((int)(((byte)(191)))));
        this.chartTitle.Location = new System.Drawing.Point(8, 30);
        this.chartTitle.Name = "chartTitle";
        this.chartTitle.Size = new System.Drawing.Size(32, 16);
        this.chartTitle.TabIndex = 15;
        this.chartTitle.Text = "List";
        //
        // DamageChart
        //
        this.ClientSize = new System.Drawing.Size(450, 321);
        this.Controls.Add(this.chartTitle);
        this.Controls.Add(this.targetBox);
        this.Controls.Add(this.detailsButton);
        this.Controls.Add(this.mChart);
        this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.None;
        this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
        this.Name = "DamageChart";
        this.Text = "Damage Chart";
        ((System.ComponentModel.ISupportInitialize)(this.mChart)).EndInit();
        this.ResumeLayout(false);
        this.PerformLayout();
    }

    // Controls created for the list view; disposed and recreated on refresh.
    private List<Control> controlList = new List<Control>();

    // Rebuilds either the pie chart or the detail list from damageDealt.
    public void refreshDamageChart() {
        foreach (Control c in controlList) {
            c.Dispose();
            Controls.Remove(c);
        }
        controlList.Clear();
        if (graph) {
            this.mChart.Visible = true;
            this.mChart.Series[0].Points.Clear();
            // Cap the number of pie slices; 0 or negative means unlimited.
            int max = SettingsManager.getSettingInt("MaxDamageChartPlayers");
            for (int i = 0; i < damageDealt.Count; i++) {
                if (max > 0 && i >= max) break;
                DamageObject p = damageDealt[i];
                double percentage = p.percentage;
                DataPoint point = new DataPoint();
                point.XValue = percentage;
                point.YValues = new double[1];
                point.YValues[0] = p.totalDamage;
                point.AxisLabel = p.name;
                point.LegendText = p.name;
                point.Label = Math.Round(percentage, 1).ToString() + "%";
                this.mChart.Series[0].Points.Add(point);
            }
            // Apply the palette first so each DamageObject can record the
            // color actually assigned to its slice.
            this.mChart.ApplyPaletteColors();
            for (int i = 0; i < damageDealt.Count; i++) {
                if (max > 0 && i >= max) break;
                DamageObject p = damageDealt[i];
                p.color = this.mChart.Series[0].Points[i].Color;
            }
            this.Size = new Size(GetWidth(), (int)(GetWidth() * 0.9));
        } else {
            this.mChart.Series[0].Points.Clear();
            this.mChart.Visible = false;
            int newWidth = 0;
            int y = UIManager.DisplayCreatureAttributeList(Controls, damageDealt.ToList<TibiaObject>(), 5, 25, out newWidth, null, controlList, 0, 20, null, null, null, sortFunction, sortedHeader, desc, null, null, false, this.Size.Width - 20);
            this.Size = new Size(GetWidth(), Math.Max(startY, 25 + y));
        }
        refreshTimer();
    }

    // Pulls the latest scan results for this chart's type, recomputes the
    // per-player breakdown and target list, and refreshes the UI on the UI
    // thread. Exceptions are deliberately swallowed: this runs off a
    // background scan and a transient failure should not crash the app.
    public void UpdateDamage() {
        try {
            List<Color> colorList = null;
            if (chartType == DamageChartType.DamageDealt) {
                this.dps = ScanningManager.lastResults.DamagePerSecond;
                colorList = Constants.DamageChartColors;
            } else if (chartType == DamageChartType.HealingDone) {
                this.dps = ScanningManager.lastResults.HealingPerSecond;
                colorList = Constants.HealingChartColors;
            } else if (chartType == DamageChartType.DamageTaken) {
                this.dps = ScanningManager.lastResults.DamageTakenPerSecond;
                colorList = Constants.DamageTakenChartColors;
            }
            var res = GenerateDamageInformation(dps, filter, target);
            damageDealt = res.Item2;
            // Cycle through the palette when there are more players than colors.
            // NOTE(review): assumes the Constants color lists are non-empty.
            for (int i = 0; i < damageDealt.Count; i++) {
                damageDealt[i].color = colorList[i % colorList.Count];
            }
            this.Invoke((MethodInvoker)delegate {
                try {
                    // Suppress the SelectedIndexChanged handler while the
                    // target list is rebuilt, preserving the prior selection.
                    preventTargetUpdate = true;
                    string selected = targetBox.SelectedIndex < 0 ? "All" : targetBox.Items[targetBox.SelectedIndex].ToString();
                    targetBox.Items.Clear();
                    targetBox.Items.Add("All");
                    int index = 0;
                    foreach (var kvp in res.Item1.OrderByDescending(o => o.Value)) {
                        string target = kvp.Key;
                        if (target.Equals(selected, StringComparison.InvariantCultureIgnoreCase)) {
                            index = targetBox.Items.Count;
                        }
                        targetBox.Items.Add(target);
                    }
                    targetBox.SelectedIndex = index;
                    preventTargetUpdate = false;
                    refreshDamageChart();
                    RefreshForm();
                } catch {
                }
            });
        } catch {
        }
    }

    private void refreshTargets() {
    }

    // Sort state for the detail-list view.
    private string sortedHeader = "Total Damage";
    private bool desc = true;

    // Column-header click handler: toggles direction on the same column,
    // otherwise switches the sort column (ascending first).
    public void sortFunction(object sender, EventArgs e) {
        if (sortedHeader == (sender as Control).Name) {
            desc = !desc;
        } else {
            sortedHeader = (sender as Control).Name;
            desc = false;
        }
        this.SuspendForm();
        refreshDamageChart();
        RefreshForm();
        this.ResumeForm();
    }

    // Initial form dimensions, captured in LoadForm.
    int startX, startY;
    private List<DamageObject> damageDealt = new List<DamageObject>();

    // Toggles between the pie-chart and detail-list views.
    private void detailsButton_Click(object sender, EventArgs e) {
        graph = !graph;
        if (graph) {
            detailsButton.Text = "Details";
        } else {
            detailsButton.Text = "Graph";
        }
        this.SuspendForm();
        refreshDamageChart();
        RefreshForm();
        this.ResumeForm();
    }

    // Converts raw per-player results into (a) the aggregate damage per
    // target creature and (b) the sorted per-player damage list, applying the
    // creature filter and optional target restriction. Falls back to sample
    // data when nothing matches so the chart is never empty.
    public static Tuple<Dictionary<string, int>, List<DamageObject>> GenerateDamageInformation(Dictionary<string, DamageResult> dps, string filter, string target = null) {
        List<DamageObject> damageDealt = new List<DamageObject>();
        Dictionary<string, int> targets = new Dictionary<string, int>();
        foreach (KeyValuePair<string, DamageResult> kvp in dps) {
            string name = kvp.Key.Replace(".", "");
            // Strip a leading indefinite article from creature names.
            // StartsWith (instead of the previous unguarded Substring(0, n)
            // comparisons) cannot throw ArgumentOutOfRangeException when the
            // name is shorter than the prefix.
            if (name.StartsWith("a ", StringComparison.InvariantCultureIgnoreCase)) name = name.Substring(2);
            if (name.StartsWith("an ", StringComparison.InvariantCultureIgnoreCase)) name = name.Substring(3);
            Creature cr = StorageManager.getCreature(name);
            // filter semantics: "creature" keeps only creatures, "all" keeps
            // everything, anything else keeps only non-creatures (players).
            if (filter != "all" && filter != "creature" && cr != null) continue;
            if (filter == "creature" && cr == null) continue;
            foreach (var creatureDamage in kvp.Value.damagePerCreature) {
                if (!targets.ContainsKey(creatureDamage.Key)) {
                    targets.Add(creatureDamage.Key, creatureDamage.Value);
                } else {
                    targets[creatureDamage.Key] += creatureDamage.Value;
                }
            }
            int totalDamage = kvp.Value.totalDamage;
            // When a specific target is selected, count only the damage dealt
            // to that creature.
            if (target != null && !target.Equals("all", StringComparison.InvariantCultureIgnoreCase)) {
                totalDamage = 0;
                foreach (var v in kvp.Value.damagePerCreature) {
                    if (v.Key.Equals(target, StringComparison.InvariantCultureIgnoreCase)) {
                        totalDamage = v.Value;
                        break;
                    }
                }
            }
            if (totalDamage > 0) {
                damageDealt.Add(new DamageObject() { name = name, totalDamage = totalDamage, dps = kvp.Value.damagePerSecond });
            }
        }
        // Placeholder entries so the chart renders something when empty.
        if (damageDealt.Count == 0) {
            damageDealt.Add(new DamageObject() { name = "Mytherin", dps = 50, totalDamage = 501 });
            damageDealt.Add(new DamageObject() { name = "Amel Cyrom", dps = 50, totalDamage = 250 });
            damageDealt.Add(new DamageObject() { name = "Martincc", dps = 50, totalDamage = 499 });
        }
        double total_damage = 0;
        foreach (DamageObject player in damageDealt) {
            total_damage = total_damage + player.totalDamage;
        }
        foreach (DamageObject p in damageDealt) {
            p.percentage = p.totalDamage / total_damage * 100;
        }
        damageDealt = damageDealt.OrderByDescending(o => o.totalDamage).ToList();
        return new Tuple<Dictionary<string, int>, List<DamageObject>>(targets, damageDealt);
    }

    // First-time initialization: computes the data, populates the target
    // dropdown, and performs the initial render.
    public override void LoadForm() {
        this.SuspendForm();
        NotificationInitialize();
        UnregisterControl(detailsButton);
        UnregisterControl(targetBox);
        startX = this.Size.Width;
        startY = this.Size.Height;
        var res = GenerateDamageInformation(dps, filter, target);
        damageDealt = res.Item2;
        preventTargetUpdate = true;
        targetBox.Items.Clear();
        targetBox.Items.Add("All");
        foreach (var kvp in res.Item1.OrderByDescending(o => o.Value)) {
            string target = kvp.Key;
            targetBox.Items.Add(target);
        }
        targetBox.SelectedIndex = 0;
        preventTargetUpdate = false;
        refreshDamageChart();
        this.ResumeForm();
        NotificationFinalize();
        RefreshForm();
    }

    // Settings key for this chart variant.
    public override string FormName() {
        switch (chartType) {
            case DamageChartType.DamageDealt:
                return "DamageChart";
            case DamageChartType.DamageTaken:
                return "DamageTakenChart";
            case DamageChartType.HealingDone:
                return "HealingChart";
        }
        return "UnknownChart";
    }

    public override int MinWidth() {
        return 200;
    }

    public override int MaxWidth() {
        return 800;
    }

    public override int WidthInterval() {
        return 70;
    }

    // Guards targetBox_SelectedIndexChanged against programmatic updates.
    private bool preventTargetUpdate = false;

    // User picked a new target: recompute the chart for it.
    private void targetBox_SelectedIndexChanged(object sender, EventArgs e) {
        if (preventTargetUpdate) return;
        string newTarget;
        if (targetBox.SelectedIndex >= 0) {
            newTarget = targetBox.Items[targetBox.SelectedIndex].ToString();
        } else {
            newTarget = null;
        }
        if (newTarget != target) {
            target = newTarget;
            preventTargetUpdate = true;
            UpdateDamage();
            preventTargetUpdate = false;
        }
    }

    // Resizes the window and repositions the toggle button for the current view.
    public override void RefreshForm() {
        this.SuspendForm();
        if (graph) {
            this.Size = new Size(GetWidth(), (int)(GetWidth() * 0.9));
        } else {
            this.Size = new Size(GetWidth(), 321);
        }
        mChart.Size = new Size(this.Size.Width, this.Size.Height);
        this.detailsButton.Location = new Point(3, this.Size.Height - detailsButton.Height - 5);
        this.refreshDamageChart();
        this.ResumeForm();
    }
}
}
| |
namespace Farmhand.Installers.Patcher.Cecil
{
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.IO;
using System.Linq;
using System.Reflection;
using Farmhand.Installers.Patcher.Injection;
using Mono.Cecil;
using Mono.Cecil.Cil;
using Mono.Cecil.Mdb;
using Mono.Cecil.Pdb;
using Mono.Collections.Generic;
/// <summary>
/// A utility class which helps with injecting via Cecil.
/// </summary>
[Export(typeof(IInjectionContext))]
[PartCreationPolicy(CreationPolicy.Shared)]
public class CecilContext : IInjectionContext
{
    internal AssemblyDefinition AssemblyDefinition { get; set; }

    #region IInjectionContext Members

    /// <summary>
    ///     Gets the loaded assemblies.
    /// </summary>
    public IEnumerable<Assembly> LoadedAssemblies { get; } = new List<Assembly>();

    /// <summary>
    ///     Loads an assembly.
    /// </summary>
    /// <param name="file">
    ///     The path of the assembly to load.
    /// </param>
    public void LoadAssembly(string file)
    {
        ((List<Assembly>)this.LoadedAssemblies).Add(Assembly.LoadFrom(file));
    }

    /// <summary>
    ///     Sets the primary assembly.
    /// </summary>
    /// <param name="file">
    ///     The path of the assembly.
    /// </param>
    /// <param name="loadDebugInformation">
    ///     Whether debug symbols should also be loaded.
    /// </param>
    public void SetPrimaryAssembly(string file, bool loadDebugInformation)
    {
        // Mono uses .mdb symbol files; Microsoft runtimes use .pdb.
        var mono = Type.GetType("Mono.Runtime") != null;
        ISymbolReaderProvider readerProvider;
        if (mono)
        {
            readerProvider = new MdbReaderProvider();
        }
        else
        {
            readerProvider = new PdbReaderProvider();
        }

        // BUG FIX: the directory and file name were previously concatenated
        // without a path separator, producing a path that never exists and
        // silently skipping symbol loading. Path.Combine inserts it correctly.
        var pdbPath = Path.Combine(
            Path.GetDirectoryName(file) ?? string.Empty,
            Path.GetFileNameWithoutExtension(file) + $".{(mono ? "m" : "p")}db");
        if (loadDebugInformation && File.Exists(pdbPath))
        {
            var readerParameters = new ReaderParameters
            {
                SymbolReaderProvider = readerProvider,
                ReadSymbols = true
            };
            this.AssemblyDefinition = AssemblyDefinition.ReadAssembly(file, readerParameters);
        }
        else
        {
            this.AssemblyDefinition = AssemblyDefinition.ReadAssembly(file);
        }
    }

    /// <summary>
    ///     Writes the modified assembly to disk.
    /// </summary>
    /// <param name="file">
    ///     The output file.
    /// </param>
    /// <param name="writePdb">
    ///     Whether an updated PDB should also be written.
    /// </param>
    public void WriteAssembly(string file, bool writePdb = false)
    {
        // Emit symbols in the format matching the current runtime (mdb/pdb).
        var mono = Type.GetType("Mono.Runtime") != null;
        ISymbolWriterProvider writerProvider;
        if (mono)
        {
            writerProvider = new MdbWriterProvider();
        }
        else
        {
            writerProvider = new PdbWriterProvider();
        }

        if (writePdb)
        {
            var writerParameters = new WriterParameters
            {
                SymbolWriterProvider = writerProvider,
                WriteSymbols = true
            };
            this.AssemblyDefinition.Write(file, writerParameters);
        }
        else
        {
            this.AssemblyDefinition.Write(file);
        }
    }

    #endregion

    /// <summary>
    ///     Gets the IL Processor for a specific method.
    /// </summary>
    /// <param name="type">
    ///     The type containing the method.
    /// </param>
    /// <param name="method">
    ///     The method to get the processor for (simple name or full name).
    /// </param>
    /// <returns>
    ///     The <see cref="ILProcessor" /> for the specified method, or null if
    ///     the method is not found or has no body.
    /// </returns>
    /// <exception cref="Exception">
    ///     Thrown if AssemblyDefinition is null, or if either the type or the
    ///     method is not specified.
    /// </exception>
    public ILProcessor GetMethodIlProcessor(string type, string method)
    {
        if (this.AssemblyDefinition == null)
        {
            throw new Exception("ERROR Assembly not properly read. Cannot parse");
        }

        if (string.IsNullOrWhiteSpace(type) || string.IsNullOrWhiteSpace(method))
        {
            throw new Exception("Both type and method must be set");
        }

        ILProcessor processor = null;
        var typeDef = this.GetTypeDefinition(type);
        // Match on simple name first, then fall back to the full name.
        var methodDef = typeDef?.Methods.FirstOrDefault(m => m.Name == method)
                        ?? typeDef?.Methods.FirstOrDefault(m => m.FullName == method);
        if (methodDef != null && methodDef.HasBody)
        {
            processor = methodDef.Body.GetILProcessor();
        }

        return processor;
    }

    /// <summary>
    ///     Gets a Cecil type definition.
    /// </summary>
    /// <param name="type">
    ///     The full name of the type to get the definition for.
    /// </param>
    /// <param name="toCheck">
    ///     A collection of types to search for the provided type. (Defaults to
    ///     null, meaning the main module's top-level types.)
    /// </param>
    /// <returns>
    ///     The <see cref="TypeDefinition" /> for the provided type, or null if
    ///     it is not found.
    /// </returns>
    /// <exception cref="Exception">
    ///     Thrown if AssemblyDefinition is null, or a type is not specified.
    /// </exception>
    public TypeDefinition GetTypeDefinition(string type, Collection<TypeDefinition> toCheck = null)
    {
        if (this.AssemblyDefinition == null)
        {
            throw new Exception("ERROR Assembly not properly read. Cannot parse");
        }

        if (string.IsNullOrWhiteSpace(type))
        {
            throw new Exception("Both type and method must be set");
        }

        // NOTE(review): this import looks like a no-op side effect; presumably
        // it forces module/type-system initialization — confirm before removing.
        this.AssemblyDefinition.MainModule.Import(typeof(void));
        if (toCheck == null)
        {
            toCheck = this.AssemblyDefinition.MainModule.Types;
        }

        var typeDef = default(TypeDefinition);
        foreach (var def in toCheck)
        {
            if (def.FullName == type)
            {
                typeDef = def;
                break;
            }

            // Recurse into nested types when the requested name extends this
            // type's full name (nested names are prefixed by the outer name).
            if (type.StartsWith(def.FullName, StringComparison.Ordinal) && def.HasNestedTypes)
            {
                typeDef = this.GetTypeDefinition(type, def.NestedTypes);
                if (typeDef != null)
                {
                    break;
                }
            }
        }

        return typeDef;
    }

    /// <summary>
    ///     Gets the types in the loaded AssemblyDefinition.
    /// </summary>
    /// <returns>
    ///     An <see cref="IEnumerable{TypeDefinition}" /> of type definitions.
    /// </returns>
    public IEnumerable<TypeDefinition> GetTypes()
    {
        return this.AssemblyDefinition.MainModule.Types;
    }

    /// <summary>
    ///     Gets all methods in the loaded AssemblyDefinition.
    /// </summary>
    /// <returns>
    ///     An <see cref="IEnumerable{MethodDefinition}" /> of method definitions.
    /// </returns>
    public IEnumerable<MethodDefinition> GetMethods()
    {
        return this.GetTypes().SelectMany(n => n.Methods);
    }

    /// <summary>
    ///     Imports a <see cref="TypeReference" /> for the provided type from the
    ///     AssemblyDefinition.
    /// </summary>
    /// <param name="type">
    ///     The type to get a reference to.
    /// </param>
    /// <returns>
    ///     The <see cref="TypeReference" /> of the provided type.
    /// </returns>
    /// <exception cref="Exception">
    ///     Thrown if <paramref name="type" /> is null.
    /// </exception>
    public TypeReference GetTypeReference(Type type)
    {
        if (type == null)
        {
            throw new Exception("Both type must be set");
        }

        return this.AssemblyDefinition.MainModule.Import(type);
    }

    /// <summary>
    ///     Gets a method definition.
    /// </summary>
    /// <param name="type">
    ///     The type containing the method.
    /// </param>
    /// <param name="method">
    ///     The method to get a definition for (simple name or full name).
    /// </param>
    /// <param name="selector">
    ///     A selector to filter certain methods. (Defaults to null)
    /// </param>
    /// <returns>
    ///     A <see cref="MethodDefinition" /> for the specified type, or null.
    /// </returns>
    public MethodDefinition GetMethodDefinition(
        string type,
        string method,
        Func<MethodDefinition, bool> selector = null)
    {
        MethodDefinition methodDef = null;
        var typeDef = this.GetTypeDefinition(type);

        if (typeDef != null)
        {
            // BUG FIX: the selector branch previously tested
            // "m.Name == method || m.Name == method" (a duplicated condition),
            // so a full-name match was never considered when a selector was
            // supplied. Both branches now accept simple name or full name.
            methodDef = selector == null
                            ? (typeDef.Methods.FirstOrDefault(m => m.Name == method)
                               ?? typeDef.Methods.FirstOrDefault(m => m.FullName == method))
                            : typeDef.Methods.Where(m => m.Name == method || m.FullName == method)
                                     .FirstOrDefault(selector);
        }

        return methodDef;
    }

    /// <summary>
    ///     Gets a method definition.
    /// </summary>
    /// <param name="type">
    ///     The type containing the method.
    /// </param>
    /// <param name="method">
    ///     The full name of the method to get a definition for.
    /// </param>
    /// <param name="selector">
    ///     A selector to filter certain methods. (Defaults to null)
    /// </param>
    /// <returns>
    ///     A <see cref="MethodDefinition" /> for the specified type, or null.
    /// </returns>
    public MethodDefinition GetMethodDefinitionFullName(
        string type,
        string method,
        Func<MethodDefinition, bool> selector = null)
    {
        MethodDefinition methodDef = null;
        var typeDef = this.GetTypeDefinition(type);

        if (typeDef != null)
        {
            methodDef = selector == null
                            ? typeDef.Methods.FirstOrDefault(m => m.FullName == method)
                            : typeDef.Methods.Where(m => m.FullName == method).FirstOrDefault(selector);
        }

        return methodDef;
    }

    /// <summary>
    ///     Gets a reference to the first constructor of a type.
    /// </summary>
    /// <param name="typeDefinition">
    ///     The <see cref="TypeDefinition" /> of the type containing the constructor.
    /// </param>
    /// <param name="selector">
    ///     A selector to filter certain constructors. (Defaults to null)
    /// </param>
    /// <returns>
    ///     The <see cref="MethodReference" /> for the constructor, or null.
    /// </returns>
    public MethodReference GetConstructorReference(
        TypeDefinition typeDefinition,
        Func<MethodDefinition, bool> selector = null)
    {
        return selector == null
                   ? typeDefinition.Methods.FirstOrDefault(m => m.IsConstructor)
                   : typeDefinition.Methods.Where(m => m.IsConstructor).FirstOrDefault(selector);
    }

    /// <summary>
    ///     Gets a reference to a named constructor of a type, imported into the
    ///     main module.
    /// </summary>
    /// <param name="typeDefinition">
    ///     The <see cref="TypeDefinition" /> of the type containing the constructor.
    /// </param>
    /// <param name="method">
    ///     The name of the constructor to get.
    /// </param>
    /// <returns>
    ///     The <see cref="MethodReference" /> for the constructor.
    /// </returns>
    public MethodReference GetConstructorReference(TypeDefinition typeDefinition, string method)
    {
        var methodDefinition = typeDefinition.Methods.FirstOrDefault(m => m.Name == method);
        return this.AssemblyDefinition.MainModule.Import(methodDefinition);
    }

    /// <summary>
    ///     Gets a <see cref="PropertyDefinition" /> for the specified property.
    /// </summary>
    /// <param name="type">
    ///     The type containing the property.
    /// </param>
    /// <param name="property">
    ///     The name of the property.
    /// </param>
    /// <returns>
    ///     The <see cref="PropertyDefinition" /> for the specified property, or null.
    /// </returns>
    public PropertyDefinition GetPropertyDefinition(string type, string property)
    {
        PropertyDefinition propertyDefinition = null;
        var typeDef = this.GetTypeDefinition(type);

        if (typeDef != null)
        {
            propertyDefinition = typeDef.Properties.FirstOrDefault(m => m.Name == property);
        }

        return propertyDefinition;
    }

    /// <summary>
    ///     Gets a <see cref="FieldDefinition" /> for the specified field.
    /// </summary>
    /// <param name="type">
    ///     The type containing the field.
    /// </param>
    /// <param name="field">
    ///     The name of the field.
    /// </param>
    /// <returns>
    ///     The <see cref="FieldDefinition" /> for the specified field, or null.
    /// </returns>
    public FieldDefinition GetFieldDefinition(string type, string field)
    {
        FieldDefinition fieldDefinition = null;
        var typeDef = this.GetTypeDefinition(type);

        if (typeDef != null)
        {
            fieldDefinition = typeDef.Fields.FirstOrDefault(m => m.Name == field);
        }

        return fieldDefinition;
    }

    /// <summary>
    ///     Imports a method into the AssemblyDefinition.
    /// </summary>
    /// <param name="method">
    ///     The <see cref="MethodBase" /> of the method to import.
    /// </param>
    /// <returns>
    ///     The <see cref="MethodReference" /> of the imported method, or null
    ///     if <paramref name="method" /> is null.
    /// </returns>
    /// <exception cref="Exception">
    ///     Thrown if the AssemblyDefinition is null.
    /// </exception>
    public MethodReference ImportMethod(MethodBase method)
    {
        if (this.AssemblyDefinition == null)
        {
            throw new Exception("ERROR Assembly not properly read. Cannot parse");
        }

        MethodReference reference = null;
        if (method != null)
        {
            reference = this.AssemblyDefinition.MainModule.Import(method);
        }

        return reference;
    }

    /// <summary>
    ///     Imports a method into the AssemblyDefinition.
    /// </summary>
    /// <param name="method">
    ///     The <see cref="MethodReference" /> of the method to import.
    /// </param>
    /// <returns>
    ///     The <see cref="MethodReference" /> of the imported method, or null
    ///     if <paramref name="method" /> is null.
    /// </returns>
    /// <exception cref="Exception">
    ///     Thrown if the AssemblyDefinition is null.
    /// </exception>
    public MethodReference ImportMethod(MethodReference method)
    {
        if (this.AssemblyDefinition == null)
        {
            throw new Exception("ERROR Assembly not properly read. Cannot parse");
        }

        MethodReference reference = null;
        if (method != null)
        {
            reference = this.AssemblyDefinition.MainModule.Import(method);
        }

        return reference;
    }

    /// <summary>
    ///     Inserts a new type into the AssemblyDefinition.
    /// </summary>
    /// <param name="type">
    ///     The <see cref="TypeDefinition" /> to insert. Only its methods are
    ///     copied onto the newly created type.
    /// </param>
    public void InsertType(TypeDefinition type)
    {
        var sdvType = new TypeDefinition(type.Namespace, type.Name, type.Attributes);
        foreach (var md in type.Methods)
        {
            sdvType.Methods.Add(md);
        }

        this.AssemblyDefinition.MainModule.Types.Add(sdvType);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.IO.Tests
{
public abstract class FileStream_ReadWrite_Span : FileSystemTest
{
protected abstract FileStream CreateFileStream(string path, FileMode mode, FileAccess access = FileAccess.ReadWrite);
[Fact]
public void DisposedStream_ReadWrite_Throws()
{
    // Both span overloads must reject a disposed stream.
    FileStream stream = CreateFileStream(GetTestFilePath(), FileMode.Create);
    stream.Dispose();
    Assert.Throws<ObjectDisposedException>(() => stream.Read(new Span<byte>(new byte[1])));
    Assert.Throws<ObjectDisposedException>(() => stream.Write(new Span<byte>(new byte[1])));
}
[Fact]
public void EmptyFile_Read_Succeeds()
{
    using (var stream = CreateFileStream(GetTestFilePath(), FileMode.Create))
    {
        // Fill the destination with a recognizable pattern so any write into
        // it by a zero-length read would be detected.
        var scratch = (byte[])TestBuffer.Clone();

        // An empty span reads nothing.
        Assert.Equal(0, stream.Read(Span<byte>.Empty));

        // Single byte at the start.
        Assert.Equal(0, stream.Read(new Span<byte>(scratch, 0, 1)));
        Assert.Equal(TestBuffer, scratch);

        // The whole buffer.
        Assert.Equal(0, stream.Read(new Span<byte>(scratch, 0, scratch.Length)));
        Assert.Equal(TestBuffer, scratch);

        // Single byte at the end.
        Assert.Equal(0, stream.Read(new Span<byte>(scratch, scratch.Length - 1, 1)));
        Assert.Equal(TestBuffer, scratch);

        // Second half of the buffer.
        Assert.Equal(0, stream.Read(new Span<byte>(scratch, scratch.Length / 2, scratch.Length - scratch.Length / 2)));
        Assert.Equal(TestBuffer, scratch);
    }
}
[Fact]
public void NonEmptyFile_Read_GetsExpectedData()
{
    string fileName = GetTestFilePath();
    File.WriteAllBytes(fileName, TestBuffer);

    using (var stream = CreateFileStream(fileName, FileMode.Open))
    {
        // Exact-size buffer: the full contents come back.
        var dest = new byte[TestBuffer.Length];
        Assert.Equal(TestBuffer.Length, stream.Read(new Span<byte>(dest, 0, dest.Length)));
        Assert.Equal(TestBuffer, dest);

        // Oversized buffer, read into the beginning: the tail stays zeroed.
        stream.Position = 0;
        dest = new byte[TestBuffer.Length * 2];
        Assert.Equal(TestBuffer.Length, stream.Read(new Span<byte>(dest)));
        Assert.Equal(TestBuffer, dest.Take(TestBuffer.Length));
        Assert.Equal(new byte[dest.Length - TestBuffer.Length], dest.Skip(TestBuffer.Length));

        // Oversized buffer, read into the middle: the two bytes before and
        // everything after the data stay zeroed.
        stream.Position = 0;
        dest = new byte[TestBuffer.Length * 2];
        Assert.Equal(TestBuffer.Length, stream.Read(new Span<byte>(dest, 2, dest.Length - 2)));
        Assert.Equal(TestBuffer, dest.Skip(2).Take(TestBuffer.Length));
        Assert.Equal(new byte[2], dest.Take(2));
        Assert.Equal(new byte[dest.Length - TestBuffer.Length - 2], dest.Skip(2 + TestBuffer.Length));
    }
}
[Fact]
public void ReadOnly_Write_Throws()
{
string fileName = GetTestFilePath();
File.WriteAllBytes(fileName, TestBuffer);
using (var fs = CreateFileStream(fileName, FileMode.Open, FileAccess.Read))
{
Assert.Throws<NotSupportedException>(() => fs.Write(new Span<byte>(new byte[1])));
fs.Dispose();
Assert.Throws<ObjectDisposedException>(() => fs.Write(new Span<byte>(new byte[1]))); // Disposed checking happens first
}
}
[Fact]
public void WriteOnly_Read_Throws()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create, FileAccess.Write))
{
Assert.Throws<NotSupportedException>(() => fs.Read(new Span<byte>(new byte[1])));
fs.Dispose();
Assert.Throws<ObjectDisposedException>(() => fs.Read(new Span<byte>(new byte[1]))); // Disposed checking happens first
}
}
[Fact]
public void EmptyWrites_NoDataWritten()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create))
{
fs.Write(Span<byte>.Empty);
Assert.Equal(0, fs.Length);
Assert.Equal(0, fs.Position);
}
}
[Fact]
public void NonEmptyWrite_WritesExpectedData()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create))
{
fs.Write(new Span<byte>(TestBuffer));
Assert.Equal(TestBuffer.Length, fs.Length);
Assert.Equal(TestBuffer.Length, fs.Position);
fs.Position = 0;
var buffer = new byte[TestBuffer.Length];
Assert.Equal(TestBuffer.Length, fs.Read(new Span<byte>(buffer)));
Assert.Equal(TestBuffer, buffer);
}
}
[Fact]
public void DisposedStream_ReadWriteAsync_Throws()
{
var fs = CreateFileStream(GetTestFilePath(), FileMode.Create);
fs.Dispose();
Assert.Throws<ObjectDisposedException>(() => { fs.ReadAsync(new Memory<byte>(new byte[1])); });
Assert.Throws<ObjectDisposedException>(() => { fs.WriteAsync(new ReadOnlyMemory<byte>(new byte[1])); });
}
[Fact]
public async Task EmptyFile_ReadAsync_Succeeds()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create))
{
// use a recognizable pattern
var buffer = (byte[])TestBuffer.Clone();
Assert.Equal(0, await fs.ReadAsync(Memory<byte>.Empty));
Assert.Equal(0, await fs.ReadAsync(new Memory<byte>(buffer, 0, 1)));
Assert.Equal(TestBuffer, buffer);
Assert.Equal(0, await fs.ReadAsync(new Memory<byte>(buffer, 0, buffer.Length)));
Assert.Equal(TestBuffer, buffer);
Assert.Equal(0, await fs.ReadAsync(new Memory<byte>(buffer, buffer.Length - 1, 1)));
Assert.Equal(TestBuffer, buffer);
Assert.Equal(0, await fs.ReadAsync(new Memory<byte>(buffer, buffer.Length / 2, buffer.Length - buffer.Length / 2)));
Assert.Equal(TestBuffer, buffer);
}
}
[Fact]
public async Task NonEmptyFile_ReadAsync_GetsExpectedData()
{
string fileName = GetTestFilePath();
File.WriteAllBytes(fileName, TestBuffer);
using (var fs = CreateFileStream(fileName, FileMode.Open))
{
var buffer = new byte[TestBuffer.Length];
Assert.Equal(TestBuffer.Length, await fs.ReadAsync(new Memory<byte>(buffer, 0, buffer.Length)));
Assert.Equal(TestBuffer, buffer);
// Larger than needed buffer, read into beginning, rest remains untouched
fs.Position = 0;
buffer = new byte[TestBuffer.Length * 2];
Assert.Equal(TestBuffer.Length, await fs.ReadAsync(new Memory<byte>(buffer)));
Assert.Equal(TestBuffer, buffer.Take(TestBuffer.Length));
Assert.Equal(new byte[buffer.Length - TestBuffer.Length], buffer.Skip(TestBuffer.Length));
// Larger than needed buffer, read into middle, beginning and end remain untouched
fs.Position = 0;
buffer = new byte[TestBuffer.Length * 2];
Assert.Equal(TestBuffer.Length, await fs.ReadAsync(new Memory<byte>(buffer, 2, buffer.Length - 2)));
Assert.Equal(TestBuffer, buffer.Skip(2).Take(TestBuffer.Length));
Assert.Equal(new byte[2], buffer.Take(2));
Assert.Equal(new byte[buffer.Length - TestBuffer.Length - 2], buffer.Skip(2 + TestBuffer.Length));
}
}
[Fact]
public void ReadOnly_WriteAsync_Throws()
{
string fileName = GetTestFilePath();
File.WriteAllBytes(fileName, TestBuffer);
using (var fs = CreateFileStream(fileName, FileMode.Open, FileAccess.Read))
{
Assert.Throws<NotSupportedException>(() => { fs.WriteAsync(new ReadOnlyMemory<byte>(new byte[1])); });
fs.Dispose();
Assert.Throws<ObjectDisposedException>(() => { fs.WriteAsync(new ReadOnlyMemory<byte>(new byte[1])); }); // Disposed checking happens first
}
}
[Fact]
public void WriteOnly_ReadAsync_Throws()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create, FileAccess.Write))
{
Assert.Throws<NotSupportedException>(() => { fs.ReadAsync(new Memory<byte>(new byte[1])); });
fs.Dispose();
Assert.Throws<ObjectDisposedException>(() => { fs.ReadAsync(new Memory<byte>(new byte[1])); });// Disposed checking happens first
}
}
[Fact]
public async Task EmptyWriteAsync_NoDataWritten()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create))
{
await fs.WriteAsync(Memory<byte>.Empty);
Assert.Equal(0, fs.Length);
Assert.Equal(0, fs.Position);
}
}
[Fact]
public async Task NonEmptyWriteAsync_WritesExpectedData()
{
using (var fs = CreateFileStream(GetTestFilePath(), FileMode.Create))
{
await fs.WriteAsync(new Memory<byte>(TestBuffer));
Assert.Equal(TestBuffer.Length, fs.Length);
Assert.Equal(TestBuffer.Length, fs.Position);
fs.Position = 0;
var buffer = new byte[TestBuffer.Length];
Assert.Equal(TestBuffer.Length, await fs.ReadAsync(new Memory<byte>(buffer)));
Assert.Equal(TestBuffer, buffer);
}
}
}
/// <summary>Runs the span/memory read-write suite against a synchronous FileStream.</summary>
public class Sync_FileStream_ReadWrite_Span : FileStream_ReadWrite_Span
{
    protected override FileStream CreateFileStream(string path, FileMode mode, FileAccess access)
    {
        // Synchronous handle: no FileOptions.Asynchronous flag.
        return new FileStream(path, mode, access, FileShare.None, 0x1000, FileOptions.None);
    }
}
/// <summary>Runs the span/memory read-write suite against an async-opened FileStream.</summary>
public class Async_FileStream_ReadWrite_Span : FileStream_ReadWrite_Span
{
    protected override FileStream CreateFileStream(string path, FileMode mode, FileAccess access)
    {
        // Asynchronous handle: exercises the overlapped-I/O code paths.
        return new FileStream(path, mode, access, FileShare.None, 0x1000, FileOptions.Asynchronous);
    }
}
/// <summary>
/// Runs the suite against a FileStream subclass that overrides only the
/// array-based virtuals, and additionally verifies that the non-virtual
/// span/memory entry points fall back to those array overrides.
/// </summary>
public sealed class Sync_DerivedFileStream_ReadWrite_Span : Sync_FileStream_ReadWrite_Span
{
    protected override FileStream CreateFileStream(string path, FileMode mode, FileAccess access) =>
        new DerivedFileStream(path, mode, access, FileShare.None, 0x1000, FileOptions.None);

    [Fact]
    public void CallSpanReadWriteOnDerivedFileStream_ArrayMethodsUsed()
    {
        using (var fs = (DerivedFileStream)CreateFileStream(GetTestFilePath(), FileMode.Create, FileAccess.ReadWrite))
        {
            // Flags start clear; each span call must route to the matching array override.
            Assert.False(fs.WriteArrayInvoked);
            Assert.False(fs.ReadArrayInvoked);
            fs.Write(new ReadOnlySpan<byte>(new byte[1]));
            Assert.True(fs.WriteArrayInvoked);
            Assert.False(fs.ReadArrayInvoked);
            fs.Position = 0;
            fs.Read(new Span<byte>(new byte[1]));
            Assert.True(fs.WriteArrayInvoked);
            Assert.True(fs.ReadArrayInvoked);
        }
    }

    [Fact]
    public async Task CallMemoryReadWriteAsyncOnDerivedFileStream_ArrayMethodsUsed()
    {
        using (var fs = (DerivedFileStream)CreateFileStream(GetTestFilePath(), FileMode.Create, FileAccess.ReadWrite))
        {
            // Same check for the Memory-based async entry points.
            Assert.False(fs.WriteAsyncArrayInvoked);
            Assert.False(fs.ReadAsyncArrayInvoked);
            await fs.WriteAsync(new ReadOnlyMemory<byte>(new byte[1]));
            Assert.True(fs.WriteAsyncArrayInvoked);
            Assert.False(fs.ReadAsyncArrayInvoked);
            fs.Position = 0;
            await fs.ReadAsync(new Memory<byte>(new byte[1]));
            Assert.True(fs.WriteAsyncArrayInvoked);
            Assert.True(fs.ReadAsyncArrayInvoked);
        }
    }
}
/// <summary>
/// Same suite as <see cref="Async_FileStream_ReadWrite_Span"/>, but against a
/// FileStream subclass so the base-class array fallbacks are exercised.
/// </summary>
public sealed class Async_DerivedFileStream_ReadWrite_Span : Async_FileStream_ReadWrite_Span
{
    protected override FileStream CreateFileStream(string path, FileMode mode, FileAccess access)
    {
        return new DerivedFileStream(path, mode, access, FileShare.None, 0x1000, FileOptions.Asynchronous);
    }
}
/// <summary>
/// FileStream subclass that overrides only the array-based Read/Write virtuals
/// and records whether each one was invoked, so tests can verify that the
/// span/memory entry points fall back to them.
/// </summary>
internal sealed class DerivedFileStream : FileStream
{
    // One flag per overridden virtual; set on first invocation, never reset.
    public bool ReadArrayInvoked = false;
    public bool WriteArrayInvoked = false;
    public bool ReadAsyncArrayInvoked = false;
    public bool WriteAsyncArrayInvoked = false;

    public DerivedFileStream(string path, FileMode mode, FileAccess access, FileShare share, int bufferSize, FileOptions options)
        : base(path, mode, access, share, bufferSize, options)
    {
    }

    public override int Read(byte[] array, int offset, int count)
    {
        ReadArrayInvoked = true;
        int bytesRead = base.Read(array, offset, count);
        return bytesRead;
    }

    public override void Write(byte[] array, int offset, int count)
    {
        WriteArrayInvoked = true;
        base.Write(array, offset, count);
    }

    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        ReadAsyncArrayInvoked = true;
        Task<int> pending = base.ReadAsync(buffer, offset, count, cancellationToken);
        return pending;
    }

    public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        WriteAsyncArrayInvoked = true;
        Task pending = base.WriteAsync(buffer, offset, count, cancellationToken);
        return pending;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using Sep.Git.Tfs.Commands;
using Sep.Git.Tfs.Core.TfsInterop;
using Sep.Git.Tfs.Util;
namespace Sep.Git.Tfs.Core
{
public class GitTfsRemote : IGitTfsRemote
{
private static readonly Regex isInDotGit = new Regex("(?:^|/)\\.git(?:/|$)");
private static readonly Regex treeShaRegex = new Regex("^tree (" + GitTfsConstants.Sha1 + ")");
private readonly Globals globals;
private readonly TextWriter stdout;
private readonly RemoteOptions remoteOptions;
private long? maxChangesetId;
private string maxCommitHash;
private bool isTfsAuthenticated;
public RemoteInfo RemoteInfo { get; private set; }
public GitTfsRemote(RemoteInfo info, IGitRepository repository, RemoteOptions remoteOptions, Globals globals, ITfsHelper tfsHelper, TextWriter stdout)
{
this.remoteOptions = remoteOptions;
this.globals = globals;
this.stdout = stdout;
Tfs = tfsHelper;
Repository = repository;
RemoteInfo = info;
Id = info.Id;
TfsUrl = info.Url;
TfsRepositoryPath = info.Repository;
TfsUsername = info.Username;
TfsPassword = info.Password;
Aliases = (info.Aliases ?? Enumerable.Empty<string>()).ToArray();
IgnoreRegexExpression = info.IgnoreRegex;
IgnoreExceptRegexExpression = info.IgnoreExceptRegex;
Autotag = info.Autotag;
this.IsSubtree = CheckSubtree();
}
private bool CheckSubtree()
{
var m = GitTfsConstants.RemoteSubtreeRegex.Match(this.Id);
if (m.Success)
{
this.OwningRemoteId = m.Groups["owner"].Value;
this.Prefix = m.Groups["prefix"].Value;
return true;
}
return false;
}
public void EnsureTfsAuthenticated()
{
if (isTfsAuthenticated)
return;
Tfs.EnsureAuthenticated();
isTfsAuthenticated = true;
}
public bool IsDerived
{
get { return false; }
}
public bool IsSubtree { get; private set; }
public bool IsSubtreeOwner
{
get
{
return TfsRepositoryPath == null;
}
}
public string Id { get; set; }
public string TfsUrl
{
get { return Tfs.Url; }
set { Tfs.Url = value; }
}
private string[] Aliases { get; set; }
public bool Autotag { get; set; }
public string TfsUsername
{
get { return Tfs.Username; }
set { Tfs.Username = value; }
}
public string TfsPassword
{
get { return Tfs.Password; }
set { Tfs.Password = value; }
}
public string TfsRepositoryPath { get; set; }
/// <summary>
/// Gets the TFS server-side paths of all subtrees of this remote.
/// Valid if the remote has subtrees, which occurs when <see cref="TfsRepositoryPath"/> is null.
/// </summary>
public string[] TfsSubtreePaths
{
get
{
if (tfsSubtreePaths == null)
tfsSubtreePaths = Repository.GetSubtrees(this).Select(x => x.TfsRepositoryPath).ToArray();
return tfsSubtreePaths;
}
}
private string[] tfsSubtreePaths = null;
public string IgnoreRegexExpression { get; set; }
public string IgnoreExceptRegexExpression { get; set; }
public IGitRepository Repository { get; set; }
public ITfsHelper Tfs { get; set; }
public string OwningRemoteId { get; private set; }
public string Prefix { get; private set; }
public bool ExportMetadatas { get; set; }
public long MaxChangesetId
{
get { InitHistory(); return maxChangesetId.Value; }
set { maxChangesetId = value; }
}
public string MaxCommitHash
{
get { InitHistory(); return maxCommitHash; }
set { maxCommitHash = value; }
}
private TfsChangesetInfo GetTfsChangesetById(int id)
{
return Repository.FilterParentTfsCommits(RemoteRef, false, c => c.ChangesetId == id).FirstOrDefault();
}
private void InitHistory()
{
if (maxChangesetId == null)
{
var mostRecentUpdate = Repository.GetLastParentTfsCommits(RemoteRef).FirstOrDefault();
if (mostRecentUpdate != null)
{
MaxCommitHash = mostRecentUpdate.GitCommit;
MaxChangesetId = mostRecentUpdate.ChangesetId;
}
else
{
MaxChangesetId = 0;
}
}
}
private string Dir
{
get
{
return Ext.CombinePaths(globals.GitDir, "tfs", Id);
}
}
private string IndexFile
{
get
{
return Path.Combine(Dir, "index");
}
}
private string WorkingDirectory
{
get
{
var dir = Repository.GetConfig(GitTfsConstants.WorkspaceConfigKey);
if (this.IsSubtree)
{
if(dir != null)
{
return Path.Combine(dir, this.Prefix);
}
//find the relative path to the owning remote
return Ext.CombinePaths(globals.GitDir, "tfs", this.OwningRemoteId, "workspace", this.Prefix);
}
return dir ?? DefaultWorkingDirectory;
}
}
private string DefaultWorkingDirectory
{
get
{
return Path.Combine(Dir, "workspace");
}
}
public void CleanupWorkspace()
{
Tfs.CleanupWorkspaces(WorkingDirectory);
}
public void CleanupWorkspaceDirectory()
{
try
{
var allFiles = Directory.EnumerateFiles(WorkingDirectory, "*", SearchOption.AllDirectories);
foreach (var file in allFiles)
File.SetAttributes(file, File.GetAttributes(file) & ~FileAttributes.ReadOnly);
Directory.Delete(WorkingDirectory, true);
}
catch (Exception ex)
{
Trace.WriteLine("CleanupWorkspaceDirectory: " + ex.Message);
}
}
public bool ShouldSkip(string path)
{
return IsInDotGit(path) || IsIgnored(path);
}
private bool IsIgnored(string path)
{
return Ignorance.IsIncluded(path);
}
private Bouncer _ignorance;
private Bouncer Ignorance
{
get
{
if (_ignorance == null)
{
_ignorance = new Bouncer();
_ignorance.Include(IgnoreRegexExpression);
_ignorance.Include(remoteOptions.IgnoreRegex);
_ignorance.Exclude(IgnoreExceptRegexExpression);
_ignorance.Exclude(remoteOptions.ExceptRegex);
}
return _ignorance;
}
}
private bool IsInDotGit(string path)
{
return isInDotGit.IsMatch(path);
}
public string GetPathInGitRepo(string tfsPath)
{
if (tfsPath == null) return null;
if (!IsSubtreeOwner)
{
if (!tfsPath.StartsWith(TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase)) return null;
tfsPath = tfsPath.Substring(TfsRepositoryPath.Length);
}
else
{
//look through the subtrees
var p = this.globals.Repository.GetSubtrees(this)
.Where(x => x.IsSubtree)
.FirstOrDefault(x => tfsPath.StartsWith(x.TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase));
if (p == null) return null;
tfsPath = p.GetPathInGitRepo(tfsPath);
//we must prepend the prefix in order to get the correct directory
if (tfsPath.StartsWith("/"))
tfsPath = p.Prefix + tfsPath;
else
tfsPath = p.Prefix + "/" + tfsPath;
}
while (tfsPath.StartsWith("/"))
tfsPath = tfsPath.Substring(1);
return tfsPath;
}
public class FetchResult : IFetchResult
{
public bool IsSuccess { get; set; }
public long LastFetchedChangesetId { get; set; }
public string ParentBranchTfsPath { get; set; }
}
public IFetchResult Fetch(bool stopOnFailMergeCommit = false)
{
return FetchWithMerge(-1, stopOnFailMergeCommit);
}
public IFetchResult FetchWithMerge(long mergeChangesetId, bool stopOnFailMergeCommit = false, params string[] parentCommitsHashes)
{
var fetchResult = new FetchResult{IsSuccess = true};
foreach (var changeset in FetchChangesets())
{
AssertTemporaryIndexClean(MaxCommitHash);
var log = Apply(MaxCommitHash, changeset);
if (changeset.IsMergeChangeset)
{
var parentChangesetId = Tfs.FindMergeChangesetParent(TfsRepositoryPath, changeset.Summary.ChangesetId, this);
var shaParent = Repository.FindCommitHashByChangesetId(parentChangesetId);
if (shaParent == null)
shaParent = FindMergedRemoteAndFetch(parentChangesetId, stopOnFailMergeCommit);
if (shaParent != null)
{
log.CommitParents.Add(shaParent);
}
else
{
if (stopOnFailMergeCommit)
{
fetchResult.IsSuccess = false;
fetchResult.LastFetchedChangesetId = MaxChangesetId;
return fetchResult;
}
//TODO : Manage case where there is not yet a git commit for the parent changset!!!!!
stdout.WriteLine("warning: this changeset " + changeset.Summary.ChangesetId +
" is a merge changeset. But it can't have been managed accordingly because one of the parent changeset "
+ parentChangesetId + " is not present in the repository! If you want to do it, fetch the branch containing this changeset before retrying...");
}
}
if (changeset.Summary.ChangesetId == mergeChangesetId)
{
foreach (var parent in parentCommitsHashes)
{
log.CommitParents.Add(parent);
}
}
if (ExportMetadatas)
{
if (changeset.Summary.Workitems.Any())
{
string workitems = string.Empty;
foreach (var workitem in changeset.Summary.Workitems)
workitems += "\n" + GitTfsConstants.GitTfsWorkItemPrefix + workitem.Id + " associate";
log.Log += workitems;
}
if (!string.IsNullOrWhiteSpace(changeset.Summary.PolicyOverrideComment))
log.Log += "\n" + GitTfsConstants.GitTfsPolicyOverrideCommentPrefix + changeset.Summary.PolicyOverrideComment;
if (!string.IsNullOrWhiteSpace(changeset.Summary.CodeReviewer))
log.Log += "\n" + GitTfsConstants.GitTfsCodeReviewerPrefix + changeset.Summary.CodeReviewer;
if (!string.IsNullOrWhiteSpace(changeset.Summary.SecurityReviewer))
log.Log += "\n" + GitTfsConstants.GitTfsSecurityReviewerPrefix + changeset.Summary.SecurityReviewer;
if (!string.IsNullOrWhiteSpace(changeset.Summary.PerformanceReviewer))
log.Log += "\n" + GitTfsConstants.GitTfsPerformanceReviewerPrefix + changeset.Summary.PerformanceReviewer;
}
var commitSha = Commit(log);
UpdateTfsHead(commitSha, changeset.Summary.ChangesetId);
StringBuilder metadatas = new StringBuilder();
if(changeset.Summary.Workitems.Any())
{
string workitemNote = "Workitems:\n";
foreach(var workitem in changeset.Summary.Workitems)
{
workitemNote += String.Format("[{0}] {1}\n {2}\n", workitem.Id, workitem.Title, workitem.Url);
}
metadatas.Append(workitemNote);
}
if (!string.IsNullOrWhiteSpace(changeset.Summary.PolicyOverrideComment))
metadatas.Append("\nPolicy Override Comment:" + changeset.Summary.PolicyOverrideComment);
if (!string.IsNullOrWhiteSpace(changeset.Summary.CodeReviewer))
metadatas.Append("\nCode Reviewer:" + changeset.Summary.CodeReviewer);
if (!string.IsNullOrWhiteSpace(changeset.Summary.SecurityReviewer))
metadatas.Append("\nSecurity Reviewer:" + changeset.Summary.SecurityReviewer);
if (!string.IsNullOrWhiteSpace(changeset.Summary.PerformanceReviewer))
metadatas.Append("\nPerformance Reviewer:" + changeset.Summary.PerformanceReviewer);
if (metadatas.Length != 0)
Repository.CreateNote(commitSha, metadatas.ToString(), log.AuthorName, log.AuthorEmail, log.Date);
DoGcIfNeeded();
}
return fetchResult;
}
private string FindMergedRemoteAndFetch(int parentChangesetId, bool stopOnFailMergeCommit)
{
var tfsRemotes = FindTfsRemoteOfChangeset(Tfs.GetChangeset(parentChangesetId));
foreach (var tfsRemote in tfsRemotes.Where(r=>string.Compare(r.TfsRepositoryPath, this.TfsRepositoryPath, StringComparison.InvariantCultureIgnoreCase) != 0))
{
var fetchResult = tfsRemote.Fetch(stopOnFailMergeCommit);
}
return Repository.FindCommitHashByChangesetId(parentChangesetId);
}
private IEnumerable<IGitTfsRemote> FindTfsRemoteOfChangeset(IChangeset changeset)
{
//I think you want something that uses GetPathInGitRepo and ShouldSkip. See TfsChangeset.Apply.
//Don't know if there is a way to extract remote tfs repository path from changeset datas! Should be better!!!
return Repository.ReadAllTfsRemotes().Where(r => changeset.Changes.Any(c => r.GetPathInGitRepo(c.Item.ServerItem) != null));
}
private string CommitChangeset(ITfsChangeset changeset, string parent)
{
var log = Apply(parent, changeset);
return Commit(log);
}
public void QuickFetch()
{
var changeset = GetLatestChangeset();
quickFetch(changeset);
}
public void QuickFetch(int changesetId)
{
var changeset = Tfs.GetChangeset(changesetId, this);
quickFetch(changeset);
}
private void quickFetch(ITfsChangeset changeset)
{
AssertTemporaryIndexEmpty();
var log = CopyTree(MaxCommitHash, changeset);
UpdateTfsHead(Commit(log), changeset.Summary.ChangesetId);
DoGcIfNeeded();
}
private IEnumerable<ITfsChangeset> FetchChangesets()
{
Trace.WriteLine(RemoteRef + ": Getting changesets from " + (MaxChangesetId + 1) + " to current ...", "info");
// TFS 2010 doesn't like when we ask for history past its last changeset.
if (MaxChangesetId == GetLatestChangeset().Summary.ChangesetId)
return Enumerable.Empty<ITfsChangeset>();
if(!IsSubtreeOwner)
return Tfs.GetChangesets(TfsRepositoryPath, MaxChangesetId + 1, this);
return globals.Repository.GetSubtrees(this)
.SelectMany(x => Tfs.GetChangesets(x.TfsRepositoryPath, this.MaxChangesetId + 1, x))
.OrderBy(x => x.Summary.ChangesetId);
}
public ITfsChangeset GetChangeset(long changesetId)
{
return Tfs.GetChangeset((int)changesetId, this);
}
private ITfsChangeset GetLatestChangeset()
{
if (!string.IsNullOrEmpty(this.TfsRepositoryPath))
{
return Tfs.GetLatestChangeset(this);
}
else
{
var changesetId = globals.Repository.GetSubtrees(this).Select(x => Tfs.GetLatestChangeset(x)).Max(x => x.Summary.ChangesetId);
return GetChangeset(changesetId);
}
}
public void UpdateTfsHead(string commitHash, long changesetId)
{
MaxCommitHash = commitHash;
MaxChangesetId = changesetId;
Repository.UpdateRef(RemoteRef, MaxCommitHash, "C" + MaxChangesetId);
if (Autotag)
Repository.UpdateRef(TagPrefix + "C" + MaxChangesetId, MaxCommitHash);
LogCurrentMapping();
}
private void LogCurrentMapping()
{
stdout.WriteLine("C" + MaxChangesetId + " = " + MaxCommitHash);
}
private string TagPrefix
{
get { return "refs/tags/tfs/" + Id + "/"; }
}
public string RemoteRef
{
get { return "refs/remotes/tfs/" + Id; }
}
private void DoGcIfNeeded()
{
Trace.WriteLine("GC Countdown: " + globals.GcCountdown);
if (--globals.GcCountdown < 0)
{
globals.GcCountdown = globals.GcPeriod;
try
{
Repository.CommandNoisy("gc", "--auto");
}
catch (Exception e)
{
Trace.WriteLine(e);
stdout.WriteLine("Warning: `git gc` failed! Try running it after git-tfs is finished.");
}
}
}
private void AssertTemporaryIndexClean(string treeish)
{
if (string.IsNullOrEmpty(treeish))
{
AssertTemporaryIndexEmpty();
return;
}
WithTemporaryIndex(() => AssertIndexClean(treeish));
}
private void AssertTemporaryIndexEmpty()
{
if (File.Exists(IndexFile))
File.Delete(IndexFile);
}
private void AssertIndexClean(string treeish)
{
if (!File.Exists(IndexFile)) Repository.CommandNoisy("read-tree", treeish);
var currentTree = Repository.CommandOneline("write-tree");
var expectedCommitInfo = Repository.Command("cat-file", "commit", treeish);
var expectedCommitTree = treeShaRegex.Match(expectedCommitInfo).Groups[1].Value;
if (expectedCommitTree != currentTree)
{
Trace.WriteLine("Index mismatch: " + expectedCommitTree + " != " + currentTree);
Trace.WriteLine("rereading " + treeish);
File.Delete(IndexFile);
Repository.CommandNoisy("read-tree", treeish);
currentTree = Repository.CommandOneline("write-tree");
if (expectedCommitTree != currentTree)
{
throw new Exception("Unable to create a clean temporary index: trees (" + treeish + ") " + expectedCommitTree + " != " + currentTree);
}
}
}
private LogEntry Apply(string parent, ITfsChangeset changeset)
{
LogEntry result = null;
WithTemporaryIndex(() => WithWorkspace(changeset.Summary, workspace =>
{
AssertTemporaryIndexClean(parent);
GitIndexInfo.Do(Repository, index => result = changeset.Apply(parent, index, workspace));
result.Tree = Repository.CommandOneline("write-tree");
}));
if (!String.IsNullOrEmpty(parent)) result.CommitParents.Add(parent);
return result;
}
private LogEntry CopyTree(string lastCommit, ITfsChangeset changeset)
{
LogEntry result = null;
WithTemporaryIndex(() => WithWorkspace(changeset.Summary, workspace => {
GitIndexInfo.Do(Repository, index => result = changeset.CopyTree(index, workspace));
result.Tree = Repository.CommandOneline("write-tree");
}));
if (!String.IsNullOrEmpty(lastCommit)) result.CommitParents.Add(lastCommit);
return result;
}
private string Commit(LogEntry logEntry)
{
string commitHash = null;
WithCommitHeaderEnv(logEntry, () => commitHash = WriteCommit(logEntry));
// TODO (maybe): StoreChangesetMetadata(commitInfo);
return commitHash;
}
private string WriteCommit(LogEntry logEntry)
{
// TODO (maybe): encode logEntry.Log according to 'git config --get i18n.commitencoding', if specified
//var commitEncoding = Repository.CommandOneline("config", "i18n.commitencoding");
//var encoding = LookupEncoding(commitEncoding) ?? Encoding.UTF8;
string commitHash = null;
//the remote to be associated with the commit might be a subtree, if it's null then it's not from a subtree.
var remote = logEntry.Remote ?? this;
Repository.CommandInputOutputPipe((procIn, procOut) =>
{
procIn.WriteLine(logEntry.Log);
procIn.WriteLine(GitTfsConstants.TfsCommitInfoFormat, remote.TfsUrl,
remote.TfsRepositoryPath, logEntry.ChangesetId);
procIn.Close();
commitHash = ParseCommitInfo(procOut.ReadToEnd());
}, BuildCommitCommand(logEntry));
return commitHash;
}
private string[] BuildCommitCommand(LogEntry logEntry)
{
var tree = logEntry.Tree ?? GetTemporaryIndexTreeSha();
tree.AssertValidSha();
var commitCommand = new List<string> { "commit-tree", tree };
foreach (var parent in logEntry.CommitParents)
{
commitCommand.Add("-p");
commitCommand.Add(parent);
}
return commitCommand.ToArray();
}
private string GetTemporaryIndexTreeSha()
{
string tree = null;
WithTemporaryIndex(() => tree = Repository.CommandOneline("write-tree"));
return tree;
}
private string ParseCommitInfo(string commitTreeOutput)
{
return commitTreeOutput.Trim();
}
//private Encoding LookupEncoding(string encoding)
//{
// if(encoding == null)
// return null;
// throw new NotImplementedException("Need to implement encoding lookup for " + encoding);
//}
private void WithCommitHeaderEnv(LogEntry logEntry, Action action)
{
WithTemporaryEnvironment(action, new Dictionary<string, string>
{
{"GIT_AUTHOR_NAME", logEntry.AuthorName},
{"GIT_AUTHOR_EMAIL", logEntry.AuthorEmail},
{"GIT_AUTHOR_DATE", logEntry.Date.FormatForGit()},
{"GIT_COMMITTER_DATE", logEntry.Date.FormatForGit()},
{"GIT_COMMITTER_NAME", logEntry.CommitterName ?? logEntry.AuthorName},
{"GIT_COMMITTER_EMAIL", logEntry.CommitterEmail ?? logEntry.AuthorEmail}
});
}
private void WithTemporaryIndex(Action action)
{
WithTemporaryEnvironment(() =>
{
Directory.CreateDirectory(Path.GetDirectoryName(IndexFile));
action();
}, new Dictionary<string, string> { { "GIT_INDEX_FILE", IndexFile } });
}
private void WithTemporaryEnvironment(Action action, IDictionary<string, string> newEnvironment)
{
var oldEnvironment = new Dictionary<string, string>();
PushEnvironment(newEnvironment, oldEnvironment);
try
{
action();
}
finally
{
PushEnvironment(oldEnvironment);
}
}
private void PushEnvironment(IDictionary<string, string> desiredEnvironment)
{
PushEnvironment(desiredEnvironment, new Dictionary<string, string>());
}
private void PushEnvironment(IDictionary<string, string> desiredEnvironment, IDictionary<string, string> oldEnvironment)
{
foreach (var key in desiredEnvironment.Keys)
{
oldEnvironment[key] = Environment.GetEnvironmentVariable(key);
Environment.SetEnvironmentVariable(key, desiredEnvironment[key]);
}
}
public void Unshelve(string shelvesetOwner, string shelvesetName, string destinationBranch)
{
var destinationRef = GitRepository.ShortToLocalName(destinationBranch);
if(Repository.HasRef(destinationRef))
throw new GitTfsException("ERROR: Destination branch (" + destinationBranch + ") already exists!");
var shelvesetChangeset = Tfs.GetShelvesetData(this, shelvesetOwner, shelvesetName);
var parentId = shelvesetChangeset.BaseChangesetId;
var ch = GetTfsChangesetById(parentId);
if (ch == null)
throw new GitTfsException("ERROR: Parent changeset C" + parentId + " not found."
+" Try fetching the latest changes from TFS");
var commit = CommitChangeset(shelvesetChangeset, ch.GitCommit);
Repository.UpdateRef(destinationRef, commit, "Shelveset " + shelvesetName + " from " + shelvesetOwner);
}
public void Shelve(string shelvesetName, string head, TfsChangesetInfo parentChangeset, bool evaluateCheckinPolicies)
{
WithWorkspace(parentChangeset, workspace => Shelve(shelvesetName, head, parentChangeset, evaluateCheckinPolicies, workspace));
}
public bool HasShelveset(string shelvesetName)
{
return Tfs.HasShelveset(shelvesetName);
}
private void Shelve(string shelvesetName, string head, TfsChangesetInfo parentChangeset, bool evaluateCheckinPolicies, ITfsWorkspace workspace)
{
PendChangesToWorkspace(head, parentChangeset.GitCommit, workspace);
workspace.Shelve(shelvesetName, evaluateCheckinPolicies, () => Repository.GetCommitMessage(head, parentChangeset.GitCommit));
}
public long CheckinTool(string head, TfsChangesetInfo parentChangeset)
{
var changeset = 0L;
WithWorkspace(parentChangeset, workspace => changeset = CheckinTool(head, parentChangeset, workspace));
return changeset;
}
private long CheckinTool(string head, TfsChangesetInfo parentChangeset, ITfsWorkspace workspace)
{
PendChangesToWorkspace(head, parentChangeset.GitCommit, workspace);
return workspace.CheckinTool(() => Repository.GetCommitMessage(head, parentChangeset.GitCommit));
}
private void PendChangesToWorkspace(string head, string parent, ITfsWorkspaceModifier workspace)
{
using (var tidyWorkspace = new DirectoryTidier(workspace, GetLatestChangeset().GetFullTree()))
{
foreach (var change in Repository.GetChangedFiles(parent, head))
{
change.Apply(tidyWorkspace);
}
}
}
public long Checkin(string head, TfsChangesetInfo parentChangeset, CheckinOptions options, string sourceTfsPath = null)
{
var changeset = 0L;
WithWorkspace(parentChangeset, workspace => changeset = Checkin(head, parentChangeset.GitCommit, workspace, options, sourceTfsPath));
return changeset;
}
public long Checkin(string head, string parent, TfsChangesetInfo parentChangeset, CheckinOptions options, string sourceTfsPath = null)
{
var changeset = 0L;
WithWorkspace(parentChangeset, workspace => changeset = Checkin(head, parent, workspace, options, sourceTfsPath));
return changeset;
}
private void WithWorkspace(TfsChangesetInfo parentChangeset, Action<ITfsWorkspace> action)
{
//are there any subtrees?
var subtrees = globals.Repository.GetSubtrees(this);
if (subtrees.Any())
{
Tfs.WithWorkspace(WorkingDirectory, this, subtrees.Select(x => new Tuple<string, string>(x.TfsRepositoryPath, x.Prefix)), parentChangeset, action);
}
else
{
Tfs.WithWorkspace(WorkingDirectory, this, parentChangeset, action);
}
}
private long Checkin(string head, string parent, ITfsWorkspace workspace, CheckinOptions options, string sourceTfsPath)
{
PendChangesToWorkspace(head, parent, workspace);
if (!string.IsNullOrWhiteSpace(sourceTfsPath))
workspace.Merge(sourceTfsPath, TfsRepositoryPath);
return workspace.Checkin(options);
}
public bool MatchesUrlAndRepositoryPath(string tfsUrl, string tfsRepositoryPath)
{
if(!MatchesTfsUrl(tfsUrl))
return false;
if(TfsRepositoryPath == null)
return tfsRepositoryPath == null;
return TfsRepositoryPath.Equals(tfsRepositoryPath, StringComparison.OrdinalIgnoreCase);
}
// True when `tfsUrl` equals this remote's url or any of its configured aliases
// (case-insensitive).
private bool MatchesTfsUrl(string tfsUrl)
{
    if (TfsUrl.Equals(tfsUrl, StringComparison.OrdinalIgnoreCase))
        return true;
    return Aliases.Contains(tfsUrl, StringComparison.OrdinalIgnoreCase);
}
}
}
| |
using NQuery.Symbols;
namespace NQuery
{
/// <summary>
/// Extension helpers over <see cref="Type"/> for the query type system, including the
/// sentinel types <see cref="Missing"/>, <see cref="Unknown"/> and <see cref="Null"/>.
/// </summary>
public static class TypeFacts
{
    // Private marker classes: only their Type objects are ever used, as sentinels.
    private static class MissingType { }
    private static class UnknownType { }
    private static class NullType { }

    /// <summary>Sentinel representing a type that could not be resolved.</summary>
    public static readonly Type Missing = typeof(MissingType);

    /// <summary>Sentinel representing an unknown type (used for error recovery).</summary>
    public static readonly Type Unknown = typeof(UnknownType);

    /// <summary>Sentinel representing the type of the NULL literal.</summary>
    public static readonly Type Null = typeof(NullType);

    public static bool IsMissing(this Type type)
    {
        return type == Missing;
    }

    public static bool IsUnknown(this Type type)
    {
        return type == Unknown;
    }

    /// <summary>A type is an error type when it is either missing or unknown.</summary>
    public static bool IsError(this Type type)
    {
        return type.IsMissing() || type.IsUnknown();
    }

    public static bool IsNull(this Type type)
    {
        return type == Null;
    }

    /// <summary>Maps the NULL sentinel to <see cref="object"/>; all other types pass through.</summary>
    internal static Type ToOutputType(this Type type)
    {
        return type.IsNull() ? typeof(object) : type;
    }

    // Error types are deliberately not reported as non-boolean so that a single
    // error does not cascade into follow-up diagnostics.
    internal static bool IsNonBoolean(this Type type)
    {
        return !type.IsError() && type != typeof(bool);
    }

    internal static bool IsIntrinsicNumericType(this KnownType value)
    {
        switch (value)
        {
            case KnownType.SByte:
            case KnownType.Byte:
            case KnownType.Int16:
            case KnownType.UInt16:
            case KnownType.Int32:
            case KnownType.UInt32:
            case KnownType.Int64:
            case KnownType.UInt64:
            case KnownType.Char:
            case KnownType.Single:
            case KnownType.Double:
                return true;
            case KnownType.Decimal:
            case KnownType.Boolean:
            case KnownType.String:
            case KnownType.Object:
                return false;
            default:
                throw ExceptionBuilder.UnexpectedValue(value);
        }
    }

    internal static bool IsSignedNumericType(this KnownType value)
    {
        switch (value)
        {
            case KnownType.SByte:
            case KnownType.Int16:
            case KnownType.Int32:
            case KnownType.Int64:
                return true;
            default:
                return false;
        }
    }

    internal static bool IsUnsignedNumericType(this KnownType value)
    {
        switch (value)
        {
            case KnownType.Byte:
            case KnownType.UInt16:
            case KnownType.UInt32:
            case KnownType.UInt64:
                return true;
            default:
                return false;
        }
    }

    /// <summary>
    /// Maps a CLR primitive, string or object type to its <see cref="KnownType"/>,
    /// or null when the type is not one of the known built-ins.
    /// </summary>
    internal static KnownType? GetKnownType(this Type type)
    {
        if (type == typeof(byte))
            return KnownType.Byte;
        if (type == typeof(sbyte))
            return KnownType.SByte;
        if (type == typeof(char))
            return KnownType.Char;
        if (type == typeof(short))
            return KnownType.Int16;
        if (type == typeof(ushort))
            return KnownType.UInt16;
        if (type == typeof(int))
            return KnownType.Int32;
        if (type == typeof(uint))
            return KnownType.UInt32;
        if (type == typeof(long))
            return KnownType.Int64;
        if (type == typeof(ulong))
            return KnownType.UInt64;
        if (type == typeof(float))
            return KnownType.Single;
        if (type == typeof(double))
            return KnownType.Double;
        if (type == typeof(decimal))
            return KnownType.Decimal;
        if (type == typeof(bool))
            return KnownType.Boolean;
        if (type == typeof(string))
            return KnownType.String;
        if (type == typeof(object))
            return KnownType.Object;
        return null;
    }

    /// <summary>Returns the user-facing name of a type, e.g. "INT" for <see cref="int"/>.</summary>
    public static string ToDisplayName(this Type type)
    {
        if (type.IsUnknown())
            return Resources.TypeUnknown;
        if (type.IsNull())
            return Resources.TypeNull;
        if (type.IsMissing())
            return Resources.TypeMissing;
        var knownType = type.GetKnownType();
        return knownType is null ? type.Name : knownType.Value.ToDisplayName();
    }

    private static string ToDisplayName(this KnownType type)
    {
        switch (type)
        {
            case KnownType.SByte:
                return @"SBYTE";
            case KnownType.Byte:
                return @"BYTE";
            case KnownType.Int16:
                return @"SHORT";
            case KnownType.UInt16:
                return @"USHORT";
            case KnownType.Int32:
                return @"INT";
            case KnownType.UInt32:
                return @"UINT";
            case KnownType.Int64:
                return @"LONG";
            case KnownType.UInt64:
                return @"ULONG";
            case KnownType.Char:
                return @"CHAR";
            case KnownType.Single:
                return @"FLOAT";
            case KnownType.Double:
                return @"DOUBLE";
            case KnownType.Decimal:
                return @"DECIMAL";
            case KnownType.Boolean:
                return @"BOOL";
            case KnownType.String:
                return @"STRING";
            case KnownType.Object:
                return @"OBJECT";
            default:
                throw ExceptionBuilder.UnexpectedValue(type);
        }
    }

    public static bool IsComparable(this Type type)
    {
        var comparable = typeof(IComparable);
        return comparable.IsAssignableFrom(type);
    }

    /// <summary>
    /// Returns whether a value of this type can be null: reference types (classes AND
    /// interfaces) and Nullable&lt;T&gt;.
    /// </summary>
    public static bool CanBeNull(this Type type)
    {
        // BUG FIX: the previous implementation tested Type.IsClass, which is false for
        // interface types even though interface-typed values can hold null — and
        // GetNullableType would then try to construct Nullable<TInterface> and throw.
        // !IsValueType covers classes, interfaces and (class-constrained) generic
        // parameters alike; Nullable<T> itself is a value type, hence the extra check.
        return !type.IsValueType || type.IsNullableOfT();
    }

    public static bool IsNullableOfT(this Type type)
    {
        return type.IsValueType &&
               type.IsGenericType &&
               type.GetGenericTypeDefinition() == typeof(Nullable<>);
    }

    /// <summary>Unwraps Nullable&lt;T&gt; to T; all other types pass through.</summary>
    public static Type GetNonNullableType(this Type type)
    {
        return type.IsNullableOfT()
            ? type.GetGenericArguments().Single()
            : type;
    }

    /// <summary>Wraps non-nullable value types in Nullable&lt;T&gt;; nullable types pass through.</summary>
    public static Type GetNullableType(this Type type)
    {
        return type.CanBeNull()
            ? type
            : typeof(Nullable<>).MakeGenericType(type);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.IO;
using System.Xml.Linq;
using System.Runtime.InteropServices;
using System.Text;
using Xunit;
namespace System
{
/// <summary>
/// Unix-side platform-detection helpers for tests. This is the non-Windows half of a
/// partial class (IsOSX is declared in the other half), so all Windows probes are
/// compile-time constants here.
/// </summary>
public static partial class PlatformDetection
{
    // Windows probes: always false/-1 on this (Unix) build flavor.
    public static bool IsWindowsIoTCore => false;
    public static bool IsWindows => false;
    public static bool IsWindows7 => false;
    public static bool IsWindows8x => false;
    public static bool IsWindows10Version1607OrGreater => false;
    public static bool IsWindows10Version1703OrGreater => false;
    public static bool IsWindows10InsiderPreviewBuild16215OrGreater => false;
    public static bool IsWindows10Version16251OrGreater => false;
    public static bool IsNotOneCoreUAP => true;
    public static bool IsNetfx462OrNewer() { return false; }
    public static bool IsNetfx470OrNewer() { return false; }
    public static bool IsNetfx471OrNewer() { return false; }
    public static bool IsWinRT => false;
    public static int WindowsVersion => -1;

    // Linux distro probes, backed by /etc/os-release.
    public static bool IsOpenSUSE => IsDistroAndVersion("opensuse");
    public static bool IsUbuntu => IsDistroAndVersion("ubuntu");
    public static bool IsDebian => IsDistroAndVersion("debian");
    public static bool IsDebian8 => IsDistroAndVersion("debian", "8");
    public static bool IsUbuntu1404 => IsDistroAndVersion("ubuntu", "14.04");
    public static bool IsCentos7 => IsDistroAndVersion("centos", "7");
    public static bool IsTizen => IsDistroAndVersion("tizen");
    public static bool IsNotFedoraOrRedHatOrCentos => !IsDistroAndVersion("fedora") && !IsDistroAndVersion("rhel") && !IsDistroAndVersion("centos");
    public static bool IsFedora => IsDistroAndVersion("fedora");
    public static bool IsWindowsNanoServer => false;
    public static bool IsWindowsAndElevated => false;

    /// <summary>Darwin kernel version (kern.osrelease); 0.0.0 when not on macOS.</summary>
    public static Version OSXKernelVersion { get; } = GetOSXKernelVersion();

    /// <summary>
    /// Returns a human-readable distro description for test logging, or "" off Linux.
    /// </summary>
    public static string GetDistroVersionString()
    {
        if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
        {
            return "";
        }
        DistroInfo v = ParseOsReleaseFile();
        return "Distro=" + v.Id + " VersionId=" + v.VersionId + " Pretty=" + v.PrettyName + " Version=" + v.Version;
    }

    private static readonly Version s_osxProductVersion = GetOSXProductVersion();

    public static bool IsMacOsHighSierraOrHigher { get; } =
        IsOSX && (s_osxProductVersion.Major > 10 || (s_osxProductVersion.Major == 10 && s_osxProductVersion.Minor >= 13));

    // Parses /etc/os-release into its ID / VERSION_ID / VERSION / PRETTY_NAME fields.
    // A missing file or missing fields leave the corresponding members as "".
    private static DistroInfo ParseOsReleaseFile()
    {
        Debug.Assert(RuntimeInformation.IsOSPlatform(OSPlatform.Linux));

        DistroInfo ret = new DistroInfo();
        ret.Id = "";
        ret.VersionId = "";
        ret.Version = "";
        ret.PrettyName = "";

        if (File.Exists("/etc/os-release"))
        {
            foreach (string line in File.ReadLines("/etc/os-release"))
            {
                if (line.StartsWith("ID=", System.StringComparison.Ordinal))
                {
                    ret.Id = RemoveQuotes(line.Substring("ID=".Length));
                }
                else if (line.StartsWith("VERSION_ID=", System.StringComparison.Ordinal))
                {
                    ret.VersionId = RemoveQuotes(line.Substring("VERSION_ID=".Length));
                }
                else if (line.StartsWith("VERSION=", System.StringComparison.Ordinal))
                {
                    ret.Version = RemoveQuotes(line.Substring("VERSION=".Length));
                }
                else if (line.StartsWith("PRETTY_NAME=", System.StringComparison.Ordinal))
                {
                    ret.PrettyName = RemoveQuotes(line.Substring("PRETTY_NAME=".Length));
                }
            }
        }

        return ret;
    }

    // Strips one pair of surrounding double quotes, if present, after trimming whitespace.
    private static string RemoveQuotes(string s)
    {
        s = s.Trim();
        if (s.Length >= 2 && s[0] == '"' && s[s.Length - 1] == '"')
        {
            // Remove quotes.
            s = s.Substring(1, s.Length - 2);
        }

        return s;
    }

    private struct DistroInfo
    {
        public string Id { get; set; }
        public string VersionId { get; set; }
        public string Version { get; set; }
        public string PrettyName { get; set; }
    }

    /// <summary>
    /// Get whether the OS platform matches the given Linux distro and optional version.
    /// </summary>
    /// <param name="distroId">The distribution id.</param>
    /// <param name="versionId">The distro version. If omitted, compares the distro only.</param>
    /// <returns>Whether the OS platform matches the given Linux distro and optional version.</returns>
    private static bool IsDistroAndVersion(string distroId, string versionId = null)
    {
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
        {
            DistroInfo v = ParseOsReleaseFile();
            if (v.Id == distroId && (versionId == null || v.VersionId == versionId))
            {
                return true;
            }
        }

        return false;
    }

    private static Version GetOSXKernelVersion()
    {
        if (IsOSX)
        {
            byte[] bytes = new byte[256];
            IntPtr bytesLength = new IntPtr(bytes.Length);
            Assert.Equal(0, sysctlbyname("kern.osrelease", bytes, ref bytesLength, null, IntPtr.Zero));
            string versionString = Encoding.UTF8.GetString(bytes);
            // BUG FIX: sysctl writes a NUL-terminated string into a buffer larger than the
            // string itself; decoding the whole buffer leaves trailing '\0' padding that
            // makes Version.Parse throw a FormatException. Trim at the first NUL.
            int nulIndex = versionString.IndexOf('\0');
            if (nulIndex >= 0)
            {
                versionString = versionString.Substring(0, nulIndex);
            }
            return Version.Parse(versionString);
        }

        return new Version(0, 0, 0);
    }

    // Reads the user-visible macOS version from SystemVersion.plist. Best effort:
    // any failure (missing file, malformed plist) yields 0.0.0 rather than throwing.
    private static Version GetOSXProductVersion()
    {
        try
        {
            if (IsOSX)
            {
                // <plist version="1.0">
                // <dict>
                //         <key>ProductBuildVersion</key>
                //         <string>17A330h</string>
                //         <key>ProductCopyright</key>
                //         <string>1983-2017 Apple Inc.</string>
                //         <key>ProductName</key>
                //         <string>Mac OS X</string>
                //         <key>ProductUserVisibleVersion</key>
                //         <string>10.13</string>
                //         <key>ProductVersion</key>
                //         <string>10.13</string>
                // </dict>
                // </plist>
                XElement dict = XDocument.Load("/System/Library/CoreServices/SystemVersion.plist").Root.Element("dict");
                if (dict != null)
                {
                    foreach (XElement key in dict.Elements("key"))
                    {
                        if ("ProductVersion".Equals(key.Value))
                        {
                            // The value is the <string> element immediately following the <key>.
                            XElement stringElement = key.NextNode as XElement;
                            if (stringElement != null && stringElement.Name.LocalName.Equals("string"))
                            {
                                string versionString = stringElement.Value;
                                if (versionString != null)
                                {
                                    return Version.Parse(versionString);
                                }
                            }
                        }
                    }
                }
            }
        }
        catch
        {
            // Deliberately swallowed: fall through to the sentinel version below.
        }

        // In case of exception or couldn't get the version
        return new Version(0, 0, 0);
    }

    [DllImport("libc", SetLastError = true)]
    private static extern int sysctlbyname(string ctlName, byte[] oldp, ref IntPtr oldpLen, byte[] newp, IntPtr newpLen);

    [DllImport("libc", SetLastError = true)]
    internal static extern unsafe uint geteuid();

    // Effective uid 0 == running as root.
    public static bool IsSuperUser => geteuid() == 0;
}
}
| |
using UnityEngine;
using System.Collections;
using System.IO;
using System.Runtime.InteropServices;
/// <summary>
/// Demo GUI for AVPro Windows Media: lets the user load a movie either from disk or
/// from a pinned in-memory copy, and exposes transport/audio controls via IMGUI.
/// The panel fades out when the mouse leaves its area.
/// </summary>
public class AVProWindowsMediaPlayVideoDemo : MonoBehaviour
{
    public GUISkin _skin;
    public AVProWindowsMediaMovie _movie;
    public AVProWindowsMediaGUIDisplay _display;

    private bool _visible = true;          // whether the controls panel is drawn
    private float _alpha = 1.0f;           // fade level of the controls panel
    private bool _playFromMemory = false;  // load via pinned byte[] instead of a file path
    private GCHandle _bytesHandle;         // pins the movie bytes while the native player reads them
    private System.IntPtr _moviePtr;       // address of the pinned buffer
    private uint _movieLength;             // length of the pinned buffer in bytes

    // Unpins and forgets any in-memory movie data.
    private void ReleaseMemoryFile()
    {
        if (_bytesHandle.IsAllocated)
            _bytesHandle.Free();
        _moviePtr = System.IntPtr.Zero;
        _movieLength = 0;
    }

    // FIX: free the pinned buffer on teardown; previously the GCHandle was only
    // released when another file was loaded, leaking pinned memory otherwise.
    private void OnDestroy()
    {
        ReleaseMemoryFile();
    }

    // Reads the movie file fully into memory, pins it, and hands the pointer to the player.
    private void LoadFileToMemory(string folder, string filename)
    {
        string filePath = Path.Combine(folder, filename);

        // If we're running outside of the editor we may need to resolve the relative path
        // as the working-directory may not be that of the application EXE.
        if (!Application.isEditor && !Path.IsPathRooted(filePath))
        {
            string rootPath = Path.GetFullPath(Path.Combine(Application.dataPath, ".."));
            filePath = Path.Combine(rootPath, filePath);
        }

        // NOTE(review): this frees the previously pinned buffer even though the player may
        // still reference it — assumed safe because a replacement movie is loaded
        // immediately below; confirm against the plugin's ownership rules.
        ReleaseMemoryFile();

        if (File.Exists(filePath))
        {
            byte[] bytes = System.IO.File.ReadAllBytes(filePath);
            if (bytes.Length > 0)
            {
                _bytesHandle = GCHandle.Alloc(bytes, GCHandleType.Pinned);
                _moviePtr = _bytesHandle.AddrOfPinnedObject();
                _movieLength = (uint)bytes.Length;
                _movie.LoadMovieFromMemory(true, filename, _moviePtr, _movieLength);
            }
        }
    }

    // Draws the (possibly faded) control panel and the collapsed "Demo Controls" box.
    public void OnGUI()
    {
        GUI.skin = _skin;

        if (_visible)
        {
            GUI.color = new Color(1f, 1f, 1f, _alpha);
            GUILayout.BeginArea(new Rect(0, 0, 740, 350), GUI.skin.box);
            ControlWindow(0);
            GUILayout.EndArea();
        }

        GUI.color = new Color(1f, 1f, 1f, 1f - _alpha);
        GUI.Box(new Rect(0, 0, 128, 32), "Demo Controls");
    }

    // Shows the panel while the mouse is over it; fades it out otherwise.
    void Update()
    {
        Rect r = new Rect(0, 0, 740, 350);
        // Input.mousePosition has its origin bottom-left; Rect expects top-left, hence the flip.
        if (r.Contains(new Vector2(Input.mousePosition.x, Screen.height - Input.mousePosition.y)))
        {
            _visible = true;
            _alpha = 1.0f;
        }
        else
        {
            _alpha -= Time.deltaTime * 4f;
            if (_alpha <= 0.0f)
            {
                _alpha = 0.0f;
                _visible = false;
            }
        }
    }

    /// <summary>Draws the demo control panel contents. <paramref name="id"/> is the IMGUI window id (unused).</summary>
    public void ControlWindow(int id)
    {
        if (_movie == null)
            return;

        GUILayout.Space(16f);
        GUILayout.BeginVertical();

        GUILayout.BeginHorizontal();
        GUILayout.Label("Folder: ", GUILayout.Width(100));
        _movie._folder = GUILayout.TextField(_movie._folder, 192);
        GUILayout.EndHorizontal();

        GUILayout.BeginHorizontal();
        GUILayout.Label("File: ", GUILayout.Width(100));
        _movie._filename = GUILayout.TextField(_movie._filename, 128, GUILayout.Width(440));
        if (GUILayout.Button("Load File", GUILayout.Width(90)))
        {
            if (!_playFromMemory)
            {
                _movie.LoadMovie(true);
            }
            else
            {
                LoadFileToMemory(_movie._folder, _movie._filename);
            }
        }
        GUILayout.EndHorizontal();

        if (_display != null)
        {
            GUILayout.BeginHorizontal();
            GUILayout.Space(100f);
            if (_display._alphaBlend != GUILayout.Toggle(_display._alphaBlend, "Render with Transparency"))
            {
                _display._alphaBlend = !_display._alphaBlend;
                // Transparency needs an RGBA texture; otherwise use the YCbCr path.
                if (_display._alphaBlend)
                {
                    _movie._colourFormat = AVProWindowsMediaMovie.ColourFormat.RGBA32;
                }
                else
                {
                    _movie._colourFormat = AVProWindowsMediaMovie.ColourFormat.YCbCr_HD;
                }
                if (!_playFromMemory)
                {
                    _movie.LoadMovie(true);
                }
                else
                {
                    LoadFileToMemory(_movie._folder, _movie._filename);
                }
            }
            if (_playFromMemory != GUILayout.Toggle(_playFromMemory, "Play from Memory"))
            {
                _playFromMemory = !_playFromMemory;
                // Only reload immediately if a movie is already loaded.
                if (_movie.MovieInstance != null)
                {
                    if (!_playFromMemory)
                    {
                        _movie.LoadMovie(true);
                    }
                    else
                    {
                        LoadFileToMemory(_movie._folder, _movie._filename);
                    }
                }
            }
            GUILayout.EndHorizontal();
        }

        AVProWindowsMedia moviePlayer = _movie.MovieInstance;
        if (moviePlayer != null)
        {
            GUILayout.BeginHorizontal();
            GUILayout.Label("Info:", GUILayout.Width(100f));
            GUILayout.Label(moviePlayer.Width + "x" + moviePlayer.Height + " @ " + moviePlayer.FrameRate.ToString("F2") + " FPS");
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            GUILayout.Label("Volume ", GUILayout.Width(100));
            float volume = _movie._volume;
            float newVolume = GUILayout.HorizontalSlider(volume, 0.0f, 1.0f, GUILayout.Width(200));
            if (volume != newVolume)
            {
                _movie._volume = newVolume;
            }
            GUILayout.Label(_movie._volume.ToString("F1"));
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            GUILayout.Label("Balance ", GUILayout.Width(100));
            float balance = moviePlayer.AudioBalance;
            float newBalance = GUILayout.HorizontalSlider(balance, -1.0f, 1.0f, GUILayout.Width(200));
            if (balance != newBalance)
            {
                moviePlayer.AudioBalance = newBalance;
            }
            GUILayout.Label(moviePlayer.AudioBalance.ToString("F1"));
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            GUILayout.Label("Audio Delay", GUILayout.Width(100));
            int delay = moviePlayer.AudioDelay;
            int newDelay = Mathf.FloorToInt(GUILayout.HorizontalSlider(delay, -1000.0f, 1000.0f, GUILayout.Width(200)));
            if (delay != newDelay)
            {
                moviePlayer.AudioDelay = newDelay;
            }
            float msPerFrame = 1000.0f / moviePlayer.FrameRate;
            int frameDelay = Mathf.FloorToInt((float)newDelay / msPerFrame);
            GUILayout.Label(moviePlayer.AudioDelay.ToString() + "ms (" + frameDelay + " frames)");
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            GUILayout.Label("Time ", GUILayout.Width(100));
            float position = moviePlayer.PositionSeconds;
            float newPosition = GUILayout.HorizontalSlider(position, 0.0f, moviePlayer.DurationSeconds, GUILayout.Width(200));
            if (position != newPosition)
            {
                moviePlayer.PositionSeconds = newPosition;
            }
            GUILayout.Label(moviePlayer.PositionSeconds.ToString("F1") + " / " + moviePlayer.DurationSeconds.ToString("F1") + "s");
            if (GUILayout.Button("Play"))
            {
                moviePlayer.Play();
            }
            if (GUILayout.Button("Pause"))
            {
                moviePlayer.Pause();
            }
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            GUILayout.Label("Frame", GUILayout.Width(100f));
            uint positionFrame = moviePlayer.PositionFrames;
            // PositionFrames reports uint.MaxValue when frame stepping is unavailable.
            if (positionFrame != uint.MaxValue)
            {
                uint newPositionFrame = (uint)GUILayout.HorizontalSlider(positionFrame, 0.0f, (float)moviePlayer.DurationFrames - 1, GUILayout.Width(200));
                if (positionFrame != newPositionFrame)
                {
                    moviePlayer.PositionFrames = newPositionFrame;
                }
                GUILayout.Label(moviePlayer.PositionFrames.ToString() + " / " + (moviePlayer.DurationFrames - 1).ToString());
                if (GUILayout.RepeatButton("<", GUILayout.Width(50)))
                {
                    if (moviePlayer.PositionFrames > 0)
                    {
                        moviePlayer.PositionFrames--;
                    }
                }
                if (GUILayout.RepeatButton(">", GUILayout.Width(50)))
                {
                    if (moviePlayer.PositionFrames < (moviePlayer.DurationFrames - 1))
                    {
                        moviePlayer.PositionFrames++;
                    }
                }
            }
            // BUG FIX: EndHorizontal was previously inside the if-block above, leaving the
            // horizontal group unbalanced (IMGUI layout errors) whenever PositionFrames
            // reported uint.MaxValue. It must run unconditionally to match BeginHorizontal.
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            GUILayout.Label("Rate ", GUILayout.Width(100f));
            GUILayout.Label(moviePlayer.PlaybackRate.ToString("F2") + "x");
            if (GUILayout.Button("-", GUILayout.Width(50)))
            {
                moviePlayer.PlaybackRate = moviePlayer.PlaybackRate * 0.5f;
            }
            if (GUILayout.Button("+", GUILayout.Width(50)))
            {
                moviePlayer.PlaybackRate = moviePlayer.PlaybackRate * 2.0f;
            }
            GUILayout.EndHorizontal();

#if UNITY_EDITOR
            GUILayout.Label("Displaying at " + moviePlayer.DisplayFPS.ToString("F1") + " fps");
#endif
        }
        GUILayout.EndVertical();
    }
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.PubSub.V1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedPublisherServiceApiClientTest
{
// Generated test: CreateTopic with a full Topic request object is forwarded to the
// gRPC stub unchanged, and the stub's response is returned as-is (reference equality).
[xunit::FactAttribute]
public void CreateTopicRequestObject()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
Topic request = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.CreateTopic(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic response = client.CreateTopic(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// Generated test: async CreateTopic with a full request object, exercised through both
// the CallSettings and CancellationToken overloads; the stub's response is returned as-is.
[xunit::FactAttribute]
public async stt::Task CreateTopicRequestObjectAsync()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
Topic request = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.CreateTopicAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expectedResponse), null, null, null, null));
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic responseCallSettings = await client.CreateTopicAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Topic responseCancellationToken = await client.CreateTopicAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
// Generated test: the string-name convenience overload CreateTopic(request.Name)
// builds the same request the stub expects (verified by the strict mock's setup).
[xunit::FactAttribute]
public void CreateTopic()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
Topic request = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.CreateTopic(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic response = client.CreateTopic(request.Name);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// Generated test: async string-name convenience overload, exercised through both the
// CallSettings and CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task CreateTopicAsync()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
Topic request = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.CreateTopicAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expectedResponse), null, null, null, null));
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic responseCallSettings = await client.CreateTopicAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Topic responseCancellationToken = await client.CreateTopicAsync(request.Name, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
// Generated test: the strongly-typed resource-name overload CreateTopic(TopicName)
// builds the same request the stub expects.
[xunit::FactAttribute]
public void CreateTopicResourceNames()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
Topic request = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.CreateTopic(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic response = client.CreateTopic(request.TopicName);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// Generated test: async resource-name overload, exercised through both the
// CallSettings and CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task CreateTopicResourceNamesAsync()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
Topic request = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.CreateTopicAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expectedResponse), null, null, null, null));
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic responseCallSettings = await client.CreateTopicAsync(request.TopicName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Topic responseCancellationToken = await client.CreateTopicAsync(request.TopicName, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
// Generated test: UpdateTopic with a request object is forwarded unchanged and the
// stub's response is returned as-is.
[xunit::FactAttribute]
public void UpdateTopicRequestObject()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
UpdateTopicRequest request = new UpdateTopicRequest
{
Topic = new Topic(),
UpdateMask = new wkt::FieldMask(),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.UpdateTopic(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic response = client.UpdateTopic(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// Generated test: async UpdateTopic with a request object, exercised through both the
// CallSettings and CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task UpdateTopicRequestObjectAsync()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
UpdateTopicRequest request = new UpdateTopicRequest
{
Topic = new Topic(),
UpdateMask = new wkt::FieldMask(),
};
Topic expectedResponse = new Topic
{
TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Labels =
{
{
"key8a0b6e3c",
"value60c16320"
},
},
MessageStoragePolicy = new MessageStoragePolicy(),
KmsKeyName = "kms_key_name06bd122b",
SchemaSettings = new SchemaSettings(),
SatisfiesPzs = false,
MessageRetentionDuration = new wkt::Duration(),
};
mockGrpcClient.Setup(x => x.UpdateTopicAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expectedResponse), null, null, null, null));
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
Topic responseCallSettings = await client.UpdateTopicAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Topic responseCancellationToken = await client.UpdateTopicAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
// Generated test: Publish with a request object is forwarded unchanged and the stub's
// response is returned as-is.
[xunit::FactAttribute]
public void PublishRequestObject()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
PublishRequest request = new PublishRequest
{
TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Messages =
{
new PubsubMessage(),
},
};
PublishResponse expectedResponse = new PublishResponse
{
MessageIds =
{
"message_idsbfb136bc",
},
};
mockGrpcClient.Setup(x => x.Publish(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
PublishResponse response = client.Publish(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// Generated test: async Publish with a request object, exercised through both the
// CallSettings and CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task PublishRequestObjectAsync()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
PublishRequest request = new PublishRequest
{
TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Messages =
{
new PubsubMessage(),
},
};
PublishResponse expectedResponse = new PublishResponse
{
MessageIds =
{
"message_idsbfb136bc",
},
};
mockGrpcClient.Setup(x => x.PublishAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<PublishResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
PublishResponse responseCallSettings = await client.PublishAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
PublishResponse responseCancellationToken = await client.PublishAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
// Generated test: the string-topic convenience overload Publish(topic, messages)
// builds the same request the stub expects.
[xunit::FactAttribute]
public void Publish()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
PublishRequest request = new PublishRequest
{
TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Messages =
{
new PubsubMessage(),
},
};
PublishResponse expectedResponse = new PublishResponse
{
MessageIds =
{
"message_idsbfb136bc",
},
};
mockGrpcClient.Setup(x => x.Publish(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
PublishResponse response = client.Publish(request.Topic, request.Messages);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// Generated test: async string-topic convenience overload, exercised through both the
// CallSettings and CancellationToken overloads.
[xunit::FactAttribute]
public async stt::Task PublishAsync()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
PublishRequest request = new PublishRequest
{
TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Messages =
{
new PubsubMessage(),
},
};
PublishResponse expectedResponse = new PublishResponse
{
MessageIds =
{
"message_idsbfb136bc",
},
};
mockGrpcClient.Setup(x => x.PublishAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<PublishResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
PublishResponse responseCallSettings = await client.PublishAsync(request.Topic, request.Messages, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
PublishResponse responseCancellationToken = await client.PublishAsync(request.Topic, request.Messages, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
// Generated test: the strongly-typed resource-name overload Publish(TopicName, messages)
// builds the same request the stub expects.
[xunit::FactAttribute]
public void PublishResourceNames()
{
moq::Mock<Publisher.PublisherClient> mockGrpcClient = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
PublishRequest request = new PublishRequest
{
TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
Messages =
{
new PubsubMessage(),
},
};
PublishResponse expectedResponse = new PublishResponse
{
MessageIds =
{
"message_idsbfb136bc",
},
};
mockGrpcClient.Setup(x => x.Publish(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
PublisherServiceApiClient client = new PublisherServiceApiClientImpl(mockGrpcClient.Object, null);
PublishResponse response = client.Publish(request.TopicAsTopicName, request.Messages);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
// PublishAsync(TopicName, messages) — the resource-name overload — should
// await the gRPC stub for both async overloads and return its response.
[xunit::FactAttribute]
public async stt::Task PublishResourceNamesAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new PublishRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Messages = { new PubsubMessage(), },
    };
    var expected = new PublishResponse
    {
        MessageIds = { "message_idsbfb136bc", },
    };
    mock.Setup(x => x.PublishAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<PublishResponse>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var viaCallSettings = await apiClient.PublishAsync(req.TopicAsTopicName, req.Messages, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaCancellationToken = await apiClient.PublishAsync(req.TopicAsTopicName, req.Messages, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
// GetTopic(request) should pass the request object straight to the gRPC stub
// and return the stub's Topic unchanged.
[xunit::FactAttribute]
public void GetTopicRequestObject()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new GetTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new Topic
    {
        TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
        MessageStoragePolicy = new MessageStoragePolicy(),
        KmsKeyName = "kms_key_name06bd122b",
        SchemaSettings = new SchemaSettings(),
        SatisfiesPzs = false,
        MessageRetentionDuration = new wkt::Duration(),
    };
    mock.Setup(x => x.GetTopic(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var actual = apiClient.GetTopic(req);
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
// GetTopicAsync(request) should await the gRPC stub for both async overloads
// and return the stub's Topic unchanged.
[xunit::FactAttribute]
public async stt::Task GetTopicRequestObjectAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new GetTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new Topic
    {
        TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
        MessageStoragePolicy = new MessageStoragePolicy(),
        KmsKeyName = "kms_key_name06bd122b",
        SchemaSettings = new SchemaSettings(),
        SatisfiesPzs = false,
        MessageRetentionDuration = new wkt::Duration(),
    };
    mock.Setup(x => x.GetTopicAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var viaCallSettings = await apiClient.GetTopicAsync(req, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaCancellationToken = await apiClient.GetTopicAsync(req, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
// GetTopic(topic-string) should build an equivalent request, forward it to the
// gRPC stub, and return the stub's Topic unchanged.
[xunit::FactAttribute]
public void GetTopic()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new GetTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new Topic
    {
        TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
        MessageStoragePolicy = new MessageStoragePolicy(),
        KmsKeyName = "kms_key_name06bd122b",
        SchemaSettings = new SchemaSettings(),
        SatisfiesPzs = false,
        MessageRetentionDuration = new wkt::Duration(),
    };
    mock.Setup(x => x.GetTopic(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var actual = apiClient.GetTopic(req.Topic);
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
// GetTopicAsync(topic-string) should await the gRPC stub for both async
// overloads and return the stub's Topic unchanged.
[xunit::FactAttribute]
public async stt::Task GetTopicAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new GetTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new Topic
    {
        TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
        MessageStoragePolicy = new MessageStoragePolicy(),
        KmsKeyName = "kms_key_name06bd122b",
        SchemaSettings = new SchemaSettings(),
        SatisfiesPzs = false,
        MessageRetentionDuration = new wkt::Duration(),
    };
    mock.Setup(x => x.GetTopicAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var viaCallSettings = await apiClient.GetTopicAsync(req.Topic, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaCancellationToken = await apiClient.GetTopicAsync(req.Topic, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
// GetTopic(TopicName) — the resource-name overload — should forward to the
// gRPC stub and return the stub's Topic unchanged.
[xunit::FactAttribute]
public void GetTopicResourceNames()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new GetTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new Topic
    {
        TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
        MessageStoragePolicy = new MessageStoragePolicy(),
        KmsKeyName = "kms_key_name06bd122b",
        SchemaSettings = new SchemaSettings(),
        SatisfiesPzs = false,
        MessageRetentionDuration = new wkt::Duration(),
    };
    mock.Setup(x => x.GetTopic(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var actual = apiClient.GetTopic(req.TopicAsTopicName);
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
// GetTopicAsync(TopicName) — the resource-name overload — should await the
// gRPC stub for both async overloads and return the stub's Topic unchanged.
[xunit::FactAttribute]
public async stt::Task GetTopicResourceNamesAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new GetTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new Topic
    {
        TopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
        Labels = { { "key8a0b6e3c", "value60c16320" }, },
        MessageStoragePolicy = new MessageStoragePolicy(),
        KmsKeyName = "kms_key_name06bd122b",
        SchemaSettings = new SchemaSettings(),
        SatisfiesPzs = false,
        MessageRetentionDuration = new wkt::Duration(),
    };
    mock.Setup(x => x.GetTopicAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Topic>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var viaCallSettings = await apiClient.GetTopicAsync(req.TopicAsTopicName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaCancellationToken = await apiClient.GetTopicAsync(req.TopicAsTopicName, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
// DeleteTopic(request) should pass the request object straight to the gRPC stub.
[xunit::FactAttribute]
public void DeleteTopicRequestObject()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DeleteTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new wkt::Empty { };
    mock.Setup(x => x.DeleteTopic(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    apiClient.DeleteTopic(req);
    mock.VerifyAll();
}
// DeleteTopicAsync(request) should await the gRPC stub for both async overloads.
[xunit::FactAttribute]
public async stt::Task DeleteTopicRequestObjectAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DeleteTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new wkt::Empty { };
    mock.Setup(x => x.DeleteTopicAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    await apiClient.DeleteTopicAsync(req, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await apiClient.DeleteTopicAsync(req, st::CancellationToken.None);
    mock.VerifyAll();
}
// DeleteTopic(topic-string) should build an equivalent request and forward it
// to the gRPC stub.
[xunit::FactAttribute]
public void DeleteTopic()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DeleteTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new wkt::Empty { };
    mock.Setup(x => x.DeleteTopic(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    apiClient.DeleteTopic(req.Topic);
    mock.VerifyAll();
}
// DeleteTopicAsync(topic-string) should await the gRPC stub for both async overloads.
[xunit::FactAttribute]
public async stt::Task DeleteTopicAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DeleteTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new wkt::Empty { };
    mock.Setup(x => x.DeleteTopicAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    await apiClient.DeleteTopicAsync(req.Topic, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await apiClient.DeleteTopicAsync(req.Topic, st::CancellationToken.None);
    mock.VerifyAll();
}
// DeleteTopic(TopicName) — the resource-name overload — should forward to the
// gRPC stub.
[xunit::FactAttribute]
public void DeleteTopicResourceNames()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DeleteTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new wkt::Empty { };
    mock.Setup(x => x.DeleteTopic(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    apiClient.DeleteTopic(req.TopicAsTopicName);
    mock.VerifyAll();
}
// DeleteTopicAsync(TopicName) — the resource-name overload — should await the
// gRPC stub for both async overloads.
[xunit::FactAttribute]
public async stt::Task DeleteTopicResourceNamesAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DeleteTopicRequest
    {
        TopicAsTopicName = TopicName.FromProjectTopic("[PROJECT]", "[TOPIC]"),
    };
    var expected = new wkt::Empty { };
    mock.Setup(x => x.DeleteTopicAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    await apiClient.DeleteTopicAsync(req.TopicAsTopicName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await apiClient.DeleteTopicAsync(req.TopicAsTopicName, st::CancellationToken.None);
    mock.VerifyAll();
}
// DetachSubscription(request) should pass the request object straight to the
// gRPC stub and return the stub's response unchanged.
[xunit::FactAttribute]
public void DetachSubscriptionRequestObject()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DetachSubscriptionRequest
    {
        SubscriptionAsSubscriptionName = SubscriptionName.FromProjectSubscription("[PROJECT]", "[SUBSCRIPTION]"),
    };
    var expected = new DetachSubscriptionResponse { };
    mock.Setup(x => x.DetachSubscription(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(expected);
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var actual = apiClient.DetachSubscription(req);
    xunit::Assert.Same(expected, actual);
    mock.VerifyAll();
}
// DetachSubscriptionAsync(request) should await the gRPC stub for both async
// overloads and return the stub's response unchanged.
[xunit::FactAttribute]
public async stt::Task DetachSubscriptionRequestObjectAsync()
{
    var mock = new moq::Mock<Publisher.PublisherClient>(moq::MockBehavior.Strict);
    var req = new DetachSubscriptionRequest
    {
        SubscriptionAsSubscriptionName = SubscriptionName.FromProjectSubscription("[PROJECT]", "[SUBSCRIPTION]"),
    };
    var expected = new DetachSubscriptionResponse { };
    mock.Setup(x => x.DetachSubscriptionAsync(req, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<DetachSubscriptionResponse>(stt::Task.FromResult(expected), null, null, null, null));
    var apiClient = new PublisherServiceApiClientImpl(mock.Object, null);
    var viaCallSettings = await apiClient.DetachSubscriptionAsync(req, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expected, viaCallSettings);
    var viaCancellationToken = await apiClient.DetachSubscriptionAsync(req, st::CancellationToken.None);
    xunit::Assert.Same(expected, viaCancellationToken);
    mock.VerifyAll();
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Xml;
using Nini.Config;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenSim.Framework.ServiceAuth;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Server.Handlers.Base;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Server.Handlers.MapImage
{
/// <summary>
/// Service connector that wires the map-image "add tile" POST handler into an
/// HTTP server. Loads the IMapImageService plugin named in the config section,
/// and optionally an IGridService used by the handler to validate that tile
/// uploads come from the IP address of a registered region.
/// </summary>
public class MapAddServiceConnector : ServiceConnector
{
    private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    private IMapImageService m_MapService;
    private IGridService m_GridService;

    // NOTE: the section name is fixed; the configName constructor argument is
    // passed to the base class but not used to select the section here.
    private string m_ConfigName = "MapImageService";

    /// <summary>
    /// Reads the [MapImageService] config section, loads the required map-image
    /// service plugin (and the optional grid service), then registers the
    /// POST /map stream handler on <paramref name="server"/>.
    /// </summary>
    /// <exception cref="Exception">If the config section or LocalServiceModule entry is missing.</exception>
    public MapAddServiceConnector(IConfigSource config, IHttpServer server, string configName) :
            base(config, server, configName)
    {
        IConfig serverConfig = config.Configs[m_ConfigName];
        if (serverConfig == null)
            throw new Exception(String.Format("No section {0} in config file", m_ConfigName));

        // IsNullOrEmpty rather than == String.Empty: also rejects an explicit null.
        string mapService = serverConfig.GetString("LocalServiceModule", String.Empty);
        if (string.IsNullOrEmpty(mapService))
            throw new Exception("No LocalServiceModule in config file");

        Object[] args = new Object[] { config };
        m_MapService = ServerUtils.LoadPlugin<IMapImageService>(mapService, args);

        // The grid service is optional; when present the handler verifies the
        // caller's IP against the registered region at the reported coordinates.
        string gridService = serverConfig.GetString("GridService", String.Empty);
        if (!string.IsNullOrEmpty(gridService))
            m_GridService = ServerUtils.LoadPlugin<IGridService>(gridService, args);

        if (m_GridService != null)
            m_log.InfoFormat("[MAP IMAGE HANDLER]: GridService check is ON");
        else
            m_log.InfoFormat("[MAP IMAGE HANDLER]: GridService check is OFF");

        bool proxy = serverConfig.GetBoolean("HasProxy", false);
        IServiceAuth auth = ServiceAuth.Create(config, m_ConfigName);

        server.AddStreamHandler(new MapServerPostHandler(m_MapService, m_GridService, proxy, auth));
    }
}
/// <summary>
/// POST /map handler: accepts a base64-encoded map tile plus region X/Y
/// coordinates, optionally verifies the caller's IP against the grid service,
/// and stores the tile via IMapImageService. Responses are small XML documents
/// with a Result element of "Success" or "Failure".
/// </summary>
class MapServerPostHandler : BaseStreamHandler
{
    private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    private IMapImageService m_MapService;
    private IGridService m_GridService;   // may be null: IP check disabled
    bool m_Proxy;                         // true when behind a proxy (use X-Forwarded-For)

    public MapServerPostHandler(IMapImageService service, IGridService grid, bool proxy, IServiceAuth auth) :
            base("POST", "/map", auth)
    {
        m_MapService = service;
        m_GridService = grid;
        m_Proxy = proxy;
    }

    /// <summary>
    /// Parses the form-encoded body (X, Y, DATA), optionally validates the
    /// caller's IP against the registered region at (X, Y), then stores the
    /// decoded tile. Returns an XML success/failure document.
    /// </summary>
    protected override byte[] ProcessRequest(string path, Stream requestData, IOSHttpRequest httpRequest, IOSHttpResponse httpResponse)
    {
        // m_log.DebugFormat("[MAP SERVICE IMAGE HANDLER]: Received {0}", path);

        // using ensures the reader (and underlying stream) is released even if
        // ReadToEnd throws; the original explicit Close leaked on exception.
        string body;
        using (StreamReader sr = new StreamReader(requestData))
            body = sr.ReadToEnd();
        body = body.Trim();

        try
        {
            Dictionary<string, object> request = ServerUtils.ParseQueryString(body);
            if (!request.ContainsKey("X") || !request.ContainsKey("Y") || !request.ContainsKey("DATA"))
            {
                httpResponse.StatusCode = (int)OSHttpStatusCode.ClientErrorBadRequest;
                return FailureResult("Bad request.");
            }

            // Unparseable coordinates fall back to 0,0 (TryParse leaves the outs at 0).
            uint x = 0, y = 0;
            UInt32.TryParse(request["X"].ToString(), out x);
            UInt32.TryParse(request["Y"].ToString(), out y);
            m_log.DebugFormat("[MAP ADD SERVER CONNECTOR]: Received map data for region at {0}-{1}", x, y);

            // string type = "image/jpeg";
            //
            // if (request.ContainsKey("TYPE"))
            // type = request["TYPE"].ToString();

            // Anti-spoofing: the upload must originate from the IP of the region
            // registered at the reported coordinates.
            if (m_GridService != null)
            {
                System.Net.IPAddress ipAddr = GetCallerIP(httpRequest);
                GridRegion r = m_GridService.GetRegionByPosition(UUID.Zero, (int)Util.RegionToWorldLoc(x), (int)Util.RegionToWorldLoc(y));
                if (r != null)
                {
                    if (r.ExternalEndPoint.Address.ToString() != ipAddr.ToString())
                    {
                        m_log.WarnFormat("[MAP IMAGE HANDLER]: IP address {0} may be trying to impersonate region in IP {1}", ipAddr, r.ExternalEndPoint.Address);
                        return FailureResult("IP address of caller does not match IP address of registered region");
                    }
                }
                else
                {
                    m_log.WarnFormat("[MAP IMAGE HANDLER]: IP address {0} may be rogue. Region not found at coordinates {1}-{2}",
                        ipAddr, x, y);
                    return FailureResult("Region not found at given coordinates");
                }
            }

            byte[] data = Convert.FromBase64String(request["DATA"].ToString());

            string reason = string.Empty;
            bool result = m_MapService.AddMapTile((int)x, (int)y, data, out reason);

            if (result)
                return SuccessResult();
            else
                return FailureResult(reason);
        }
        catch (Exception e)
        {
            m_log.ErrorFormat("[MAP SERVICE IMAGE HANDLER]: Exception {0} {1}", e.Message, e.StackTrace);
        }

        return FailureResult("Unexpected server error");
    }

    // Builds <ServerResponse><Result>Success</Result></ServerResponse>.
    private byte[] SuccessResult()
    {
        XmlDocument doc = new XmlDocument();

        XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration,
                "", "");
        doc.AppendChild(xmlnode);

        XmlElement rootElement = doc.CreateElement("", "ServerResponse",
                "");
        doc.AppendChild(rootElement);

        XmlElement result = doc.CreateElement("", "Result", "");
        result.AppendChild(doc.CreateTextNode("Success"));
        rootElement.AppendChild(result);

        return Util.DocToBytes(doc);
    }

    // Builds <ServerResponse><Result>Failure</Result><Message>msg</Message></ServerResponse>.
    private byte[] FailureResult(string msg)
    {
        XmlDocument doc = new XmlDocument();

        XmlNode xmlnode = doc.CreateNode(XmlNodeType.XmlDeclaration,
                "", "");
        doc.AppendChild(xmlnode);

        XmlElement rootElement = doc.CreateElement("", "ServerResponse",
                "");
        doc.AppendChild(rootElement);

        XmlElement result = doc.CreateElement("", "Result", "");
        result.AppendChild(doc.CreateTextNode("Failure"));
        rootElement.AppendChild(result);

        XmlElement message = doc.CreateElement("", "Message", "");
        message.AppendChild(doc.CreateTextNode(msg));
        rootElement.AppendChild(message);

        return Util.DocToBytes(doc);
    }

    /// <summary>
    /// Resolves the caller's IP. When running behind a proxy, prefer the
    /// X-Forwarded-For header (lower-case variant first); fall back to the
    /// socket's remote endpoint when the header is absent or unparseable.
    /// </summary>
    private System.Net.IPAddress GetCallerIP(IOSHttpRequest request)
    {
        if (!m_Proxy)
            return request.RemoteIPEndPoint.Address;

        // We're behind a proxy
        string xff = "X-Forwarded-For";
        string xffValue = request.Headers[xff.ToLower()];
        // IsNullOrEmpty replaces the original redundant null/empty check chain.
        if (string.IsNullOrEmpty(xffValue))
            xffValue = request.Headers[xff];

        if (string.IsNullOrEmpty(xffValue))
        {
            m_log.WarnFormat("[MAP IMAGE HANDLER]: No XFF header");
            return request.RemoteIPEndPoint.Address;
        }

        System.Net.IPEndPoint ep = Util.GetClientIPFromXFF(xffValue);
        if (ep != null)
            return ep.Address;

        // Oops
        return request.RemoteIPEndPoint.Address;
    }
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using Mono.Addins;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Connectors.Hypergrid;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Region.CoreModules.Avatar.Lure
{
/// <summary>
/// Shared region module implementing "lures" (teleport invitations) that can
/// cross Hypergrid boundaries. Outgoing lures append this grid's gatekeeper
/// URL to the invitation message; incoming RequestTeleport IMs are cached so
/// that the invited user can accept them later via a TeleportLureRequest.
/// </summary>
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "HGLureModule")]
public class HGLureModule : ISharedRegionModule
{
    private static readonly ILog m_log = LogManager.GetLogger(
        MethodBase.GetCurrentMethod().DeclaringType);

    // Scenes this shared module is attached to; mutated under lock(m_scenes).
    private readonly List<Scene> m_scenes = new List<Scene>();

    // Resolved lazily in RegionLoaded; used to forward/deliver lure IMs.
    private IMessageTransferModule m_TransferModule = null;
    private bool m_Enabled = false;

    // Gatekeeper URI of this grid, appended (after '@') to outgoing lure messages.
    private string m_ThisGridURL;

    // Pending lures keyed by IM session id; entries added with a 7200s (2 hour) TTL.
    private ExpiringCache<UUID, GridInstantMessage> m_PendingLures = new ExpiringCache<UUID, GridInstantMessage>();

    /// <summary>
    /// Enables the module only when [Messaging] LureModule = "HGLureModule",
    /// and resolves the gatekeeper URI from the Startup/Hypergrid/Messaging sections.
    /// </summary>
    public void Initialise(IConfigSource config)
    {
        if (config.Configs["Messaging"] != null)
        {
            if (config.Configs["Messaging"].GetString("LureModule", string.Empty) == "HGLureModule")
            {
                m_Enabled = true;

                m_ThisGridURL = Util.GetConfigVarFromSections<string>(config, "GatekeeperURI",
                    new string[] { "Startup", "Hypergrid", "Messaging" }, String.Empty);
                // Legacy config key; overrides GatekeeperURI if present. Remove soon!
                m_ThisGridURL = config.Configs["Messaging"].GetString("Gatekeeper", m_ThisGridURL);
                m_log.DebugFormat("[LURE MODULE]: {0} enabled", Name);
            }
        }
    }

    /// <summary>Tracks the scene and subscribes to its IM/client events.</summary>
    public void AddRegion(Scene scene)
    {
        if (!m_Enabled)
            return;

        lock (m_scenes)
        {
            m_scenes.Add(scene);
            scene.EventManager.OnIncomingInstantMessage += OnIncomingInstantMessage;
            scene.EventManager.OnNewClient += OnNewClient;
        }
    }

    /// <summary>
    /// Resolves the message transfer module from the first loaded region.
    /// Disables the whole module if none is available.
    /// </summary>
    public void RegionLoaded(Scene scene)
    {
        if (!m_Enabled)
            return;

        if (m_TransferModule == null)
        {
            m_TransferModule =
                scene.RequestModuleInterface<IMessageTransferModule>();

            if (m_TransferModule == null)
            {
                m_log.Error("[LURE MODULE]: No message transfer module, lures will not work!");

                m_Enabled = false;
                m_scenes.Clear();
                // NOTE(review): only the current scene's handlers are unsubscribed
                // here; scenes added earlier keep theirs — looks like an oversight,
                // confirm before relying on full teardown.
                scene.EventManager.OnNewClient -= OnNewClient;
                scene.EventManager.OnIncomingInstantMessage -= OnIncomingInstantMessage;
            }
        }
    }

    /// <summary>Stops tracking the scene and unsubscribes its events.</summary>
    public void RemoveRegion(Scene scene)
    {
        if (!m_Enabled)
            return;

        lock (m_scenes)
        {
            m_scenes.Remove(scene);
            scene.EventManager.OnNewClient -= OnNewClient;
            scene.EventManager.OnIncomingInstantMessage -= OnIncomingInstantMessage;
        }
    }

    // Hooks per-client events so this module sees lure starts and acceptances.
    void OnNewClient(IClientAPI client)
    {
        client.OnInstantMessage += OnInstantMessage;
        client.OnStartLure += OnStartLure;
        client.OnTeleportLureRequest += OnTeleportLureRequest;
    }

    public void PostInitialise()
    {
    }

    public void Close()
    {
    }

    public string Name
    {
        get { return "HGLureModule"; }
    }

    public Type ReplaceableInterface
    {
        get { return null; }
    }

    // Intentionally empty: ordinary IMs are handled elsewhere; this module only
    // cares about RequestTeleport traffic (see OnIncomingInstantMessage).
    void OnInstantMessage(IClientAPI client, GridInstantMessage im)
    {
    }

    /// <summary>
    /// Caches incoming RequestTeleport IMs (keyed by session id) and forwards
    /// them to the recipient via the transfer module.
    /// </summary>
    void OnIncomingInstantMessage(GridInstantMessage im)
    {
        if (im.dialog == (byte)InstantMessageDialog.RequestTeleport)
        {
            UUID sessionID = new UUID(im.imSessionID);

            if (!m_PendingLures.Contains(sessionID))
            {
                m_log.DebugFormat("[HG LURE MODULE]: RequestTeleport sessionID={0}, regionID={1}, message={2}", im.imSessionID, im.RegionID, im.message);
                m_PendingLures.Add(sessionID, im, 7200); // 2 hours
            }

            // Forward. We do this, because the IM module explicitly rejects
            // IMs of this type
            if (m_TransferModule != null)
                m_TransferModule.SendInstantMessage(im, delegate(bool success) { });
        }
    }

    /// <summary>
    /// Handles a local user starting a lure: tags the message with this grid's
    /// gatekeeper URL, caches the invitation, and sends it to the target.
    /// </summary>
    public void OnStartLure(byte lureType, string message, UUID targetid, IClientAPI client)
    {
        if (!(client.Scene is Scene))
            return;

        Scene scene = (Scene)(client.Scene);
        // NOTE(review): presence is assumed non-null for a connected client;
        // a race with logout would NRE below — confirm acceptable.
        ScenePresence presence = scene.GetScenePresence(client.AgentId);

        // The '@<gatekeeper-url>' suffix is what lets the receiving side detect
        // a cross-grid (Hypergrid) lure in Lure() below.
        message += "@" + m_ThisGridURL;

        m_log.DebugFormat("[HG LURE MODULE]: TP invite with message {0}", message);

        UUID sessionID = UUID.Random();

        GridInstantMessage m = new GridInstantMessage(scene, client.AgentId,
            client.FirstName+" "+client.LastName, targetid,
            (byte)InstantMessageDialog.RequestTeleport, false,
            message, sessionID, false, presence.AbsolutePosition,
            new Byte[0], true);
        m.RegionID = client.Scene.RegionInfo.RegionID.Guid;

        m_log.DebugFormat("[HG LURE MODULE]: RequestTeleport sessionID={0}, regionID={1}, message={2}", m.imSessionID, m.RegionID, m.message);
        m_PendingLures.Add(sessionID, m, 7200); // 2 hours

        if (m_TransferModule != null)
        {
            m_TransferModule.SendInstantMessage(m,
                delegate(bool success) { });
        }
    }

    /// <summary>
    /// Handles the invited user accepting a lure: looks up the cached IM by
    /// lure id and performs the teleport; one-shot (entry is removed).
    /// </summary>
    public void OnTeleportLureRequest(UUID lureID, uint teleportFlags, IClientAPI client)
    {
        if (!(client.Scene is Scene))
            return;

        // Scene scene = (Scene)(client.Scene);

        GridInstantMessage im = null;
        if (m_PendingLures.TryGetValue(lureID, out im))
        {
            m_PendingLures.Remove(lureID);
            Lure(client, teleportFlags, im);
        }
        else
            m_log.DebugFormat("[HG LURE MODULE]: pending lure {0} not found", lureID);

    }

    /// <summary>
    /// Executes the teleport: locally when the target region is known to this
    /// grid, otherwise resolves the gatekeeper URL embedded after the last '@'
    /// in the IM message and teleports via the Hypergrid entity transfer path.
    /// </summary>
    private void Lure(IClientAPI client, uint teleportflags, GridInstantMessage im)
    {
        Scene scene = (Scene)(client.Scene);
        GridRegion region = scene.GridService.GetRegionByUUID(scene.RegionInfo.ScopeID, new UUID(im.RegionID));
        if (region != null)
            // +0.5 on x/y centers the avatar on the target parcel cell.
            scene.RequestTeleportLocation(client, region.RegionHandle, im.Position + new Vector3(0.5f, 0.5f, 0f), Vector3.UnitX, teleportflags);
        else // we don't have that region here. Check if it's HG
        {
            string[] parts = im.message.Split(new char[] { '@' });
            if (parts.Length > 1)
            {
                string url = parts[parts.Length - 1]; // the last part
                // Trim slashes so "http://grid/" and "http://grid" compare equal.
                if (url.Trim(new char[] {'/'}) != m_ThisGridURL.Trim(new char[] {'/'}))
                {
                    m_log.DebugFormat("[HG LURE MODULE]: Luring agent to grid {0} region {1} position {2}", url, im.RegionID, im.Position);
                    GatekeeperServiceConnector gConn = new GatekeeperServiceConnector();
                    GridRegion gatekeeper = new GridRegion();
                    gatekeeper.ServerURI = url;
                    GridRegion finalDestination = gConn.GetHyperlinkRegion(gatekeeper, new UUID(im.RegionID));
                    if (finalDestination != null)
                    {
                        ScenePresence sp = scene.GetScenePresence(client.AgentId);
                        IEntityTransferModule transferMod = scene.RequestModuleInterface<IEntityTransferModule>();
                        if (transferMod != null && sp != null)
                            transferMod.DoTeleport(
                                sp, gatekeeper, finalDestination, im.Position + new Vector3(0.5f, 0.5f, 0f),
                                Vector3.UnitX, teleportflags);
                    }

                }
            }
        }
    }
}
}
| |
/* ***************************************************************************
* This file is part of SharpNEAT - Evolution of Neural Networks.
*
* Copyright 2004-2006, 2009-2010 Colin Green (sharpneat@gmail.com)
*
* SharpNEAT is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* SharpNEAT is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with SharpNEAT. If not, see <http://www.gnu.org/licenses/>.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Xml;
using SharpNeat.Utility;
namespace SharpNeat.Network
{
/// <summary>
/// Static class for reading and writing NetworkDefinition(s) to and from XML.
/// </summary>
public static class NetworkXmlIO
{
    #region Constants [XML Strings]

    // Element names. Deliberately short (e.g. "Con", "Fn") to keep the emitted XML compact.
    const string __ElemRoot = "Root";
    const string __ElemNetworks = "Networks";
    const string __ElemNetwork = "Network";
    const string __ElemNodes = "Nodes";
    const string __ElemNode = "Node";
    const string __ElemConnections = "Connections";
    const string __ElemConnection = "Con";
    const string __ElemActivationFunctions = "ActivationFunctions";
    const string __ElemActivationFn = "Fn";

    // Attribute names.
    const string __AttrId = "id";
    const string __AttrName = "name";
    const string __AttrType = "type";
    const string __AttrSourceId = "src";
    const string __AttrTargetId = "tgt";
    const string __AttrWeight = "wght";
    const string __AttrActivationFunctionId = "fnId";
    const string __AttrAuxState = "aux";
    const string __AttrProbability = "prob";

    #endregion

    #region Public Static Methods [Save to XmlDocument]

    /// <summary>
    /// Writes a single NetworkDefinition to XML within a containing 'Root' element and the activation function
    /// library that the genome is associated with.
    /// The XML is returned as a newly created XmlDocument.
    /// </summary>
    /// <param name="networkDef">The NetworkDefinition to save.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static XmlDocument SaveComplete(NetworkDefinition networkDef, bool nodeFnIds)
    {
        XmlDocument doc = new XmlDocument();
        // AppendChild() returns an XmlWriter that builds directly into the (empty) document.
        using(XmlWriter xw = doc.CreateNavigator().AppendChild())
        {
            WriteComplete(xw, networkDef, nodeFnIds);
        }
        return doc;
    }

    /// <summary>
    /// Writes a list of NetworkDefinition(s) to XML within a containing 'Root' element and the activation
    /// function library that the genomes are associated with.
    /// The XML is returned as a newly created XmlDocument.
    /// </summary>
    /// <param name="networkDefList">List of genomes to write as XML.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static XmlDocument SaveComplete(IList<NetworkDefinition> networkDefList, bool nodeFnIds)
    {
        XmlDocument doc = new XmlDocument();
        using(XmlWriter xw = doc.CreateNavigator().AppendChild())
        {
            WriteComplete(xw, networkDefList, nodeFnIds);
        }
        return doc;
    }

    /// <summary>
    /// Writes a single NetworkDefinition to XML (no 'Root' wrapper and no activation function library).
    /// The XML is returned as a newly created XmlDocument.
    /// </summary>
    /// <param name="networkDef">The genome to save.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static XmlDocument Save(NetworkDefinition networkDef, bool nodeFnIds)
    {
        XmlDocument doc = new XmlDocument();
        using(XmlWriter xw = doc.CreateNavigator().AppendChild())
        {
            Write(xw, networkDef, nodeFnIds);
        }
        return doc;
    }

    #endregion

    #region Public Static Methods [Load from XmlDocument]

    /// <summary>
    /// Reads a list of NetworkDefinition(s) from XML that has a containing 'Root' element. The root element
    /// also contains the activation function library that the network definitions are associated with.
    /// </summary>
    /// <param name="xmlNode">The XmlNode to read from. This can be an XmlDocument or XmlElement.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be read. If false then
    /// all node activation function IDs default to 0.</param>
    public static List<NetworkDefinition> LoadCompleteGenomeList(XmlNode xmlNode, bool nodeFnIds)
    {
        using(XmlNodeReader xr = new XmlNodeReader(xmlNode))
        {
            return ReadCompleteNetworkDefinitionList(xr, nodeFnIds);
        }
    }

    /// <summary>
    /// Reads a NetworkDefinition from XML.
    /// </summary>
    /// <param name="xmlNode">The XmlNode to read from. This can be an XmlDocument or XmlElement.</param>
    /// <param name="activationFnLib">The activation function library used to decode node activation function IDs.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be read. They are required
    /// for HyperNEAT genomes but not for NEAT. If false then all node activation function IDs default to 0.</param>
    public static NetworkDefinition ReadGenome(XmlNode xmlNode, IActivationFunctionLibrary activationFnLib, bool nodeFnIds)
    {
        using(XmlNodeReader xr = new XmlNodeReader(xmlNode))
        {
            return ReadNetworkDefinition(xr, activationFnLib, nodeFnIds);
        }
    }

    #endregion

    #region Public Static Methods [Write to XML]

    /// <summary>
    /// Writes a list of INetworkDefinition(s) to XML within a containing 'Root' element and the activation
    /// function library that the genomes are associated with.
    /// </summary>
    /// <param name="xw">XmlWriter to write XML to.</param>
    /// <param name="networkDefList">List of network definitions to write as XML.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static void WriteComplete(XmlWriter xw, IList<NetworkDefinition> networkDefList, bool nodeFnIds)
    {
        // Copy into an IList<INetworkDefinition> so we can dispatch to the interface-typed overload
        // (IList<T> is not covariant).
        int count = networkDefList.Count;
        List<INetworkDefinition> tmpList = new List<INetworkDefinition>(count);
        foreach(NetworkDefinition networkDef in networkDefList) {
            tmpList.Add(networkDef);
        }
        WriteComplete(xw, tmpList, nodeFnIds);
    }

    /// <summary>
    /// Writes a list of INetworkDefinition(s) to XML within a containing 'Root' element and the activation
    /// function library that the genomes are associated with.
    /// </summary>
    /// <param name="xw">XmlWriter to write XML to.</param>
    /// <param name="networkDefList">List of network definitions to write as XML.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static void WriteComplete(XmlWriter xw, IList<INetworkDefinition> networkDefList, bool nodeFnIds)
    {
        if(networkDefList.Count == 0)
        {   // Nothing to do.
            return;
        }

        // <Root>
        xw.WriteStartElement(__ElemRoot);

        // Write activation function library from the first network definition
        // (we expect all networks to use the same library).
        IActivationFunctionLibrary activationFnLib = networkDefList[0].ActivationFnLibrary;
        Write(xw, activationFnLib);

        // <Networks>
        xw.WriteStartElement(__ElemNetworks);

        // Write networks.
        foreach(INetworkDefinition networkDef in networkDefList) {
            // Reference equality check: all definitions in the list are expected to share one library.
            Debug.Assert(networkDef.ActivationFnLibrary == activationFnLib);
            Write(xw, networkDef, nodeFnIds);
        }

        // </Networks>
        xw.WriteEndElement();

        // </Root>
        xw.WriteEndElement();
    }

    /// <summary>
    /// Writes a single INetworkDefinition to XML within a containing 'Root' element and the activation
    /// function library that the genome is associated with.
    /// </summary>
    /// <param name="xw">XmlWriter to write XML to.</param>
    /// <param name="networkDef">Network definition to write as XML.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static void WriteComplete(XmlWriter xw, INetworkDefinition networkDef, bool nodeFnIds)
    {
        // <Root>
        xw.WriteStartElement(__ElemRoot);

        // Write activation function library.
        Write(xw, networkDef.ActivationFnLibrary);

        // <Networks>
        xw.WriteStartElement(__ElemNetworks);

        // Write single network.
        Write(xw, networkDef, nodeFnIds);

        // </Networks>
        xw.WriteEndElement();

        // </Root>
        xw.WriteEndElement();
    }

    /// <summary>
    /// Writes an INetworkDefinition to XML.
    /// </summary>
    /// <param name="xw">XmlWriter to write XML to.</param>
    /// <param name="networkDef">Network definition to write as XML.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be emitted. They are required
    /// for HyperNEAT genomes but not for NEAT.</param>
    public static void Write(XmlWriter xw, INetworkDefinition networkDef, bool nodeFnIds)
    {
        // <Network>
        xw.WriteStartElement(__ElemNetwork);

        // Emit nodes.
        xw.WriteStartElement(__ElemNodes);
        foreach(INetworkNode node in networkDef.NodeList)
        {
            xw.WriteStartElement(__ElemNode);
            xw.WriteAttributeString(__AttrType, GetNodeTypeString(node.NodeType));
            xw.WriteAttributeString(__AttrId, node.Id.ToString(NumberFormatInfo.InvariantInfo));
            if(nodeFnIds) {
                xw.WriteAttributeString(__AttrActivationFunctionId, node.ActivationFnId.ToString(NumberFormatInfo.InvariantInfo));
            }
            xw.WriteEndElement();
        }
        xw.WriteEndElement();

        // Emit connections. The "R" (round-trip) format preserves the double weight exactly.
        xw.WriteStartElement(__ElemConnections);
        foreach(INetworkConnection con in networkDef.ConnectionList)
        {
            xw.WriteStartElement(__ElemConnection);
            xw.WriteAttributeString(__AttrSourceId, con.SourceNodeId.ToString(NumberFormatInfo.InvariantInfo));
            xw.WriteAttributeString(__AttrTargetId, con.TargetNodeId.ToString(NumberFormatInfo.InvariantInfo));
            xw.WriteAttributeString(__AttrWeight, con.Weight.ToString("R", NumberFormatInfo.InvariantInfo));
            xw.WriteEndElement();
        }
        xw.WriteEndElement();

        // </Network>
        xw.WriteEndElement();
    }

    /// <summary>
    /// Writes an activation function library to XML. This links activation function names to the
    /// integer IDs used by network nodes, which allows us emit just the ID for each node thus
    /// resulting in XML that is more compact compared to emitting the activation function name for
    /// each node.
    /// </summary>
    public static void Write(XmlWriter xw, IActivationFunctionLibrary activationFnLib)
    {
        // <ActivationFunctions>
        xw.WriteStartElement(__ElemActivationFunctions);
        IList<ActivationFunctionInfo> fnList = activationFnLib.GetFunctionList();
        foreach(ActivationFunctionInfo fnInfo in fnList)
        {
            xw.WriteStartElement(__ElemActivationFn);
            xw.WriteAttributeString(__AttrId, fnInfo.Id.ToString(NumberFormatInfo.InvariantInfo));
            xw.WriteAttributeString(__AttrName, fnInfo.ActivationFunction.FunctionId);
            xw.WriteAttributeString(__AttrProbability, fnInfo.SelectionProbability.ToString("R", NumberFormatInfo.InvariantInfo));
            xw.WriteEndElement();
        }
        xw.WriteEndElement();
    }

    #endregion

    #region Public Static Methods [Read from XML]

    /// <summary>
    /// Reads a list of NetworkDefinition(s) from XML that has a containing 'Root' element. The root
    /// element also contains the activation function library that the genomes are associated with.
    /// </summary>
    /// <param name="xr">The XmlReader to read from.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be read. They are required
    /// for HyperNEAT genomes but not NEAT</param>
    public static List<NetworkDefinition> ReadCompleteNetworkDefinitionList(XmlReader xr, bool nodeFnIds)
    {
        // Find <Root>.
        XmlIoUtils.MoveToElement(xr, false, __ElemRoot);

        // Read IActivationFunctionLibrary.
        XmlIoUtils.MoveToElement(xr, true, __ElemActivationFunctions);
        IActivationFunctionLibrary activationFnLib = ReadActivationFunctionLibrary(xr);
        XmlIoUtils.MoveToElement(xr, false, __ElemNetworks);

        // NOTE: ReadSubtree() returns a reader over the same underlying cursor as xr, so advancing
        // either reader advances both; the mixed use of xr and xrSubtree below is intentional.
        // xrSubtree is used to scope sibling scans to the <Networks> element.
        List<NetworkDefinition> networkDefList = new List<NetworkDefinition>();
        using(XmlReader xrSubtree = xr.ReadSubtree())
        {
            // Re-scan for the root <Networks> element.
            XmlIoUtils.MoveToElement(xrSubtree, false);

            // Move to first Network elem.
            XmlIoUtils.MoveToElement(xr, true, __ElemNetwork);

            // Read Network elements.
            do
            {
                NetworkDefinition networkDef = ReadNetworkDefinition(xr, activationFnLib, nodeFnIds);
                networkDefList.Add(networkDef);
            }
            while(xrSubtree.ReadToNextSibling(__ElemNetwork));
        }
        return networkDefList;
    }

    /// <summary>
    /// Reads a network definition from XML.
    /// An activation function library is required to decode the function ID at each node, typically the
    /// library is stored alongside the network definition XML and will have already been read elsewhere and
    /// passed in here.
    /// </summary>
    /// <param name="xr">The XmlReader to read from.</param>
    /// <param name="activationFnLib">The activation function library used to decode node activation function IDs.</param>
    /// <param name="nodeFnIds">Indicates if node activation function IDs should be read. They are required
    /// for HyperNEAT genomes but not NEAT</param>
    public static NetworkDefinition ReadNetworkDefinition(XmlReader xr, IActivationFunctionLibrary activationFnLib, bool nodeFnIds)
    {
        // Find <Network>.
        XmlIoUtils.MoveToElement(xr, false, __ElemNetwork);
        // Remember the depth of <Network> so we can skip to its closing tag when done.
        int initialDepth = xr.Depth;

        // Find <Nodes>.
        XmlIoUtils.MoveToElement(xr, true, __ElemNodes);

        // Create a reader over the <Nodes> sub-tree.
        int inputNodeCount = 0;
        int outputNodeCount = 0;
        NodeList nodeList = new NodeList();
        using(XmlReader xrSubtree = xr.ReadSubtree())
        {
            // Re-scan for the root <Nodes> element.
            XmlIoUtils.MoveToElement(xrSubtree, false);

            // Move to first node elem.
            XmlIoUtils.MoveToElement(xrSubtree, true, __ElemNode);

            // Read node elements.
            do
            {
                NodeType nodeType = ReadAttributeAsNodeType(xrSubtree, __AttrType);
                uint id = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrId);
                int fnId = 0;
                double[] auxState = null;
                if(nodeFnIds)
                {   // Read activation fn ID.
                    fnId = XmlIoUtils.ReadAttributeAsInt(xrSubtree, __AttrActivationFunctionId);

                    // Read aux state as comma separated list of real values.
                    // (Only read when nodeFnIds is true; otherwise auxState stays null.)
                    auxState = XmlIoUtils.ReadAttributeAsDoubleArray(xrSubtree, __AttrAuxState);
                }

                // TODO: Read node aux state data.
                NetworkNode node = new NetworkNode(id, nodeType, fnId, auxState);
                nodeList.Add(node);

                // Track the number of input and output nodes.
                switch(nodeType)
                {
                    case NodeType.Input:
                        inputNodeCount++;
                        break;
                    case NodeType.Output:
                        outputNodeCount++;
                        break;
                }
            }
            while(xrSubtree.ReadToNextSibling(__ElemNode));
        }

        // Find <Connections>.
        XmlIoUtils.MoveToElement(xr, false, __ElemConnections);

        // Create a reader over the <Connections> sub-tree.
        ConnectionList connList = new ConnectionList();
        using(XmlReader xrSubtree = xr.ReadSubtree())
        {
            // Re-scan for the root <Connections> element.
            XmlIoUtils.MoveToElement(xrSubtree, false);

            // Move to first connection elem. A network may legitimately have zero connections,
            // hence we only loop if the first child element really is a <Con>.
            string localName = XmlIoUtils.MoveToElement(xrSubtree, true);
            if(localName == __ElemConnection)
            {   // We have at least one connection.
                // Read connection elements.
                do
                {
                    uint srcId = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrSourceId);
                    uint tgtId = XmlIoUtils.ReadAttributeAsUInt(xrSubtree, __AttrTargetId);
                    double weight = XmlIoUtils.ReadAttributeAsDouble(xrSubtree, __AttrWeight);
                    NetworkConnection conn = new NetworkConnection(srcId, tgtId, weight);
                    connList.Add(conn);
                }
                while(xrSubtree.ReadToNextSibling(__ElemConnection));
            }
        }

        // Move the reader beyond the closing tags </Connections> and </Network>.
        // We advance until the reader's depth returns to (or above) the depth of the <Network> element.
        do
        {
            if (xr.Depth <= initialDepth) {
                break;
            }
        }
        while(xr.Read());

        // Construct and return loaded network definition.
        return new NetworkDefinition(inputNodeCount, outputNodeCount, activationFnLib, nodeList, connList);
    }

    /// <summary>
    /// Reads an IActivationFunctionLibrary from the provided XmlReader.
    /// </summary>
    public static IActivationFunctionLibrary ReadActivationFunctionLibrary(XmlReader xr)
    {
        XmlIoUtils.MoveToElement(xr, false, __ElemActivationFunctions);

        // Create a reader over the sub-tree.
        List<ActivationFunctionInfo> fnList = new List<ActivationFunctionInfo>();
        using(XmlReader xrSubtree = xr.ReadSubtree())
        {
            // Re-scan for the root element.
            XmlIoUtils.MoveToElement(xrSubtree, false);

            // Move to first function elem.
            XmlIoUtils.MoveToElement(xrSubtree, true, __ElemActivationFn);

            // Read function elements.
            do
            {
                int id = XmlIoUtils.ReadAttributeAsInt(xrSubtree, __AttrId);
                double selectionProb = XmlIoUtils.ReadAttributeAsDouble(xrSubtree, __AttrProbability);
                string fnName = xrSubtree.GetAttribute(__AttrName);

                // Lookup function name.
                IActivationFunction activationFn = GetActivationFunction(fnName);

                // Add new function to our list of functions.
                ActivationFunctionInfo fnInfo = new ActivationFunctionInfo(id, selectionProb, activationFn);
                fnList.Add(fnInfo);
            }
            while(xrSubtree.ReadToNextSibling(__ElemActivationFn));
        }

        // If we have read library items then ensure that their selection probabilities are normalized.
        if(fnList.Count != 0) {
            NormalizeSelectionProbabilities(fnList);
        }
        return new DefaultActivationFunctionLibrary(fnList);
    }

    #endregion

    #region Public Static Methods [Low-level XML Parsing]

    /// <summary>
    /// Read the named attribute and parse its string value as a NodeType.
    /// </summary>
    public static NodeType ReadAttributeAsNodeType(XmlReader xr, string attrName)
    {
        string valStr = xr.GetAttribute(attrName);
        return GetNodeType(valStr);
    }

    /// <summary>
    /// Gets the NodeType for the specified node type string.
    /// Note. we use our own type strings in place of Enum.ToString() to provide more compact XML.
    /// </summary>
    public static NodeType GetNodeType(string type)
    {
        switch(type)
        {
            case "bias":
                return NodeType.Bias;
            case "in":
                return NodeType.Input;
            case "out":
                return NodeType.Output;
            case "hid":
                return NodeType.Hidden;
        }
        // InvalidDataException is only available on .NET 4+, hence the conditional compilation.
        #if NET4
        throw new InvalidDataException(string.Format("Unknown node type [{0}]", type));
        #else
        throw new InvalidOperationException(string.Format("Unknown node type [{0}]", type));
        #endif
    }

    /// <summary>
    /// Gets the node type string for the specified NodeType.
    /// Note. we use our own type strings in place of Enum.ToString() to provide more compact XML.
    /// </summary>
    public static string GetNodeTypeString(NodeType nodeType)
    {
        switch(nodeType)
        {
            case NodeType.Bias:
                return "bias";
            case NodeType.Input:
                return "in";
            case NodeType.Output:
                return "out";
            case NodeType.Hidden:
                return "hid";
        }
        throw new ArgumentException(string.Format("Unexpected NodeType [{0}]", nodeType));
    }

    /// <summary>
    /// Gets an IActivationFunction from its short name.
    /// </summary>
    public static IActivationFunction GetActivationFunction(string name)
    {
        // Each case returns the shared singleton instance for the named function.
        switch(name)
        {
            case "BipolarGaussian":
                return BipolarGaussian.__DefaultInstance;
            case "BipolarSigmoid":
                return BipolarSigmoid.__DefaultInstance;
            case "Linear":
                return Linear.__DefaultInstance;
            case "Sine":
                return Sine.__DefaultInstance;
            case "Absolute":
                return Absolute.__DefaultInstance;
            case "AbsoluteRoot":
                return AbsoluteRoot.__DefaultInstance;
            case "Gaussian":
                return Gaussian.__DefaultInstance;
            case "InverseAbsoluteSigmoid":
                return InverseAbsoluteSigmoid.__DefaultInstance;
            case "PlainSigmoid":
                return PlainSigmoid.__DefaultInstance;
            case "ReducedSigmoid":
                return ReducedSigmoid.__DefaultInstance;
            case "SteepenedSigmoid":
                return SteepenedSigmoid.__DefaultInstance;
            case "SteepenedSigmoidApproximation":
                return SteepenedSigmoidApproximation.__DefaultInstance;
            case "StepFunction":
                return StepFunction.__DefaultInstance;
            case "RbfGaussian":
                return RbfGaussian.__DefaultInstance;
        }
        throw new ArgumentException(string.Format("Unexpected activation function [{0}]", name));
    }

    #endregion

    #region Private Static Methods

    /// <summary>
    /// Normalize the selection probabilities of the provided ActivationFunctionInfo items
    /// so that they sum to 1.0.
    /// </summary>
    private static void NormalizeSelectionProbabilities(IList<ActivationFunctionInfo> fnList)
    {
        double total = 0.0;
        int count = fnList.Count;
        for(int i=0; i<count; i++) {
            total += fnList[i].SelectionProbability;
        }
        if(Math.Abs(total - 1.0) < 0.0001)
        {   // Probabilities already normalized to within acceptable limits (from rounding errors).
            return;
        }

        // Normalize the probabilities. Note that ActivationFunctionInfo is immutable therefore
        // we replace the existing items.
        for(int i=0; i<count; i++)
        {
            ActivationFunctionInfo item = fnList[i];
            fnList[i] = new ActivationFunctionInfo(item.Id, item.SelectionProbability/total, item.ActivationFunction);
        }
    }

    #endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
**
** Purpose: This class is used to determine which binary compatibility
** behaviors are enabled at runtime. A type for
** tracking which target Framework an app was built against, or an
** appdomain-wide setting from the host telling us which .NET
** Framework version we should emulate.
**
**
===========================================================*/
using System;
using System.Diagnostics.Contracts;
using System.Globalization;
using System.Runtime.CompilerServices;
namespace System.Runtime.Versioning
{
// Provides a simple way to test whether an application was built against specific .NET Framework
// flavors and versions, with the intent of allowing Framework developers to mimic behavior of older
// Framework releases. This allows us to make behavioral breaking changes in a binary compatible way,
// for an application. This works at the per-AppDomain level, not process nor per-Assembly.
//
// To opt into newer behavior, applications must specify a TargetFrameworkAttribute on their assembly
// saying what version they targeted, or a host must set this when creating an AppDomain. Note
// that command line apps don't have this attribute!
//
// To use this class:
// Developers need to figure out whether they're working on the phone, desktop, or Silverlight, and
// what version they are introducing a breaking change in. Pick one predicate below, and use that
// to decide whether to run the new or old behavior. Example:
//
// if (BinaryCompatibility.TargetsAtLeast_Phone_V7_1) {
// // new behavior for phone 7.1 and other releases where we will integrate this change, like .NET Framework 4.5
// }
// else {
// // Legacy behavior
// }
//
// If you are making a breaking change in one specific branch that won't be integrated normally to
// all other branches (ie, say you're making breaking changes to Windows Phone 8 after .NET Framework v4.5
// has locked down for release), then add in specific predicates for each relevant platform.
//
// Maintainers of this class:
// Revisit the table once per release, perhaps at the end of the last coding milestone, to verify a
// default policy saying whether all quirks from a particular flavor & release should be enabled in
// other releases (ie, should all Windows Phone 8.0 quirks be enabled in .NET Framework v5)?
//
// History:
// Here is the order in which releases were made along with some basic integration information. The idea
// is to track what set of compatibility features are present in each other.
// While we cannot guarantee this list is perfectly linear (ie, a feature could be implemented in the last
// few weeks before shipping and make it into only one of three concomitant releases due to triaging),
// this is a good high level summary of code flow.
//
// Desktop Silverlight Windows Phone
// .NET Framework 3.0 -> Silverlight 2
// .NET Framework 3.5
// Silverlight 3
// Silverlight 4
// .NET Framework 4 Phone 8.0
// .NET Framework 4.5 Phone 8.1
// .NET Framework 4.5.1 Phone 8.1
//
// (Note: Windows Phone 7.0 was built using the .NET Compact Framework, which forked around v1 or v1.1)
//
// Compatibility Policy decisions:
// If we cannot determine that an app was built for a newer .NET Framework (ie, the app has no
// TargetFrameworkAttribute), then quirks will be enabled to emulate older behavior.
// As such, your test code should define the TargetFrameworkAttribute (which VS does for you)
// if you want to see the new behavior!
[FriendAccessAllowed]
internal static class BinaryCompatibility
{
// All of the quirk predicates below simply delegate to the s_map singleton, which is
// populated once (from the app's TargetFrameworkAttribute) when this class is initialized.

// Use this for new behavior introduced in the phone branch. It will do the right thing for desktop & SL.
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Phone_V7_1 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Phone_V7_1; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Phone_V8_0 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Phone_V8_0; } }

// Use this for new behavior introduced in the Desktop branch. It will do the right thing for Phone & SL.
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Desktop_V4_5 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Desktop_V4_5; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Desktop_V4_5_1 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Desktop_V4_5_1; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Desktop_V4_5_2 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Desktop_V4_5_2; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Desktop_V4_5_3 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Desktop_V4_5_3; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Desktop_V4_5_4 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Desktop_V4_5_4; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Desktop_V5_0 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Desktop_V5_0; } }

// Use this for new behavior introduced in the Silverlight branch. It will do the right thing for desktop & Phone.
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Silverlight_V4 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Silverlight_V4; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Silverlight_V5 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Silverlight_V5; } }
[FriendAccessAllowed]
internal static bool TargetsAtLeast_Silverlight_V6 { [FriendAccessAllowed] get { return s_map.TargetsAtLeast_Silverlight_V6; } }
// The framework the current app was built against, as determined from its
// TargetFrameworkAttribute (read lazily on first access).
[FriendAccessAllowed]
internal static TargetFrameworkId AppWasBuiltForFramework {
    [FriendAccessAllowed]
    get {
        Contract.Ensures(Contract.Result<TargetFrameworkId>() > TargetFrameworkId.NotYetChecked);

        // Lazy initialization: resolve the target framework on first access only.
        if (s_AppWasBuiltForFramework == TargetFrameworkId.NotYetChecked)
            ReadTargetFrameworkId();

        return s_AppWasBuiltForFramework;
    }
}
// Version number is major * 10000 + minor * 100 + build (ie, 4.5.1.0 would be version 40501).
[FriendAccessAllowed]
internal static int AppWasBuiltForVersion {
    [FriendAccessAllowed]
    get {
        Contract.Ensures(Contract.Result<int>() > 0 || s_AppWasBuiltForFramework == TargetFrameworkId.Unspecified);

        // Lazy initialization: resolve the target framework (and version) on first access only.
        if (s_AppWasBuiltForFramework == TargetFrameworkId.NotYetChecked)
            ReadTargetFrameworkId();

        Contract.Assert(s_AppWasBuiltForFramework != TargetFrameworkId.Unrecognized);

        return s_AppWasBuiltForVersion;
    }
}
#region private
// Cached result of parsing the app's target framework; set by ReadTargetFrameworkId()
// (invoked lazily from the AppWasBuiltForFramework / AppWasBuiltForVersion getters).
private static TargetFrameworkId s_AppWasBuiltForFramework;

// Version number is major * 10000 + minor * 100 + build (ie, 4.5.1.0 would be version 40501).
private static int s_AppWasBuiltForVersion;

// Singleton quirk map; its constructor reads AppWasBuiltForFramework/AppWasBuiltForVersion.
readonly static BinaryCompatibilityMap s_map = new BinaryCompatibilityMap();

// For parsing a target Framework moniker, from the FrameworkName class
private const char c_componentSeparator = ',';
private const char c_keyValueSeparator = '=';
private const char c_versionValuePrefix = 'v';
private const String c_versionKey = "Version";
private const String c_profileKey = "Profile";
/// <summary>
/// BinaryCompatibilityMap is basically a bitvector. There is a boolean field for each of the
/// quirk properties exposed by BinaryCompatibility. It is populated exactly once (via the
/// s_map singleton's constructor) from the framework and version the app was built against.
/// </summary>
private sealed class BinaryCompatibilityMap
{
    // A bit for each property
    internal bool TargetsAtLeast_Phone_V7_1;
    internal bool TargetsAtLeast_Phone_V8_0;
    internal bool TargetsAtLeast_Phone_V8_1;
    internal bool TargetsAtLeast_Desktop_V4_5;
    internal bool TargetsAtLeast_Desktop_V4_5_1;
    internal bool TargetsAtLeast_Desktop_V4_5_2;
    internal bool TargetsAtLeast_Desktop_V4_5_3;
    internal bool TargetsAtLeast_Desktop_V4_5_4;
    internal bool TargetsAtLeast_Desktop_V5_0;
    internal bool TargetsAtLeast_Silverlight_V4;
    internal bool TargetsAtLeast_Silverlight_V5;
    internal bool TargetsAtLeast_Silverlight_V6;

    internal BinaryCompatibilityMap()
    {
        AddQuirksForFramework(AppWasBuiltForFramework, AppWasBuiltForVersion);
    }

    // The purpose of this method is to capture information about integrations & behavioral compatibility
    // between our multiple different release vehicles. IE, if a behavior shows up in Silverlight version 5,
    // does it show up in the .NET Framework version 4.5 and Windows Phone 8?
    // Version number is major * 10000 + minor * 100 + build (ie, 4.5.1.0 would be version 40501).
    private void AddQuirksForFramework(TargetFrameworkId builtAgainstFramework, int buildAgainstVersion)
    {
        Contract.Requires(buildAgainstVersion > 0 || builtAgainstFramework == TargetFrameworkId.Unspecified);

        switch (builtAgainstFramework)
        {
            case TargetFrameworkId.NetFramework:
            case TargetFrameworkId.NetCore: // Treat Windows 8 tailored apps as normal desktop apps - same product
                if (buildAgainstVersion >= 50000)
                    TargetsAtLeast_Desktop_V5_0 = true;

                // Potential 4.5 servicing releases
                if (buildAgainstVersion >= 40504)
                    TargetsAtLeast_Desktop_V4_5_4 = true;
                if (buildAgainstVersion >= 40503)
                    TargetsAtLeast_Desktop_V4_5_3 = true;
                if (buildAgainstVersion >= 40502)
                    TargetsAtLeast_Desktop_V4_5_2 = true;
                if (buildAgainstVersion >= 40501)
                    TargetsAtLeast_Desktop_V4_5_1 = true;

                if (buildAgainstVersion >= 40500)
                {
                    TargetsAtLeast_Desktop_V4_5 = true;
                    // On XX/XX/XX we integrated all changes from the phone V7_1 into the branch from which contains Desktop V4_5, thus
                    // Any application built for V4_5 (or above) should have all the quirks for Phone V7_1 turned on.
                    AddQuirksForFramework(TargetFrameworkId.Phone, 70100);

                    // All Silverlight 5 behavior should be in the .NET Framework version 4.5
                    AddQuirksForFramework(TargetFrameworkId.Silverlight, 50000);
                }
                break;

            case TargetFrameworkId.Phone:
                if (buildAgainstVersion >= 80000)
                {
                    // This is for Apollo apps. For Apollo apps we don't want to enable any of the 4.5 or 4.5.1 quirks
                    TargetsAtLeast_Phone_V8_0 = true;
                    //TargetsAtLeast_Desktop_V4_5 = true;
                }
                if (buildAgainstVersion >= 80100)
                {
                    // For WindowsPhone 8.1 and SL 8.1 scenarios we want to enable both 4.5 and 4.5.1 quirks.
                    TargetsAtLeast_Desktop_V4_5 = true;
                    TargetsAtLeast_Desktop_V4_5_1 = true;
                }

                // BUGFIX: Phone 7.1 encodes as 7 * 10000 + 1 * 100 = 70100 under the scheme documented
                // above. The previous threshold (710) matched any version >= 710 and therefore wrongly
                // enabled the 7.1 quirks for Phone 7.0 apps (encoded as 70000, e.g. via the
                // Silverlight "WindowsPhone" profile path in ParseTargetFrameworkMonikerIntoEnum).
                if (buildAgainstVersion >= 70100)
                    TargetsAtLeast_Phone_V7_1 = true;
                break;

            case TargetFrameworkId.Silverlight:
                if (buildAgainstVersion >= 40000)
                    TargetsAtLeast_Silverlight_V4 = true;

                if (buildAgainstVersion >= 50000)
                    TargetsAtLeast_Silverlight_V5 = true;

                if (buildAgainstVersion >= 60000)
                {
                    TargetsAtLeast_Silverlight_V6 = true;
                }
                break;

            case TargetFrameworkId.Unspecified:
                break;

            case TargetFrameworkId.NotYetChecked:
            case TargetFrameworkId.Unrecognized:
                Contract.Assert(false, "Bad framework kind");
                break;

            default:
                Contract.Assert(false, "Error: we introduced a new Target Framework but did not update our binary compatibility map");
                break;
        }
    }
}
#region String Parsing
// If this doesn't work, perhaps we could fall back to parsing the metadata version number.
// Maps a target framework moniker (TFM) string - e.g. ".NETFramework,Version=v4.5" - onto a
// TargetFrameworkId plus an encoded version number (major * 10000 + minor * 100 + build).
// Note: this method always returns true; an unrecognized moniker is reported via
// TargetFrameworkId.Unrecognized rather than a false return value.
private static bool ParseTargetFrameworkMonikerIntoEnum(String targetFrameworkMoniker, out TargetFrameworkId targetFramework, out int targetFrameworkVersion)
{
    Contract.Requires(!String.IsNullOrEmpty(targetFrameworkMoniker));

    targetFramework = TargetFrameworkId.NotYetChecked;
    targetFrameworkVersion = 0;

    String identifier = null;
    String profile = null;
    // Splits the moniker into "<identifier>,Version=v<version>[,Profile=<profile>]" parts.
    ParseFrameworkName(targetFrameworkMoniker, out identifier, out targetFrameworkVersion, out profile);

    switch (identifier)
    {
        case ".NETFramework":
            targetFramework = TargetFrameworkId.NetFramework;
            break;

        case ".NETPortable":
            targetFramework = TargetFrameworkId.Portable;
            break;

        case ".NETCore":
            targetFramework = TargetFrameworkId.NetCore;
            break;

        case "WindowsPhone":
            if (targetFrameworkVersion >= 80100)
            {
                // A TFM of the form WindowsPhone,Version=v8.1 corresponds to SL 8.1 scenario
                // and gets the same quirks as WindowsPhoneApp\v8.1 store apps.
                targetFramework = TargetFrameworkId.Phone;
            }
            else
            {
                // There is no TFM for Apollo or below and hence we assign the targetFramework to Unspecified.
                targetFramework = TargetFrameworkId.Unspecified;
            }
            break;

        case "WindowsPhoneApp":
            targetFramework = TargetFrameworkId.Phone;
            break;

        case "Silverlight":
            targetFramework = TargetFrameworkId.Silverlight;
            // Windows Phone 7 is Silverlight,Version=v4.0,Profile=WindowsPhone
            // Windows Phone 7.1 is Silverlight,Version=v4.0,Profile=WindowsPhone71
            // Note: the profile overrides both the framework id and the version parsed above.
            if (!String.IsNullOrEmpty(profile))
            {
                if (profile == "WindowsPhone")
                {
                    targetFramework = TargetFrameworkId.Phone;
                    targetFrameworkVersion = 70000;
                }
                else if (profile == "WindowsPhone71")
                {
                    targetFramework = TargetFrameworkId.Phone;
                    targetFrameworkVersion = 70100;
                }
                else if (profile == "WindowsPhone8")
                {
                    targetFramework = TargetFrameworkId.Phone;
                    targetFrameworkVersion = 80000;
                }
                else if (profile.StartsWith("WindowsPhone", StringComparison.Ordinal))
                {
                    // Unknown "WindowsPhone*" profile: assume at least 7.1 but flag as unrecognized.
                    Contract.Assert(false, "This is a phone app, but we can't tell what version this is!");
                    targetFramework = TargetFrameworkId.Unrecognized;
                    targetFrameworkVersion = 70100;
                }
                else
                {
                    Contract.Assert(false, String.Format(CultureInfo.InvariantCulture, "Unrecognized Silverlight profile \"{0}\". What is this, an XBox app?", profile));
                    targetFramework = TargetFrameworkId.Unrecognized;
                }
            }
            break;

        default:
            Contract.Assert(false, String.Format(CultureInfo.InvariantCulture, "Unrecognized Target Framework Moniker in our Binary Compatibility class. Framework name: \"{0}\"", targetFrameworkMoniker));
            targetFramework = TargetFrameworkId.Unrecognized;
            break;
    }

    return true;
}
// This code was a constructor copied from the FrameworkName class, which is located in System.dll.
// Parses strings in the following format: "<identifier>, Version=[v|V]<version>, Profile=<profile>"
//  - The identifier and version is required, profile is optional
//  - Only three components are allowed.
//  - The version string must be in the System.Version format; an optional "v" or "V" prefix is allowed
//
// On return, 'version' encodes the parsed System.Version as major*10000 + minor*100 + build
// (unset Version components are represented internally as -1 and are therefore skipped).
// Throws ArgumentNullException/ArgumentException for null, empty, or malformed input.
private static void ParseFrameworkName(String frameworkName, out String identifier, out int version, out String profile)
{
    if (frameworkName == null)
    {
        throw new ArgumentNullException("frameworkName");
    }
    if (frameworkName.Length == 0)
    {
        throw new ArgumentException(Environment.GetResourceString("Argument_StringZeroLength"), "frameworkName");
    }
    Contract.EndContractBlock();
    String[] components = frameworkName.Split(c_componentSeparator);
    version = 0;
    // Identifier and Version are required, Profile is optional.
    if (components.Length < 2 || components.Length > 3)
    {
        throw new ArgumentException(Environment.GetResourceString("Argument_FrameworkNameTooShort"), "frameworkName");
    }
    //
    // 1) Parse the "Identifier", which must come first. Trim any whitespace
    //
    identifier = components[0].Trim();
    if (identifier.Length == 0)
    {
        throw new ArgumentException(Environment.GetResourceString("Argument_FrameworkNameInvalid"), "frameworkName");
    }
    bool versionFound = false;
    profile = null;
    //
    // The required "Version" and optional "Profile" component can be in any order
    //
    for (int i = 1; i < components.Length; i++)
    {
        // Get the key/value pair separated by '='
        string[] keyValuePair = components[i].Split(c_keyValueSeparator);
        if (keyValuePair.Length != 2)
        {
            // BUGFIX: this previously looked up "SR.Argument_FrameworkNameInvalid", which is
            // not a valid resource key — every other throw in this method (and the .NET
            // reference source for FrameworkName) uses the un-prefixed key.
            throw new ArgumentException(Environment.GetResourceString("Argument_FrameworkNameInvalid"), "frameworkName");
        }
        // Get the key and value, trimming any whitespace
        string key = keyValuePair[0].Trim();
        string value = keyValuePair[1].Trim();
        //
        // 2) Parse the required "Version" key value
        //
        if (key.Equals(c_versionKey, StringComparison.OrdinalIgnoreCase))
        {
            versionFound = true;
            // Allow the version to include a 'v' or 'V' prefix...
            if (value.Length > 0 && (value[0] == c_versionValuePrefix || value[0] == 'V'))
            {
                value = value.Substring(1);
            }
            Version realVersion = new Version(value);
            // The version class will represent some unset values as -1 internally (instead of 0).
            version = realVersion.Major * 10000;
            if (realVersion.Minor > 0)
                version += realVersion.Minor * 100;
            if (realVersion.Build > 0)
                version += realVersion.Build;
        }
        //
        // 3) Parse the optional "Profile" key value
        //
        else if (key.Equals(c_profileKey, StringComparison.OrdinalIgnoreCase))
        {
            if (!String.IsNullOrEmpty(value))
            {
                profile = value;
            }
        }
        else
        {
            // Unknown key: only Version and Profile are allowed.
            throw new ArgumentException(Environment.GetResourceString("Argument_FrameworkNameInvalid"), "frameworkName");
        }
    }
    if (!versionFound)
    {
        throw new ArgumentException(Environment.GetResourceString("Argument_FrameworkNameMissingVersion"), "frameworkName");
    }
}
[System.Security.SecuritySafeCritical]
// Determines which framework/version this AppDomain was built against and publishes the
// result into s_AppWasBuiltForFramework / s_AppWasBuiltForVersion. A "TargetFrameworkMoniker"
// compatibility switch, when set, overrides the AppDomain's recorded target framework name.
private static void ReadTargetFrameworkId()
{
    String moniker = AppDomain.CurrentDomain.GetTargetFrameworkName();
    var overrideValue = System.Runtime.Versioning.CompatibilitySwitch.GetValueInternal("TargetFrameworkMoniker");
    if (!string.IsNullOrEmpty(overrideValue))
    {
        moniker = overrideValue;
    }
    // Compute into locals first, then publish to the statics.
    TargetFrameworkId fxId;
    int fxVersion = 0;
    if (moniker == null)
    {
        // No TFM recorded for this AppDomain at all.
        fxId = TargetFrameworkId.Unspecified;
    }
    else if (!ParseTargetFrameworkMonikerIntoEnum(moniker, out fxId, out fxVersion))
    {
        fxId = TargetFrameworkId.Unrecognized;
    }
    s_AppWasBuiltForFramework = fxId;
    s_AppWasBuiltForVersion = fxVersion;
}
#endregion String Parsing
#endregion private
}
}
| |
/************************************************************************************
Copyright : Copyright 2014 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.3 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
http://www.oculus.com/licenses/LICENSE-3.3
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using UnityEngine;
using VR = UnityEngine.VR;
/// <summary>
/// Manages an Oculus Rift head-mounted display (HMD).
/// </summary>
/// <summary>
/// Manages an Oculus Rift head-mounted display (HMD).
/// </summary>
public class OVRDisplay
{
    /// <summary>
    /// Specifies the size and field-of-view for one eye texture.
    /// </summary>
    public struct EyeRenderDesc
    {
        /// <summary>
        /// The horizontal and vertical size of the texture.
        /// </summary>
        public Vector2 resolution;

        /// <summary>
        /// The angle of the horizontal and vertical field of view in degrees.
        /// </summary>
        public Vector2 fov;
    }

    /// <summary>
    /// Contains latency measurements for a single frame of rendering.
    /// </summary>
    public struct LatencyData
    {
        /// <summary>
        /// The time it took to render both eyes in seconds.
        /// </summary>
        public float render;

        /// <summary>
        /// The time it took to perform TimeWarp in seconds.
        /// </summary>
        public float timeWarp;

        /// <summary>
        /// The time between the end of TimeWarp and scan-out in seconds.
        /// </summary>
        public float postPresent;

        // NOTE(review): these two error fields are never populated by this class;
        // presumably filled elsewhere or reserved — confirm before relying on them.
        public float renderError;
        public float timeWarpError;
    }

    // NOTE(review): this flag is never read or written anywhere in this class;
    // kept for source/serialization compatibility — confirm it is dead before removing.
    private bool needsConfigureTexture;

    // Per-eye render descriptions, indexed by (int)XRNode.LeftEye / RightEye.
    private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[2];

    // Cached pattern for parsing OVRPlugin.latency strings such as
    // "Render: 0.123ms, TimeWarp: 0.456ms, PostPresent: 0.789ms".
    // Hoisted out of the 'latency' getter so the regex is compiled once, not per query.
    private static readonly Regex LatencyRegex =
        new Regex("Render: ([0-9]+[.][0-9]+)ms, TimeWarp: ([0-9]+[.][0-9]+)ms, PostPresent: ([0-9]+[.][0-9]+)ms", RegexOptions.None);

    /// <summary>
    /// Creates an instance of OVRDisplay. Called by OVRManager.
    /// </summary>
    public OVRDisplay()
    {
        UpdateTextures();
    }

    /// <summary>
    /// Updates the internal state of the OVRDisplay. Called by OVRManager.
    /// </summary>
    public void Update()
    {
        UpdateTextures();
    }

    /// <summary>
    /// Occurs when the head pose is reset.
    /// </summary>
    public event System.Action RecenteredPose;

    /// <summary>
    /// Recenters the head pose and raises <see cref="RecenteredPose"/>.
    /// </summary>
    public void RecenterPose()
    {
        UnityEngine.XR.InputTracking.Recenter();
        if (RecenteredPose != null)
        {
            RecenteredPose();
        }
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
        OVRMixedReality.RecenterPose();
#endif
    }

    /// <summary>
    /// Gets the current linear acceleration of the head.
    /// </summary>
    public Vector3 acceleration
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;
            return OVRPlugin.GetNodeAcceleration(OVRPlugin.Node.None, OVRPlugin.Step.Render).FromFlippedZVector3f();
        }
    }

    /// <summary>
    /// Gets the current angular acceleration of the head, in degrees.
    /// </summary>
    public Vector3 angularAcceleration
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;
            return OVRPlugin.GetNodeAngularAcceleration(OVRPlugin.Node.None, OVRPlugin.Step.Render).FromFlippedZVector3f() * Mathf.Rad2Deg;
        }
    }

    /// <summary>
    /// Gets the current linear velocity of the head.
    /// </summary>
    public Vector3 velocity
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;
            return OVRPlugin.GetNodeVelocity(OVRPlugin.Node.None, OVRPlugin.Step.Render).FromFlippedZVector3f();
        }
    }

    /// <summary>
    /// Gets the current angular velocity of the head, in degrees.
    /// </summary>
    public Vector3 angularVelocity
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;
            return OVRPlugin.GetNodeAngularVelocity(OVRPlugin.Node.None, OVRPlugin.Step.Render).FromFlippedZVector3f() * Mathf.Rad2Deg;
        }
    }

    /// <summary>
    /// Gets the resolution and field of view for the given eye.
    /// </summary>
    public EyeRenderDesc GetEyeRenderDesc(UnityEngine.XR.XRNode eye)
    {
        return eyeDescs[(int)eye];
    }

    /// <summary>
    /// Gets the current measured latency values, parsed from the plugin's latency string.
    /// Returns a default (zeroed) LatencyData when no HMD is present or the string
    /// does not match the expected format.
    /// </summary>
    public LatencyData latency
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return new LatencyData();
            string latency = OVRPlugin.latency;
            var ret = new LatencyData();
            Match match = LatencyRegex.Match(latency);
            if (match.Success)
            {
                // BUGFIX: the plugin formats these numbers with a '.' decimal separator
                // (enforced by the regex above), so parse with the invariant culture.
                // Parsing with the current culture throws or misparses in locales that
                // use ',' as the decimal separator.
                ret.render = float.Parse(match.Groups[1].Value, System.Globalization.CultureInfo.InvariantCulture);
                ret.timeWarp = float.Parse(match.Groups[2].Value, System.Globalization.CultureInfo.InvariantCulture);
                ret.postPresent = float.Parse(match.Groups[3].Value, System.Globalization.CultureInfo.InvariantCulture);
            }
            return ret;
        }
    }

    /// <summary>
    /// Gets application's frame rate reported by oculus plugin
    /// </summary>
    public float appFramerate
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return 0;
            return OVRPlugin.GetAppFramerate();
        }
    }

    /// <summary>
    /// Gets the recommended MSAA level for optimal quality/performance the current device.
    /// </summary>
    public int recommendedMSAALevel
    {
        get
        {
            int result = OVRPlugin.recommendedMSAALevel;
            // Unity treats MSAA level 1 and 0 both as "disabled"; normalize to 0.
            if (result == 1)
                result = 0;
            return result;
        }
    }

    // Refreshes the cached per-eye render descriptions.
    private void UpdateTextures()
    {
        ConfigureEyeDesc(UnityEngine.XR.XRNode.LeftEye);
        ConfigureEyeDesc(UnityEngine.XR.XRNode.RightEye);
    }

    // Queries the plugin for one eye's texture size and view frustum and caches
    // the result in eyeDescs. No-op when no HMD is present.
    private void ConfigureEyeDesc(UnityEngine.XR.XRNode eye)
    {
        if (!OVRManager.isHmdPresent)
            return;
        OVRPlugin.Sizei size = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
        OVRPlugin.Frustumf frust = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
        eyeDescs[(int)eye] = new EyeRenderDesc()
        {
            resolution = new Vector2(size.w, size.h),
            fov = Mathf.Rad2Deg * new Vector2(frust.fovX, frust.fovY),
        };
    }
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace KappaServer.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
    /// <summary>
    /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
    /// </summary>
    public HelpPageSampleGenerator()
    {
        ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
        ActionSamples = new Dictionary<HelpPageSampleKey, object>();
        SampleObjects = new Dictionary<Type, object>();
        // Seed with the default factory; callers may insert overrides before it
        // or append fallbacks after it (see SampleObjectFactories remarks).
        SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
        {
            DefaultSampleObjectFactory,
        };
    }

    /// <summary>
    /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
    /// </summary>
    public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

    /// <summary>
    /// Gets the objects that are used directly as samples for certain actions.
    /// </summary>
    public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

    /// <summary>
    /// Gets the objects that are serialized as samples by the supported formatters.
    /// </summary>
    public IDictionary<Type, object> SampleObjects { get; internal set; }

    /// <summary>
    /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
    /// stopping when the factory successfully returns a non-<see langref="null"/> object.
    /// </summary>
    /// <remarks>
    /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
    /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
    /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
    [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
        Justification = "This is an appropriate nesting of generic types")]
    public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

    /// <summary>
    /// Gets the request body samples for a given <see cref="ApiDescription"/>.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The samples keyed by media type.</returns>
    public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
    {
        return GetSample(api, SampleDirection.Request);
    }

    /// <summary>
    /// Gets the response body samples for a given <see cref="ApiDescription"/>.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The samples keyed by media type.</returns>
    public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
    {
        return GetSample(api, SampleDirection.Response);
    }

    /// <summary>
    /// Gets the request or response body samples.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
    /// <returns>The samples keyed by media type.</returns>
    public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
    {
        if (api == null)
        {
            throw new ArgumentNullException("api");
        }
        string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
        string actionName = api.ActionDescriptor.ActionName;
        IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
        Collection<MediaTypeFormatter> formatters;
        Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
        var samples = new Dictionary<MediaTypeHeaderValue, object>();
        // Use the samples provided directly for actions
        var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
        foreach (var actionSample in actionSamples)
        {
            samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
        }
        // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
        // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
        if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
        {
            object sampleObject = GetSampleObject(type);
            foreach (var formatter in formatters)
            {
                foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                {
                    // Explicit action samples (added above) win over formatter-generated ones.
                    if (!samples.ContainsKey(mediaType))
                    {
                        object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                        // If no sample found, try generate sample using formatter and sample object
                        if (sample == null && sampleObject != null)
                        {
                            sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                        }
                        samples.Add(mediaType, WrapSampleIfString(sample));
                    }
                }
            }
        }
        return samples;
    }

    /// <summary>
    /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
    /// </summary>
    /// <param name="controllerName">Name of the controller.</param>
    /// <param name="actionName">Name of the action.</param>
    /// <param name="parameterNames">The parameter names.</param>
    /// <param name="type">The CLR type.</param>
    /// <param name="formatter">The formatter.</param>
    /// <param name="mediaType">The media type.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
    /// <returns>The sample that matches the parameters, or <c>null</c> if none is registered.</returns>
    public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
    {
        object sample;
        // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
        // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
        // If still not found, try to get the sample provided for the specified mediaType and type.
        // Finally, try to get the sample provided for the specified mediaType.
        if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
        {
            return sample;
        }
        return null;
    }

    /// <summary>
    /// Gets the sample object that will be serialized by the formatters.
    /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
    /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
    /// factories in <see cref="SampleObjectFactories"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>The sample object, or <c>null</c> if nothing could be produced.</returns>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
        Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
    public virtual object GetSampleObject(Type type)
    {
        object sampleObject;
        if (!SampleObjects.TryGetValue(type, out sampleObject))
        {
            // No specific object available, try our factories.
            foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
            {
                if (factory == null)
                {
                    continue;
                }
                try
                {
                    sampleObject = factory(this, type);
                    if (sampleObject != null)
                    {
                        // First factory to return non-null wins.
                        break;
                    }
                }
                catch
                {
                    // Ignore any problems encountered in the factory; go on to the next one (if any).
                }
            }
        }
        return sampleObject;
    }

    /// <summary>
    /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The type.</returns>
    public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
    {
        string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
        string actionName = api.ActionDescriptor.ActionName;
        IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
        Collection<MediaTypeFormatter> formatters;
        // Formatters are discarded here; only the resolved type is needed.
        return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
    }

    /// <summary>
    /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <param name="controllerName">Name of the controller.</param>
    /// <param name="actionName">Name of the action.</param>
    /// <param name="parameterNames">The parameter names.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
    /// <param name="formatters">The formatters.</param>
    [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
    public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
    {
        if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
        {
            throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
        }
        if (api == null)
        {
            throw new ArgumentNullException("api");
        }
        Type type;
        // An explicitly registered HttpRequestMessage/HttpResponseMessage content type
        // (exact parameter match first, then the "*" wildcard) overrides what the
        // ApiDescription reports.
        if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
            ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
        {
            // Re-compute the supported formatters based on type
            Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
            foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
            {
                if (IsFormatSupported(sampleDirection, formatter, type))
                {
                    newFormatters.Add(formatter);
                }
            }
            formatters = newFormatters;
        }
        else
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    // The request body type comes from the first FromBody parameter, if any.
                    ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                    type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                    formatters = api.SupportedRequestBodyFormatters;
                    break;
                case SampleDirection.Response:
                default:
                    type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                    formatters = api.SupportedResponseFormatters;
                    break;
            }
        }
        return type;
    }

    /// <summary>
    /// Writes the sample object using formatter.
    /// </summary>
    /// <param name="formatter">The formatter.</param>
    /// <param name="value">The value.</param>
    /// <param name="type">The type.</param>
    /// <param name="mediaType">Type of the media.</param>
    /// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
    public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
    {
        if (formatter == null)
        {
            throw new ArgumentNullException("formatter");
        }
        if (mediaType == null)
        {
            throw new ArgumentNullException("mediaType");
        }
        object sample = String.Empty;
        MemoryStream ms = null;
        HttpContent content = null;
        try
        {
            if (formatter.CanWriteType(type))
            {
                ms = new MemoryStream();
                content = new ObjectContent(type, value, formatter, mediaType);
                // NOTE(review): synchronous wait on the async write; presumably acceptable
                // for help-page generation, but a deadlock risk under a synchronization
                // context — confirm this only runs on background/request threads.
                formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                ms.Position = 0;
                StreamReader reader = new StreamReader(ms);
                string serializedSampleString = reader.ReadToEnd();
                // Pretty-print known text formats; anything else is returned as written.
                if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                {
                    serializedSampleString = TryFormatXml(serializedSampleString);
                }
                else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                {
                    serializedSampleString = TryFormatJson(serializedSampleString);
                }
                sample = new TextSample(serializedSampleString);
            }
            else
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                    mediaType,
                    formatter.GetType().Name,
                    type.Name));
            }
        }
        catch (Exception e)
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                formatter.GetType().Name,
                mediaType.MediaType,
                UnwrapException(e).Message));
        }
        finally
        {
            if (ms != null)
            {
                ms.Dispose();
            }
            if (content != null)
            {
                content.Dispose();
            }
        }
        return sample;
    }

    // Unwraps AggregateException (e.g. from Task.Wait) to surface the root cause.
    internal static Exception UnwrapException(Exception exception)
    {
        AggregateException aggregateException = exception as AggregateException;
        if (aggregateException != null)
        {
            return aggregateException.Flatten().InnerException;
        }
        return exception;
    }

    // Default factory for sample objects
    private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
    {
        // Try to create a default sample object
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type);
    }

    // Best-effort JSON pretty-printer; returns the input unchanged when it is not valid JSON.
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
    private static string TryFormatJson(string str)
    {
        try
        {
            object parsedJson = JsonConvert.DeserializeObject(str);
            return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
        }
        catch
        {
            // can't parse JSON, return the original string
            return str;
        }
    }

    // Best-effort XML pretty-printer; returns the input unchanged when it is not valid XML.
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
    private static string TryFormatXml(string str)
    {
        try
        {
            XDocument xml = XDocument.Parse(str);
            return xml.ToString();
        }
        catch
        {
            // can't parse XML, return the original string
            return str;
        }
    }

    // Requests must be readable by the formatter; responses must be writable.
    private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
    {
        switch (sampleDirection)
        {
            case SampleDirection.Request:
                return formatter.CanReadType(type);
            case SampleDirection.Response:
                return formatter.CanWriteType(type);
        }
        return false;
    }

    // Yields every registered action sample whose key matches the given controller/action/
    // direction, where the key's parameter set either equals the action's parameters
    // (case-insensitive) or is the "*" wildcard.
    private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
    {
        HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
        foreach (var sample in ActionSamples)
        {
            HelpPageSampleKey sampleKey = sample.Key;
            if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                sampleDirection == sampleKey.SampleDirection)
            {
                yield return sample;
            }
        }
    }

    // Raw strings are wrapped in TextSample so the help page renders them uniformly.
    private static object WrapSampleIfString(object sample)
    {
        string stringSample = sample as string;
        if (stringSample != null)
        {
            return new TextSample(stringSample);
        }
        return sample;
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using Gtk;
using System.Diagnostics;
using Pango;
// GTK# launcher window that starts/stops a set of external helper scripts
// (/usr/bin/bash_*) via System.Diagnostics.Process and reflects their running
// state in toggle-button colors, refreshed once per second.
public partial class MainWindow: Gtk.Window
{
    Process roscore = new Process();
    Process usb = new Process();
    Process image = new Process();
    Process uvc = new Process();
    Process record = new Process();
    Process gps = new Process();
    // 'processes' and 'buttons' are parallel lists: index i of one corresponds to
    // index i of the other (see RefreshButtons). Note that 'uvc'/'buttonUvc' are
    // intentionally NOT tracked here; uvc is only cleaned up by the name/id scans
    // in CloseAllProcesses.
    List<Process> processes = new List<Process> ();
    List<ToggleButton> buttons = new List<ToggleButton> ();
    // NOTE(review): 'font' is never used in this class — confirm it is dead before removing.
    FontDescription font = new FontDescription();
    // Ids of every process we started, used as a fallback kill list.
    List<int> processIds = new List<int>();

    public MainWindow (): base (Gtk.WindowType.Toplevel)
    {
        Build ();
        // Poll process state every 1000 ms to keep button colors in sync.
        GLib.Timeout.Add (1000, CheckWorkingTimeoutHandler);
        processes.AddRange (new Process[] { roscore, usb, image, record, gps });
        buttons.AddRange (new ToggleButton[] { buttonRoscore, buttonUsb, buttonImage, buttonRecord, buttonGps });
        SetButtonColors ();
    }

    // GLib timeout callback; returning true keeps the timer running.
    protected bool CheckWorkingTimeoutHandler() {
        RefreshButtons ();
        return true;
    }

    // Applies the "not running" color scheme to every button.
    void SetButtonColors ()
    {
        foreach (Button button in buttons) {
            SetButtonColors (button);
        }
        SetButtonColors (buttonUvc);
        SetButtonColors (buttonCloseAll);
        SetButtonColors (buttonExit);
    }

    // Default (light) colors: the associated process is not running.
    void SetButtonColors (Button button) {
        button.ModifyBg (StateType.Normal, new Gdk.Color (220, 220, 220));
        button.ModifyBg (StateType.Prelight, new Gdk.Color (160, 160, 180));
        button.ModifyBg (StateType.Active, new Gdk.Color (160, 160, 160));
    }

    // Inverted (dark-normal) colors: the associated process is running.
    void SetSelectedButtonColors (Button button) {
        button.ModifyBg (StateType.Active, new Gdk.Color (220, 220, 220));
        button.ModifyBg (StateType.Prelight, new Gdk.Color (160, 160, 180));
        button.ModifyBg (StateType.Normal, new Gdk.Color (160, 160, 160));
    }

    // Best-effort shutdown of everything we may have started:
    // 1) run the external bash_kill script, 2) kill tracked Process objects,
    // 3) scan all system processes and kill by known script name or recorded id.
    // All failures are deliberately swallowed so shutdown always proceeds.
    void CloseAllProcesses ()
    {
        Process kill = new Process ();
        kill.EnableRaisingEvents = true;
        kill.StartInfo.FileName = "/usr/bin/bash_kill";
        kill.Start();
        try {
            // Give the kill script up to 2 seconds to finish.
            kill.WaitForExit (2000);
        } catch {
        }
        foreach (ToggleButton button in buttons) {
            button.Active = false;
        }
        foreach (Process proc in processes) {
            if (ProcessHasStarted (proc) && !proc.HasExited) {
                try {
                    proc.Kill ();
                } catch {
                }
            }
        }
        Process[] p = Process.GetProcesses ();
        foreach (Process proc in p) {
            try {
                string name = proc.ProcessName;
                if (name.Contains ("bash_roscore") ||
                    name.Contains ("bash_usb") ||
                    name.Contains ("bash_uvc") ||
                    name.Contains ("bash_left") ||
                    name.Contains ("bash_record") ||
                    name.Contains ("bash_gps")) {
                    proc.Kill ();
                }
            } catch {
            }
            try {
                // Fallback: kill anything whose pid we recorded at launch time.
                if (processIds.Contains(proc.Id)) {
                    proc.Kill ();
                }
            } catch {
            }
        }
    }

    // Recolors each button to reflect whether its parallel process is running.
    void RefreshButtons ()
    {
        int i = 0;
        foreach (Process proc in processes) {
            if (ProcessHasStarted (proc) && !proc.HasExited && buttons [i] != null) {
                SetSelectedButtonColors (buttons [i]);
                //buttons [i].Active = true;
            } else {
                SetButtonColors (buttons [i]);
                //buttons [i].Active = false;
            }
            i++;
        }
    }

    // Records the pid of a just-started process; Process.Id can throw if the
    // process failed to start, hence the swallow.
    void AddProcessId (Process proc)
    {
        try {
            processIds.Add(proc.Id);
        } catch {
        }
    }

    // Returns false only when the Process object was never started.
    // NOTE(review): detection relies on the English exception message
    // "Process has not been started"; this is fragile under localized runtimes
    // and other InvalidOperationException causes fall through to 'true' — confirm.
    protected bool ProcessHasStarted(Process proc) {
        if (proc == null) {
            return false;
        }
        try {
            bool h = proc.HasExited;
        } catch (InvalidOperationException ex) {
            if (ex.Message.Contains ("Process has not been started")) {
                return false;
            }
        }
        return true;
    }

    // Window-close ("X") handler: clean up children, then quit.
    protected void OnDeleteEvent (object sender, DeleteEventArgs a)
    {
        CloseAllProcesses ();
        Application.Quit ();
        a.RetVal = true;
    }

    protected void buttonExitClicked (object sender, EventArgs e)
    {
        CloseAllProcesses ();
        Application.Quit ();
    }

    protected void buttonCloseAllClicked (object sender, EventArgs e)
    {
        CloseAllProcesses ();
        RefreshButtons ();
    }

    // Each buttonXxxClicked handler launches its script only when the toggle is
    // being switched on; toggling off is handled globally via CloseAllProcesses.
    protected void buttonRoscoreClicked (object sender, EventArgs e)
    {
        if (buttonRoscore.Active) {
            roscore.EnableRaisingEvents = true;
            roscore.StartInfo.FileName = "/usr/bin/bash_roscore";
            roscore.Start();
            AddProcessId (roscore);
            //roscore.StartInfo.RedirectStandardOutput = true;
            //roscore.StartInfo.Arguments = "-l | grep NTFS";
            //roscore.WaitForExit();
            //string data = roscore.StandardOutput.ReadToEnd();
            //Console.WriteLine( data + " was returned" );
        }
    }

    protected void buttonUsbClicked (object sender, EventArgs e)
    {
        if (buttonUsb.Active) {
            usb.EnableRaisingEvents = true;
            usb.StartInfo.FileName = "/usr/bin/bash_usb";
            usb.Start ();
            AddProcessId (usb);
        }
    }

    protected void buttonImageClicked (object sender, EventArgs e)
    {
        if (buttonImage.Active) {
            image.EnableRaisingEvents = true;
            image.StartInfo.FileName = "/usr/bin/bash_left";
            image.Start ();
            AddProcessId (image);
        }
    }

    // buttonUvc is a plain (non-tracked) launcher: it starts the script on every
    // click regardless of toggle state.
    protected void buttonUvcClicked (object sender, EventArgs e)
    {
        uvc.EnableRaisingEvents = true;
        uvc.StartInfo.FileName = "/usr/bin/bash_uvc";
        uvc.Start ();
        AddProcessId (uvc);
    }

    protected void buttonRecordClicked (object sender, EventArgs e)
    {
        if (buttonRecord.Active) {
            record.EnableRaisingEvents = true;
            record.StartInfo.FileName = "/usr/bin/bash_record";
            record.Start ();
            AddProcessId (record);
        }
    }

    protected void buttonGpsClicked (object sender, EventArgs e)
    {
        if (buttonGps.Active) {
            gps.EnableRaisingEvents = true;
            gps.StartInfo.FileName = "/usr/bin/bash_gps";
            gps.Start ();
            AddProcessId (gps);
        }
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
////////////////////////////////////////////////////////////////////////////
//
//
// Purpose: This class represents settings specified by de jure or
// de facto standards for a particular country/region. In
// contrast to CultureInfo, the RegionInfo does not represent
// preferences of the user and does not depend on the user's
// language or culture.
//
//
////////////////////////////////////////////////////////////////////////////
namespace System.Globalization {
using System;
using System.Runtime.Serialization;
using System.Diagnostics;
using System.Diagnostics.Contracts;
[Serializable]
[System.Runtime.InteropServices.ComVisible(true)]
public partial class RegionInfo
{
//--------------------------------------------------------------------//
// Internal Information //
//--------------------------------------------------------------------//
//
// Variables.
//
//
// Name of this region (ie: es-US): serialized, the field used for deserialization
//
internal String m_name;
//
// The CultureData instance that we are going to read data from.
// Not serialized; rebuilt from m_name / m_cultureId in OnDeserialized.
//
[NonSerialized]internal CultureData m_cultureData;
//
// The RegionInfo for our current region (cached lazily by CurrentRegion)
//
internal static volatile RegionInfo s_currentRegionInfo;
////////////////////////////////////////////////////////////////////////
//
// RegionInfo Constructors
//
// Note: We prefer that a region be created with a full culture name (ie: en-US)
// because otherwise the native strings won't be right.
//
// In Silverlight we enforce that RegionInfos must be created with a full culture name
//
////////////////////////////////////////////////////////////////////////
public RegionInfo(String name) {
if (name==null)
throw new ArgumentNullException(nameof(name));
if (name.Length == 0) //The InvariantCulture has no matching region
{
throw new ArgumentException(Environment.GetResourceString("Argument_NoRegionInvariantCulture"), nameof(name));
}
Contract.EndContractBlock();
//
// First try it as an entire culture. We must have user override as true here so
// that we can pick up custom cultures *before* built-in ones (if they want to
// prefer built-in cultures they will pass "us" instead of "en-US").
//
this.m_cultureData = CultureData.GetCultureDataForRegion(name,true);
// this.m_name = name.ToUpper(CultureInfo.InvariantCulture);
if (this.m_cultureData == null)
throw new ArgumentException(
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("Argument_InvalidCultureName"), name), nameof(name));
// Not supposed to be neutral
if (this.m_cultureData.IsNeutralCulture)
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidNeutralRegionName", name), nameof(name));
SetName(name);
}
#if FEATURE_USE_LCID
// We'd rather people use the named version since this doesn't allow custom locales
public RegionInfo(int culture)
{
if (culture == CultureInfo.LOCALE_INVARIANT) //The InvariantCulture has no matching region
{
throw new ArgumentException(Environment.GetResourceString("Argument_NoRegionInvariantCulture"));
}
if (culture == CultureInfo.LOCALE_NEUTRAL)
{
// Not supposed to be neutral
throw new ArgumentException(Environment.GetResourceString("Argument_CultureIsNeutral", culture), nameof(culture));
}
if (culture == CultureInfo.LOCALE_CUSTOM_DEFAULT)
{
// Custom cultures cannot be created by LCID; callers must use the named constructor
throw new ArgumentException(Environment.GetResourceString("Argument_CustomCultureCannotBePassedByNumber", culture), nameof(culture));
}
this.m_cultureData = CultureData.GetCultureData(culture,true);
this.m_name = this.m_cultureData.SREGIONNAME;
if (this.m_cultureData.IsNeutralCulture)
{
// Not supposed to be neutral
throw new ArgumentException(Environment.GetResourceString("Argument_CultureIsNeutral", culture), nameof(culture));
}
m_cultureId = culture;
}
#endif
// Internal fast-path constructor used when the culture data is already resolved
// (see CurrentRegion); performs no validation.
internal RegionInfo(CultureData cultureData)
{
this.m_cultureData = cultureData;
this.m_name = this.m_cultureData.SREGIONNAME;
}
private void SetName(string name)
{
// Use the name of the region we found.
// Note: the incoming 'name' parameter is intentionally not used here — the
// canonical region name exposed by the resolved culture data is preferred.
this.m_name = this.m_cultureData.SREGIONNAME;
}
#region Serialization
//
// m_cultureId is needed for serialization only to detect the case if the region info is created using the name or using the LCID.
// If m_cultureId is zero the RegionInfo was created using a name; otherwise it was created using an LCID.
//
[OptionalField(VersionAdded = 2)]
int m_cultureId;
// the following field is defined to keep the compatibility with Everett.
// don't change/remove the names/types of these fields.
[OptionalField(VersionAdded = 2)]
internal int m_dataItem = 0;
[OnDeserialized]
private void OnDeserialized(StreamingContext ctx)
{
// This won't happen anyway since CoreCLR doesn't support serialization
this.m_cultureData = CultureData.GetCultureData(m_name, true);
if (this.m_cultureData == null)
throw new ArgumentException(
String.Format(
CultureInfo.CurrentCulture,
Environment.GetResourceString("Argument_InvalidCultureName"), m_name), nameof(m_name));
if (m_cultureId == 0)
{
// Created by name: re-derive the canonical name from the culture data
SetName(this.m_name);
}
else
{
// Created by LCID: take the region name straight from the culture data
this.m_name = this.m_cultureData.SREGIONNAME;
}
}
[OnSerializing]
private void OnSerializing(StreamingContext ctx)
{
// Used to fill in everett data item, unnecessary now
}
#endregion Serialization
////////////////////////////////////////////////////////////////////////
//
// GetCurrentRegion
//
// This instance provides methods based on the current user settings.
// These settings are volatile and may change over the lifetime of the
// thread.
//
////////////////////////////////////////////////////////////////////////
public static RegionInfo CurrentRegion {
get {
RegionInfo temp = s_currentRegionInfo;
if (temp == null)
{
temp = new RegionInfo(CultureInfo.CurrentCulture.m_cultureData);
// Need full name for custom cultures
temp.m_name=temp.m_cultureData.SREGIONNAME;
s_currentRegionInfo = temp;
}
return temp;
}
}
////////////////////////////////////////////////////////////////////////
//
// GetName
//
// Returns the name of the region (ie: en-US)
//
////////////////////////////////////////////////////////////////////////
public virtual String Name {
get {
Debug.Assert(m_name != null, "Expected RegionInfo.m_name to be populated already");
return (m_name);
}
}
////////////////////////////////////////////////////////////////////////
//
// GetEnglishName
//
// Returns the name of the region in English. (ie: United States)
//
////////////////////////////////////////////////////////////////////////
public virtual String EnglishName
{
get
{
return (this.m_cultureData.SENGCOUNTRY);
}
}
////////////////////////////////////////////////////////////////////////
//
// GetDisplayName
//
// Returns the display name (localized) of the region. (ie: United States
// if the current UI language is en-US)
//
////////////////////////////////////////////////////////////////////////
public virtual String DisplayName
{
get
{
return (this.m_cultureData.SLOCALIZEDCOUNTRY);
}
}
////////////////////////////////////////////////////////////////////////
//
// GetNativeName
//
// Returns the native name of the region. (ie: Deutschland)
// WARNING: You need a full locale name for this to make sense.
//
////////////////////////////////////////////////////////////////////////
[System.Runtime.InteropServices.ComVisible(false)]
public virtual String NativeName
{
get
{
return (this.m_cultureData.SNATIVECOUNTRY);
}
}
////////////////////////////////////////////////////////////////////////
//
// TwoLetterISORegionName
//
// Returns the two letter ISO region name (ie: US)
//
////////////////////////////////////////////////////////////////////////
public virtual String TwoLetterISORegionName
{
get
{
return (this.m_cultureData.SISO3166CTRYNAME);
}
}
////////////////////////////////////////////////////////////////////////
//
// ThreeLetterISORegionName
//
// Returns the three letter ISO region name (ie: USA)
//
////////////////////////////////////////////////////////////////////////
public virtual String ThreeLetterISORegionName
{
get
{
return (this.m_cultureData.SISO3166CTRYNAME2);
}
}
////////////////////////////////////////////////////////////////////////
//
// ThreeLetterWindowsRegionName
//
// Returns the three letter windows region name (ie: USA)
//
////////////////////////////////////////////////////////////////////////
public virtual String ThreeLetterWindowsRegionName
{
get
{
return (this.m_cultureData.SABBREVCTRYNAME);
}
}
////////////////////////////////////////////////////////////////////////
//
// IsMetric
//
// Returns true if this region uses the metric measurement system
// (IMEASURE == 0 means metric)
//
////////////////////////////////////////////////////////////////////////
public virtual bool IsMetric {
get {
int value = this.m_cultureData.IMEASURE;
return (value==0);
}
}
[System.Runtime.InteropServices.ComVisible(false)]
public virtual int GeoId
{
get
{
return (this.m_cultureData.IGEOID);
}
}
////////////////////////////////////////////////////////////////////////
//
// CurrencyEnglishName
//
// English name for this region's currency, ie: Swiss Franc
//
////////////////////////////////////////////////////////////////////////
[System.Runtime.InteropServices.ComVisible(false)]
public virtual String CurrencyEnglishName
{
get
{
return (this.m_cultureData.SENGLISHCURRENCY);
}
}
////////////////////////////////////////////////////////////////////////
//
// CurrencyNativeName
//
// Native name for this region's currency, ie: Schweizer Franken
// WARNING: You need a full locale name for this to make sense.
//
////////////////////////////////////////////////////////////////////////
[System.Runtime.InteropServices.ComVisible(false)]
public virtual String CurrencyNativeName
{
get
{
return (this.m_cultureData.SNATIVECURRENCY);
}
}
////////////////////////////////////////////////////////////////////////
//
// CurrencySymbol
//
// Currency Symbol for this locale, ie: Fr. or $
//
////////////////////////////////////////////////////////////////////////
public virtual String CurrencySymbol {
get {
return (this.m_cultureData.SCURRENCY);
}
}
////////////////////////////////////////////////////////////////////////
//
// ISOCurrencySymbol
//
// ISO Currency Symbol for this locale, ie: CHF
//
////////////////////////////////////////////////////////////////////////
public virtual String ISOCurrencySymbol {
get {
return (this.m_cultureData.SINTLSYMBOL);
}
}
////////////////////////////////////////////////////////////////////////
//
// Equals
//
// Implements Object.Equals(). Returns a boolean indicating whether
// or not object refers to the same RegionInfo as the current instance.
//
// RegionInfos are considered equal if and only if they have the same name
// (ie: en-US)
//
////////////////////////////////////////////////////////////////////////
public override bool Equals(Object value)
{
RegionInfo that = value as RegionInfo;
if (that != null)
{
return this.Name.Equals(that.Name);
}
return (false);
}
////////////////////////////////////////////////////////////////////////
//
// GetHashCode
//
// Implements Object.GetHashCode(). Returns the hash code for the
// CultureInfo. The hash code is guaranteed to be the same for RegionInfo
// A and B where A.Equals(B) is true.
//
////////////////////////////////////////////////////////////////////////
public override int GetHashCode()
{
return (this.Name.GetHashCode());
}
////////////////////////////////////////////////////////////////////////
//
// ToString
//
// Implements Object.ToString(). Returns the name of the Region, ie: es-US
//
////////////////////////////////////////////////////////////////////////
public override String ToString()
{
return (Name);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc.ApplicationModels;
using Microsoft.AspNetCore.Mvc.Infrastructure;
using Microsoft.AspNetCore.Mvc.RazorPages.Infrastructure;
using Microsoft.AspNetCore.Mvc.Routing;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
namespace Microsoft.AspNetCore.Builder
{
/// <summary>
/// Contains extension methods for using Razor Pages with <see cref="IEndpointRouteBuilder"/>.
/// </summary>
public static class RazorPagesEndpointRouteBuilderExtensions
{
/// <summary>
/// Adds endpoints for Razor Pages to the <see cref="IEndpointRouteBuilder"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/>.</param>
/// <returns>An <see cref="PageActionEndpointConventionBuilder"/> for endpoints associated with Razor Pages.</returns>
public static PageActionEndpointConventionBuilder MapRazorPages(this IEndpointRouteBuilder endpoints)
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
EnsureRazorPagesServices(endpoints);
return GetOrCreateDataSource(endpoints).DefaultBuilder;
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will match
/// requests for non-file-names with the lowest possible priority. The request will be routed to a page endpoint that
/// matches <paramref name="page"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="page">The page name.</param>
/// <returns>An <see cref="IEndpointConventionBuilder"/> that can be used to further customize the endpoint.</returns>
/// <remarks>
/// <para>
/// <see cref="MapFallbackToPage(IEndpointRouteBuilder, string)"/> is intended to handle cases where URL path of
/// the request does not contain a file name, and no other endpoint has matched. This is convenient for routing
/// requests for dynamic content to a SPA framework, while also allowing requests for non-existent files to
/// result in an HTTP 404.
/// </para>
/// <para>
/// <see cref="MapFallbackToPage(IEndpointRouteBuilder, string)"/> registers an endpoint using the pattern
/// <c>{*path:nonfile}</c>. The order of the registered endpoint will be <c>int.MaxValue</c>.
/// </para>
/// <para>
/// <see cref="MapFallbackToPage(IEndpointRouteBuilder, string)"/> does not re-execute routing, and will
/// not generate route values based on routes defined elsewhere. When using this overload, the <c>path</c> route value
/// will be available.
/// </para>
/// </remarks>
public static IEndpointConventionBuilder MapFallbackToPage(this IEndpointRouteBuilder endpoints, string page)
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
if (page == null)
{
throw new ArgumentNullException(nameof(page));
}
PageConventionCollection.EnsureValidPageName(page, nameof(page));
EnsureRazorPagesServices(endpoints);
// Called for side-effect to make sure that the data source is registered.
var pageDataSource = GetOrCreateDataSource(endpoints);
pageDataSource.CreateInertEndpoints = true;
RegisterInCache(endpoints.ServiceProvider, pageDataSource);
// Maps a fallback endpoint with an empty delegate. This is OK because
// we don't expect the delegate to run.
var builder = endpoints.MapFallback(context => Task.CompletedTask);
builder.Add(b =>
{
// MVC registers a policy that looks for this metadata.
b.Metadata.Add(CreateDynamicPageMetadata(page, area: null));
b.Metadata.Add(new PageEndpointDataSourceIdMetadata(pageDataSource.DataSourceId));
});
return builder;
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will match
/// requests for non-file-names with the lowest possible priority. The request will be routed to a page endpoint that
/// matches <paramref name="page"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="pattern">The route pattern.</param>
/// <param name="page">The page name.</param>
/// <returns>An <see cref="IEndpointConventionBuilder"/> that can be used to further customize the endpoint.</returns>
/// <remarks>
/// <para>
/// <see cref="MapFallbackToPage(IEndpointRouteBuilder, string, string)"/> is intended to handle cases where URL path of
/// the request does not contain a file name, and no other endpoint has matched. This is convenient for routing
/// requests for dynamic content to a SPA framework, while also allowing requests for non-existent files to
/// result in an HTTP 404.
/// </para>
/// <para>
/// The order of the registered endpoint will be <c>int.MaxValue</c>.
/// </para>
/// <para>
/// This overload will use the provided <paramref name="pattern"/> verbatim. Use the <c>:nonfile</c> route constraint
/// to exclude requests for static files.
/// </para>
/// <para>
/// <see cref="MapFallbackToPage(IEndpointRouteBuilder, string, string)"/> does not re-execute routing, and will
/// not generate route values based on routes defined elsewhere. When using this overload, the route values provided by matching
/// <paramref name="pattern"/> will be available.
/// </para>
/// </remarks>
public static IEndpointConventionBuilder MapFallbackToPage(
this IEndpointRouteBuilder endpoints,
string pattern,
string page)
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
if (pattern == null)
{
throw new ArgumentNullException(nameof(pattern));
}
if (page == null)
{
throw new ArgumentNullException(nameof(page));
}
PageConventionCollection.EnsureValidPageName(page, nameof(page));
EnsureRazorPagesServices(endpoints);
// Called for side-effect to make sure that the data source is registered.
var pageDataSource = GetOrCreateDataSource(endpoints);
pageDataSource.CreateInertEndpoints = true;
RegisterInCache(endpoints.ServiceProvider, pageDataSource);
// Maps a fallback endpoint with an empty delegate. This is OK because
// we don't expect the delegate to run.
var builder = endpoints.MapFallback(pattern, context => Task.CompletedTask);
builder.Add(b =>
{
// MVC registers a policy that looks for this metadata.
b.Metadata.Add(CreateDynamicPageMetadata(page, area: null));
b.Metadata.Add(new PageEndpointDataSourceIdMetadata(pageDataSource.DataSourceId));
});
return builder;
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will match
/// requests for non-file-names with the lowest possible priority. The request will be routed to a page endpoint that
/// matches <paramref name="page"/>, and <paramref name="area"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="page">The page name.</param>
/// <param name="area">The area name.</param>
/// <returns>An <see cref="IEndpointConventionBuilder"/> that can be used to further customize the endpoint.</returns>
/// <remarks>
/// <para>
/// <see cref="MapFallbackToAreaPage(IEndpointRouteBuilder, string, string)"/> is intended to handle cases where URL path of
/// the request does not contain a file name, and no other endpoint has matched. This is convenient for routing
/// requests for dynamic content to a SPA framework, while also allowing requests for non-existent files to
/// result in an HTTP 404.
/// </para>
/// <para>
/// <see cref="MapFallbackToAreaPage(IEndpointRouteBuilder, string, string)"/> registers an endpoint using the pattern
/// <c>{*path:nonfile}</c>. The order of the registered endpoint will be <c>int.MaxValue</c>.
/// </para>
/// <para>
/// <see cref="MapFallbackToAreaPage(IEndpointRouteBuilder, string, string)"/> does not re-execute routing, and will
/// not generate route values based on routes defined elsewhere. When using this overload, the <c>path</c> route value
/// will be available.
/// </para>
/// </remarks>
public static IEndpointConventionBuilder MapFallbackToAreaPage(
this IEndpointRouteBuilder endpoints,
string page,
string area)
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
if (page == null)
{
throw new ArgumentNullException(nameof(page));
}
PageConventionCollection.EnsureValidPageName(page, nameof(page));
EnsureRazorPagesServices(endpoints);
// Called for side-effect to make sure that the data source is registered.
var pageDataSource = GetOrCreateDataSource(endpoints);
pageDataSource.CreateInertEndpoints = true;
RegisterInCache(endpoints.ServiceProvider, pageDataSource);
// Maps a fallback endpoint with an empty delegate. This is OK because
// we don't expect the delegate to run.
var builder = endpoints.MapFallback(context => Task.CompletedTask);
builder.Add(b =>
{
// MVC registers a policy that looks for this metadata.
b.Metadata.Add(CreateDynamicPageMetadata(page, area));
b.Metadata.Add(new PageEndpointDataSourceIdMetadata(pageDataSource.DataSourceId));
});
return builder;
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will match
/// requests for non-file-names with the lowest possible priority. The request will be routed to a page endpoint that
/// matches <paramref name="page"/>, and <paramref name="area"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="pattern">The route pattern.</param>
/// <param name="page">The page name.</param>
/// <param name="area">The area name.</param>
/// <returns>An <see cref="IEndpointConventionBuilder"/> that can be used to further customize the endpoint.</returns>
/// <remarks>
/// <para>
/// <see cref="MapFallbackToAreaPage(IEndpointRouteBuilder, string, string, string)"/> is intended to handle cases where URL path of
/// the request does not contain a file name, and no other endpoint has matched. This is convenient for routing
/// requests for dynamic content to a SPA framework, while also allowing requests for non-existent files to
/// result in an HTTP 404.
/// </para>
/// <para>
/// The order of the registered endpoint will be <c>int.MaxValue</c>.
/// </para>
/// <para>
/// This overload will use the provided <paramref name="pattern"/> verbatim. Use the <c>:nonfile</c> route constraint
/// to exclude requests for static files.
/// </para>
/// <para>
/// <see cref="MapFallbackToAreaPage(IEndpointRouteBuilder, string, string, string)"/> does not re-execute routing, and will
/// not generate route values based on routes defined elsewhere. When using this overload, the route values provided by matching
/// <paramref name="pattern"/> will be available.
/// </para>
/// </remarks>
public static IEndpointConventionBuilder MapFallbackToAreaPage(
this IEndpointRouteBuilder endpoints,
string pattern,
string page,
string area)
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
if (pattern == null)
{
throw new ArgumentNullException(nameof(pattern));
}
if (page == null)
{
throw new ArgumentNullException(nameof(page));
}
PageConventionCollection.EnsureValidPageName(page, nameof(page));
EnsureRazorPagesServices(endpoints);
// Called for side-effect to make sure that the data source is registered.
var pageDataSource = GetOrCreateDataSource(endpoints);
pageDataSource.CreateInertEndpoints = true;
RegisterInCache(endpoints.ServiceProvider, pageDataSource);
// Maps a fallback endpoint with an empty delegate. This is OK because
// we don't expect the delegate to run.
var builder = endpoints.MapFallback(pattern, context => Task.CompletedTask);
builder.Add(b =>
{
// MVC registers a policy that looks for this metadata.
b.Metadata.Add(CreateDynamicPageMetadata(page, area));
b.Metadata.Add(new PageEndpointDataSourceIdMetadata(pageDataSource.DataSourceId));
});
return builder;
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will
/// attempt to select a page using the route values produced by <typeparamref name="TTransformer"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="pattern">The URL pattern of the route.</param>
/// <typeparam name="TTransformer">The type of a <see cref="DynamicRouteValueTransformer"/>.</typeparam>
/// <remarks>
/// <para>
/// This method allows the registration of a <see cref="RouteEndpoint"/> and <see cref="DynamicRouteValueTransformer"/>
/// that combine to dynamically select a page using custom logic.
/// </para>
/// <para>
/// The instance of <typeparamref name="TTransformer"/> will be retrieved from the dependency injection container.
/// Register <typeparamref name="TTransformer"/> with the desired service lifetime in <c>ConfigureServices</c>.
/// </para>
/// </remarks>
public static void MapDynamicPageRoute<TTransformer>(this IEndpointRouteBuilder endpoints, string pattern)
where TTransformer : DynamicRouteValueTransformer
{
// Delegates to the stateful overload with no transformer state.
MapDynamicPageRoute<TTransformer>(endpoints, pattern, state: null);
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will
/// attempt to select a page using the route values produced by <typeparamref name="TTransformer"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="pattern">The URL pattern of the route.</param>
/// <param name="state">A state object to provide to the <typeparamref name="TTransformer" /> instance.</param>
/// <typeparam name="TTransformer">The type of a <see cref="DynamicRouteValueTransformer"/>.</typeparam>
/// <remarks>
/// <para>
/// This method allows the registration of a <see cref="RouteEndpoint"/> and <see cref="DynamicRouteValueTransformer"/>
/// that combine to dynamically select a page using custom logic.
/// </para>
/// <para>
/// The instance of <typeparamref name="TTransformer"/> will be retrieved from the dependency injection container.
/// Register <typeparamref name="TTransformer"/> with the desired service lifetime in <c>ConfigureServices</c>.
/// </para>
/// </remarks>
public static void MapDynamicPageRoute<TTransformer>(this IEndpointRouteBuilder endpoints, string pattern, object? state)
where TTransformer : DynamicRouteValueTransformer
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
if (pattern == null)
{
throw new ArgumentNullException(nameof(pattern));
}
EnsureRazorPagesServices(endpoints);
// Called for side-effect to make sure that the data source is registered.
var pageDataSource = GetOrCreateDataSource(endpoints);
RegisterInCache(endpoints.ServiceProvider, pageDataSource);
pageDataSource.AddDynamicPageEndpoint(endpoints, pattern, typeof(TTransformer), state);
}
/// <summary>
/// Adds a specialized <see cref="RouteEndpoint"/> to the <see cref="IEndpointRouteBuilder"/> that will
/// attempt to select a page using the route values produced by <typeparamref name="TTransformer"/>.
/// </summary>
/// <param name="endpoints">The <see cref="IEndpointRouteBuilder"/> to add the route to.</param>
/// <param name="pattern">The URL pattern of the route.</param>
/// <param name="state">A state object to provide to the <typeparamref name="TTransformer" /> instance.</param>
/// <param name="order">The matching order for the dynamic route.</param>
/// <typeparam name="TTransformer">The type of a <see cref="DynamicRouteValueTransformer"/>.</typeparam>
/// <remarks>
/// <para>
/// This method allows the registration of a <see cref="RouteEndpoint"/> and <see cref="DynamicRouteValueTransformer"/>
/// that combine to dynamically select a page using custom logic.
/// </para>
/// <para>
/// The instance of <typeparamref name="TTransformer"/> will be retrieved from the dependency injection container.
/// Register <typeparamref name="TTransformer"/> with the desired service lifetime in <c>ConfigureServices</c>.
/// </para>
/// </remarks>
public static void MapDynamicPageRoute<TTransformer>(this IEndpointRouteBuilder endpoints, string pattern, object state, int order)
where TTransformer : DynamicRouteValueTransformer
{
if (endpoints == null)
{
throw new ArgumentNullException(nameof(endpoints));
}
if (pattern == null)
{
throw new ArgumentNullException(nameof(pattern));
}
EnsureRazorPagesServices(endpoints);
// Called for side-effect to make sure that the data source is registered.
var pageDataSource = GetOrCreateDataSource(endpoints);
RegisterInCache(endpoints.ServiceProvider, pageDataSource);
pageDataSource.AddDynamicPageEndpoint(endpoints, pattern, typeof(TTransformer), state, order);
}
// Builds the route-value metadata ({page, area}) that the dynamic page
// endpoint matcher policy consumes.
private static DynamicPageMetadata CreateDynamicPageMetadata(string page, string? area)
{
return new DynamicPageMetadata(new RouteValueDictionary()
{
{ "page", page },
{ "area", area }
});
}
// Throws if AddRazorPages was not called in ConfigureServices; the factory
// service acts as a marker for the required Razor Pages services.
private static void EnsureRazorPagesServices(IEndpointRouteBuilder endpoints)
{
var marker = endpoints.ServiceProvider.GetService<PageActionEndpointDataSourceFactory>();
if (marker == null)
{
throw new InvalidOperationException(Mvc.Core.Resources.FormatUnableToFindServices(
nameof(IServiceCollection),
"AddRazorPages",
"ConfigureServices(...)"));
}
}
// Returns the single PageActionEndpointDataSource for this route builder,
// creating and registering it on first use.
private static PageActionEndpointDataSource GetOrCreateDataSource(IEndpointRouteBuilder endpoints)
{
var dataSource = endpoints.DataSources.OfType<PageActionEndpointDataSource>().FirstOrDefault();
if (dataSource == null)
{
var orderProviderCache = endpoints.ServiceProvider.GetRequiredService<OrderedEndpointsSequenceProviderCache>();
var factory = endpoints.ServiceProvider.GetRequiredService<PageActionEndpointDataSourceFactory>();
dataSource = factory.Create(orderProviderCache.GetOrCreateOrderedEndpointsSequenceProvider(endpoints));
endpoints.DataSources.Add(dataSource);
}
return dataSource;
}
// Registers the data source in the selector cache so dynamic/fallback
// endpoints can resolve it later by id.
private static void RegisterInCache(IServiceProvider serviceProvider, PageActionEndpointDataSource dataSource)
{
var cache = serviceProvider.GetRequiredService<DynamicPageEndpointSelectorCache>();
cache.AddDataSource(dataSource);
}
}
}
| |
using UnityEngine;
using System.Collections.Generic;
#if UNITY_EDITOR || !UNITY_FLASH
namespace tk2dRuntime.TileMap
{
public static class RenderMeshBuilder
{
// Builds (or rebuilds) the render mesh for a single tile-map chunk.
// Vertices, UVs, optional secondary UVs and vertex colors are accumulated tile
// by tile, then split into one submesh per sprite-collection material.
// (baseX, baseY) is the chunk's tile offset within the whole map; skipPrefabs
// omits tiles that are rendered via prefab instances instead of the mesh.
public static void BuildForChunk(tk2dTileMap tileMap, SpriteChunk chunk, ColorChunk colorChunk, bool useColor, bool skipPrefabs, int baseX, int baseY)
{
List<Vector3> meshVertices = new List<Vector3>();
List<Color> meshColors = new List<Color>();
List<Vector2> meshUvs = new List<Vector2>();
List<Vector2> meshUv2s = new List<Vector2>();
//List<int> meshIndices = new List<int>();
int[] spriteIds = chunk.spriteIds;
Vector3 tileSize = tileMap.data.tileSize;
int spriteCount = tileMap.SpriteCollectionInst.spriteDefinitions.Length;
Object[] tilePrefabs = tileMap.data.tilePrefabs;
tk2dSpriteDefinition firstSprite = tileMap.SpriteCollectionInst.FirstValidDefinition;
// Only recalculate normals at the end if the sprite data actually carries them.
bool buildNormals = (firstSprite != null && firstSprite.normals != null && firstSprite.normals.Length > 0);
bool generateUv2 = tileMap.data.generateUv2;
var colorMode = tileMap.data.colorMode;
Color32 clearColor = (useColor && tileMap.ColorChannel != null)?tileMap.ColorChannel.clearColor:Color.white;
// revert to no color mode (i.e. fill with clear color) when there isn't a color channel, or it is empty
if (colorChunk == null || colorChunk.colors.Length == 0)
useColor = false;
// Iteration order depends on the map's sort method (determines draw order).
int x0, x1, dx;
int y0, y1, dy;
BuilderUtil.GetLoopOrder(tileMap.data.sortMethod,
tileMap.partitionSizeX, tileMap.partitionSizeY,
out x0, out x1, out dx,
out y0, out y1, out dy);
float xOffsetMult = 0.0f, yOffsetMult = 0.0f;
tileMap.data.GetTileOffset(out xOffsetMult, out yOffsetMult);
// One index list per material; non-empty lists become submeshes below.
List<int>[] meshIndices = new List<int>[tileMap.SpriteCollectionInst.materials.Length];
for (int j = 0; j < meshIndices.Length; ++j)
meshIndices[j] = new List<int>();
// Colors are sampled at tile corners, so the color grid has one extra column
// (and row) per partition: (partitionSizeX + 1) entries per row.
int colorChunkSize = tileMap.partitionSizeX + 1;
for (int y = y0; y != y1; y += dy)
{
// Per-row x offset applied on odd map rows — presumably for staggered /
// isometric layouts; exact semantics come from data.GetTileOffset.
float xOffset = ((baseY + y) & 1) * xOffsetMult;
for (int x = x0; x != x1; x += dx)
{
// Raw sprite id packs the tile index plus flip/rotate flag bits.
int spriteId = spriteIds[y * tileMap.partitionSizeX + x];
int tile = BuilderUtil.GetTileFromRawTile(spriteId);
bool flipH = BuilderUtil.IsRawTileFlagSet(spriteId, tk2dTileFlags.FlipX);
bool flipV = BuilderUtil.IsRawTileFlagSet(spriteId, tk2dTileFlags.FlipY);
bool rot90 = BuilderUtil.IsRawTileFlagSet(spriteId, tk2dTileFlags.Rot90);
Vector3 currentPos = new Vector3(tileSize.x * (x + xOffset), tileSize.y * y, 0);
// Skip empty tiles and out-of-range ids.
if (tile < 0 || tile >= spriteCount)
continue;
if (skipPrefabs && tilePrefabs[tile])
continue;
var sprite = tileMap.SpriteCollectionInst.spriteDefinitions[tile];
int baseVertex = meshVertices.Count;
for (int v = 0; v < sprite.positions.Length; ++v)
{
Vector3 flippedPos = BuilderUtil.ApplySpriteVertexTileFlags(tileMap, sprite, sprite.positions[v], flipH, flipV, rot90);
if (useColor && colorChunk != null)
{
// Corner colors of this tile from the (partitionSizeX+1)-wide color grid.
Color tileColorx0y0 = colorChunk.colors[y * colorChunkSize + x];
Color tileColorx1y0 = colorChunk.colors[y * colorChunkSize + x + 1];
Color tileColorx0y1 = colorChunk.colors[(y + 1) * colorChunkSize + x];
Color tileColorx1y1 = colorChunk.colors[(y + 1) * colorChunkSize + (x + 1)];
switch (colorMode)
{
case tk2dTileMapData.ColorMode.Interpolate:
{
// Bilinearly interpolate the four corner colors at this vertex's
// normalized position within the tile.
Vector3 centeredSpriteVertex = flippedPos - sprite.untrimmedBoundsData[0];
Vector3 alignedSpriteVertex = centeredSpriteVertex + tileMap.data.tileSize * 0.5f;
float tileColorX = Mathf.Clamp01(alignedSpriteVertex.x / tileMap.data.tileSize.x);
float tileColorY = Mathf.Clamp01(alignedSpriteVertex.y / tileMap.data.tileSize.y);
Color color = Color.Lerp(
Color.Lerp(tileColorx0y0, tileColorx1y0, tileColorX),
Color.Lerp(tileColorx0y1, tileColorx1y1, tileColorX),
tileColorY);
meshColors.Add(color);
break;
}
case tk2dTileMapData.ColorMode.Solid:
{
// Solid mode: whole tile takes the bottom-left corner color.
meshColors.Add(tileColorx0y0);
break;
}
}
}
else
{
meshColors.Add(clearColor);
}
if (generateUv2)
{
if (sprite.normalizedUvs.Length == 0)
{
meshUv2s.Add(Vector2.zero);
}
else
{
meshUv2s.Add(sprite.normalizedUvs[v]);
}
}
meshVertices.Add(currentPos + flippedPos);
meshUvs.Add(sprite.uvs[v]);
}
// A single flip mirrors the tile, which inverts triangle winding; reverse
// the index order to keep faces front-facing. Two flips cancel out.
bool reverseIndices = false; // flipped?
if (flipH) reverseIndices = !reverseIndices;
if (flipV) reverseIndices = !reverseIndices;
List<int> indices = meshIndices[sprite.materialId];
for (int i = 0; i < sprite.indices.Length; ++i) {
int j = reverseIndices ? (sprite.indices.Length - 1 - i) : i;
indices.Add(baseVertex + sprite.indices[j]);
}
}
}
// Upload accumulated geometry into the chunk's mesh.
if (chunk.mesh == null)
chunk.mesh = tk2dUtil.CreateMesh();
chunk.mesh.Clear();
chunk.mesh.vertices = meshVertices.ToArray();
chunk.mesh.uv = meshUvs.ToArray();
if (generateUv2)
{
chunk.mesh.uv2 = meshUv2s.ToArray();
}
chunk.mesh.colors = meshColors.ToArray();
// First pass: collect the materials that actually received triangles.
List<Material> materials = new List<Material>();
int materialId = 0;
int subMeshCount = 0;
foreach (var indices in meshIndices)
{
if (indices.Count > 0)
{
materials.Add(tileMap.SpriteCollectionInst.materialInsts[materialId]);
subMeshCount++;
}
materialId++;
}
if (subMeshCount > 0)
{
chunk.mesh.subMeshCount = subMeshCount;
chunk.gameObject.GetComponent<Renderer>().materials = materials.ToArray();
// Second pass: assign each non-empty index list to its submesh, in the
// same order the materials were collected above.
int subMeshId = 0;
foreach (var indices in meshIndices)
{
if (indices.Count > 0)
{
chunk.mesh.SetTriangles(indices.ToArray(), subMeshId);
subMeshId++;
}
}
}
chunk.mesh.RecalculateBounds();
if (buildNormals) {
chunk.mesh.RecalculateNormals();
}
var meshFilter = chunk.gameObject.GetComponent<MeshFilter>();
meshFilter.sharedMesh = chunk.mesh;
}
/// <summary>
/// Rebuilds the render meshes for every chunk of every layer in the tile map.
/// </summary>
/// <param name="tileMap">Tile map to build.</param>
/// <param name="editMode">True when invoked from the editor; prefab tiles are then drawn as sprites and collider-only layers still get meshes.</param>
/// <param name="forceBuild">True to rebuild all chunks; false rebuilds only dirty ones.</param>
public static void Build(tk2dTileMap tileMap, bool editMode, bool forceBuild)
{
	// At runtime, tiles that map to prefabs are skipped (the prefab instance is
	// used instead); in edit mode they are rendered so the designer can see them.
	bool skipPrefabs = !editMode;
	bool incremental = !forceBuild;
	int numLayers = tileMap.data.NumLayers;
	for (int layerId = 0; layerId < numLayers; ++layerId)
	{
		var layer = tileMap.Layers[layerId];
		if (layer.IsEmpty)
			continue;
		var layerData = tileMap.data.Layers[layerId];
		// Vertex colors are generated only when the map has a painted color
		// channel and this layer opts in.
		bool useColor = !tileMap.ColorChannel.IsEmpty && tileMap.data.Layers[layerId].useColor;
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
		bool useSortingLayer = tileMap.data.useSortingLayers;
#endif
		for (int cellY = 0; cellY < layer.numRows; ++cellY)
		{
			int baseY = cellY * layer.divY;
			for (int cellX = 0; cellX < layer.numColumns; ++cellX)
			{
				int baseX = cellX * layer.divX;
				var chunk = layer.GetChunk(cellX, cellY);
				ColorChunk colorChunk = tileMap.ColorChannel.GetChunk(cellX, cellY);
				bool colorChunkDirty = (colorChunk != null) && colorChunk.Dirty;
				// Incremental builds only touch chunks whose tile or color data changed.
				if (incremental && !colorChunkDirty && !chunk.Dirty)
					continue;
				if (chunk.mesh != null)
					chunk.mesh.Clear();
				if (chunk.IsEmpty)
					continue;
				// Simplified from "editMode || (!editMode && !skip)" — logically
				// equivalent: a layer can skip mesh generation only at runtime.
				if (editMode || !layerData.skipMeshGeneration)
				{
					BuildForChunk(tileMap, chunk, colorChunk, useColor, skipPrefabs, baseX, baseY);
#if !(UNITY_3_5 || UNITY_4_0 || UNITY_4_0_1 || UNITY_4_1 || UNITY_4_2)
					if (chunk.gameObject != null && useSortingLayer)
					{
						Renderer r = chunk.gameObject.GetComponent<Renderer>();
						if (r != null)
						{
							r.sortingLayerName = layerData.sortingLayerName;
							r.sortingOrder = layerData.sortingOrder;
						}
					}
#endif
				}
				if (chunk.mesh != null)
					tileMap.TouchMesh(chunk.mesh);
			}
		}
	}
}
}
}
#endif
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
using System;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
#if SILVERLIGHT
using System.Core;
#endif
#if CLR2
namespace Microsoft.Scripting.Ast {
#else
namespace System.Linq.Expressions {
#endif
/// <summary>
/// Represents accessing a field or property.
/// </summary>
#if !SILVERLIGHT
[DebuggerTypeProxy(typeof(Expression.MemberExpressionProxy))]
#endif
public class MemberExpression : Expression {
    // The instance whose member is accessed; null for static members.
    private readonly Expression _expression;
    /// <summary>
    /// Gets the field or property to be accessed.
    /// </summary>
    public MemberInfo Member {
        get { return GetMember(); }
    }
    /// <summary>
    /// Gets the containing object of the field or property.
    /// </summary>
    public Expression Expression {
        get { return _expression; }
    }
    // param order: factories args in order, then other args
    internal MemberExpression(Expression expression) {
        _expression = expression;
    }
    // Factory: dispatches on the member kind to the concrete subclass that
    // carries the member's static type (FieldExpression / PropertyExpression).
    // NOTE(review): non-field members are cast to PropertyInfo here; callers
    // are expected to have validated the member kind beforehand.
    internal static MemberExpression Make(Expression expression, MemberInfo member) {
        if (member.MemberType == MemberTypes.Field) {
            FieldInfo fi = (FieldInfo)member;
            return new FieldExpression(expression, fi);
        } else {
            PropertyInfo pi = (PropertyInfo)member;
            return new PropertyExpression(expression, pi);
        }
    }
    /// <summary>
    /// Returns the node type of this <see cref="Expression" />. (Inherited from <see cref="Expression" />.)
    /// </summary>
    /// <returns>The <see cref="ExpressionType"/> that represents this expression.</returns>
    public sealed override ExpressionType NodeType {
        get { return ExpressionType.MemberAccess; }
    }
    // Overridden by the concrete subclasses to surface the FieldInfo/PropertyInfo;
    // this base implementation is unreachable on any constructed node.
    internal virtual MemberInfo GetMember() {
        throw ContractUtils.Unreachable;
    }
    /// <summary>
    /// Dispatches to the specific visit method for this node type.
    /// </summary>
    protected internal override Expression Accept(ExpressionVisitor visitor) {
        return visitor.VisitMember(this);
    }
    /// <summary>
    /// Creates a new expression that is like this one, but using the
    /// supplied children. If all of the children are the same, it will
    /// return this expression.
    /// </summary>
    /// <param name="expression">The <see cref="Expression" /> property of the result.</param>
    /// <returns>This expression if no children changed, or an expression with the updated children.</returns>
    public MemberExpression Update(Expression expression) {
        if (expression == Expression) {
            return this;
        }
        return Expression.MakeMemberAccess(expression, Member);
    }
}
// Concrete member-access node for fields; the node's static type is the
// accessed field's type.
internal class FieldExpression : MemberExpression {
    private readonly FieldInfo _field;

    public FieldExpression(Expression expression, FieldInfo member)
        : base(expression) {
        _field = member;
    }

    // The static type of the expression is simply the field's type.
    public sealed override Type Type {
        get { return _field.FieldType; }
    }

    internal override MemberInfo GetMember() {
        return _field;
    }
}
// Concrete member-access node for properties; the node's static type is the
// accessed property's type.
internal class PropertyExpression : MemberExpression {
    private readonly PropertyInfo _property;

    public PropertyExpression(Expression expression, PropertyInfo member)
        : base(expression) {
        _property = member;
    }

    // The static type of the expression is simply the property's type.
    public sealed override Type Type {
        get { return _property.PropertyType; }
    }

    internal override MemberInfo GetMember() {
        return _property;
    }
}
public partial class Expression {
#region Field
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a field.
/// </summary>
/// <param name="expression">The containing object of the field. This can be null for static fields.</param>
/// <param name="field">The field to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1719:ParameterNamesShouldNotMatchMemberNames")]
public static MemberExpression Field(Expression expression, FieldInfo field) {
    ContractUtils.RequiresNotNull(field, "field");
    if (field.IsStatic) {
        // Static fields must not be given an instance expression.
        if (expression != null) throw new ArgumentException(Strings.OnlyStaticFieldsHaveNullInstance, "expression");
    } else {
        // Instance fields require a readable instance whose type is
        // reference-assignable to the field's declaring type.
        if (expression == null) throw new ArgumentException(Strings.OnlyStaticFieldsHaveNullInstance, "field");
        RequiresCanRead(expression, "expression");
        if (!TypeUtils.AreReferenceAssignable(field.DeclaringType, expression.Type)) {
            throw Error.FieldInfoNotDefinedForType(field.DeclaringType, field.Name, expression.Type);
        }
    }
    return MemberExpression.Make(expression, field);
}
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a field.
/// </summary>
/// <param name="expression">The containing object of the field. This can be null for static fields.</param>
/// <param name="fieldName">The field to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
public static MemberExpression Field(Expression expression, string fieldName) {
    RequiresCanRead(expression, "expression");
    // Consistent with Property(Expression, string): validate the name up front
    // instead of letting Type.GetField throw with a mismatched parameter name.
    ContractUtils.RequiresNotNull(fieldName, "fieldName");
    // bind to public names first, falling back to non-public only when no
    // public field matches.
    FieldInfo fi = expression.Type.GetField(fieldName, BindingFlags.Instance | BindingFlags.Public | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy);
    if (fi == null) {
        fi = expression.Type.GetField(fieldName, BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy);
    }
    if (fi == null) {
        throw Error.InstanceFieldNotDefinedForType(fieldName, expression.Type);
    }
    return Expression.Field(expression, fi);
}
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a field.
/// </summary>
/// <param name="expression">The containing object of the field. This can be null for static fields.</param>
/// <param name="type">The <see cref="Type"/> containing the field.</param>
/// <param name="fieldName">The field to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1719:ParameterNamesShouldNotMatchMemberNames")]
public static MemberExpression Field(Expression expression, Type type, string fieldName) {
    ContractUtils.RequiresNotNull(type, "type");
    // Consistent with Property(Expression, Type, string): validate the name up
    // front instead of letting Type.GetField throw with a mismatched parameter name.
    ContractUtils.RequiresNotNull(fieldName, "fieldName");
    // bind to public names first, falling back to non-public only when no
    // public field matches; both static and instance fields are candidates.
    FieldInfo fi = type.GetField(fieldName, BindingFlags.Static | BindingFlags.Instance | BindingFlags.Public | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy);
    if (fi == null) {
        fi = type.GetField(fieldName, BindingFlags.Static | BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy);
    }
    if (fi == null) {
        throw Error.FieldNotDefinedForType(fieldName, type);
    }
    return Expression.Field(expression, fi);
}
#endregion
#region Property
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a property.
/// </summary>
/// <param name="expression">The containing object of the property. This can be null for static properties.</param>
/// <param name="propertyName">The property to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
public static MemberExpression Property(Expression expression, string propertyName) {
    RequiresCanRead(expression, "expression");
    ContractUtils.RequiresNotNull(propertyName, "propertyName");
    // Prefer a public match; fall back to non-public only when none exists.
    const BindingFlags common = BindingFlags.Instance | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy;
    PropertyInfo property =
        expression.Type.GetProperty(propertyName, common | BindingFlags.Public) ??
        expression.Type.GetProperty(propertyName, common | BindingFlags.NonPublic);
    if (property == null) {
        throw Error.InstancePropertyNotDefinedForType(propertyName, expression.Type);
    }
    return Property(expression, property);
}
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a property.
/// </summary>
/// <param name="expression">The containing object of the property. This can be null for static properties.</param>
/// <param name="type">The <see cref="Type"/> containing the property.</param>
/// <param name="propertyName">The property to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
public static MemberExpression Property(Expression expression, Type type, string propertyName) {
    ContractUtils.RequiresNotNull(type, "type");
    ContractUtils.RequiresNotNull(propertyName, "propertyName");
    // Both static and instance properties are candidates; public matches win.
    const BindingFlags common = BindingFlags.Static | BindingFlags.Instance | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy;
    PropertyInfo property =
        type.GetProperty(propertyName, common | BindingFlags.Public) ??
        type.GetProperty(propertyName, common | BindingFlags.NonPublic);
    if (property == null) {
        throw Error.PropertyNotDefinedForType(propertyName, type);
    }
    return Property(expression, property);
}
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a property.
/// </summary>
/// <param name="expression">The containing object of the property. This can be null for static properties.</param>
/// <param name="property">The property to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1719:ParameterNamesShouldNotMatchMemberNames")]
public static MemberExpression Property(Expression expression, PropertyInfo property) {
    ContractUtils.RequiresNotNull(property, "property");
    // A usable property exposes at least one accessor; staticness is read from
    // the getter when present, otherwise from the setter.
    MethodInfo mi = property.GetGetMethod(true) ?? property.GetSetMethod(true);
    if (mi == null) {
        throw Error.PropertyDoesNotHaveAccessor(property);
    }
    if (mi.IsStatic) {
        // Static properties must not be given an instance expression.
        if (expression != null) throw new ArgumentException(Strings.OnlyStaticPropertiesHaveNullInstance, "expression");
    } else {
        // Instance properties require a readable instance whose type is valid
        // for the property's declaring type.
        if (expression == null) throw new ArgumentException(Strings.OnlyStaticPropertiesHaveNullInstance, "property");
        RequiresCanRead(expression, "expression");
        if (!TypeUtils.IsValidInstanceType(property, expression.Type)) {
            throw Error.PropertyNotDefinedForType(property, expression.Type);
        }
    }
    return MemberExpression.Make(expression, property);
}
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a property.
/// </summary>
/// <param name="expression">The containing object of the property. This can be null for static properties.</param>
/// <param name="propertyAccessor">An accessor method of the property to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
public static MemberExpression Property(Expression expression, MethodInfo propertyAccessor) {
    ContractUtils.RequiresNotNull(propertyAccessor, "propertyAccessor");
    ValidateMethodInfo(propertyAccessor);
    // Map the accessor method back to the property it belongs to, then build
    // the access through the PropertyInfo overload.
    PropertyInfo property = GetProperty(propertyAccessor);
    return Property(expression, property);
}
// Finds the property on the accessor's declaring type whose getter or setter
// is the supplied method; throws when the method is not a property accessor.
private static PropertyInfo GetProperty(MethodInfo mi) {
    Type declaringType = mi.DeclaringType;
    BindingFlags flags = BindingFlags.Public | BindingFlags.NonPublic;
    flags |= mi.IsStatic ? BindingFlags.Static : BindingFlags.Instance;
    foreach (PropertyInfo candidate in declaringType.GetProperties(flags)) {
        if ((candidate.CanRead && CheckMethod(mi, candidate.GetGetMethod(true))) ||
            (candidate.CanWrite && CheckMethod(mi, candidate.GetSetMethod(true)))) {
            return candidate;
        }
    }
    throw Error.MethodNotPropertyAccessor(mi.DeclaringType, mi.Name);
}
// Determines whether "method" is the accessor identified by "propertyMethod".
// For interface members, the MethodInfo handed out by the compiler can differ
// from the one reflection returns, so interfaces get a reflection-based
// fallback comparison by name.
private static bool CheckMethod(MethodInfo method, MethodInfo propertyMethod) {
    if (method == propertyMethod) {
        return true;
    }
    Type declaringType = method.DeclaringType;
    return declaringType.IsInterface
        && method.Name == propertyMethod.Name
        && declaringType.GetMethod(method.Name) == propertyMethod;
}
#endregion
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a property or field.
/// </summary>
/// <param name="expression">The containing object of the member. This can be null for static members.</param>
/// <param name="propertyOrFieldName">The member to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
public static MemberExpression PropertyOrField(Expression expression, string propertyOrFieldName) {
    RequiresCanRead(expression, "expression");
    // Resolution order: public property, public field, non-public property,
    // non-public field.
    const BindingFlags common = BindingFlags.Instance | BindingFlags.IgnoreCase | BindingFlags.FlattenHierarchy;
    PropertyInfo property = expression.Type.GetProperty(propertyOrFieldName, common | BindingFlags.Public);
    if (property != null) {
        return Property(expression, property);
    }
    FieldInfo field = expression.Type.GetField(propertyOrFieldName, common | BindingFlags.Public);
    if (field != null) {
        return Field(expression, field);
    }
    property = expression.Type.GetProperty(propertyOrFieldName, common | BindingFlags.NonPublic);
    if (property != null) {
        return Property(expression, property);
    }
    field = expression.Type.GetField(propertyOrFieldName, common | BindingFlags.NonPublic);
    if (field != null) {
        return Field(expression, field);
    }
    throw Error.NotAMemberOfType(propertyOrFieldName, expression.Type);
}
/// <summary>
/// Creates a <see cref="MemberExpression"/> accessing a property or field.
/// </summary>
/// <param name="expression">The containing object of the member. This can be null for static members.</param>
/// <param name="member">The member to be accessed.</param>
/// <returns>The created <see cref="MemberExpression"/>.</returns>
public static MemberExpression MakeMemberAccess(Expression expression, MemberInfo member) {
    ContractUtils.RequiresNotNull(member, "member");
    // Dispatch on the concrete member kind; anything other than a field or a
    // property is rejected.
    FieldInfo fieldInfo = member as FieldInfo;
    if (fieldInfo != null) {
        return Field(expression, fieldInfo);
    }
    PropertyInfo propertyInfo = member as PropertyInfo;
    if (propertyInfo != null) {
        return Property(expression, propertyInfo);
    }
    throw Error.MemberNotFieldOrProperty(member);
}
}
}
| |
using System;
using NUnit.Framework;
using OpenQA.Selenium.Environment;
namespace OpenQA.Selenium
{
[TestFixture]
public class FormHandlingTests : DriverTestFixture
{
[Test]
public void ShouldClickOnSubmitInputElements()
{
    driver.Url = formsPage;
    driver.FindElement(By.Id("submitButton")).Click();
    WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
    // NUnit's Assert.AreEqual takes (expected, actual); the original had them
    // swapped, which yields misleading failure messages.
    Assert.AreEqual("We Arrive Here", driver.Title);
}
[Test]
public void ClickingOnUnclickableElementsDoesNothing()
{
    driver.Url = formsPage;
    // Clicking the page body is a no-op; the test passes if no exception is thrown.
    driver.FindElement(By.XPath("//body")).Click();
}
[Test]
public void ShouldBeAbleToClickImageButtons()
{
    driver.Url = formsPage;
    driver.FindElement(By.Id("imageButton")).Click();
    WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
    // Assert.AreEqual takes (expected, actual); the original had them swapped.
    Assert.AreEqual("We Arrive Here", driver.Title);
}
[Test]
public void ShouldBeAbleToSubmitForms()
{
    driver.Url = formsPage;
    driver.FindElement(By.Name("login")).Submit();
    WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
    // Assert.AreEqual takes (expected, actual); the original had them swapped.
    Assert.AreEqual("We Arrive Here", driver.Title);
}
[Test]
public void ShouldSubmitAFormWhenAnyInputElementWithinThatFormIsSubmitted()
{
    driver.Url = formsPage;
    driver.FindElement(By.Id("checky")).Submit();
    WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
    // Assert.AreEqual takes (expected, actual); the original had them swapped.
    Assert.AreEqual("We Arrive Here", driver.Title);
}
[Test]
public void ShouldSubmitAFormWhenAnyElementWithinThatFormIsSubmitted()
{
    driver.Url = formsPage;
    driver.FindElement(By.XPath("//form/p")).Submit();
    WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
    // Assert.AreEqual takes (expected, actual); the original had them swapped.
    Assert.AreEqual("We Arrive Here", driver.Title);
}
[Test]
[IgnoreBrowser(Browser.Android)]
[IgnoreBrowser(Browser.Chrome)]
[IgnoreBrowser(Browser.IPhone)]
[IgnoreBrowser(Browser.Opera)]
[IgnoreBrowser(Browser.PhantomJS)]
[IgnoreBrowser(Browser.Safari)]
public void ShouldNotBeAbleToSubmitAFormThatDoesNotExist()
{
driver.Url = formsPage;
Assert.Throws<NoSuchElementException>(() => driver.FindElement(By.Name("SearchableText")).Submit());
}
[Test]
public void ShouldBeAbleToEnterTextIntoATextAreaBySettingItsValue()
{
driver.Url = javascriptPage;
IWebElement textarea = driver.FindElement(By.Id("keyUpArea"));
string cheesey = "Brie and cheddar";
textarea.SendKeys(cheesey);
Assert.AreEqual(textarea.GetAttribute("value"), cheesey);
}
[Test]
public void SendKeysKeepsCapitalization()
{
driver.Url = javascriptPage;
IWebElement textarea = driver.FindElement(By.Id("keyUpArea"));
string cheesey = "BrIe And CheDdar";
textarea.SendKeys(cheesey);
Assert.AreEqual(textarea.GetAttribute("value"), cheesey);
}
[Test]
public void ShouldSubmitAFormUsingTheNewlineLiteral()
{
driver.Url = formsPage;
IWebElement nestedForm = driver.FindElement(By.Id("nested_form"));
IWebElement input = nestedForm.FindElement(By.Name("x"));
input.SendKeys("\n");
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual("We Arrive Here", driver.Title);
Assert.IsTrue(driver.Url.EndsWith("?x=name"));
}
[Test]
public void ShouldSubmitAFormUsingTheEnterKey()
{
driver.Url = formsPage;
IWebElement nestedForm = driver.FindElement(By.Id("nested_form"));
IWebElement input = nestedForm.FindElement(By.Name("x"));
input.SendKeys(Keys.Enter);
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual("We Arrive Here", driver.Title);
Assert.IsTrue(driver.Url.EndsWith("?x=name"));
}
[Test]
public void ShouldEnterDataIntoFormFields()
{
driver.Url = xhtmlTestPage;
IWebElement element = driver.FindElement(By.XPath("//form[@name='someForm']/input[@id='username']"));
String originalValue = element.GetAttribute("value");
Assert.AreEqual(originalValue, "change");
element.Clear();
element.SendKeys("some text");
element = driver.FindElement(By.XPath("//form[@name='someForm']/input[@id='username']"));
String newFormValue = element.GetAttribute("value");
Assert.AreEqual(newFormValue, "some text");
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToAlterTheContentsOfAFileUploadInputElement()
{
driver.Url = formsPage;
IWebElement uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
string filePath = System.IO.Path.Combine(EnvironmentManager.Instance.CurrentDirectory, "test.txt");
System.IO.FileInfo inputFile = new System.IO.FileInfo(filePath);
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
uploadElement.SendKeys(inputFile.FullName);
System.IO.FileInfo outputFile = new System.IO.FileInfo(uploadElement.GetAttribute("value"));
Assert.AreEqual(inputFile.Name, outputFile.Name);
inputFile.Delete();
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToSendKeysToAFileUploadInputElementInAnXhtmlDocument()
{
// IE before 9 doesn't handle pages served with an XHTML content type, and just prompts for to
// download it
if (TestUtilities.IsOldIE(driver))
{
return;
}
driver.Url = xhtmlFormPage;
IWebElement uploadElement = driver.FindElement(By.Id("file"));
Assert.AreEqual(string.Empty, uploadElement.GetAttribute("value"));
string filePath = System.IO.Path.Combine(EnvironmentManager.Instance.CurrentDirectory, "test.txt");
System.IO.FileInfo inputFile = new System.IO.FileInfo(filePath);
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
uploadElement.SendKeys(inputFile.FullName);
System.IO.FileInfo outputFile = new System.IO.FileInfo(uploadElement.GetAttribute("value"));
Assert.AreEqual(inputFile.Name, outputFile.Name);
inputFile.Delete();
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToUploadTheSameFileTwice()
{
    // Create the temporary file that will be uploaded twice.
    string filePath = System.IO.Path.Combine(EnvironmentManager.Instance.CurrentDirectory, "test.txt");
    System.IO.FileInfo inputFile = new System.IO.FileInfo(filePath);
    // 'using' guarantees the writer is closed even if WriteLine throws.
    using (System.IO.StreamWriter inputFileWriter = inputFile.CreateText())
    {
        inputFileWriter.WriteLine("Hello world");
    }
    for (int i = 0; i < 2; ++i)
    {
        driver.Url = formsPage;
        IWebElement uploadElement = driver.FindElement(By.Id("upload"));
        Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
        uploadElement.SendKeys(inputFile.FullName);
        uploadElement.Submit();
    }
    // If we get this far, then we're all good.
    // Clean up the temporary file, matching the other upload tests which
    // delete it; the original leaked test.txt on disk.
    inputFile.Delete();
}
[Test]
public void SendingKeyboardEventsShouldAppendTextInInputs()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("working"));
element.SendKeys("Some");
String value = element.GetAttribute("value");
Assert.AreEqual(value, "Some");
element.SendKeys(" text");
value = element.GetAttribute("value");
Assert.AreEqual(value, "Some text");
}
[Test]
public void SendingKeyboardEventsShouldAppendTextInInputsWithExistingValue()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("inputWithText"));
element.SendKeys(". Some text");
string value = element.GetAttribute("value");
Assert.AreEqual("Example text. Some text", value);
}
[Test]
[IgnoreBrowser(Browser.HtmlUnit, "Not implemented going to the end of the line first")]
public void SendingKeyboardEventsShouldAppendTextInTextAreas()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("withText"));
element.SendKeys(". Some text");
String value = element.GetAttribute("value");
Assert.AreEqual(value, "Example text. Some text");
}
[Test]
public void ShouldBeAbleToClearTextFromInputElements()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("working"));
element.SendKeys("Some text");
String value = element.GetAttribute("value");
Assert.IsTrue(value.Length > 0);
element.Clear();
value = element.GetAttribute("value");
Assert.AreEqual(value.Length, 0);
}
[Test]
public void EmptyTextBoxesShouldReturnAnEmptyStringNotNull()
{
    driver.Url = formsPage;
    IWebElement emptyTextBox = driver.FindElement(By.Id("working"));
    Assert.AreEqual(string.Empty, emptyTextBox.GetAttribute("value"));
    IWebElement emptyTextArea = driver.FindElement(By.Id("emptyTextArea"));
    // BUG FIX: the original re-asserted emptyTextBox here (copy-paste error),
    // leaving the text area's value completely untested.
    Assert.AreEqual(string.Empty, emptyTextArea.GetAttribute("value"));
}
[Test]
public void ShouldBeAbleToClearTextFromTextAreas()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("withText"));
element.SendKeys("Some text");
String value = element.GetAttribute("value");
Assert.IsTrue(value.Length > 0);
element.Clear();
value = element.GetAttribute("value");
Assert.AreEqual(value.Length, 0);
}
[Test]
[IgnoreBrowser(Browser.IE, "Hangs")]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Opera, "Untested")]
[IgnoreBrowser(Browser.PhantomJS, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support alert handling")]
public void HandleFormWithJavascriptAction()
{
string url = EnvironmentManager.Instance.UrlBuilder.WhereIs("form_handling_js_submit.html");
driver.Url = url;
IWebElement element = driver.FindElement(By.Id("theForm"));
element.Submit();
IAlert alert = driver.SwitchTo().Alert();
string text = alert.Text;
alert.Dismiss();
Assert.AreEqual("Tasty cheese", text);
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
public void CanClickOnASubmitButton()
{
CheckSubmitButton("internal_explicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
public void CanClickOnAnImplicitSubmitButton()
{
CheckSubmitButton("internal_implicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Fails on HtmlUnit")]
[IgnoreBrowser(Browser.IE, "Fails on IE")]
public void CanClickOnAnExternalSubmitButton()
{
CheckSubmitButton("external_explicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Fails on HtmlUnit")]
[IgnoreBrowser(Browser.IE, "Fails on IE")]
public void CanClickOnAnExternalImplicitSubmitButton()
{
CheckSubmitButton("external_implicit_submit");
}
// Shared driver for the HTML5 submit-button tests: types a name, clicks the
// given button, and verifies the form was submitted with that name.
private void CheckSubmitButton(string buttonId)
{
    driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("click_tests/html5_submit_buttons.html");
    const string name = "Gromit";
    driver.FindElement(By.Id("name")).SendKeys(name);
    driver.FindElement(By.Id(buttonId)).Click();
    WaitFor(TitleToBe("Submitted Successfully!"), "Browser title is not 'Submitted Successfully!'");
    Assert.That(driver.Url.Contains("name=" + name), "URL does not contain 'name=" + name + "'. Actual URL:" + driver.Url);
}
// Builds a wait predicate that is true once the browser title equals the
// desired title.
private Func<bool> TitleToBe(string desiredTitle)
{
    return () => driver.Title == desiredTitle;
}
}
}
| |
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using FluentAssertions;
using JsonApiDotNetCore.Configuration;
using JsonApiDotNetCore.Resources;
using JsonApiDotNetCore.Serialization.Objects;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using TestBuildingBlocks;
using Xunit;
namespace JsonApiDotNetCoreTests.IntegrationTests.AtomicOperations.ResourceDefinitions.Serialization
{
public sealed class AtomicSerializationResourceDefinitionTests
: IClassFixture<IntegrationTestContext<TestableStartup<OperationsDbContext>, OperationsDbContext>>
{
private readonly IntegrationTestContext<TestableStartup<OperationsDbContext>, OperationsDbContext> _testContext;
private readonly OperationsFakers _fakers = new();
public AtomicSerializationResourceDefinitionTests(IntegrationTestContext<TestableStartup<OperationsDbContext>, OperationsDbContext> testContext)
{
    _testContext = testContext;
    testContext.UseController<OperationsController>();
    testContext.ConfigureServicesAfterStartup(services =>
    {
        // Register the resource definition under test and a shared counter that
        // records which extensibility points were invoked.
        services.AddResourceDefinition<RecordCompanyDefinition>();
        services.AddSingleton<ResourceDefinitionHitCounter>();
        // NOTE(review): by its name, NeverSameResourceChangeTracker appears to
        // report every resource as changed so responses include side effects —
        // confirm against its implementation.
        services.AddScoped(typeof(IResourceChangeTracker<>), typeof(NeverSameResourceChangeTracker<>));
    });
    // The counter is a singleton shared across tests in this class, so reset it
    // before each test (xUnit constructs the class per test).
    var hitCounter = _testContext.Factory.Services.GetRequiredService<ResourceDefinitionHitCounter>();
    hitCounter.Reset();
}
[Fact]
public async Task Transforms_on_create_resource_with_side_effects()
{
    // Arrange
    var hitCounter = _testContext.Factory.Services.GetRequiredService<ResourceDefinitionHitCounter>();
    List<RecordCompany> newCompanies = _fakers.RecordCompany.Generate(2);
    // Start from an empty table so the count assertions below are exact.
    await _testContext.RunOnDatabaseAsync(async dbContext =>
    {
        await dbContext.ClearTableAsync<RecordCompany>();
    });
    // Two atomic "add" operations creating record companies from faker data.
    var requestBody = new
    {
        atomic__operations = new[]
        {
            new
            {
                op = "add",
                data = new
                {
                    type = "recordCompanies",
                    attributes = new
                    {
                        name = newCompanies[0].Name,
                        countryOfResidence = newCompanies[0].CountryOfResidence
                    }
                }
            },
            new
            {
                op = "add",
                data = new
                {
                    type = "recordCompanies",
                    attributes = new
                    {
                        name = newCompanies[1].Name,
                        countryOfResidence = newCompanies[1].CountryOfResidence
                    }
                }
            }
        }
    };
    const string route = "/operations";
    // Act
    (HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecutePostAtomicAsync<Document>(route, requestBody);
    // Assert
    httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
    // In the response, both attributes come back upper-cased (serialize-side transform).
    responseDocument.Results.Should().HaveCount(2);
    responseDocument.Results[0].Data.SingleValue.Attributes["name"].Should().Be(newCompanies[0].Name.ToUpperInvariant());
    responseDocument.Results[0].Data.SingleValue.Attributes["countryOfResidence"].Should().Be(newCompanies[0].CountryOfResidence.ToUpperInvariant());
    responseDocument.Results[1].Data.SingleValue.Attributes["name"].Should().Be(newCompanies[1].Name.ToUpperInvariant());
    responseDocument.Results[1].Data.SingleValue.Attributes["countryOfResidence"].Should().Be(newCompanies[1].CountryOfResidence.ToUpperInvariant());
    // In the database only Name is stored upper-cased while CountryOfResidence
    // keeps its original casing — presumably RecordCompanyDefinition.OnDeserialize
    // transforms only Name; confirm against that class.
    await _testContext.RunOnDatabaseAsync(async dbContext =>
    {
        List<RecordCompany> companiesInDatabase = await dbContext.RecordCompanies.ToListAsync();
        companiesInDatabase.Should().HaveCount(2);
        companiesInDatabase[0].Name.Should().Be(newCompanies[0].Name.ToUpperInvariant());
        companiesInDatabase[0].CountryOfResidence.Should().Be(newCompanies[0].CountryOfResidence);
        companiesInDatabase[1].Name.Should().Be(newCompanies[1].Name.ToUpperInvariant());
        companiesInDatabase[1].CountryOfResidence.Should().Be(newCompanies[1].CountryOfResidence);
    });
    // Exactly one OnDeserialize and one OnSerialize callback per resource, in order.
    hitCounter.HitExtensibilityPoints.Should().BeEquivalentTo(new[]
    {
        (typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnDeserialize),
        (typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnDeserialize),
        (typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnSerialize),
        (typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnSerialize)
    }, options => options.WithStrictOrdering());
}
[Fact]
public async Task Skips_on_create_resource_with_ToOne_relationship()
{
// Arrange
var hitCounter = _testContext.Factory.Services.GetRequiredService<ResourceDefinitionHitCounter>();
RecordCompany existingCompany = _fakers.RecordCompany.Generate();
string newTrackTitle = _fakers.MusicTrack.Generate().Title;
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
dbContext.RecordCompanies.Add(existingCompany);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
atomic__operations = new[]
{
new
{
op = "add",
data = new
{
type = "musicTracks",
attributes = new
{
title = newTrackTitle
},
relationships = new
{
ownedBy = new
{
data = new
{
type = "recordCompanies",
id = existingCompany.StringId
}
}
}
}
}
}
};
const string route = "/operations";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecutePostAtomicAsync<Document>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
responseDocument.Results.Should().HaveCount(1);
hitCounter.HitExtensibilityPoints.Should().BeEmpty();
}
[Fact]
public async Task Transforms_on_update_resource_with_side_effects()
{
// Arrange
var hitCounter = _testContext.Factory.Services.GetRequiredService<ResourceDefinitionHitCounter>();
List<RecordCompany> existingCompanies = _fakers.RecordCompany.Generate(2);
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
await dbContext.ClearTableAsync<RecordCompany>();
dbContext.RecordCompanies.AddRange(existingCompanies);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
atomic__operations = new[]
{
new
{
op = "update",
data = new
{
type = "recordCompanies",
id = existingCompanies[0].StringId,
attributes = new
{
}
}
},
new
{
op = "update",
data = new
{
type = "recordCompanies",
id = existingCompanies[1].StringId,
attributes = new
{
}
}
}
}
};
const string route = "/operations";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecutePostAtomicAsync<Document>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
string country0 = existingCompanies[0].CountryOfResidence.ToUpperInvariant();
string country1 = existingCompanies[1].CountryOfResidence.ToUpperInvariant();
responseDocument.Results.Should().HaveCount(2);
responseDocument.Results[0].Data.SingleValue.Attributes["name"].Should().Be(existingCompanies[0].Name);
responseDocument.Results[0].Data.SingleValue.Attributes["countryOfResidence"].Should().Be(country0);
responseDocument.Results[1].Data.SingleValue.Attributes["name"].Should().Be(existingCompanies[1].Name);
responseDocument.Results[1].Data.SingleValue.Attributes["countryOfResidence"].Should().Be(country1);
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
List<RecordCompany> companiesInDatabase = await dbContext.RecordCompanies.ToListAsync();
companiesInDatabase.Should().HaveCount(2);
companiesInDatabase[0].Name.Should().Be(existingCompanies[0].Name);
companiesInDatabase[0].CountryOfResidence.Should().Be(existingCompanies[0].CountryOfResidence);
companiesInDatabase[1].Name.Should().Be(existingCompanies[1].Name);
companiesInDatabase[1].CountryOfResidence.Should().Be(existingCompanies[1].CountryOfResidence);
});
hitCounter.HitExtensibilityPoints.Should().BeEquivalentTo(new[]
{
(typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnDeserialize),
(typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnDeserialize),
(typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnSerialize),
(typeof(RecordCompany), ResourceDefinitionHitCounter.ExtensibilityPoint.OnSerialize)
}, options => options.WithStrictOrdering());
}
[Fact]
public async Task Skips_on_update_resource_with_ToOne_relationship()
{
// Arrange
var hitCounter = _testContext.Factory.Services.GetRequiredService<ResourceDefinitionHitCounter>();
MusicTrack existingTrack = _fakers.MusicTrack.Generate();
RecordCompany existingCompany = _fakers.RecordCompany.Generate();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
dbContext.AddInRange(existingTrack, existingCompany);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
atomic__operations = new[]
{
new
{
op = "update",
data = new
{
type = "musicTracks",
id = existingTrack.StringId,
attributes = new
{
},
relationships = new
{
ownedBy = new
{
data = new
{
type = "recordCompanies",
id = existingCompany.StringId
}
}
}
}
}
}
};
const string route = "/operations";
// Act
(HttpResponseMessage httpResponse, Document responseDocument) = await _testContext.ExecutePostAtomicAsync<Document>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.OK);
responseDocument.Results.Should().HaveCount(1);
hitCounter.HitExtensibilityPoints.Should().BeEmpty();
}
[Fact]
public async Task Skips_on_update_ToOne_relationship()
{
// Arrange
var hitCounter = _testContext.Factory.Services.GetRequiredService<ResourceDefinitionHitCounter>();
MusicTrack existingTrack = _fakers.MusicTrack.Generate();
RecordCompany existingCompany = _fakers.RecordCompany.Generate();
await _testContext.RunOnDatabaseAsync(async dbContext =>
{
dbContext.AddInRange(existingTrack, existingCompany);
await dbContext.SaveChangesAsync();
});
var requestBody = new
{
atomic__operations = new[]
{
new
{
op = "update",
@ref = new
{
type = "musicTracks",
id = existingTrack.StringId,
relationship = "ownedBy"
},
data = new
{
type = "recordCompanies",
id = existingCompany.StringId
}
}
}
};
const string route = "/operations";
// Act
(HttpResponseMessage httpResponse, string responseDocument) = await _testContext.ExecutePostAtomicAsync<string>(route, requestBody);
// Assert
httpResponse.Should().HaveStatusCode(HttpStatusCode.NoContent);
responseDocument.Should().BeEmpty();
hitCounter.HitExtensibilityPoints.Should().BeEmpty();
}
}
}
| |
/*
* This file is part of the OpenKinect Project. http://www.openkinect.org
*
* Copyright (c) 2010 individual OpenKinect contributors. See the CONTRIB file
* for details.
*
* This code is licensed to you under the terms of the Apache License, version
* 2.0, or, at your option, the terms of the GNU General Public License,
* version 2.0. See the APACHE20 and GPL2 files for the text of the licenses,
* or the following URLs:
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.gnu.org/licenses/gpl-2.0.txt
*
* If you redistribute this file in source form, modified or unmodified, you
* may:
* 1) Leave this header intact and distribute it under the same terms,
* accompanying it with the APACHE20 and GPL20 files, or
* 2) Delete the Apache 2.0 clause and accompany it with the GPL2 file, or
* 3) Delete the GPL v2 clause and accompany it with the APACHE20 file
* In all cases you must keep the copyright notice intact and include a copy
* of the CONTRIB file.
*
* Binary distributions must follow the binary distribution requirements of
* either License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Windows.Forms;
using System.Threading;
using freenect;
using System.Drawing;
using System.Diagnostics;
namespace KinectDemo
{
/// <summary>
/// Main window of the Kinect.NET demo. Lets the user pick a Kinect device,
/// connect/disconnect, choose video and depth camera modes, control the LED and
/// motor tilt, and shows accelerometer/tilt status plus a live camera preview.
/// </summary>
public partial class MainWindow : Form
{
    /// <summary>
    /// Current kinect device (null while disconnected)
    /// </summary>
    private Kinect kinect = null;

    /// <summary>
    /// Update timer for tilt/motor status (UI-thread timer, fires once per second)
    /// </summary>
    private System.Windows.Forms.Timer statusUpdateTimer = new System.Windows.Forms.Timer();

    /// <summary>
    /// Is a device connected and running? Volatile because it is written by the UI
    /// thread and polled by the update thread's loop.
    /// </summary>
    private volatile bool isRunning = false;

    /// <summary>
    /// Thread for updating status and letting kinect process events
    /// </summary>
    private Thread updateThread = null;

    /// <summary>
    /// Constructor: builds the UI, wires the status timer and populates the device list.
    /// </summary>
    public MainWindow()
    {
        // Initialize UI stuff (designer-generated partial)
        this.InitializeComponents();

        // Initialize update timer (1 second between status refreshes)
        this.statusUpdateTimer.Interval = 1000;
        this.statusUpdateTimer.Tick += this.HandleStatusUpdateTimerTick;

        // Update device list
        this.UpdateDeviceList();
    }

    /// <summary>
    /// Updates the list of devices shown in the GUI and toggles the
    /// connect/disconnect buttons depending on whether any device was found.
    /// </summary>
    private void UpdateDeviceList()
    {
        // Clear old entries
        this.selectDeviceCombo.Items.Clear();

        // Get count of attached devices
        int deviceCount = Kinect.DeviceCount;

        // Fill in combo box with one entry per device index
        for(int i = 0; i < deviceCount; i++)
        {
            this.selectDeviceCombo.Items.Add("Device " + i);
        }

        // Do we have anything to auto-select?
        if(deviceCount > 0)
        {
            this.selectDeviceCombo.SelectedIndex = 0;

            // Enable buttons
            this.connectButton.Visible = this.connectButton.Enabled = true;
            this.disconnectButton.Visible = false;
        }
        else
        {
            // Disable buttons; nothing to connect to
            this.connectButton.Visible = false;
            this.disconnectButton.Visible = false;
        }
    }

    /// <summary>
    /// Update list of video/depth modes offered by the connected device.
    /// Index 0 of each combo is always the "Disabled" pseudo-mode.
    /// </summary>
    private void UpdateModeList()
    {
        // Go through video modes and add em (RGB and IR formats only)
        this.selectVideoModeCombo.Items.Clear();
        this.selectVideoModeCombo.Items.Add("Disabled");
        foreach(var mode in this.kinect.VideoCamera.Modes)
        {
            if(mode.Format == VideoFormat.RGB || mode.Format == VideoFormat.Infrared8Bit || mode.Format == VideoFormat.Infrared10Bit)
            {
                this.selectVideoModeCombo.Items.Add(mode);
            }
        }

        // Autoselect a mode.
        // NOTE(review): index 2 is the SECOND real mode after "Disabled", not the
        // first as the original comment claimed; this also throws if fewer than two
        // qualifying video modes were added. Setting SelectedIndex fires the
        // SelectedIndexChanged handler, which starts the camera. Confirm intent.
        if(this.kinect.VideoCamera.Modes.Length > 0)
        {
            this.selectVideoModeCombo.SelectedIndex = 2;
        }

        // Go through depth modes and add em (10/11-bit formats only)
        this.selectDepthModeCombo.Items.Clear();
        this.selectDepthModeCombo.Items.Add("Disabled");
        foreach(var mode in this.kinect.DepthCamera.Modes)
        {
            if(mode.Format == DepthFormat.Depth10Bit || mode.Format == DepthFormat.Depth11Bit)
            {
                this.selectDepthModeCombo.Items.Add(mode);
            }
        }

        // Autoselect first depth mode (index 1 = first entry after "Disabled")
        if(this.kinect.DepthCamera.Modes.Length > 0)
        {
            this.selectDepthModeCombo.SelectedIndex = 1;
        }
    }

    /// <summary>
    /// Connects to the specified device: opens it, wires camera data handlers,
    /// populates mode lists, updates button state and starts the update thread.
    /// </summary>
    private void Connect(int deviceID)
    {
        // If a device is already connected, disconnect first
        if(this.isRunning)
        {
            this.Disconnect();
        }

        // Now running (set before the update thread is created so its loop runs)
        this.isRunning = true;

        // Create instance
        this.kinect = new Kinect(deviceID);

        // Open kinect
        this.kinect.Open();

        // Set tilt to 0 to start with (fires HandleMotorTiltUpDownValueChanged)
        this.motorTiltUpDown.Value = 0;

        // Setup image handlers
        this.kinect.VideoCamera.DataReceived += HandleKinectVideoCameraDataReceived;
        this.kinect.DepthCamera.DataReceived += HandleKinectDepthCameraDataReceived;

        // LED is set to none to start
        this.kinect.LED.Color = LEDColor.None;
        this.selectLEDColorCombo.SelectedIndex = 0;

        // Update video/depth modes (may auto-start the cameras via the combo handlers)
        this.UpdateModeList();

        // Enable video/depth mode chooser
        this.selectVideoModeGroup.Enabled = true;

        // Setup update thread: pumps libfreenect events and renders preview frames
        // until isRunning goes false or the thread is interrupted by Disconnect().
        this.updateThread = new Thread(delegate()
        {
            while(this.isRunning)
            {
                try
                {
                    // Update instance's status
                    this.kinect.UpdateStatus();

                    // Let preview control render another frame
                    this.previewControl.Render();

                    Kinect.ProcessEvents();
                }
                catch(ThreadInterruptedException e)
                {
                    // Disconnect() interrupts us; exit the loop cleanly.
                    // NOTE(review): 'e' is unused (compiler warning).
                    return;
                }
                catch(Exception ex)
                {
                    // NOTE(review): swallows ALL exceptions from the update loop with
                    // no logging ('ex' unused) — failures here are invisible. Consider
                    // at least Debug.WriteLine(ex).
                }
            }
        });

        // Start updating status
        this.statusUpdateTimer.Enabled = true;

        // Disable connect button and enable the disconnect one
        this.disconnectButton.Visible = true;
        this.connectButton.Visible = false;
        this.refreshButton.Visible = false;
        this.selectDeviceCombo.Enabled = false;

        // Enable content areas
        this.contentPanel.Enabled = true;

        // Start update thread
        this.updateThread.Start();
    }

    /// <summary>
    /// Disconnects from the currently connected Kinect device. Safe to call when
    /// already disconnected (returns immediately).
    /// </summary>
    private void Disconnect()
    {
        // Are we running?
        if(this.isRunning == false)
        {
            return;
        }

        // Stop updating status
        this.statusUpdateTimer.Enabled = false;

        // No longer running (signals the update thread's loop to exit)
        this.isRunning = false;

        // Wait till update thread closes down; Interrupt() breaks it out of any
        // blocking wait, Join() blocks the UI thread until it is gone.
        this.updateThread.Interrupt();
        this.updateThread.Join();
        this.updateThread = null;

        // Disconnect from the kinect
        this.kinect.Close();
        this.kinect = null;

        // Disable video/depth mode chooser
        this.selectVideoModeGroup.Enabled = false;

        // Disable disconnect button and enable the connect/refresh ones
        this.disconnectButton.Visible = false;
        this.connectButton.Visible = true;
        this.refreshButton.Visible = true;
        this.selectDeviceCombo.Enabled = true;

        // Disable content areas
        this.contentPanel.Enabled = false;
    }

    /// <summary>
    /// Depth frame arrived: swap the preview's depth back buffer and hand the fresh
    /// one back to the camera for the next frame.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="DepthCamera.DataReceivedEventArgs"/>
    /// </param>
    private void HandleKinectDepthCameraDataReceived (object sender, DepthCamera.DataReceivedEventArgs e)
    {
        this.previewControl.HandleDepthBackBufferUpdate();
        this.kinect.DepthCamera.DataBuffer = this.previewControl.DepthBackBuffer;
    }

    /// <summary>
    /// Video frame arrived: swap the preview's video back buffer and hand the fresh
    /// one back to the camera for the next frame.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="VideoCamera.DataReceivedEventArgs"/>
    /// </param>
    private void HandleKinectVideoCameraDataReceived (object sender, VideoCamera.DataReceivedEventArgs e)
    {
        this.previewControl.HandleVideoBackBufferUpdate();
        this.kinect.VideoCamera.DataBuffer = this.previewControl.VideoBackBuffer;
    }

    /// <summary>
    /// Update status panes (tilt, accelerometer, FPS in the title bar).
    /// NOTE(review): reads this.kinect without a null check; relies on the timer
    /// being disabled before kinect is nulled in Disconnect().
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleStatusUpdateTimerTick (object sender, EventArgs e)
    {
        this.motorCurrentTiltLabel.Text = "Current Tilt: " + this.kinect.Motor.Tilt;
        this.motorTiltStatusLabel.Text = "Tilt Status: " + this.kinect.Motor.TiltStatus.ToString();
        this.accelerometerXValueLabel.Text = Math.Round(this.kinect.Accelerometer.X, 2).ToString();
        this.accelerometerYValueLabel.Text = Math.Round(this.kinect.Accelerometer.Y, 2).ToString();
        this.accelerometerZValueLabel.Text = Math.Round(this.kinect.Accelerometer.Z, 2).ToString();
        this.Text = "Kinect.NET Demo - Video FPS: " + this.previewControl.VideoFPS + " | Depth FPS: " + this.previewControl.DepthFPS;
    }

    /// <summary>
    /// Selected different LED color: maps the combo item's text to the LEDColor enum.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleSelectLEDColorComboSelectedIndexChanged (object sender, EventArgs e)
    {
        this.kinect.LED.Color = (LEDColor)Enum.Parse(typeof(LEDColor), this.selectLEDColorCombo.SelectedItem.ToString());
    }

    /// <summary>
    /// Motor tilt changed: forwards the numeric up/down value to the motor.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleMotorTiltUpDownValueChanged (object sender, EventArgs e)
    {
        this.kinect.Motor.Tilt = (double)this.motorTiltUpDown.Value;
    }

    /// <summary>
    /// Handle form being closed. Here we make sure we are closed down.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="FormClosingEventArgs"/>
    /// </param>
    private void HandleFormClosing (object sender, FormClosingEventArgs e)
    {
        this.Disconnect();
    }

    /// <summary>
    /// Handle refresh button: re-enumerate attached devices.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleRefreshButtonClick (object sender, EventArgs e)
    {
        this.UpdateDeviceList();
    }

    /// <summary>
    /// Handle connect button: connect to the device selected in the combo box.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleConnectButtonClick (object sender, EventArgs e)
    {
        this.Connect(this.selectDeviceCombo.SelectedIndex);
    }

    /// <summary>
    /// Handle disconnect button
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleDisconnectButtonClick (object sender, EventArgs e)
    {
        this.Disconnect();
    }

    /// <summary>
    /// Handle about button: show the modal About dialog.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleAboutButtonClick (object sender, EventArgs e)
    {
        new AboutWindow().ShowDialog();
    }

    /// <summary>
    /// Handle depth mode being changed: stop the depth camera, and (unless
    /// "Disabled" was chosen) apply the new mode and restart it.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleSelectDepthModeComboSelectedIndexChanged (object sender, EventArgs e)
    {
        // Check to see if we are actually connected
        if(this.isRunning == false)
        {
            // Not running, shouldn't even be here
            return;
        }

        // Get index selected
        int index = this.selectDepthModeCombo.SelectedIndex;

        // 0 means "Disabled", otherwise, it's a depth format
        if(index == 0)
        {
            this.kinect.DepthCamera.Stop();
        }
        else if(index > 0)
        {
            var mode = (DepthFrameMode)this.selectDepthModeCombo.SelectedItem;
            this.kinect.DepthCamera.Stop();
            this.kinect.DepthCamera.Mode = mode;
            this.previewControl.DepthMode = mode;

            // Start up camera again
            this.kinect.DepthCamera.Start();
        }
    }

    /// <summary>
    /// Handle video mode being changed: stop the video camera, and (unless
    /// "Disabled" was chosen) apply the new mode and restart it.
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="EventArgs"/>
    /// </param>
    private void HandleSelectVideoModeComboSelectedIndexChanged (object sender, EventArgs e)
    {
        // Check to see if we are actually connected
        if(this.isRunning == false)
        {
            // Not running, shouldn't even be here
            return;
        }

        // Get index selected
        int index = this.selectVideoModeCombo.SelectedIndex;

        // 0 means "Disabled", otherwise, it's a video format
        if(index == 0)
        {
            // Disabled
            this.kinect.VideoCamera.Stop();
        }
        else if(index > 0)
        {
            var mode = (VideoFrameMode)this.selectVideoModeCombo.SelectedItem;
            this.kinect.VideoCamera.Stop();
            this.kinect.VideoCamera.Mode = mode;
            this.previewControl.VideoMode = mode;

            // Start up camera again
            this.kinect.VideoCamera.Start();
        }
    }

    /// <summary>
    /// Depth preview window was closed by the user: cancel the close and instead
    /// select "Disabled" in the depth mode combo (which stops the camera).
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="FormClosingEventArgs"/>
    /// </param>
    private void HandleDepthPreviewWindowFormClosing (object sender, FormClosingEventArgs e)
    {
        e.Cancel = true;
        this.selectDepthModeCombo.SelectedIndex = 0;
    }

    /// <summary>
    /// Video preview window was closed by the user: cancel the close and instead
    /// select "Disabled" in the video mode combo (which stops the camera).
    /// </summary>
    /// <param name="sender">
    /// A <see cref="System.Object"/>
    /// </param>
    /// <param name="e">
    /// A <see cref="FormClosingEventArgs"/>
    /// </param>
    private void HandleVideoPreviewWindowFormClosing (object sender, FormClosingEventArgs e)
    {
        e.Cancel = true;
        this.selectVideoModeCombo.SelectedIndex = 0;
    }

    /// <summary>
    /// About Window: small modal dialog built entirely in code (logo, title,
    /// author, link to the OpenKinect wiki).
    /// </summary>
    private class AboutWindow : Form
    {
        public AboutWindow()
        {
            //
            // linkLabel — opens the OpenKinect wiki page in the default browser
            //
            LinkLabel linkLabel = new LinkLabel();
            linkLabel.Text = "openkinect.org";
            linkLabel.Click += delegate(object sender, EventArgs e)
            {
                Process.Start("http://openkinect.org/wiki/CSharp_Wrapper");
            };
            linkLabel.Dock = DockStyle.Top;

            //
            // authorLabel
            //
            Label authorLabel = new Label();
            authorLabel.Text = "by Aditya Gaddam";
            authorLabel.Dock = DockStyle.Top;

            //
            // titleLabel
            //
            Label titleLabel = new Label();
            titleLabel.Text = "Kinect.NET Demo";
            titleLabel.Font = new Font(this.Font.FontFamily, 14.0f);
            titleLabel.Dock = DockStyle.Top;

            //
            // logoImageBox
            // NOTE(review): throws FileNotFoundException if openkinect_logo.png is
            // missing from the working directory.
            //
            PictureBox logoPictureBox = new PictureBox();
            logoPictureBox.Image = Image.FromFile("openkinect_logo.png");
            logoPictureBox.Dock = DockStyle.Left;
            logoPictureBox.Width = 96;

            //
            // aboutContentPanel
            //
            Panel aboutContentPanel = new Panel();
            aboutContentPanel.Dock = DockStyle.Fill;
            aboutContentPanel.Controls.Add(linkLabel);
            aboutContentPanel.Controls.Add(authorLabel);
            aboutContentPanel.Controls.Add(titleLabel);
            aboutContentPanel.Padding = new Padding(7, 0, 0, 0);

            //
            // AboutWindow
            //
            this.ShowInTaskbar = false;
            //this.FormBorderStyle = FormBorderStyle.FixedSingle;
            this.MinimizeBox = false;
            this.MaximizeBox = false;
            this.StartPosition = FormStartPosition.CenterScreen;
            this.Text = "About";
            this.Width = 350;
            this.Height = 140;
            this.Font = new System.Drawing.Font(this.Font.FontFamily, 9.0f);
            this.BackColor = Color.White;
            this.Controls.Add(aboutContentPanel);
            this.Controls.Add(logoPictureBox);
            this.Padding = new Padding(7);
        }
    }
}
}
| |
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------
// NOTE: this file was generated from 'xd.xml'
namespace System.ServiceModel
{
using System.Xml;
using System.Runtime.CompilerServices;
class ServiceModelStringsVersion1 : ServiceModelStrings
{
public const string String0 = "mustUnderstand";
public const string String1 = "Envelope";
public const string String2 = "http://www.w3.org/2003/05/soap-envelope";
public const string String3 = "http://www.w3.org/2005/08/addressing";
public const string String4 = "Header";
public const string String5 = "Action";
public const string String6 = "To";
public const string String7 = "Body";
public const string String8 = "Algorithm";
public const string String9 = "RelatesTo";
public const string String10 = "http://www.w3.org/2005/08/addressing/anonymous";
public const string String11 = "URI";
public const string String12 = "Reference";
public const string String13 = "MessageID";
public const string String14 = "Id";
public const string String15 = "Identifier";
public const string String16 = "http://schemas.xmlsoap.org/ws/2005/02/rm";
public const string String17 = "Transforms";
public const string String18 = "Transform";
public const string String19 = "DigestMethod";
public const string String20 = "DigestValue";
public const string String21 = "Address";
public const string String22 = "ReplyTo";
public const string String23 = "SequenceAcknowledgement";
public const string String24 = "AcknowledgementRange";
public const string String25 = "Upper";
public const string String26 = "Lower";
public const string String27 = "BufferRemaining";
public const string String28 = "http://schemas.microsoft.com/ws/2006/05/rm";
public const string String29 = "http://schemas.xmlsoap.org/ws/2005/02/rm/SequenceAcknowledgement";
public const string String30 = "SecurityTokenReference";
public const string String31 = "Sequence";
public const string String32 = "MessageNumber";
public const string String33 = "http://www.w3.org/2000/09/xmldsig#";
public const string String34 = "http://www.w3.org/2000/09/xmldsig#enveloped-signature";
public const string String35 = "KeyInfo";
public const string String36 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd";
public const string String37 = "http://www.w3.org/2001/04/xmlenc#";
public const string String38 = "http://schemas.xmlsoap.org/ws/2005/02/sc";
public const string String39 = "DerivedKeyToken";
public const string String40 = "Nonce";
public const string String41 = "Signature";
public const string String42 = "SignedInfo";
public const string String43 = "CanonicalizationMethod";
public const string String44 = "SignatureMethod";
public const string String45 = "SignatureValue";
public const string String46 = "DataReference";
public const string String47 = "EncryptedData";
public const string String48 = "EncryptionMethod";
public const string String49 = "CipherData";
public const string String50 = "CipherValue";
public const string String51 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd";
public const string String52 = "Security";
public const string String53 = "Timestamp";
public const string String54 = "Created";
public const string String55 = "Expires";
public const string String56 = "Length";
public const string String57 = "ReferenceList";
public const string String58 = "ValueType";
public const string String59 = "Type";
public const string String60 = "EncryptedHeader";
public const string String61 = "http://docs.oasis-open.org/wss/oasis-wss-wssecurity-secext-1.1.xsd";
public const string String62 = "RequestSecurityTokenResponseCollection";
public const string String63 = "http://schemas.xmlsoap.org/ws/2005/02/trust";
public const string String64 = "http://schemas.xmlsoap.org/ws/2005/02/trust#BinarySecret";
public const string String65 = "http://schemas.microsoft.com/ws/2006/02/transactions";
public const string String66 = "s";
public const string String67 = "Fault";
public const string String68 = "MustUnderstand";
public const string String69 = "role";
public const string String70 = "relay";
public const string String71 = "Code";
public const string String72 = "Reason";
public const string String73 = "Text";
public const string String74 = "Node";
public const string String75 = "Role";
public const string String76 = "Detail";
public const string String77 = "Value";
public const string String78 = "Subcode";
public const string String79 = "NotUnderstood";
public const string String80 = "qname";
public const string String81 = "";
public const string String82 = "From";
public const string String83 = "FaultTo";
public const string String84 = "EndpointReference";
public const string String85 = "PortType";
public const string String86 = "ServiceName";
public const string String87 = "PortName";
public const string String88 = "ReferenceProperties";
public const string String89 = "RelationshipType";
public const string String90 = "Reply";
public const string String91 = "a";
public const string String92 = "http://schemas.xmlsoap.org/ws/2006/02/addressingidentity";
public const string String93 = "Identity";
public const string String94 = "Spn";
public const string String95 = "Upn";
public const string String96 = "Rsa";
public const string String97 = "Dns";
public const string String98 = "X509v3Certificate";
public const string String99 = "http://www.w3.org/2005/08/addressing/fault";
public const string String100 = "ReferenceParameters";
public const string String101 = "IsReferenceParameter";
public const string String102 = "http://www.w3.org/2005/08/addressing/reply";
public const string String103 = "http://www.w3.org/2005/08/addressing/none";
public const string String104 = "Metadata";
public const string String105 = "http://schemas.xmlsoap.org/ws/2004/08/addressing";
public const string String106 = "http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous";
public const string String107 = "http://schemas.xmlsoap.org/ws/2004/08/addressing/fault";
public const string String108 = "http://schemas.xmlsoap.org/ws/2004/06/addressingex";
public const string String109 = "RedirectTo";
public const string String110 = "Via";
public const string String111 = "http://www.w3.org/2001/10/xml-exc-c14n#";
public const string String112 = "PrefixList";
public const string String113 = "InclusiveNamespaces";
public const string String114 = "ec";
public const string String115 = "SecurityContextToken";
public const string String116 = "Generation";
public const string String117 = "Label";
public const string String118 = "Offset";
public const string String119 = "Properties";
public const string String120 = "Cookie";
public const string String121 = "wsc";
public const string String122 = "http://schemas.xmlsoap.org/ws/2004/04/sc";
public const string String123 = "http://schemas.xmlsoap.org/ws/2004/04/security/sc/dk";
public const string String124 = "http://schemas.xmlsoap.org/ws/2004/04/security/sc/sct";
public const string String125 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/RST/SCT";
public const string String126 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/RSTR/SCT";
public const string String127 = "RenewNeeded";
public const string String128 = "BadContextToken";
public const string String129 = "c";
public const string String130 = "http://schemas.xmlsoap.org/ws/2005/02/sc/dk";
public const string String131 = "http://schemas.xmlsoap.org/ws/2005/02/sc/sct";
public const string String132 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/SCT";
public const string String133 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RSTR/SCT";
public const string String134 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/SCT/Renew";
public const string String135 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RSTR/SCT/Renew";
public const string String136 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/SCT/Cancel";
public const string String137 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RSTR/SCT/Cancel";
public const string String138 = "http://www.w3.org/2001/04/xmlenc#aes128-cbc";
public const string String139 = "http://www.w3.org/2001/04/xmlenc#kw-aes128";
public const string String140 = "http://www.w3.org/2001/04/xmlenc#aes192-cbc";
public const string String141 = "http://www.w3.org/2001/04/xmlenc#kw-aes192";
// XML Encryption / XML Signature algorithm URIs and security namespaces.
public const string String142 = "http://www.w3.org/2001/04/xmlenc#aes256-cbc";
public const string String143 = "http://www.w3.org/2001/04/xmlenc#kw-aes256";
public const string String144 = "http://www.w3.org/2001/04/xmlenc#des-cbc";
public const string String145 = "http://www.w3.org/2000/09/xmldsig#dsa-sha1";
public const string String146 = "http://www.w3.org/2001/10/xml-exc-c14n#WithComments";
public const string String147 = "http://www.w3.org/2000/09/xmldsig#hmac-sha1";
public const string String148 = "http://www.w3.org/2001/04/xmldsig-more#hmac-sha256";
public const string String149 = "http://schemas.xmlsoap.org/ws/2005/02/sc/dk/p_sha1";
public const string String150 = "http://www.w3.org/2001/04/xmlenc#ripemd160";
public const string String151 = "http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p";
public const string String152 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1";
public const string String153 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256";
public const string String154 = "http://www.w3.org/2001/04/xmlenc#rsa-1_5";
public const string String155 = "http://www.w3.org/2000/09/xmldsig#sha1";
public const string String156 = "http://www.w3.org/2001/04/xmlenc#sha256";
public const string String157 = "http://www.w3.org/2001/04/xmlenc#sha512";
public const string String158 = "http://www.w3.org/2001/04/xmlenc#tripledes-cbc";
public const string String159 = "http://www.w3.org/2001/04/xmlenc#kw-tripledes";
public const string String160 = "http://schemas.xmlsoap.org/2005/02/trust/tlsnego#TLS_Wrap";
public const string String161 = "http://schemas.xmlsoap.org/2005/02/trust/spnego#GSS_Wrap";
public const string String162 = "http://schemas.microsoft.com/ws/2006/05/security";
// WS-Security (SOAP Message Security) element/attribute names and token-profile URIs.
public const string String163 = "dnse";
public const string String164 = "o";
public const string String165 = "Password";
public const string String166 = "PasswordText";
public const string String167 = "Username";
public const string String168 = "UsernameToken";
public const string String169 = "BinarySecurityToken";
public const string String170 = "EncodingType";
public const string String171 = "KeyIdentifier";
public const string String172 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Base64Binary";
public const string String173 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#HexBinary";
public const string String174 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-soap-message-security-1.0#Text";
public const string String175 = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-x509-token-profile-1.0#X509SubjectKeyIdentifier";
public const string String176 = "http://docs.oasis-open.org/wss/oasis-wss-kerberos-token-profile-1.1#GSS_Kerberosv5_AP_REQ";
public const string String177 = "http://docs.oasis-open.org/wss/oasis-wss-kerberos-token-profile-1.1#GSS_Kerberosv5_AP_REQ1510";
public const string String178 = "http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.0#SAMLAssertionID";
public const string String179 = "Assertion";
public const string String180 = "urn:oasis:names:tc:SAML:1.0:assertion";
public const string String181 = "http://docs.oasis-open.org/wss/oasis-wss-rel-token-profile-1.0.pdf#license";
public const string String182 = "FailedAuthentication";
public const string String183 = "InvalidSecurityToken";
public const string String184 = "InvalidSecurity";
public const string String185 = "k";
public const string String186 = "SignatureConfirmation";
public const string String187 = "TokenType";
public const string String188 = "http://docs.oasis-open.org/wss/oasis-wss-soap-message-security-1.1#ThumbprintSHA1";
public const string String189 = "http://docs.oasis-open.org/wss/oasis-wss-soap-message-security-1.1#EncryptedKey";
public const string String190 = "http://docs.oasis-open.org/wss/oasis-wss-soap-message-security-1.1#EncryptedKeySHA1";
public const string String191 = "http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.1#SAMLV1.1";
public const string String192 = "http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.1#SAMLV2.0";
public const string String193 = "http://docs.oasis-open.org/wss/oasis-wss-saml-token-profile-1.1#SAMLID";
// WS-Trust (2004/04 and 2005/02) request/response element names and action URIs.
public const string String194 = "AUTH-HASH";
public const string String195 = "RequestSecurityTokenResponse";
public const string String196 = "KeySize";
public const string String197 = "RequestedTokenReference";
public const string String198 = "AppliesTo";
public const string String199 = "Authenticator";
public const string String200 = "CombinedHash";
public const string String201 = "BinaryExchange";
public const string String202 = "Lifetime";
public const string String203 = "RequestedSecurityToken";
public const string String204 = "Entropy";
public const string String205 = "RequestedProofToken";
public const string String206 = "ComputedKey";
public const string String207 = "RequestSecurityToken";
public const string String208 = "RequestType";
public const string String209 = "Context";
public const string String210 = "BinarySecret";
public const string String211 = "http://schemas.xmlsoap.org/ws/2005/02/trust/spnego";
// NOTE(review): the leading space below is present in the generated table;
// confirm against the original dictionary source before "fixing" it.
public const string String212 = " http://schemas.xmlsoap.org/ws/2005/02/trust/tlsnego";
public const string String213 = "wst";
public const string String214 = "http://schemas.xmlsoap.org/ws/2004/04/trust";
public const string String215 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/RST/Issue";
public const string String216 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/RSTR/Issue";
public const string String217 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/Issue";
public const string String218 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/CK/PSHA1";
public const string String219 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/SymmetricKey";
public const string String220 = "http://schemas.xmlsoap.org/ws/2004/04/security/trust/Nonce";
public const string String221 = "KeyType";
public const string String222 = "http://schemas.xmlsoap.org/ws/2004/04/trust/SymmetricKey";
public const string String223 = "http://schemas.xmlsoap.org/ws/2004/04/trust/PublicKey";
public const string String224 = "Claims";
public const string String225 = "InvalidRequest";
public const string String226 = "RequestFailed";
public const string String227 = "SignWith";
public const string String228 = "EncryptWith";
public const string String229 = "EncryptionAlgorithm";
public const string String230 = "CanonicalizationAlgorithm";
public const string String231 = "ComputedKeyAlgorithm";
public const string String232 = "UseKey";
public const string String233 = "http://schemas.microsoft.com/net/2004/07/secext/WS-SPNego";
public const string String234 = "http://schemas.microsoft.com/net/2004/07/secext/TLSNego";
public const string String235 = "t";
public const string String236 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/Issue";
public const string String237 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RSTR/Issue";
public const string String238 = "http://schemas.xmlsoap.org/ws/2005/02/trust/Issue";
public const string String239 = "http://schemas.xmlsoap.org/ws/2005/02/trust/SymmetricKey";
public const string String240 = "http://schemas.xmlsoap.org/ws/2005/02/trust/CK/PSHA1";
public const string String241 = "http://schemas.xmlsoap.org/ws/2005/02/trust/Nonce";
public const string String242 = "RenewTarget";
public const string String243 = "CancelTarget";
public const string String244 = "RequestedTokenCancelled";
public const string String245 = "RequestedAttachedReference";
public const string String246 = "RequestedUnattachedReference";
public const string String247 = "IssuedTokens";
public const string String248 = "http://schemas.xmlsoap.org/ws/2005/02/trust/Renew";
public const string String249 = "http://schemas.xmlsoap.org/ws/2005/02/trust/Cancel";
public const string String250 = "http://schemas.xmlsoap.org/ws/2005/02/trust/PublicKey";
// SAML 1.x assertion element/attribute names and confirmation-method URNs.
public const string String251 = "Access";
public const string String252 = "AccessDecision";
public const string String253 = "Advice";
public const string String254 = "AssertionID";
public const string String255 = "AssertionIDReference";
public const string String256 = "Attribute";
public const string String257 = "AttributeName";
public const string String258 = "AttributeNamespace";
public const string String259 = "AttributeStatement";
public const string String260 = "AttributeValue";
public const string String261 = "Audience";
public const string String262 = "AudienceRestrictionCondition";
public const string String263 = "AuthenticationInstant";
public const string String264 = "AuthenticationMethod";
public const string String265 = "AuthenticationStatement";
public const string String266 = "AuthorityBinding";
public const string String267 = "AuthorityKind";
public const string String268 = "AuthorizationDecisionStatement";
public const string String269 = "Binding";
public const string String270 = "Condition";
public const string String271 = "Conditions";
public const string String272 = "Decision";
public const string String273 = "DoNotCacheCondition";
public const string String274 = "Evidence";
public const string String275 = "IssueInstant";
public const string String276 = "Issuer";
public const string String277 = "Location";
public const string String278 = "MajorVersion";
public const string String279 = "MinorVersion";
public const string String280 = "NameIdentifier";
public const string String281 = "Format";
public const string String282 = "NameQualifier";
public const string String283 = "Namespace";
public const string String284 = "NotBefore";
public const string String285 = "NotOnOrAfter";
public const string String286 = "saml";
public const string String287 = "Statement";
public const string String288 = "Subject";
public const string String289 = "SubjectConfirmation";
public const string String290 = "SubjectConfirmationData";
public const string String291 = "ConfirmationMethod";
public const string String292 = "urn:oasis:names:tc:SAML:1.0:cm:holder-of-key";
public const string String293 = "urn:oasis:names:tc:SAML:1.0:cm:sender-vouches";
public const string String294 = "SubjectLocality";
public const string String295 = "DNSAddress";
public const string String296 = "IPAddress";
public const string String297 = "SubjectStatement";
public const string String298 = "urn:oasis:names:tc:SAML:1.0:am:unspecified";
public const string String299 = "xmlns";
public const string String300 = "Resource";
public const string String301 = "UserName";
public const string String302 = "urn:oasis:names:tc:SAML:1.1:nameid-format:WindowsDomainQualifiedName";
public const string String303 = "EmailName";
public const string String304 = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress";
// Duplex channel and XML Encryption/Signature structural element names.
public const string String305 = "u";
public const string String306 = "ChannelInstance";
public const string String307 = "http://schemas.microsoft.com/ws/2005/02/duplex";
public const string String308 = "Encoding";
public const string String309 = "MimeType";
public const string String310 = "CarriedKeyName";
public const string String311 = "Recipient";
public const string String312 = "EncryptedKey";
public const string String313 = "KeyReference";
public const string String314 = "e";
public const string String315 = "http://www.w3.org/2001/04/xmlenc#Element";
public const string String316 = "http://www.w3.org/2001/04/xmlenc#Content";
public const string String317 = "KeyName";
public const string String318 = "MgmtData";
public const string String319 = "KeyValue";
public const string String320 = "RSAKeyValue";
public const string String321 = "Modulus";
public const string String322 = "Exponent";
public const string String323 = "X509Data";
public const string String324 = "X509IssuerSerial";
public const string String325 = "X509IssuerName";
public const string String326 = "X509SerialNumber";
public const string String327 = "X509Certificate";
// WS-ReliableMessaging (2005/02) element names and action URIs.
public const string String328 = "AckRequested";
public const string String329 = "http://schemas.xmlsoap.org/ws/2005/02/rm/AckRequested";
public const string String330 = "AcksTo";
public const string String331 = "Accept";
public const string String332 = "CreateSequence";
public const string String333 = "http://schemas.xmlsoap.org/ws/2005/02/rm/CreateSequence";
public const string String334 = "CreateSequenceRefused";
public const string String335 = "CreateSequenceResponse";
public const string String336 = "http://schemas.xmlsoap.org/ws/2005/02/rm/CreateSequenceResponse";
public const string String337 = "FaultCode";
public const string String338 = "InvalidAcknowledgement";
public const string String339 = "LastMessage";
public const string String340 = "http://schemas.xmlsoap.org/ws/2005/02/rm/LastMessage";
public const string String341 = "LastMessageNumberExceeded";
public const string String342 = "MessageNumberRollover";
public const string String343 = "Nack";
public const string String344 = "netrm";
public const string String345 = "Offer";
public const string String346 = "r";
public const string String347 = "SequenceFault";
public const string String348 = "SequenceTerminated";
public const string String349 = "TerminateSequence";
public const string String350 = "http://schemas.xmlsoap.org/ws/2005/02/rm/TerminateSequence";
public const string String351 = "UnknownSequence";
// Transactions: OleTx, WS-Coordination and WS-AtomicTransaction names and URIs.
public const string String352 = "http://schemas.microsoft.com/ws/2006/02/tx/oletx";
public const string String353 = "oletx";
public const string String354 = "OleTxTransaction";
public const string String355 = "PropagationToken";
public const string String356 = "http://schemas.xmlsoap.org/ws/2004/10/wscoor";
public const string String357 = "wscoor";
public const string String358 = "CreateCoordinationContext";
public const string String359 = "CreateCoordinationContextResponse";
public const string String360 = "CoordinationContext";
public const string String361 = "CurrentContext";
public const string String362 = "CoordinationType";
public const string String363 = "RegistrationService";
public const string String364 = "Register";
public const string String365 = "RegisterResponse";
public const string String366 = "ProtocolIdentifier";
public const string String367 = "CoordinatorProtocolService";
public const string String368 = "ParticipantProtocolService";
public const string String369 = "http://schemas.xmlsoap.org/ws/2004/10/wscoor/CreateCoordinationContext";
public const string String370 = "http://schemas.xmlsoap.org/ws/2004/10/wscoor/CreateCoordinationContextResponse";
public const string String371 = "http://schemas.xmlsoap.org/ws/2004/10/wscoor/Register";
public const string String372 = "http://schemas.xmlsoap.org/ws/2004/10/wscoor/RegisterResponse";
public const string String373 = "http://schemas.xmlsoap.org/ws/2004/10/wscoor/fault";
public const string String374 = "ActivationCoordinatorPortType";
public const string String375 = "RegistrationCoordinatorPortType";
public const string String376 = "InvalidState";
public const string String377 = "InvalidProtocol";
public const string String378 = "InvalidParameters";
public const string String379 = "NoActivity";
public const string String380 = "ContextRefused";
public const string String381 = "AlreadyRegistered";
public const string String382 = "http://schemas.xmlsoap.org/ws/2004/10/wsat";
public const string String383 = "wsat";
public const string String384 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Completion";
public const string String385 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Durable2PC";
public const string String386 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Volatile2PC";
public const string String387 = "Prepare";
public const string String388 = "Prepared";
public const string String389 = "ReadOnly";
public const string String390 = "Commit";
public const string String391 = "Rollback";
public const string String392 = "Committed";
public const string String393 = "Aborted";
public const string String394 = "Replay";
public const string String395 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Commit";
public const string String396 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Rollback";
public const string String397 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Committed";
public const string String398 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Aborted";
public const string String399 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Prepare";
public const string String400 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Prepared";
public const string String401 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/ReadOnly";
public const string String402 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/Replay";
public const string String403 = "http://schemas.xmlsoap.org/ws/2004/10/wsat/fault";
public const string String404 = "CompletionCoordinatorPortType";
public const string String405 = "CompletionParticipantPortType";
public const string String406 = "CoordinatorPortType";
public const string String407 = "ParticipantPortType";
public const string String408 = "InconsistentInternalState";
public const string String409 = "mstx";
public const string String410 = "Enlistment";
public const string String411 = "protocol";
public const string String412 = "LocalTransactionId";
public const string String413 = "IsolationLevel";
public const string String414 = "IsolationFlags";
public const string String415 = "Description";
public const string String416 = "Loopback";
public const string String417 = "RegisterInfo";
public const string String418 = "ContextId";
public const string String419 = "TokenId";
public const string String420 = "AccessDenied";
public const string String421 = "InvalidPolicy";
public const string String422 = "CoordinatorRegistrationFailed";
public const string String423 = "TooManyEnlistments";
public const string String424 = "Disabled";
// Diagnostics, peer-channel and routing names and URIs.
public const string String425 = "ActivityId";
public const string String426 = "http://schemas.microsoft.com/2004/09/ServiceModel/Diagnostics";
public const string String427 = "http://docs.oasis-open.org/wss/oasis-wss-kerberos-token-profile-1.1#Kerberosv5APREQSHA1";
public const string String428 = "http://schemas.xmlsoap.org/ws/2002/12/policy";
public const string String429 = "FloodMessage";
public const string String430 = "LinkUtility";
public const string String431 = "Hops";
public const string String432 = "http://schemas.microsoft.com/net/2006/05/peer/HopCount";
public const string String433 = "PeerVia";
public const string String434 = "http://schemas.microsoft.com/net/2006/05/peer";
public const string String435 = "PeerFlooder";
public const string String436 = "PeerTo";
public const string String437 = "http://schemas.microsoft.com/ws/2005/05/routing";
public const string String438 = "PacketRoutable";
public const string String439 = "http://schemas.microsoft.com/ws/2005/05/addressing/none";
public const string String440 = "http://schemas.microsoft.com/ws/2005/05/envelope/none";
// XML Schema namespaces and built-in simple-type names.
public const string String441 = "http://www.w3.org/2001/XMLSchema-instance";
public const string String442 = "http://www.w3.org/2001/XMLSchema";
public const string String443 = "nil";
public const string String444 = "type";
public const string String445 = "char";
public const string String446 = "boolean";
public const string String447 = "byte";
public const string String448 = "unsignedByte";
public const string String449 = "short";
public const string String450 = "unsignedShort";
public const string String451 = "int";
public const string String452 = "unsignedInt";
public const string String453 = "long";
public const string String454 = "unsignedLong";
public const string String455 = "float";
public const string String456 = "double";
public const string String457 = "decimal";
public const string String458 = "dateTime";
public const string String459 = "string";
public const string String460 = "base64Binary";
public const string String461 = "anyType";
public const string String462 = "duration";
public const string String463 = "guid";
public const string String464 = "anyURI";
public const string String465 = "QName";
public const string String466 = "time";
public const string String467 = "date";
public const string String468 = "hexBinary";
public const string String469 = "gYearMonth";
public const string String470 = "gYear";
public const string String471 = "gMonthDay";
public const string String472 = "gDay";
public const string String473 = "gMonth";
public const string String474 = "integer";
public const string String475 = "positiveInteger";
public const string String476 = "negativeInteger";
public const string String477 = "nonPositiveInteger";
public const string String478 = "nonNegativeInteger";
public const string String479 = "normalizedString";
// SOAP 1.1 envelope namespace and fault element names.
public const string String480 = "ConnectionLimitReached";
public const string String481 = "http://schemas.xmlsoap.org/soap/envelope/";
public const string String482 = "actor";
public const string String483 = "faultcode";
public const string String484 = "faultstring";
public const string String485 = "faultactor";
public const string String486 = "detail";
// Backing table for indexed lookup; replaces the generated 487-case switch.
// Invariant: s_strings[i] == StringI for every i in [0, 487).
private static readonly string[] s_strings =
{
    String0, String1, String2, String3, String4, String5, String6, String7, String8, String9,
    String10, String11, String12, String13, String14, String15, String16, String17, String18, String19,
    String20, String21, String22, String23, String24, String25, String26, String27, String28, String29,
    String30, String31, String32, String33, String34, String35, String36, String37, String38, String39,
    String40, String41, String42, String43, String44, String45, String46, String47, String48, String49,
    String50, String51, String52, String53, String54, String55, String56, String57, String58, String59,
    String60, String61, String62, String63, String64, String65, String66, String67, String68, String69,
    String70, String71, String72, String73, String74, String75, String76, String77, String78, String79,
    String80, String81, String82, String83, String84, String85, String86, String87, String88, String89,
    String90, String91, String92, String93, String94, String95, String96, String97, String98, String99,
    String100, String101, String102, String103, String104, String105, String106, String107, String108, String109,
    String110, String111, String112, String113, String114, String115, String116, String117, String118, String119,
    String120, String121, String122, String123, String124, String125, String126, String127, String128, String129,
    String130, String131, String132, String133, String134, String135, String136, String137, String138, String139,
    String140, String141, String142, String143, String144, String145, String146, String147, String148, String149,
    String150, String151, String152, String153, String154, String155, String156, String157, String158, String159,
    String160, String161, String162, String163, String164, String165, String166, String167, String168, String169,
    String170, String171, String172, String173, String174, String175, String176, String177, String178, String179,
    String180, String181, String182, String183, String184, String185, String186, String187, String188, String189,
    String190, String191, String192, String193, String194, String195, String196, String197, String198, String199,
    String200, String201, String202, String203, String204, String205, String206, String207, String208, String209,
    String210, String211, String212, String213, String214, String215, String216, String217, String218, String219,
    String220, String221, String222, String223, String224, String225, String226, String227, String228, String229,
    String230, String231, String232, String233, String234, String235, String236, String237, String238, String239,
    String240, String241, String242, String243, String244, String245, String246, String247, String248, String249,
    String250, String251, String252, String253, String254, String255, String256, String257, String258, String259,
    String260, String261, String262, String263, String264, String265, String266, String267, String268, String269,
    String270, String271, String272, String273, String274, String275, String276, String277, String278, String279,
    String280, String281, String282, String283, String284, String285, String286, String287, String288, String289,
    String290, String291, String292, String293, String294, String295, String296, String297, String298, String299,
    String300, String301, String302, String303, String304, String305, String306, String307, String308, String309,
    String310, String311, String312, String313, String314, String315, String316, String317, String318, String319,
    String320, String321, String322, String323, String324, String325, String326, String327, String328, String329,
    String330, String331, String332, String333, String334, String335, String336, String337, String338, String339,
    String340, String341, String342, String343, String344, String345, String346, String347, String348, String349,
    String350, String351, String352, String353, String354, String355, String356, String357, String358, String359,
    String360, String361, String362, String363, String364, String365, String366, String367, String368, String369,
    String370, String371, String372, String373, String374, String375, String376, String377, String378, String379,
    String380, String381, String382, String383, String384, String385, String386, String387, String388, String389,
    String390, String391, String392, String393, String394, String395, String396, String397, String398, String399,
    String400, String401, String402, String403, String404, String405, String406, String407, String408, String409,
    String410, String411, String412, String413, String414, String415, String416, String417, String418, String419,
    String420, String421, String422, String423, String424, String425, String426, String427, String428, String429,
    String430, String431, String432, String433, String434, String435, String436, String437, String438, String439,
    String440, String441, String442, String443, String444, String445, String446, String447, String448, String449,
    String450, String451, String452, String453, String454, String455, String456, String457, String458, String459,
    String460, String461, String462, String463, String464, String465, String466, String467, String468, String469,
    String470, String471, String472, String473, String474, String475, String476, String477, String478, String479,
    String480, String481, String482, String483, String484, String485, String486,
};

/// <summary>Number of strings in this dictionary (487).</summary>
public override int Count { get { return s_strings.Length; } }

/// <summary>
/// Returns the dictionary string at <paramref name="index"/>, or null when the
/// index is outside [0, Count) — the same behavior as the original switch's
/// default branch (the debug assertion is a no-op in release builds).
/// </summary>
public override string this[int index]
{
    get
    {
        DiagnosticUtility.DebugAssert(index >= 0 && index < this.Count, "check index");
        // Single unsigned comparison covers both the negative and too-large cases.
        return (uint)index < (uint)s_strings.Length ? s_strings[index] : null;
    }
}
}
}
| |
/* Generated SBE (Simple Binary Encoding) message codec */
using System;
using System.Text;
using System.Collections.Generic;
using Adaptive.Agrona;
namespace Adaptive.Archiver.Codecs {
public class RecordingSignalEventDecoder
{
    // Flyweight decoder for the RecordingSignalEvent message
    // (template 24, schema 101, version 6, fixed block of 44 bytes).
    public const ushort BLOCK_LENGTH = 44;
    public const ushort TEMPLATE_ID = 24;
    public const ushort SCHEMA_ID = 101;
    public const ushort SCHEMA_VERSION = 6;

    private RecordingSignalEventDecoder _parentMessage;
    private IDirectBuffer _buffer;
    protected int _offset;
    protected int _limit;
    protected int _actingBlockLength;
    protected int _actingVersion;

    public RecordingSignalEventDecoder() => _parentMessage = this;

    public ushort SbeBlockLength() => BLOCK_LENGTH;

    public ushort SbeTemplateId() => TEMPLATE_ID;

    public ushort SbeSchemaId() => SCHEMA_ID;

    public ushort SbeSchemaVersion() => SCHEMA_VERSION;

    public string SbeSemanticType() => "";

    public IDirectBuffer Buffer() => _buffer;

    public int Offset() => _offset;

    // Wraps the decoder around an encoded message beginning at offset in buffer.
    public RecordingSignalEventDecoder Wrap(
        IDirectBuffer buffer, int offset, int actingBlockLength, int actingVersion)
    {
        _buffer = buffer;
        _offset = offset;
        _actingBlockLength = actingBlockLength;
        _actingVersion = actingVersion;
        Limit(offset + actingBlockLength);

        return this;
    }

    public int EncodedLength() => _limit - _offset;

    public int Limit() => _limit;

    public void Limit(int limit) => _limit = limit;

    // All fixed fields of this message carry identical encoding meta attributes,
    // so the per-field accessors below delegate to this shared helper.
    private static string FieldMetaAttribute(MetaAttribute metaAttribute)
    {
        switch (metaAttribute)
        {
            case MetaAttribute.EPOCH: return "unix";
            case MetaAttribute.TIME_UNIT: return "nanosecond";
            case MetaAttribute.SEMANTIC_TYPE: return "";
            case MetaAttribute.PRESENCE: return "required";
            default: return "";
        }
    }

    // controlSessionId: int64 at block offset 0
    public static int ControlSessionIdId() => 1;

    public static int ControlSessionIdSinceVersion() => 0;

    public static int ControlSessionIdEncodingOffset() => 0;

    public static int ControlSessionIdEncodingLength() => 8;

    public static string ControlSessionIdMetaAttribute(MetaAttribute metaAttribute) => FieldMetaAttribute(metaAttribute);

    public static long ControlSessionIdNullValue() => long.MinValue;

    public static long ControlSessionIdMinValue() => long.MinValue + 1;

    public static long ControlSessionIdMaxValue() => long.MaxValue;

    public long ControlSessionId() => _buffer.GetLong(_offset + 0, ByteOrder.LittleEndian);

    // correlationId: int64 at block offset 8
    public static int CorrelationIdId() => 2;

    public static int CorrelationIdSinceVersion() => 0;

    public static int CorrelationIdEncodingOffset() => 8;

    public static int CorrelationIdEncodingLength() => 8;

    public static string CorrelationIdMetaAttribute(MetaAttribute metaAttribute) => FieldMetaAttribute(metaAttribute);

    public static long CorrelationIdNullValue() => long.MinValue;

    public static long CorrelationIdMinValue() => long.MinValue + 1;

    public static long CorrelationIdMaxValue() => long.MaxValue;

    public long CorrelationId() => _buffer.GetLong(_offset + 8, ByteOrder.LittleEndian);

    // recordingId: int64 at block offset 16
    public static int RecordingIdId() => 3;

    public static int RecordingIdSinceVersion() => 0;

    public static int RecordingIdEncodingOffset() => 16;

    public static int RecordingIdEncodingLength() => 8;

    public static string RecordingIdMetaAttribute(MetaAttribute metaAttribute) => FieldMetaAttribute(metaAttribute);

    public static long RecordingIdNullValue() => long.MinValue;

    public static long RecordingIdMinValue() => long.MinValue + 1;

    public static long RecordingIdMaxValue() => long.MaxValue;

    public long RecordingId() => _buffer.GetLong(_offset + 16, ByteOrder.LittleEndian);

    // subscriptionId: int64 at block offset 24
    public static int SubscriptionIdId() => 4;

    public static int SubscriptionIdSinceVersion() => 0;

    public static int SubscriptionIdEncodingOffset() => 24;

    public static int SubscriptionIdEncodingLength() => 8;

    public static string SubscriptionIdMetaAttribute(MetaAttribute metaAttribute) => FieldMetaAttribute(metaAttribute);

    public static long SubscriptionIdNullValue() => long.MinValue;

    public static long SubscriptionIdMinValue() => long.MinValue + 1;

    public static long SubscriptionIdMaxValue() => long.MaxValue;

    public long SubscriptionId() => _buffer.GetLong(_offset + 24, ByteOrder.LittleEndian);

    // position: int64 at block offset 32
    public static int PositionId() => 5;

    public static int PositionSinceVersion() => 0;

    public static int PositionEncodingOffset() => 32;

    public static int PositionEncodingLength() => 8;

    public static string PositionMetaAttribute(MetaAttribute metaAttribute) => FieldMetaAttribute(metaAttribute);

    public static long PositionNullValue() => long.MinValue;

    public static long PositionMinValue() => long.MinValue + 1;

    public static long PositionMaxValue() => long.MaxValue;

    public long Position() => _buffer.GetLong(_offset + 32, ByteOrder.LittleEndian);

    // signal: RecordingSignal enum encoded as int32 at block offset 40
    public static int SignalId() => 6;

    public static int SignalSinceVersion() => 0;

    public static int SignalEncodingOffset() => 40;

    public static int SignalEncodingLength() => 4;

    public static string SignalMetaAttribute(MetaAttribute metaAttribute) => FieldMetaAttribute(metaAttribute);

    public RecordingSignal Signal() => (RecordingSignal)_buffer.GetInt(_offset + 40, ByteOrder.LittleEndian);

    public override string ToString() => AppendTo(new StringBuilder(100)).ToString();

    // Appends a human-readable rendering of the message to builder, temporarily
    // constraining the limit to the acting block and restoring it afterwards.
    public StringBuilder AppendTo(StringBuilder builder)
    {
        int originalLimit = Limit();
        Limit(_offset + _actingBlockLength);

        builder.Append("[RecordingSignalEvent](sbeTemplateId=");
        builder.Append(TEMPLATE_ID);
        builder.Append("|sbeSchemaId=");
        builder.Append(SCHEMA_ID);
        builder.Append("|sbeSchemaVersion=");
        if (_parentMessage._actingVersion != SCHEMA_VERSION)
        {
            // Decoding a different schema version than this codec was generated for.
            builder.Append(_parentMessage._actingVersion);
            builder.Append('/');
        }
        builder.Append(SCHEMA_VERSION);
        builder.Append("|sbeBlockLength=");
        if (_actingBlockLength != BLOCK_LENGTH)
        {
            builder.Append(_actingBlockLength);
            builder.Append('/');
        }
        builder.Append(BLOCK_LENGTH);
        builder.Append("):");

        builder.Append("ControlSessionId=").Append(ControlSessionId()).Append('|');
        builder.Append("CorrelationId=").Append(CorrelationId()).Append('|');
        builder.Append("RecordingId=").Append(RecordingId()).Append('|');
        builder.Append("SubscriptionId=").Append(SubscriptionId()).Append('|');
        builder.Append("Position=").Append(Position()).Append('|');
        builder.Append("Signal=").Append(Signal());

        Limit(originalLimit);

        return builder;
    }
}
}
| |
//------------------------------------------------------------------------------
// Microsoft Windows Client Platform
// Copyright (c) Microsoft Corporation, 2001, 2002, 2003
//
// File: ColorTransform.cs
//------------------------------------------------------------------------------
using System;
using System.IO;
using System.Runtime.InteropServices;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using MS.Internal;
using MS.Win32;
using System.Security;
using System.Security.Permissions;
using System.Collections;
using System.ComponentModel;
using System.ComponentModel.Design.Serialization;
using System.Reflection;
using System.Diagnostics;
using System.Globalization;
using Microsoft.Win32.SafeHandles;
using UnsafeNativeMethods=MS.Win32.PresentationCore.UnsafeNativeMethods;
namespace System.Windows.Media
{
///<summary>
/// Wraps an ICM (Image Color Management) color transform between two color
/// contexts and translates individual color channel values through it.
/// </summary>
internal class ColorTransform
{
    //------------------------------------------------------
    //
    //  Constructors
    //
    //------------------------------------------------------

    #region Constructors

    private ColorTransform()
    {
    }

    /// <summary>
    /// Creates a transform between two color contexts. A null context on
    /// either side defaults to an sBGRA (Bgra32) context.
    /// </summary>
    /// <SecurityNote>
    /// SecurityCritical: This code calls critical code (unmanaged)
    /// SecurityTreatAsSafe: Call passes in managed ColorContext to unmanaged code which is safe
    ///                      The _colorTransformHelper comes out of an elevation and is stored locally
    ///                      in a variable whose access is tracked to prevent any malicious tampering.
    ///                      And that constructing this object is inherently a safe operation.
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    internal ColorTransform(ColorContext srcContext, ColorContext dstContext)
    {
        InitializeICM();

        if (srcContext == null)
        {
            srcContext = new ColorContext(PixelFormats.Bgra32);
        }

        if (dstContext == null)
        {
            dstContext = new ColorContext(PixelFormats.Bgra32);
        }

        _inputColorType = srcContext.ColorType;
        _outputColorType = dstContext.ColorType;

        _colorTransformHelper.CreateTransform(srcContext.ProfileHandle, dstContext.ProfileHandle);
    }

    /// <summary>
    /// Creates a transform between two color contexts for a bitmap source.
    /// Null contexts default to a context derived from <paramref name="pixelFormat"/>.
    /// If either profile handle is missing or invalid, no unmanaged transform
    /// is created (matching previous behavior; callers must tolerate this).
    /// </summary>
    /// <SecurityNote>
    /// SecurityCritical: This code calls critical code (unmanaged)
    /// SecurityTreatAsSafe: All parameters to unmanaged call are generated inside of the method and are safe
    ///                      The _colorTransformHelper comes out of an elevation and is stored locally
    ///                      in a variable whose access is tracked to prevent any malicious tampering.
    ///                      And that constructing this object is inherently a safe operation.
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    internal ColorTransform(SafeMILHandle bitmapSource, ColorContext srcContext, ColorContext dstContext, System.Windows.Media.PixelFormat pixelFormat)
    {
        InitializeICM();

        if (srcContext == null)
        {
            srcContext = new ColorContext(pixelFormat);
        }

        if (dstContext == null)
        {
            dstContext = new ColorContext(pixelFormat);
        }

        _inputColorType = srcContext.ColorType;
        _outputColorType = dstContext.ColorType;

        // If either handle is null or invalid we cannot create the transform.
        if (srcContext.ProfileHandle != null && !srcContext.ProfileHandle.IsInvalid)
        {
            if (dstContext.ProfileHandle != null && !dstContext.ProfileHandle.IsInvalid)
            {
                _colorTransformHelper.CreateTransform(srcContext.ProfileHandle, dstContext.ProfileHandle);
            }
        }
    }

    #endregion

    //------------------------------------------------------
    //
    //  Public Methods
    //
    //------------------------------------------------------

    #region Public Methods
    #endregion

    //------------------------------------------------------
    //
    //  Internal Methods
    //
    //------------------------------------------------------

    #region Internal Methods

    /// <summary>
    /// Translates a single color (one channel value per array element) from
    /// the source color space into <paramref name="dstValue"/> using the
    /// unmanaged ICM transform.
    /// </summary>
    /// <SecurityNote>
    /// SecurityCritical: This code calls critical code (unmanaged)
    /// SecurityTreatAsSafe: All parameters to unmanaged call are generated inside of the method and are safe
    ///                      The _colorTransformHelper comes out of an elevation and is stored locally
    ///                      in a variable whose access is tracked to prevent any malicious tampering.
    ///                      And that constructing this object is inherently a safe operation.
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    internal void Translate(float[] srcValue, float[] dstValue)
    {
        IntPtr paInputColors = IntPtr.Zero;
        IntPtr paOutputColors = IntPtr.Zero;

        try
        {
            // Pack the source channels into a single 64-bit color and hand it
            // to TranslateColors via unmanaged memory.
            UInt32 numColors = 1;
            long inputColor = ICM2Color(srcValue);

            paInputColors = Marshal.AllocHGlobal(64);
            Marshal.WriteInt64(paInputColors, inputColor);
            paOutputColors = Marshal.AllocHGlobal(64);
            long outputColor = 0;
            Marshal.WriteInt64(paOutputColors, outputColor);

            _colorTransformHelper.TranslateColors(
                (IntPtr)paInputColors,
                numColors,
                _inputColorType,
                (IntPtr)paOutputColors,
                _outputColorType
                );

            outputColor = Marshal.ReadInt64(paOutputColors);

            for (int i = 0; i < dstValue.GetLength(0); i++)
            {
                // Each output channel occupies 16 bits of the 64-bit color and is
                // scaled back to [0, 1] by dividing by 0x10000.
                //
                // FIX: the previous code tested "result < 0" and negated the value,
                // but result is an unsigned 16-bit quantity so that branch was
                // unreachable (and "result & 0x7fffffff" was a no-op). The form
                // below is behaviorally identical with the dead code removed.
                //
                // NOTE(review): for i >= 4 the shift count (16 * i) wraps because
                // C# masks Int64 shift counts to 6 bits, so channels 4..7 re-read
                // the low channels — presumably destinations have at most 4
                // channels here; confirm against callers.
                UInt32 result = 0x0000ffff & (UInt32)(outputColor >> (16 * i));
                dstValue[i] = result / (float)(0x10000);
            }
        }
        finally
        {
            // FreeHGlobal is a no-op for IntPtr.Zero, so this is safe even when
            // allocation failed part-way through.
            Marshal.FreeHGlobal(paInputColors);
            Marshal.FreeHGlobal(paOutputColors);
        }
    }

    #endregion Internal Methods

    //------------------------------------------------------
    //
    //  Private Methods
    //
    //------------------------------------------------------

    #region Private Methods

    /// <SecurityNote>
    /// SecurityCritical: This code calls critical code (unmanaged)
    /// </SecurityNote>
    [SecurityCritical]
    private void InitializeICM()
    {
        _colorTransformHelper = new ColorTransformHelper();
    }

    /// <summary>
    /// Packs 3 to 8 float channels (each clamped to [0, 1]) into a 64-bit ICM
    /// color value: 16 bits per channel when there are at most 4 channels,
    /// 8 bits per channel otherwise.
    /// </summary>
    /// <exception cref="NotSupportedException">Fewer than 3 or more than 8 channels.</exception>
    private long ICM2Color(float[] srcValue)
    {
        long colorValue;

        if (srcValue.GetLength(0) < 3 || srcValue.GetLength(0) > 8)
        {
            throw new NotSupportedException(); // Only support color spaces with 3,4,5,6,7,8 channels
        }

        if (srcValue.GetLength(0) <= 4)
        {
            // 16 bits per channel, unused channels stay zero.
            UInt16[] channel = new UInt16[4];

            channel[0] = channel[1] = channel[2] = channel[3] = 0;

            for (int i = 0; i < srcValue.GetLength(0); i++)
            {
                // Clamp out-of-range inputs to the representable extremes.
                if (srcValue[i] >= 1.0)
                {
                    channel[i] = 0xffff;
                }
                else if (srcValue[i] <= 0.0)
                {
                    channel[i] = 0x0;
                }
                else
                {
                    channel[i] = (UInt16)(srcValue[i] * (float)0xFFFF);
                }
            }

            colorValue = (long)(((UInt64)channel[3] << 48) + ((UInt64)channel[2] << 32) + ((UInt64)channel[1] << 16) + (UInt64)channel[0]);
        }
        else
        {
            // 8 bits per channel for 5-8 channel color spaces.
            byte[] channel = new byte[8];

            channel[0] = channel[1] = channel[2] = channel[3] =
                channel[4] = channel[5] = channel[6] = channel[7] = 0;

            for (int i = 0; i < srcValue.GetLength(0); i++)
            {
                // Clamp out-of-range inputs to the representable extremes.
                if (srcValue[i] >= 1.0)
                {
                    channel[i] = 0xff;
                }
                else if (srcValue[i] <= 0.0)
                {
                    channel[i] = 0x0;
                }
                else
                {
                    channel[i] = (byte)(srcValue[i] * (float)0xFF);
                }
            }

            colorValue = (long)(((UInt64)channel[7] << 56) + ((UInt64)channel[6] << 48) +
                                ((UInt64)channel[5] << 40) + ((UInt64)channel[4] << 32) +
                                ((UInt64)channel[3] << 24) + ((UInt64)channel[2] << 16) +
                                ((UInt64)channel[1] << 8) + ((UInt64)channel[0] << 0));
        }

        return colorValue;
    }

    #endregion

    //------------------------------------------------------
    //
    //  Private Fields
    //
    //------------------------------------------------------

    #region Private Fields

    /// <SecurityNote>
    /// SecurityCritical: This comes out of an elevation needs to be critical and tracked.
    /// </SecurityNote>
    [SecurityCritical]
    private ColorTransformHelper _colorTransformHelper;

    private UInt32 _inputColorType;

    private UInt32 _outputColorType;

    #endregion
}
}
| |
/*==========================================================================;
*
* This file is part of LATINO. See http://www.latinolib.org
*
* File: UrlNormalizer.cs
* Desc: URL normalization routines
* Created: Jan-2012
*
* Author: Miha Grcar
*
***************************************************************************/
using System;
using System.Web;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Collections.Specialized;
using System.IO;
namespace Latino.WebMining
{
/* .-----------------------------------------------------------------------
|
| Class UrlNormalizer
|
'-----------------------------------------------------------------------
*/
public class UrlNormalizer
{
    /* .-----------------------------------------------------------------------
    |
    |  Class Rule
    |
    '-----------------------------------------------------------------------
    */
    // A normalization rule: when mUrlRegex matches a URL and the URL's query
    // contains every parameter in mQueryParams, the URL is rebuilt keeping
    // only those parameters.
    private class Rule
    {
        public Regex mUrlRegex;       // pattern selecting the URLs this rule applies to
        public Set<string> mQueryParams; // query parameters to retain
        public Rule(string urlRegex, params string[] qParams)
        {
            mUrlRegex = new Regex(urlRegex, RegexOptions.Compiled);
            mQueryParams = new Set<string>(qParams);
        }
        // Rebuilds the URL keeping only this rule's query parameters.
        public string Execute(string left, ArrayList<string> path, ArrayList<KeyDat<string, string>> query)
        {
            return UrlAsString(left, path, query, mQueryParams);
        }
    }
    /* .-----------------------------------------------------------------------
    |
    |  Enum NormalizationMode
    |
    '-----------------------------------------------------------------------
    */
    // Basic: canonical form with all query params; DropQuery: query discarded;
    // Heuristics: rule-based query filtering (falls back to DropQuery).
    public enum NormalizationMode
    {
        Basic,
        DropQuery,
        Heuristics
    }
    // URL prefixes that mark a URL as blacklisted.
    private ArrayList<string> mBlacklist
        = new ArrayList<string>();
    // Rules applied in Heuristics mode, in file order.
    private ArrayList<Rule> mRules
        = new ArrayList<Rule>();
    // effective top-level domains
    private static Set<string> mTld
        = new Set<string>();
    // exceptions to wildcard TLD entries (lines starting with '!')
    private static Set<string> mNotTld
        = new Set<string>();
    // Loads the effective-TLD list (publicsuffix-style format) from an
    // embedded resource; '!'-prefixed lines are exceptions, '//' are comments.
    static UrlNormalizer()
    {
        Stream stream = Utils.GetManifestResourceStream(typeof(UrlNormalizer), "EffectiveTldNames.dat");
        StreamReader reader = new StreamReader(stream);
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            if (line != "" && !line.StartsWith("//"))
            {
                if (line.StartsWith("!"))
                {
                    mNotTld.Add(line.TrimStart('!'));
                }
                else
                {
                    mTld.Add(line);
                }
            }
        }
        // NOTE(review): only the stream is closed; the StreamReader is not
        // disposed, which is harmless here since it owns no other resource.
        stream.Close();
    }
    // Loads the blacklist and the rule set from the files named by the given
    // config keys; '#'-prefixed and empty lines are skipped in both files.
    public UrlNormalizer(string blacklistConfigKey, string rulesConfigKey)
    {
        // load blacklist
        string blacklistFileName = null;
        if (blacklistConfigKey != null) { blacklistFileName = Utils.GetConfigValue(blacklistConfigKey, null); }
        if (blacklistFileName != null)
        {
            string[] lines = File.ReadAllLines(blacklistFileName);
            foreach (string _line in lines)
            {
                string line = _line.Trim();
                if (line == "" || line.StartsWith("#")) { continue; }
                mBlacklist.Add(line);
            }
        }
        // load rules
        string rulesFileName = null;
        if (rulesConfigKey != null) { rulesFileName = Utils.GetConfigValue(rulesConfigKey, null); }
        if (rulesFileName != null)
        {
            string[] lines = File.ReadAllLines(rulesFileName);
            foreach (string _line in lines)
            {
                string line = _line.Trim();
                if (line == "" || line.StartsWith("#")) { continue; }
                // Rule format: <regex>TAB<param>TAB<param>... — items[0] is the
                // regex, everything after the first tab the query parameters.
                // NOTE(review): a line without a tab yields IndexOf == -1, so the
                // whole line also becomes a "parameter" — assumed not to occur in
                // well-formed rule files; confirm.
                string[] items = line.Split('\t');
                Rule rule = new Rule(items[0], line.Substring(line.IndexOf('\t') + 1).Split('\t'));
                mRules.Add(rule);
            }
        }
    }
    // Default config keys for the blacklist and rules files.
    public UrlNormalizer() : this("urlBlacklistFileName", "urlRulesFileName")
    {
    }
    // Lowercases str and strips everything that is not a letter or digit.
    private static string Normalize(string str)
    {
        string nStr = "";
        foreach (char ch in str.ToLower())
        {
            if (char.IsLetterOrDigit(ch)) { nStr += ch; }
        }
        return nStr;
    }
    // Applies the first rule whose regex matches url and whose required query
    // parameters are all present; returns null when no rule applies.
    private static string ExecuteRules(string url, string left, ArrayList<string> path, ArrayList<KeyDat<string, string>> query,
        IEnumerable<Rule> rules)
    {
        foreach (Rule rule in rules)
        {
            Match m = rule.mUrlRegex.Match(url);
            if (m.Success)
            {
                // Count how many of the rule's parameters the query contains.
                int c = 0;
                foreach (KeyDat<string, string> queryParam in query)
                {
                    if (rule.mQueryParams.Contains(queryParam.Key)) { c++; }
                }
                if (c < rule.mQueryParams.Count) { continue; }
                return rule.Execute(left, path, query);
            }
        }
        return null;
    }
    // *** everything after # (document fragment) is discarded
    // Splits url into "scheme://host:port" (left), decoded path segments, and
    // sorted (key, joined-values) query pairs.
    public static void ParseUrl(string url, out string left, out ArrayList<string> path, out ArrayList<KeyDat<string, string>> queryParsed)
    {
        Uri u = new Uri(url);
        left = string.Format("{0}://{1}:{2}", u.Scheme, u.Host, u.Port);
        path = new ArrayList<string>();
        for (int i = 1; i < u.Segments.Length; i++)
        {
            string seg = HttpUtility.UrlDecode(u.Segments[i].Trim('/'));
            if (seg != "") { path.Add(seg); }
        }
        NameValueCollection query = HttpUtility.ParseQueryString(u.Query);
        queryParsed = new ArrayList<KeyDat<string, string>>();
        for (int i = 0; i < query.Count; i++)
        {
            string key = query.Keys[i];
            if (key == null) { key = "null"; } // valueless parameters parse with a null key
            string val = GetValuesAsStr(query, i);
            queryParsed.Add(new KeyDat<string, string>(key, val));
        }
        queryParsed.Sort();
    }
    // Percent-encodes only the given characters in txt.
    private static string UrlEncode(string txt, params char[] chars)
    {
        foreach (char ch in chars)
        {
            txt = txt.Replace(ch.ToString(), string.Format("%{0:X2}", (int)ch));
        }
        return txt;
    }
    // Joins the (sorted) values of the idx-th query key into one
    // comma-separated, partially-encoded string.
    private static string GetValuesAsStr(NameValueCollection query, int idx)
    {
        string[] values = query.GetValues(idx);
        Array.Sort(values);
        string valuesStr = "";
        foreach (string value in values)
        {
            valuesStr += UrlEncode(value, '%', ',') + ",";
        }
        return valuesStr.TrimEnd(',');
    }
    // Reassembles a URL from its parsed parts; queryFilter (when non-null)
    // whitelists the query parameters to keep.
    public static string UrlAsString(string left, IEnumerable<string> path, ArrayList<KeyDat<string, string>> query, Set<string> queryFilter)
    {
        string url = left;
        foreach (string seg in path)
        {
            url += "/" + UrlEncode(seg, '%', '/', '?');
        }
        ArrayList<KeyDat<string, string>> tmp = query;
        if (queryFilter != null)
        {
            tmp = new ArrayList<KeyDat<string, string>>();
            foreach (KeyDat<string, string> item in query)
            {
                if (queryFilter.Contains(item.Key)) { tmp.Add(item); }
            }
        }
        if (tmp.Count > 0)
        {
            url += "?";
            foreach (KeyDat<string, string> item in tmp)
            {
                url += UrlEncode(item.Key, '%', '&', '=') + "=" + UrlEncode(item.Dat, '&', '=') + "&";
            }
            url = url.TrimEnd('&');
        }
        return url;
    }
    // Normalizes url according to mode, injecting a content-id parameter
    // derived from title; blacklist is set when the Basic-form URL starts
    // with any blacklisted prefix.
    public string NormalizeUrl(string url, string title, out bool blacklist, NormalizationMode mode)
    {
        blacklist = false;
        string left;
        ArrayList<string> path;
        ArrayList<KeyDat<string, string>> queryParsed;
        ParseUrl(url, out left, out path, out queryParsed);
        string content = title == null ? "" : Normalize(title);
        string cid = Utils.GetHashCode128(content).ToString("N");
        queryParsed.InsertSorted(new KeyDat<string, string>("__cid__", cid)); // inject content-id query parameter
        string url1 = UrlAsString(left, path, queryParsed, null);
        foreach (string prefix in mBlacklist)
        {
            if (url1.StartsWith(prefix))
            {
                blacklist = true;
                break;
            }
        }
        if (mode == NormalizationMode.Basic) { return url1; }
        string url2 = UrlAsString(left, path, queryParsed, new Set<string>());
        if (mode == NormalizationMode.DropQuery) { return url2; }
        // Heuristics: try the rules; fall back to the query-less form.
        string url3 = ExecuteRules(url1, left, path, queryParsed, mRules);
        if (url3 == null) { url3 = url2; }
        return url3;
    }
    // Extracts the effective TLD of the host part of url.
    public static string GetTldFromUrl(string url)
    {
        string left;
        ArrayList<string> path;
        ArrayList<KeyDat<string, string>> queryParsed;
        ParseUrl(url, out left, out path, out queryParsed);
        // left is "scheme://host:port"; take the host between "//" and ":".
        return GetTldFromDomainName(left.Split(':')[1].TrimStart('/'));
    }
    // Finds the longest effective TLD suffix of domainName, honoring wildcard
    // ("*.x") entries and their "!" exceptions; falls back to the last label.
    public static string GetTldFromDomainName(string domainName)
    {
        string[] parts = domainName.Split('.');
        int idx = 0;
        for (int i = 0; i < parts.Length - 1; i++)
        {
            idx += parts[i].Length + 1;
            string tail = domainName.Substring(idx);
            if (mTld.Contains(tail)) { return tail; }
            // e.g. "a.b.ck" matches the wildcard entry "*.ck" unless "a.b.ck"
            // is listed as an exception.
            string group = "*" + domainName.Substring(idx + parts[i + 1].Length);
            if (mTld.Contains(group) && !mNotTld.Contains(tail)) { return tail; }
        }
        return new ArrayList<string>(parts).Last;
    }
}
}
| |
using System;
using System.Diagnostics;
using System.Text;
namespace Core.Command
{
public partial class Delete
{
// Resolves the single table named by src, releasing any table previously
// cached on the source-list entry and bumping the reference count of the
// newly resolved one. Returns null when the table cannot be located or the
// INDEXED BY clause fails to resolve.
public static Table SrcListLookup(Parse parse, SrcList src)
{
    SrcList.SrcListItem entry = src.Ids[0];
    Debug.Assert(entry != null && src.Srcs == 1);
    Table resolved = parse.LocateTableItem(false, entry);
    Parse.DeleteTable(parse.Ctx, ref entry.Table);
    entry.Table = resolved;
    if (resolved != null)
        resolved.Refs++;
    return Select.IndexedByLookup(parse, entry) != 0 ? null : resolved;
}
// A table is not writable when:
//  1) it is a virtual table whose module supplies no xUpdate implementation, or
//  2) it is a read-only system table (e.g. sqlite_master) outside a nested
//     parse while the writable_schema pragma is off, or
//  3) it is a view and viewOk was not specified.
// In each case an error message is left in parse and true is returned.
public static bool IsReadOnly(Parse parse, Table table, bool viewOk)
{
    bool virtualWithoutUpdate = E.IsVirtual(table) &&
        VTable.GetVTable(parse.Ctx, table).Module.IModule.Update == null;
    bool readonlySystemTable = (table.TabFlags & TF.Readonly) != 0 &&
        (parse.Ctx.Flags & Context.FLAG.WriteSchema) == 0 &&
        parse.Nested == 0;
    if (virtualWithoutUpdate || readonlySystemTable)
    {
        parse.ErrorMsg("table %s may not be modified", table.Name);
        return true;
    }
#if !OMIT_VIEW
    if (!viewOk && table.Select != null)
    {
        parse.ErrorMsg("cannot modify %s because it is a view", table.Name);
        return true;
    }
#endif
    return false;
}
#if !OMIT_VIEW && !OMIT_TRIGGER
// Evaluates a view and stores its result in an ephemeral table written via
// cursor curId. The where_ expression is duplicated (so the caller's copy is
// untouched here) and attached to a SELECT over the view so that only
// matching rows are materialized.
public static void MaterializeView(Parse parse, Table view, Expr where_, int curId)
{
    Context ctx = parse.Ctx;
    int db = Prepare.SchemaToIndex(ctx, view.Schema);
    where_ = Expr.Dup(ctx, where_, 0);
    // Build a one-entry FROM clause naming the view in its home database.
    SrcList from = Parse.SrcListAppend(ctx, null, null, null);
    if (from != null)
    {
        Debug.Assert(from.Srcs == 1);
        from.Ids[0].Name = view.Name;
        from.Ids[0].Database = ctx.DBs[db].Name;
        Debug.Assert(from.Ids[0].On == null);
        Debug.Assert(from.Ids[0].Using == null);
    }
    // SELECT * FROM <view> WHERE <where_>, flagged as a materialization.
    Select select = Select.New(parse, 0, from, where_, 0, 0, 0, 0, 0, 0);
    if (select != null) select.SelFlags |= SF.Materialize;
    // Route the SELECT's output into the ephemeral table on cursor curId.
    SelectDest dest = new SelectDest();
    Select.DestInit(dest, SRT.EphemTab, curId);
    Select.Select(parse, select, dest);
    Select.Delete(ctx, select);
}
#endif
#if true || ENABLE_UPDATE_DELETE_LIMIT && !OMIT_SUBQUERY
// Rewrites the WHERE clause of a DELETE or UPDATE that carries LIMIT/OFFSET
// (and optionally ORDER BY) into the form
//     WHERE rowid IN (SELECT rowid FROM ... WHERE ... ORDER BY ... LIMIT ... OFFSET ...)
// Returns the new WHERE expression, the unchanged where_ when there is no
// limit to enforce, or null on error -- in the error paths the passed-in
// expressions are deleted here, so the caller must not free them again.
// NOTE(review): stmtType is a char but is formatted with "%s"; presumably the
// port passes a statement-name token through it -- confirm against callers.
public Expr LimitWhere(Parse parse, SrcList src, Expr where_, ExprList orderBy, Expr limit, Expr offset, char stmtType)
{
    // Check that there isn't an ORDER BY without a LIMIT clause.
    if (orderBy != null && (limit == null))
    {
        parse.ErrorMsg("ORDER BY without LIMIT on %s", stmtType);
        goto limit_where_cleanup_2;
    }
    // We only need to generate a select expression if there is a limit/offset term to enforce.
    if (limit == null)
    {
        Debug.Assert(offset == null); // if pLimit is null, pOffset will always be null as well.
        return where_;
    }
    // Generate a select expression tree to enforce the limit/offset term for the DELETE or UPDATE statement. For example:
    //   DELETE FROM table_a WHERE col1=1 ORDER BY col2 LIMIT 1 OFFSET 1
    // becomes:
    //   DELETE FROM table_a WHERE rowid IN (
    //     SELECT rowid FROM table_a WHERE col1=1 ORDER BY col2 LIMIT 1 OFFSET 1
    //   );
    Expr selectRowid = Expr.PExpr_(parse, TK.ROW, null, null, null); // SELECT rowid ...
    if (selectRowid == null) goto limit_where_cleanup_2;
    ExprList elist = ExprList.Append(parse, null, selectRowid); // Expression list containing only pSelectRowid
    if (elist == null) goto limit_where_cleanup_2;
    // duplicate the FROM clause as it is needed by both the DELETE/UPDATE tree and the SELECT subtree.
    SrcList selectSrc = SrcList.Dup(parse.Ctx, src, 0); // SELECT rowid FROM x ... (dup of pSrc)
    if (selectSrc == null)
    {
        ExprList.Delete(parse.Ctx, elist);
        goto limit_where_cleanup_2;
    }
    // generate the SELECT expression tree. Ownership of elist, selectSrc,
    // where_, orderBy, limit and offset passes to the new Select node.
    Select select = Select.New(parse, elist, selectSrc, where_, null, null, orderBy, 0, limit, offset); // Complete SELECT tree
    if (select == null) return null;
    // now generate the new WHERE rowid IN clause for the DELETE/UPDATE
    Expr whereRowid = Expr.PExpr_(parse, TK.ROW, null, null, null); // WHERE rowid ..
    if (whereRowid == null) goto limit_where_cleanup_1;
    Expr inClause = Expr.PExpr_(parse, TK.IN, whereRowid, null, null); // WHERE rowid IN ( select )
    if (inClause == null) goto limit_where_cleanup_1;
    inClause.x.Select = select;
    inClause.Flags |= EP.xIsSelect;
    Expr.SetHeight(parse, inClause);
    return inClause;
    // something went wrong after the SELECT tree was built: free it.
    limit_where_cleanup_1:
    Select.Delete(parse.Ctx, select);
    return null;
    // early failure: free the expressions the caller handed over.
    limit_where_cleanup_2:
    Expr.Delete(parse.Ctx, ref where_);
    ExprList.Delete(parse.Ctx, orderBy);
    Expr.Delete(parse.Ctx, ref limit);
    Expr.Delete(parse.Ctx, ref offset);
    return null;
}
#endif
/// <summary>
/// Generates VDBE code for "DELETE FROM tabList WHERE where_". tabList must
/// contain exactly one table. Ownership of tabList and where_ passes to this
/// routine; both are released at delete_from_cleanup before returning.
/// </summary>
public static void DeleteFrom(Parse parse, SrcList tabList, Expr where_)
{
    AuthContext sContext = new AuthContext(); // Authorization context
    Context ctx = parse.Ctx; // Main database structure
    if (parse.Errs != 0 || ctx.MallocFailed)
        goto delete_from_cleanup;
    Debug.Assert(tabList.Srcs == 1);
    // Locate the table which we want to delete. This table has to be put in an SrcList structure because some of the subroutines we
    // will be calling are designed to work with multiple tables and expect an SrcList* parameter instead of just a Table* parameter.
    Table table = SrcList.Lookup(parse, tabList); // The table from which records will be deleted
    if (table == null) goto delete_from_cleanup;
    // Figure out if we have any triggers and if the table being deleted from is a view
#if !OMIT_TRIGGER
    int dummy;
    Trigger trigger = Triggers.Exist(parse, table, TK.DELETE, null, out dummy); // List of table triggers, if required
#if OMIT_VIEW
    const bool isView = false;
#else
    bool isView = (table.Select != null); // True if attempting to delete from a view
#endif
#else
    const Trigger trigger = null;
    bool isView = false;
#endif
    // If table is really a view, make sure it has been initialized.
    // NOTE(review): ViewGetColumnNames is compared against null here, while other
    // ports of this routine return an int error code - confirm its return type.
    if (sqlite3ViewGetColumnNames(parse, table) != null || IsReadOnly(parse, table, (trigger != null)))
        goto delete_from_cleanup;
    int db = sqlite3SchemaToIndex(ctx, table.Schema); // Database number
    Debug.Assert(db < ctx.DBs.length);
    string dbName = ctx.DBs[db].Name; // Name of database holding table
    ARC rcauth = Auth.Check(parse, AUTH.DELETE, table.Name, 0, dbName); // Value returned by authorization callback
    Debug.Assert(rcauth == ARC.OK || rcauth == ARC.DENY || rcauth == ARC.IGNORE);
    if (rcauth == ARC.DENY)
        goto delete_from_cleanup;
    Debug.Assert(!isView || trigger != null);
    // Assign cursor number to the table and all its indices.
    Debug.Assert(tabList.Srcs == 1);
    int curId = tabList.Ids[0].Cursor = parse.Tabs++; // VDBE VdbeCursor number for table
    Index idx; // For looping over indices of the table
    for (idx = table.Index; idx != null; idx = idx.Next)
        parse.Tabs++;
    // Start the view context
    if (isView)
        Auth.ContextPush(parse, sContext, table.Name);
    // Begin generating code.
    Vdbe v = parse.GetVdbe(); // The virtual database engine
    if (v == null)
        goto delete_from_cleanup;
    if (parse.Nested == 0) v.CountChanges();
    parse.BeginWriteOperation(1, db);
    // If we are trying to delete from a view, realize that view into a ephemeral table.
#if !OMIT_VIEW && !OMIT_TRIGGER
    if (isView)
        MaterializeView(parse, table, where_, curId);
#endif
    // Resolve the column names in the WHERE clause.
    NameContext sNC = new NameContext(); // Name context to resolve expressions in
    sNC.Parse = parse;
    sNC.SrcList = tabList;
    if (sqlite3ResolveExprNames(sNC, ref where_) != 0)
        goto delete_from_cleanup;
    // Initialize the counter of the number of rows deleted, if we are counting rows.
    int memCnt = -1; // Memory cell used for change counting
    if ((ctx.Flags & Context.FLAG.CountRows) != 0)
    {
        memCnt = ++parse.Mems;
        v.AddOp2(OP.Integer, 0, memCnt);
    }
#if !OMIT_TRUNCATE_OPTIMIZATION
    // Special case: A DELETE without a WHERE clause deletes everything. It is easier just to erase the whole table. Prior to version 3.6.5,
    // this optimization caused the row change count (the value returned by API function sqlite3_count_changes) to be set incorrectly.
    if (rcauth == ARC.OK && where_ == null && trigger == null && !IsVirtual(table) && !FKey.FkRequired(parse, table, null, 0))
    {
        Debug.Assert(!isView);
        v.AddOp4(OP.Clear, table.Id, db, memCnt, table.Name, Vdbe.P4T.STATIC);
        for (idx = table.Index; idx != null; idx = idx.Next)
        {
            Debug.Assert(idx.Schema == table.Schema);
            v.AddOp2(OP.Clear, idx.Id, db);
        }
    }
    else
#endif
    // The usual case: There is a WHERE clause so we have to scan through the table and pick which records to delete.
    {
        int rowSet = ++parse.Mems; // Register for rowset of rows to delete
        int rowid = ++parse.Mems; // Used for storing rowid values.
        // Collect rowids of every row to be deleted.
        v.AddOp2(OP.Null, 0, rowSet);
        // (FIX) renamed from "dummy", which collided with the int dummy declared
        // for Triggers.Exist above (CS0136).
        ExprList dummyOrderBy = null;
        WhereInfo winfo = Where.Begin(parse, tabList, where_, ref dummyOrderBy, WHERE_DUPLICATES_OK, 0); // Information about the WHERE clause
        if (winfo == null) goto delete_from_cleanup;
        int regRowid = Expr.CodeGetColumn(parse, table, -1, curId, rowid); // Actual register containing rowids
        v.AddOp2(OP.RowSetAdd, rowSet, regRowid);
        if ((ctx.Flags & Context.FLAG.CountRows) != 0)
            v.AddOp2(OP.AddImm, memCnt, 1);
        Where.End(winfo);
        // Delete every item whose key was written to the list during the database scan. We have to delete items after the scan is complete
        // because deleting an item can change the scan order.
        int end = v.MakeLabel();
        // Unless this is a view, open cursors for the table we are deleting from and all its indices. If this is a view, then the
        // only effect this statement has is to fire the INSTEAD OF triggers.
        if (!isView)
            sqlite3OpenTableAndIndices(parse, table, curId, OP.OpenWrite);
        int addr = v.AddOp3(OP.RowSetRead, rowSet, end, rowid);
        // Delete the row
#if !OMIT_VIRTUALTABLE
        if (IsVirtual(table))
        {
            VTable vtable = VTable.GetVTable(ctx, table);
            VTable.MakeWritable(parse, table);
            v.AddOp4(OP.VUpdate, 0, 1, rowid, vtable, Vdbe.P4T.VTAB);
            v.ChangeP5(OE.Abort);
            sqlite3MayAbort(parse);
        }
        else
#endif
        {
            // (FIX) was "int count = (parse.Nested == 0;" - unbalanced parenthesis
            // and a bool assigned to an int. Only top-level (non-nested) DELETEs
            // count changes.
            int count = (parse.Nested == 0 ? 1 : 0); // True to count changes
            GenerateRowDelete(parse, table, curId, rowid, count, trigger, OE.Default);
        }
        // End of the delete loop
        v.AddOp2(OP.Goto, 0, addr);
        v.ResolveLabel(end);
        // Close the cursors open on the table and its indexes.
        if (!isView && !IsVirtual(table))
        {
            // (FIX) was "for (int i = 1, idx = table.Index; ...)" which both
            // redeclared idx and typed it as int; reuse the method-level cursor
            // with a separate counter instead.
            int closeId = 1;
            for (idx = table.Index; idx != null; closeId++, idx = idx.Next)
                v.AddOp2(OP.Close, curId + closeId, idx.Id);
            v.AddOp1(OP.Close, curId);
        }
    }
    // Update the sqlite_sequence table by storing the content of the maximum rowid counter values recorded while inserting into
    // autoincrement tables.
    if (parse.Nested == 0 && parse.TriggerTab == null)
        sqlite3AutoincrementEnd(parse);
    // Return the number of rows that were deleted. If this routine is generating code because of a call to sqlite3NestedParse(), do not
    // invoke the callback function.
    if ((ctx.Flags & Context.FLAG.CountRows) != 0 && parse.Nested == 0 && parse.TriggerTab == null)
    {
        v.AddOp2(OP.ResultRow, memCnt, 1);
        v.SetNumCols(1);
        v.SetColName(0, COLNAME_NAME, "rows deleted", SQLITE_STATIC);
    }
delete_from_cleanup:
    Auth.ContextPop(sContext);
    SrcList.Delete(ctx, ref tabList);
    Expr.Delete(ctx, ref where_);
    return;
}
/// <summary>
/// Emits VDBE code that deletes the single row of <paramref name="table"/>
/// whose rowid is in register <paramref name="rowid"/>, firing BEFORE/AFTER
/// DELETE triggers and performing foreign-key processing as required.
/// Cursor curId must already be open on the table.
/// </summary>
public static void GenerateRowDelete(Parse parse, Table table, int curId, int rowid, int count, Trigger trigger, OE onconf)
{
    // Vdbe is guaranteed to have been allocated by this stage.
    Vdbe v = parse.V;
    Debug.Assert(v != null);
    // Seek cursor curId to the row to delete. If this row no longer exists (this can happen if a trigger program has already deleted it), do
    // not attempt to delete it or fire any DELETE triggers.
    int label = v.MakeLabel(); // Label resolved to end of generated code
    v.AddOp3(OP.NotExists, curId, label, rowid);
    // (FIX) First register in the OLD.* array. Declared (and zeroed) at method
    // scope because FkActions and the AFTER-trigger call below reference it even
    // when no OLD.* registers were allocated; the original declaration inside
    // the "if" block did not compile. Mirrors "int iOld = 0;" in the C source.
    int oldId = 0;
    // If there are any triggers to fire, allocate a range of registers to use for the old.* references in the triggers.
    // NOTE(review): FkRequired is compared "!= 0" here but used as a bool at
    // another call site in this file - confirm its return type.
    if (FKey.FkRequired(parse, table, null, 0) != 0 || trigger != null)
    {
        // TODO: Could use temporary registers here. Also could attempt to avoid copying the contents of the rowid register.
        uint mask = sqlite3TriggerColmask(parse, trigger, null, 0, TRIGGER.BEFORE | TRIGGER.AFTER, table, onconf); // Mask of OLD.* columns in use
        mask |= sqlite3FkOldmask(parse, table);
        oldId = parse.Mems + 1;
        parse.Mems += (1 + table.Cols.length);
        // Populate the OLD.* pseudo-table register array. These values will be used by any BEFORE and AFTER triggers that exist.
        v.AddOp2(OP.Copy, rowid, oldId);
        for (int col = 0; col < table.Cols.length; col++) // Iterator used while populating OLD.*
            if (mask == 0xffffffff || (mask & (1 << col)) != 0)
                Expr.CodeGetColumnOfTable(v, table, curId, col, oldId + col + 1);
        // Invoke BEFORE DELETE trigger programs.
        sqlite3CodeRowTrigger(parse, trigger, TK.DELETE, null, TRIGGER.BEFORE, table, oldId, onconf, label);
        // Seek the cursor to the row to be deleted again. It may be that the BEFORE triggers coded above have already removed the row
        // being deleted. Do not attempt to delete the row a second time, and do not fire AFTER triggers.
        v.AddOp3(OP.NotExists, curId, label, rowid);
        // Do FK processing. This call checks that any FK constraints that refer to this table (i.e. constraints attached to other tables) are not violated by deleting this row.
        FKey.FkCheck(parse, table, oldId, 0);
    }
    // Delete the index and table entries. Skip this step if table is really a view (in which case the only effect of the DELETE statement is to fire the INSTEAD OF triggers).
    if (table.Select == null)
    {
        GenerateRowIndexDelete(parse, table, curId, null);
        v.AddOp2(OP.Delete, curId, (count != 0 ? (int)OPFLAG.NCHANGE : 0));
        if (count != 0)
            v.ChangeP4(-1, table.Name, Vdbe.P4T.TRANSIENT);
    }
    // Do any ON CASCADE, SET NULL or SET DEFAULT operations required to handle rows (possibly in other tables) that refer via a foreign key to the row just deleted.
    FKey.FkActions(parse, table, null, oldId);
    // Invoke AFTER DELETE trigger programs.
    sqlite3CodeRowTrigger(parse, trigger, TK.DELETE, null, TRIGGER.AFTER, table, oldId, onconf, label);
    // Jump here if the row had already been deleted before any BEFORE trigger programs were invoked. Or if a trigger program throws a RAISE(IGNORE) exception.
    v.ResolveLabel(label);
}
//public static void GenerateRowIndexDelete(Parse parse, Table table, int curId, int nothing) { int[] regIdxs = null; GenerateRowIndexDelete(parse, table, curId, regIdxs); }
/// <summary>
/// Emits code that deletes the current row's entry from every index on the
/// table. Index cursors are assumed to be open at curId+1, curId+2, ...
/// If regIdxs is non-null, a zero entry means "skip that index".
/// </summary>
public static void GenerateRowIndexDelete(Parse parse, Table table, int curId, int[] regIdxs)
{
    int cursorOffset = 1;
    for (Index index = table.Index; index != null; index = index.Next, cursorOffset++)
    {
        bool skip = (regIdxs != null && regIdxs[cursorOffset - 1] == 0);
        if (skip)
            continue;
        // Assemble the key for the current row, then delete it from this index.
        int keyReg = GenerateIndexKey(parse, index, curId, 0, false);
        parse.V.AddOp3(OP.IdxDelete, curId + cursorOffset, keyReg, index.Columns.length + 1);
    }
}
/// <summary>
/// Emits VDBE code that assembles an index key for the row that cursor curId
/// currently points at. Returns the first register of the key columns; the
/// rowid is written into the register immediately after them. When doMakeRec
/// is true, an OP.MakeRecord combining all registers is written to regOut.
/// </summary>
public static int GenerateIndexKey(Parse parse, Index index, int curId, int regOut, bool doMakeRec)
{
    Vdbe v = parse.V;
    Table table = index.Table;
    int colCount = index.Columns.length;
    int baseReg = Expr.GetTempRange(parse, colCount + 1);
    // The rowid occupies the last register of the range.
    v.AddOp2(OP.Rowid, curId, baseReg + colCount);
    for (int i = 0; i < colCount; i++)
    {
        int tableCol = index.Columns[i];
        if (tableCol == table.PKey)
        {
            // INTEGER PRIMARY KEY column: alias of the rowid already fetched.
            v.AddOp2(OP.SCopy, baseReg + colCount, baseReg + i);
        }
        else
        {
            v.AddOp3(OP.Column, curId, tableCol, baseReg + i);
            v.ColumnDefault(table, tableCol, -1);
        }
    }
    if (doMakeRec)
    {
        // Views and the IdxRealAsInt optimization suppress the affinity string.
        string affinity = null;
        if (table.Select == null && !E.CtxOptimizationDisabled(parse.Ctx, SQLITE.IdxRealAsInt))
            affinity = sqlite3IndexAffinityStr(v, index);
        v.AddOp3(OP.MakeRecord, baseReg, colCount + 1, regOut);
        v.ChangeP4(-1, affinity, Vdbe.P4T.TRANSIENT);
    }
    Expr.ReleaseTempRange(parse, baseReg, colCount + 1);
    return baseReg;
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.Azure.AcceptanceTestsAzureParameterGrouping
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Test Infrastructure for AutoRest
/// </summary>
// NOTE(review): AutoRest-generated client (see file header). Hand edits to the
// logic are lost on regeneration, so the code below is left byte-identical;
// only review comments have been added.
public partial class AutoRestParameterGroupingTestService : ServiceClient<AutoRestParameterGroupingTestService>, IAutoRestParameterGroupingTestService, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Gets Azure subscription credentials.
/// </summary>
public ServiceClientCredentials Credentials { get; private set; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Gets the IParameterGroupingOperations.
/// </summary>
public virtual IParameterGroupingOperations ParameterGrouping { get; private set; }
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestParameterGroupingTestService(params DelegatingHandler[] handlers) : base(handlers)
{
// All constructors funnel through Initialize() to set generated defaults.
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestParameterGroupingTestService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestParameterGroupingTestService(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
// Overrides the default BaseUri assigned by Initialize() in the chained ctor.
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestParameterGroupingTestService(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets Azure subscription credentials.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestParameterGroupingTestService(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.Credentials = credentials;
// Redundant re-check of a value validated above; standard generated pattern.
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets Azure subscription credentials.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestParameterGroupingTestService(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Gets Azure subscription credentials.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestParameterGroupingTestService(Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestParameterGroupingTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Gets Azure subscription credentials.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public AutoRestParameterGroupingTestService(Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
this.ParameterGrouping = new ParameterGroupingOperations(this);
// Generated defaults; baseUri-taking constructors overwrite BaseUri afterwards.
this.BaseUri = new Uri("https://localhost");
this.AcceptLanguage = "en-US";
this.LongRunningOperationRetryTimeout = 30;
this.GenerateClientRequestId = true;
SerializationSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
// Azure error payloads are deserialized into CloudError via this converter.
DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using L10NSharp;
using SIL.Archiving.Generic;
using SIL.Archiving.IMDI.Schema;
using System.Windows.Forms;
namespace SIL.Archiving.IMDI
{
/// <summary>Implements archiving for IMDI repositories</summary>
public class IMDIArchivingDlgViewModel : ArchivingDlgViewModel, ISupportMetadataOnly
{
private readonly IMDIPackage _imdiData; // In-memory model of the IMDI package being built
private string _corpusDirectoryName; // presumably caches the corpus folder name - set/used outside this chunk; confirm
private bool _workerException; // set when the background copy worker throws (see CreateIMDIPackageInWorkerThread)
private string _programPreset; // selected archiving-program preset - used outside this chunk; confirm
private string _otherProgramPath; // user-specified program path when no preset applies - used outside this chunk; confirm
private readonly string _configFileName = Path.Combine(ArchivingFileSystem.SilCommonArchivingDataFolder, "IMDIProgram.config"); // where the program choice is persisted
private string _outputFolder; // base folder where the IMDI file structure is created (backs OutputFolder)
#region Properties
// Localized short display name of this archive type ("IMDI").
internal override string ArchiveType
{
get
{
return LocalizationManager.GetString("DialogBoxes.ArchivingDlg.IMDIArchiveType", "IMDI",
"This is the abbreviation for Isle Metadata Initiative (http://www.mpi.nl/imdi/). " +
"Typically this probably does not need to be localized.");
}
}
/// <summary>Name of the external program (e.g. Arbil) that could be launched
/// after the package is created. Deliberately returns null for now so the
/// launch option is hidden; the commented-out code below is the intended
/// implementation and is kept until the option is re-enabled.</summary>
public override string NameOfProgramToLaunch
{
get
{
// DO NOT SHOW THE LAUNCH OPTION AT THIS TIME
return null;
//if (string.IsNullOrEmpty(PathToProgramToLaunch))
//	return null;
//// Arbil
//if (PathToProgramToLaunch.ToLower().Contains("arbil")) return "Arbil";
//// if not one of the presets, just return the exe name
//string exe = Path.GetFileNameWithoutExtension(PathToProgramToLaunch);
//string dir = Path.GetDirectoryName(PathToProgramToLaunch);
//if (!string.IsNullOrEmpty(dir))
//{
//	dir = Path.GetFileNameWithoutExtension(dir);
//	if (dir.Length > 0 && exe.ToLowerInvariant().Contains(dir.ToLowerInvariant()))
//		return dir;
//}
//return exe;
}
}
/// ------------------------------------------------------------------------------------
/// <summary>Localized overview text shown in the archiving dialog: a general
/// description of IMDI, the app-specific process info, and either "package
/// will be created in ..." or "launch program X" depending on whether
/// NameOfProgramToLaunch is set (currently always null, so the first form).</summary>
public override string InformativeText
{
get
{
// Pick the trailing sentence based on whether a launchable program is known.
string programInfo = string.IsNullOrEmpty(NameOfProgramToLaunch) ?
string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.NoIMDIProgramInfoText",
"The {0} package will be created in {1}.",
"Parameter 0 is 'IMDI'; " +
"Parameter 1 is the path where the package is created."),
ArchiveType, PackagePath)
:
string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.IMDIProgramInfoText",
"This tool will help you use {0} to archive your {1} data. When the {1} package has been " +
"created, you can launch {0} and enter any additional information before doing the actual submission.",
"Parameter 0 is the name of the program that will be launched to further prepare the IMDI data for submission; " +
"Parameter 1 is the name of the calling (host) program (SayMore, FLEx, etc.)"), NameOfProgramToLaunch, AppName);
// Overview sentence + host-app process info + the program/path sentence chosen above.
return string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.IMDIOverviewText",
"{0} ({1}) is a metadata standard to describe multi-media and multi-modal language " +
"resources. The standard provides interoperability for browsable and searchable " +
"corpus structures and resource descriptions.",
"Parameter 0 is 'Isle Metadata Initiative' (the first occurrence will be turned into a hyperlink); " +
"Parameter 1 is 'IMDI'"),
ArchiveInfoHyperlinkText, ArchiveType) +
" " + _appSpecificArchivalProcessInfo +
" " + programInfo;
}
}
/// <summary>Display text ("Isle Metadata Initiative") for the hyperlink shown
/// in the overview message.</summary>
public override string ArchiveInfoHyperlinkText
{
get { return LocalizationManager.GetString("DialogBoxes.ArchivingDlg.IsleMetadataInitiative",
"Isle Metadata Initiative", "Typically this probably does not need to be localized."); }
}
/// ------------------------------------------------------------------------------------
/// <summary>URL of the IMDI web site (from application settings) used as the
/// hyperlink target in the overview message.</summary>
public override string ArchiveInfoUrl
{
get { return Properties.Settings.Default.IMDIWebSite; }
}
public bool MetadataOnly { get; set; }
#endregion
/// ------------------------------------------------------------------------------------
/// <summary>Constructor</summary>
/// <param name="appName">The application name</param>
/// <param name="title">Title of the submission.</param>
/// <param name="id">Identifier for the package being created. Used as the CORPUS name.</param>
/// <param name="appSpecificArchivalProcessInfo">Application can use this to pass
/// additional information that will be displayed to the user in the dialog to explain
/// any application-specific details about the archival process.</param>
/// <param name="corpus">Indicates whether this is for an entire project corpus or a
/// single session</param>
/// <param name="setFilesToArchive">Delegate to request client to call methods to set
/// which files should be archived (this is deferred to allow display of progress message)</param>
/// <param name="outputFolder">Base folder where IMDI file structure is to be created</param>
/// ------------------------------------------------------------------------------------
public IMDIArchivingDlgViewModel(string appName, string title, string id,
string appSpecificArchivalProcessInfo, bool corpus,
Action<ArchivingDlgViewModel> setFilesToArchive, string outputFolder)
: base(appName, title, id, appSpecificArchivalProcessInfo, setFilesToArchive)
{
// Assigning OutputFolder presumably also establishes PackagePath, which is
// consumed immediately below - TODO confirm against the property setter.
OutputFolder = outputFolder;
// Seed the package model with the title/name registered by the base class.
_imdiData = new IMDIPackage(corpus, PackagePath)
{
Title = _titles[_id],
Name = _id
};
}
/// ------------------------------------------------------------------------------------
/// <summary>IMDI requires no archive-specific initialization; always succeeds.</summary>
protected override bool DoArchiveSpecificInitialization()
{
// no-op
return true;
}
/// ------------------------------------------------------------------------------------
/// <summary>Total progress ticks: one for processing each file list plus one
/// for copying each file contained in any of the lists.</summary>
public override int CalculateMaxProgressBarValue()
{
    var listCount = _fileLists.Count;
    var fileCount = _fileLists.Sum(kvp => kvp.Value.Item1.Count());
    return listCount + fileCount;
}
/// ------------------------------------------------------------------------------------
/// <summary>Heading shown for a file group in the dialog. The empty key is
/// reserved for the actors/participants group; all other keys fall through to
/// the base implementation.</summary>
protected override string FileGroupDisplayMessage(string groupKey)
{
    if (groupKey != string.Empty)
        return base.FileGroupDisplayMessage(groupKey);
    return LocalizationManager.GetString("DialogBoxes.ArchivingDlg.IMDIActorsGroup", "Actors",
        "This is the heading displayed in the Archive Using IMDI dialog box for the files for the actors/participants");
}
/// ------------------------------------------------------------------------------------
/// <summary>
/// Sets a description for the specified session in a single language
/// </summary>
/// <param name="sessionId"></param>
/// <param name="description">The abstract description</param>
/// <param name="iso3LanguageCode">ISO 639-3 3-letter language code</param>
/// ------------------------------------------------------------------------------------
/// <summary>
/// Sets a description for the specified session in a single language.
/// </summary>
/// <param name="sessionId">Identifier of the session to describe</param>
/// <param name="description">The abstract description</param>
/// <param name="iso3LanguageCode">ISO 639-3 3-letter language code</param>
/// <exception cref="ArgumentNullException">description or iso3LanguageCode is null</exception>
/// <exception cref="ArgumentException">iso3LanguageCode is not exactly 3 characters</exception>
public void SetSessionDescription(string sessionId, string description, string iso3LanguageCode)
{
    // Guard clauses; check order is preserved so callers observe the same
    // exception when several arguments are bad at once.
    if (description == null)
        throw new ArgumentNullException("description");
    if (iso3LanguageCode == null)
        throw new ArgumentNullException("iso3LanguageCode");
    if (iso3LanguageCode.Length != 3)
    {
        var message = LocalizationManager.GetString("DialogBoxes.ArchivingDlg.ISO3CodeRequired",
            "ISO 639-3 3-letter language code required.",
            "Message displayed when an invalid language code is given.");
        throw new ArgumentException(message, "iso3LanguageCode");
    }
    var localizedDescription = new LanguageString { Value = description, Iso3LanguageId = iso3LanguageCode };
    _imdiData.AddDescription(sessionId, localizedDescription);
}
/// <summary></summary>
/// <param name="descriptions"></param>
/// <summary>Adds one package-level description per dictionary entry; each
/// entry maps a language code to its abstract text.</summary>
protected override void SetAbstract_Impl(IDictionary<string, string> descriptions)
{
    foreach (var pair in descriptions)
    {
        var description = new LanguageString(pair.Value, pair.Key);
        _imdiData.AddDescription(description);
    }
}
/// <summary></summary>
/// <returns></returns>
/// <summary>Returns the package metadata: the serialized base IMDI file.</summary>
public override string GetMetadata()
{
    var baseFile = _imdiData.BaseImdiFile;
    return baseFile.ToString();
}
/// ------------------------------------------------------------------------------------
/// <summary>Launch Arbil or Lamus or whatever</summary>
/// <remarks>need custom launcher here because Arbil is a java program, with no executable on linux</remarks>
/// ------------------------------------------------------------------------------------
/// <summary>Launch Arbil or Lamus or whatever</summary>
/// <remarks>Needs a custom launcher here because Arbil is a java program with
/// no executable on Linux: a .jar path is run via "java", optionally with
/// Arbil-specific command-line arguments. Does nothing if no program path is
/// configured or the file does not exist.</remarks>
internal override void LaunchArchivingProgram()
{
    if (string.IsNullOrEmpty(PathToProgramToLaunch) || !File.Exists(PathToProgramToLaunch))
        return;
    // If it is a .jar file, open it with java. (FIX) Use an ordinal,
    // case-insensitive comparison so ".JAR" is also recognized; the bare
    // EndsWith overload is culture-sensitive (CA1310).
    var isJar = PathToProgramToLaunch.EndsWith(".jar", StringComparison.OrdinalIgnoreCase);
    var exePath = isJar ? "java" : PathToProgramToLaunch;
    var args = string.Empty;
    if (isJar)
    {
        // are there additional command line parameters for this program?
        if (PathToProgramToLaunch.ToLower().Contains("arbil"))
            args = string.Format(ArchivingPrograms.ArbilCommandLineArgs, PathToProgramToLaunch);
        else
            args = PathToProgramToLaunch;
    }
    try
    {
        // (FIX) Process is IDisposable; disposing the wrapper releases the
        // handle without killing the launched program.
        using (var prs = new Process { StartInfo = { FileName = exePath, Arguments = args } })
            prs.Start();
    }
    catch (Exception e)
    {
        ReportError(e, string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.StartingIMDIErrorMsg",
            "There was an error attempting to open the archive package in {0}."), PathToProgramToLaunch));
    }
}
#region Create IMDI package in worker thread
/// <summary>Creates the IMDI package: fills in metadata required by Arbil,
/// writes the IMDI XML files, and (unless MetadataOnly) copies the content
/// files. On success the path of the main exported file is placed on the
/// clipboard. Returns true on success.</summary>
public override bool CreatePackage()
{
IsBusy = true;
// check for missing data that is required by Arbil
var success = _imdiData.SetMissingInformation();
// write the xml files
if (success)
success = _imdiData.CreateIMDIPackage();
// copy the content files
if (success && !MetadataOnly)
success = CreateIMDIPackage();
CleanUp();
if (success)
{
// copy the path to the imdi file to the clipboard
// SP-818: Crash in IMDI export when dialog tries to put string on clipboard
// 18 FEB 2014, Phil Hopper: I found this possible solution using retries on StackOverflow
// http://stackoverflow.com/questions/5707990/requested-clipboard-operation-did-not-succeed
//Clipboard.SetData(DataFormats.Text, _imdiData.MainExportFile);
Clipboard.SetDataObject(_imdiData.MainExportFile, true, 3, 500);
var successMsg = LocalizationManager.GetString("DialogBoxes.ArchivingDlg.ReadyToCallIMDIMsg",
"Exported to {0}. This path is now on your clipboard. If you are using Arbil, go to File, Import, then paste this path in.");
DisplayMessage(string.Format(successMsg, _imdiData.MainExportFile), MessageType.Success);
}
IsBusy = false;
return success;
}
/// <summary>Copies the package's content files on a BackgroundWorker while
/// pumping UI events, so the dialog stays responsive. Returns true if the
/// copy completed without cancellation or a worker exception.</summary>
public bool CreateIMDIPackage()
{
try
{
using (_worker = new BackgroundWorker())
{
// Reset state flags that the worker and its progress handler update.
_cancelProcess = false;
_workerException = false;
_worker.ProgressChanged += HandleBackgroundWorkerProgressChanged;
_worker.WorkerReportsProgress = true;
_worker.WorkerSupportsCancellation = true;
_worker.DoWork += CreateIMDIPackageInWorkerThread;
_worker.RunWorkerAsync();
// Busy-wait message pump: keeps the WinForms UI alive until the worker ends.
while (_worker.IsBusy)
Application.DoEvents();
}
}
catch (Exception e)
{
ReportError(e, LocalizationManager.GetString(
"DialogBoxes.ArchivingDlg.CreatingIMDIPackageErrorMsg",
"There was a problem starting process to create IMDI package."));
return false;
}
finally
{
// The worker field is only valid while this method runs.
_worker = null;
}
return !_cancelProcess && !_workerException;
}
/// <summary>Cancels package creation (via the base class) and then performs
/// cleanup of work done so far.</summary>
public override void Cancel()
{
base.Cancel();
CleanUp();
}
/// <summary></summary>
/// <summary>Routes worker progress reports to the dialog: error pairs are
/// reported as errors, percentage-0 strings are phase headings (no progress
/// tick), and other strings are per-file detail lines that also advance the
/// progress bar.</summary>
void HandleBackgroundWorkerProgressChanged(object sender, ProgressChangedEventArgs e)
{
    // Ignore empty notifications and anything arriving after cancellation.
    if (e.UserState == null || _cancelProcess)
        return;
    // An (exception, message) pair means the worker is reporting an error.
    if (e.UserState is KeyValuePair<Exception, string>)
    {
        var errorReport = (KeyValuePair<Exception, string>)e.UserState;
        ReportError(errorReport.Key, errorReport.Value);
        return;
    }
    var text = e.UserState as string;
    if (!string.IsNullOrEmpty(text))
    {
        if (e.ProgressPercentage == 0)
        {
            // Phase heading: shown as a success message, no progress tick.
            DisplayMessage(text, MessageType.Success);
            return;
        }
        DisplayMessage(text, MessageType.Detail);
    }
    if (IncrementProgressBarAction != null)
        IncrementProgressBarAction();
}
/// <summary>Worker-thread body for CreateIMDIPackage: builds the map of
/// source-to-destination paths for every session resource, then copies the
/// files, reporting progress (and any exception) back through ReportProgress.
/// Honors _cancelProcess between items.</summary>
private void CreateIMDIPackageInWorkerThread(object sender, DoWorkEventArgs e)
{
try
{
var outputDirectory = Path.Combine(_imdiData.PackagePath, NormalizeDirectoryName(_imdiData.Name));
// Name the thread (once) to aid debugging.
if (Thread.CurrentThread.Name == null)
Thread.CurrentThread.Name = "CreateIMDIPackageInWorkerThread";
// Percentage 0 = phase heading (see HandleBackgroundWorkerProgressChanged).
_worker.ReportProgress(0, LocalizationManager.GetString("DialogBoxes.ArchivingDlg.PreparingFilesMsg",
"Analyzing component files"));
var filesToCopy = new Dictionary<string, string>();
// get files from each session
foreach (var sess in _imdiData.Sessions)
{
Session session = (Session) sess;
_worker.ReportProgress(1 /* actual value ignored, progress just increments */,
session.Name);
// get files to copy
foreach (var file in session.Resources.MediaFile)
{
// create sub directory
var fullSessionDirName = Path.Combine(outputDirectory, NormalizeDirectoryName(file.OutputDirectory));
Directory.CreateDirectory(fullSessionDirName);
var newFileName = NormalizeFilename(string.Empty, Path.GetFileName(file.FullPathAndFileName));
filesToCopy[file.FullPathAndFileName] = Path.Combine(fullSessionDirName, newFileName);
}
// Written resources are collected the same way as media files.
foreach (var file in session.Resources.WrittenResource)
{
// create sub directory
var fullSessionDirName = Path.Combine(outputDirectory, NormalizeDirectoryName(file.OutputDirectory));
Directory.CreateDirectory(fullSessionDirName);
var newFileName = NormalizeFilename(string.Empty, Path.GetFileName(file.FullPathAndFileName));
filesToCopy[file.FullPathAndFileName] = Path.Combine(fullSessionDirName, newFileName);
}
if (_cancelProcess)
return;
}
_worker.ReportProgress(0, LocalizationManager.GetString("DialogBoxes.ArchivingDlg.CopyingFilesMsg",
"Copying files"));
// copy the files now
foreach (var fileToCopy in filesToCopy)
{
if (_cancelProcess)
return;
_worker.ReportProgress(1 /* actual value ignored, progress just increments */,
Path.GetFileName(fileToCopy.Key));
// The host application may take over copying of individual files.
if (FileCopyOverride != null)
{
try
{
if (FileCopyOverride(this, fileToCopy.Key, fileToCopy.Value))
{
// Trust but verify: the override claimed success, so the file must exist.
if (!File.Exists(fileToCopy.Value))
throw new FileNotFoundException("Calling application claimed to copy file but didn't", fileToCopy.Value);
continue;
}
}
catch (Exception error)
{
var msg = string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.FileExcludedFromPackage",
"File excluded from {0} package: ", "Parameter is the type of archive (e.g., RAMP/IMDI)"), ArchiveType) +
fileToCopy.Value;
ReportError(error, msg);
}
}
// Don't use File.Copy because it's asynchronous.
CopyFile(fileToCopy.Key, fileToCopy.Value);
}
_worker.ReportProgress(0, string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.SavingFilesInPackageMsg",
"Saving files in {0} package", "Parameter is the type of archive (e.g., RAMP/IMDI)"), ArchiveType));
}
catch (Exception exception)
{
// Marshal the failure back to the UI thread as an (exception, message) pair.
_worker.ReportProgress(0, new KeyValuePair<Exception, string>(exception,
string.Format(LocalizationManager.GetString("DialogBoxes.ArchivingDlg.CreatingArchiveErrorMsg",
"There was an error attempting to create the {0} package.", "Parameter is the type of archive (e.g., IMDI)"), ArchiveType)));
_workerException = true;
}
}
#endregion
/// <summary>Only Latin characters, URL compatible</summary>
/// <param name="key">Unused here; present to satisfy the base-class signature.</param>
/// <param name="fileName">File name to normalize.</param>
/// <returns>The normalized name wrapped in a StringBuilder, per the override's contract.</returns>
protected override StringBuilder DoArchiveSpecificFilenameNormalization(string key, string fileName)
{
return new StringBuilder(NormalizeFileName(fileName));
}
/// <summary>Only Latin characters, URL compatible</summary>
/// <remarks>
/// Delegates to the ToLatinOnly extension. NOTE(review): the three arguments appear
/// to control replacement/substitution and characters to preserve ("." kept here) —
/// confirm exact semantics against the ToLatinOnly implementation.
/// </remarks>
internal static string NormalizeFileName(string fileName)
{
return fileName.ToLatinOnly("_", "+", ".");
}
/// <summary>Only Latin characters, URL compatible</summary>
/// <remarks>
/// NOTE(review): same ToLatinOnly normalization as file names but with ".-" as the
/// final argument (presumably characters to keep for directories) — verify against
/// the ToLatinOnly implementation.
/// </remarks>
internal static string NormalizeDirectoryName(string dirName)
{
return dirName.ToLatinOnly("_", "_", ".-");
}
/// <summary>Performs clean-up for the class</summary>
/// <remarks>Currently a no-op placeholder; intended for deleting temp files, etc.</remarks>
public void CleanUp()
{
// delete temp files, etc
}
/// <summary>
/// Returns the normalized name to use for the output corpus folder (a sub-directory
/// of <c>OutputFolder</c>). Computed once per instance; a numeric suffix is appended
/// until a name is found that does not already exist on disk.
/// </summary>
public string CorpusDirectoryName
{
    get
    {
        // Make sure the base output directory exists before probing inside it.
        if (!Directory.Exists(OutputFolder))
        {
            Directory.CreateDirectory(OutputFolder);
            if (!Directory.Exists(OutputFolder))
                throw new DirectoryNotFoundException(string.Format("The path {0} was not found.", OutputFolder));
        }

        if (string.IsNullOrEmpty(_corpusDirectoryName))
        {
            // Base name is the title plus today's date; disambiguate with " 001", " 002", ...
            var baseName = NormalizeDirectoryName(_titles[_id] + " " + DateTime.Today.ToString("yyyy-MM-dd"));
            var candidate = baseName;
            for (var suffix = 1; Directory.Exists(Path.Combine(OutputFolder, candidate)); suffix++)
                candidate = NormalizeDirectoryName(baseName + " " + suffix.ToString("000"));
            _corpusDirectoryName = candidate;
        }
        return _corpusDirectoryName;
    }
}
/// <summary>Returns the session with the given id, creating and registering it first if necessary.</summary>
/// <param name="sessionId">Name identifying the session to find or add.</param>
public override IArchivingSession AddSession(string sessionId)
{
    // Reuse an existing session with this name if one has already been added.
    var existing = _imdiData.Sessions.FirstOrDefault(s => s.Name == sessionId);
    if (existing != null)
        return existing;

    var newSession = new Session { Name = sessionId };
    _imdiData.Sessions.Add(newSession);
    return newSession;
}
public override IArchivingPackage ArchivingPackage { get { return _imdiData; } }
/// <summary>
/// Full path of the program used to open the finished package. The getter resolves
/// the current <see cref="ProgramPreset"/>: "Arbil" maps to the Arbil executable
/// location; any other preset falls back to <see cref="OtherProgramPath"/>.
/// Hides the base-class property (note the 'new' modifier).
/// </summary>
public new string PathToProgramToLaunch
{
get
{
switch (ProgramPreset)
{
case "Arbil":
return ArchivingPrograms.GetArbilExeFileLocation();
default:
return OtherProgramPath;
}
}
set
{
// this is just for compatibility
// (unlike the OtherProgramPath setter, this intentionally does not persist the value)
_otherProgramPath = value;
}
}
/// <summary>
/// Name of the preset program used to open IMDI packages (e.g. "Arbil").
/// Reads lazily load the saved config file; writes are persisted immediately.
/// </summary>
public string ProgramPreset
{
get
{
// An empty value means the saved settings have not been loaded yet.
if (string.IsNullOrEmpty(_programPreset))
GetSavedValues();
return _programPreset;
}
set
{
_programPreset = value;
SaveProgramValues();
}
}
/// <summary>
/// Path of a user-specified (non-preset) program used to open IMDI packages.
/// Reads lazily load the saved config file; writes are persisted immediately.
/// </summary>
public string OtherProgramPath
{
get
{
// _programPreset (not _otherProgramPath) is the "settings loaded" sentinel:
// GetSavedValues always leaves it non-empty, while this path may legitimately be empty.
if (string.IsNullOrEmpty(_programPreset))
GetSavedValues();
return _otherProgramPath;
}
set
{
_otherProgramPath = value;
SaveProgramValues();
}
}
/// <summary>
/// Loads the program selection from the config file (simple "key=value" lines, as
/// written by SaveProgramValues) and applies defaults for anything not saved.
/// </summary>
private void GetSavedValues()
{
    if (File.Exists(_configFileName))
    {
        foreach (var line in File.ReadAllLines(_configFileName))
        {
            // Split on the first '=' only; values may themselves contain '='.
            var parts = line.Split(new[] { '=' }, 2);
            if (parts.Length != 2)
                continue;

            if (parts[0] == "ProgramPreset")
                _programPreset = parts[1];
            else if (parts[0] == "OtherProgramPath")
                _otherProgramPath = parts[1];
        }
    }

    // Fall back to sensible defaults when nothing (or only part) was saved.
    if (string.IsNullOrEmpty(_programPreset))
        _programPreset = "Arbil";
    if (_otherProgramPath == null)
        _otherProgramPath = string.Empty;
}
/// <summary>Persists the current program selection as "key=value" lines in the config file.</summary>
private void SaveProgramValues()
{
    var lines = new[]
    {
        "ProgramPreset=" + ProgramPreset,
        "OtherProgramPath=" + OtherProgramPath
    };
    File.WriteAllLines(_configFileName, lines);
}
/// <summary>
/// Base folder under which the corpus directory is created. Setting it recomputes
/// <c>PackagePath</c> (note: evaluating CorpusDirectoryName may create directories)
/// and pushes the new path into the IMDI data if it exists.
/// </summary>
public string OutputFolder
{
    get { return _outputFolder; }
    set
    {
        _outputFolder = value;

        if (string.IsNullOrEmpty(value))
            PackagePath = CorpusDirectoryName;
        else
            PackagePath = Path.Combine(value, CorpusDirectoryName);

        if (_imdiData != null)
            _imdiData.PackagePath = PackagePath;
    }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.Linq;
using AutoRest.Core.Validation;
using AutoRest.Swagger.Validation;
using System.Collections.Generic;
using AutoRest.Core.Utilities;
namespace AutoRest.Swagger.Model
{
/// <summary>
/// Describes a single API operation on a path.
/// </summary>
[Rule(typeof(OperationDescriptionRequired))]
public class Operation : SwaggerBase
{
    private string _description;
    private string _summary;

    public Operation()
    {
        Consumes = new List<string>();
        Produces = new List<string>();
    }

    /// <summary>
    /// A list of tags for API documentation control.
    /// </summary>
    public IList<string> Tags { get; set; }

    /// <summary>
    /// A friendly serviceTypeName for the operation. The id MUST be unique among all
    /// operations described in the API. Tools and libraries MAY use the
    /// operation id to uniquely identify an operation.
    /// </summary>
    [Rule(typeof(OneUnderscoreInOperationId))]
    [Rule(typeof(OperationIdNounInVerb))]
    public string OperationId { get; set; }

    /// <summary>
    /// A short summary of the operation. Control characters are stripped on assignment.
    /// </summary>
    public string Summary
    {
        get { return _summary; }
        set { _summary = value.StripControlCharacters(); }
    }

    /// <summary>
    /// A verbose description of the operation. Control characters are stripped on assignment.
    /// </summary>
    [Rule(typeof(AvoidMsdnReferences))]
    public string Description
    {
        get { return _description; }
        set { _description = value.StripControlCharacters(); }
    }

    /// <summary>
    /// Additional external documentation for this operation.
    /// </summary>
    public ExternalDoc ExternalDocs { get; set; }

    /// <summary>
    /// A list of MIME types the operation can consume.
    /// </summary>
    [CollectionRule(typeof(NonAppJsonTypeWarning))]
    public IList<string> Consumes { get; set; }

    /// <summary>
    /// A list of MIME types the operation can produce.
    /// </summary>
    [CollectionRule(typeof(NonAppJsonTypeWarning))]
    public IList<string> Produces { get; set; }

    /// <summary>
    /// A list of parameters that are applicable for this operation.
    /// If a parameter is already defined at the Path Item, the
    /// new definition will override it, but can never remove it.
    /// </summary>
    [CollectionRule(typeof(OperationParametersValidation))]
    [CollectionRule(typeof(AnonymousParameterTypes))]
    public IList<SwaggerParameter> Parameters { get; set; }

    /// <summary>
    /// The list of possible responses as they are returned from executing this operation.
    /// </summary>
    [Rule(typeof(ResponseRequired))]
    public Dictionary<string, OperationResponse> Responses { get; set; }

    /// <summary>
    /// The transfer protocol for the operation.
    /// </summary>
    [CollectionRule(typeof(SupportedSchemesWarning))]
    public IList<TransferProtocolScheme> Schemes { get; set; }

    public bool Deprecated { get; set; }

    /// <summary>
    /// A declaration of which security schemes are applied for this operation.
    /// The list of values describes alternative security schemes that can be used
    /// (that is, there is a logical OR between the security requirements).
    /// This definition overrides any declared top-level security. To remove a
    /// top-level security declaration, an empty array can be used.
    /// </summary>
    public IList<Dictionary<string, List<string>>> Security { get; set; }

    /// <summary>
    /// Compare a modified document node (this) to a previous one and look for breaking as well as non-breaking changes.
    /// </summary>
    /// <param name="context">The modified document context.</param>
    /// <param name="previous">The original document model.</param>
    /// <returns>A list of messages from the comparison.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="previous"/> is null or not an Operation.</exception>
    public override IEnumerable<ComparisonMessage> Compare(ComparisonContext context, SwaggerBase previous)
    {
        var priorOperation = previous as Operation;
        if (priorOperation == null)
        {
            // Fix: was ArgumentException with the parameter name passed as the *message*.
            // ArgumentNullException derives from ArgumentException, so existing catch clauses still match.
            throw new ArgumentNullException("previous");
        }

        base.Compare(context, previous);

        // Fix: string.Equals tolerates a null OperationId on either side
        // (the previous instance call would throw NullReferenceException).
        if (!string.Equals(OperationId, priorOperation.OperationId))
        {
            context.LogBreakingChange(ComparisonMessages.ModifiedOperationId);
        }

        CheckParameters(context, priorOperation);

        if (Responses != null && priorOperation.Responses != null)
        {
            // New or changed response codes.
            foreach (var response in Responses)
            {
                var oldResponse = priorOperation.FindResponse(response.Key, priorOperation.Responses);
                context.Push(response.Key);
                if (oldResponse == null)
                {
                    context.LogBreakingChange(ComparisonMessages.AddingResponseCode, response.Key);
                }
                else
                {
                    response.Value.Compare(context, oldResponse);
                }
                context.Pop();
            }

            // Response codes that disappeared.
            foreach (var response in priorOperation.Responses)
            {
                var newResponse = this.FindResponse(response.Key, this.Responses);
                if (newResponse == null)
                {
                    context.Push(response.Key);
                    context.LogBreakingChange(ComparisonMessages.RemovedResponseCode, response.Key);
                    context.Pop();
                }
            }
        }

        return context.Messages;
    }

    /// <summary>
    /// Checks that no parameters were removed or reordered (comparing them otherwise)
    /// and that no required parameters were added.
    /// </summary>
    private void CheckParameters(ComparisonContext context, Operation priorOperation)
    {
        // Check that no parameters were removed or reordered, and compare them if it's not the case.
        var currentRoot = (context.CurrentRoot as ServiceDefinition);
        var previousRoot = (context.PreviousRoot as ServiceDefinition);

        foreach (var oldParam in (priorOperation.Parameters ?? Enumerable.Empty<SwaggerParameter>())
            .Select(p => string.IsNullOrEmpty(p.Reference) ? p : FindReferencedParameter(p.Reference, previousRoot.Parameters)))
        {
            SwaggerParameter newParam = FindParameter(oldParam.Name, Parameters, currentRoot.Parameters);
            context.Push(oldParam.Name);
            if (newParam != null)
            {
                newParam.Compare(context, oldParam);
            }
            else if (oldParam.IsRequired)
            {
                // Removing a required parameter breaks existing clients.
                context.LogBreakingChange(ComparisonMessages.RemovedRequiredParameter, oldParam.Name);
            }
            context.Pop();
        }

        // Check that no required parameters were added.
        // (The Where clause already filters out nulls, so no extra null check is needed.)
        foreach (var newParam in (Parameters ?? Enumerable.Empty<SwaggerParameter>())
            .Select(p => string.IsNullOrEmpty(p.Reference) ? p : FindReferencedParameter(p.Reference, currentRoot.Parameters))
            .Where(p => p != null && p.IsRequired))
        {
            SwaggerParameter oldParam = FindParameter(newParam.Name, priorOperation.Parameters, previousRoot.Parameters);
            if (oldParam == null)
            {
                context.Push(newParam.Name);
                context.LogBreakingChange(ComparisonMessages.AddingRequiredParameter, newParam.Name);
                context.Pop();
            }
        }
    }

    /// <summary>
    /// Finds a parameter by name in the given operation-level list, resolving
    /// $ref-style references against the client-level (global) parameters.
    /// </summary>
    private SwaggerParameter FindParameter(string name, IEnumerable<SwaggerParameter> operationParameters, IDictionary<string, SwaggerParameter> clientParameters)
    {
        // Fix: guard on the collection actually being enumerated. The old check
        // ("Parameters != null") tested THIS operation's list even when the caller
        // passed priorOperation.Parameters, causing an NRE or a silently skipped search.
        if (operationParameters != null)
        {
            foreach (var param in operationParameters)
            {
                if (name.Equals(param.Name))
                    return param;

                var pRef = FindReferencedParameter(param.Reference, clientParameters);
                if (pRef != null && name.Equals(pRef.Name))
                {
                    return pRef;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Looks up a response by status code in the given response dictionary.
    /// </summary>
    private OperationResponse FindResponse(string name, IDictionary<string, OperationResponse> responses)
    {
        // Fix: search the dictionary that was passed in; the parameter was previously
        // ignored in favor of this.Responses (both call sites happened to pass the
        // matching dictionary, so observable behavior is unchanged).
        OperationResponse response = null;
        if (responses != null)
            responses.TryGetValue(name, out response);
        return response;
    }

    /// <summary>
    /// Resolves a "#/parameters/&lt;name&gt;" reference against the client-level parameters.
    /// Returns null for non-local or unresolvable references.
    /// </summary>
    private static SwaggerParameter FindReferencedParameter(string reference, IDictionary<string, SwaggerParameter> parameters)
    {
        if (reference != null && reference.StartsWith("#", StringComparison.Ordinal))
        {
            var parts = reference.Split('/');
            if (parts.Length == 3 && parts[1].Equals("parameters"))
            {
                SwaggerParameter p = null;
                if (parameters.TryGetValue(parts[2], out p))
                {
                    return p;
                }
            }
        }
        return null;
    }
}
}
| |
using System;
using System.Linq;
using UnityEditor;
using UnityEditor.AnimatedValues;
using UnityEngine;
using UnityWeld.Binding;
using UnityWeld.Binding.Internal;
namespace UnityWeld_Editor
{
/// <summary>
/// Custom inspector for <see cref="TwoWayPropertyBinding"/>: lets the user pick the
/// view event/property, the view-model property, and the three adapters (view,
/// view-model, exception) with their options.
/// </summary>
[CustomEditor(typeof(TwoWayPropertyBinding))]
class TwoWayPropertyBindingEditor : BaseBindingEditor
{
    private TwoWayPropertyBinding targetScript;

    // Fade animations for the three collapsible adapter-options sections.
    private AnimBool viewAdapterOptionsFade;
    private AnimBool viewModelAdapterOptionsFade;
    private AnimBool exceptionAdapterOptionsFade;

    // Whether properties in the target script differ from the value in the prefab.
    // Needed to know which ones to display as bold in the inspector.
    private bool viewEventPrefabModified;
    private bool viewPropertyPrefabModified;
    private bool viewAdapterPrefabModified;
    private bool viewAdapterOptionsPrefabModified;
    private bool viewModelPropertyPrefabModified;
    private bool viewModelAdapterPrefabModified;
    private bool viewModelAdapterOptionsPrefabModified;
    private bool exceptionPropertyPrefabModified;
    private bool exceptionAdapterPrefabModified;
    private bool exceptionAdapterOptionsPrefabModified;

    private void OnEnable()
    {
        targetScript = (TwoWayPropertyBinding)target;

        Type adapterType;

        // Initialize each fade to whether its adapter currently has options to show.
        viewAdapterOptionsFade = new AnimBool(ShouldShowAdapterOptions(
            targetScript.ViewAdapterTypeName,
            out adapterType
        ));
        viewModelAdapterOptionsFade = new AnimBool(ShouldShowAdapterOptions(
            targetScript.ViewModelAdapterTypeName,
            out adapterType
        ));
        exceptionAdapterOptionsFade = new AnimBool(ShouldShowAdapterOptions(
            targetScript.ExceptionAdapterTypeName,
            out adapterType
        ));

        viewAdapterOptionsFade.valueChanged.AddListener(Repaint);
        viewModelAdapterOptionsFade.valueChanged.AddListener(Repaint);
        exceptionAdapterOptionsFade.valueChanged.AddListener(Repaint);
    }

    private void OnDisable()
    {
        // Unsubscribe to avoid leaking listeners when the editor is destroyed.
        viewAdapterOptionsFade.valueChanged.RemoveListener(Repaint);
        viewModelAdapterOptionsFade.valueChanged.RemoveListener(Repaint);
        exceptionAdapterOptionsFade.valueChanged.RemoveListener(Repaint);
    }

    public override void OnInspectorGUI()
    {
        UpdatePrefabModifiedProperties();

        // Prefab-overridden properties are rendered with a bold label.
        var defaultLabelStyle = EditorStyles.label.fontStyle;
        EditorStyles.label.fontStyle = viewEventPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        ShowEventMenu(
            UnityEventWatcher.GetBindableEvents(targetScript.gameObject)
                .OrderBy(evt => evt.Name)
                .ToArray(),
            updatedValue => targetScript.ViewEventName = updatedValue,
            targetScript.ViewEventName
        );

        EditorStyles.label.fontStyle = viewPropertyPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        Type viewPropertyType;
        ShowViewPropertyMenu(
            new GUIContent("View property", "Property on the view to bind to"),
            PropertyFinder.GetBindableProperties(targetScript.gameObject)
                .OrderBy(prop => prop.ViewModelTypeName)
                .ThenBy(prop => prop.MemberName)
                .ToArray(),
            updatedValue => targetScript.ViewPropertName = updatedValue,
            targetScript.ViewPropertName,
            out viewPropertyType
        );

        // Don't let the user set other options until they've set the event and view property.
        var guiPreviouslyEnabled = GUI.enabled;
        if (string.IsNullOrEmpty(targetScript.ViewEventName)
            || string.IsNullOrEmpty(targetScript.ViewPropertName))
        {
            GUI.enabled = false;
        }

        var viewAdapterTypeNames = GetAdapterTypeNames(
            type => viewPropertyType == null ||
                TypeResolver.FindAdapterAttribute(type).OutputType == viewPropertyType
        );

        EditorStyles.label.fontStyle = viewAdapterPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        ShowAdapterMenu(
            new GUIContent(
                "View adapter",
                "Adapter that converts values sent from the view-model to the view."
            ),
            viewAdapterTypeNames,
            targetScript.ViewAdapterTypeName,
            newValue =>
            {
                // Get rid of old adapter options if we changed the type of the adapter.
                if (newValue != targetScript.ViewAdapterTypeName)
                {
                    Undo.RecordObject(targetScript, "Set view adapter options");
                    targetScript.ViewAdapterOptions = null;
                }

                UpdateProperty(
                    updatedValue => targetScript.ViewAdapterTypeName = updatedValue,
                    targetScript.ViewAdapterTypeName,
                    newValue,
                    "Set view adapter"
                );
            }
        );

        EditorStyles.label.fontStyle = viewAdapterOptionsPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        Type viewAdapterType;
        viewAdapterOptionsFade.target = ShouldShowAdapterOptions(
            targetScript.ViewAdapterTypeName,
            out viewAdapterType
        );
        ShowAdapterOptionsMenu(
            "View adapter options",
            viewAdapterType,
            options => targetScript.ViewAdapterOptions = options,
            targetScript.ViewAdapterOptions,
            viewAdapterOptionsFade.faded
        );

        EditorGUILayout.Space();

        EditorStyles.label.fontStyle = viewModelPropertyPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        // Only offer view-model properties whose type matches the (adapted) view property type.
        var adaptedViewPropertyType = AdaptTypeBackward(
            viewPropertyType,
            targetScript.ViewAdapterTypeName
        );
        ShowViewModelPropertyMenu(
            new GUIContent(
                "View-model property",
                "Property on the view-model to bind to."
            ),
            TypeResolver.FindBindableProperties(targetScript),
            updatedValue => targetScript.ViewModelPropertyName = updatedValue,
            targetScript.ViewModelPropertyName,
            prop => prop.PropertyType == adaptedViewPropertyType
        );

        var viewModelAdapterTypeNames = GetAdapterTypeNames(
            type => adaptedViewPropertyType == null ||
                TypeResolver.FindAdapterAttribute(type).OutputType == adaptedViewPropertyType
        );

        EditorStyles.label.fontStyle = viewModelAdapterPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        ShowAdapterMenu(
            new GUIContent(
                "View-model adapter",
                "Adapter that converts from the view back to the view-model"
            ),
            viewModelAdapterTypeNames,
            targetScript.ViewModelAdapterTypeName,
            newValue =>
            {
                if (newValue != targetScript.ViewModelAdapterTypeName)
                {
                    Undo.RecordObject(targetScript, "Set view-model adapter options");
                    targetScript.ViewModelAdapterOptions = null;
                }

                UpdateProperty(
                    updatedValue => targetScript.ViewModelAdapterTypeName = updatedValue,
                    targetScript.ViewModelAdapterTypeName,
                    newValue,
                    "Set view-model adapter"
                );
            }
        );

        EditorStyles.label.fontStyle = viewModelAdapterOptionsPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        Type viewModelAdapterType;
        viewModelAdapterOptionsFade.target = ShouldShowAdapterOptions(
            targetScript.ViewModelAdapterTypeName,
            out viewModelAdapterType
        );
        ShowAdapterOptionsMenu(
            "View-model adapter options",
            viewModelAdapterType,
            options => targetScript.ViewModelAdapterOptions = options,
            targetScript.ViewModelAdapterOptions,
            viewModelAdapterOptionsFade.faded
        );

        EditorGUILayout.Space();

        // Exception adapters must accept System.Exception as their input type.
        // (Renamed from the misspelled "expectionAdapterTypeNames".)
        var exceptionAdapterTypeNames = GetAdapterTypeNames(
            type => TypeResolver.FindAdapterAttribute(type).InputType == typeof(Exception)
        );

        EditorStyles.label.fontStyle = exceptionPropertyPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        var adaptedExceptionPropertyType = AdaptTypeForward(
            typeof(Exception),
            targetScript.ExceptionAdapterTypeName
        );
        ShowViewModelPropertyMenuWithNone(
            new GUIContent(
                "Exception property",
                "Property on the view-model to bind the exception to."
            ),
            TypeResolver.FindBindableProperties(targetScript),
            updatedValue => targetScript.ExceptionPropertyName = updatedValue,
            targetScript.ExceptionPropertyName,
            prop => prop.PropertyType == adaptedExceptionPropertyType
        );

        EditorStyles.label.fontStyle = exceptionAdapterPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        ShowAdapterMenu(
            new GUIContent(
                "Exception adapter",
                "Adapter that handles exceptions thrown by the view-model adapter"
            ),
            exceptionAdapterTypeNames,
            targetScript.ExceptionAdapterTypeName,
            newValue =>
            {
                if (newValue != targetScript.ExceptionAdapterTypeName)
                {
                    Undo.RecordObject(targetScript, "Set exception adapter options");
                    targetScript.ExceptionAdapterOptions = null;
                }

                UpdateProperty(
                    updatedValue => targetScript.ExceptionAdapterTypeName = updatedValue,
                    targetScript.ExceptionAdapterTypeName,
                    newValue,
                    "Set exception adapter"
                );
            }
        );

        EditorStyles.label.fontStyle = exceptionAdapterOptionsPrefabModified
            ? FontStyle.Bold
            : defaultLabelStyle;

        Type exceptionAdapterType;
        exceptionAdapterOptionsFade.target = ShouldShowAdapterOptions(
            targetScript.ExceptionAdapterTypeName,
            out exceptionAdapterType
        );
        ShowAdapterOptionsMenu(
            "Exception adapter options",
            exceptionAdapterType,
            options => targetScript.ExceptionAdapterOptions = options,
            targetScript.ExceptionAdapterOptions,
            exceptionAdapterOptionsFade.faded
        );

        // Restore GUI state for whatever is drawn after this inspector.
        EditorStyles.label.fontStyle = defaultLabelStyle;
        GUI.enabled = guiPreviouslyEnabled;
    }

    /// <summary>
    /// Check whether each of the properties on the object have been changed
    /// from the value in the prefab.
    /// </summary>
    private void UpdatePrefabModifiedProperties()
    {
        var property = serializedObject.GetIterator();
        // Need to call Next(true) to get the first child. Once we have it, Next(false)
        // will iterate through the properties.
        property.Next(true);
        do
        {
            switch (property.name)
            {
                case "viewEventName":
                    viewEventPrefabModified = property.prefabOverride;
                    break;

                case "viewPropertyName":
                    viewPropertyPrefabModified = property.prefabOverride;
                    break;

                case "viewAdapterTypeName":
                    viewAdapterPrefabModified = property.prefabOverride;
                    break;

                case "viewAdapterOptions":
                    viewAdapterOptionsPrefabModified = property.prefabOverride;
                    break;

                case "viewModelPropertyName":
                    viewModelPropertyPrefabModified = property.prefabOverride;
                    break;

                case "viewModelAdapterTypeName":
                    viewModelAdapterPrefabModified = property.prefabOverride;
                    break;

                case "viewModelAdapterOptions":
                    viewModelAdapterOptionsPrefabModified = property.prefabOverride;
                    break;

                case "exceptionPropertyName":
                    exceptionPropertyPrefabModified = property.prefabOverride;
                    break;

                case "exceptionAdapterTypeName":
                    exceptionAdapterPrefabModified = property.prefabOverride;
                    break;

                case "exceptionAdapterOptions":
                    exceptionAdapterOptionsPrefabModified = property.prefabOverride;
                    break;

                default:
                    break;
            }
        }
        while (property.Next(false));
    }
}
}
| |
#region License, Terms and Author(s)
//
// ELMAH - Error Logging Modules and Handlers for ASP.NET
// Copyright (c) 2004-9 Atif Aziz. All rights reserved.
//
// Author(s):
//
// Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Modified by David Duffett
// It now catches HttpRequestValidationExceptions thrown by .NET 4.0
// when trying to access the Request.QueryString and Request.Form properties.
// These exceptions are logged without this data, but at least do not
// cause logging to fail.
//
#endregion
using System.Diagnostics;
using System.Text;
[assembly: Elmah.Scc("$Id: Error.cs 776 2011-01-12 21:09:24Z azizatif $")]
namespace Elmah
{
#region Imports
using System;
using System.Security.Principal;
using System.Web;
using System.Xml;
using Thread = System.Threading.Thread;
using NameValueCollection = System.Collections.Specialized.NameValueCollection;
#endregion
/// <summary>
/// Represents a logical application error (as opposed to the actual
/// exception it may be representing).
/// </summary>
[ Serializable ]
public sealed class Error : ICloneable
{
private readonly Exception _exception;
private string _applicationName;
private string _hostName;
private string _typeName;
private string _source;
private string _message;
private string _detail;
private string _user;
private DateTime _time;
private int _statusCode;
private string _webHostHtmlMessage;
private NameValueCollection _serverVariables;
private NameValueCollection _queryString;
private NameValueCollection _form;
private NameValueCollection _cookies;
/// <summary>
/// Initializes a new instance of the <see cref="Error"/> class.
/// </summary>
public Error() {}
/// <summary>
/// Initializes a new instance of the <see cref="Error"/> class
/// from a given <see cref="Exception"/> instance.
/// </summary>
public Error(Exception e) :
this(e, null) {}
/// <summary>
/// Initializes a new instance of the <see cref="Error"/> class
/// from a given <see cref="Exception"/> instance and
/// <see cref="HttpContext"/> instance representing the HTTP
/// context during the exception.
/// </summary>
public Error(Exception e, HttpContext context)
{
if (e == null)
throw new ArgumentNullException("e");
_exception = e;
Exception baseException = e.GetBaseException();
//
// Load the basic information.
//
_hostName = Environment.TryGetMachineName(context);
_typeName = baseException.GetType().FullName;
_message = baseException.Message;
_source = baseException.Source;
_detail = getExceptionDetail(e, baseException);
_user = Thread.CurrentPrincipal.Identity.Name ?? string.Empty;
_time = DateTime.Now;
//
// If this is an HTTP exception, then get the status code
// and detailed HTML message provided by the host.
//
HttpException httpException = e as HttpException;
if (httpException != null)
{
_statusCode = httpException.GetHttpCode();
_webHostHtmlMessage = httpException.GetHtmlErrorMessage() ?? string.Empty;
}
//
// If the HTTP context is available, then capture the
// collections that represent the state request as well as
// the user.
//
if (context != null)
{
IPrincipal webUser = context.User;
if (webUser != null
&& (webUser.Identity.Name ?? string.Empty).Length > 0)
{
_user = webUser.Identity.Name;
}
HttpRequest request = context.Request;
_serverVariables = CopyCollection(request.ServerVariables);
_cookies = CopyCollection(request.Cookies);
try
{
_queryString = CopyCollection(request.QueryString);
_form = CopyCollection(request.Form);
}
catch (HttpRequestValidationException requestValidationException)
{
//
// .NET 4.0 will raise this exception if dangerous content is
// detected in the request QueryString or Form collections.
// In these cases, we will continue to log the exception without
// the QueryString or Form data. We cannot get to this data without
// targeting the .NET 4.0 framework and accessing the UnvalidatedRequestValues.
//
Trace.WriteLine(requestValidationException);
}
}
}
/// <summary>
/// Coalesce the exception ToString() methods. Some specific circumstances (AutoMapperMappingException
/// coupled with NHibernate lazy load collection) can cause ToString() on exception to fail.
/// We then attempt to report on the base exception, or as much as we possibly can.
/// </summary>
string getExceptionDetail(params Exception[] exceptionsToCoalesce)
{
StringBuilder detail = new StringBuilder();
foreach(var exception in exceptionsToCoalesce)
try
{
detail.Append(exception.ToString());
break;
}
catch (Exception loggingException)
{
detail.AppendFormat(
"Error logging outer exception details for type '{0}':\r\n" +
" {1}\r\n" +
" \r\n", exception.GetType().Name, loggingException);
}
return detail.ToString();
}
/// <summary>
/// Gets the <see cref="Exception"/> instance used to initialize this
/// instance.
/// </summary>
/// <remarks>
/// This is a run-time property only that is not written or read
/// during XML serialization via <see cref="ErrorXml.Decode"/> and
/// <see cref="ErrorXml.Encode(Error,XmlWriter)"/>.
/// </remarks>
public Exception Exception
{
get { return _exception; }
}
/// <summary>
/// Gets or sets the name of application in which this error occurred.
/// </summary>
public string ApplicationName
{
get { return _applicationName ?? string.Empty; }
set { _applicationName = value; }
}
/// <summary>
/// Gets or sets name of host machine where this error occurred.
/// </summary>
public string HostName
{
get { return _hostName ?? string.Empty; }
set { _hostName = value; }
}
/// <summary>
/// Gets or sets the type, class or category of the error.
/// </summary>
public string Type
{
get { return _typeName ?? string.Empty; }
set { _typeName = value; }
}
/// <summary>
/// Gets or sets the source that is the cause of the error.
/// </summary>
public string Source
{
get { return _source ?? string.Empty; }
set { _source = value; }
}
/// <summary>
/// Gets or sets a brief text describing the error.
/// </summary>
public string Message
{
get { return _message ?? string.Empty; }
set { _message = value; }
}
/// <summary>
/// Gets or sets a detailed text describing the error, such as a
/// stack trace.
/// </summary>
public string Detail
{
get { return _detail ?? string.Empty; }
set { _detail = value; }
}
/// <summary>
/// Gets or sets the user logged into the application at the time
/// of the error.
/// </summary>
public string User
{
get { return _user ?? string.Empty; }
set { _user = value; }
}
/// <summary>
/// Gets or sets the date and time (in local time) at which the
/// error occurred.
/// </summary>
public DateTime Time
{
get { return _time; }
set { _time = value; }
}
/// <summary>
/// Gets or sets the HTTP status code of the output returned to the
/// client for the error.
/// </summary>
/// <remarks>
/// For cases where this value cannot always be reliably determined,
/// the value may be reported as zero.
/// </remarks>
public int StatusCode
{
get { return _statusCode; }
set { _statusCode = value; }
}
/// <summary>
/// Gets or sets the HTML message generated by the web host (ASP.NET)
/// for the given error.
/// </summary>
public string WebHostHtmlMessage
{
get { return _webHostHtmlMessage ?? string.Empty; }
set { _webHostHtmlMessage = value; }
}
/// <summary>
/// Gets a collection representing the Web server variables
/// captured as part of diagnostic data for the error.
/// </summary>
public NameValueCollection ServerVariables
{
get { return FaultIn(ref _serverVariables); }
}
/// <summary>
/// Gets a collection representing the Web query string variables
/// captured as part of diagnostic data for the error.
/// </summary>
public NameValueCollection QueryString
{
get { return FaultIn(ref _queryString); }
}
/// <summary>
/// Gets a collection representing the form variables captured as
/// part of diagnostic data for the error.
/// </summary>
public NameValueCollection Form
{
get { return FaultIn(ref _form); }
}
/// <summary>
/// Gets a collection representing the client cookies
/// captured as part of diagnostic data for the error.
/// </summary>
public NameValueCollection Cookies
{
get { return FaultIn(ref _cookies); }
}
/// <summary>
/// Returns the value of the <see cref="Message"/> property.
/// </summary>
public override string ToString()
{
return this.Message;
}
/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
object ICloneable.Clone()
{
//
// Make a base shallow copy of all the members.
//
Error copy = (Error) MemberwiseClone();
//
// Now make a deep copy of items that are mutable.
//
copy._serverVariables = CopyCollection(_serverVariables);
copy._queryString = CopyCollection(_queryString);
copy._form = CopyCollection(_form);
copy._cookies = CopyCollection(_cookies);
return copy;
}
private static NameValueCollection CopyCollection(NameValueCollection collection)
{
if (collection == null || collection.Count == 0)
return null;
return new NameValueCollection(collection);
}
private static NameValueCollection CopyCollection(HttpCookieCollection cookies)
{
if (cookies == null || cookies.Count == 0)
return null;
NameValueCollection copy = new NameValueCollection(cookies.Count);
for (int i = 0; i < cookies.Count; i++)
{
HttpCookie cookie = cookies[i];
//
// NOTE: We drop the Path and Domain properties of the
// cookie for sake of simplicity.
//
copy.Add(cookie.Name, cookie.Value);
}
return copy;
}
/// <summary>
/// Lazily creates the backing collection on first access and returns it.
/// </summary>
private static NameValueCollection FaultIn(ref NameValueCollection collection)
{
    return collection ?? (collection = new NameValueCollection());
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="ActorPublisherSpec.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using Akka.Actor;
using Akka.Configuration;
using Akka.Pattern;
using Akka.Streams.Actors;
using Akka.Streams.Dsl;
using Akka.Streams.Implementation;
using Akka.Streams.TestKit;
using Akka.Streams.TestKit.Tests;
using Akka.TestKit;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;
using ActorPublisher = Akka.Streams.Actors.ActorPublisher;
using Cancel = Akka.Streams.Actors.Cancel;
namespace Akka.Streams.Tests.Actor
{
public class ActorPublisherSpec : AkkaSpec
{
// HOCON for the dispatcher-selection tests. Two DISTINCT dispatchers are
// required: the original text declared "my-dispatcher1" twice (copy/paste
// error), leaving "my-dispatcher2" — referenced by
// ActorPublisher_should_use_dispatcher_from_props via
// ActorAttributes.CreateDispatcher("my-dispatcher2") — undefined.
private static readonly Config Config = ConfigurationFactory.ParseString(@"
my-dispatcher1 {
    type = Dispatcher
    executor = ""fork-join-executor""
    fork-join-executor {
        parallelism-min = 8
        parallelism-max = 8
    }
    mailbox-requirement = ""Akka.Dispatch.IUnboundedMessageQueueSemantics""
}
my-dispatcher2 {
    type = Dispatcher
    executor = ""fork-join-executor""
    fork-join-executor {
        parallelism-min = 8
        parallelism-max = 8
    }
    mailbox-requirement = ""Akka.Dispatch.IUnboundedMessageQueueSemantics""
}");
public ActorPublisherSpec(ITestOutputHelper output = null)
: base(
Config.WithFallback(
ConfigurationFactory.FromResource<ScriptedTest>("Akka.Streams.TestKit.Tests.reference.conf")),
output)
{
EventFilter.Exception<IllegalStateException>().Mute();
}
[Fact]
public void ActorPublisher_should_accumulate_demand()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var p = ActorPublisher.Create<string>(actorRef);
var s = this.CreateSubscriberProbe<string>();
p.Subscribe(s);
s.Request(2);
probe.ExpectMsg<TotalDemand>().Elements.Should().Be(2);
s.Request(3);
probe.ExpectMsg<TotalDemand>().Elements.Should().Be(5);
s.Cancel();
}
[Fact]
public void ActorPublisher_should_allow_onNext_up_to_requested_elements_but_not_more()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var p = ActorPublisher.Create<string>(actorRef);
var s = this.CreateSubscriberProbe<string>();
p.Subscribe(s);
s.Request(2);
actorRef.Tell(new Produce("elem-1"));
actorRef.Tell(new Produce("elem-2"));
actorRef.Tell(new Produce("elem-3"));
s.ExpectNext("elem-1");
s.ExpectNext("elem-2");
s.ExpectNoMsg(TimeSpan.FromMilliseconds(300));
s.Cancel();
}
[Fact]
public void ActorPublisher_should_signal_error()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
actorRef.Tell(new Err("wrong"));
s.ExpectSubscription();
s.ExpectError().Message.Should().Be("wrong");
}
[Fact]
public void ActorPublisher_should_not_terminate_after_signaling_onError()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscription();
probe.Watch(actorRef);
actorRef.Tell(new Err("wrong"));
s.ExpectError().Message.Should().Be("wrong");
probe.ExpectNoMsg(TimeSpan.FromMilliseconds(200));
}
[Fact]
public void ActorPublisher_should_terminate_after_signalling_OnErrorThenStop()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscription();
probe.Watch(actorRef);
actorRef.Tell(new ErrThenStop("wrong"));
s.ExpectError().Message.Should().Be("wrong");
probe.ExpectTerminated(actorRef, TimeSpan.FromSeconds(3));
}
[Fact]
public void ActorPublisher_should_signal_error_before_subscribe()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
actorRef.Tell(new Err("early err"));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscriptionAndError().Message.Should().Be("early err");
}
[Fact]
public void ActorPublisher_should_drop_onNext_elements_after_cancel()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var p = ActorPublisher.Create<string>(actorRef);
var s = this.CreateSubscriberProbe<string>();
p.Subscribe(s);
s.Request(2);
actorRef.Tell(new Produce("elem-1"));
s.Cancel();
actorRef.Tell(new Produce("elem-2"));
s.ExpectNext("elem-1");
s.ExpectNoMsg(TimeSpan.FromMilliseconds(300));
}
[Fact]
public void ActorPublisher_should_remember_requested_after_restart()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var p = ActorPublisher.Create<string>(actorRef);
var s = this.CreateSubscriberProbe<string>();
p.Subscribe(s);
s.Request(3);
probe.ExpectMsg<TotalDemand>().Elements.Should().Be(3);
actorRef.Tell(new Produce("elem-1"));
actorRef.Tell(Boom.Instance);
actorRef.Tell(new Produce("elem-2"));
s.ExpectNext("elem-1");
s.ExpectNext("elem-2");
s.Request(5);
probe.ExpectMsg<TotalDemand>().Elements.Should().Be(6);
s.Cancel();
}
[Fact]
public void ActorPublisher_should_signal_onComplete()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.Request(3);
actorRef.Tell(new Produce("elem-1"));
actorRef.Tell(Complete.Instance);
s.ExpectNext("elem-1");
s.ExpectComplete();
}
[Fact]
public void ActorPublisher_should_not_terminate_after_signalling_onComplete()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
var sub = s.ExpectSubscription();
sub.Request(3);
probe.ExpectMsg<TotalDemand>().Elements.Should().Be(3);
probe.Watch(actorRef);
actorRef.Tell(new Produce("elem-1"));
actorRef.Tell(Complete.Instance);
s.ExpectNext("elem-1");
s.ExpectComplete();
probe.ExpectNoMsg(TimeSpan.FromMilliseconds(200));
}
[Fact]
public void ActorPublisher_should_terminate_after_signalling_onCompleteThenStop()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
var sub = s.ExpectSubscription();
sub.Request(3);
probe.ExpectMsg<TotalDemand>().Elements.Should().Be(3);
probe.Watch(actorRef);
actorRef.Tell(new Produce("elem-1"));
actorRef.Tell(CompleteThenStop.Instance);
s.ExpectNext("elem-1");
s.ExpectComplete();
probe.ExpectTerminated(actorRef,TimeSpan.FromSeconds(3));
}
[Fact]
public void ActorPublisher_should_signal_immediate_onComplete()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
actorRef.Tell(Complete.Instance);
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscriptionAndComplete();
}
[Fact]
public void ActorPublisher_should_only_allow_one_subscriber()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscription();
var s2 = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s2);
s2.ExpectSubscriptionAndError()
.Should()
.BeOfType<IllegalStateException>()
.Which.Message.Should()
.Be($"ActorPublisher {ReactiveStreamsCompliance.SupportsOnlyASingleSubscriber}");
}
[Fact]
public void ActorPublisher_should_not_subscribe_the_same_subscriber_multiple_times()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscription();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectError().Message.Should().Be(ReactiveStreamsCompliance.CanNotSubscribeTheSameSubscriberMultipleTimes);
}
[Fact]
public void ActorPublisher_should_signal_onComplete_when_actor_is_stopped()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisher.Props(probe.Ref));
var s = this.CreateManualSubscriberProbe<string>();
ActorPublisher.Create<string>(actorRef).Subscribe(s);
s.ExpectSubscription();
actorRef.Tell(PoisonPill.Instance);
s.ExpectComplete();
}
[Fact]
public void ActorPublisher_should_work_together_with_Flow_and_ActorSubscriber()
{
var materializer = Sys.Materializer();
this.AssertAllStagesStopped(() =>
{
var probe = CreateTestProbe();
var source = Source.ActorPublisher<int>(Sender.Props);
var sink = Sink.ActorSubscriber<string>(Receiver.Props(probe.Ref));
var t = source.Collect(n =>
{
if (n%2 == 0)
return "elem-" + n;
return null;
}).ToMaterialized(sink, Keep.Both).Run(materializer);
var snd = t.Item1;
var rcv = t.Item2;
for (var i = 1; i <= 3; i++)
snd.Tell(i);
probe.ExpectMsg("elem-2");
for (var n = 4; n <= 500; n++)
{
if (n%19 == 0)
Thread.Sleep(50); // simulate bursts
snd.Tell(n);
}
for (var n = 4; n <= 500; n += 2)
probe.ExpectMsg("elem-" + n);
Watch(snd);
rcv.Tell(PoisonPill.Instance);
ExpectTerminated(snd);
}, materializer);
}
[Fact]
public void ActorPublisher_should_work_in_a_GraphDsl()
{
var materializer = Sys.Materializer();
var probe1 = CreateTestProbe();
var probe2 = CreateTestProbe();
var senderRef1 = ActorOf(Sender.Props);
var source1 = Source.FromPublisher(ActorPublisher.Create<int>(senderRef1))
.MapMaterializedValue(_ => senderRef1);
var sink1 = Sink.FromSubscriber(ActorSubscriber.Create<string>(ActorOf(Receiver.Props(probe1.Ref))))
.MapMaterializedValue(_ => probe1.Ref);
var sink2 = Sink.ActorSubscriber<string>(Receiver.Props(probe2.Ref))
.MapMaterializedValue(_ => probe2.Ref);
var senderRef2 = RunnableGraph.FromGraph(GraphDsl.Create(
Source.ActorPublisher<int>(Sender.Props),
(builder, source2) =>
{
var merge = builder.Add(new Merge<int, int>(2));
var bcast = builder.Add(new Broadcast<string>(2));
builder.From(source1).To(merge.In(0));
builder.From(source2.Outlet).To(merge.In(1));
builder.From(merge.Out).Via(Flow.Create<int>().Select(i => i.ToString())).To(bcast.In);
builder.From(bcast.Out(0)).Via(Flow.Create<string>().Select(s => s + "mark")).To(sink1);
builder.From(bcast.Out(1)).To(sink2);
return ClosedShape.Instance;
})).Run(materializer);
// the scala test is wrong
const int noOfMessages = 10;
for (var i = 0; i < noOfMessages; i++)
{
senderRef1.Tell(i);
senderRef2.Tell(i+noOfMessages);
}
var probe1Messages = new List<string>(noOfMessages*2);
var probe2Messages = new List<string>(noOfMessages*2);
for (var i = 0; i < noOfMessages * 2; i++)
{
probe1Messages.Add(probe1.ExpectMsg<string>());
probe2Messages.Add(probe2.ExpectMsg<string>());
}
probe1Messages.Should().BeEquivalentTo(Enumerable.Range(0, noOfMessages * 2).Select(i => i + "mark"));
probe2Messages.Should().BeEquivalentTo(Enumerable.Range(0, noOfMessages * 2).Select(i => i.ToString()));
}
[Fact]
public void ActorPublisher_should_be_able_to_define_a_subscription_timeout_after_which_it_should_shut_down()
{
var materializer = Sys.Materializer();
this.AssertAllStagesStopped(() =>
{
var timeout = TimeSpan.FromMilliseconds(150);
var a = ActorOf(TimeoutingPublisher.Props(TestActor, timeout));
var pub = ActorPublisher.Create<int>(a);
// don't subscribe for `timeout` millis, so it will shut itself down
ExpectMsg("timed-out");
// now subscribers will already be rejected, while the actor could perform some clean-up
var sub = this.CreateManualSubscriberProbe<int>();
pub.Subscribe(sub);
sub.ExpectSubscriptionAndError();
ExpectMsg("cleaned-up");
// termination is triggered by user code
Watch(a);
ExpectTerminated(a);
}, materializer);
}
[Fact]
public void ActorPublisher_should_be_able_to_define_a_subscription_timeout_which_is_cancelled_by_the_first_incoming_Subscriber()
{
var timeout = TimeSpan.FromMilliseconds(500);
var sub = this.CreateManualSubscriberProbe<int>();
var pub = ActorPublisher.Create<int>(ActorOf(TimeoutingPublisher.Props(TestActor, timeout)));
// subscribe right away, should cancel subscription-timeout
pub.Subscribe(sub);
sub.ExpectSubscription();
ExpectNoMsg(TimeSpan.FromSeconds(1));
}
[Fact]
public void ActorPublisher_should_use_dispatcher_from_materializer_settings()
{
var materializer = ActorMaterializer.Create(Sys, Sys.Materializer().Settings.WithDispatcher("my-dispatcher1"));
var s = this.CreateManualSubscriberProbe<string>();
var actorRef = Source.ActorPublisher<string>(TestPublisher.Props(TestActor, useTestDispatcher: false))
.To(Sink.FromSubscriber(s))
.Run(materializer);
actorRef.Tell(ThreadName.Instance);
ExpectMsg<string>().Should().Contain("my-dispatcher1");
}
[Fact]
public void ActorPublisher_should_use_dispatcher_from_operation_attributes()
{
var materializer = Sys.Materializer();
var s = this.CreateManualSubscriberProbe<string>();
var actorRef = Source.ActorPublisher<string>(TestPublisher.Props(TestActor, useTestDispatcher: false))
.WithAttributes(ActorAttributes.CreateDispatcher("my-dispatcher1"))
.To(Sink.FromSubscriber(s))
.Run(materializer);
actorRef.Tell(ThreadName.Instance);
ExpectMsg<string>().Should().Contain("my-dispatcher1");
}
[Fact]
public void ActorPublisher_should_use_dispatcher_from_props()
{
var materializer = Sys.Materializer();
var s = this.CreateManualSubscriberProbe<string>();
var actorRef = Source.ActorPublisher<string>(TestPublisher.Props(TestActor, useTestDispatcher: false).WithDispatcher("my-dispatcher1"))
.WithAttributes(ActorAttributes.CreateDispatcher("my-dispatcher2"))
.To(Sink.FromSubscriber(s))
.Run(materializer);
actorRef.Tell(ThreadName.Instance);
ExpectMsg<string>().Should().Contain("my-dispatcher1");
}
[Fact]
public void ActorPublisher_should_handle_stash()
{
var probe = CreateTestProbe();
var actorRef = Sys.ActorOf(TestPublisherWithStash.Props(probe.Ref));
var p = new ActorPublisherImpl<string>(actorRef);
var s = this.CreateSubscriberProbe<string>();
p.Subscribe(s);
s.Request(2);
s.Request(3);
actorRef.Tell("unstash");
probe.ExpectMsg(new TotalDemand(5));
probe.ExpectMsg(new TotalDemand(5));
s.Request(4);
probe.ExpectMsg(new TotalDemand(9));
s.Cancel();
}
}
internal class TestPublisher : Actors.ActorPublisher<string>
{
public static Props Props(IActorRef probe, bool useTestDispatcher = true)
{
var p = Akka.Actor.Props.Create(() => new TestPublisher(probe));
return useTestDispatcher ? p.WithDispatcher("akka.test.stream-dispatcher") : p;
}
private readonly IActorRef _probe;
public TestPublisher(IActorRef probe)
{
_probe = probe;
}
protected override bool Receive(object message)
{
return message.Match()
.With<Request>(request => _probe.Tell(new TotalDemand(TotalDemand)))
.With<Produce>(produce => OnNext(produce.Elem))
.With<Err>(err => OnError(new Exception(err.Reason)))
.With<ErrThenStop>(err => OnErrorThenStop(new Exception(err.Reason)))
.With<Complete>(OnComplete)
.With<CompleteThenStop>(OnCompleteThenStop)
.With<Boom>(() => { throw new Exception("boom"); })
.With<ThreadName>(()=>_probe.Tell(Context.Props.Dispatcher /*Thread.CurrentThread.Name*/)) // TODO fix me when thread name is set by dispatcher
.WasHandled;
}
}
internal class TestPublisherWithStash : TestPublisher, IWithUnboundedStash
{
public TestPublisherWithStash(IActorRef probe) : base(probe)
{
}
public new static Props Props(IActorRef probe, bool useTestDispatcher = true)
{
var p = Akka.Actor.Props.Create(() => new TestPublisherWithStash(probe));
return useTestDispatcher ? p.WithDispatcher("akka.test.stream-dispatcher") : p;
}
protected override bool Receive(object message)
{
if ("unstash".Equals(message))
{
Stash.UnstashAll();
Context.Become(base.Receive);
}
else
Stash.Stash();
return true;
}
public IStash Stash { get; set; }
}
// Test publisher that buffers incoming ints while there is no downstream
// demand and drains the buffer as demand arrives.
internal class Sender : Actors.ActorPublisher<int>
{
    public static Props Props { get; } = Props.Create<Sender>().WithDispatcher("akka.test.stream-dispatcher");
    // Elements received while demand was exhausted, in arrival order.
    private IImmutableList<int> _buffer = ImmutableList<int>.Empty;
    protected override bool Receive(object message)
    {
        return message.Match()
            .With<int>(i =>
            {
                // Fast path: nothing buffered and demand available -> emit directly.
                if (_buffer.Count == 0 && TotalDemand > 0)
                    OnNext(i);
                else
                {
                    _buffer = _buffer.Add(i);
                    DeliverBuffer();
                }
            })
            .With<Request>(DeliverBuffer)
            .With<Cancel>(() => Context.Stop(Self))
            .WasHandled;
    }
    // Drains as many buffered elements as the current TotalDemand allows.
    private void DeliverBuffer()
    {
        if (TotalDemand <= 0)
            return;
        if (TotalDemand <= int.MaxValue)
        {
            // Demand fits in an int: deliver up to TotalDemand elements in one pass.
            var use = _buffer.Take((int) TotalDemand).ToImmutableList();
            _buffer = _buffer.Skip((int) TotalDemand).ToImmutableList();
            use.ForEach(OnNext);
        }
        else
        {
            // Demand exceeds int.MaxValue: deliver an int.MaxValue-sized batch,
            // then recurse for the remainder (demand shrinks as OnNext fires).
            var use = _buffer.Take(int.MaxValue).ToImmutableList();
            _buffer = _buffer.Skip(int.MaxValue).ToImmutableList();
            use.ForEach(OnNext);
            DeliverBuffer();
        }
    }
}
internal class TimeoutingPublisher : Actors.ActorPublisher<int>
{
public static Props Props(IActorRef probe, TimeSpan timeout) =>
Akka.Actor.Props.Create(() => new TimeoutingPublisher(probe, timeout))
.WithDispatcher("akka.test.stream-dispatcher");
private readonly IActorRef _probe;
public TimeoutingPublisher(IActorRef probe, TimeSpan timeout)
{
_probe = probe;
SubscriptionTimeout = timeout;
}
protected override bool Receive(object message)
{
return message.Match()
.With<Request>(() => OnNext(1))
.With<SubscriptionTimeoutExceeded>(() =>
{
_probe.Tell("timed-out");
Context.System.Scheduler.ScheduleTellOnce(SubscriptionTimeout, _probe, "cleaned-up", Self);
Context.System.Scheduler.ScheduleTellOnce(SubscriptionTimeout, Self, PoisonPill.Instance, Nobody.Instance);
})
.WasHandled;
}
}
internal class Receiver : ActorSubscriber
{
public static Props Props(IActorRef probe) =>
Akka.Actor.Props.Create(() => new Receiver(probe)).WithDispatcher("akka.test.stream-dispatcher");
private readonly IActorRef _probe;
public Receiver(IActorRef probe)
{
_probe = probe;
}
public override IRequestStrategy RequestStrategy { get; } = new WatermarkRequestStrategy(10);
protected override bool Receive(object message)
{
return message.Match()
.With<OnNext>(next => _probe.Tell(next.Element))
.WasHandled;
}
}
/// <summary>
/// Probe message reporting the publisher's accumulated demand.
/// Implements value equality so test probes can match on expected demand.
/// </summary>
internal class TotalDemand
{
    public readonly long Elements;

    public TotalDemand(long elements)
    {
        Elements = elements;
    }

    protected bool Equals(TotalDemand other) => Elements == other.Elements;

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(this, obj))
            return true;
        var other = obj as TotalDemand;
        return other != null && other.GetType() == GetType() && Equals(other);
    }

    public override int GetHashCode() => Elements.GetHashCode();
}
/// <summary>Instructs the test publisher to emit the given element.</summary>
internal class Produce
{
    public readonly string Elem;

    public Produce(string elem)
    {
        Elem = elem;
    }
}
/// <summary>Instructs the test publisher to signal OnError with the given reason.</summary>
internal class Err
{
    public readonly string Reason;

    public Err(string reason)
    {
        Reason = reason;
    }
}
/// <summary>Instructs the test publisher to signal OnError and then stop itself.</summary>
internal class ErrThenStop
{
    public readonly string Reason;

    public ErrThenStop(string reason)
    {
        Reason = reason;
    }
}
/// <summary>Causes the test publisher to throw, triggering a restart.</summary>
internal class Boom
{
    private Boom() { }
    public static Boom Instance { get; } = new Boom();
}
/// <summary>Instructs the test publisher to signal OnComplete.</summary>
internal class Complete
{
    private Complete() { }
    public static Complete Instance { get; } = new Complete();
}
/// <summary>Instructs the test publisher to signal OnComplete and then stop itself.</summary>
internal class CompleteThenStop
{
    private CompleteThenStop() { }
    public static CompleteThenStop Instance { get; } = new CompleteThenStop();
}
/// <summary>Asks the test publisher to report its dispatcher/thread name.</summary>
internal class ThreadName
{
    private ThreadName() { }
    public static ThreadName Instance { get; } = new ThreadName();
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="ClientData.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web.ClientServices.Providers
{
using System;
using System.Security;
using System.Security.Permissions;
using System.Security.AccessControl;
using System.Globalization;
using System.Data;
using System.Data.Common;
using System.Data.OleDb;
using System.IO;
using System.Windows.Forms;
using System.Data.SqlClient;
using System.Xml.Serialization;
using System.Diagnostics.CodeAnalysis;
using System.Xml.Schema;
using System.Xml;
using System.Collections.Specialized;
using System.IO.IsolatedStorage;
internal class ClientData
{
internal enum ClientDateStoreOrderEnum {
LastLoggedInUserName = 0,
LastLoggedInDateUtc = 1,
PasswordHash = 2,
PasswordSalt = 3,
Roles = 4,
RolesCachedDateUtc = 5,
SettingsNames = 6,
SettingsStoredAs = 7,
SettingsValues = 8,
SettingsNeedReset = 9,
SettingsCacheIsMoreFresh = 10,
CookieNames = 11,
CookieValues = 12
}
private const int _NumStoredValues = 13;
private static string[] _StoredValueNames = new string[_NumStoredValues] {
"LastLoggedInUserName",
"LastLoggedInDateUtc",
"PasswordHash",
"PasswordSalt",
"Roles",
"RolesCachedDateUtc",
"SettingsNames",
"SettingsStoredAs",
"SettingsValues",
"SettingsNeedReset",
"SettingsCacheIsMoreFresh",
"CookieNames",
"CookieValues"};
private object[] _StoredValues = new object[_NumStoredValues] {
"",
DateTime.UtcNow.AddYears(-1),
string.Empty,
string.Empty,
new string[0],
DateTime.UtcNow.AddYears(-1),
new string[0],
new string[0],
new string[0],
false,
false,
new string[0],
new string[0]};
private ClientData() { }
// Deserializes the persisted <ClientData> XML document. Elements must
// appear in the fixed order defined by _StoredValueNames; the CLR type of
// each slot's default in _StoredValues decides how its text is parsed.
private ClientData(XmlReader reader)
{
    reader.ReadStartElement("ClientData");
    for (int iter = 0; iter < _NumStoredValues; iter++) {
        reader.ReadStartElement(_StoredValueNames[iter]);
        if (_StoredValues[iter] is string)
            _StoredValues[iter] = reader.ReadContentAsString();
        else if (_StoredValues[iter] is DateTime) {
            // Dates are persisted as hex-encoded UTC file times (see Save).
            string s = reader.ReadContentAsString();
            long l = long.Parse(s, NumberStyles.HexNumber, CultureInfo.InvariantCulture);
            _StoredValues[iter] = DateTime.FromFileTimeUtc(l);
        } else if (_StoredValues[iter] is bool) {
            string s = reader.ReadContentAsString();
            // Equivalent to (s == "1"): true only when the stored text is "1".
            _StoredValues[iter] = !(string.IsNullOrEmpty(s) || s != "1");
        } else {
            // Remaining slots are string arrays persisted as <item> elements.
            _StoredValues[iter] = ReadStringArray(reader);
        }
        reader.ReadEndElement();
    }
    reader.ReadEndElement();
}
// Reads consecutive <item> elements at the reader's current position and
// returns their text content as a string array (empty array when the
// enclosing element has no <item> children).
private static string[] ReadStringArray(XmlReader reader)
{
    StringCollection items = new StringCollection();
    while (reader.IsStartElement())
    {
        reader.ReadStartElement("item");
        items.Add(reader.ReadContentAsString());
        reader.ReadEndElement();
    }
    string[] result = new string[items.Count];
    items.CopyTo(result, 0);
    return result;
}
// Persists a string array as a sequence of <item> elements. An empty
// array writes an empty text value so the element round-trips as an
// empty array (see ReadStringArray); null entries are stored as empty
// strings.
private static void WriteStringArray(XmlWriter writer, string[] arrToWrite)
{
    if (arrToWrite.Length == 0)
        writer.WriteValue(string.Empty);

    foreach (string item in arrToWrite)
    {
        writer.WriteStartElement("item");
        writer.WriteValue(item ?? string.Empty);
        writer.WriteEndElement();
    }
}
// App data
internal string LastLoggedInUserName { get { return (string)_StoredValues[(int)ClientDateStoreOrderEnum.LastLoggedInUserName]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.LastLoggedInUserName] = value; } }
internal DateTime LastLoggedInDateUtc { get { return (DateTime)_StoredValues[(int)ClientDateStoreOrderEnum.LastLoggedInDateUtc]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.LastLoggedInDateUtc] = value; } }
// Membership data
internal string PasswordHash { get { return (string)_StoredValues[(int)ClientDateStoreOrderEnum.PasswordHash]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.PasswordHash] = value; } }
internal string PasswordSalt { get { return (string)_StoredValues[(int)ClientDateStoreOrderEnum.PasswordSalt]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.PasswordSalt] = value; } }
// Roles data
internal string[] Roles { get { return (string[])_StoredValues[(int)ClientDateStoreOrderEnum.Roles]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.Roles] = value; } }
internal DateTime RolesCachedDateUtc { get { return (DateTime)_StoredValues[(int)ClientDateStoreOrderEnum.RolesCachedDateUtc]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.RolesCachedDateUtc] = value; } }
// Settings data
internal string[] SettingsNames { get { return (string[])_StoredValues[(int)ClientDateStoreOrderEnum.SettingsNames]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.SettingsNames] = value; } }
internal string[] SettingsStoredAs{ get { return (string[])_StoredValues[(int)ClientDateStoreOrderEnum.SettingsStoredAs]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.SettingsStoredAs] = value; } }
internal string[] SettingsValues { get { return (string[])_StoredValues[(int)ClientDateStoreOrderEnum.SettingsValues]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.SettingsValues] = value; } }
internal bool SettingsNeedReset { get { return (bool)_StoredValues[(int)ClientDateStoreOrderEnum.SettingsNeedReset]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.SettingsNeedReset] = value; } }
internal bool SettingsCacheIsMoreFresh { get { return (bool)_StoredValues[(int)ClientDateStoreOrderEnum.SettingsCacheIsMoreFresh]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.SettingsCacheIsMoreFresh] = value; } }
// Cookie data
internal string[] CookieNames { get { return (string[])_StoredValues[(int)ClientDateStoreOrderEnum.CookieNames]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.CookieNames] = value; } }
internal string[] CookieValues { get { return (string[])_StoredValues[(int)ClientDateStoreOrderEnum.CookieValues]; } set { _StoredValues[(int)ClientDateStoreOrderEnum.CookieValues] = value; } }
// Filename
private string FileName = string.Empty;
private bool UsingIsolatedStorage = false;
private const string _IsolatedDir = "System.Web.Extensions.ClientServices.ClientData";
internal void Save()
{
if (!UsingIsolatedStorage) {
using (XmlWriter writer = XmlWriter.Create(FileName)) {
Save(writer);
}
} else {
using(IsolatedStorageFile f = IsolatedStorageFile.GetUserStoreForAssembly()) {
if (f.GetDirectoryNames(_IsolatedDir).Length == 0)
f.CreateDirectory(_IsolatedDir);
using(IsolatedStorageFileStream fs = new IsolatedStorageFileStream(FileName, FileMode.Create, f)) {
using (XmlWriter writer = XmlWriter.Create(fs)) {
Save(writer);
}
}
}
}
}
private void Save(XmlWriter writer)
{
writer.WriteStartElement("ClientData");
for (int iter = 0; iter < _NumStoredValues; iter++) {
writer.WriteStartElement(_StoredValueNames[iter]);
if (_StoredValues[iter] == null) {
writer.WriteValue(string.Empty);
} else if (_StoredValues[iter] is string) {
writer.WriteValue(_StoredValues[iter]);
} else if (_StoredValues[iter] is bool) {
writer.WriteValue(((bool)_StoredValues[iter]) ? "1" : "0");
} else if (_StoredValues[iter] is DateTime) {
writer.WriteValue(((DateTime)_StoredValues[iter]).ToFileTimeUtc().ToString("X", CultureInfo.InvariantCulture));
} else {
WriteStringArray(writer, (string[])_StoredValues[iter]);
}
writer.WriteEndElement();
}
writer.WriteEndElement();
writer.Flush();
}
// Best-effort load of the per-user (or per-application, when username is
// null) client data from isolated storage or the file system. Any
// read/parse failure is swallowed and a fresh, default-valued ClientData
// is returned instead, tagged with the resolved file name so Save()
// writes back to the same location.
internal static ClientData Load(string username, bool useIsolatedStorage)
{
    ClientData cd = null;
    string fileName = null;
    if (useIsolatedStorage) {
        fileName = _IsolatedDir + "\\" + SqlHelper.GetPartialDBFileName(username, ".clientdata");
        try {
            using(IsolatedStorageFile f = IsolatedStorageFile.GetUserStoreForAssembly()) {
                using(IsolatedStorageFileStream fs = new IsolatedStorageFileStream(fileName, FileMode.Open, f)) {
                    using (XmlReader xr = XmlReader.Create(fs)) {
                        cd = new ClientData(xr);
                    }
                }
            }
        } catch {} // ignore exceptions: corrupt/missing store falls back to defaults
    } else {
        fileName = SqlHelper.GetFullDBFileName(username, ".clientdata");
        try {
            if (File.Exists(fileName)) {
                using (FileStream fs = new FileStream(fileName, FileMode.Open, FileAccess.Read)) {
                    using (XmlReader xr = XmlReader.Create(fs)) {
                        cd = new ClientData(xr);
                    }
                }
            }
        } catch {} // ignore exceptions: corrupt/missing file falls back to defaults
    }
    if (cd == null)
        cd = new ClientData();
    cd.UsingIsolatedStorage = useIsolatedStorage;
    cd.FileName = fileName;
    return cd;
}
}
// Process-wide cache over ClientData: one application-level instance and a
// single-entry cache for the most recently requested user.
internal static class ClientDataManager
{
    static private ClientData _applicationClientData;
    static private ClientData _userClientData;
    // Username whose data is currently cached in _userClientData.
    static private string _curUserName;
    // Loads (once) and returns the application-level client data.
    internal static ClientData GetAppClientData(bool useIsolatedStore)
    {
        if (_applicationClientData == null)
            _applicationClientData = ClientData.Load(null, useIsolatedStore);
        return _applicationClientData;
    }
    // Returns the cached user data, reloading only when the username changes.
    // NOTE(review): the cache key ignores useIsolatedStore — a repeat call for
    // the same user with a different store flag returns the old data; confirm
    // callers never mix store types for one user.
    internal static ClientData GetUserClientData(string username, bool useIsolatedStore)
    {
        if (username != _curUserName) {
            _curUserName = username;
            _userClientData = ClientData.Load(username, useIsolatedStore);
        }
        return _userClientData;
    }
    // Looks up a stored cookie value by matching cookieName against the
    // entries of CookieNames (case-insensitive).
    // NOTE(review): StoreCookie below fills CookieNames with generated GUID
    // strings and matches on the CookieValues "name=value" prefix instead, so
    // these two lookups appear inconsistent — verify against callers.
    internal static string GetCookie(string username, string cookieName, bool useIsolatedStore)
    {
        ClientData cd = GetUserClientData(username, useIsolatedStore);
        if (cd.CookieNames == null) {
            cd.CookieNames = new string[0];
            cd.CookieValues = new string[0];
            return null;
        }
        for(int iter=0; iter<cd.CookieNames.Length; iter++)
            if (string.Compare(cookieName, cd.CookieNames[iter], StringComparison.OrdinalIgnoreCase) == 0)
                return cd.CookieValues[iter];
        return null;
    }
    // Stores cookieName=cookieValue for the user, updating an existing entry
    // in place (persisting only when the value actually changed) or appending
    // a new entry keyed by a fresh GUID. Returns the entry's key.
    internal static string StoreCookie(string username, string cookieName, string cookieValue, bool useIsolatedStore)
    {
        ClientData cd = GetUserClientData(username, useIsolatedStore);
        if (cd.CookieNames == null) {
            cd.CookieNames = new string[0];
            cd.CookieValues = new string[0];
        } else {
            for(int iter=0; iter<cd.CookieNames.Length; iter++) {
                if (cd.CookieValues[iter].StartsWith(cookieName + "=", StringComparison.OrdinalIgnoreCase)) {
                    if (cd.CookieValues[iter] != cookieName + "=" + cookieValue) {
                        cd.CookieValues[iter] = cookieName + "=" + cookieValue;
                        cd.Save();
                    }
                    return cd.CookieNames[iter];
                }
            }
        }
        // No existing entry: append under a newly generated GUID key.
        string name = Guid.NewGuid().ToString("N");
        string [] names = new string[cd.CookieNames.Length+1];
        string [] vals = new string[cd.CookieNames.Length+1];
        cd.CookieNames.CopyTo(names, 0);
        cd.CookieValues.CopyTo(vals, 0);
        names[cd.CookieNames.Length] = name;
        vals[cd.CookieNames.Length] = cookieName + "=" + cookieValue;
        cd.CookieNames = names;
        cd.CookieValues = vals;
        cd.Save();
        return name;
    }
    // Clears all cookies for the user in memory.
    // NOTE(review): unlike StoreCookie, this does not call cd.Save(), so the
    // deletion is not persisted until something else saves — confirm intended.
    internal static void DeleteAllCookies(string username, bool useIsolatedStore)
    {
        ClientData cd = GetUserClientData(username, useIsolatedStore);
        cd.CookieNames = new string[0];
        cd.CookieValues = new string[0];
    }
}
}
| |
/*
* (c) Copyright Marek Ledvina, Foriero Studo
*/
using UnityEngine;
using System.Collections;
using ForieroEngine.MIDIUnified;
[AddComponentMenu("MIDIUnified/Generators/MidiKeyboardInput")]
public class MidiKeyboardInput : MonoBehaviour, IMidiEvents {
public static MidiKeyboardInput singleton;
public event ShortMessageEventHandler ShortMessageEvent;
public enum KeyboardInputType{
ABCDEFG,
QUERTY
}
public int keyboardOctave = 4;
public bool updateKeyboardOctave = false;
public bool muteTonesWhenChangingOctave = false;
[Range (0, 1)]
public float customVolume = 1f;
public bool midiOut = false;
public ChannelEnum midiChannel = ChannelEnum.None;
public KeyboardInputType keyboardInputType = KeyboardInputType.QUERTY;
void Update(){
ProceedKeyboardInput();
}
// Enforces a single scene-wide instance: if another MidiKeyboardInput has
// already registered itself, this duplicate component destroys itself.
void Awake(){
    if(singleton != null) {
        Debug.Log("GENERATOR MidiKeyboardInput already in scene.");
        Destroy(this);
        return;
    }
    singleton = this;
}
void OnDestroy(){
singleton = null;
}
#region KeyboardInput
enum AccidentalState{
none = 0,
sharp = 1,
flat = -1
}
AccidentalState accidentalState = AccidentalState.none;
AccidentalState[] accidentalStates = new AccidentalState[13];
int[] keyOctaveIdx = new int[13];
// Sends NoteOff for all 13 semitones of the current keyboard octave
// (C up to and including the next C), with MidiOut event firing
// temporarily suppressed — used to avoid hanging notes, e.g. when the
// octave changes while keys are held.
public void MuteTones(){
    int startIndex = keyboardOctave*12;
    MidiOut.fireMidiOutEvents = false;
    for(int i = startIndex; i<startIndex+13;i++){
        // byte.MaxValue/2 == 127, so only notes 0..126 are sent.
        // NOTE(review): this excludes MIDI note 127 — confirm intended.
        if(i>=0 && i < byte.MaxValue/2){
            SendShortMessage(CommandEnum.NoteOff, i, MidiConversion.GetMidiVolume(customVolume));
        }
    }
    MidiOut.fireMidiOutEvents = true;
}
// Emits NoteOn for the given note index when it lies in 0..126
// (byte.MaxValue/2 == 127), at the configured custom volume.
void KeyDown(int aMidiIdx){
    if(aMidiIdx >= 0 && aMidiIdx < byte.MaxValue/2){
        SendShortMessage(CommandEnum.NoteOn, aMidiIdx, MidiConversion.GetMidiVolume(customVolume));
    }
}
// Emits NoteOff for the given note index when it lies in 0..126.
void KeyUp(int aMidiIdx){
    if(aMidiIdx >= 0 && aMidiIdx < byte.MaxValue/2){
        SendShortMessage(CommandEnum.NoteOff, aMidiIdx, MidiConversion.GetMidiVolume(customVolume));
    }
}
void SendShortMessage(CommandEnum aCommand, int aData1, int aData2){
if(ShortMessageEvent != null){
ShortMessageEvent(
midiChannel == ChannelEnum.None ? (int)aCommand : (int)midiChannel + (int)aCommand,
aData1,
aData2
);
}
if(midiOut){
MidiOut.SendShortMessage(
midiChannel == ChannelEnum.None ? (int)aCommand : (int)midiChannel + (int)aCommand,
aData1,
aData2
);
}
}
void ProceedKeyboardInput(){
if(Input.GetKeyDown(KeyCode.UpArrow)){
accidentalState = AccidentalState.sharp;
}
if(Input.GetKeyDown(KeyCode.DownArrow)){
accidentalState = AccidentalState.flat;
}
if(Input.GetKeyUp(KeyCode.UpArrow)){
accidentalState = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.DownArrow)){
accidentalState = AccidentalState.none;
}
if(Input.GetKeyDown(KeyCode.LeftArrow)){
if(updateKeyboardOctave) {
if(muteTonesWhenChangingOctave) MuteTones();
keyboardOctave--;
}
}
if(Input.GetKeyDown(KeyCode.RightArrow)){
if(updateKeyboardOctave) {
if(muteTonesWhenChangingOctave) MuteTones();
keyboardOctave++;
}
}
if(Input.GetKeyDown(KeyCode.RightControl)){
SendShortMessage(CommandEnum.Controller, (int)PedalEnum.Right, 127);
}
if(Input.GetKeyUp(KeyCode.RightControl)){
SendShortMessage(CommandEnum.Controller, (int)PedalEnum.Right, 0);
}
switch(keyboardInputType){
case KeyboardInputType.ABCDEFG:
#region ABCDEFG
if(Input.GetKeyDown(KeyCode.A)){
KeyDown((keyboardOctave*12) + 9 + (int)accidentalState);
accidentalStates[0] = accidentalState;
keyOctaveIdx[0] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.B)){
KeyDown((keyboardOctave*12) + 11+ (int)accidentalState);
accidentalStates[1] = accidentalState;
keyOctaveIdx[1] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.H)){
KeyDown((keyboardOctave*12) + 11+ (int)accidentalState);
accidentalStates[1] = accidentalState;
keyOctaveIdx[1] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.C)){
KeyDown((keyboardOctave*12) + 0 + (int)accidentalState);
accidentalStates[2] = accidentalState;
keyOctaveIdx[2] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.D)){
KeyDown((keyboardOctave*12) + 2 + (int)accidentalState);
accidentalStates[3] = accidentalState;
keyOctaveIdx[3] = keyboardOctave;
};
if(Input.GetKeyDown(KeyCode.E)){
KeyDown((keyboardOctave*12) + 4 + (int)accidentalState);
accidentalStates[4] = accidentalState;
keyOctaveIdx[4] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.F)){
KeyDown((keyboardOctave*12) + 5 + (int)accidentalState);
accidentalStates[5] = accidentalState;
keyOctaveIdx[5] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.G)){
KeyDown((keyboardOctave*12) + 7 + (int) accidentalState);
accidentalStates[6] = accidentalState;
keyOctaveIdx[6] = keyboardOctave;
}
if(Input.GetKeyUp(KeyCode.A)){
KeyUp((keyOctaveIdx[0]*12) + 9+ (int)accidentalStates[0]);
accidentalStates[0] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.B)){
KeyUp((keyOctaveIdx[1]*12) + 11+ (int)accidentalStates[1]);
accidentalStates[1] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.H)){
KeyUp((keyOctaveIdx[1]*12) + 11+ (int)accidentalStates[1]);
accidentalStates[1] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.C)){
KeyUp((keyOctaveIdx[2]*12) + 0+ (int)accidentalStates[2]);
accidentalStates[2] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.D)){
KeyUp((keyOctaveIdx[3]*12) + 2+ (int)accidentalStates[3]);
accidentalStates[3] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.E)){
KeyUp((keyOctaveIdx[4]*12) + 4+ (int)accidentalStates[4]);
accidentalStates[4] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.F)){
KeyUp((keyOctaveIdx[5]*12) + 5+ (int)accidentalStates[5]);
accidentalStates[5] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.G)){
KeyUp((keyOctaveIdx[6]*12) + 7+ (int)accidentalStates[6]);
accidentalStates[6] = AccidentalState.none;
}
#endregion
break;
case KeyboardInputType.QUERTY:
#region QUERTY
if(Input.GetKeyDown(KeyCode.A)){
KeyDown((keyboardOctave*12) + 0 + (int)accidentalState);
accidentalStates[0] = accidentalState;
keyOctaveIdx[0] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.W)){
KeyDown((keyboardOctave*12) + 1 + (int)accidentalState);
accidentalStates[1] = accidentalState;
keyOctaveIdx[1] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.S)){
KeyDown((keyboardOctave*12) + 2 + (int)accidentalState);
accidentalStates[2] = accidentalState;
keyOctaveIdx[2] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.E)){
KeyDown((keyboardOctave*12) + 3 + (int)accidentalState);
accidentalStates[3] = accidentalState;
keyOctaveIdx[3] = keyboardOctave;
};
if(Input.GetKeyDown(KeyCode.D)){
KeyDown((keyboardOctave*12) + 4 + (int)accidentalState);
accidentalStates[4] = accidentalState;
keyOctaveIdx[4] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.F)){
KeyDown((keyboardOctave*12) + 5 + (int)accidentalState);
accidentalStates[5] = accidentalState;
keyOctaveIdx[5] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.T)){
KeyDown((keyboardOctave*12) + 6 + (int) accidentalState);
accidentalStates[6] = accidentalState;
keyOctaveIdx[6] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.G)){
KeyDown((keyboardOctave*12) + 7 + (int) accidentalState);
accidentalStates[7] = accidentalState;
keyOctaveIdx[7] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.Y)){
KeyDown((keyboardOctave*12) + 8 + (int) accidentalState);
accidentalStates[8] = accidentalState;
keyOctaveIdx[8] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.Z)){
KeyDown((keyboardOctave*12) + 8 + (int) accidentalState);
accidentalStates[8] = accidentalState;
keyOctaveIdx[8] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.H)){
KeyDown((keyboardOctave*12) + 9 + (int) accidentalState);
accidentalStates[9] = accidentalState;
keyOctaveIdx[9] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.U)){
KeyDown((keyboardOctave*12) + 10 + (int) accidentalState);
accidentalStates[10] = accidentalState;
keyOctaveIdx[10] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.J)){
KeyDown((keyboardOctave*12) + 11 + (int) accidentalState);
accidentalStates[11] = accidentalState;
keyOctaveIdx[11] = keyboardOctave;
}
if(Input.GetKeyDown(KeyCode.K)){
KeyDown((keyboardOctave*12) + 12 + (int) accidentalState);
accidentalStates[12] = accidentalState;
keyOctaveIdx[12] = keyboardOctave;
}
if(Input.GetKeyUp(KeyCode.A)){
KeyUp((keyboardOctave*12) + 0 + (int)accidentalState);
accidentalStates[0] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.W)){
KeyUp((keyboardOctave*12) + 1 + (int)accidentalState);
accidentalStates[1] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.S)){
KeyUp((keyboardOctave*12) + 2 + (int)accidentalState);
accidentalStates[2] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.E)){
KeyUp((keyboardOctave*12) + 3 + (int)accidentalState);
accidentalStates[3] = AccidentalState.none;
};
if(Input.GetKeyUp(KeyCode.D)){
KeyUp((keyboardOctave*12) + 4 + (int)accidentalState);
accidentalStates[4] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.F)){
KeyUp((keyboardOctave*12) + 5 + (int)accidentalState);
accidentalStates[5] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.T)){
KeyUp((keyboardOctave*12) + 6 + (int) accidentalState);
accidentalStates[6] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.G)){
KeyUp((keyboardOctave*12) + 7 + (int) accidentalState);
accidentalStates[7] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.Y)){
KeyUp((keyboardOctave*12) + 8 + (int) accidentalState);
accidentalStates[8] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.Z)){
KeyUp((keyboardOctave*12) + 8 + (int) accidentalState);
accidentalStates[8] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.H)){
KeyUp((keyboardOctave*12) + 9 + (int) accidentalState);
accidentalStates[9] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.U)){
KeyUp((keyboardOctave*12) + 10 + (int) accidentalState);
accidentalStates[10] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.J)){
KeyUp((keyboardOctave*12) + 11 + (int) accidentalState);
accidentalStates[11] = AccidentalState.none;
}
if(Input.GetKeyUp(KeyCode.K)){
KeyUp((keyboardOctave*12) + 12 + (int) accidentalState);
accidentalStates[12] = AccidentalState.none;
}
#endregion
break;
}
}
#endregion
}
| |
// Copyright 2017 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text.RegularExpressions;
using UnityEngine;
namespace TiltBrushToolkit {
public class GltfMaterialConverter {
  // Matches Tilt Brush material names, which end with a guid:
  //   material_Light-2241cd32-8ba2-48a5-9ee7-2caef7e9ed62
  private static readonly Regex kTiltBrushMaterialRegex = new Regex(
      @".*([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})$");

  // Matches
  //   http://...<guid>/shadername.glsl
  //   <some local file>/.../<guid>-<version>.glsl
  private static readonly Regex kTiltBrushShaderRegex = new Regex(
      @".*([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})[/-]");

  /// <summary>
  /// Information about a Unity material corresponding to a Gltf node.
  /// </summary>
  public struct UnityMaterial {
    /// <summary>
    /// The material to be used in place of the GltfMaterial
    /// </summary>
    public Material material;
    /// <summary>
    /// The material that "material" is based on. This might be the same as
    /// "material", if no customizations were needed.
    /// </summary>
    public Material template;
  }

  /// <summary>
  /// List of NEW Unity materials we have created.
  /// </summary>
  private List<Material> m_newMaterials = new List<Material>();

  /// <summary>
  /// For memoizing GetMaterial()
  /// </summary>
  private Dictionary<GltfMaterialBase, UnityMaterial> m_getMaterialMemo =
      new Dictionary<GltfMaterialBase, UnityMaterial>();

  private static bool IsTiltBrushHostedUri(string uri) {
    // Hosted uris are absolute, and will always look like
    // "https://www.tiltbrush.com/shaders/..."; local resources are relative.
    return uri.Contains("://");
  }

  /// <summary>
  /// Enumerates those Textures associated with local materials, as distinguished
  /// from well-known, global materials like BlocksPaper and Tilt Brush Light.
  /// Textures associated with those latter materials will not be enumerated.
  ///
  /// These are the textures that need UnityEngine.Textures created for them.
  /// </summary>
  public static IEnumerable<GltfTextureBase> NecessaryTextures(GltfRootBase root) {
    foreach (var mat in root.Materials) {
      if (!IsGlobalMaterial(mat)) {
        foreach (var tex in mat.ReferencedTextures) {
          yield return tex;
        }
      }
    }
  }

  /// <summary>
  /// Converts the "Necessary" textures found in the gltf file.
  /// Coroutine must be fully consumed before generating materials.
  /// </summary>
  /// <seealso cref="GltfMaterialConverter.NecessaryTextures" />
  /// <param name="root">The root of the GLTF file.</param>
  /// <param name="loader">The loader to use to load resources (textures, etc).</param>
  /// <param name="loaded">Mutated to add any textures that were loaded.</param>
  public static IEnumerable LoadTexturesCoroutine(
      GltfRootBase root, IUriLoader loader, List<Texture2D> loaded) {
    foreach (GltfTextureBase gltfTexture in NecessaryTextures(root)) {
      if (IsTiltBrushHostedUri(gltfTexture.SourcePtr.uri)) {
        // Textures referenced by global materials should have been filtered out
        // by NecessaryTextures already; flag the inconsistency and skip.
        Debug.LogWarningFormat("Texture {0} uri {1} was considered necessary",
                               gltfTexture.GltfId, gltfTexture.SourcePtr.uri);
        continue;
      }
      foreach (var unused in ConvertTextureCoroutine(gltfTexture, loader)) {
        yield return null;
      }
      if (gltfTexture.unityTexture != null) {
        loaded.Add(gltfTexture.unityTexture);
      }
    }

    // After textures are converted, we don't need the cached RawImage data any more.
    // "Deallocate" it.
    foreach (GltfImageBase image in root.Images) {
      image.data = null;
    }
  }

  /// <summary>
  /// Gets (or creates) the Unity material corresponding to the given glTF material.
  /// </summary>
  /// <param name="gltfMaterial">The glTF material.</param>
  /// <returns>The Unity material that corresponds to the given GLTF2 material,
  /// or null if conversion failed.</returns>
  public UnityMaterial? GetMaterial(GltfMaterialBase gltfMaterial) {
    if (m_getMaterialMemo.TryGetValue(gltfMaterial, out UnityMaterial memo)) {
      return memo;
    }

    // Global (built-in) materials are shared, so material == template.
    if (LookUpGlobalMaterial(gltfMaterial) is UnityMaterial global) {
      Debug.Assert(global.material == global.template);
      m_getMaterialMemo[gltfMaterial] = global;
      return global;
    }

    // Created materials are per-asset instances, so material != template.
    if (ConvertGltfMaterial(gltfMaterial) is UnityMaterial created) {
      Debug.Assert(created.material != created.template);
      m_newMaterials.Add(created.material);
      m_getMaterialMemo[gltfMaterial] = created;
      return created;
    }

    Debug.LogErrorFormat("Failed to convert material {0}", gltfMaterial.name);
    return null;
  }

  /// <summary>
  /// Returns a list of new materials that were created as part of the material
  /// conversion process.
  /// </summary>
  public List<Material> GetGeneratedMaterials() {
    return new List<Material>(m_newMaterials);
  }

  /// <returns>true if there is a global material corresponding to the given glTF material,
  /// false if a material needs to be created for this material.</returns>
  private static bool IsGlobalMaterial(GltfMaterialBase gltfMaterial) {
    // Simple implementation for now
    return LookUpGlobalMaterial(gltfMaterial) != null;
  }

  /// <summary>
  /// Looks up a built-in global material that corresponds to the given GLTF material.
  /// This will NOT create new materials; it will only look up global ones.
  /// </summary>
  /// <param name="gltfMaterial">The material to look up.</param>
  /// <returns>The global material that corresponds to the given GLTF material,
  /// if found. If not found, null.</returns>
  private static UnityMaterial? LookUpGlobalMaterial(GltfMaterialBase gltfMaterial) {
    // Check if it's a Tilt Brush material.
    Guid guid = ParseGuidFromMaterial(gltfMaterial);
    if (guid != Guid.Empty) {
      // Tilt Brush global material. PBR materials will use unrecognized guids;
      // these will be handled by the caller.
      BrushDescriptor desc;
      if (TbtSettings.Instance.TryGetBrush(guid, out desc)) {
        return new UnityMaterial {
          material = desc.Material,
          template = desc.Material
        };
      }
    }
    return null;
  }

  // Dispatches to the version-specific converter for this material.
  private UnityMaterial? ConvertGltfMaterial(GltfMaterialBase gltfMat) {
    if (gltfMat is Gltf1Material gltf1) {
      return ConvertGltf1Material(gltf1);
    } else if (gltfMat is Gltf2Material gltf2) {
      return ConvertGltf2Material(gltf2);
    } else {
      Debug.LogErrorFormat("Unexpected type: {0}", gltfMat.GetType());
      return null;
    }
  }

  /// <summary>
  /// Converts the given glTF1 material to a new Unity material.
  /// This is only possible if the passed material is a Tilt Brush "PBR" material
  /// squeezed into glTF1.
  /// </summary>
  /// <param name="gltfMat">The glTF1 material to convert.</param>
  /// <returns>The result of the conversion, or null on failure.</returns>
  private UnityMaterial? ConvertGltf1Material(Gltf1Material gltfMat) {
    // We know this guid doesn't map to a brush; if it did, LookUpGlobalMaterial would
    // have succeeded and we wouldn't be trying to create a new material.
    Guid instanceGuid = ParseGuidFromMaterial(gltfMat);
    Guid templateGuid = ParseGuidFromShader(gltfMat);

    BrushDescriptor desc;
    if (!TbtSettings.Instance.TryGetBrush(templateGuid, out desc)) {
      // If they are the same, there is no template/instance relationship.
      if (instanceGuid != templateGuid) {
        Debug.LogErrorFormat("Unexpected: cannot find template material {0} for {1}",
                             templateGuid, instanceGuid);
      }
      return null;
    }

    TiltBrushGltf1PbrValues tbPbr = gltfMat.values;
    // The default values here are reasonable fallbacks if there is no tbPbr
    Gltf2Material.PbrMetallicRoughness pbr = new Gltf2Material.PbrMetallicRoughness();
    if (tbPbr != null) {
      if (tbPbr.BaseColorFactor != null) {
        pbr.baseColorFactor = tbPbr.BaseColorFactor.Value;
      }
      if (tbPbr.MetallicFactor != null) {
        pbr.metallicFactor = tbPbr.MetallicFactor.Value;
      }
      if (tbPbr.RoughnessFactor != null) {
        pbr.roughnessFactor = tbPbr.RoughnessFactor.Value;
      }
      if (tbPbr.BaseColorTexPtr != null) {
        pbr.baseColorTexture = new Gltf2Material.TextureInfo {
          index = -1,
          texCoord = 0,
          texture = tbPbr.BaseColorTexPtr
        };
      }
      // Tilt Brush doesn't support metallicRoughnessTexture (yet?)
    }
    var pbrInfo = new TbtSettings.PbrMaterialInfo {
      material = desc.Material
    };
    return CreateNewPbrMaterial(pbrInfo, gltfMat.name, pbr);
  }

  /// <summary>
  /// Converts the given GLTF 2 material to a Unity Material.
  /// This is "best effort": we only interpret SOME, but not all GLTF material parameters.
  /// We try to be robust, and will always try to return *some* material rather than fail,
  /// even if crucial fields are missing or can't be parsed.
  /// </summary>
  /// <param name="gltfMat">The GLTF 2 material to convert.</param>
  /// <returns>The result of the conversion</returns>
  private UnityMaterial? ConvertGltf2Material(Gltf2Material gltfMat) {
    TbtSettings.PbrMaterialInfo pbrInfo;

    string alphaMode = gltfMat.alphaMode == null ? null : gltfMat.alphaMode.ToUpperInvariant();
    switch (alphaMode) {
      case null:
      case "":
      case Gltf2Material.kAlphaModeOpaque:
        pbrInfo = gltfMat.doubleSided
            ? TbtSettings.Instance.m_PbrOpaqueDoubleSided
            : TbtSettings.Instance.m_PbrOpaqueSingleSided;
        break;
      case Gltf2Material.kAlphaModeBlend:
        pbrInfo = gltfMat.doubleSided
            ? TbtSettings.Instance.m_PbrBlendDoubleSided
            : TbtSettings.Instance.m_PbrBlendSingleSided;
        break;
      default:
        // Unknown alpha modes (e.g. MASK) fall back to opaque.
        Debug.LogWarning($"Not yet supported: alphaMode={alphaMode}");
        goto case Gltf2Material.kAlphaModeOpaque;
    }

    if (gltfMat.pbrMetallicRoughness == null) {
      var specGloss = gltfMat.extensions?.KHR_materials_pbrSpecularGlossiness;
      if (specGloss != null) {
        // Try and make the best of pbrSpecularGlossiness.
        // Maybe it would be better to support "extensionsRequired" and raise an error
        // if the asset requires pbrSpecularGlossiness.
        gltfMat.pbrMetallicRoughness = new Gltf2Material.PbrMetallicRoughness {
          baseColorFactor = specGloss.diffuseFactor,
          baseColorTexture = specGloss.diffuseTexture,
          roughnessFactor = 1f - specGloss.glossinessFactor
        };
      } else {
        Debug.LogWarningFormat("Material #{0} has no PBR info.", gltfMat.gltfIndex);
        return null;
      }
    }

    return CreateNewPbrMaterial(pbrInfo, gltfMat.name, gltfMat.pbrMetallicRoughness);
  }

  // Helper for ConvertGltf{1,2}Material: instantiates the template material
  // and applies the pbr parameters and a display name to the copy.
  private UnityMaterial CreateNewPbrMaterial(
      TbtSettings.PbrMaterialInfo pbrInfo, string gltfMatName,
      Gltf2Material.PbrMetallicRoughness pbr) {
    Material mat = UnityEngine.Object.Instantiate(pbrInfo.material);

    Texture tex = null;
    if (pbr.baseColorTexture != null) {
      tex = pbr.baseColorTexture.texture.unityTexture;
      mat.SetTexture("_BaseColorTex", tex);
    }

    if (gltfMatName != null) {
      // The gltf has a name it wants us to use
      mat.name = gltfMatName;
    } else {
      // No name in the gltf; make up something reasonable
      string matName = pbrInfo.material.name;
      if (matName.StartsWith("Base")) { matName = matName.Substring(4); }
      if (tex != null) {
        matName = string.Format("{0}_{1}", matName, tex.name);
      }
      mat.name = matName;
    }

    mat.SetColor("_BaseColorFactor", pbr.baseColorFactor);
    mat.SetFloat("_MetallicFactor", pbr.metallicFactor);
    mat.SetFloat("_RoughnessFactor", pbr.roughnessFactor);

    return new UnityMaterial {
      material = mat,
      template = pbrInfo.material
    };
  }

  // Strips the extension and any non-identifier characters from a uri so it
  // can be used as an asset name.
  private static string SanitizeName(string uri) {
    uri = System.IO.Path.ChangeExtension(uri, "");
    return Regex.Replace(uri, @"[^a-zA-Z0-9_-]+", "");
  }

  /// <summary>
  /// Fills in gltfTexture.unityTexture with a Texture2D.
  /// </summary>
  /// <param name="gltfTexture">The glTF texture to convert.</param>
  /// <param name="loader">The IUriLoader to use for loading image files.</param>
  /// <returns>On completion of the coroutine, gltfTexture.unityTexture will be non-null
  /// on success.</returns>
  private static IEnumerable ConvertTextureCoroutine(
      GltfTextureBase gltfTexture, IUriLoader loader) {
    if (gltfTexture.unityTexture != null) {
      throw new InvalidOperationException("Already converted");
    }

    if (gltfTexture.SourcePtr == null) {
      Debug.LogErrorFormat("No image for texture {0}", gltfTexture.GltfId);
      yield break;
    }

    Texture2D tex;
    if (gltfTexture.SourcePtr.data != null) {
      // This case is hit if the client code hooks up its own threaded
      // texture-loading mechanism.
      var data = gltfTexture.SourcePtr.data;
      tex = new Texture2D(data.colorWidth, data.colorHeight, data.format, true);
      yield return null;
      tex.SetPixels32(data.colorData);
      yield return null;
      tex.Apply();
      yield return null;
    } else {
#if UNITY_EDITOR
      // Prefer to load the Asset rather than create a new Texture2D;
      // that lets the resulting prefab reference the texture rather than
      // embedding it inside the prefab.
      tex = loader.LoadAsAsset(gltfTexture.SourcePtr.uri);
#else
      tex = null;
#endif
      if (tex == null) {
        byte[] textureBytes;
        try {
          using (IBufferReader r = loader.Load(gltfTexture.SourcePtr.uri)) {
            textureBytes = new byte[r.GetContentLength()];
            r.Read(textureBytes, destinationOffset: 0, readStart: 0, readSize: textureBytes.Length);
          }
        } catch (IOException e) {
          Debug.LogWarning($"Cannot read uri {gltfTexture.SourcePtr.uri}: {e}");
          yield break;
        }
        tex = new Texture2D(1, 1);
        tex.LoadImage(textureBytes, markNonReadable: false);
        yield return null;
      }
    }

    tex.name = SanitizeName(gltfTexture.SourcePtr.uri);
    gltfTexture.unityTexture = tex;
  }

  // Returns the guid that represents this material.
  // The guid may refer to a pre-existing material (like Blocks Paper, or Tilt Brush Light).
  // It may also refer to a dynamically-generated material, in which case the base material
  // can be found by using ParseGuidFromShader.
  private static Guid ParseGuidFromMaterial(GltfMaterialBase gltfMaterial) {
    if (Guid.TryParse((gltfMaterial as Gltf2Material)?.extensions?.GOOGLE_tilt_brush_material?.guid,
                      out Guid guid)) {
      return guid;
    }

    // Tilt Brush names its gltf materials like:
    //   material_Light-2241cd32-8ba2-48a5-9ee7-2caef7e9ed62
    // Check with a regex first: letting the Guid constructor raise FormatException
    // generates tons of garbage for something that is done so often.
    if (!kTiltBrushMaterialRegex.IsMatch(gltfMaterial.name)) {
      return Guid.Empty;
    }
    // The regex guarantees the name ends with a 36-character guid.
    int start = gltfMaterial.name.Length - 36;
    return new Guid(gltfMaterial.name.Substring(start));
  }

  // Returns the guid found on this material's vert or frag shader, or Empty on failure.
  // This Guid represents the template from which a pbr material was created.
  // For example, BasePbrOpaqueDoubleSided.
  private static Guid ParseGuidFromShader(Gltf1Material material) {
    var technique = material.techniquePtr;
    if (technique == null) { return Guid.Empty; }
    var program = technique.programPtr;
    if (program == null) { return Guid.Empty; }
    var shader = program.vertexShaderPtr ?? program.fragmentShaderPtr;
    if (shader == null) { return Guid.Empty; }
    var match = kTiltBrushShaderRegex.Match(shader.uri);
    if (match.Success) {
      return new Guid(match.Groups[1].Value);
    } else {
      return Guid.Empty;
    }
  }

  /// Returns a BrushDescriptor given a gltf material, or null if not found.
  /// If the material is an instance of a template, the descriptor for that
  /// will be returned.
  /// Note that gltf2 has pbr support, and Tilt Brush uses that instead of
  /// template "brushes".
  public static BrushDescriptor LookupBrushDescriptor(GltfMaterialBase gltfMaterial) {
    Guid guid = ParseGuidFromMaterial(gltfMaterial);
    if (guid == Guid.Empty) {
      return null;
    }

    BrushDescriptor desc;
    TbtSettings.Instance.TryGetBrush(guid, out desc);
    if (desc == null) {
      // Maybe it's templated from a pbr material; the template guid
      // can be found on the shader.
      if (!(gltfMaterial is Gltf1Material gltf1Material)) {
        return null;
      }
      Guid templateGuid = ParseGuidFromShader(gltf1Material);
      TbtSettings.Instance.TryGetBrush(templateGuid, out desc);
    }
    return desc;
  }
}
}
| |
using System;
using System.Globalization;
namespace MiscUtil.Conversion
{
/// <summary>
/// Converts doubles to string representations of their exact decimal
/// values. Readability is favoured over efficiency.
/// </summary>
public class DoubleConverter
{
    /// <summary>
    /// Converts the given double to a string representation of its
    /// exact decimal value.
    /// </summary>
    /// <param name="d">The double to convert.</param>
    /// <returns>A string representation of the double's exact decimal value.</returns>
    public static string ToExactString(double d)
    {
        // Special IEEE 754 values first.
        if (double.IsPositiveInfinity(d))
            return "+Infinity";
        if (double.IsNegativeInfinity(d))
            return "-Infinity";
        if (double.IsNaN(d))
            return "NaN";

        // Decompose the double into sign, biased exponent and raw mantissa.
        long bits = BitConverter.DoubleToInt64Bits(d);
        bool isNegative = bits < 0;
        int exponent = (int)((bits >> 52) & 0x7ffL);
        long mantissa = bits & 0xfffffffffffffL;

        if (exponent == 0)
        {
            // Subnormal: exponent is effectively one higher, and there is no
            // implicit leading bit in the mantissa.
            exponent++;
        }
        else
        {
            // Normal: prepend the implicit leading bit.
            mantissa |= 1L << 52;
        }

        // Remove the 1023 bias, plus 52 more because we treat the mantissa
        // as m.0 rather than 0.m.
        exponent -= 1075;

        if (mantissa == 0)
            return "0";

        // Normalize: strip factors of two out of the mantissa.
        while ((mantissa & 1) == 0)
        {
            mantissa >>= 1;
            exponent++;
        }

        ArbitraryDecimal expansion = new ArbitraryDecimal(mantissa);
        if (exponent < 0)
        {
            // Dividing by 2 is the same as multiplying by 5 and shifting the
            // decimal point one place to the left.
            for (int i = 0; i < -exponent; i++)
                expansion.MultiplyBy(5);
            expansion.Shift(-exponent);
        }
        else
        {
            for (int i = 0; i < exponent; i++)
                expansion.MultiplyBy(2);
        }

        return isNegative ? "-" + expansion : expansion.ToString();
    }

    /// <summary>
    /// Private class used for manipulating sequences of decimal digits.
    /// </summary>
    class ArbitraryDecimal
    {
        /// <summary>Digits in the decimal expansion, one byte per digit</summary>
        byte[] digits;

        /// <summary>
        /// How many digits are *after* the decimal point
        /// </summary>
        int decimalPoint = 0;

        /// <summary>
        /// Constructs an arbitrary decimal expansion from the given long.
        /// The long must not be negative.
        /// </summary>
        internal ArbitraryDecimal(long x)
        {
            string text = x.ToString(CultureInfo.InvariantCulture);
            digits = new byte[text.Length];
            for (int i = 0; i < text.Length; i++)
            {
                digits[i] = (byte)(text[i] - '0');
            }
            Normalize();
        }

        /// <summary>
        /// Multiplies the current expansion by the given amount, which should
        /// only be 2 or 5.
        /// </summary>
        internal void MultiplyBy(int amount)
        {
            // Schoolbook multiplication with one extra digit of headroom.
            byte[] result = new byte[digits.Length + 1];
            for (int i = digits.Length - 1; i >= 0; i--)
            {
                int product = digits[i] * amount + result[i + 1];
                result[i] = (byte)(product / 10);
                result[i + 1] = (byte)(product % 10);
            }
            if (result[0] != 0)
            {
                // Carried into the new leading digit: keep the longer array.
                digits = result;
            }
            else
            {
                // No carry: copy back, dropping the unused leading zero.
                Array.Copy(result, 1, digits, 0, digits.Length);
            }
            Normalize();
        }

        /// <summary>
        /// Shifts the decimal point; a negative value makes
        /// the decimal expansion bigger (as fewer digits come after the
        /// decimal place) and a positive value makes the decimal
        /// expansion smaller.
        /// </summary>
        internal void Shift(int amount)
        {
            decimalPoint += amount;
        }

        /// <summary>
        /// Removes leading/trailing zeroes from the expansion.
        /// </summary>
        internal void Normalize()
        {
            int first = 0;
            while (first < digits.Length && digits[first] == 0)
                first++;
            int last = digits.Length - 1;
            while (last >= 0 && digits[last] == 0)
                last--;

            if (first == 0 && last == digits.Length - 1)
                return;

            byte[] trimmed = new byte[last - first + 1];
            for (int i = 0; i < trimmed.Length; i++)
                trimmed[i] = digits[first + i];

            // Dropping trailing zeroes moves the decimal point left.
            decimalPoint -= digits.Length - (last + 1);
            digits = trimmed;
        }

        /// <summary>
        /// Converts the value to a proper decimal string representation.
        /// </summary>
        public override String ToString()
        {
            char[] digitChars = new char[digits.Length];
            for (int i = 0; i < digits.Length; i++)
                digitChars[i] = (char)(digits[i] + '0');

            // Simplest case - nothing after the decimal point,
            // and last real digit is non-zero, eg value=35
            if (decimalPoint == 0)
                return new string(digitChars);

            // Fairly simple case - nothing after the decimal
            // point, but some 0s to add, eg value=350
            if (decimalPoint < 0)
                return new string(digitChars) + new string('0', -decimalPoint);

            // Nothing before the decimal point, eg 0.035
            if (decimalPoint >= digitChars.Length)
                return "0."
                    + new string('0', decimalPoint - digitChars.Length)
                    + new string(digitChars);

            // Most complicated case - part of the string comes
            // before the decimal point, part comes after it, eg 3.5
            int split = digitChars.Length - decimalPoint;
            return new string(digitChars, 0, split)
                + "."
                + new string(digitChars, split, decimalPoint);
        }
    }
}
}
| |
#region License
/*
* TcpListenerWebSocketContext.cs
*
* The MIT License
*
* Copyright (c) 2012-2016 sta.blockhead
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#endregion
#region Contributors
/*
* Contributors:
* - Liryna <liryna.stark@gmail.com>
*/
#endregion
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using System.Net.Security;
using System.Net.Sockets;
using System.Security.Principal;
using System.Text;
namespace WebSocketSharp.Net.WebSockets
{
/// <summary>
/// Provides the properties used to access the information in
/// a WebSocket handshake request received by the <see cref="TcpListener"/>.
/// </summary>
internal class TcpListenerWebSocketContext : WebSocketContext
{
#region Private Fields
private CookieCollection _cookies;
private Logger _logger;
private NameValueCollection _queryString;
private HttpRequest _request;
private bool _secure;
private Stream _stream;
private TcpClient _tcpClient;
private Uri _uri;
private IPrincipal _user;
private WebSocket _websocket;
#endregion
#region Internal Constructors
// Builds the context for a freshly-accepted TCP connection: optionally wraps
// the stream in TLS, reads the opening handshake request, and creates the
// server-side WebSocket. Note this performs blocking network I/O.
internal TcpListenerWebSocketContext (
  TcpClient tcpClient,
  string protocol,
  bool secure,
  ServerSslConfiguration sslConfig,
  Logger logger
)
{
  _tcpClient = tcpClient;
  _secure = secure;
  _logger = logger;
  var netStream = tcpClient.GetStream ();
  if (secure) {
    // Wrap the raw stream in TLS and run the server-side handshake before
    // any HTTP data is read. 'false' leaves the inner stream owned by the
    // SslStream (closed with it).
    var sslStream =
      new SslStream (netStream, false, sslConfig.ClientCertificateValidationCallback);
    sslStream.AuthenticateAsServer (
      sslConfig.ServerCertificate,
      sslConfig.ClientCertificateRequired,
      sslConfig.EnabledSslProtocols,
      sslConfig.CheckCertificateRevocation
    );
    _stream = sslStream;
  }
  else {
    _stream = netStream;
  }
  // Read the opening handshake request. 90000 is presumably a millisecond
  // read timeout — TODO confirm against HttpRequest.Read.
  _request = HttpRequest.Read (_stream, 90000);
  // Reconstruct the request uri from the request line and Host header.
  _uri =
    HttpUtility.CreateRequestUrl (
      _request.RequestUri, _request.Headers["Host"], _request.IsWebSocketRequest, secure
    );
  _websocket = new WebSocket (this, protocol);
}
#endregion
#region Internal Properties
internal Logger Log {
get {
return _logger;
}
}
internal Stream Stream {
get {
return _stream;
}
}
#endregion
#region Public Properties
/// <summary>
/// Gets the HTTP cookies included in the request.
/// </summary>
/// <value>
/// A <see cref="WebSocketSharp.Net.CookieCollection"/> that contains the cookies.
/// </value>
public override CookieCollection CookieCollection {
get {
return _cookies ?? (_cookies = _request.Cookies);
}
}
/// <summary>
/// Gets the HTTP headers included in the request.
/// </summary>
/// <value>
/// A <see cref="NameValueCollection"/> that contains the headers.
/// </value>
public override NameValueCollection Headers {
get {
return _request.Headers;
}
}
/// <summary>
/// Gets the value of the Host header included in the request.
/// </summary>
/// <value>
/// A <see cref="string"/> that represents the value of the Host header.
/// </value>
public override string Host {
get {
return _request.Headers["Host"];
}
}
/// <summary>
/// Gets a value indicating whether the client is authenticated.
/// </summary>
/// <value>
/// <c>true</c> if the client is authenticated; otherwise, <c>false</c>.
/// </value>
public override bool IsAuthenticated {
get {
return _user != null;
}
}
/// <summary>
/// Gets a value indicating whether the client connected from the local computer.
/// </summary>
/// <value>
/// <c>true</c> if the client connected from the local computer; otherwise, <c>false</c>.
/// </value>
public override bool IsLocal {
get {
return UserEndPoint.Address.IsLocal ();
}
}
/// <summary>
/// Gets a value indicating whether the WebSocket connection is secured.
/// </summary>
/// <value>
/// <c>true</c> if the connection is secured; otherwise, <c>false</c>.
/// </value>
public override bool IsSecureConnection {
get {
return _secure;
}
}
/// <summary>
/// Gets a value indicating whether the request is a WebSocket handshake request.
/// </summary>
/// <value>
/// <c>true</c> if the request is a WebSocket handshake request; otherwise, <c>false</c>.
/// </value>
public override bool IsWebSocketRequest {
get {
return _request.IsWebSocketRequest;
}
}
/// <summary>
/// Gets the value of the Origin header included in the request.
/// </summary>
/// <value>
/// A <see cref="string"/> that represents the value of the Origin header.
/// </value>
public override string Origin {
get {
return _request.Headers["Origin"];
}
}
/// <summary>
/// Gets the query string included in the request.
/// </summary>
/// <value>
/// A <see cref="NameValueCollection"/> that contains the query string parameters.
/// </value>
public override NameValueCollection QueryString {
get {
return _queryString
?? (
_queryString =
HttpUtility.InternalParseQueryString (
_uri != null ? _uri.Query : null, Encoding.UTF8
)
);
}
}
/// <summary>
/// Gets the URI requested by the client.
/// </summary>
/// <value>
/// A <see cref="Uri"/> that represents the requested URI.
/// </value>
public override Uri RequestUri {
get {
return _uri;
}
}
/// <summary>
/// Gets the value of the Sec-WebSocket-Key header included in the request.
/// </summary>
/// <remarks>
/// This property provides a part of the information used by the server to prove that
/// it received a valid WebSocket handshake request.
/// </remarks>
/// <value>
/// A <see cref="string"/> that represents the value of the Sec-WebSocket-Key header.
/// </value>
public override string SecWebSocketKey {
get {
return _request.Headers["Sec-WebSocket-Key"];
}
}
/// <summary>
/// Gets the values of the Sec-WebSocket-Protocol header included in the request.
/// </summary>
/// <remarks>
/// This property represents the subprotocols requested by the client.
/// </remarks>
/// <value>
/// An <see cref="T:System.Collections.Generic.IEnumerable{string}"/> instance that provides
/// an enumerator which supports the iteration over the values of the Sec-WebSocket-Protocol
/// header.
/// </value>
public override IEnumerable<string> SecWebSocketProtocols {
get {
var protocols = _request.Headers["Sec-WebSocket-Protocol"];
if (protocols != null) {
foreach (var protocol in protocols.Split (','))
yield return protocol.Trim ();
}
}
}
/// <summary>
/// Gets the value of the Sec-WebSocket-Version header included in the request.
/// </summary>
/// <remarks>
/// This property represents the WebSocket protocol version.
/// </remarks>
/// <value>
/// A <see cref="string"/> that represents the value of the Sec-WebSocket-Version header.
/// </value>
public override string SecWebSocketVersion {
get {
return _request.Headers["Sec-WebSocket-Version"];
}
}
/// <summary>
/// Gets the server endpoint as an IP address and a port number.
/// </summary>
/// <value>
/// A <see cref="System.Net.IPEndPoint"/> that represents the server endpoint.
/// </value>
public override System.Net.IPEndPoint ServerEndPoint {
get {
return (System.Net.IPEndPoint) _tcpClient.Client.LocalEndPoint;
}
}
/// <summary>
/// Gets the client information (identity, authentication, and security roles).
/// </summary>
/// <value>
/// A <see cref="IPrincipal"/> instance that represents the client information.
/// </value>
public override IPrincipal User {
get {
return _user;
}
}
/// <summary>
/// Gets the client endpoint as an IP address and a port number.
/// </summary>
/// <value>
/// A <see cref="System.Net.IPEndPoint"/> that represents the client endpoint.
/// </value>
public override System.Net.IPEndPoint UserEndPoint {
get {
return (System.Net.IPEndPoint) _tcpClient.Client.RemoteEndPoint;
}
}
/// <summary>
/// Gets the <see cref="WebSocketSharp.WebSocket"/> instance used for
/// two-way communication between client and server.
/// </summary>
/// <value>
/// A <see cref="WebSocketSharp.WebSocket"/>.
/// </value>
public override WebSocket WebSocket {
get {
return _websocket;
}
}
#endregion
#region Internal Methods
internal bool Authenticate (
AuthenticationSchemes scheme,
string realm,
Func<IIdentity, NetworkCredential> credentialsFinder
)
{
if (scheme == AuthenticationSchemes.Anonymous)
return true;
if (scheme == AuthenticationSchemes.None) {
Close (HttpStatusCode.Forbidden);
return false;
}
var chal = new AuthenticationChallenge (scheme, realm).ToString ();
var retry = -1;
Func<bool> auth = null;
auth =
() => {
retry++;
if (retry > 99) {
Close (HttpStatusCode.Forbidden);
return false;
}
var user =
HttpUtility.CreateUser (
_request.Headers["Authorization"],
scheme,
realm,
_request.HttpMethod,
credentialsFinder
);
if (user == null || !user.Identity.IsAuthenticated) {
SendAuthenticationChallenge (chal);
return auth ();
}
_user = user;
return true;
};
return auth ();
}
internal void Close ()
{
_stream.Close ();
_tcpClient.Close ();
}
internal void Close (HttpStatusCode code)
{
_websocket.Close (HttpResponse.CreateCloseResponse (code));
}
internal void SendAuthenticationChallenge (string challenge)
{
var buff = HttpResponse.CreateUnauthorizedResponse (challenge).ToByteArray ();
_stream.Write (buff, 0, buff.Length);
_request = HttpRequest.Read (_stream, 15000);
}
#endregion
#region Public Methods
/// <summary>
/// Returns a <see cref="string"/> that represents
/// the current <see cref="TcpListenerWebSocketContext"/>.
/// </summary>
/// <returns>
/// A <see cref="string"/> that represents
/// the current <see cref="TcpListenerWebSocketContext"/>.
/// </returns>
public override string ToString ()
{
return _request.ToString ();
}
#endregion
}
}
| |
using System;
using Htc.Vita.Core.Log;
namespace Htc.Vita.Core.Util
{
/// <summary>
/// Class Win32Registry.
/// </summary>
public static partial class Win32Registry
{
/// <summary>
/// Deletes the key.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="keyName">Name of the key.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool DeleteKey(
Hive root,
string keyPath,
string keyName)
{
return DeleteKey32(
root,
keyPath,
keyName
) && DeleteKey64(
root,
keyPath,
keyName
);
}
/// <summary>
/// Deletes the key in 32-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="keyName">Name of the key.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool DeleteKey32(
Hive root,
string keyPath,
string keyName)
{
if (string.IsNullOrWhiteSpace(keyPath) || string.IsNullOrEmpty(keyName))
{
return false;
}
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry32))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadWriteSubTree))
{
subKey?.DeleteSubKeyTree(
keyName,
false
);
return true;
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail deleting key \"{keyName}\" from 32-bit registry: {e}");
}
return false;
}
/// <summary>
/// Deletes the key in 64-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="keyName">Name of the key.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool DeleteKey64(
Hive root,
string keyPath,
string keyName)
{
if (string.IsNullOrWhiteSpace(keyPath) || string.IsNullOrEmpty(keyName))
{
return false;
}
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry64))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadWriteSubTree))
{
subKey?.DeleteSubKeyTree(
keyName,
false
);
return true;
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail deleting key \"{keyName}\" from 64-bit registry: {e}");
}
return false;
}
/// <summary>
/// Deletes the value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool DeleteValue(
Hive root,
string keyPath,
string valueName)
{
return DeleteValue32(
root,
keyPath,
valueName
) && DeleteValue64(
root,
keyPath,
valueName
);
}
/// <summary>
/// Deletes the value in 32-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool DeleteValue32(
Hive root,
string keyPath,
string valueName)
{
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry32))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadWriteSubTree))
{
subKey?.DeleteValue(
valueName,
false
);
return true;
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail deleting value \"{valueName}\" from 32-bit registry: {e}");
}
return false;
}
/// <summary>
/// Deletes the value in 64-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool DeleteValue64(
Hive root,
string keyPath,
string valueName)
{
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry64))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadWriteSubTree))
{
subKey?.DeleteValue(
valueName,
false
);
return true;
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail deleting value \"{valueName}\" from 64-bit registry: {e}");
}
return false;
}
/// <summary>
/// Gets the DWord value in 32-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns>System.Int32.</returns>
public static int GetDwordValue32(
Hive root,
string keyPath,
string valueName)
{
return GetDwordValue32(
root,
keyPath,
valueName,
0
);
}
/// <summary>
/// Gets the DWord value in 32-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>System.Int32.</returns>
public static int GetDwordValue32(
Hive root,
string keyPath,
string valueName,
int defaultValue)
{
if (string.IsNullOrWhiteSpace(keyPath))
{
return defaultValue;
}
var result = defaultValue;
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry32))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadSubTree))
{
if (subKey == null)
{
return defaultValue;
}
var value = subKey.GetValue(valueName);
if (value == null)
{
return defaultValue;
}
if (subKey.GetValueKind(valueName) == ValueKind.DWord)
{
result = (int)value;
}
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail getting dword value with name \"{valueName}\" from 32-bit registry: {e.Message}");
}
return result;
}
/// <summary>
/// Gets the DWord value in 64-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns>System.Int32.</returns>
public static int GetDwordValue64(
Hive root,
string keyPath,
string valueName)
{
return GetDwordValue64(
root,
keyPath,
valueName,
0
);
}
/// <summary>
/// Gets the DWord value in 64-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>System.Int32.</returns>
public static int GetDwordValue64(
Hive root,
string keyPath,
string valueName,
int defaultValue)
{
if (string.IsNullOrWhiteSpace(keyPath))
{
return defaultValue;
}
var result = defaultValue;
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry64))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadSubTree))
{
if (subKey == null)
{
return defaultValue;
}
var value = subKey.GetValue(valueName);
if (value == null)
{
return defaultValue;
}
if (subKey.GetValueKind(valueName) == ValueKind.DWord)
{
result = (int)value;
}
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail getting dword value with name \"{valueName}\" from 64-bit registry: {e.Message}");
}
return result;
}
/// <summary>
/// Gets the int value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns>System.Int32.</returns>
public static int GetIntValue(
Hive root,
string keyPath,
string valueName)
{
return GetIntValue(
root,
keyPath,
valueName,
0
);
}
/// <summary>
/// Gets the int value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>System.Int32.</returns>
public static int GetIntValue(
Hive root,
string keyPath,
string valueName,
int defaultValue)
{
var result = Convert.ToInt32(
GetStringValue(
root,
keyPath,
valueName
),
defaultValue
);
if (result != defaultValue)
{
return result;
}
result = GetDwordValue64(
root,
keyPath,
valueName,
result
);
if (result != defaultValue)
{
return result;
}
return GetDwordValue32(
root,
keyPath,
valueName,
result
);
}
/// <summary>
/// Gets the string value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns>System.String.</returns>
public static string GetStringValue(
Hive root,
string keyPath,
string valueName)
{
return GetStringValue(
root,
keyPath,
valueName,
null
);
}
/// <summary>
/// Gets the string value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>System.String.</returns>
public static string GetStringValue(
Hive root,
string keyPath,
string valueName,
string defaultValue)
{
return (GetStringValue64(root, keyPath, valueName) ?? GetStringValue32(root, keyPath, valueName)) ?? defaultValue;
}
/// <summary>
/// Gets the string value in 32-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns>System.String.</returns>
public static string GetStringValue32(
Hive root,
string keyPath,
string valueName)
{
return GetStringValue32(
root,
keyPath,
valueName,
null
);
}
/// <summary>
/// Gets the string value in 32-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>System.String.</returns>
public static string GetStringValue32(
Hive root,
string keyPath,
string valueName,
string defaultValue)
{
if (string.IsNullOrWhiteSpace(keyPath))
{
return defaultValue;
}
var result = defaultValue;
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry32))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadSubTree))
{
if (subKey == null)
{
return defaultValue;
}
var value = subKey.GetValue(valueName);
if (value == null)
{
return defaultValue;
}
if (subKey.GetValueKind(valueName) == ValueKind.String)
{
result = (string)value;
}
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail getting string value with name \"{valueName}\" from 32-bit registry: {e.Message}");
}
return result;
}
/// <summary>
/// Gets the string value in 64-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <returns>System.String.</returns>
public static string GetStringValue64(
Hive root,
string keyPath,
string valueName)
{
return GetStringValue64(
root,
keyPath,
valueName,
null
);
}
/// <summary>
/// Gets the string value in 64-bit section.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="defaultValue">The default value.</param>
/// <returns>System.String.</returns>
public static string GetStringValue64(
Hive root,
string keyPath,
string valueName,
string defaultValue)
{
if (string.IsNullOrWhiteSpace(keyPath))
{
return defaultValue;
}
var result = defaultValue;
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Registry64))
{
using (var subKey = baseKey.OpenSubKey(keyPath, KeyPermissionCheck.ReadSubTree))
{
if (subKey == null)
{
return defaultValue;
}
var value = subKey.GetValue(valueName);
if (value == null)
{
return defaultValue;
}
if (subKey.GetValueKind(valueName) == ValueKind.String)
{
result = (string)value;
}
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail getting string value with name \"{valueName}\" from 64-bit registry: {e.Message}");
}
return result;
}
/// <summary>
/// Sets the string value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="valueData">The value data.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool SetStringValue(
Hive root,
string keyPath,
string valueName,
string valueData)
{
var result = false;
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Default))
{
using (var key = baseKey.CreateSubKey(keyPath))
{
if (key != null)
{
key.SetValue(valueName, valueData, ValueKind.String);
result = true;
}
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail setting string value with name \"{valueName}\" to registry: {e.Message}");
}
return result;
}
/// <summary>
/// Sets the DWord value.
/// </summary>
/// <param name="root">The root.</param>
/// <param name="keyPath">The key path.</param>
/// <param name="valueName">Name of the value.</param>
/// <param name="valueData">The value data.</param>
/// <returns><c>true</c> if success, <c>false</c> otherwise.</returns>
public static bool SetDwordValue(
Hive root,
string keyPath,
string valueName,
int valueData)
{
var result = false;
try
{
using (var baseKey = Key.OpenBaseKey(root, View.Default))
{
using (var key = baseKey.CreateSubKey(keyPath))
{
if (key != null)
{
key.SetValue(
valueName,
valueData,
ValueKind.DWord
);
result = true;
}
}
}
}
catch (Exception e)
{
Logger.GetInstance(typeof(Win32Registry)).Error($"Fail setting dword value with name \"{valueName}\" to registry: {e.Message}");
}
return result;
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Client;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Framework.Console;
using OpenSim.Region.Physics.Manager;
using Mono.Addins;
namespace OpenSim.Region.RegionCombinerModule
{
public class RegionCombinerModule : ISharedRegionModule, IRegionCombinerModule
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
// private static string LogHeader = "[REGION COMBINER MODULE]";
        // The module name reported to the region module loader.
        public string Name
        {
            get { return "RegionCombinerModule"; }
        }

        // Returning null marks this module as non-replaceable.
        public Type ReplaceableInterface
        {
            get { return null; }
        }
        /// <summary>
        /// Is this module enabled? Read from the [Startup] config flag
        /// "CombineContiguousRegions" in Initialise().
        /// </summary>
        private bool m_combineContiguousRegions = false;

        /// <summary>
        /// This holds the root regions for the megaregions.
        /// </summary>
        /// <remarks>
        /// Usually there is only ever one megaregion (and hence only one entry here).
        /// </remarks>
        private Dictionary<UUID, RegionConnections> m_regions = new Dictionary<UUID, RegionConnections>();

        /// <summary>
        /// The scenes that comprise the megaregion.
        /// </summary>
        private Dictionary<UUID, Scene> m_startingScenes = new Dictionary<UUID, Scene>();
public void Initialise(IConfigSource source)
{
IConfig myConfig = source.Configs["Startup"];
m_combineContiguousRegions = myConfig.GetBoolean("CombineContiguousRegions", false);
MainConsole.Instance.Commands.AddCommand(
"RegionCombinerModule", false, "fix-phantoms", "fix-phantoms",
"Fixes phantom objects after an import to a megaregion or a change from a megaregion back to normal regions",
FixPhantoms);
}
        // Nothing to release; the module holds no external resources.
        public void Close()
        {
        }
public void AddRegion(Scene scene)
{
if (m_combineContiguousRegions)
scene.RegisterModuleInterface<IRegionCombinerModule>(this);
}
public void RemoveRegion(Scene scene)
{
lock (m_startingScenes)
m_startingScenes.Remove(scene.RegionInfo.originRegionID);
}
public void RegionLoaded(Scene scene)
{
lock (m_startingScenes)
m_startingScenes.Add(scene.RegionInfo.originRegionID, scene);
if (m_combineContiguousRegions)
{
RegionLoadedDoWork(scene);
scene.EventManager.OnNewPresence += NewPresence;
}
}
public bool IsRootForMegaregion(UUID regionId)
{
lock (m_regions)
return m_regions.ContainsKey(regionId);
}
public Vector2 GetSizeOfMegaregion(UUID regionId)
{
lock (m_regions)
{
if (m_regions.ContainsKey(regionId))
{
RegionConnections rootConn = m_regions[regionId];
return new Vector2((float)rootConn.XEnd, (float)rootConn.YEnd);
}
}
throw new Exception(string.Format("Region with id {0} not found", regionId));
}
// Test to see if this postiion (relative to the region) is within the area covered
// by this megaregion.
public bool PositionIsInMegaregion(UUID currentRegion, int xx, int yy)
{
bool ret = false;
if (xx < 0 || yy < 0)
return ret;
foreach (RegionConnections rootRegion in m_regions.Values)
{
if (currentRegion == rootRegion.RegionId)
{
// The caller is in the root region so this is an easy test
if (xx < rootRegion.XEnd && yy < rootRegion.YEnd)
{
ret = true;
}
break;
}
else
{
// Maybe the caller is in one of the sub-regions
foreach (RegionData childRegion in rootRegion.ConnectedRegions)
{
if (currentRegion == childRegion.RegionId)
{
// This is a child. Diddle the offsets and check if in
Vector3 positionInMegaregion = childRegion.Offset;
positionInMegaregion.X += xx;
positionInMegaregion.Y += yy;
if (positionInMegaregion.X < rootRegion.XEnd && positionInMegaregion.Y < rootRegion.YEnd)
{
ret = true;
}
break;
}
}
}
}
return ret;
}
        // Rewrites a newly-arrived child agent's throttle settings so they suit
        // a megaregion (notably the land throttle, which is scaled up below).
        // Root agents are left untouched.
        private void NewPresence(ScenePresence presence)
        {
            if (presence.IsChildAgent)
            {
                byte[] throttleData;

                try
                {
                    throttleData = presence.ControllingClient.GetThrottlesPacked(1);
                }
                catch (NotImplementedException)
                {
                    // Client stack does not expose packed throttles; nothing to adjust.
                    return;
                }

                // The packed form is exactly 7 little-endian floats (28 bytes);
                // ignore anything else.
                if (throttleData == null)
                    return;

                if (throttleData.Length == 0)
                    return;

                if (throttleData.Length != 28)
                    return;

                byte[] adjData;
                int pos = 0;

                // Normalize to little-endian byte order before decoding the floats.
                if (!BitConverter.IsLittleEndian)
                {
                    byte[] newData = new byte[7 * 4];
                    Buffer.BlockCopy(throttleData, 0, newData, 0, 7 * 4);

                    for (int i = 0; i < 7; i++)
                        Array.Reverse(newData, i * 4, 4);

                    adjData = newData;
                }
                else
                {
                    adjData = throttleData;
                }

                // 0.125f converts from bits to bytes
                int resend = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f); pos += 4;
                int land = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f); pos += 4;
                int wind = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f); pos += 4;
                int cloud = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f); pos += 4;
                int task = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f); pos += 4;
                int texture = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f); pos += 4;
                int asset = (int)(BitConverter.ToSingle(adjData, pos) * 0.125f);
                // State is a subcategory of task that we allocate a percentage to
                //int total = resend + land + wind + cloud + task + texture + asset;

                // Repack, boosting only the land throttle by 50x - presumably to
                // cover the much larger land area of a megaregion; confirm.
                byte[] data = new byte[7 * 4];
                int ii = 0;

                Buffer.BlockCopy(Utils.FloatToBytes(resend), 0, data, ii, 4); ii += 4;
                Buffer.BlockCopy(Utils.FloatToBytes(land * 50), 0, data, ii, 4); ii += 4;
                Buffer.BlockCopy(Utils.FloatToBytes(wind), 0, data, ii, 4); ii += 4;
                Buffer.BlockCopy(Utils.FloatToBytes(cloud), 0, data, ii, 4); ii += 4;
                Buffer.BlockCopy(Utils.FloatToBytes(task), 0, data, ii, 4); ii += 4;
                Buffer.BlockCopy(Utils.FloatToBytes(texture), 0, data, ii, 4); ii += 4;
                Buffer.BlockCopy(Utils.FloatToBytes(asset), 0, data, ii, 4);

                try
                {
                    presence.ControllingClient.SetChildAgentThrottle(data);
                }
                catch (NotImplementedException)
                {
                    return;
                }
            }
        }
private void RegionLoadedDoWork(Scene scene)
{
/*
// For testing on a single instance
if (scene.RegionInfo.RegionLocX == 1004 && scene.RegionInfo.RegionLocY == 1000)
return;
//
*/
RegionConnections newConn = new RegionConnections();
newConn.ConnectedRegions = new List<RegionData>();
newConn.RegionScene = scene;
newConn.RegionLandChannel = scene.LandChannel;
newConn.RegionId = scene.RegionInfo.originRegionID;
newConn.X = scene.RegionInfo.RegionLocX;
newConn.Y = scene.RegionInfo.RegionLocY;
newConn.XEnd = scene.RegionInfo.RegionSizeX;
newConn.YEnd = scene.RegionInfo.RegionSizeX;
lock (m_regions)
{
bool connectedYN = false;
foreach (RegionConnections rootConn in m_regions.Values)
{
#region commented
/*
// If we're one region over +x +y
//xxy
//xxx
//xxx
if ((((int)conn.X * (int)Constants.RegionSize) + conn.XEnd
== (regionConnections.X * (int)Constants.RegionSize))
&& (((int)conn.Y * (int)Constants.RegionSize) - conn.YEnd
== (regionConnections.Y * (int)Constants.RegionSize)))
{
Vector3 offset = Vector3.Zero;
offset.X = (((regionConnections.X * (int) Constants.RegionSize)) -
((conn.X * (int) Constants.RegionSize)));
offset.Y = (((regionConnections.Y * (int) Constants.RegionSize)) -
((conn.Y * (int) Constants.RegionSize)));
Vector3 extents = Vector3.Zero;
extents.Y = regionConnections.YEnd + conn.YEnd;
extents.X = conn.XEnd + conn.XEnd;
m_log.DebugFormat("Scene: {0} to the northwest of Scene{1}. Offset: {2}. Extents:{3}",
conn.RegionScene.RegionInfo.RegionName,
regionConnections.RegionScene.RegionInfo.RegionName,
offset, extents);
scene.PhysicsScene.Combine(conn.RegionScene.PhysicsScene, offset, extents);
connectedYN = true;
break;
}
*/
/*
//If we're one region over x +y
//xxx
//xxx
//xyx
if ((((int)conn.X * (int)Constants.RegionSize)
== (regionConnections.X * (int)Constants.RegionSize))
&& (((int)conn.Y * (int)Constants.RegionSize) - conn.YEnd
== (regionConnections.Y * (int)Constants.RegionSize)))
{
Vector3 offset = Vector3.Zero;
offset.X = (((regionConnections.X * (int)Constants.RegionSize)) -
((conn.X * (int)Constants.RegionSize)));
offset.Y = (((regionConnections.Y * (int)Constants.RegionSize)) -
((conn.Y * (int)Constants.RegionSize)));
Vector3 extents = Vector3.Zero;
extents.Y = regionConnections.YEnd + conn.YEnd;
extents.X = conn.XEnd;
m_log.DebugFormat("Scene: {0} to the north of Scene{1}. Offset: {2}. Extents:{3}",
conn.RegionScene.RegionInfo.RegionName,
regionConnections.RegionScene.RegionInfo.RegionName, offset, extents);
scene.PhysicsScene.Combine(conn.RegionScene.PhysicsScene, offset, extents);
connectedYN = true;
break;
}
*/
/*
// If we're one region over -x +y
//xxx
//xxx
//yxx
if ((((int)conn.X * (int)Constants.RegionSize) - conn.XEnd
== (regionConnections.X * (int)Constants.RegionSize))
&& (((int)conn.Y * (int)Constants.RegionSize) - conn.YEnd
== (regionConnections.Y * (int)Constants.RegionSize)))
{
Vector3 offset = Vector3.Zero;
offset.X = (((regionConnections.X * (int)Constants.RegionSize)) -
((conn.X * (int)Constants.RegionSize)));
offset.Y = (((regionConnections.Y * (int)Constants.RegionSize)) -
((conn.Y * (int)Constants.RegionSize)));
Vector3 extents = Vector3.Zero;
extents.Y = regionConnections.YEnd + conn.YEnd;
extents.X = conn.XEnd + conn.XEnd;
m_log.DebugFormat("Scene: {0} to the northeast of Scene. Offset: {2}. Extents:{3}",
conn.RegionScene.RegionInfo.RegionName,
regionConnections.RegionScene.RegionInfo.RegionName, offset, extents);
scene.PhysicsScene.Combine(conn.RegionScene.PhysicsScene, offset, extents);
connectedYN = true;
break;
}
*/
/*
// If we're one region over -x y
//xxx
//yxx
//xxx
if ((((int)conn.X * (int)Constants.RegionSize) - conn.XEnd
== (regionConnections.X * (int)Constants.RegionSize))
&& (((int)conn.Y * (int)Constants.RegionSize)
== (regionConnections.Y * (int)Constants.RegionSize)))
{
Vector3 offset = Vector3.Zero;
offset.X = (((regionConnections.X * (int)Constants.RegionSize)) -
((conn.X * (int)Constants.RegionSize)));
offset.Y = (((regionConnections.Y * (int)Constants.RegionSize)) -
((conn.Y * (int)Constants.RegionSize)));
Vector3 extents = Vector3.Zero;
extents.Y = regionConnections.YEnd;
extents.X = conn.XEnd + conn.XEnd;
m_log.DebugFormat("Scene: {0} to the east of Scene{1} Offset: {2}. Extents:{3}",
conn.RegionScene.RegionInfo.RegionName,
regionConnections.RegionScene.RegionInfo.RegionName, offset, extents);
scene.PhysicsScene.Combine(conn.RegionScene.PhysicsScene, offset, extents);
connectedYN = true;
break;
}
*/
/*
// If we're one region over -x -y
//yxx
//xxx
//xxx
if ((((int)conn.X * (int)Constants.RegionSize) - conn.XEnd
== (regionConnections.X * (int)Constants.RegionSize))
&& (((int)conn.Y * (int)Constants.RegionSize) + conn.YEnd
== (regionConnections.Y * (int)Constants.RegionSize)))
{
Vector3 offset = Vector3.Zero;
offset.X = (((regionConnections.X * (int)Constants.RegionSize)) -
((conn.X * (int)Constants.RegionSize)));
offset.Y = (((regionConnections.Y * (int)Constants.RegionSize)) -
((conn.Y * (int)Constants.RegionSize)));
Vector3 extents = Vector3.Zero;
extents.Y = regionConnections.YEnd + conn.YEnd;
extents.X = conn.XEnd + conn.XEnd;
m_log.DebugFormat("Scene: {0} to the northeast of Scene{1} Offset: {2}. Extents:{3}",
conn.RegionScene.RegionInfo.RegionName,
regionConnections.RegionScene.RegionInfo.RegionName, offset, extents);
scene.PhysicsScene.Combine(conn.RegionScene.PhysicsScene, offset, extents);
connectedYN = true;
break;
}
*/
#endregion
// Check to see if this new region is adjacent to the root region.
// Note that we expect the regions to be combined from the root region outward
// thus the requirement for the ordering in the configuration files.
// If we're one region over +x y (i.e. root region is to the west)
//xxx
//xxy
//xxx
if (rootConn.PosX + rootConn.XEnd >= newConn.PosX && rootConn.PosY >= newConn.PosY)
{
connectedYN = DoWorkForOneRegionOverPlusXY(rootConn, newConn, scene);
break;
}
// If we're one region over x +y (i.e. root region is to the south)
//xyx
//xxx
//xxx
if (rootConn.PosX >= newConn.PosX && rootConn.PosY + rootConn.YEnd >= newConn.PosY)
{
connectedYN = DoWorkForOneRegionOverPlusXY(rootConn, newConn, scene);
break;
}
// If we're one region over +x +y (i.e. root region is to the south-west)
//xxy
//xxx
//xxx
if (rootConn.PosX + rootConn.XEnd >= newConn.PosX && rootConn.PosY + rootConn.YEnd >= newConn.PosY)
{
connectedYN = DoWorkForOneRegionOverPlusXY(rootConn, newConn, scene);
break;
}
}
// If !connectYN means that this region is a root region
if (!connectedYN)
{
DoWorkForRootRegion(newConn, scene);
}
}
}
/// <summary>
/// Connects a newly added region to the root region of a mega-region: grows the
/// root region's extents, registers the new region as a connected region, and
/// wires the physics scenes and client event forwarding together.
/// </summary>
/// <returns>Always true, signalling that the region was connected.</returns>
private bool DoWorkForOneRegionOverPlusXY(RegionConnections rootConn, RegionConnections newConn, Scene scene)
{
    // Displacement (in meters) from the base of the root region to the base of
    // the new region.
    Vector3 regionOffset = Vector3.Zero;
    regionOffset.X = newConn.PosX - rootConn.PosX;
    regionOffset.Y = newConn.PosY - rootConn.PosY;

    // Grow the root region's extents so they also cover the new region. The
    // extents may temporarily include areas where no region exists yet.
    Vector3 combinedExtents = Vector3.Zero;
    combinedExtents.X = Math.Max(rootConn.XEnd, regionOffset.X + newConn.RegionScene.RegionInfo.RegionSizeX);
    combinedExtents.Y = Math.Max(rootConn.YEnd, regionOffset.Y + newConn.RegionScene.RegionInfo.RegionSizeY);
    rootConn.UpdateExtents(combinedExtents);

    m_log.DebugFormat(
        "[REGION COMBINER MODULE]: Root region {0} is to the west of region {1}, Offset: {2}, Extents: {3}",
        rootConn.RegionScene.RegionInfo.RegionName,
        newConn.RegionScene.RegionInfo.RegionName, regionOffset, combinedExtents);

    // Record the new region as a child of the root region.
    RegionData childRegion = new RegionData();
    childRegion.Offset = regionOffset;
    childRegion.RegionId = scene.RegionInfo.originRegionID;
    childRegion.RegionScene = scene;
    rootConn.ConnectedRegions.Add(childRegion);

    // Inform root region Physics about the new, larger extents.
    rootConn.RegionScene.PhysicsScene.Combine(null, Vector3.Zero, combinedExtents);

    // Inform the child region that it must forward its terrain to the root region.
    scene.PhysicsScene.Combine(rootConn.RegionScene.PhysicsScene, regionOffset, Vector3.Zero);

    // Reload terrain: it loaded before we got here, so push it again so that it
    // lands in the root region's physics scene.
    scene.PhysicsScene.SetTerrain(scene.Heightmap.GetFloatsSerialised());

    // Add this region's client events to the root region's forwarder.
    if (rootConn.ClientEventForwarder != null)
        rootConn.ClientEventForwarder.AddSceneToEventForwarding(scene);

    return true;
}
/*
* 20140215 radams1: The border stuff was removed and the addition of regions to the mega-regions
* was generalized. These functions are not needed for the generalized solution but left for reference.
private bool DoWorkForOneRegionOverXPlusY(RegionConnections rootConn, RegionConnections newConn, Scene scene)
{
Vector3 offset = Vector3.Zero;
offset.X = newConn.PosX - rootConn.PosX;
offset.Y = newConn.PosY - rootConn.PosY;
Vector3 extents = Vector3.Zero;
extents.Y = newConn.YEnd + rootConn.YEnd;
extents.X = rootConn.XEnd;
rootConn.UpdateExtents(extents);
RegionData ConnectedRegion = new RegionData();
ConnectedRegion.Offset = offset;
ConnectedRegion.RegionId = scene.RegionInfo.originRegionID;
ConnectedRegion.RegionScene = scene;
rootConn.ConnectedRegions.Add(ConnectedRegion);
m_log.DebugFormat(
"[REGION COMBINER MODULE]: Root region {0} is to the south of region {1}, Offset: {2}, Extents: {3}",
rootConn.RegionScene.RegionInfo.RegionName,
newConn.RegionScene.RegionInfo.RegionName, offset, extents);
rootConn.RegionScene.PhysicsScene.Combine(null, Vector3.Zero, extents);
scene.PhysicsScene.Combine(rootConn.RegionScene.PhysicsScene, offset, Vector3.Zero);
// Reset Terrain.. since terrain normally loads first.
//conn.RegionScene.PhysicsScene.SetTerrain(conn.RegionScene.Heightmap.GetFloatsSerialised());
scene.PhysicsScene.SetTerrain(scene.Heightmap.GetFloatsSerialised());
//conn.RegionScene.PhysicsScene.SetTerrain(conn.RegionScene.Heightmap.GetFloatsSerialised());
if (rootConn.ClientEventForwarder != null)
rootConn.ClientEventForwarder.AddSceneToEventForwarding(scene);
return true;
}
private bool DoWorkForOneRegionOverPlusXPlusY(RegionConnections rootConn, RegionConnections newConn, Scene scene)
{
Vector3 offset = Vector3.Zero;
offset.X = newConn.PosX - rootConn.PosX;
offset.Y = newConn.PosY - rootConn.PosY;
Vector3 extents = Vector3.Zero;
// We do not want to inflate the extents for regions strictly to the NE of the root region, since this
// would double count regions strictly to the north and east that have already been added.
// extents.Y = regionConnections.YEnd + conn.YEnd;
// extents.X = regionConnections.XEnd + conn.XEnd;
// conn.UpdateExtents(extents);
extents.Y = rootConn.YEnd;
extents.X = rootConn.XEnd;
RegionData ConnectedRegion = new RegionData();
ConnectedRegion.Offset = offset;
ConnectedRegion.RegionId = scene.RegionInfo.originRegionID;
ConnectedRegion.RegionScene = scene;
rootConn.ConnectedRegions.Add(ConnectedRegion);
m_log.DebugFormat(
"[REGION COMBINER MODULE]: Region {0} is to the southwest of Scene {1}, Offset: {2}, Extents: {3}",
rootConn.RegionScene.RegionInfo.RegionName,
newConn.RegionScene.RegionInfo.RegionName, offset, extents);
rootConn.RegionScene.PhysicsScene.Combine(null, Vector3.Zero, extents);
scene.PhysicsScene.Combine(rootConn.RegionScene.PhysicsScene, offset, Vector3.Zero);
// Reset Terrain.. since terrain normally loads first.
//conn.RegionScene.PhysicsScene.SetTerrain(conn.RegionScene.Heightmap.GetFloatsSerialised());
scene.PhysicsScene.SetTerrain(scene.Heightmap.GetFloatsSerialised());
//conn.RegionScene.PhysicsScene.SetTerrain(conn.RegionScene.Heightmap.GetFloatsSerialised());
if (rootConn.ClientEventForwarder != null)
rootConn.ClientEventForwarder.AddSceneToEventForwarding(scene);
return true;
//scene.PhysicsScene.Combine(conn.RegionScene.PhysicsScene, offset,extents);
}
*/
/// <summary>
/// Registers a scene as the root region of a (potential) mega-region: swaps in
/// the combined land channel, forwards permission events from any
/// already-connected regions, creates the client event forwarder and records
/// the region as a connectable root.
/// </summary>
private void DoWorkForRootRegion(RegionConnections rootConn, Scene scene)
{
    m_log.DebugFormat("[REGION COMBINER MODULE]: Adding root region {0}", scene.RegionInfo.RegionName);

    RegionData rootRegionData = new RegionData();
    rootRegionData.Offset = Vector3.Zero;
    rootRegionData.RegionId = scene.RegionInfo.originRegionID;
    rootRegionData.RegionScene = scene;

    // Remember the original land channel, then substitute the combined channel
    // that spans all connected regions.
    rootConn.RegionLandChannel = scene.LandChannel;
    RegionCombinerLargeLandChannel combinedLandChannel = new RegionCombinerLargeLandChannel(rootRegionData, scene.LandChannel,
        rootConn.ConnectedRegions);
    scene.LandChannel = combinedLandChannel;

    lock (m_regions)
    {
        // Route permission queries of every connected region to the root region.
        foreach (RegionData connectedRegion in rootConn.ConnectedRegions)
        {
            ForwardPermissionRequests(rootConn, connectedRegion.RegionScene);
        }

        // Create the root region's client event forwarder.
        rootConn.ClientEventForwarder = new RegionCombinerClientEventForwarder(rootConn);

        // Route coarse location (minimap) updates for new presences through this module.
        scene.EventManager.OnNewPresence += SetCoarseLocationDelegate;

        // Record this root region in the dictionary of connectable regions.
        m_regions.Add(scene.RegionInfo.originRegionID, rootConn);
    }
}
// Hooked to EventManager.OnNewPresence on the root region: replaces the
// presence's coarse-location sender with this module's combined-region version
// so avatars in all connected regions are reported.
private void SetCoarseLocationDelegate(ScenePresence presence)
{
presence.SetSendCoarseLocationMethod(SendCoarseLocationUpdates);
}
// This delegate was refactored for non-combined regions.
// This combined-region version does not use the pre-compiled lists of locations
// and ids (the coarseLocations/avatarUUIDs parameters are ignored); it rebuilds
// them from the root scene's current root presences instead.
private void SendCoarseLocationUpdates(UUID sceneId, ScenePresence presence, List<Vector3> coarseLocations, List<UUID> avatarUUIDs)
{
    RegionConnections connectiondata = null;

    lock (m_regions)
    {
        // Single dictionary lookup instead of the previous ContainsKey + indexer pair.
        if (!m_regions.TryGetValue(sceneId, out connectiondata))
            return;
    }

    // Collect position/id of every other root presence in the scene.
    List<Vector3> CoarseLocations = new List<Vector3>();
    List<UUID> AvatarUUIDs = new List<UUID>();

    connectiondata.RegionScene.ForEachRootScenePresence(delegate(ScenePresence sp)
    {
        if (sp.UUID != presence.UUID)
        {
            CoarseLocations.Add(sp.AbsolutePosition);
            AvatarUUIDs.Add(sp.UUID);
        }
    });

    DistributeCoarseLocationUpdates(CoarseLocations, AvatarUUIDs, connectiondata, presence);
}
/// <summary>
/// Buckets avatar coarse locations by the sub-region of the mega-region they
/// fall in, then sends each bucket through the client connection belonging to
/// that sub-region. Every region gets an entry even when it holds no avatars,
/// otherwise stale minimap dots would remain.
/// </summary>
/// <param name="locations">Avatar positions (root-region-relative meters).</param>
/// <param name="uuids">Avatar ids, parallel to <paramref name="locations"/>.</param>
/// <param name="connectiondata">Connection data of the root region.</param>
/// <param name="rootPresence">Presence the updates are being prepared for.</param>
private void DistributeCoarseLocationUpdates(List<Vector3> locations, List<UUID> uuids,
    RegionConnections connectiondata, ScenePresence rootPresence)
{
    RegionData[] rdata = connectiondata.ConnectedRegions.ToArray();
    Dictionary<Vector2, RegionCoarseLocationStruct> updates = new Dictionary<Vector2, RegionCoarseLocationStruct>();

    // Root region entry (offset zero, uses the root presence's own client).
    RegionCoarseLocationStruct rootupdatedata = new RegionCoarseLocationStruct();
    rootupdatedata.Locations = new List<Vector3>();
    rootupdatedata.Uuids = new List<UUID>();
    rootupdatedata.Offset = Vector2.Zero;
    rootupdatedata.UserAPI = rootPresence.ControllingClient;
    if (rootupdatedata.UserAPI != null)
        updates.Add(Vector2.Zero, rootupdatedata);

    // Each region needs an entry or we will end up with dead minimap dots.
    foreach (RegionData regiondata in rdata)
    {
        Vector2 offset = new Vector2(regiondata.Offset.X, regiondata.Offset.Y);

        RegionCoarseLocationStruct updatedata = new RegionCoarseLocationStruct();
        updatedata.Locations = new List<Vector3>();
        updatedata.Uuids = new List<UUID>();
        updatedata.Offset = offset;

        if (offset == Vector2.Zero)
            updatedata.UserAPI = rootPresence.ControllingClient;
        else
            updatedata.UserAPI = LocateUsersChildAgentIClientAPI(offset, rootPresence.UUID, rdata);

        if (updatedata.UserAPI != null)
            updates.Add(offset, updatedata);
    }

    // Assign each location to the entry of the region it falls in.
    for (int i = 0; i < locations.Count; i++)
    {
        // Integer-divide the position down to region coordinates, then scale
        // back up to get the base offset of the containing region.
        Vector3 pPosition = new Vector3((int)locations[i].X / (int)Constants.RegionSize,
            (int)locations[i].Y / (int)Constants.RegionSize, locations[i].Z);
        Vector2 offset = new Vector2(pPosition.X * (int)Constants.RegionSize,
            pPosition.Y * (int)Constants.RegionSize);

        // Single TryGetValue instead of the previous ContainsKey + repeated
        // indexer lookups.
        RegionCoarseLocationStruct updatedata;
        if (!updates.TryGetValue(offset, out updatedata))
        {
            // This shouldn't happen: every region got an entry above.
            updatedata = new RegionCoarseLocationStruct();
            updatedata.Locations = new List<Vector3>();
            updatedata.Uuids = new List<UUID>();
            updatedata.Offset = offset;

            if (offset == Vector2.Zero)
                updatedata.UserAPI = rootPresence.ControllingClient;
            else
                updatedata.UserAPI = LocateUsersChildAgentIClientAPI(offset, rootPresence.UUID, rdata);

            updates.Add(offset, updatedata);
        }

        updatedata.Locations.Add(locations[i]);
        updatedata.Uuids.Add(uuids[i]);
    }

    // Send the updates from their respective client connections.
    foreach (RegionCoarseLocationStruct update in updates.Values)
    {
        if (update.UserAPI != null)
        {
            update.UserAPI.SendCoarseLocationUpdate(update.Uuids, update.Locations);
        }
    }
}
/// <summary>
/// Locates the client of a particular region in an array of RegionData based on offset.
/// </summary>
/// <param name="offset">Offset (in meters) of the wanted region from the root region.</param>
/// <param name="uUID">Agent id to look up in the matching region.</param>
/// <param name="rdata">Connected regions to search.</param>
/// <returns>The controlling IClientAPI, or null when no region matches the offset.</returns>
private IClientAPI LocateUsersChildAgentIClientAPI(Vector2 offset, UUID uUID, RegionData[] rdata)
{
    foreach (RegionData r in rdata)
    {
        if (r.Offset.X == offset.X && r.Offset.Y == offset.Y)
        {
            return r.RegionScene.SceneGraph.GetControllingClient(uUID);
        }
    }

    // No connected region at this offset.
    return null;
}
// Part of the module interface; this module has no work to do after all
// modules have been initialised.
public void PostInitialise()
{
}
// /// <summary>
// /// TODO:
// /// </summary>
// /// <param name="rdata"></param>
// public void UnCombineRegion(RegionData rdata)
// {
// lock (m_regions)
// {
// if (m_regions.ContainsKey(rdata.RegionId))
// {
// // uncombine root region and virtual regions
// }
// else
// {
// foreach (RegionConnections r in m_regions.Values)
// {
// foreach (RegionData rd in r.ConnectedRegions)
// {
// if (rd.RegionId == rdata.RegionId)
// {
// // uncombine virtual region
// }
// }
// }
// }
// }
// }
/// <summary>
/// Redirects all permission queries raised in a virtual (child) region to the
/// permission module bound to the mega-region's root scene, so permissions are
/// evaluated consistently across the combined regions. The permission module is
/// created lazily on first use.
/// </summary>
/// <param name="BigRegion">Connection data of the root region.</param>
/// <param name="VirtualRegion">Child region whose permission events are forwarded.</param>
public void ForwardPermissionRequests(RegionConnections BigRegion, Scene VirtualRegion)
{
if (BigRegion.PermissionModule == null)
BigRegion.PermissionModule = new RegionCombinerPermissionModule(BigRegion.RegionScene);
// Subscribe the root region's handlers to every permission event the child
// region can raise. Trailing comments are implementation-status notes.
VirtualRegion.Permissions.OnBypassPermissions += BigRegion.PermissionModule.BypassPermissions;
VirtualRegion.Permissions.OnSetBypassPermissions += BigRegion.PermissionModule.SetBypassPermissions;
VirtualRegion.Permissions.OnPropagatePermissions += BigRegion.PermissionModule.PropagatePermissions;
VirtualRegion.Permissions.OnGenerateClientFlags += BigRegion.PermissionModule.GenerateClientFlags;
VirtualRegion.Permissions.OnAbandonParcel += BigRegion.PermissionModule.CanAbandonParcel;
VirtualRegion.Permissions.OnReclaimParcel += BigRegion.PermissionModule.CanReclaimParcel;
VirtualRegion.Permissions.OnDeedParcel += BigRegion.PermissionModule.CanDeedParcel;
VirtualRegion.Permissions.OnDeedObject += BigRegion.PermissionModule.CanDeedObject;
VirtualRegion.Permissions.OnIsGod += BigRegion.PermissionModule.IsGod;
VirtualRegion.Permissions.OnDuplicateObject += BigRegion.PermissionModule.CanDuplicateObject;
VirtualRegion.Permissions.OnDeleteObject += BigRegion.PermissionModule.CanDeleteObject; //MAYBE FULLY IMPLEMENTED
VirtualRegion.Permissions.OnEditObject += BigRegion.PermissionModule.CanEditObject; //MAYBE FULLY IMPLEMENTED
VirtualRegion.Permissions.OnEditParcelProperties += BigRegion.PermissionModule.CanEditParcelProperties; //MAYBE FULLY IMPLEMENTED
VirtualRegion.Permissions.OnInstantMessage += BigRegion.PermissionModule.CanInstantMessage;
VirtualRegion.Permissions.OnInventoryTransfer += BigRegion.PermissionModule.CanInventoryTransfer; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnIssueEstateCommand += BigRegion.PermissionModule.CanIssueEstateCommand; //FULLY IMPLEMENTED
VirtualRegion.Permissions.OnMoveObject += BigRegion.PermissionModule.CanMoveObject; //MAYBE FULLY IMPLEMENTED
VirtualRegion.Permissions.OnObjectEntry += BigRegion.PermissionModule.CanObjectEntry;
VirtualRegion.Permissions.OnReturnObjects += BigRegion.PermissionModule.CanReturnObjects; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnRezObject += BigRegion.PermissionModule.CanRezObject; //MAYBE FULLY IMPLEMENTED
VirtualRegion.Permissions.OnRunConsoleCommand += BigRegion.PermissionModule.CanRunConsoleCommand;
VirtualRegion.Permissions.OnRunScript += BigRegion.PermissionModule.CanRunScript; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnCompileScript += BigRegion.PermissionModule.CanCompileScript;
VirtualRegion.Permissions.OnSellParcel += BigRegion.PermissionModule.CanSellParcel;
VirtualRegion.Permissions.OnTakeObject += BigRegion.PermissionModule.CanTakeObject;
VirtualRegion.Permissions.OnTakeCopyObject += BigRegion.PermissionModule.CanTakeCopyObject;
VirtualRegion.Permissions.OnTerraformLand += BigRegion.PermissionModule.CanTerraformLand;
VirtualRegion.Permissions.OnLinkObject += BigRegion.PermissionModule.CanLinkObject; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnDelinkObject += BigRegion.PermissionModule.CanDelinkObject; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnBuyLand += BigRegion.PermissionModule.CanBuyLand; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnViewNotecard += BigRegion.PermissionModule.CanViewNotecard; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnViewScript += BigRegion.PermissionModule.CanViewScript; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnEditNotecard += BigRegion.PermissionModule.CanEditNotecard; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnEditScript += BigRegion.PermissionModule.CanEditScript; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnCreateObjectInventory += BigRegion.PermissionModule.CanCreateObjectInventory; //NOT IMPLEMENTED HERE
VirtualRegion.Permissions.OnEditObjectInventory += BigRegion.PermissionModule.CanEditObjectInventory;//MAYBE FULLY IMPLEMENTED
VirtualRegion.Permissions.OnCopyObjectInventory += BigRegion.PermissionModule.CanCopyObjectInventory; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnDeleteObjectInventory += BigRegion.PermissionModule.CanDeleteObjectInventory; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnResetScript += BigRegion.PermissionModule.CanResetScript;
VirtualRegion.Permissions.OnCreateUserInventory += BigRegion.PermissionModule.CanCreateUserInventory; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnCopyUserInventory += BigRegion.PermissionModule.CanCopyUserInventory; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnEditUserInventory += BigRegion.PermissionModule.CanEditUserInventory; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnDeleteUserInventory += BigRegion.PermissionModule.CanDeleteUserInventory; //NOT YET IMPLEMENTED
VirtualRegion.Permissions.OnTeleport += BigRegion.PermissionModule.CanTeleport; //NOT YET IMPLEMENTED
}
#region console commands
/// <summary>
/// Console command handler: re-assigns every scene object group's absolute
/// position to itself in each starting scene, forcing the position-update
/// machinery to run (presumably clearing "phantom" objects, per the command
/// name - confirm against the physics module).
/// </summary>
public void FixPhantoms(string module, string[] cmdparams)
{
    // Snapshot the scene collection before iterating.
    List<Scene> sceneSnapshot = new List<Scene>(m_startingScenes.Values);

    foreach (Scene currentScene in sceneSnapshot)
    {
        MainConsole.Instance.OutputFormat("Fixing phantoms for {0}", currentScene.RegionInfo.RegionName);

        // Self-assignment triggers the AbsolutePosition setter's side effects
        // without actually moving the object.
        currentScene.ForEachSOG(sog => sog.AbsolutePosition = sog.AbsolutePosition);
    }
}
#endregion
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Localization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using OrchardCore.Environment.Extensions.Features;
using OrchardCore.Environment.Extensions.Loaders;
using OrchardCore.Environment.Extensions.Manifests;
using OrchardCore.Environment.Extensions.Utility;
using OrchardCore.Modules;
namespace OrchardCore.Environment.Extensions
{
/// <summary>
/// Default <see cref="IExtensionManager"/>: harvests extension manifests from
/// the content root, loads the extensions, and exposes their features ordered
/// by dependencies and priorities. All discovery and loading happens lazily,
/// exactly once, in <see cref="EnsureInitialized"/>.
/// </summary>
public class ExtensionManager : IExtensionManager
{
private readonly ExtensionExpanderOptions _extensionExpanderOptions;
private readonly ManifestOptions _manifestOptions;
private readonly IHostingEnvironment _hostingEnvironment;
// Composites wrapping every registered provider/loader so one call fans out to all.
private readonly IManifestProvider _manifestProvider;
private readonly IExtensionProvider _extensionProvider;
private readonly IExtensionLoader _extensionLoader;
private readonly IEnumerable<IExtensionDependencyStrategy> _extensionDependencyStrategies;
private readonly IEnumerable<IExtensionPriorityStrategy> _extensionPriorityStrategies;
private readonly ITypeFeatureProvider _typeFeatureProvider;
// Keyed by extension id / feature id; assigned once in EnsureInitialized and
// only read afterwards.
private IDictionary<string, ExtensionEntry> _extensions;
private IDictionary<string, FeatureEntry> _features;
// Memoized dependency traversals per feature id; Lazy<T> guarantees each
// traversal runs at most once even when requested concurrently.
private ConcurrentDictionary<string, Lazy<IEnumerable<IFeatureInfo>>> _featureDependencies
= new ConcurrentDictionary<string, Lazy<IEnumerable<IFeatureInfo>>>();
private ConcurrentDictionary<string, Lazy<IEnumerable<IFeatureInfo>>> _dependentFeatures
= new ConcurrentDictionary<string, Lazy<IEnumerable<IFeatureInfo>>>();
// Every known feature, ordered by Order().
private IFeatureInfo[] _allOrderedFeatureInfos;
// Selects the features in 'fs' that directly depend on 'currentFeature'.
private static Func<IFeatureInfo, IFeatureInfo[], IFeatureInfo[]> GetDependantFeaturesFunc =
new Func<IFeatureInfo, IFeatureInfo[], IFeatureInfo[]>(
(currentFeature, fs) => fs
.Where(f =>
f.Dependencies.Any(dep => dep == currentFeature.Id)
).OrderBy(x => x.Id).ToArray());
// Selects the features in 'fs' that 'currentFeature' directly depends on.
private static Func<IFeatureInfo, IFeatureInfo[], IFeatureInfo[]> GetFeatureDependenciesFunc =
new Func<IFeatureInfo, IFeatureInfo[], IFeatureInfo[]>(
(currentFeature, fs) => fs
.Where(f =>
currentFeature.Dependencies.Any(dep => dep == f.Id)
).OrderByDescending(x => x.Id).ToArray());
// NOTE(review): read outside the lock (double-checked locking) but not declared
// volatile; appears to rely on the monitor's memory barriers - confirm intended.
private bool _isInitialized = false;
private static object InitializationSyncLock = new object();
/// <summary>
/// Wires up composite manifest/extension providers and the composite loader
/// from all registered implementations.
/// </summary>
public ExtensionManager(
IOptions<ExtensionExpanderOptions> extensionExpanderOptionsAccessor,
IOptions<ManifestOptions> manifestOptionsAccessor,
IHostingEnvironment hostingEnvironment,
IEnumerable<IManifestProvider> manifestProviders,
IEnumerable<IExtensionProvider> extensionProviders,
IEnumerable<IExtensionLoader> extensionLoaders,
IEnumerable<IExtensionDependencyStrategy> extensionDependencyStrategies,
IEnumerable<IExtensionPriorityStrategy> extensionPriorityStrategies,
ITypeFeatureProvider typeFeatureProvider,
ILogger<ExtensionManager> logger,
IStringLocalizer<ExtensionManager> localizer)
{
_extensionExpanderOptions = extensionExpanderOptionsAccessor.Value;
_manifestOptions = manifestOptionsAccessor.Value;
_hostingEnvironment = hostingEnvironment;
_manifestProvider = new CompositeManifestProvider(manifestProviders);
_extensionProvider = new CompositeExtensionProvider(extensionProviders);
_extensionLoader = new CompositeExtensionLoader(extensionLoaders);
_extensionDependencyStrategies = extensionDependencyStrategies;
_extensionPriorityStrategies = extensionPriorityStrategies;
_typeFeatureProvider = typeFeatureProvider;
L = logger;
T = localizer;
}
// Logger and localizer exposed via short-named properties.
public ILogger L { get; set; }
public IStringLocalizer T { get; set; }
/// <summary>
/// Returns the extension with the given id, or a
/// <see cref="NotFoundExtensionInfo"/> placeholder when the id is null/empty
/// or unknown.
/// </summary>
public IExtensionInfo GetExtension(string extensionId)
{
EnsureInitialized();
ExtensionEntry extension;
if (!String.IsNullOrEmpty(extensionId) && _extensions.TryGetValue(extensionId, out extension))
{
return extension.ExtensionInfo;
}
return new NotFoundExtensionInfo(extensionId);
}
/// <summary>
/// Returns info for every loaded extension.
/// </summary>
public IEnumerable<IExtensionInfo> GetExtensions()
{
EnsureInitialized();
return _extensions.Values.Select(ex => ex.ExtensionInfo);
}
/// <summary>
/// Returns the requested features plus all their transitive dependencies, in
/// the global dependency/priority order.
/// </summary>
public IEnumerable<IFeatureInfo> GetFeatures(string[] featureIdsToLoad)
{
EnsureInitialized();
var allDependencies = featureIdsToLoad
.SelectMany(featureId => GetFeatureDependencies(featureId))
.Distinct();
// Filter the globally ordered list so the result preserves that order.
return _allOrderedFeatureInfos
.Where(f => allDependencies.Any(d => d.Id == f.Id));
}
/// <summary>
/// Returns the already-loaded entry for the given extension, or null when the
/// extension is unknown. Loading itself happens during initialization.
/// </summary>
public Task<ExtensionEntry> LoadExtensionAsync(IExtensionInfo extensionInfo)
{
EnsureInitialized();
ExtensionEntry extension;
if (_extensions.TryGetValue(extensionInfo.Id, out extension))
{
return Task.FromResult(extension);
}
return Task.FromResult<ExtensionEntry>(null);
}
/// <summary>
/// Returns every feature entry, ordered like <see cref="GetFeatures()"/>.
/// </summary>
public Task<IEnumerable<FeatureEntry>> LoadFeaturesAsync()
{
// GetFeatures() triggers EnsureInitialized before _features is read.
var orderedFeaturesIds = GetFeatures().Select(f => f.Id).ToList();
var loadedFeatures = _features.Values
.OrderBy(f => orderedFeaturesIds.IndexOf(f.FeatureInfo.Id));
return Task.FromResult<IEnumerable<FeatureEntry>>(loadedFeatures);
}
/// <summary>
/// Returns the feature entries for the given ids (plus their dependencies),
/// ordered like <see cref="GetFeatures(string[])"/>.
/// </summary>
public Task<IEnumerable<FeatureEntry>> LoadFeaturesAsync(string[] featureIdsToLoad)
{
EnsureInitialized();
var orderedFeaturesIds = GetFeatures(featureIdsToLoad).Select(f => f.Id).ToList();
var loadedFeatures = _features.Values
.Where(f => orderedFeaturesIds.Contains(f.FeatureInfo.Id))
.OrderBy(f => orderedFeaturesIds.IndexOf(f.FeatureInfo.Id));
return Task.FromResult<IEnumerable<FeatureEntry>>(loadedFeatures);
}
/// <summary>
/// Returns the feature itself plus all features it transitively depends on.
/// Results are memoized per feature id; unknown ids yield an empty sequence.
/// </summary>
public IEnumerable<IFeatureInfo> GetFeatureDependencies(string featureId)
{
EnsureInitialized();
return _featureDependencies.GetOrAdd(featureId, (key) => new Lazy<IEnumerable<IFeatureInfo>>(() =>
{
if (!_features.ContainsKey(key))
{
return Enumerable.Empty<IFeatureInfo>();
}
var feature = _features[key].FeatureInfo;
// Iterative depth-first walk; the HashSet guards against cycles/duplicates.
var dependencies = new HashSet<IFeatureInfo>() { feature };
var stack = new Stack<IFeatureInfo[]>();
stack.Push(GetFeatureDependenciesFunc(feature, _allOrderedFeatureInfos));
while (stack.Count > 0)
{
var next = stack.Pop();
foreach (var dependency in next.Where(dependency => !dependencies.Contains(dependency)))
{
dependencies.Add(dependency);
stack.Push(GetFeatureDependenciesFunc(dependency, _allOrderedFeatureInfos));
}
}
// Reverse so dependencies appear before the feature that requires them.
return dependencies.Reverse();
})).Value;
}
/// <summary>
/// Returns the feature itself plus all features that transitively depend on
/// it. Results are memoized per feature id; unknown ids yield an empty sequence.
/// </summary>
public IEnumerable<IFeatureInfo> GetDependentFeatures(string featureId)
{
EnsureInitialized();
return _dependentFeatures.GetOrAdd(featureId, (key) => new Lazy<IEnumerable<IFeatureInfo>>(() =>
{
if (!_features.ContainsKey(key))
{
return Enumerable.Empty<IFeatureInfo>();
}
var feature = _features[key].FeatureInfo;
if (feature == null)
{
return Enumerable.Empty<IFeatureInfo>();
}
return GetDependentFeatures(feature, _allOrderedFeatureInfos);
})).Value;
}
// Iterative depth-first walk collecting every feature that (transitively)
// depends on 'feature', including the feature itself.
private IEnumerable<IFeatureInfo> GetDependentFeatures(
IFeatureInfo feature,
IFeatureInfo[] features)
{
var dependencies = new HashSet<IFeatureInfo>() { feature };
var stack = new Stack<IFeatureInfo[]>();
stack.Push(GetDependantFeaturesFunc(feature, features));
while (stack.Count > 0)
{
var next = stack.Pop();
foreach (var dependency in next.Where(dependency => !dependencies.Contains(dependency)))
{
dependencies.Add(dependency);
stack.Push(GetDependantFeaturesFunc(dependency, features));
}
}
return dependencies;
}
/// <summary>
/// Returns every known feature in dependency/priority order.
/// </summary>
public IEnumerable<IFeatureInfo> GetFeatures()
{
EnsureInitialized();
return _allOrderedFeatureInfos;
}
// Resolves the feature a type belongs to: an explicit [Feature] attribute wins,
// otherwise the type falls back to the extension's own id.
private static string GetSourceFeatureNameForType(Type type, string extensionId)
{
var attribute = type.GetTypeInfo().GetCustomAttributes<FeatureAttribute>(false).FirstOrDefault();
return attribute?.FeatureName ?? extensionId;
}
/// <summary>
/// One-time initialization using double-checked locking: harvests extensions,
/// loads them in parallel, maps component types to features, and builds the
/// ordered feature list.
/// </summary>
private void EnsureInitialized()
{
if (_isInitialized)
{
return;
}
lock (InitializationSyncLock)
{
// Re-check under the lock: another thread may have initialized already.
if (_isInitialized)
{
return;
}
var extensions = HarvestExtensions();
var loadedExtensions = new ConcurrentDictionary<string, ExtensionEntry>();
// Load all extensions in parallel
Parallel.ForEach(extensions, (extension) =>
{
if (!extension.Exists)
{
return;
}
var entry = _extensionLoader.Load(extension);
if (entry.IsError && L.IsEnabled(LogLevel.Warning))
{
L.LogWarning("No loader found for extension \"{0}\". This might denote a dependency is missing or the extension doesn't have an assembly.", extension.Id);
}
loadedExtensions.TryAdd(extension.Id, entry);
});
var loadedFeatures = new Dictionary<string, FeatureEntry>();
// Get all valid types from any extension
var allTypesByExtension = loadedExtensions.SelectMany(extension =>
extension.Value.ExportedTypes.Where(IsComponentType)
.Select(type => new
{
ExtensionEntry = extension.Value,
Type = type
})).ToArray();
// Group the types by the feature they belong to (attribute or extension id).
var typesByFeature = allTypesByExtension
.GroupBy(typeByExtension => GetSourceFeatureNameForType(
typeByExtension.Type,
typeByExtension.ExtensionEntry.ExtensionInfo.Id))
.ToDictionary(
group => group.Key,
group => group.Select(typesByExtension => typesByExtension.Type).ToArray());
foreach (var loadedExtension in loadedExtensions)
{
var extension = loadedExtension.Value;
foreach (var feature in extension.ExtensionInfo.Features)
{
// Features can have no types
if (typesByFeature.TryGetValue(feature.Id, out var featureTypes))
{
foreach (var type in featureTypes)
{
_typeFeatureProvider.TryAdd(type, feature);
}
}
else
{
featureTypes = Array.Empty<Type>();
}
loadedFeatures.Add(feature.Id, new CompiledFeatureEntry(feature, featureTypes));
}
};
_extensions = loadedExtensions;
// Could we get rid of _allOrderedFeatureInfos and just have _features?
_features = loadedFeatures;
_allOrderedFeatureInfos = Order(loadedFeatures.Values.Select(x => x.FeatureInfo));
// Flag is set last, after all the dictionaries above are assigned.
_isInitialized = true;
}
}
// A component type is a public, non-abstract class.
private bool IsComponentType(Type type)
{
var typeInfo = type.GetTypeInfo();
return typeInfo.IsClass && !typeInfo.IsAbstract && typeInfo.IsPublic;
}
// Stable alphabetical pre-sort, then ordering by dependencies with priority
// as tie-breaker (see OrderByDependenciesAndPriorities).
private IFeatureInfo[] Order(IEnumerable<IFeatureInfo> featuresToOrder)
{
return featuresToOrder
.OrderBy(x => x.Id)
.Distinct()
.OrderByDependenciesAndPriorities(HasDependency, GetPriority)
.ToArray();
}
// True when any registered strategy says f1 depends on f2.
private bool HasDependency(IFeatureInfo f1, IFeatureInfo f2)
{
return _extensionDependencyStrategies.Any(s => s.HasDependency(f1, f2));
}
// A feature's priority is the sum over all registered priority strategies.
private int GetPriority(IFeatureInfo feature)
{
return _extensionPriorityStrategies.Sum(s => s.GetPriority(feature));
}
/// <summary>
/// Scans the configured search paths under the content root for directories
/// containing a recognized manifest file and builds extension info for each.
/// </summary>
private ISet<IExtensionInfo> HarvestExtensions()
{
var searchOptions = _extensionExpanderOptions.Options;
var extensionSet = new HashSet<IExtensionInfo>();
if (searchOptions.Count == 0)
{
return extensionSet;
}
foreach (var searchOption in searchOptions)
{
foreach (var subDirectory in _hostingEnvironment
.ContentRootFileProvider
.GetDirectoryContents(searchOption.SearchPath)
.Where(x => x.IsDirectory))
{
// Pick the first manifest configuration whose file exists in this directory.
var manifestConfiguration = _manifestOptions
.ManifestConfigurations
.FirstOrDefault(mc =>
{
return File.Exists(Path.Combine(subDirectory.PhysicalPath, mc.ManifestFileName));
}
);
if (manifestConfiguration == null)
{
continue;
}
var manifestsubPath = searchOption.SearchPath + '/' + subDirectory.Name;
var manifestFilesubPath = manifestsubPath + '/' + manifestConfiguration.ManifestFileName;
IConfigurationBuilder configurationBuilder =
_manifestProvider.GetManifestConfiguration(new ConfigurationBuilder(), manifestFilesubPath);
// No provider produced configuration for this manifest: skip the directory.
if (!configurationBuilder.Sources.Any())
{
continue;
}
var configurationRoot = configurationBuilder.Build();
var manifestInfo = new ManifestInfo(configurationRoot, manifestConfiguration.Type);
// Manifest tells you what your loading, subpath is where you are loading it
var extensionInfo = _extensionProvider.GetExtensionInfo(manifestInfo, manifestsubPath);
extensionSet.Add(extensionInfo);
}
}
return extensionSet;
}
}
}
| |
using System.Collections.Generic;
using System.Text.RegularExpressions;
namespace GodLesZ.Library.Controls.Html
{
public class HtmlTag
{
#region Fields
// Lower-cased tag name, without angle brackets or the leading '/' of an end tag.
private string _tagName;
// True when the parsed tag was an end tag ("</foo>").
private bool _isClosing;
// Attribute name -> value map extracted from the tag text.
private Dictionary<string, string> _attributes;
#endregion
#region Ctor
// Shared initialization: creates the empty attribute dictionary.
private HtmlTag()
{
_attributes = new Dictionary<string, string>();
}
/// <summary>
/// Parses an HTML tag string into its name, closing flag and attributes.
/// Assumes <paramref name="tag"/> still includes the surrounding angle
/// brackets - the first and last characters are stripped here (TODO confirm
/// against the tokenizer that feeds this constructor).
/// </summary>
/// <param name="tag">Raw tag text, e.g. "&lt;img src=\"a.png\"&gt;".</param>
public HtmlTag(string tag)
    : this()
{
    // Strip the enclosing angle brackets.
    tag = tag.Substring(1, tag.Length - 2);

    int spaceIndex = tag.IndexOf(" ");

    // Extract tag name: everything up to the first space, if any.
    if (spaceIndex < 0)
    {
        _tagName = tag;
    }
    else
    {
        _tagName = tag.Substring(0, spaceIndex);
    }

    // A leading '/' marks an end tag ("</foo>").
    if (_tagName.StartsWith("/"))
    {
        _isClosing = true;
        _tagName = _tagName.Substring(1);
    }

    _tagName = _tagName.ToLower();

    // Extract attributes using the shared attribute regex.
    MatchCollection atts = Parser.Match(Parser.HmlTagAttributes, tag);

    foreach (Match att in atts)
    {
        // Split the match into attribute name and (optional) value.
        string[] chunks = att.Value.Split('=');

        if (chunks.Length == 1)
        {
            // Valueless attribute (e.g. "disabled"). Lowercase the key once so
            // the duplicate check and the insertion agree: previously the check
            // used the original casing while the add used the lowered key,
            // which could throw on duplicates differing only in case.
            string attname = chunks[0].ToLower();

            if (!Attributes.ContainsKey(attname))
                Attributes.Add(attname, string.Empty);
        }
        else if (chunks.Length == 2)
        {
            string attname = chunks[0].Trim();
            string attvalue = chunks[1].Trim();

            // Strip surrounding double quotes.
            // NOTE(review): the Length > 2 guard means a quoted empty value
            // ("") keeps its quotes - confirm whether that is intended.
            if (attvalue.StartsWith("\"") && attvalue.EndsWith("\"") && attvalue.Length > 2)
            {
                attvalue = attvalue.Substring(1, attvalue.Length - 2);
            }

            if (!Attributes.ContainsKey(attname))
                Attributes.Add(attname, attvalue);
        }
    }
}
#endregion
#region Props
/// <summary>
/// Gets the dictionary of attributes parsed from the tag. Valueless
/// attributes are stored with an empty string value.
/// </summary>
public Dictionary<string, string> Attributes
{
get { return _attributes; }
}
/// <summary>
/// Gets the name of this tag
/// </summary>
public string TagName
{
get { return _tagName; }
}
/// <summary>
/// Gets if the tag is actually a closing tag
/// </summary>
public bool IsClosing
{
get { return _isClosing; }
}
/// <summary>
/// Gets if the tag is single placed; in other words it doesn't need a closing tag;
/// e.g. <br>
/// </summary>
public bool IsSingle
{
get
{
return TagName.StartsWith("!")
|| (new List<string>(
new string[]{
"area", "base", "basefont", "br", "col",
"frame", "hr", "img", "input", "isindex",
"link", "meta", "param"
}
)).Contains(TagName)
;
}
}
internal void TranslateAttributes(CssBox box)
{
string t = TagName.ToUpper();
foreach (string att in Attributes.Keys)
{
string value = Attributes[att];
switch (att)
{
case HtmlConstants.align:
if (value == HtmlConstants.left || value == HtmlConstants.center || value == HtmlConstants.right || value == HtmlConstants.justify)
box.TextAlign = value;
else
box.VerticalAlign = value;
break;
case HtmlConstants.background:
box.BackgroundImage = value;
break;
case HtmlConstants.bgcolor:
box.BackgroundColor = value;
break;
case HtmlConstants.border:
box.BorderWidth = TranslateLength(value);
if (t == HtmlConstants.TABLE)
{
ApplyTableBorder(box, value);
}
else
{
box.BorderStyle = CssConstants.Solid;
}
break;
case HtmlConstants.bordercolor:
box.BorderColor = value;
break;
case HtmlConstants.cellspacing:
box.BorderSpacing = TranslateLength(value);
break;
case HtmlConstants.cellpadding:
ApplyTablePadding(box, value);
break;
case HtmlConstants.color:
box.Color = value;
break;
case HtmlConstants.dir:
box.Direction = value;
break;
case HtmlConstants.face:
box.FontFamily = value;
break;
case HtmlConstants.height:
box.Height = TranslateLength(value);
break;
case HtmlConstants.hspace:
box.MarginRight = box.MarginLeft = TranslateLength(value);
break;
case HtmlConstants.nowrap:
box.WhiteSpace = CssConstants.Nowrap;
break;
case HtmlConstants.size:
if (t == HtmlConstants.HR)
box.Height = TranslateLength(value);
break;
case HtmlConstants.valign:
box.VerticalAlign = value;
break;
case HtmlConstants.vspace:
box.MarginTop = box.MarginBottom = TranslateLength(value);
break;
case HtmlConstants.width:
box.Width = TranslateLength(value);
break;
}
}
}
#endregion
#region Methods
/// <summary>
/// Converts an HTML length into a Css length
/// </summary>
/// <param name="htmlLength"></param>
/// <returns></returns>
private string TranslateLength(string htmlLength)
{
CssLength len = new CssLength(htmlLength);
if (len.HasError)
{
return htmlLength + "px";
}
return htmlLength;
}
/// <summary>
/// Cascades to the TD's the border spacified in the TABLE tag.
/// </summary>
/// <param name="table"></param>
/// <param name="border"></param>
private void ApplyTableBorder(CssBox table, string border)
{
foreach (CssBox box in table.Boxes)
{
foreach (CssBox cell in box.Boxes)
{
cell.BorderWidth = TranslateLength(border);
}
}
}
/// <summary>
/// Cascades to the TD's the border spacified in the TABLE tag.
/// </summary>
/// <param name="table"></param>
/// <param name="border"></param>
private void ApplyTablePadding(CssBox table, string padding)
{
foreach (CssBox box in table.Boxes)
{
foreach (CssBox cell in box.Boxes)
{
cell.Padding = TranslateLength(padding);
}
}
}
/// <summary>
/// Gets a boolean indicating if the attribute list has the specified attribute
/// </summary>
/// <param name="attribute"></param>
/// <returns></returns>
public bool HasAttribute(string attribute)
{
return Attributes.ContainsKey(attribute);
}
public override string ToString()
{
return string.Format("<{1}{0}>", TagName, IsClosing ? "/" : string.Empty);
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using System.Windows.Forms;
using AppControl;
using freeasyExplorer;
using System.Threading.Tasks;
using HWND = System.IntPtr;
namespace WindowsApplication1
{
/// <summary>
/// Summary description for Form1.
/// </summary>
/// <summary>
/// Main window: discovers top-level explorer windows, embeds each one into a
/// tab page via an ApplicationControl, and keeps tab titles in sync through a
/// WinEvent hook plus a background polling task.
/// </summary>
public class MainWnd : Form
{
    [DllImport("USER32.DLL")]
    static extern int GetWindowText(HWND hWnd, StringBuilder lpString, int nMaxCount);

    [DllImport("USER32.DLL")]
    static extern int GetWindowTextLength(HWND hWnd);

    internal delegate void WinEventProc(IntPtr hWinEventHook, int iEvent, IntPtr hWnd, int idObject, int idChild, int dwEventThread, int dwmsEventTime);

    [DllImport("user32.dll")]
    static extern IntPtr SetWinEventHook(uint eventMin, uint eventMax, IntPtr
        hmodWinEventProc, WinEventProc lpfnWinEventProc, uint idProcess,
        uint idThread, uint dwFlags);

    #region CONSTANTS
    const uint WINEVENT_OUTOFCONTEXT = 0x0000; // Events are ASYNC
    const uint WINEVENT_SKIPOWNTHREAD = 0x0001; // Don't call back for events on installer's thread
    const uint WINEVENT_SKIPOWNPROCESS = 0x0002; // Don't call back for events on installer's process
    const uint WINEVENT_INCONTEXT = 0x0004; // Events are SYNC, this causes your dll to be injected into every process
    const uint EVENT_MIN = 0x00000001;
    const uint EVENT_MAX = 0x7FFFFFFF;
    const uint EVENT_SYSTEM_SOUND = 0x0001;
    const uint EVENT_SYSTEM_ALERT = 0x0002;
    const uint EVENT_SYSTEM_FOREGROUND = 0x0003;
    const uint EVENT_SYSTEM_MENUSTART = 0x0004;
    const uint EVENT_SYSTEM_MENUEND = 0x0005;
    const uint EVENT_SYSTEM_MENUPOPUPSTART = 0x0006;
    const uint EVENT_SYSTEM_MENUPOPUPEND = 0x0007;
    const uint EVENT_SYSTEM_CAPTURESTART = 0x0008;
    const uint EVENT_SYSTEM_CAPTUREEND = 0x0009;
    const uint EVENT_SYSTEM_MOVESIZESTART = 0x000A;
    const uint EVENT_SYSTEM_MOVESIZEEND = 0x000B;
    const uint EVENT_SYSTEM_CONTEXTHELPSTART = 0x000C;
    const uint EVENT_SYSTEM_CONTEXTHELPEND = 0x000D;
    const uint EVENT_SYSTEM_DRAGDROPSTART = 0x000E;
    const uint EVENT_SYSTEM_DRAGDROPEND = 0x000F;
    const uint EVENT_SYSTEM_DIALOGSTART = 0x0010;
    const uint EVENT_SYSTEM_DIALOGEND = 0x0011;
    const uint EVENT_SYSTEM_SCROLLINGSTART = 0x0012;
    const uint EVENT_SYSTEM_SCROLLINGEND = 0x0013;
    const uint EVENT_SYSTEM_SWITCHSTART = 0x0014;
    const uint EVENT_SYSTEM_SWITCHEND = 0x0015;
    const uint EVENT_SYSTEM_MINIMIZESTART = 0x0016;
    const uint EVENT_SYSTEM_MINIMIZEEND = 0x0017;
    const uint EVENT_SYSTEM_DESKTOPSWITCH = 0x0020;
    const uint EVENT_SYSTEM_END = 0x00FF;
    const uint EVENT_OEM_DEFINED_START = 0x0101;
    const uint EVENT_OEM_DEFINED_END = 0x01FF;
    const uint EVENT_UIA_EVENTID_START = 0x4E00;
    const uint EVENT_UIA_EVENTID_END = 0x4EFF;
    const uint EVENT_UIA_PROPID_START = 0x7500;
    const uint EVENT_UIA_PROPID_END = 0x75FF;
    const uint EVENT_CONSOLE_CARET = 0x4001;
    const uint EVENT_CONSOLE_UPDATE_REGION = 0x4002;
    const uint EVENT_CONSOLE_UPDATE_SIMPLE = 0x4003;
    const uint EVENT_CONSOLE_UPDATE_SCROLL = 0x4004;
    const uint EVENT_CONSOLE_LAYOUT = 0x4005;
    const uint EVENT_CONSOLE_START_APPLICATION = 0x4006;
    const uint EVENT_CONSOLE_END_APPLICATION = 0x4007;
    const uint EVENT_CONSOLE_END = 0x40FF;
    const uint EVENT_OBJECT_CREATE = 0x8000; // hwnd ID idChild is created item
    const uint EVENT_OBJECT_DESTROY = 0x8001; // hwnd ID idChild is destroyed item
    const uint EVENT_OBJECT_SHOW = 0x8002; // hwnd ID idChild is shown item
    const uint EVENT_OBJECT_HIDE = 0x8003; // hwnd ID idChild is hidden item
    const uint EVENT_OBJECT_REORDER = 0x8004; // hwnd ID idChild is parent of zordering children
    const uint EVENT_OBJECT_FOCUS = 0x8005; // hwnd ID idChild is focused item
    const uint EVENT_OBJECT_SELECTION = 0x8006; // hwnd ID idChild is selected item (if only one), or idChild is OBJID_WINDOW if complex
    const uint EVENT_OBJECT_SELECTIONADD = 0x8007; // hwnd ID idChild is item added
    const uint EVENT_OBJECT_SELECTIONREMOVE = 0x8008; // hwnd ID idChild is item removed
    const uint EVENT_OBJECT_SELECTIONWITHIN = 0x8009; // hwnd ID idChild is parent of changed selected items
    const uint EVENT_OBJECT_STATECHANGE = 0x800A; // hwnd ID idChild is item w/ state change
    const uint EVENT_OBJECT_LOCATIONCHANGE = 0x800B; // hwnd ID idChild is moved/sized item
    const uint EVENT_OBJECT_NAMECHANGE = 0x800C; // hwnd ID idChild is item w/ name change
    const uint EVENT_OBJECT_DESCRIPTIONCHANGE = 0x800D; // hwnd ID idChild is item w/ desc change
    const uint EVENT_OBJECT_VALUECHANGE = 0x800E; // hwnd ID idChild is item w/ value change
    const uint EVENT_OBJECT_PARENTCHANGE = 0x800F; // hwnd ID idChild is item w/ new parent
    const uint EVENT_OBJECT_HELPCHANGE = 0x8010; // hwnd ID idChild is item w/ help change
    const uint EVENT_OBJECT_DEFACTIONCHANGE = 0x8011; // hwnd ID idChild is item w/ def action change
    const uint EVENT_OBJECT_ACCELERATORCHANGE = 0x8012; // hwnd ID idChild is item w/ keybd accel change
    const uint EVENT_OBJECT_INVOKED = 0x8013; // hwnd ID idChild is item invoked
    const uint EVENT_OBJECT_TEXTSELECTIONCHANGED = 0x8014; // hwnd ID idChild is item w? test selection change
    const uint EVENT_OBJECT_CONTENTSCROLLED = 0x8015;
    const uint EVENT_SYSTEM_ARRANGMENTPREVIEW = 0x8016;
    const uint EVENT_OBJECT_END = 0x80FF;
    const uint EVENT_AIA_START = 0xA000;
    const uint EVENT_AIA_END = 0xAFFF;
    #endregion

    internal enum SetWinEventHookFlags
    {
        WINEVENT_INCONTEXT = 4,
        WINEVENT_OUTOFCONTEXT = 0,
        WINEVENT_SKIPOWNPROCESS = 2,
        WINEVENT_SKIPOWNTHREAD = 1
    }

    // A window we have already embedded into a tab.
    struct UsedWindow
    {
        public IntPtr handle;
        public ApplicationControl control;
        public TabPage tab;
    }

    // A top-level window we discovered but have not embedded yet.
    struct WindowInfo
    {
        public IntPtr handle;
        public uint id;
    }

    // Process ids for which a WinEvent hook has already been installed.
    private List<uint> registeredHooks = new List<uint>();
    // Windows discovered but not yet embedded.
    // NOTE(review): this list is mutated both from the UI thread and from the
    // background task without synchronization — confirm/serialize access.
    private List<WindowInfo> newWindowList = new List<WindowInfo>();
    // Windows already embedded into tabs.
    private List<UsedWindow> usedWindowList = new List<UsedWindow>();
    private TabControl tabControl;
    private TabPage tabPage2;
    // Static self-reference used by the static hook callback and background task.
    public static MainWnd statThis;

    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.Container components = null;

    public MainWnd()
    {
        InitializeComponent();
        FindAllVisibleWindows();
        // If nothing is open yet, spawn an explorer so there is at least one tab.
        if (newWindowList.Count <= 0)
        {
            StartNewExplorer();
            FindAllVisibleWindows();
        }
        AddControls();
        statThis = this;
        Task backgroudSearchTask = new Task(new Action(BackgroundExplorerSearch));
        backgroudSearchTask.Start();
    }

    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    protected override void Dispose( bool disposing )
    {
        if( disposing )
        {
            if (components != null)
            {
                components.Dispose();
            }
        }
        base.Dispose( disposing );
    }

    #region Windows Form Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        this.tabControl = new System.Windows.Forms.TabControl();
        this.tabPage2 = new System.Windows.Forms.TabPage();
        this.tabControl.SuspendLayout();
        this.SuspendLayout();
        //
        // tabControl
        //
        this.tabControl.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
            | System.Windows.Forms.AnchorStyles.Left)
            | System.Windows.Forms.AnchorStyles.Right)));
        this.tabControl.Controls.Add(this.tabPage2);
        this.tabControl.Location = new System.Drawing.Point(0, 0);
        this.tabControl.Name = "tabControl";
        this.tabControl.SelectedIndex = 0;
        this.tabControl.Size = new System.Drawing.Size(988, 673);
        this.tabControl.TabIndex = 0;
        this.tabControl.Selecting += new System.Windows.Forms.TabControlCancelEventHandler(this.tabControl_Selecting);
        //
        // tabPage2
        //
        this.tabPage2.Location = new System.Drawing.Point(4, 22);
        this.tabPage2.Name = "tabPage2";
        this.tabPage2.Size = new System.Drawing.Size(980, 647);
        this.tabPage2.TabIndex = 1;
        this.tabPage2.Text = "+";
        //
        // MainWnd
        //
        this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
        this.ClientSize = new System.Drawing.Size(988, 673);
        this.Controls.Add(this.tabControl);
        this.Name = "MainWnd";
        this.Text = "freeasyExplorer";
        this.tabControl.ResumeLayout(false);
        this.ResumeLayout(false);
    }
    #endregion

    /// <summary>
    /// The main entry point for the application.
    /// </summary>
    [STAThread]
    static void Main()
    {
        MainWnd bla = new MainWnd();
        Application.Run(bla);
    }

    // Kept in a static field so the delegate is not garbage-collected while
    // the native hook still references it.
    private static WinEventProc listener = new WinEventProc(EventCallback);

    delegate void SetTextCallback();

    /// <summary>
    /// Embeds every window in newWindowList into a new tab page, installs a
    /// WinEvent hook per process (once), and moves the window to usedWindowList.
    /// Marshals itself onto the UI thread when called from elsewhere.
    /// </summary>
    private void AddControls()
    {
        if(newWindowList.Count <= 0) return;
        while (newWindowList.Count > 0)
        {
            if (this.tabControl.InvokeRequired)
            {
                // Re-enter on the UI thread; the Invoke'd call drains the list.
                SetTextCallback d = new SetTextCallback(AddControls);
                this.Invoke(d, new object[] { });
                return;
            }
            UsedWindow tmpWindow = new UsedWindow {handle = IntPtr.Zero ,control = new ApplicationControl(), tab = new TabPage()};
            tmpWindow.control.SuspendLayout();
            tmpWindow.handle = newWindowList[0].handle;
            tmpWindow.control.WindowHandle = newWindowList[0].handle;
            tmpWindow.control.Location = new Point(0, 0);
            tmpWindow.control.Size = new Size(936, 607);
            tmpWindow.tab.Controls.Add(tmpWindow.control);
            tmpWindow.tab.BorderStyle = BorderStyle.FixedSingle;
            this.tabControl.TabPages.Add(tmpWindow.tab);
            tmpWindow.control.Dock = DockStyle.Fill;
            // Use the window title as the tab caption.
            int length = GetWindowTextLength(tmpWindow.handle);
            if (length != 0)
            {
                // BUGFIX: capacity must cover nMaxCount (length + 1 incl. the
                // terminating NUL) so the marshaled buffer is large enough.
                StringBuilder builder = new StringBuilder(length + 1);
                GetWindowText(tmpWindow.handle, builder, length + 1);
                tmpWindow.tab.Text = builder.ToString();
            }
            tabControl.SelectedTab = tmpWindow.tab;
            // Install one hook per process so title changes update the tab.
            if(registeredHooks.Count <= 0 || !registeredHooks.Contains(newWindowList[0].id))
            {
                SetWinEventHook(EVENT_MIN, EVENT_MAX, IntPtr.Zero, listener, newWindowList[0].id, 0,
                    WINEVENT_OUTOFCONTEXT | WINEVENT_SKIPOWNTHREAD);
                registeredHooks.Add(newWindowList[0].id);
            }
            usedWindowList.Add(tmpWindow);
            newWindowList.RemoveAt(0);
        }
    }

    /// <summary>
    /// WinEvent hook callback; invoked by the OS for events raised in the
    /// hooked processes.
    /// </summary>
    private static void EventCallback(IntPtr hWinEventHook, int iEvent, IntPtr hWnd, int idObject, int idChild, int dwEventThread, int dwmsEventTime)
    {
        //callback function, called when message is intercepted
        Console.WriteLine(iEvent.ToString());
        //if(iEvent == 8 || iEvent == 9)
        //{
        statThis.CustomEvent_Handler();
        //}
    }

    /// <summary>
    /// Refreshes the caption of the currently selected tab from its embedded
    /// window's title.
    /// </summary>
    private void CustomEvent_Handler()
    {
        UsedWindow tmpWindow = new UsedWindow { handle = IntPtr.Zero, control = new ApplicationControl(), tab = new TabPage() };
        foreach (var usedWindow in usedWindowList)
        {
            if (usedWindow.tab == tabControl.SelectedTab)
            {
                tmpWindow = usedWindow;
            }
        }
        // handle stays IntPtr.Zero when no tab matched; GetWindowTextLength
        // then returns 0 and we skip the update.
        int length = GetWindowTextLength(tmpWindow.handle);
        if (length != 0)
        {
            // BUGFIX: capacity covers nMaxCount (length + 1) — see AddControls.
            StringBuilder builder = new StringBuilder(length + 1);
            GetWindowText(tmpWindow.handle, builder, length + 1);
            tmpWindow.tab.Text = builder.ToString();
        }
    }

    /// <summary>
    /// Appends every currently open top-level window to newWindowList.
    /// </summary>
    private void FindAllVisibleWindows()
    {
        foreach (KeyValuePair<IntPtr, uint> window in OpenWindowGetter.GetOpenWindows())
        {
            IntPtr handle = window.Key;
            uint process = window.Value;
            newWindowList.Add(new WindowInfo(){handle = handle, id = process});
        }
    }

    /// <summary>
    /// Launches a new (minimized) explorer window and waits briefly for it to appear.
    /// </summary>
    private void StartNewExplorer()
    {
        ProcessStartInfo tmp = new ProcessStartInfo();
        tmp.FileName = "explorer.exe";
        tmp.WindowStyle = ProcessWindowStyle.Minimized;
        Process.Start(tmp);
        Thread.Sleep(1000);
    }

    /// <summary>
    /// Drops from newWindowList every window that is already embedded.
    /// </summary>
    private void FindOnlyNewWindows()
    {
        // BUGFIX: the original removed entries from newWindowList while
        // foreach-ing over it, which throws InvalidOperationException on the
        // first duplicate. RemoveAll performs the same filtering safely.
        newWindowList.RemoveAll(
            newWindow => usedWindowList.Exists(usedWindow => usedWindow.handle == newWindow.handle));
    }

    /// <summary>
    /// Handles the "+" tab: spawns a new explorer and embeds it.
    /// </summary>
    private void tabControl_Selecting(object sender, TabControlCancelEventArgs e)
    {
        if (e.TabPage.Text.Equals("+"))
        {
            StartNewExplorer();
            FindAllVisibleWindows();
            FindOnlyNewWindows();
            AddControls();
        }
    }

    /// <summary>
    /// Background loop: polls for newly opened explorer windows every 500 ms
    /// and embeds them. AddControls marshals to the UI thread via Invoke.
    /// </summary>
    private static void BackgroundExplorerSearch()
    {
        while(true)
        {
            Thread.Sleep(500);
            statThis.FindAllVisibleWindows();
            statThis.FindOnlyNewWindows();
            if (statThis.newWindowList.Count > 0)
            {
                statThis.AddControls();
            }
        }
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Immutable;
using System.Diagnostics;
using System.Reflection.Internal;
using System.Reflection.Metadata.Ecma335;
using System.Runtime.InteropServices;
namespace System.Reflection.Metadata
{
/// <summary>
/// Decoded representation of a CLI method body: the header flags, the IL
/// instruction stream and the exception-handling regions, as laid out in
/// ECMA-335 §II.25.4 (tiny and fat method body formats).
/// </summary>
public sealed class MethodBodyBlock
{
    // Raw IL instruction stream (excludes the header and SEH sections).
    private readonly MemoryBlock _il;
    // Total size of the body: header + IL + (aligned) exception sections.
    private readonly int _size;
    private readonly ushort _maxStack;
    private readonly bool _localVariablesInitialized;
    private readonly StandaloneSignatureHandle _localSignature;
    private readonly ImmutableArray<ExceptionRegion> _exceptionRegions;

    private MethodBodyBlock(
        bool localVariablesInitialized,
        ushort maxStack,
        StandaloneSignatureHandle localSignatureHandle,
        MemoryBlock il,
        ImmutableArray<ExceptionRegion> exceptionRegions,
        int size)
    {
        Debug.Assert(!exceptionRegions.IsDefault);

        _localVariablesInitialized = localVariablesInitialized;
        _maxStack = maxStack;
        _localSignature = localSignatureHandle;
        _il = il;
        _exceptionRegions = exceptionRegions;
        _size = size;
    }

    /// <summary>
    /// Size of the method body - includes the header, IL and exception regions.
    /// </summary>
    public int Size
    {
        get { return _size; }
    }

    /// <summary>
    /// Maximum operand-stack depth declared in the header (8 for tiny bodies).
    /// </summary>
    public int MaxStack
    {
        get { return _maxStack; }
    }

    /// <summary>
    /// True when the CorILMethod_InitLocals flag is set, i.e. local variables
    /// are zero-initialized by the runtime.
    /// </summary>
    public bool LocalVariablesInitialized
    {
        get { return _localVariablesInitialized; }
    }

    /// <summary>
    /// Handle of the StandAloneSig row describing the local variables, or a
    /// nil handle when the body declares none.
    /// </summary>
    public StandaloneSignatureHandle LocalSignature
    {
        get { return _localSignature; }
    }

    /// <summary>
    /// Exception-handling regions (try/catch/filter/finally/fault clauses).
    /// </summary>
    public ImmutableArray<ExceptionRegion> ExceptionRegions
    {
        get { return _exceptionRegions; }
    }

    /// <summary>
    /// Copies the IL instruction bytes into a new array.
    /// </summary>
    public byte[] GetILBytes()
    {
        return _il.ToArray();
    }

    /// <summary>
    /// Returns the IL bytes as an immutable array. The freshly allocated
    /// array from GetILBytes is wrapped without an extra copy.
    /// </summary>
    public ImmutableArray<byte> GetILContent()
    {
        byte[] bytes = GetILBytes();
        return ImmutableByteArrayInterop.DangerousCreateFromUnderlyingArray(ref bytes);
    }

    /// <summary>
    /// Returns a reader positioned at the start of the IL stream.
    /// </summary>
    public BlobReader GetILReader()
    {
        return new BlobReader(_il);
    }

    // Method-header format constants (ECMA-335 §II.25.4.1, CorILMethod_*).
    private const byte ILTinyFormat = 0x02;
    private const byte ILFatFormat = 0x03;
    private const byte ILFormatMask = 0x03;
    private const int ILTinyFormatSizeShift = 2;      // tiny header: size is in the upper 6 bits
    private const byte ILMoreSects = 0x08;            // extra (SEH) sections follow the IL
    private const byte ILInitLocals = 0x10;           // zero-init locals
    private const byte ILFatFormatHeaderSize = 0x03;  // fat header is 3 dwords = 12 bytes
    private const int ILFatFormatHeaderSizeShift = 4;
    // Section-header flags (ECMA-335 §II.25.4.5, CorILMethod_Sect_*).
    private const byte SectEHTable = 0x01;
    private const byte SectOptILTable = 0x02;
    private const byte SectFatFormat = 0x40;
    private const byte SectMoreSects = 0x40;

    /// <summary>
    /// Parses a method body starting at the reader's current position.
    /// Handles both the tiny and the fat header format, the local-signature
    /// token, and any trailing small/fat exception-handling sections.
    /// </summary>
    /// <exception cref="BadImageFormatException">
    /// The header flags, header size, SEH section header or local signature
    /// token are malformed.
    /// </exception>
    public static MethodBodyBlock Create(BlobReader reader)
    {
        int startOffset = reader.Offset;
        int ilSize;

        // Error need to check if the Memory Block is empty. This is false for all the calls...
        byte headByte = reader.ReadByte();
        if ((headByte & ILFormatMask) == ILTinyFormat)
        {
            // tiny IL can't have locals so technically this shouldn't matter,
            // but false is consistent with other metadata readers and helps
            // for use cases involving comparing our output with theirs.
            const bool initLocalsForTinyIL = false;

            // Tiny format: IL size is encoded in the upper 6 bits of the
            // header byte; max stack is implicitly 8, no locals, no SEH.
            ilSize = headByte >> ILTinyFormatSizeShift;
            return new MethodBodyBlock(
                initLocalsForTinyIL,
                8,
                default(StandaloneSignatureHandle),
                reader.GetMemoryBlockAt(0, ilSize),
                ImmutableArray<ExceptionRegion>.Empty,
                1 + ilSize // header + IL
            );
        }

        if ((headByte & ILFormatMask) != ILFatFormat)
        {
            throw new BadImageFormatException(string.Format(SR.InvalidMethodHeader1, headByte));
        }

        // FatILFormat
        // Second header byte: upper nibble holds the header size in dwords,
        // which must be 3 (12 bytes) for a valid fat header.
        byte headByte2 = reader.ReadByte();
        if ((headByte2 >> ILFatFormatHeaderSizeShift) != ILFatFormatHeaderSize)
        {
            throw new BadImageFormatException(string.Format(SR.InvalidMethodHeader2, headByte, headByte2));
        }

        bool localsInitialized = (headByte & ILInitLocals) == ILInitLocals;
        bool hasExceptionHandlers = (headByte & ILMoreSects) == ILMoreSects;

        ushort maxStack = reader.ReadUInt16();
        ilSize = reader.ReadInt32();

        // The local-signature token must be nil or a StandAloneSig (0x11) token.
        int localSignatureToken = reader.ReadInt32();
        StandaloneSignatureHandle localSignatureHandle;
        if (localSignatureToken == 0)
        {
            localSignatureHandle = default(StandaloneSignatureHandle);
        }
        else if ((localSignatureToken & TokenTypeIds.TypeMask) == TokenTypeIds.Signature)
        {
            localSignatureHandle = StandaloneSignatureHandle.FromRowId((int)((uint)localSignatureToken & TokenTypeIds.RIDMask));
        }
        else
        {
            throw new BadImageFormatException(string.Format(SR.InvalidLocalSignatureToken, unchecked((uint)localSignatureToken)));
        }

        var ilBlock = reader.GetMemoryBlockAt(0, ilSize);
        reader.SkipBytes(ilSize);

        ImmutableArray<ExceptionRegion> exceptionHandlers;
        if (hasExceptionHandlers)
        {
            // SEH sections start at the next 4-byte boundary after the IL.
            reader.Align(4);

            byte sehHeader = reader.ReadByte();
            if ((sehHeader & SectEHTable) != SectEHTable)
            {
                throw new BadImageFormatException(string.Format(SR.InvalidSehHeader, sehHeader));
            }

            bool sehFatFormat = (sehHeader & SectFatFormat) == SectFatFormat;
            // Section data size: 1 byte (small) or 3 bytes little-endian (fat).
            int dataSize = reader.ReadByte();
            if (sehFatFormat)
            {
                // The remaining two size bytes occupy bits 8..23.
                dataSize += reader.ReadUInt16() << 8;
                // Fat clauses are 24 bytes each (the 4-byte section header is
                // absorbed by the integer division).
                exceptionHandlers = ReadFatExceptionHandlers(ref reader, dataSize / 24);
            }
            else
            {
                reader.SkipBytes(2); // skip over reserved field
                // Small clauses are 12 bytes each.
                exceptionHandlers = ReadSmallExceptionHandlers(ref reader, dataSize / 12);
            }
        }
        else
        {
            exceptionHandlers = ImmutableArray<ExceptionRegion>.Empty;
        }

        return new MethodBodyBlock(
            localsInitialized,
            maxStack,
            localSignatureHandle,
            ilBlock,
            exceptionHandlers,
            reader.Offset - startOffset);
    }

    /// <summary>
    /// Reads <paramref name="count"/> small-format (12-byte) SEH clauses:
    /// 2-byte kind, 2-byte try offset, 1-byte try length, 2-byte handler
    /// offset, 1-byte handler length, 4-byte class token / filter offset.
    /// </summary>
    private static ImmutableArray<ExceptionRegion> ReadSmallExceptionHandlers(ref BlobReader memReader, int count)
    {
        var result = new ExceptionRegion[count];
        for (int i = 0; i < result.Length; i++)
        {
            var kind = (ExceptionRegionKind)memReader.ReadUInt16();
            var tryOffset = memReader.ReadUInt16();
            var tryLength = memReader.ReadByte();
            var handlerOffset = memReader.ReadUInt16();
            var handlerLength = memReader.ReadByte();
            var classTokenOrFilterOffset = memReader.ReadInt32();
            result[i] = new ExceptionRegion(kind, tryOffset, tryLength, handlerOffset, handlerLength, classTokenOrFilterOffset);
        }

        return ImmutableArray.Create(result);
    }

    /// <summary>
    /// Reads <paramref name="count"/> fat-format (24-byte) SEH clauses: six
    /// 4-byte fields in the same order as the small format.
    /// </summary>
    private static ImmutableArray<ExceptionRegion> ReadFatExceptionHandlers(ref BlobReader memReader, int count)
    {
        var result = new ExceptionRegion[count];
        for (int i = 0; i < result.Length; i++)
        {
            var sehFlags = (ExceptionRegionKind)memReader.ReadUInt32();
            int tryOffset = memReader.ReadInt32();
            int tryLength = memReader.ReadInt32();
            int handlerOffset = memReader.ReadInt32();
            int handlerLength = memReader.ReadInt32();
            int classTokenOrFilterOffset = memReader.ReadInt32();
            result[i] = new ExceptionRegion(sehFlags, tryOffset, tryLength, handlerOffset, handlerLength, classTokenOrFilterOffset);
        }

        return ImmutableArray.Create(result);
    }
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using OpenMetaverse;
using OpenSim.Region.Physics.Manager;
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
namespace OpenSim.Region.Physics.Meshing
{
/// <summary>
/// Triangle mesh used by the physics meshers. Vertex/index data can be
/// exposed either as pinned managed arrays or as unmanaged (HGlobal) buffers;
/// once pinned or marshaled, the mesh may no longer be modified.
/// </summary>
public class Mesh : IMesh
{
    // Vertex -> index map; the value is the vertex's position in the flattened arrays.
    private Dictionary<Vertex, int> m_vertices;
    private List<Triangle> m_triangles;

    GCHandle m_pinnedVertexes;
    GCHandle m_pinnedIndex;

    IntPtr m_verticesPtr = IntPtr.Zero;
    int m_vertexCount = 0;
    IntPtr m_indicesPtr = IntPtr.Zero;
    int m_indexCount = 0;

    // Bytes reported to the GC via AddMemoryPressure for the pinned arrays,
    // so releasePinned can report the matching RemoveMemoryPressure.
    private long m_pinnedMemoryPressure = 0;

    public float[] m_normals;

    public Mesh()
    {
        m_vertices = new Dictionary<Vertex, int>();
        m_triangles = new List<Triangle>();
    }

    /// <summary>
    /// Returns a deep copy of this mesh (vertices are cloned per triangle).
    /// </summary>
    public Mesh Clone()
    {
        Mesh result = new Mesh();

        foreach (Triangle t in m_triangles)
        {
            result.Add(new Triangle(t.v1.Clone(), t.v2.Clone(), t.v3.Clone()));
        }

        return result;
    }

    /// <summary>
    /// Adds a triangle, registering any vertices not yet in the vertex map.
    /// </summary>
    /// <exception cref="NotSupportedException">The mesh is already pinned/marshaled.</exception>
    public void Add(Triangle triangle)
    {
        if (m_pinnedIndex.IsAllocated || m_pinnedVertexes.IsAllocated || m_indicesPtr != IntPtr.Zero || m_verticesPtr != IntPtr.Zero)
            throw new NotSupportedException("Attempt to Add to a pinned Mesh");

        // If a vertex of the triangle is not yet in the vertices list,
        // add it and set its index to the current index count
        if (!m_vertices.ContainsKey(triangle.v1))
            m_vertices[triangle.v1] = m_vertices.Count;
        if (!m_vertices.ContainsKey(triangle.v2))
            m_vertices[triangle.v2] = m_vertices.Count;
        if (!m_vertices.ContainsKey(triangle.v3))
            m_vertices[triangle.v3] = m_vertices.Count;

        m_triangles.Add(triangle);
    }

    /// <summary>
    /// Computes one (normalized) face normal per triangle into m_normals,
    /// laid out as [nx, ny, nz] per triangle.
    /// </summary>
    public void CalcNormals()
    {
        int iTriangles = m_triangles.Count;
        this.m_normals = new float[iTriangles * 3];

        int i = 0;
        foreach (Triangle t in m_triangles)
        {
            float ux, uy, uz;
            float vx, vy, vz;
            float wx, wy, wz;

            ux = t.v1.X;
            uy = t.v1.Y;
            uz = t.v1.Z;

            vx = t.v2.X;
            vy = t.v2.Y;
            vz = t.v2.Z;

            wx = t.v3.X;
            wy = t.v3.Y;
            wz = t.v3.Z;

            // Vectors for edges
            float e1x, e1y, e1z;
            float e2x, e2y, e2z;

            e1x = ux - vx;
            e1y = uy - vy;
            e1z = uz - vz;

            e2x = ux - wx;
            e2y = uy - wy;
            e2z = uz - wz;

            // Cross product for normal
            float nx, ny, nz;
            nx = e1y * e2z - e1z * e2y;
            ny = e1z * e2x - e1x * e2z;
            nz = e1x * e2y - e1y * e2x;

            // Length
            float l = (float)Math.Sqrt(nx * nx + ny * ny + nz * nz);
            // BUGFIX: a degenerate (zero-area) triangle produced 1/0 and
            // NaN normals; emit a zero normal instead.
            float lReciprocal = l > 0f ? 1.0f / l : 0f;

            // Normalized "normal"
            m_normals[i] = nx * lReciprocal;
            m_normals[i + 1] = ny * lReciprocal;
            m_normals[i + 2] = nz * lReciprocal;

            i += 3;
        }
    }

    /// <summary>
    /// Returns the vertices as a list of Vector3 (order is dictionary order).
    /// </summary>
    public List<Vector3> getVertexList()
    {
        List<Vector3> result = new List<Vector3>();
        foreach (Vertex v in m_vertices.Keys)
        {
            result.Add(new Vector3(v.X, v.Y, v.Z));
        }
        return result;
    }

    /// <summary>
    /// Flattens the vertices into [x, y, z] triplets, placed at the index
    /// recorded for each vertex in the vertex map.
    /// </summary>
    /// <exception cref="NotSupportedException">Source data was already released.</exception>
    public float[] getVertexListAsFloat()
    {
        if (m_vertices == null)
            throw new NotSupportedException();
        float[] result = new float[m_vertices.Count * 3];
        foreach (KeyValuePair<Vertex, int> kvp in m_vertices)
        {
            Vertex v = kvp.Key;
            int i = kvp.Value;
            result[3 * i + 0] = v.X;
            result[3 * i + 1] = v.Y;
            result[3 * i + 2] = v.Z;
        }
        return result;
    }

    /// <summary>
    /// Returns the flattened vertex array, pinned in memory. Subsequent calls
    /// return the same pinned array.
    /// </summary>
    public float[] getVertexListAsFloatLocked()
    {
        if (m_pinnedVertexes.IsAllocated)
            return (float[])(m_pinnedVertexes.Target);

        float[] result = getVertexListAsFloat();
        m_pinnedVertexes = GCHandle.Alloc(result, GCHandleType.Pinned);

        // Inform the garbage collector of this allocation so it can schedule
        // the next GC round more intelligently.
        // BUGFIX: AddMemoryPressure(0) throws for an empty mesh, and the
        // pressure was never removed again; guard and track it for releasePinned.
        int byteLength = Buffer.ByteLength(result);
        if (byteLength > 0)
        {
            GC.AddMemoryPressure(byteLength);
            m_pinnedMemoryPressure += byteLength;
        }

        return result;
    }

    /// <summary>
    /// Exposes the vertices as an unmanaged float buffer (allocated lazily,
    /// freed in releasePinned). Stride is 3 floats per vertex.
    /// </summary>
    public void getVertexListAsPtrToFloatArray(out IntPtr vertices, out int vertexStride, out int vertexCount)
    {
        // A vertex is 3 floats
        vertexStride = 3 * sizeof(float);

        // If there isn't an unmanaged array allocated yet, do it now
        if (m_verticesPtr == IntPtr.Zero)
        {
            float[] vertexList = getVertexListAsFloat();
            // Each vertex is 3 elements (floats)
            m_vertexCount = vertexList.Length / 3;
            int byteCount = m_vertexCount * vertexStride;
            m_verticesPtr = System.Runtime.InteropServices.Marshal.AllocHGlobal(byteCount);
            System.Runtime.InteropServices.Marshal.Copy(vertexList, 0, m_verticesPtr, m_vertexCount * 3);
        }
        vertices = m_verticesPtr;
        vertexCount = m_vertexCount;
    }

    /// <summary>
    /// Returns the triangle index list (3 vertex indices per triangle).
    /// </summary>
    /// <exception cref="NotSupportedException">Source data was already released.</exception>
    public int[] getIndexListAsInt()
    {
        if (m_triangles == null)
            throw new NotSupportedException();
        int[] result = new int[m_triangles.Count * 3];
        for (int i = 0; i < m_triangles.Count; i++)
        {
            Triangle t = m_triangles[i];
            result[3 * i + 0] = m_vertices[t.v1];
            result[3 * i + 1] = m_vertices[t.v2];
            result[3 * i + 2] = m_vertices[t.v3];
        }
        return result;
    }

    /// <summary>
    /// Returns the index list, pinned in memory. Subsequent calls return the
    /// same pinned array.
    /// </summary>
    public int[] getIndexListAsIntLocked()
    {
        if (m_pinnedIndex.IsAllocated)
            return (int[])(m_pinnedIndex.Target);

        int[] result = getIndexListAsInt();
        m_pinnedIndex = GCHandle.Alloc(result, GCHandleType.Pinned);

        // Inform the garbage collector of this allocation so it can schedule
        // the next GC round more intelligently.
        // BUGFIX: guard against zero bytes and track the pressure (see
        // getVertexListAsFloatLocked).
        int byteLength = Buffer.ByteLength(result);
        if (byteLength > 0)
        {
            GC.AddMemoryPressure(byteLength);
            m_pinnedMemoryPressure += byteLength;
        }

        return result;
    }

    /// <summary>
    /// Exposes the indices as an unmanaged int buffer (allocated lazily,
    /// freed in releasePinned). Stride is 3 ints per triangle.
    /// </summary>
    public void getIndexListAsPtrToIntArray(out IntPtr indices, out int triStride, out int indexCount)
    {
        // If there isn't an unmanaged array allocated yet, do it now
        if (m_indicesPtr == IntPtr.Zero)
        {
            int[] indexList = getIndexListAsInt();
            m_indexCount = indexList.Length;
            int byteCount = m_indexCount * sizeof(int);
            m_indicesPtr = System.Runtime.InteropServices.Marshal.AllocHGlobal(byteCount);
            System.Runtime.InteropServices.Marshal.Copy(indexList, 0, m_indicesPtr, m_indexCount);
        }
        // A triangle is 3 ints (indices)
        triStride = 3 * sizeof(int);
        indices = m_indicesPtr;
        indexCount = m_indexCount;
    }

    /// <summary>
    /// Frees the pinned handles and unmanaged buffers created by the
    /// get*Locked()/get*AsPtr*() methods.
    /// </summary>
    public void releasePinned()
    {
        if (m_pinnedVertexes.IsAllocated)
            m_pinnedVertexes.Free();
        if (m_pinnedIndex.IsAllocated)
            m_pinnedIndex.Free();
        // BUGFIX: the pressure registered via GC.AddMemoryPressure was never
        // removed, permanently skewing the GC's accounting.
        if (m_pinnedMemoryPressure > 0)
        {
            GC.RemoveMemoryPressure(m_pinnedMemoryPressure);
            m_pinnedMemoryPressure = 0;
        }
        if (m_verticesPtr != IntPtr.Zero)
        {
            System.Runtime.InteropServices.Marshal.FreeHGlobal(m_verticesPtr);
            m_verticesPtr = IntPtr.Zero;
        }
        if (m_indicesPtr != IntPtr.Zero)
        {
            System.Runtime.InteropServices.Marshal.FreeHGlobal(m_indicesPtr);
            m_indicesPtr = IntPtr.Zero;
        }
    }

    /// <summary>
    /// frees up the source mesh data to minimize memory - call this method
    /// after calling get*Locked() functions
    /// </summary>
    public void releaseSourceMeshData()
    {
        m_triangles = null;
        m_vertices = null;
    }

    /// <summary>
    /// Appends all triangles of another Mesh to this one (no-op for other
    /// IMesh implementations).
    /// </summary>
    /// <exception cref="NotSupportedException">The mesh is already pinned/marshaled.</exception>
    public void Append(IMesh newMesh)
    {
        if (m_pinnedIndex.IsAllocated || m_pinnedVertexes.IsAllocated || m_indicesPtr != IntPtr.Zero || m_verticesPtr != IntPtr.Zero)
            throw new NotSupportedException("Attempt to Append to a pinned Mesh");

        if (!(newMesh is Mesh))
            return;

        foreach (Triangle t in ((Mesh)newMesh).m_triangles)
            Add(t);
    }

    // Do a linear transformation of mesh.
    public void TransformLinear(float[,] matrix, float[] offset)
    {
        if (m_pinnedIndex.IsAllocated || m_pinnedVertexes.IsAllocated || m_indicesPtr != IntPtr.Zero || m_verticesPtr != IntPtr.Zero)
            throw new NotSupportedException("Attempt to TransformLinear a pinned Mesh");

        foreach (Vertex v in m_vertices.Keys)
        {
            if (v == null)
                continue;
            float x, y, z;
            x = v.X*matrix[0, 0] + v.Y*matrix[1, 0] + v.Z*matrix[2, 0];
            y = v.X*matrix[0, 1] + v.Y*matrix[1, 1] + v.Z*matrix[2, 1];
            z = v.X*matrix[0, 2] + v.Y*matrix[1, 2] + v.Z*matrix[2, 2];
            v.X = x + offset[0];
            v.Y = y + offset[1];
            v.Z = z + offset[2];
        }
    }

    /// <summary>
    /// Dumps the triangles in raw text form to "{name}_{title}.raw" under the
    /// given path (no-op when path is null).
    /// </summary>
    public void DumpRaw(String path, String name, String title)
    {
        if (path == null)
            return;
        String fileName = name + "_" + title + ".raw";
        String completePath = System.IO.Path.Combine(path, fileName);
        StreamWriter sw = new StreamWriter(completePath);
        foreach (Triangle t in m_triangles)
        {
            String s = t.ToStringRaw();
            sw.WriteLine(s);
        }
        sw.Close();
    }

    /// <summary>
    /// Trims excess capacity from the triangle list.
    /// </summary>
    public void TrimExcess()
    {
        m_triangles.TrimExcess();
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyFile
{
using Microsoft.Rest;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Files operations.
/// </summary>
public partial class Files : IServiceOperations<AutoRestSwaggerBATFileService>, IFiles
{
    /// <summary>
    /// Initializes a new instance of the Files class.
    /// </summary>
    /// <param name='client'>
    /// Reference to the service client.
    /// </param>
    /// <exception cref="System.ArgumentNullException">
    /// Thrown when a required parameter is null
    /// </exception>
    public Files(AutoRestSwaggerBATFileService client)
    {
        if (client == null)
        {
            throw new System.ArgumentNullException("client");
        }
        Client = client;
    }

    /// <summary>
    /// Gets a reference to the AutoRestSwaggerBATFileService
    /// </summary>
    public AutoRestSwaggerBATFileService Client { get; private set; }

    /// <summary>
    /// Get file
    /// </summary>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public Task<HttpOperationResponse<Stream>> GetFileWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        return GetStreamWithHttpMessagesAsync("files/stream/nonempty", "GetFile", customHeaders, cancellationToken);
    }

    /// <summary>
    /// Get a large file
    /// </summary>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public Task<HttpOperationResponse<Stream>> GetFileLargeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        return GetStreamWithHttpMessagesAsync("files/stream/verylarge", "GetFileLarge", customHeaders, cancellationToken);
    }

    /// <summary>
    /// Get empty file
    /// </summary>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public Task<HttpOperationResponse<Stream>> GetEmptyFileWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        return GetStreamWithHttpMessagesAsync("files/stream/empty", "GetEmptyFile", customHeaders, cancellationToken);
    }

    /// <summary>
    /// Shared implementation for the stream GET operations above. The three
    /// public methods differed only in relative URL and tracing operation
    /// name, so the common request/response pipeline lives here once.
    /// </summary>
    /// <param name='relativeUrl'>
    /// Path relative to the client base URI, e.g. "files/stream/nonempty".
    /// </param>
    /// <param name='operationName'>
    /// Name reported to ServiceClientTracing for this invocation.
    /// </param>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="ErrorException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    private async Task<HttpOperationResponse<Stream>> GetStreamWithHttpMessagesAsync(string relativeUrl, string operationName, Dictionary<string, List<string>> customHeaders, CancellationToken cancellationToken)
    {
        // Tracing
        bool _shouldTrace = ServiceClientTracing.IsEnabled;
        string _invocationId = null;
        if (_shouldTrace)
        {
            _invocationId = ServiceClientTracing.NextInvocationId.ToString();
            Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
            tracingParameters.Add("cancellationToken", cancellationToken);
            ServiceClientTracing.Enter(_invocationId, this, operationName, tracingParameters);
        }
        // Construct URL
        var _baseUrl = Client.BaseUri.AbsoluteUri;
        var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), relativeUrl).ToString();
        // Create HTTP transport objects
        var _httpRequest = new HttpRequestMessage();
        HttpResponseMessage _httpResponse = null;
        _httpRequest.Method = new HttpMethod("GET");
        _httpRequest.RequestUri = new System.Uri(_url);
        // Set Headers
        if (customHeaders != null)
        {
            foreach(var _header in customHeaders)
            {
                // Replace rather than append so caller-supplied values win.
                if (_httpRequest.Headers.Contains(_header.Key))
                {
                    _httpRequest.Headers.Remove(_header.Key);
                }
                _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
            }
        }
        // Serialize Request
        string _requestContent = null;
        // Send Request
        if (_shouldTrace)
        {
            ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
        }
        cancellationToken.ThrowIfCancellationRequested();
        // ResponseHeadersRead avoids buffering the (possibly very large) body
        // before handing the caller the raw stream.
        _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, System.Net.Http.HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false);
        if (_shouldTrace)
        {
            ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
        }
        HttpStatusCode _statusCode = _httpResponse.StatusCode;
        cancellationToken.ThrowIfCancellationRequested();
        string _responseContent = null;
        if ((int)_statusCode != 200)
        {
            var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
            try
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, Client.DeserializationSettings);
                if (_errorBody != null)
                {
                    ex.Body = _errorBody;
                }
            }
            catch (JsonException)
            {
                // Ignore the exception: the error body is optional; the
                // status-code exception below is thrown regardless.
            }
            ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
            ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
            if (_shouldTrace)
            {
                ServiceClientTracing.Error(_invocationId, ex);
            }
            // The wrappers captured what callers need, so the live transport
            // objects can be disposed before throwing.
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw ex;
        }
        // Create Result
        var _result = new HttpOperationResponse<Stream>();
        _result.Request = _httpRequest;
        _result.Response = _httpResponse;
        // Deserialize Response
        if ((int)_statusCode == 200)
        {
            _result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Exit(_invocationId, _result);
        }
        return _result;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.14.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyDateTimeRfc1123
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// Datetimerfc1123 operations.
/// </summary>
public partial class Datetimerfc1123 : IServiceOperations<AutoRestRFC1123DateTimeTestService>, IDatetimerfc1123
{
/// <summary>
/// Initializes a new instance of the Datetimerfc1123 class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="client"/> is null.
/// </exception>
public Datetimerfc1123(AutoRestRFC1123DateTimeTestService client)
{
if (client == null)
{
throw new ArgumentNullException("client");
}
this.Client = client;
}
/// <summary>
/// Gets a reference to the AutoRestRFC1123DateTimeTestService
/// </summary>
// Set once in the constructor; read-only to everyone else.
public AutoRestRFC1123DateTimeTestService Client { get; private set; }
/// <summary>
/// Get null datetime value
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="RestException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<DateTime?>> GetNullWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetNull", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/null").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
// Replace rather than append so caller-supplied values win.
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = _httpRequest;
ex.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// NOTE(review): request/response are not disposed on this error path —
// they remain attached to the exception; confirm disposal is the
// caller's responsibility in this generator version.
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<DateTime?>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
// Body stays null when the service returns a JSON null, hence DateTime?.
_result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
throw new RestException("Unable to deserialize the response.", ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get invalid datetime value
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="RestException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<DateTime?>> GetInvalidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetInvalid", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/invalid").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
// Replace rather than append so caller-supplied values win.
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = _httpRequest;
ex.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// NOTE(review): request/response are not disposed on this error path —
// they remain attached to the exception; confirm disposal is the
// caller's responsibility in this generator version.
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<DateTime?>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
// An unparseable datetime surfaces here as JsonException -> RestException.
_result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
throw new RestException("Unable to deserialize the response.", ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get overflow datetime value
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="RestException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<DateTime?>> GetOverflowWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetOverflow", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/overflow").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
// Replace rather than append so caller-supplied values win.
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = _httpRequest;
ex.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// NOTE(review): request/response are not disposed on this error path —
// they remain attached to the exception; confirm disposal is the
// caller's responsibility in this generator version.
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<DateTime?>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
// A value beyond DateTime range surfaces as JsonException -> RestException.
_result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
throw new RestException("Unable to deserialize the response.", ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get underflow datetime value
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="RestException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<DateTime?>> GetUnderflowWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetUnderflow", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/underflow").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
// Replace rather than append so caller-supplied values win.
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = _httpRequest;
ex.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// NOTE(review): request/response are not disposed on this error path —
// they remain attached to the exception; confirm disposal is the
// caller's responsibility in this generator version.
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<DateTime?>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
// A value below DateTime range surfaces as JsonException -> RestException.
_result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
throw new RestException("Unable to deserialize the response.", ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Put max datetime value Fri, 31 Dec 9999 23:59:59 GMT
/// </summary>
/// <param name='datetimeBody'>
/// The datetime value to send; serialized in RFC 1123 format.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ValidationException">
/// Thrown when <paramref name="datetimeBody"/> is null.
/// </exception>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <return>
/// A response object containing the response headers (no body).
/// </return>
public async Task<HttpOperationResponse> PutUtcMaxDateTimeWithHttpMessagesAsync(DateTime? datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (datetimeBody == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "datetimeBody");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("datetimeBody", datetimeBody);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "PutUtcMaxDateTime", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/max").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
// Replace rather than append so caller-supplied values win.
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
// The RFC 1123 converter writes the body as an RFC 1123 date string.
string _requestContent = SafeJsonConvert.SerializeObject(datetimeBody, new DateTimeRfc1123JsonConverter());
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = _httpRequest;
ex.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// NOTE(review): request/response are not disposed on this error path —
// they remain attached to the exception; confirm disposal is the
// caller's responsibility in this generator version.
throw ex;
}
// Create Result
var _result = new HttpOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get max datetime value fri, 31 dec 9999 23:59:59 gmt
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="RestException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<DateTime?>> GetUtcLowercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetUtcLowercaseMaxDateTime", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/max/lowercase").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
// Replace rather than append so caller-supplied values win.
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = _httpRequest;
ex.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// NOTE(review): request/response are not disposed on this error path —
// they remain attached to the exception; confirm disposal is the
// caller's responsibility in this generator version.
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<DateTime?>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
try
{
string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
_result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
throw new RestException("Unable to deserialize the response.", ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get max datetime value FRI, 31 DEC 9999 23:59:59 GMT
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<HttpOperationResponse<DateTime?>> GetUtcUppercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Tracing: invocation id is only generated when tracing is enabled so the
    // common (untraced) path pays no cost.
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetUtcUppercaseMaxDateTime", tracingParameters);
    }
    // Construct URL relative to the client's base URI; a trailing slash is
    // appended so the relative path resolves under the base path rather than
    // replacing its last segment.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/max/uppercase").ToString();
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers: caller-supplied headers replace any header with the same
    // key rather than appending a second value.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    if ((int)_statusCode != 200)
    {
        // Non-200: try to deserialize a service-defined Error body; a body that
        // is not valid JSON is deliberately ignored (best-effort enrichment).
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = _httpRequest;
        ex.Response = _httpResponse;
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        throw ex;
    }
    // Create Result
    // NOTE(review): request/response are intentionally not disposed here; they
    // are handed to the caller via the result object.
    var _result = new HttpOperationResponse<DateTime?>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        try
        {
            string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            _result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            throw new RestException("Unable to deserialize the response.", ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Put min datetime value Mon, 1 Jan 0001 00:00:00 GMT
/// </summary>
/// <param name='datetimeBody'>
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<HttpOperationResponse> PutUtcMinDateTimeWithHttpMessagesAsync(DateTime? datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // The body is required even though the parameter type is nullable; the
    // nullable type is part of the generated wire contract.
    if (datetimeBody == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "datetimeBody");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("datetimeBody", datetimeBody);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "PutUtcMinDateTime", tracingParameters);
    }
    // Construct URL
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/min").ToString();
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    _httpRequest.Method = new HttpMethod("PUT");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers: caller-supplied headers replace same-keyed defaults.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: a dedicated converter emits the value in RFC 1123
    // date format rather than the default ISO 8601.
    string _requestContent = SafeJsonConvert.SerializeObject(datetimeBody, new DateTimeRfc1123JsonConverter());
    _httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
    _httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    if ((int)_statusCode != 200)
    {
        // Non-200: best-effort deserialization of the error body; invalid JSON
        // is swallowed so the status-code exception is still thrown.
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = _httpRequest;
        ex.Response = _httpResponse;
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        throw ex;
    }
    // Create Result: PUT returns no body, so the non-generic response wrapper
    // only carries the request/response pair back to the caller.
    var _result = new HttpOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Get min datetime value Mon, 1 Jan 0001 00:00:00 GMT
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<HttpOperationResponse<DateTime?>> GetUtcMinDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "GetUtcMinDateTime", tracingParameters);
    }
    // Construct URL relative to the client base URI.
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "datetimerfc1123/min").ToString();
    // Create HTTP transport objects
    HttpRequestMessage _httpRequest = new HttpRequestMessage();
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new Uri(_url);
    // Set Headers: caller-supplied headers replace same-keyed defaults.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    HttpResponseMessage _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    if ((int)_statusCode != 200)
    {
        // Non-200: best-effort deserialization of the error body; invalid JSON
        // is swallowed so the status-code exception is still thrown.
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception
        }
        ex.Request = _httpRequest;
        ex.Response = _httpResponse;
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        throw ex;
    }
    // Create Result; request/response are surfaced to the caller, not disposed.
    var _result = new HttpOperationResponse<DateTime?>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        try
        {
            string _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            _result.Body = SafeJsonConvert.DeserializeObject<DateTime?>(_responseContent, this.Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            throw new RestException("Unable to deserialize the response.", ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
#region Using directives
using System;
using System.Data;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Xml;
using System.Xml.XPath;
using System.Xml.Schema;
using log4net;
using Commanigy.Iquomi.Api;
using System.ComponentModel;
#endregion
namespace Commanigy.Iquomi.Data {
/// <summary>
///
/// </summary>
[Serializable(), DataObject()]
public class DbService : Service, IDbObject<DbService> {

    // Logger scoped to this type.
    private static readonly ILog log = LogManager.GetLogger(
        System.Reflection.MethodBase.GetCurrentMethod().DeclaringType
    );

    public DbService() {
    }

    /// <summary>
    /// Creates <paramref name="v"/> via the iqServiceCreate stored procedure
    /// and assigns the generated identity to <c>v.Id</c>.
    /// </summary>
    /// <param name="v">The service to persist.</param>
    /// <returns>The same instance with its Id populated.</returns>
    public static DbService DbCreate(DbService v) {
        using (DbUtility db = new DbUtility("iqServiceCreate")) {
            db.In("@author_id", v.AuthorId);
            db.In("@name", v.Name);
            db.In("@version", v.Version);
            db.In("@xsd", v.Xsd);
            db.In("@url_xsd", v.UrlXsd);
            db.In("@url_icon", v.UrlIcon);
            db.In("@url_homepage", v.UrlHomepage);
            db.In("@state", v.State);
            db.In("@role_map", v.RoleMap);
            // The stored procedure returns the new identity as a scalar.
            v.Id = (int)db.ExecuteScalar();
            return v;
        }
    }

    /// <summary>
    /// Reads a service by id, scoped to a particular author and language,
    /// via the iqServiceFindByAuthor stored procedure.
    /// </summary>
    public static DbService DbRead(Int32 id, Int32 authorId, Int32 languageId) {
        using (DbUtility db = new DbUtility("iqServiceFindByAuthor")) {
            db.In("@id", id);
            db.In("@author_id", authorId);
            db.In("@language_id", languageId);
            return (DbService)db.Fill(new DbService());
        }
    }

    /// <summary>
    /// Reads a service by id using the instance-level default language.
    /// </summary>
    public static DbService DbRead(Int32 id) {
        DbService a = new DbService();
        a.Id = id;
        return a.DbRead();
    }

    /// <summary>
    /// Updates <paramref name="v"/> via the iqServiceUpdate stored procedure.
    /// </summary>
    /// <returns><paramref name="v"/> when exactly one row was affected; otherwise null.</returns>
    public static DbService DbUpdate(DbService v) {
        using (DbUtility db = new DbUtility("iqServiceUpdate")) {
            db.In("@id", v.Id);
            db.In("@author_id", v.AuthorId);
            db.In("@name", v.Name);
            db.In("@version", v.Version);
            db.In("@xsd", v.Xsd);
            db.In("@url_xsd", v.UrlXsd);
            db.In("@url_icon", v.UrlIcon);
            db.In("@url_homepage", v.UrlHomepage);
            db.In("@state", v.State);
            db.In("@role_map", v.RoleMap);
            return (db.ExecuteNonQuery() == 1) ? v : null;
        }
    }

    /// <summary>
    /// Deletes <paramref name="v"/> by id via the iqServiceDelete stored procedure.
    /// </summary>
    /// <returns><paramref name="v"/> when exactly one row was deleted; otherwise null.</returns>
    public static DbService DbDelete(DbService v) {
        using (DbUtility db = new DbUtility("iqServiceDelete")) {
            db.In("@id", v.Id);
            return (db.ExecuteNonQuery() == 1) ? v : null;
        }
    }

    // ---

    #region IDbObject<DbService> Members

    /// <summary>
    /// Creates new Service from this instance's state and stores the
    /// generated identity in <c>Id</c>.
    /// </summary>
    /// <returns>This instance.</returns>
    public DbService DbCreate() {
        using (DbUtility db = new DbUtility("iqServiceCreate")) {
            db.In("@author_id", this.AuthorId);
            db.In("@name", this.Name);
            db.In("@version", this.Version);
            db.In("@xsd", this.Xsd);
            db.In("@url_xsd", this.UrlXsd);
            db.In("@url_icon", this.UrlIcon);
            db.In("@url_homepage", this.UrlHomepage);
            db.In("@state", this.State);
            db.In("@role_map", this.RoleMap);
            Id = (int)db.ExecuteScalar();
            return this;
        }
    }

    /// <summary>
    /// Populates this instance from the iqServiceRead stored procedure using
    /// the current <c>Id</c>. Language is currently hard-coded to 1.
    /// </summary>
    public DbService DbRead() {
        using (DbUtility db = new DbUtility("iqServiceRead")) {
            db.In("@id", this.Id);
            db.In("@language_id", 1);
            return (DbService)db.Fill(this);
        }
    }

    /// <summary>
    /// Updates the row matching this instance's <c>Id</c>.
    /// NOTE(review): this overload passes explicit DbType values while the
    /// static DbUpdate(DbService) lets DbUtility infer them — presumably
    /// equivalent; confirm against DbUtility.In.
    /// </summary>
    /// <returns>This instance when exactly one row was affected; otherwise null.</returns>
    public DbService DbUpdate() {
        using (DbUtility db = new DbUtility("iqServiceUpdate")) {
            db.In("@id", DbType.Int32, Id);
            db.In("@author_id", DbType.Int32, AuthorId);
            db.In("@name", DbType.String, Name);
            db.In("@version", DbType.String, Version);
            db.In("@xsd", DbType.String, Xsd);
            db.In("@url_xsd", DbType.String, UrlXsd);
            db.In("@url_icon", DbType.String, UrlIcon);
            db.In("@url_homepage", DbType.String, UrlHomepage);
            db.In("@state", DbType.String, State);
            db.In("@role_map", DbType.String, this.RoleMap);
            return (db.ExecuteNonQuery() == 1) ? this : null;
        }
    }

    /// <summary>
    /// Deletes the row matching this instance's <c>Id</c>.
    /// </summary>
    /// <returns>This instance when exactly one row was deleted; otherwise null.</returns>
    public DbService DbDelete() {
        using (DbUtility db = new DbUtility("iqServiceDelete")) {
            db.In("@id", this.Id);
            return (db.ExecuteNonQuery() == 1) ? this : null;
        }
    }

    #endregion

    /// <summary>
    /// Populates this instance by name via iqServiceFindByName.
    /// Language is currently hard-coded to 1.
    /// </summary>
    public DbService DbFindByName() {
        using (DbUtility db = new DbUtility("iqServiceFindByName")) {
            db.In("@name", this.Name);
            db.In("@language_id", 1);
            return (DbService)db.Fill(this);
        }
    }

    /// <summary>
    /// Convenience wrapper: finds a service by name.
    /// </summary>
    public static DbService DbFindByName(string name) {
        DbService service = new DbService();
        service.Name = name;
        return service.DbFindByName();
    }

    /// <summary>
    /// Returns all services owned by the given author for the given language.
    /// </summary>
    public static DbService[] DbFindAllByAuthor(int authorId, int languageId) {
        using (DbUtility db = new DbUtility("iqServiceFindAllByAuthor")) {
            db.In("@author_id", authorId);
            db.In("@language_id", languageId);
            return (DbService[])db.FillAll(typeof(DbService));
        }
    }

    /// <summary>
    /// Populates this instance by id and author id via iqServiceFindByAuthor.
    /// Language is currently hard-coded to 1.
    /// </summary>
    public DbService DbFindByAuthorId() {
        using (DbUtility db = new DbUtility("iqServiceFindByAuthor")) {
            db.In("@id", this.Id);
            db.In("@author_id", this.AuthorId);
            db.In("@language_id", 1);
            return (DbService)db.Fill(this);
        }
    }

    /// <summary>
    /// Lists all services for an author as a raw DataTable (for data binding).
    /// </summary>
    public static DataTable DbListAllByAuthor(int authorId, int languageId) {
        using (DbUtility db = new DbUtility("iqListAllServicesByAuthorId")) {
            db.In("@author_id", authorId);
            db.In("@language_id", languageId);
            return db.GetDataTable();
        }
    }

    /// <summary>
    /// Creates a scope attached to this service via iqScopeCreateByService and
    /// assigns the generated identity to <paramref name="v"/>.
    /// </summary>
    public DbScope DbCreateScope(DbScope v) {
        using (DbUtility db = new DbUtility("iqScopeCreateByService")) {
            db.In("@service_id", this.Id);
            db.In("@language_id", v.LanguageId);
            db.In("@name", v.Name);
            db.In("@base", v.Base);
            v.Id = (int)db.ExecuteScalar();
            return (DbScope)v;
        }
    }

    /// <summary>
    /// Attaches the standard role templates to this new service. The
    /// templates are read from an Xml document and are currently the same
    /// for all created services. In the future it might be possible to
    /// specify a specific role-set upon service creation.
    /// </summary>
    /// <param name="standardRoleTemplates">Root node containing Scope and RoleTemplate elements.</param>
    /// <returns>Always true.</returns>
    public bool InsertStandardRoleTemplates(XmlNode standardRoleTemplates) {
        // Scopes are created first and indexed by name so role-template
        // methods can resolve their ScopeRef attributes below.
        Hashtable ht = new Hashtable();
        foreach (XmlElement scope in standardRoleTemplates.SelectNodes("//Scope")) {
            DbScope s = new DbScope();
            s.Name = scope.GetAttribute("Name");
            s.Base = scope.GetAttribute("Base");
            s.DbCreate();
            ht.Add(s.Name, s);
            foreach (XmlElement shape in scope.SelectNodes("Shape")) {
                DbShape shp = new DbShape();
                shp.ScopeId = s.Id;
                shp.Select = shape.GetAttribute("Select");
                shp.Type = shape.GetAttribute("Type");
                shp.DbCreate();
            }
            // Link the scope to this service.
            DbServiceScope ss = new DbServiceScope();
            ss.ServiceId = this.Id;
            ss.ScopeId = s.Id;
            ss.DbCreate();
        }
        foreach (XmlElement roleTemplate in standardRoleTemplates.SelectNodes("//RoleTemplate")) {
            DbRoleTemplate rt = new DbRoleTemplate();
            rt.ServiceId = this.Id;
            rt.Name = roleTemplate.GetAttribute("Name");
            // Priority is optional in the template document; default 0.
            rt.Priority = roleTemplate.HasAttribute("Priority") ? Convert.ToInt32(roleTemplate.GetAttribute("Priority")) : 0;
            rt.DbCreate();
            foreach (XmlElement method in roleTemplate.SelectNodes("Method")) {
                DbRoleTemplateMethod rtm = new DbRoleTemplateMethod();
                rtm.RoleTemplateId = rt.Id;
                rtm.MethodTypeId = DbMethodType.GetIdForName(method.GetAttribute("Name"));
                // Throws NullReferenceException if a ScopeRef does not match a
                // previously-declared Scope name — template document must be consistent.
                rtm.ScopeId = ((DbScope)ht[method.GetAttribute("ScopeRef")]).Id;
                rtm.DbCreate();
            }
        }
        return true;
    }

    /// <summary>
    /// Writes out an example XML document conforming to this service's XSD.
    /// FIX: the MemoryStream and StreamReader are now disposed deterministically.
    /// </summary>
    public string GetXmlTemplate() {
        log.Debug("Getting XmlTemplate");
        using (MemoryStream mem = new MemoryStream()) {
            // The writer is only flushed, not disposed, because disposing an
            // XmlTextWriter closes its underlying stream before we can read it back.
            XmlTextWriter myXmlTextWriter = new XmlTextWriter(mem, System.Text.Encoding.UTF8);
            myXmlTextWriter.Formatting = Formatting.Indented;
            myXmlTextWriter.Indentation = 2;
            XmlSchema xsd = this.GetXmlSchema();
            xsd.Compile(null);
            foreach (XmlSchemaElement element in xsd.Elements.Values) {
                WriteExampleElement(myXmlTextWriter, element);
            }
            myXmlTextWriter.Flush();
            mem.Position = 0;
            using (StreamReader sr = new StreamReader(mem)) {
                string xml = sr.ReadToEnd();
                log.Debug("Returning sample xml as:\n" + xml);
                return xml;
            }
        }
    }

    // Recursively write an example element (and its attributes/children).
    void WriteExampleElement(XmlTextWriter myXmlTextWriter, XmlSchemaElement element) {
        myXmlTextWriter.WriteStartElement(element.QualifiedName.Name, element.QualifiedName.Namespace);
        if (element.ElementType is XmlSchemaComplexType) {
            XmlSchemaComplexType type = (XmlSchemaComplexType)element.ElementType;
            if (type.ContentModel != null) {
                Console.WriteLine("Not Implemented for this ContentModel");
            }
            WriteExampleAttributes(myXmlTextWriter, type.Attributes);
            WriteExampleParticle(myXmlTextWriter, type.Particle);
        }
        else {
            // Simple content: write a placeholder value only.
            WriteExampleValue(myXmlTextWriter, element.ElementType);
        }
        myXmlTextWriter.WriteEndElement();
    }

    // Write example attributes, resolving attribute-group references recursively.
    void WriteExampleAttributes(XmlTextWriter myXmlTextWriter, XmlSchemaObjectCollection attributes) {
        foreach (object o in attributes) {
            if (o is XmlSchemaAttribute) {
                WriteExampleAttribute(myXmlTextWriter, (XmlSchemaAttribute)o);
            }
            else {
                XmlSchemaAttributeGroup group = (XmlSchemaAttributeGroup)this.GetXmlSchema().Groups[((XmlSchemaAttributeGroupRef)o).RefName];
                WriteExampleAttributes(myXmlTextWriter, group.Attributes);
            }
        }
    }

    // Write a single example attribute.
    void WriteExampleAttribute(XmlTextWriter myXmlTextWriter, XmlSchemaAttribute attribute) {
        myXmlTextWriter.WriteStartAttribute(attribute.QualifiedName.Name, attribute.QualifiedName.Namespace);
        // The examples value
        WriteExampleValue(myXmlTextWriter, attribute.AttributeType);
        myXmlTextWriter.WriteEndAttribute();
    }

    // Write example particles. Unbounded/huge MaxOccurs is capped at 5 sample repetitions.
    void WriteExampleParticle(XmlTextWriter myXmlTextWriter, XmlSchemaParticle particle) {
        Decimal max;
        if (particle.MaxOccurs == -1 || particle.MaxOccurs > 10000)
            max = 5;
        else
            max = particle.MaxOccurs;
        for (int i = 0; i < max; i++) {
            if (particle is XmlSchemaElement)
                WriteExampleElement(myXmlTextWriter, (XmlSchemaElement)particle);
            else if (particle is XmlSchemaSequence) {
                foreach (XmlSchemaParticle particle1 in ((XmlSchemaSequence)particle).Items)
                    WriteExampleParticle(myXmlTextWriter, particle1);
            }
            else
                Console.WriteLine("Not Implemented for this type: {0}", particle.ToString());
        }
    }

    // Write a placeholder text value appropriate for the mapped CLR type.
    void WriteExampleValue(XmlTextWriter myXmlTextWriter, object schemaType) {
        XmlSchemaDatatype datatype = (schemaType is XmlSchemaSimpleType) ? ((XmlSchemaSimpleType)schemaType).Datatype : (XmlSchemaDatatype)schemaType;
        // Consult the XSD to CLR conversion table for the correct type mappings
        Type type = datatype.ValueType;
        if (type == typeof(bool))
            myXmlTextWriter.WriteString("true");
        else if (type == typeof(int) || type == typeof(long))
            myXmlTextWriter.WriteString("100");
        else if (type == typeof(float) || type == typeof(decimal))
            myXmlTextWriter.WriteString("279.42");
        else if (type == typeof(System.Xml.XmlQualifiedName))
            myXmlTextWriter.WriteString("qualified_name");
        else if (type == typeof(DateTime))
            myXmlTextWriter.WriteString("12-12-2001");
        else if (type == typeof(string))
            myXmlTextWriter.WriteString("ExampleString");
        // Handle the 'xsd:positiveInteger' XSD type in the SOMsample.xsd
        else if (type == typeof(System.UInt64))
            //positiveInteger
            myXmlTextWriter.WriteString("42789");
        else
            myXmlTextWriter.WriteString("Not Implemented for this datatype: " + datatype.ToString());
    }
}
}
| |
// UrlRewriter - A .NET URL Rewriter module
// Version 2.0
//
// Copyright 2011 Intelligencia
// Copyright 2011 Seth Yates
//
using System;
using System.IO;
using System.Net;
using System.Web;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using Intelligencia.UrlRewriter.Configuration;
using Intelligencia.UrlRewriter.Utilities;
namespace Intelligencia.UrlRewriter
{
/// <summary>
/// The core RewriterEngine class.
/// Drives the rewrite pipeline: runs the configured rules against the raw URL,
/// appends headers/cookies, then either rewrites the path, redirects, or
/// raises an error, and finally publishes results as HttpContext items.
/// </summary>
public class RewriterEngine
{
    // (char)-1 — the value StringReader.Read() returns at end-of-input when
    // cast to char; used as the sentinel in the Expand/Reduce scanners below.
    private const char EndChar = (char)65535;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="httpContext">The HTTP context facade.</param>
    /// <param name="configurationManager">The configuration manager facade.</param>
    /// <param name="configuration">The URL rewriter configuration.</param>
    /// <exception cref="ArgumentNullException">Any argument is null.</exception>
    public RewriterEngine(
        IHttpContext httpContext,
        IConfigurationManager configurationManager,
        IRewriterConfiguration configuration)
    {
        if (httpContext == null)
        {
            throw new ArgumentNullException("httpContext");
        }
        if (configurationManager == null)
        {
            throw new ArgumentNullException("configurationManager");
        }
        if (configuration == null)
        {
            throw new ArgumentNullException("configuration");
        }
        _httpContext = httpContext;
        _configurationManager = configurationManager;
        _configuration = configuration;
    }

    /// <summary>
    /// Resolves an Application-path relative location
    /// (replaces every "~/" with the application path).
    /// </summary>
    /// <param name="location">The location</param>
    /// <returns>The absolute location.</returns>
    public string ResolveLocation(string location)
    {
        if (location == null)
        {
            throw new ArgumentNullException("location");
        }
        string appPath = _httpContext.ApplicationPath;
        // Root application path is "/" already; only longer paths need the
        // trailing slash so "~/x" becomes "/app/x".
        if (appPath.Length > 1)
        {
            appPath += "/";
        }
        return location.Replace("~/", appPath);
    }

    /// <summary>
    /// Performs the rewriting.
    /// Pipeline: build context -> run rules -> append headers/cookies ->
    /// rewrite path (2xx), redirect (3xx) or handle error (4xx/5xx) ->
    /// publish context items.
    /// </summary>
    public void Rewrite()
    {
        // NOTE(review): "+" is treated as a space in the incoming raw URL —
        // presumably to undo form-style encoding; confirm for literal '+' paths.
        string originalUrl = _httpContext.RawUrl.Replace("+", " ");
        RawUrl = originalUrl;
        _configuration.Logger.Debug(MessageProvider.FormatString(Message.StartedProcessing, originalUrl));
        // Create the context
        RewriteContext context = new RewriteContext(this, originalUrl, _httpContext, _configurationManager);
        // Process each rule.
        ProcessRules(context);
        // Append any headers defined.
        AppendHeaders(context);
        // Append any cookies defined.
        AppendCookies(context);
        // Rewrite the path if the location has changed.
        _httpContext.SetStatusCode((int)context.StatusCode);
        if ((context.Location != originalUrl) && ((int)context.StatusCode < 400))
        {
            if ((int)context.StatusCode < 300)
            {
                // Successful status if less than 300
                _configuration.Logger.Info(MessageProvider.FormatString(Message.RewritingXtoY, _httpContext.RawUrl, context.Location));
                // To verify that the url exists on this server:
                // VerifyResultExists(context);
                // To ensure that directories are rewritten to their default document:
                // HandleDefaultDocument(context);
                _httpContext.RewritePath(context.Location);
            }
            else
            {
                // Redirection
                _configuration.Logger.Info(MessageProvider.FormatString(Message.RedirectingXtoY, _httpContext.RawUrl, context.Location));
                _httpContext.SetRedirectLocation(context.Location);
            }
        }
        else if ((int)context.StatusCode >= 400)
        {
            HandleError(context);
        }
        // To ensure that directories are rewritten to their default document:
        // else if (HandleDefaultDocument(context))
        // {
        //  _contextFacade.RewritePath(context.Location);
        // }
        // Sets the context items.
        SetContextItems(context);
    }

    /// <summary>
    /// Expands the given input based on the current context.
    /// Scans for '$' and delegates each occurrence to Reduce; all other
    /// characters are copied through verbatim.
    /// </summary>
    /// <param name="context">The current context</param>
    /// <param name="input">The input to expand.</param>
    /// <returns>The expanded input</returns>
    public string Expand(RewriteContext context, string input)
    {
        if (context == null)
        {
            throw new ArgumentNullException("context");
        }
        if (input == null)
        {
            throw new ArgumentNullException("input");
        }
        /* replacement :- $n
         * | ${[a-zA-Z0-9\-]+}
         * | ${fn( <replacement> )}
         * | ${<replacement-or-id>:<replacement-or-value>:<replacement-or-value>}
         *
         * replacement-or-id :- <replacement> | <id>
         * replacement-or-value :- <replacement> | <value>
         */
        /* $1 - regex replacement
         * ${propertyname}
         * ${map-name:value} map-name is replacement, value is replacement
         * ${map-name:value|default-value} map-name is replacement, value is replacement, default-value is replacement
         * ${fn(value)} value is replacement
         */
        using (StringReader reader = new StringReader(input))
        {
            using (StringWriter writer = new StringWriter())
            {
                char ch = (char)reader.Read();
                while (ch != EndChar)
                {
                    if (ch == '$')
                    {
                        writer.Write(Reduce(context, reader));
                    }
                    else
                    {
                        writer.Write(ch);
                    }
                    ch = (char)reader.Read();
                }
                return writer.GetStringBuilder().ToString();
            }
        }
    }

    // Runs each configured rule in order. A rule may stop processing entirely
    // or restart from the first rule; restarts are capped to avoid loops.
    private void ProcessRules(RewriteContext context)
    {
        const int MaxRestart = 10; // Controls the number of restarts so we don't get into an infinite loop
        IList<IRewriteAction> rewriteRules = _configuration.Rules;
        int restarts = 0;
        for (int i = 0; i < rewriteRules.Count; i++)
        {
            // If the rule is conditional, ensure the conditions are met.
            IRewriteCondition condition = rewriteRules[i] as IRewriteCondition;
            if (condition == null || condition.IsMatch(context))
            {
                // Execute the action.
                IRewriteAction action = rewriteRules[i];
                RewriteProcessing processing = action.Execute(context);
                // If the action is Stop, then break out of the processing loop
                if (processing == RewriteProcessing.StopProcessing)
                {
                    _configuration.Logger.Debug(MessageProvider.FormatString(Message.StoppingBecauseOfRule));
                    break;
                }
                else if (processing == RewriteProcessing.RestartProcessing)
                {
                    _configuration.Logger.Debug(MessageProvider.FormatString(Message.RestartingBecauseOfRule));
                    // Restart from the first rule.
                    // NOTE(review): i = 0 here, then the loop's i++ runs, so the
                    // restart effectively resumes at rule index 1 — confirm intent.
                    i = 0;
                    if (++restarts > MaxRestart)
                    {
                        throw new InvalidOperationException(MessageProvider.FormatString(Message.TooManyRestarts));
                    }
                }
            }
        }
    }

    // If the rewritten location is a directory on this host, replace it with
    // the first configured default document that exists there.
    private bool HandleDefaultDocument(RewriteContext context)
    {
        Uri uri = new Uri(_httpContext.RequestUrl, context.Location);
        UriBuilder b = new UriBuilder(uri);
        b.Path += "/";
        uri = b.Uri;
        if (uri.Host == _httpContext.RequestUrl.Host)
        {
            string filename = _httpContext.MapPath(uri.AbsolutePath);
            if (Directory.Exists(filename))
            {
                foreach (string document in RewriterConfiguration.Current.DefaultDocuments)
                {
                    string pathName = Path.Combine(filename, document);
                    if (File.Exists(pathName))
                    {
                        context.Location = new Uri(uri, document).AbsolutePath;
                        return true;
                    }
                }
            }
        }
        return false;
    }

    // Optional check (currently disabled in Rewrite): 404 a successful rewrite
    // whose target file does not exist on this host.
    private void VerifyResultExists(RewriteContext context)
    {
        if ((String.Compare(context.Location, _httpContext.RawUrl) != 0) &&
            ((int)context.StatusCode < 300))
        {
            Uri uri = new Uri(_httpContext.RequestUrl, context.Location);
            if (uri.Host == _httpContext.RequestUrl.Host)
            {
                string filename = _httpContext.MapPath(uri.AbsolutePath);
                if (!File.Exists(filename))
                {
                    _configuration.Logger.Debug(MessageProvider.FormatString(Message.ResultNotFound, filename));
                    context.StatusCode = HttpStatusCode.NotFound;
                }
                else
                {
                    HandleDefaultDocument(context);
                }
            }
        }
    }

    // Dispatches an error status to the configured handler for that code, or
    // throws an HttpException when no handler is registered.
    private void HandleError(RewriteContext context)
    {
        // Return the status code.
        _httpContext.SetStatusCode((int)context.StatusCode);
        // Get the error handler if there is one.
        if (_configuration.ErrorHandlers.ContainsKey((int)context.StatusCode))
        {
            IRewriteErrorHandler handler = _configuration.ErrorHandlers[(int)context.StatusCode];
            try
            {
                _configuration.Logger.Debug(MessageProvider.FormatString(Message.CallingErrorHandler));
                // Execute the error handler.
                _httpContext.HandleError(handler);
            }
            catch (HttpException)
            {
                // HttpExceptions carry a status and are meaningful upstream — rethrow as-is.
                throw;
            }
            catch (Exception exc)
            {
                // A failing error handler becomes a 500.
                _configuration.Logger.Fatal(exc.Message, exc);
                throw new HttpException((int)HttpStatusCode.InternalServerError, HttpStatusCode.InternalServerError.ToString());
            }
        }
        else
        {
            throw new HttpException((int)context.StatusCode, context.StatusCode.ToString());
        }
    }

    // Copies headers accumulated in the context onto the actual response.
    private void AppendHeaders(RewriteContext context)
    {
        foreach (string headerKey in context.ResponseHeaders)
        {
            _httpContext.SetResponseHeader(headerKey, context.ResponseHeaders[headerKey]);
        }
    }

    // Copies cookies accumulated in the context onto the actual response.
    private void AppendCookies(RewriteContext context)
    {
        for (int i = 0; i < context.ResponseCookies.Count; i++)
        {
            _httpContext.SetResponseCookie(context.ResponseCookies[i]);
        }
    }

    // Publishes the original/final query strings and every rule-set property
    // as "Rewriter.*" HttpContext items for downstream handlers.
    private void SetContextItems(RewriteContext context)
    {
        OriginalQueryString = new Uri(_httpContext.RequestUrl, _httpContext.RawUrl).Query.Replace("?", "");
        QueryString = new Uri(_httpContext.RequestUrl, context.Location).Query.Replace("?", "");
        // Add in the properties as context items, so these will be accessible to the handler
        foreach (string key in context.Properties.Keys)
        {
            _httpContext.SetItem(String.Format("Rewriter.{0}", key), context.Properties[key]);
        }
    }

    /// <summary>
    /// The raw url. Backed by an HttpContext item so it survives for the
    /// whole request rather than living on this instance.
    /// </summary>
    public string RawUrl
    {
        get { return (string) _httpContext.GetItem(ContextRawUrl); }
        set { _httpContext.SetItem(ContextRawUrl, value); }
    }

    /// <summary>
    /// The original query string.
    /// </summary>
    public string OriginalQueryString
    {
        get { return (string) _httpContext.GetItem(ContextOriginalQueryString); }
        set { _httpContext.SetItem(ContextOriginalQueryString, value); }
    }

    /// <summary>
    /// The final querystring, after rewriting.
    /// </summary>
    public string QueryString
    {
        get { return (string) _httpContext.GetItem(ContextQueryString); }
        set { _httpContext.SetItem(ContextQueryString, value); }
    }

    // Consumes one replacement token following a '$' from the reader and
    // returns its expansion:
    //   $n / $nn      -> numbered regex group from the last match
    //   $<name>       -> named regex group from the last match
    //   ${map:v|def}  -> transform lookup with optional default
    //   ${fn(arg)}    -> transform function application
    //   ${prop}       -> context property
    //   anything else -> the character itself (e.g. "$$" yields "$")
    // Recurses to support nested replacements inside <...> and {...}.
    private string Reduce(RewriteContext context, StringReader reader)
    {
        string result;
        char ch = (char)reader.Read();
        if (Char.IsDigit(ch))
        {
            // Group numbers are limited to two digits.
            string num = ch.ToString();
            if (Char.IsDigit((char)reader.Peek()))
            {
                ch = (char)reader.Read();
                num += ch.ToString();
            }
            if (context.LastMatch != null)
            {
                Group group = context.LastMatch.Groups[Convert.ToInt32(num)];
                result = (group == null) ? String.Empty : group.Value;
            }
            else
            {
                result = String.Empty;
            }
        }
        else if (ch == '<')
        {
            // Named group reference: read up to '>' (expanding nested '$').
            string expr;
            using (StringWriter writer = new StringWriter())
            {
                ch = (char)reader.Read();
                while (ch != '>' && ch != EndChar)
                {
                    if (ch == '$')
                    {
                        writer.Write(Reduce(context, reader));
                    }
                    else
                    {
                        writer.Write(ch);
                    }
                    ch = (char)reader.Read();
                }
                expr = writer.GetStringBuilder().ToString();
            }
            if (context.LastMatch != null)
            {
                Group group = context.LastMatch.Groups[expr];
                result = (group == null) ? String.Empty : group.Value;
            }
            else
            {
                result = String.Empty;
            }
        }
        else if (ch == '{')
        {
            // ${...}: may be a map lookup (contains ':'), a function call
            // (contains '('), or a plain property reference.
            string expr;
            bool isMap = false;
            bool isFunction = false;
            using (StringWriter writer = new StringWriter())
            {
                ch = (char)reader.Read();
                while (ch != '}' && ch != EndChar)
                {
                    if (ch == '$')
                    {
                        writer.Write(Reduce(context, reader));
                    }
                    else
                    {
                        if (ch == ':') isMap = true;
                        else if (ch == '(') isFunction = true;
                        writer.Write(ch);
                    }
                    ch = (char)reader.Read();
                }
                expr = writer.GetStringBuilder().ToString();
            }
            if (isMap)
            {
                // map-name:value with optional |default-value.
                Match match = Regex.Match(expr, @"^([^\:]+)\:([^\|]+)(\|(.+))?$");
                string mapName = match.Groups[1].Value;
                string mapArgument = match.Groups[2].Value;
                string mapDefault = match.Groups[4].Value;
                result = _configuration.TransformFactory.GetTransform(mapName).ApplyTransform(mapArgument);
                if (result == null)
                {
                    result = mapDefault;
                }
            }
            else if (isFunction)
            {
                Match match = Regex.Match(expr, @"^([^\(]+)\((.+)\)$");
                string functionName = match.Groups[1].Value;
                string functionArgument = match.Groups[2].Value;
                IRewriteTransform tx = _configuration.TransformFactory.GetTransform(functionName);
                // Unknown function names fall through as literal text.
                result = (tx == null) ? expr : tx.ApplyTransform(functionArgument);
            }
            else
            {
                result = context.Properties[expr];
            }
        }
        else
        {
            // Not a recognized replacement: emit the character literally.
            result = ch.ToString();
        }
        return result;
    }

    // Keys used to stash engine state in HttpContext items.
    private const string ContextQueryString = "UrlRewriter.NET.QueryString";
    private const string ContextOriginalQueryString = "UrlRewriter.NET.OriginalQueryString";
    private const string ContextRawUrl = "UrlRewriter.NET.RawUrl";

    private IRewriterConfiguration _configuration;
    private IHttpContext _httpContext;
    private IConfigurationManager _configurationManager;
}
}
| |
#region License
/*
* Copyright 2002-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#endregion
#region Imports
using System;
using System.Collections;
using NUnit.Framework;
using Spring.Core;
using Spring.Objects.Support;
#endregion
namespace Spring.Objects.Factory.Config
{
/// <summary>
/// Set of unit tests for the MethodInvokingFactoryObject.
/// </summary>
/// <author>Colin Sampaleanu</author>
/// <author>Simon White (.NET)</author>
[TestFixture]
public class MethodInvokingFactoryObjectTests
{
[Test]
public void InvokeGenericMethod()
{
    // A generic target method is selected by embedding the type argument in
    // the configured method name; the factory should produce a TestObject.
    // FIX: removed the unused local 'tc1' — this test never uses an instance target.
    MethodInvokingFactoryObject mcfo = new MethodInvokingFactoryObject();
    mcfo.TargetType = typeof(Activator);
    mcfo.TargetMethod = "CreateInstance<Spring.Objects.TestObject>";
    mcfo.AfterPropertiesSet();
    object obj = mcfo.GetObject();
    Assert.IsNotNull(obj);
    Assert.IsTrue(obj is TestObject);
}
[Test]
public void GetSingletonNonStatic()
{
    // A singleton factory wrapping an instance method must invoke the target
    // once and return the cached result on subsequent calls.
    TestClass1 target = new TestClass1();
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetObject = target;
    factory.TargetMethod = "Method1";
    factory.AfterPropertiesSet();

    int first = (int) factory.GetObject();
    int second = (int) factory.GetObject();

    Assert.IsTrue(first == 1);
    Assert.IsTrue(second == 1);
    Assert.IsTrue(factory.IsSingleton);
}
[Test]
public void GetNonSingletonNonStatic()
{
    // In prototype (non-singleton) scope every GetObject() call re-invokes the
    // target method, so the per-instance counter keeps advancing.
    TestClass1 target = new TestClass1();
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetObject = target;
    factory.TargetMethod = "Method1";
    factory.IsSingleton = false;
    factory.AfterPropertiesSet();
    Assert.IsTrue((int) factory.GetObject() == 1);
    Assert.IsTrue((int) factory.GetObject() == 2);
    Assert.IsFalse(factory.IsSingleton);
}
[Test]
public void GetSingletonStatic()
{
    // Singleton scope with a static target method: the cached first result is
    // returned on every call. The shared counter is reset so the test is
    // independent of execution order.
    TestClass1._staticField1 = 0;
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "StaticMethod1";
    factory.AfterPropertiesSet();
    Assert.IsTrue((int) factory.GetObject() == 1);
    Assert.IsTrue((int) factory.GetObject() == 1);
    Assert.IsTrue(factory.IsSingleton);
}
[Test]
public void GetNonSingletonStatic()
{
    // Prototype scope with a static target method: each GetObject() call
    // re-invokes StaticMethod1 and advances the shared counter.
    TestClass1._staticField1 = 0;
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "StaticMethod1";
    factory.IsSingleton = false;
    factory.AfterPropertiesSet();
    Assert.IsTrue((int) factory.GetObject() == 1);
    Assert.IsTrue((int) factory.GetObject() == 2);
    Assert.IsFalse(factory.IsSingleton);
}
[Test]
public void InvokingAMethodThatHasAVoidReturnTypeReturnsNullPlaceHolder()
{
    // A void-returning target method must yield the MethodInvoker.Void
    // placeholder object rather than null.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "VoidRetvalMethod";
    factory.AfterPropertiesSet();
    Assert.AreEqual(MethodInvoker.Void, factory.GetObject());
}
[Test]
public void GetSupertypesMatchNumArgs()
{
    // Arguments whose runtime types are subtypes of the declared parameter
    // types (ArrayList for ICollection/IList) must still resolve the method.
    TestClass1._staticField1 = 0;
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "Supertypes";
    factory.Arguments = new object[] { new ArrayList(), new ArrayList(), "hello" };
    // should pass
    factory.AfterPropertiesSet();
}
[Test]
public void GetSupertypesTooManyArgs()
{
    // Both Supertypes2 overloads can accept this argument list, so method
    // resolution is ambiguous and initialization must fail.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "Supertypes2";
    factory.Arguments = new object[] { new ArrayList(), new ArrayList(), "hello", "bogus" };
    Assert.Throws<ArgumentException>(() => factory.AfterPropertiesSet(), "Unable to determine which exact method to call; found '2' matches.");
}
[Test]
public void GetMisMatchedArgumentTypes()
{
    // Strings cannot satisfy the ICollection/IList parameters of Supertypes,
    // so initialization must fail with a type mismatch.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "Supertypes";
    factory.Arguments = new object[] { "1", "2", "3" };
    Assert.Throws<TypeMismatchException>(() => factory.AfterPropertiesSet());
}
[Test]
public void GetObjectType()
{
// ObjectType reflects the resolved target method's return type once
// AfterPropertiesSet() has run: here Method1 returns int.
TestClass1 tc1 = new TestClass1();
MethodInvokingFactoryObject mcfo = new MethodInvokingFactoryObject();
mcfo.TargetObject = tc1;
mcfo.TargetMethod = "Method1";
mcfo.AfterPropertiesSet();
Assert.IsTrue(typeof (int).Equals(mcfo.ObjectType));
// A void target method reports the type of the MethodInvoker.Void placeholder.
mcfo = new MethodInvokingFactoryObject();
mcfo.TargetType = typeof (TestClass1);
mcfo.TargetMethod = "VoidRetvalMethod";
mcfo.AfterPropertiesSet();
Type objType = mcfo.ObjectType;
Assert.IsTrue(objType.Equals(MethodInvoker.Void.GetType()));
// verify that we can call a method with args that are subtypes of the
// target method arg types
TestClass1._staticField1 = 0;
mcfo = new MethodInvokingFactoryObject();
mcfo.TargetType = typeof (TestClass1);
mcfo.TargetMethod = "Supertypes";
mcfo.Arguments = new Object[] {new ArrayList(), new ArrayList(), "hello"};
mcfo.AfterPropertiesSet();
objType = mcfo.ObjectType;
}
[Test]
public void ObjectTypeIsNullIfAfterPropertiesSetHasNotYetBeenInvoked()
{
    // Until AfterPropertiesSet() resolves the target method, the factory
    // cannot know its product type.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "VoidRetvalMethod";
    Assert.IsNull(factory.ObjectType,
        "ObjectType property value must only be set to a non null value " +
        "AFTER the AfterPropertiesSet() method has been invoked.");
}
[Test]
public void BailsIfTheTargetMethodPropertyAintSet()
{
    // TargetMethod is mandatory; AfterPropertiesSet() must reject its absence.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    Assert.Throws<ArgumentException>(() => factory.AfterPropertiesSet(), "The 'TargetMethod' property is required.");
}
[Test]
public void AfterPropertiesSetBogusMethod()
{
    // A method name that does not exist on the target object must be rejected.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetObject = this;
    factory.TargetMethod = "whatever";
    Assert.Throws<MissingMethodException>(() => factory.AfterPropertiesSet());
}
[Test]
public void AfterPropertiesSetBogusStaticMethod()
{
    // A dotted, nonexistent static method name must be rejected as missing.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "some.bogus.Method.name";
    Assert.Throws<MissingMethodException>(() => factory.AfterPropertiesSet());
}
[Test]
public void AfterPropertiesSetStaticMethodMissingArgs()
{
    // Per the test name: resolving Method1 through TargetType without the
    // required setup must fail with an ArgumentException.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetType = typeof(TestClass1);
    factory.TargetMethod = "Method1";
    Assert.Throws<ArgumentException>(() => factory.AfterPropertiesSet());
}
[Test]
public void AfterPropertiesSetMissingMethod()
{
    // Only a target object is configured, no TargetMethod: must fail.
    MethodInvokingFactoryObject factory = new MethodInvokingFactoryObject();
    factory.TargetObject = this;
    Assert.Throws<ArgumentException>(() => factory.AfterPropertiesSet());
}
[Test]
public void InvokeWithNullArgument()
{
    // A null element in Arguments must still match the single-parameter
    // NullArgument method and invoke cleanly.
    MethodInvoker invoker = new MethodInvoker();
    invoker.TargetType = GetType();
    invoker.TargetMethod = "NullArgument";
    invoker.Arguments = new object[] { null };
    invoker.Prepare();
    invoker.Invoke();
}
// Invocation target for InvokeWithNullArgument; intentionally a no-op.
public static void NullArgument(object arg)
{
}
// a test class to work with
public class TestClass1
{
    // Shared counter advanced by StaticMethod1; tests reset it explicitly.
    public static int _staticField1;
    // Per-instance counter advanced by Method1.
    public int _field1 = 0;

    // Returns 1, 2, 3, ... per instance.
    public int Method1()
    {
        _field1 = _field1 + 1;
        return _field1;
    }

    // Returns 1, 2, 3, ... across all callers (shared static counter).
    public static int StaticMethod1()
    {
        _staticField1 = _staticField1 + 1;
        return _staticField1;
    }

    // Intentionally empty: exercises void return handling.
    public static void VoidRetvalMethod()
    {
    }

    // Intentionally empty: exercises argument-subtype matching.
    public static void Supertypes(ICollection c, IList l, string s)
    {
    }

    // Intentionally empty: one of two deliberately ambiguous overloads.
    public static void Supertypes2(ICollection c, IList l, string s, object i)
    {
    }

    // Intentionally empty: one of two deliberately ambiguous overloads.
    public static void Supertypes2(ICollection c, IList l, string s, string s2)
    {
    }
}
}
}
| |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
namespace TestCases.SS.UserModel
{
using System;
using NUnit.Framework;
using NPOI.SS;
using NPOI.SS.UserModel;
using NPOI.SS.Util;
using TestCases.SS;
using NPOI.HSSF.Record.CF;
using NPOI.HSSF.Util;
/**
* @author Dmitriy Kumshayev
* @author Yegor Kozlov
*/
[TestFixture]
public class BaseTestConditionalFormatting
{
private ITestDataProvider _testDataProvider;
// Default constructor: uses the HSSF (.xls) test data provider.
public BaseTestConditionalFormatting()
{
_testDataProvider = TestCases.HSSF.HSSFITestDataProvider.Instance;
}
// Allows derived fixtures (e.g. an XSSF variant) to supply their own data provider.
public BaseTestConditionalFormatting(ITestDataProvider TestDataProvider)
{
_testDataProvider = TestDataProvider;
}
[Test]
public void TestBasic()
{
// Exercises argument validation on ISheetConditionalFormatting:
// out-of-range indexes, null/empty regions and rules, and the 3-rule limit.
IWorkbook wb = _testDataProvider.CreateWorkbook();
ISheet sh = wb.CreateSheet();
ISheetConditionalFormatting sheetCF = sh.SheetConditionalFormatting;
Assert.AreEqual(0, sheetCF.NumConditionalFormattings);
// Reading a CF at a nonexistent index must throw.
try
{
Assert.IsNull(sheetCF.GetConditionalFormattingAt(0));
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("Specified CF index 0 is outside the allowable range"));
}
// Removing a CF at a nonexistent index must throw too.
try
{
sheetCF.RemoveConditionalFormatting(0);
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("Specified CF index 0 is outside the allowable range"));
}
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule("1");
IConditionalFormattingRule rule2 = sheetCF.CreateConditionalFormattingRule("2");
IConditionalFormattingRule rule3 = sheetCF.CreateConditionalFormattingRule("3");
IConditionalFormattingRule rule4 = sheetCF.CreateConditionalFormattingRule("4");
// Null regions are rejected.
try
{
sheetCF.AddConditionalFormatting(null, rule1);
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("regions must not be null"));
}
// A null rule is rejected.
try
{
sheetCF.AddConditionalFormatting(
new CellRangeAddress[] { CellRangeAddress.ValueOf("A1:A3") },
(IConditionalFormattingRule)null);
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("cfRules must not be null"));
}
// An empty rule array is rejected.
try
{
sheetCF.AddConditionalFormatting(
new CellRangeAddress[] { CellRangeAddress.ValueOf("A1:A3") },
new IConditionalFormattingRule[0]);
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("cfRules must not be empty"));
}
// More than three rules per formatting are rejected.
try
{
sheetCF.AddConditionalFormatting(
new CellRangeAddress[] { CellRangeAddress.ValueOf("A1:A3") },
new IConditionalFormattingRule[] { rule1, rule2, rule3, rule4 });
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("Number of rules must not exceed 3"));
}
}
/**
* Test format conditions based on a bool formula.
* Also verifies that adjacent single-cell regions are merged into one range
* while non-adjacent ones are kept separate.
*/
[Test]
public void TestBooleanFormulaConditions()
{
IWorkbook wb = _testDataProvider.CreateWorkbook();
ISheet sh = wb.CreateSheet();
ISheetConditionalFormatting sheetCF = sh.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule("SUM(A1:A5)>10");
Assert.AreEqual(ConditionType.Formula, rule1.ConditionType);
Assert.AreEqual("SUM(A1:A5)>10", rule1.Formula1);
// B1 and C3 are not adjacent, so both ranges survive unmerged.
int formatIndex1 = sheetCF.AddConditionalFormatting(
new CellRangeAddress[]{
CellRangeAddress.ValueOf("B1"),
CellRangeAddress.ValueOf("C3"),
}, rule1);
Assert.AreEqual(0, formatIndex1);
Assert.AreEqual(1, sheetCF.NumConditionalFormattings);
CellRangeAddress[] ranges1 = sheetCF.GetConditionalFormattingAt(formatIndex1).GetFormattingRanges();
Assert.AreEqual(2, ranges1.Length);
Assert.AreEqual("B1", ranges1[0].FormatAsString());
Assert.AreEqual("C3", ranges1[1].FormatAsString());
// adjacent Address are merged
int formatIndex2 = sheetCF.AddConditionalFormatting(
new CellRangeAddress[]{
CellRangeAddress.ValueOf("B1"),
CellRangeAddress.ValueOf("B2"),
CellRangeAddress.ValueOf("B3"),
}, rule1);
Assert.AreEqual(1, formatIndex2);
Assert.AreEqual(2, sheetCF.NumConditionalFormattings);
CellRangeAddress[] ranges2 = sheetCF.GetConditionalFormattingAt(formatIndex2).GetFormattingRanges();
Assert.AreEqual(1, ranges2.Length);
Assert.AreEqual("B1:B3", ranges2[0].FormatAsString());
}
[Test]
public void TestSingleFormulaConditions()
{
// Creates one cell-value-is rule per comparison operator and checks that
// ConditionType, Formula1/Formula2 and ComparisonOperation round-trip.
IWorkbook wb = _testDataProvider.CreateWorkbook();
ISheet sh = wb.CreateSheet();
ISheetConditionalFormatting sheetCF = sh.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.Equal, "SUM(A1:A5)+10");
Assert.AreEqual(ConditionType.CellValueIs, rule1.ConditionType);
Assert.AreEqual("SUM(A1:A5)+10", rule1.Formula1);
Assert.AreEqual(ComparisonOperator.Equal, rule1.ComparisonOperation);
IConditionalFormattingRule rule2 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.NotEqual, "15");
Assert.AreEqual(ConditionType.CellValueIs, rule2.ConditionType);
Assert.AreEqual("15", rule2.Formula1);
Assert.AreEqual(ComparisonOperator.NotEqual, rule2.ComparisonOperation);
// rule3 deliberately duplicates rule2's parameters.
IConditionalFormattingRule rule3 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.NotEqual, "15");
Assert.AreEqual(ConditionType.CellValueIs, rule3.ConditionType);
Assert.AreEqual("15", rule3.Formula1);
Assert.AreEqual(ComparisonOperator.NotEqual, rule3.ComparisonOperation);
IConditionalFormattingRule rule4 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.GreaterThan, "0");
Assert.AreEqual(ConditionType.CellValueIs, rule4.ConditionType);
Assert.AreEqual("0", rule4.Formula1);
Assert.AreEqual(ComparisonOperator.GreaterThan, rule4.ComparisonOperation);
IConditionalFormattingRule rule5 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.LessThan, "0");
Assert.AreEqual(ConditionType.CellValueIs, rule5.ConditionType);
Assert.AreEqual("0", rule5.Formula1);
Assert.AreEqual(ComparisonOperator.LessThan, rule5.ComparisonOperation);
IConditionalFormattingRule rule6 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.GreaterThanOrEqual, "0");
Assert.AreEqual(ConditionType.CellValueIs, rule6.ConditionType);
Assert.AreEqual("0", rule6.Formula1);
Assert.AreEqual(ComparisonOperator.GreaterThanOrEqual, rule6.ComparisonOperation);
IConditionalFormattingRule rule7 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.LessThanOrEqual, "0");
Assert.AreEqual(ConditionType.CellValueIs, rule7.ConditionType);
Assert.AreEqual("0", rule7.Formula1);
Assert.AreEqual(ComparisonOperator.LessThanOrEqual, rule7.ComparisonOperation);
// Between/NotBetween take two formulas.
IConditionalFormattingRule rule8 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.Between, "0", "5");
Assert.AreEqual(ConditionType.CellValueIs, rule8.ConditionType);
Assert.AreEqual("0", rule8.Formula1);
Assert.AreEqual("5", rule8.Formula2);
Assert.AreEqual(ComparisonOperator.Between, rule8.ComparisonOperation);
IConditionalFormattingRule rule9 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.NotBetween, "0", "5");
Assert.AreEqual(ConditionType.CellValueIs, rule9.ConditionType);
Assert.AreEqual("0", rule9.Formula1);
Assert.AreEqual("5", rule9.Formula2);
Assert.AreEqual(ComparisonOperator.NotBetween, rule9.ComparisonOperation);
}
[Test]
public void TestCopy()
{
// Copies a conditional formatting from one sheet to another and verifies
// both rules survive with their formulas, operators and types intact.
IWorkbook wb = _testDataProvider.CreateWorkbook();
ISheet sheet1 = wb.CreateSheet();
ISheet sheet2 = wb.CreateSheet();
ISheetConditionalFormatting sheet1CF = sheet1.SheetConditionalFormatting;
ISheetConditionalFormatting sheet2CF = sheet2.SheetConditionalFormatting;
Assert.AreEqual(0, sheet1CF.NumConditionalFormattings);
Assert.AreEqual(0, sheet2CF.NumConditionalFormattings);
IConditionalFormattingRule rule1 = sheet1CF.CreateConditionalFormattingRule(
ComparisonOperator.Equal, "SUM(A1:A5)+10");
IConditionalFormattingRule rule2 = sheet1CF.CreateConditionalFormattingRule(
ComparisonOperator.NotEqual, "15");
// adjacent Address are merged
int formatIndex = sheet1CF.AddConditionalFormatting(
new CellRangeAddress[]{
CellRangeAddress.ValueOf("A1:A5"),
CellRangeAddress.ValueOf("C1:C5")
}, rule1, rule2);
Assert.AreEqual(0, formatIndex);
Assert.AreEqual(1, sheet1CF.NumConditionalFormattings);
Assert.AreEqual(0, sheet2CF.NumConditionalFormattings);
// Copy sheet1's formatting onto sheet2.
sheet2CF.AddConditionalFormatting(sheet1CF.GetConditionalFormattingAt(formatIndex));
Assert.AreEqual(1, sheet2CF.NumConditionalFormattings);
IConditionalFormatting sheet2cf = sheet2CF.GetConditionalFormattingAt(0);
Assert.AreEqual(2, sheet2cf.NumberOfRules);
Assert.AreEqual("SUM(A1:A5)+10", sheet2cf.GetRule(0).Formula1);
Assert.AreEqual(ComparisonOperator.Equal, sheet2cf.GetRule(0).ComparisonOperation);
Assert.AreEqual(ConditionType.CellValueIs, sheet2cf.GetRule(0).ConditionType);
Assert.AreEqual("15", sheet2cf.GetRule(1).Formula1);
Assert.AreEqual(ComparisonOperator.NotEqual, sheet2cf.GetRule(1).ComparisonOperation);
Assert.AreEqual(ConditionType.CellValueIs, sheet2cf.GetRule(1).ConditionType);
}
[Test]
public void TestRemove()
{
// Adds and removes a conditional formatting twice, verifying the index
// becomes invalid (throws) after each removal.
IWorkbook wb = _testDataProvider.CreateWorkbook();
ISheet sheet1 = wb.CreateSheet();
ISheetConditionalFormatting sheetCF = sheet1.SheetConditionalFormatting;
Assert.AreEqual(0, sheetCF.NumConditionalFormattings);
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.Equal, "SUM(A1:A5)");
// adjacent Address are merged
int formatIndex = sheetCF.AddConditionalFormatting(
new CellRangeAddress[]{
CellRangeAddress.ValueOf("A1:A5")
}, rule1);
Assert.AreEqual(0, formatIndex);
Assert.AreEqual(1, sheetCF.NumConditionalFormattings);
sheetCF.RemoveConditionalFormatting(0);
Assert.AreEqual(0, sheetCF.NumConditionalFormattings);
// After removal, index 0 is invalid again.
try
{
Assert.IsNull(sheetCF.GetConditionalFormattingAt(0));
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("Specified CF index 0 is outside the allowable range"));
}
// Re-add and remove once more to check the cycle is repeatable.
formatIndex = sheetCF.AddConditionalFormatting(
new CellRangeAddress[]{
CellRangeAddress.ValueOf("A1:A5")
}, rule1);
Assert.AreEqual(0, formatIndex);
Assert.AreEqual(1, sheetCF.NumConditionalFormattings);
sheetCF.RemoveConditionalFormatting(0);
Assert.AreEqual(0, sheetCF.NumConditionalFormattings);
try
{
Assert.IsNull(sheetCF.GetConditionalFormattingAt(0));
Assert.Fail("expected exception");
}
catch (ArgumentException e)
{
Assert.IsTrue(e.Message.StartsWith("Specified CF index 0 is outside the allowable range"));
}
}
[Test]
public void TestCreateCF()
{
// Builds a two-rule conditional formatting (font, border and pattern
// formatting on rule1), adds it twice, removes one copy, and verifies the
// remaining CF round-trips all rule and range details.
IWorkbook workbook = _testDataProvider.CreateWorkbook();
ISheet sheet = workbook.CreateSheet();
String formula = "7";
ISheetConditionalFormatting sheetCF = sheet.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(formula);
IFontFormatting fontFmt = rule1.CreateFontFormatting();
fontFmt.SetFontStyle(true, false);
IBorderFormatting bordFmt = rule1.CreateBorderFormatting();
bordFmt.BorderBottom = (/*setter*/BorderStyle.Thin);
bordFmt.BorderTop = (/*setter*/BorderStyle.Thick);
bordFmt.BorderLeft = (/*setter*/BorderStyle.Dashed);
bordFmt.BorderRight = (/*setter*/BorderStyle.Dotted);
IPatternFormatting patternFmt = rule1.CreatePatternFormatting();
patternFmt.FillBackgroundColor = (/*setter*/HSSFColor.Yellow.Index);
IConditionalFormattingRule rule2 = sheetCF.CreateConditionalFormattingRule(ComparisonOperator.Between, "1", "2");
IConditionalFormattingRule[] cfRules =
{
rule1, rule2
};
short col = 1;
CellRangeAddress[] regions = {
new CellRangeAddress(0, 65535, col, col)
};
sheetCF.AddConditionalFormatting(regions, cfRules);
sheetCF.AddConditionalFormatting(regions, cfRules);
// Verification
Assert.AreEqual(2, sheetCF.NumConditionalFormattings);
sheetCF.RemoveConditionalFormatting(1);
Assert.AreEqual(1, sheetCF.NumConditionalFormattings);
IConditionalFormatting cf = sheetCF.GetConditionalFormattingAt(0);
Assert.IsNotNull(cf);
regions = cf.GetFormattingRanges();
Assert.IsNotNull(regions);
Assert.AreEqual(1, regions.Length);
CellRangeAddress r = regions[0];
Assert.AreEqual(1, r.FirstColumn);
Assert.AreEqual(1, r.LastColumn);
Assert.AreEqual(0, r.FirstRow);
Assert.AreEqual(65535, r.LastRow);
Assert.AreEqual(2, cf.NumberOfRules);
// rule1: formula rule with font/border/pattern formatting attached.
rule1 = cf.GetRule(0);
Assert.AreEqual("7", rule1.Formula1);
Assert.IsNull(rule1.Formula2);
IFontFormatting r1fp = rule1.GetFontFormatting();
Assert.IsNotNull(r1fp);
Assert.IsTrue(r1fp.IsItalic);
Assert.IsFalse(r1fp.IsBold);
IBorderFormatting r1bf = rule1.GetBorderFormatting();
Assert.IsNotNull(r1bf);
Assert.AreEqual(BorderStyle.Thin, r1bf.BorderBottom);
Assert.AreEqual(BorderStyle.Thick, r1bf.BorderTop);
Assert.AreEqual(BorderStyle.Dashed, r1bf.BorderLeft);
Assert.AreEqual(BorderStyle.Dotted, r1bf.BorderRight);
IPatternFormatting r1pf = rule1.GetPatternFormatting();
Assert.IsNotNull(r1pf);
// Assert.AreEqual(HSSFColor.Yellow.index,r1pf.FillBackgroundColor);
// rule2: between "1" and "2".
rule2 = cf.GetRule(1);
Assert.AreEqual("2", rule2.Formula2);
Assert.AreEqual("1", rule2.Formula1);
}
[Test]
public void TestClone()
{
    // Regression test for bug 45682: cloning a sheet that carries conditional
    // formatting used to fail with "needs to define a clone method".
    IWorkbook wb = _testDataProvider.CreateWorkbook();
    ISheet sheet = wb.CreateSheet();
    String formula = "7";
    ISheetConditionalFormatting sheetCF = sheet.SheetConditionalFormatting;
    IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(formula);
    IFontFormatting fontFmt = rule1.CreateFontFormatting();
    fontFmt.SetFontStyle(true, false);
    IPatternFormatting patternFmt = rule1.CreatePatternFormatting();
    patternFmt.FillBackgroundColor = (/*setter*/HSSFColor.Yellow.Index);
    IConditionalFormattingRule rule2 = sheetCF.CreateConditionalFormattingRule(ComparisonOperator.Between, "1", "2");
    IConditionalFormattingRule[] cfRules =
    {
        rule1, rule2
    };
    short col = 1;
    CellRangeAddress[] regions = {
        new CellRangeAddress(0, 65535, col, col)
    };
    sheetCF.AddConditionalFormatting(regions, cfRules);
    try
    {
        wb.CloneSheet(0);
    }
    catch (Exception e)
    {
        if (e.Message.IndexOf("needs to define a clone method") > 0)
        {
            Assert.Fail("Identified bug 45682");
        }
        // Rethrow with "throw;" to preserve the original stack trace
        // ("throw e;" would reset it).
        throw;
    }
    Assert.AreEqual(2, wb.NumberOfSheets);
}
[Test]
public void TestShiftRows()
{
// Verifies how row shifts interact with conditional formatting: a shift
// over the CF region destroys it, while shifts elsewhere only rewrite the
// rule formulas (possibly producing #REF!).
IWorkbook wb = _testDataProvider.CreateWorkbook();
ISheet sheet = wb.CreateSheet();
ISheetConditionalFormatting sheetCF = sheet.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(
ComparisonOperator.Between, "SUM(A10:A15)", "1+SUM(B16:B30)");
IFontFormatting fontFmt = rule1.CreateFontFormatting();
fontFmt.SetFontStyle(true, false);
IPatternFormatting patternFmt = rule1.CreatePatternFormatting();
patternFmt.FillBackgroundColor = (/*setter*/HSSFColor.Yellow.Index);
IConditionalFormattingRule[] cfRules = { rule1, };
CellRangeAddress[] regions = {
new CellRangeAddress(2, 4, 0, 0), // A3:A5
};
sheetCF.AddConditionalFormatting(regions, cfRules);
// This row-shift should destroy the CF region
sheet.ShiftRows(10, 20, -9);
Assert.AreEqual(0, sheetCF.NumConditionalFormattings);
// re-add the CF
sheetCF.AddConditionalFormatting(regions, cfRules);
// This row shift should only affect the formulas
sheet.ShiftRows(14, 17, 8);
IConditionalFormatting cf = sheetCF.GetConditionalFormattingAt(0);
Assert.AreEqual("SUM(A10:A23)", cf.GetRule(0).Formula1);
Assert.AreEqual("1+SUM(B24:B30)", cf.GetRule(0).Formula2);
// Shifting rows onto the referenced range invalidates it (#REF!).
sheet.ShiftRows(0, 8, 21);
cf = sheetCF.GetConditionalFormattingAt(0);
Assert.AreEqual("SUM(A10:A21)", cf.GetRule(0).Formula1);
Assert.AreEqual("1+SUM(#REF!)", cf.GetRule(0).Formula2);
}
// Parameterized helper (deliberately not a [Test] itself): opens the given
// sample workbook and verifies the three conditional formattings on its
// "CF" sheet — rules, ranges, comparison operators and font/pattern details.
public void TestRead(string sampleFile)
{
IWorkbook wb = _testDataProvider.OpenSampleWorkbook(sampleFile);
ISheet sh = wb.GetSheet("CF");
ISheetConditionalFormatting sheetCF = sh.SheetConditionalFormatting;
Assert.AreEqual(3, sheetCF.NumConditionalFormattings);
IConditionalFormatting cf1 = sheetCF.GetConditionalFormattingAt(0);
Assert.AreEqual(2, cf1.NumberOfRules);
CellRangeAddress[] regions1 = cf1.GetFormattingRanges();
Assert.AreEqual(1, regions1.Length);
Assert.AreEqual("A1:A8", regions1[0].FormatAsString());
// CF1 has two rules: values less than -3 are bold-italic red, values greater than 3 are green
IConditionalFormattingRule rule1 = cf1.GetRule(0);
Assert.AreEqual(ConditionType.CellValueIs, rule1.ConditionType);
Assert.AreEqual(ComparisonOperator.GreaterThan, rule1.ComparisonOperation);
Assert.AreEqual("3", rule1.Formula1);
Assert.IsNull(rule1.Formula2);
// Fills and borders are not Set
Assert.IsNull(rule1.GetPatternFormatting());
Assert.IsNull(rule1.GetBorderFormatting());
IFontFormatting fmt1 = rule1.GetFontFormatting();
// Assert.AreEqual(HSSFColor.GREEN.index, fmt1.FontColorIndex);
Assert.IsTrue(fmt1.IsBold);
Assert.IsFalse(fmt1.IsItalic);
IConditionalFormattingRule rule2 = cf1.GetRule(1);
Assert.AreEqual(ConditionType.CellValueIs, rule2.ConditionType);
Assert.AreEqual(ComparisonOperator.LessThan, rule2.ComparisonOperation);
Assert.AreEqual("-3", rule2.Formula1);
Assert.IsNull(rule2.Formula2);
Assert.IsNull(rule2.GetPatternFormatting());
Assert.IsNull(rule2.GetBorderFormatting());
IFontFormatting fmt2 = rule2.GetFontFormatting();
// Assert.AreEqual(HSSFColor.Red.index, fmt2.FontColorIndex);
Assert.IsTrue(fmt2.IsBold);
Assert.IsTrue(fmt2.IsItalic);
// CF2: a single formula-based rule on B9.
IConditionalFormatting cf2 = sheetCF.GetConditionalFormattingAt(1);
Assert.AreEqual(1, cf2.NumberOfRules);
CellRangeAddress[] regions2 = cf2.GetFormattingRanges();
Assert.AreEqual(1, regions2.Length);
Assert.AreEqual("B9", regions2[0].FormatAsString());
IConditionalFormattingRule rule3 = cf2.GetRule(0);
Assert.AreEqual(ConditionType.Formula, rule3.ConditionType);
Assert.AreEqual(ComparisonOperator.NoComparison, rule3.ComparisonOperation);
Assert.AreEqual("$A$8>5", rule3.Formula1);
Assert.IsNull(rule3.Formula2);
IFontFormatting fmt3 = rule3.GetFontFormatting();
// Assert.AreEqual(HSSFColor.Red.index, fmt3.FontColorIndex);
Assert.IsTrue(fmt3.IsBold);
Assert.IsTrue(fmt3.IsItalic);
IPatternFormatting fmt4 = rule3.GetPatternFormatting();
// Assert.AreEqual(HSSFColor.LIGHT_CORNFLOWER_BLUE.index, fmt4.FillBackgroundColor);
// Assert.AreEqual(HSSFColor.Automatic.index, fmt4.FillForegroundColor);
Assert.AreEqual((short)FillPattern.NoFill, fmt4.FillPattern);
// borders are not Set
Assert.IsNull(rule3.GetBorderFormatting());
// CF3: two cell-value-is rules on B1:B7 comparing against string constants.
IConditionalFormatting cf3 = sheetCF.GetConditionalFormattingAt(2);
CellRangeAddress[] regions3 = cf3.GetFormattingRanges();
Assert.AreEqual(1, regions3.Length);
Assert.AreEqual("B1:B7", regions3[0].FormatAsString());
Assert.AreEqual(2, cf3.NumberOfRules);
IConditionalFormattingRule rule4 = cf3.GetRule(0);
Assert.AreEqual(ConditionType.CellValueIs, rule4.ConditionType);
Assert.AreEqual(ComparisonOperator.LessThanOrEqual, rule4.ComparisonOperation);
Assert.AreEqual("\"AAA\"", rule4.Formula1);
Assert.IsNull(rule4.Formula2);
IConditionalFormattingRule rule5 = cf3.GetRule(1);
Assert.AreEqual(ConditionType.CellValueIs, rule5.ConditionType);
Assert.AreEqual(ComparisonOperator.Between, rule5.ComparisonOperation);
Assert.AreEqual("\"A\"", rule5.Formula1);
Assert.AreEqual("\"AAA\"", rule5.Formula2);
}
[Test]
public void TestCreateFontFormatting()
{
// Exercises every IFontFormatting setter (style, height, escapement,
// underline, color) and verifies the last-set values persist after the
// rule is added to the sheet.
IWorkbook workbook = _testDataProvider.CreateWorkbook();
ISheet sheet = workbook.CreateSheet();
ISheetConditionalFormatting sheetCF = sheet.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(ComparisonOperator.Equal, "7");
IFontFormatting fontFmt = rule1.CreateFontFormatting();
Assert.IsFalse(fontFmt.IsItalic);
Assert.IsFalse(fontFmt.IsBold);
fontFmt.SetFontStyle(true, true);
Assert.IsTrue(fontFmt.IsItalic);
Assert.IsTrue(fontFmt.IsBold);
Assert.AreEqual(-1, fontFmt.FontHeight); // not modified
fontFmt.FontHeight = (/*setter*/200);
Assert.AreEqual(200, fontFmt.FontHeight);
fontFmt.FontHeight = (/*setter*/100);
Assert.AreEqual(100, fontFmt.FontHeight);
Assert.AreEqual(FontSuperScript.None, fontFmt.EscapementType);
fontFmt.EscapementType = (/*setter*/FontSuperScript.Sub);
Assert.AreEqual(FontSuperScript.Sub, fontFmt.EscapementType);
fontFmt.EscapementType = (/*setter*/FontSuperScript.None);
Assert.AreEqual(FontSuperScript.None, fontFmt.EscapementType);
fontFmt.EscapementType = (/*setter*/FontSuperScript.Super);
Assert.AreEqual(FontSuperScript.Super, fontFmt.EscapementType);
Assert.AreEqual(FontUnderlineType.None, fontFmt.UnderlineType);
fontFmt.UnderlineType = (/*setter*/FontUnderlineType.Single);
Assert.AreEqual(FontUnderlineType.Single, fontFmt.UnderlineType);
fontFmt.UnderlineType = (/*setter*/FontUnderlineType.None);
Assert.AreEqual(FontUnderlineType.None, fontFmt.UnderlineType);
fontFmt.UnderlineType = (/*setter*/FontUnderlineType.Double);
Assert.AreEqual(FontUnderlineType.Double, fontFmt.UnderlineType);
Assert.AreEqual(-1, fontFmt.FontColorIndex);
fontFmt.FontColorIndex = (/*setter*/HSSFColor.Red.Index);
Assert.AreEqual(HSSFColor.Red.Index, fontFmt.FontColorIndex);
fontFmt.FontColorIndex = (/*setter*/HSSFColor.Automatic.Index);
Assert.AreEqual(HSSFColor.Automatic.Index, fontFmt.FontColorIndex);
fontFmt.FontColorIndex = (/*setter*/HSSFColor.Blue.Index);
Assert.AreEqual(HSSFColor.Blue.Index, fontFmt.FontColorIndex);
IConditionalFormattingRule[] cfRules = { rule1 };
CellRangeAddress[] regions = { CellRangeAddress.ValueOf("A1:A5") };
sheetCF.AddConditionalFormatting(regions, cfRules);
// Verification
IConditionalFormatting cf = sheetCF.GetConditionalFormattingAt(0);
Assert.IsNotNull(cf);
Assert.AreEqual(1, cf.NumberOfRules);
IFontFormatting r1fp = cf.GetRule(0).GetFontFormatting();
Assert.IsNotNull(r1fp);
Assert.IsTrue(r1fp.IsItalic);
Assert.IsTrue(r1fp.IsBold);
Assert.AreEqual(FontSuperScript.Super, r1fp.EscapementType);
Assert.AreEqual(FontUnderlineType.Double, r1fp.UnderlineType);
Assert.AreEqual(HSSFColor.Blue.Index, r1fp.FontColorIndex);
}
[Test]
public void TestCreatePatternFormatting()
{
// Exercises the IPatternFormatting setters (background/foreground colors,
// fill pattern) and verifies the final values persist after adding the
// rule. The Bricks pattern is only checked on the HSSF (EXCEL97) provider.
IWorkbook workbook = _testDataProvider.CreateWorkbook();
ISheet sheet = workbook.CreateSheet();
ISheetConditionalFormatting sheetCF = sheet.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(ComparisonOperator.Equal, "7");
IPatternFormatting patternFmt = rule1.CreatePatternFormatting();
Assert.AreEqual(0, patternFmt.FillBackgroundColor);
patternFmt.FillBackgroundColor = (/*setter*/HSSFColor.Red.Index);
Assert.AreEqual(HSSFColor.Red.Index, patternFmt.FillBackgroundColor);
Assert.AreEqual(0, patternFmt.FillForegroundColor);
patternFmt.FillForegroundColor = (/*setter*/HSSFColor.Blue.Index);
Assert.AreEqual(HSSFColor.Blue.Index, patternFmt.FillForegroundColor);
Assert.AreEqual((short)FillPattern.NoFill, patternFmt.FillPattern);
patternFmt.FillPattern = (short)FillPattern.SolidForeground;
Assert.AreEqual((short)FillPattern.SolidForeground, patternFmt.FillPattern);
patternFmt.FillPattern = (short)FillPattern.NoFill;
Assert.AreEqual((short)FillPattern.NoFill, patternFmt.FillPattern);
if (this._testDataProvider.GetSpreadsheetVersion() == SpreadsheetVersion.EXCEL97)
{
patternFmt.FillPattern = (short)FillPattern.Bricks;
Assert.AreEqual((short)FillPattern.Bricks, patternFmt.FillPattern);
}
IConditionalFormattingRule[] cfRules = { rule1 };
CellRangeAddress[] regions = { CellRangeAddress.ValueOf("A1:A5") };
sheetCF.AddConditionalFormatting(regions, cfRules);
// Verification
IConditionalFormatting cf = sheetCF.GetConditionalFormattingAt(0);
Assert.IsNotNull(cf);
Assert.AreEqual(1, cf.NumberOfRules);
IPatternFormatting r1fp = cf.GetRule(0).GetPatternFormatting();
Assert.IsNotNull(r1fp);
Assert.AreEqual(HSSFColor.Red.Index, r1fp.FillBackgroundColor);
Assert.AreEqual(HSSFColor.Blue.Index, r1fp.FillForegroundColor);
if (this._testDataProvider.GetSpreadsheetVersion() == SpreadsheetVersion.EXCEL97)
{
Assert.AreEqual((short)FillPattern.Bricks, r1fp.FillPattern);
}
}
[Test]
public void TestCreateBorderFormatting()
{
// Exercises every IBorderFormatting side setter and verifies the last-set
// border styles persist after the rule is added to the sheet.
IWorkbook workbook = _testDataProvider.CreateWorkbook();
ISheet sheet = workbook.CreateSheet();
ISheetConditionalFormatting sheetCF = sheet.SheetConditionalFormatting;
IConditionalFormattingRule rule1 = sheetCF.CreateConditionalFormattingRule(ComparisonOperator.Equal, "7");
IBorderFormatting borderFmt = rule1.CreateBorderFormatting();
Assert.AreEqual(BorderStyle.None, borderFmt.BorderBottom);
borderFmt.BorderBottom = (/*setter*/BorderStyle.Dotted);
Assert.AreEqual(BorderStyle.Dotted, borderFmt.BorderBottom);
borderFmt.BorderBottom = (/*setter*/BorderStyle.None);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderBottom);
borderFmt.BorderBottom = (/*setter*/BorderStyle.Thick);
Assert.AreEqual(BorderStyle.Thick, borderFmt.BorderBottom);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderTop);
borderFmt.BorderTop = (/*setter*/BorderStyle.Dotted);
Assert.AreEqual(BorderStyle.Dotted, borderFmt.BorderTop);
borderFmt.BorderTop = (/*setter*/BorderStyle.None);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderTop);
borderFmt.BorderTop = (/*setter*/BorderStyle.Thick);
Assert.AreEqual(BorderStyle.Thick, borderFmt.BorderTop);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderLeft);
borderFmt.BorderLeft = (/*setter*/BorderStyle.Dotted);
Assert.AreEqual(BorderStyle.Dotted, borderFmt.BorderLeft);
borderFmt.BorderLeft = (/*setter*/BorderStyle.None);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderLeft);
borderFmt.BorderLeft = (/*setter*/BorderStyle.Thin);
Assert.AreEqual(BorderStyle.Thin, borderFmt.BorderLeft);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderRight);
borderFmt.BorderRight = (/*setter*/BorderStyle.Dotted);
Assert.AreEqual(BorderStyle.Dotted, borderFmt.BorderRight);
borderFmt.BorderRight = (/*setter*/BorderStyle.None);
Assert.AreEqual(BorderStyle.None, borderFmt.BorderRight);
borderFmt.BorderRight = (/*setter*/BorderStyle.Hair);
Assert.AreEqual(BorderStyle.Hair, borderFmt.BorderRight);
IConditionalFormattingRule[] cfRules = { rule1 };
CellRangeAddress[] regions = { CellRangeAddress.ValueOf("A1:A5") };
sheetCF.AddConditionalFormatting(regions, cfRules);
// Verification
IConditionalFormatting cf = sheetCF.GetConditionalFormattingAt(0);
Assert.IsNotNull(cf);
Assert.AreEqual(1, cf.NumberOfRules);
IBorderFormatting r1fp = cf.GetRule(0).GetBorderFormatting();
Assert.IsNotNull(r1fp);
Assert.AreEqual(BorderStyle.Thick, r1fp.BorderBottom);
Assert.AreEqual(BorderStyle.Thick, r1fp.BorderTop);
Assert.AreEqual(BorderStyle.Thin, r1fp.BorderLeft);
Assert.AreEqual(BorderStyle.Hair, r1fp.BorderRight);
}
}
}
| |
// Copyright 2011 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using System;
using System.Collections.Generic;
using NodaTime.Calendars;
using NodaTime.Text;
using NodaTime.TimeZones;
using NodaTime.Utility;
using NUnit.Framework;
using NodaTime.Test.Calendars;
using System.Linq;
namespace NodaTime.Test
{
/// <summary>
/// Tests for <see cref="LocalDateTime" />.
/// </summary>
public partial class LocalDateTimeTest
{
// Zone with DST transitions (observed at 2am wall time), used by the InZone* tests below.
private static readonly DateTimeZone Pacific = DateTimeZoneProviders.Tzdb["America/Los_Angeles"];
[Test]
public void ToDateTimeUnspecified()
{
LocalDateTime zoned = new LocalDateTime(2011, 3, 5, 1, 0, 0);
DateTime expected = new DateTime(2011, 3, 5, 1, 0, 0, DateTimeKind.Unspecified);
DateTime actual = zoned.ToDateTimeUnspecified();
Assert.AreEqual(expected, actual);
// Kind isn't checked by Equals...
Assert.AreEqual(DateTimeKind.Unspecified, actual.Kind);
}
// Sub-tick nanoseconds are truncated (not rounded) on conversion to DateTime,
// regardless of the year being before or after the epoch.
[Test]
[TestCase(100)]
[TestCase(1900)]
[TestCase(2900)]
public void ToDateTimeUnspecified_TruncatesTowardsStartOfTime(int year)
{
var ldt = new LocalDateTime(year, 1, 1, 13, 15, 55).PlusNanoseconds(NodaConstants.NanosecondsPerSecond - 1);
var expected = new DateTime(year, 1, 1, 13, 15, 55, DateTimeKind.Unspecified).AddTicks(NodaConstants.TicksPerSecond - 1);
var actual = ldt.ToDateTimeUnspecified();
Assert.AreEqual(expected, actual);
}
[Test]
public void ToDateTimeUnspecified_OutOfRange()
{
// One day before 1st January, 1AD (which is DateTime.MinValue)
var ldt = new LocalDate(1, 1, 1).PlusDays(-1).AtMidnight();
Assert.Throws<InvalidOperationException>(() => ldt.ToDateTimeUnspecified());
}
// The DateTime's Kind should make no difference to the conversion result.
[Test]
public void FromDateTime()
{
LocalDateTime expected = new LocalDateTime(2011, 08, 18, 20, 53);
foreach (var kind in Enum.GetValues(typeof(DateTimeKind)).Cast<DateTimeKind>())
{
DateTime x = new DateTime(2011, 08, 18, 20, 53, 0, kind);
LocalDateTime actual = LocalDateTime.FromDateTime(x);
Assert.AreEqual(expected, actual);
}
}
[Test]
public void FromDateTime_WithCalendar()
{
// Julian calendar is 13 days behind Gregorian calendar in the 21st century
LocalDateTime expected = new LocalDateTime(2011, 08, 05, 20, 53, CalendarSystem.Julian);
foreach (var kind in Enum.GetValues(typeof(DateTimeKind)).Cast<DateTimeKind>())
{
DateTime x = new DateTime(2011, 08, 18, 20, 53, 0, kind);
LocalDateTime actual = LocalDateTime.FromDateTime(x, CalendarSystem.Julian);
Assert.AreEqual(expected, actual);
}
}
[Test]
public void TimeProperties_AfterEpoch()
{
// Use the largest valid year as part of validating against overflow
LocalDateTime ldt = new LocalDateTime(GregorianYearMonthDayCalculator.MaxGregorianYear, 1, 2, 15, 48, 25).PlusNanoseconds(123456789);
Assert.AreEqual(15, ldt.Hour);
Assert.AreEqual(3, ldt.ClockHourOfHalfDay);
Assert.AreEqual(48, ldt.Minute);
Assert.AreEqual(25, ldt.Second);
Assert.AreEqual(123, ldt.Millisecond);
Assert.AreEqual(1234567, ldt.TickOfSecond);
Assert.AreEqual(15 * NodaConstants.TicksPerHour +
48 * NodaConstants.TicksPerMinute +
25 * NodaConstants.TicksPerSecond +
1234567, ldt.TickOfDay);
Assert.AreEqual(15 * NodaConstants.NanosecondsPerHour +
48 * NodaConstants.NanosecondsPerMinute +
25 * NodaConstants.NanosecondsPerSecond +
123456789, ldt.NanosecondOfDay);
Assert.AreEqual(123456789, ldt.NanosecondOfSecond);
}
[Test]
public void TimeProperties_BeforeEpoch()
{
// Use the smallest valid year number as part of validating against overflow
LocalDateTime ldt = new LocalDateTime(GregorianYearMonthDayCalculator.MinGregorianYear, 1, 2, 15, 48, 25).PlusNanoseconds(123456789);
Assert.AreEqual(15, ldt.Hour);
Assert.AreEqual(3, ldt.ClockHourOfHalfDay);
Assert.AreEqual(48, ldt.Minute);
Assert.AreEqual(25, ldt.Second);
Assert.AreEqual(123, ldt.Millisecond);
Assert.AreEqual(1234567, ldt.TickOfSecond);
Assert.AreEqual(15 * NodaConstants.TicksPerHour +
48 * NodaConstants.TicksPerMinute +
25 * NodaConstants.TicksPerSecond +
1234567, ldt.TickOfDay);
Assert.AreEqual(15 * NodaConstants.NanosecondsPerHour +
48 * NodaConstants.NanosecondsPerMinute +
25 * NodaConstants.NanosecondsPerSecond +
123456789, ldt.NanosecondOfDay);
Assert.AreEqual(123456789, ldt.NanosecondOfSecond);
}
[Test]
public void DateTime_Roundtrip_OtherCalendarInBcl()
{
var bcl = BclCalendars.Hijri;
DateTime original = bcl.ToDateTime(1376, 6, 19, 0, 0, 0, 0);
LocalDateTime noda = LocalDateTime.FromDateTime(original);
// The DateTime only knows about the ISO version...
Assert.AreNotEqual(1376, noda.Year);
Assert.AreEqual(CalendarSystem.Iso, noda.Calendar);
DateTime final = noda.ToDateTimeUnspecified();
Assert.AreEqual(original, final);
}
// Converting to another calendar changes the date fields but not the time of day.
[Test]
public void WithCalendar()
{
LocalDateTime isoEpoch = new LocalDateTime(1970, 1, 1, 0, 0, 0);
LocalDateTime julianEpoch = isoEpoch.WithCalendar(CalendarSystem.Julian);
Assert.AreEqual(1969, julianEpoch.Year);
Assert.AreEqual(12, julianEpoch.Month);
Assert.AreEqual(19, julianEpoch.Day);
Assert.AreEqual(isoEpoch.TimeOfDay, julianEpoch.TimeOfDay);
}
// Verifies that negative local instant ticks don't cause a problem with the date
[Test]
public void TimeOfDay_Before1970()
{
LocalDateTime dateTime = new LocalDateTime(1965, 11, 8, 12, 5, 23);
LocalTime expected = new LocalTime(12, 5, 23);
Assert.AreEqual(expected, dateTime.TimeOfDay);
}
// Verifies that positive local instant ticks don't cause a problem with the date
[Test]
public void TimeOfDay_After1970()
{
LocalDateTime dateTime = new LocalDateTime(1975, 11, 8, 12, 5, 23);
LocalTime expected = new LocalTime(12, 5, 23);
Assert.AreEqual(expected, dateTime.TimeOfDay);
}
// Verifies that negative local instant ticks don't cause a problem with the date
[Test]
public void Date_Before1970()
{
LocalDateTime dateTime = new LocalDateTime(1965, 11, 8, 12, 5, 23);
LocalDate expected = new LocalDate(1965, 11, 8);
Assert.AreEqual(expected, dateTime.Date);
}
// Verifies that positive local instant ticks don't cause a problem with the date
[Test]
public void Date_After1970()
{
LocalDateTime dateTime = new LocalDateTime(1975, 11, 8, 12, 5, 23);
LocalDate expected = new LocalDate(1975, 11, 8);
Assert.AreEqual(expected, dateTime.Date);
}
[Test]
public void DayOfWeek_AroundEpoch()
{
// Test about couple of months around the Unix epoch. If that works, I'm confident the rest will.
LocalDateTime dateTime = new LocalDateTime(1969, 12, 1, 0, 0);
for (int i = 0; i < 60; i++)
{
// Check once per hour of the day, just in case something's messed up based on the time of day.
for (int hour = 0; hour < 24; hour++)
{
Assert.AreEqual(BclConversions.ToIsoDayOfWeek(dateTime.ToDateTimeUnspecified().DayOfWeek),
dateTime.DayOfWeek);
dateTime = dateTime.PlusHours(1);
}
}
}
// 12-hour clock value: midnight and noon both map to 12.
[Test]
public void ClockHourOfHalfDay()
{
Assert.AreEqual(12, new LocalDateTime(1975, 11, 8, 0, 0, 0).ClockHourOfHalfDay);
Assert.AreEqual(1, new LocalDateTime(1975, 11, 8, 1, 0, 0).ClockHourOfHalfDay);
Assert.AreEqual(12, new LocalDateTime(1975, 11, 8, 12, 0, 0).ClockHourOfHalfDay);
Assert.AreEqual(1, new LocalDateTime(1975, 11, 8, 13, 0, 0).ClockHourOfHalfDay);
Assert.AreEqual(11, new LocalDateTime(1975, 11, 8, 23, 0, 0).ClockHourOfHalfDay);
}
[Test]
public void Operators_SameCalendar()
{
LocalDateTime value1 = new LocalDateTime(2011, 1, 2, 10, 30, 0);
LocalDateTime value2 = new LocalDateTime(2011, 1, 2, 10, 30, 0);
LocalDateTime value3 = new LocalDateTime(2011, 1, 2, 10, 45, 0);
TestHelper.TestOperatorComparisonEquality(value1, value2, value3);
}
// Equality across calendars is well-defined (simply unequal); ordering is not and must throw.
[Test]
public void Operators_DifferentCalendars_Throws()
{
LocalDateTime value1 = new LocalDateTime(2011, 1, 2, 10, 30);
LocalDateTime value2 = new LocalDateTime(2011, 1, 3, 10, 30, CalendarSystem.Julian);
Assert.False(value1 == value2);
Assert.True(value1 != value2);
Assert.Throws<ArgumentException>(() => (value1 < value2).ToString());
Assert.Throws<ArgumentException>(() => (value1 <= value2).ToString());
Assert.Throws<ArgumentException>(() => (value1 > value2).ToString());
Assert.Throws<ArgumentException>(() => (value1 >= value2).ToString());
}
[Test]
public void CompareTo_SameCalendar()
{
LocalDateTime value1 = new LocalDateTime(2011, 1, 2, 10, 30);
LocalDateTime value2 = new LocalDateTime(2011, 1, 2, 10, 30);
LocalDateTime value3 = new LocalDateTime(2011, 1, 2, 10, 45);
Assert.That(value1.CompareTo(value2), Is.EqualTo(0));
Assert.That(value1.CompareTo(value3), Is.LessThan(0));
Assert.That(value3.CompareTo(value2), Is.GreaterThan(0));
}
[Test]
public void CompareTo_DifferentCalendars_Throws()
{
CalendarSystem islamic = CalendarSystem.GetIslamicCalendar(IslamicLeapYearPattern.Base15, IslamicEpoch.Astronomical);
LocalDateTime value1 = new LocalDateTime(2011, 1, 2, 10, 30);
LocalDateTime value2 = new LocalDateTime(1500, 1, 1, 10, 30, islamic);
Assert.Throws<ArgumentException>(() => value1.CompareTo(value2));
Assert.Throws<ArgumentException>(() => ((IComparable)value1).CompareTo(value2));
}
/// <summary>
/// IComparable.CompareTo works properly for LocalDateTime inputs with different calendars.
/// </summary>
[Test]
public void IComparableCompareTo_SameCalendar()
{
LocalDateTime value1 = new LocalDateTime(2011, 1, 2, 10, 30);
LocalDateTime value2 = new LocalDateTime(2011, 1, 2, 10, 30);
LocalDateTime value3 = new LocalDateTime(2011, 1, 2, 10, 45);
IComparable i_value1 = (IComparable)value1;
IComparable i_value3 = (IComparable)value3;
Assert.That(i_value1.CompareTo(value2), Is.EqualTo(0));
Assert.That(i_value1.CompareTo(value3), Is.LessThan(0));
Assert.That(i_value3.CompareTo(value2), Is.GreaterThan(0));
}
/// <summary>
/// IComparable.CompareTo returns a positive number for a null input.
/// </summary>
[Test]
public void IComparableCompareTo_Null_Positive()
{
var instance = new LocalDateTime(2012, 3, 5, 10, 45);
var comparable = (IComparable)instance;
var result = comparable.CompareTo(null);
Assert.That(result, Is.GreaterThan(0));
}
/// <summary>
/// IComparable.CompareTo throws an ArgumentException for non-null arguments
/// that are not a LocalDateTime.
/// </summary>
[Test]
public void IComparableCompareTo_WrongType_ArgumentException()
{
var instance = new LocalDateTime(2012, 3, 5, 10, 45);
var i_instance = (IComparable)instance;
var arg = new LocalDate(2012, 3, 6);
Assert.Throws<ArgumentException>(() => i_instance.CompareTo(arg));
}
// Attaching an offset keeps the local date/time and records the offset.
[Test]
public void WithOffset()
{
var offset = Offset.FromHoursAndMinutes(5, 10);
var localDateTime = new LocalDateTime(2009, 12, 22, 21, 39, 30);
var offsetDateTime = localDateTime.WithOffset(offset);
Assert.AreEqual(localDateTime, offsetDateTime.LocalDateTime);
Assert.AreEqual(offset, offsetDateTime.Offset);
}
[Test]
public void InUtc()
{
var local = new LocalDateTime(2009, 12, 22, 21, 39, 30);
var zoned = local.InUtc();
Assert.AreEqual(local, zoned.LocalDateTime);
Assert.AreEqual(Offset.Zero, zoned.Offset);
Assert.AreSame(DateTimeZone.Utc, zoned.Zone);
}
[Test]
public void InZoneStrictly_InWinter()
{
var local = new LocalDateTime(2009, 12, 22, 21, 39, 30);
var zoned = local.InZoneStrictly(Pacific);
Assert.AreEqual(local, zoned.LocalDateTime);
Assert.AreEqual(Offset.FromHours(-8), zoned.Offset);
}
[Test]
public void InZoneStrictly_InSummer()
{
var local = new LocalDateTime(2009, 6, 22, 21, 39, 30);
var zoned = local.InZoneStrictly(Pacific);
Assert.AreEqual(local, zoned.LocalDateTime);
Assert.AreEqual(Offset.FromHours(-7), zoned.Offset);
}
/// <summary>
/// Pacific time changed from -7 to -8 at 2am wall time on November 2nd 2009,
/// so 2am became 1am.
/// </summary>
[Test]
public void InZoneStrictly_ThrowsWhenAmbiguous()
{
var local = new LocalDateTime(2009, 11, 1, 1, 30, 0);
Assert.Throws<AmbiguousTimeException>(() => local.InZoneStrictly(Pacific));
}
/// <summary>
/// Pacific time changed from -8 to -7 at 2am wall time on March 8th 2009,
/// so 2am became 3am. This means that 2.30am doesn't exist on that day.
/// </summary>
[Test]
public void InZoneStrictly_ThrowsWhenSkipped()
{
var local = new LocalDateTime(2009, 3, 8, 2, 30, 0);
Assert.Throws<SkippedTimeException>(() => local.InZoneStrictly(Pacific));
}
/// <summary>
/// Pacific time changed from -7 to -8 at 2am wall time on November 2nd 2009,
/// so 2am became 1am. We'll return the earlier result, i.e. with the offset of -7
/// </summary>
[Test]
public void InZoneLeniently_AmbiguousTime_ReturnsEarlierMapping()
{
var local = new LocalDateTime(2009, 11, 1, 1, 30, 0);
var zoned = local.InZoneLeniently(Pacific);
Assert.AreEqual(local, zoned.LocalDateTime);
Assert.AreEqual(Offset.FromHours(-7), zoned.Offset);
}
/// <summary>
/// Pacific time changed from -8 to -7 at 2am wall time on March 8th 2009,
/// so 2am became 3am. This means that 2:30am doesn't exist on that day.
/// We'll return 3:30am, the forward-shifted value.
/// </summary>
[Test]
public void InZoneLeniently_ReturnsStartOfSecondInterval()
{
var local = new LocalDateTime(2009, 3, 8, 2, 30, 0);
var zoned = local.InZoneLeniently(Pacific);
Assert.AreEqual(new LocalDateTime(2009, 3, 8, 3, 30, 0), zoned.LocalDateTime);
Assert.AreEqual(Offset.FromHours(-7), zoned.Offset);
}
[Test]
public void InZone()
{
// Don't need much for this - it only delegates.
var ambiguous = new LocalDateTime(2009, 11, 1, 1, 30, 0);
var skipped = new LocalDateTime(2009, 3, 8, 2, 30, 0);
Assert.AreEqual(Pacific.AtLeniently(ambiguous), ambiguous.InZone(Pacific, Resolvers.LenientResolver));
Assert.AreEqual(Pacific.AtLeniently(skipped), skipped.InZone(Pacific, Resolvers.LenientResolver));
}
/// <summary>
/// Using the default constructor is equivalent to midnight on January 1st of year 1
/// in the ISO calendar (as the assertion below shows).
/// </summary>
[Test]
public void DefaultConstructor()
{
var actual = new LocalDateTime();
Assert.AreEqual(new LocalDateTime(1, 1, 1, 0, 0), actual);
}
[Test]
public void XmlSerialization_Iso()
{
var value = new LocalDateTime(2013, 4, 12, 17, 53, 23).PlusNanoseconds(123456789);
TestHelper.AssertXmlRoundtrip(value, "<value>2013-04-12T17:53:23.123456789</value>");
}
// Non-ISO calendar systems are recorded via a "calendar" attribute on the element.
[Test]
public void XmlSerialization_NonIso()
{
var value = new LocalDateTime(2013, 4, 12, 17, 53, 23, CalendarSystem.Julian);
TestHelper.AssertXmlRoundtrip(value, "<value calendar=\"Julian\">2013-04-12T17:53:23</value>");
}
[Test]
[TestCase("<value calendar=\"Rubbish\">2013-06-12T17:53:23</value>", typeof(KeyNotFoundException), Description = "Unknown calendar system")]
[TestCase("<value>2013-15-12T17:53:23</value>", typeof(UnparsableValueException), Description = "Invalid month")]
public void XmlSerialization_Invalid(string xml, Type expectedExceptionType)
{
TestHelper.AssertXmlInvalid<LocalDateTime>(xml, expectedExceptionType);
}
[Test]
public void MinMax_DifferentCalendars_Throws()
{
LocalDateTime ldt1 = new LocalDateTime(2011, 1, 2, 2, 20);
LocalDateTime ldt2 = new LocalDateTime(1500, 1, 1, 5, 10, CalendarSystem.Julian);
Assert.Throws<ArgumentException>(() => LocalDateTime.Max(ldt1, ldt2));
Assert.Throws<ArgumentException>(() => LocalDateTime.Min(ldt1, ldt2));
}
[Test]
public void MinMax_SameCalendar()
{
LocalDateTime ldt1 = new LocalDateTime(1500, 1, 1, 7, 20, CalendarSystem.Julian);
LocalDateTime ldt2 = new LocalDateTime(1500, 1, 1, 5, 10, CalendarSystem.Julian);
Assert.AreEqual(ldt1, LocalDateTime.Max(ldt1, ldt2));
Assert.AreEqual(ldt1, LocalDateTime.Max(ldt2, ldt1));
Assert.AreEqual(ldt2, LocalDateTime.Min(ldt1, ldt2));
Assert.AreEqual(ldt2, LocalDateTime.Min(ldt2, ldt1));
}
// Deconstruction splits the value into its date and time components.
[Test]
public void Deconstruction()
{
var value = new LocalDateTime(2017, 10, 15, 21, 30, 0);
var expectedDate = new LocalDate(2017, 10, 15);
var expectedTime = new LocalTime(21, 30, 0);
var (actualDate, actualTime) = value;
Assert.Multiple(() =>
{
Assert.AreEqual(expectedDate, actualDate);
Assert.AreEqual(expectedTime, actualTime);
});
}
// Each unequal value differs from the base value in exactly one component (or the calendar).
[Test]
public void Equality() => TestHelper.TestEqualsStruct(
value: new LocalDateTime(2017, 10, 15, 21, 30, 0, 0, CalendarSystem.Iso),
equalValue: new LocalDateTime(2017, 10, 15, 21, 30, 0, 0, CalendarSystem.Iso),
unequalValues: new[]
{
new LocalDateTime(2018, 10, 15, 21, 30, 0, 0, CalendarSystem.Iso),
new LocalDateTime(2017, 11, 15, 21, 30, 0, 0, CalendarSystem.Iso),
new LocalDateTime(2017, 10, 16, 21, 30, 0, 0, CalendarSystem.Iso),
new LocalDateTime(2017, 10, 15, 22, 30, 0, 0, CalendarSystem.Iso),
new LocalDateTime(2017, 10, 15, 21, 31, 0, 0, CalendarSystem.Iso),
new LocalDateTime(2017, 10, 15, 21, 30, 1, 0, CalendarSystem.Iso),
new LocalDateTime(2017, 10, 15, 21, 30, 0, 1, CalendarSystem.Iso),
new LocalDateTime(2017, 10, 15, 21, 30, 0, 0, CalendarSystem.Gregorian),
});
}
}
| |
/* Copyright (c) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.IO;
using System.Net;
using System.Text;
using Google.GData.Client;
using Google.GData.Extensions.Apps;
namespace Google.GData.Apps.AdminSettings
{
/// <summary>
/// Base service for accessing Google Admin Settings item feeds from the
/// Google Apps Google Domain Settings API.
/// </summary>
public class AdminSettingsService : Service
{
private string domain;
/// <summary>
/// Constructs an AdminSettingsService bound to a hosted domain.
/// </summary>
/// <param name="domain">The hosted domain in which the Google Mail Settings are
/// being set up</param>
/// <param name="applicationName">The name of the client application
/// using this service.</param>
public AdminSettingsService(string domain, string applicationName)
    : base(AppsNameTable.GAppsService, applicationName)
{
    this.domain = domain;
    // Method-group subscription is equivalent to constructing the delegate explicitly.
    this.NewAtomEntry += this.OnParsedNewGoogleMailSettingsItemEntry;
    this.NewFeed += this.OnNewFeed;
    // You can set factory.methodOverride = true if you are behind a
    // proxy that filters out HTTP methods such as PUT and DELETE.
}
/// <summary>
/// The hosted domain this service reads and writes settings for.
/// </summary>
public string Domain
{
    get { return this.domain; }
    set { this.domain = value; }
}
/// <summary>
/// Fetches the feed entry describing the domain's default language.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetDefaultLanguage()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.DefaultLanguageUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Writes a new default language setting for the domain.
/// </summary>
/// <param name="defaultLanguage">the new default language for the domain</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateDefaultLanguage(string defaultLanguage)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.DefaultLanguageUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.DefaultLanguage, defaultLanguage));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Fetches the feed entry describing the domain's organization name.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetOrganizationName()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.OrganizationNameUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Writes a new organization name for the domain.
/// </summary>
/// <param name="organizationName">the new organization name for the domain</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateOrganizationName(string organizationName)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.OrganizationNameUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.OrganizationName, organizationName));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Fetches the feed entry holding the domain's maximum number of users.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetMaximumNumberOfUsers()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.MaximumNumberOfUsersUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's current number of users.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetCurrentNumberOfUsers()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CurrentNumberOfUsersUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's verification status.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetIsVerified()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.IsVerifiedUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's support PIN.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetSupportPIN()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.SupportPINUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's Google Apps edition.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetDomainEdition()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.EditionUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's customer PIN.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetCustomerPIN()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CustomerPINUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's creation time.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetCreationTime()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CreationTimeUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the domain's country code.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetCountryCode()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CountryCodeUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Fetches the feed entry holding the administrator's secondary email address.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetAdminSecondaryEmail()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.AdminSecondaryEmailUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Writes a new administrator secondary email address for the domain.
/// </summary>
/// <param name="adminSecondaryEmail">the new domain's admin Secondary Email domain</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateAdminSecondaryEmail(string adminSecondaryEmail)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.AdminSecondaryEmailUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.AdminSecondaryEmail, adminSecondaryEmail));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Replaces the domain's custom logo image.
/// </summary>
/// <param name="base64EncodedLogoImage">base 64 encoded binary data of logo image</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateCustomLogo(string base64EncodedLogoImage)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CustomLogoUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.LogoImage, base64EncodedLogoImage));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Reads a file and returns its contents as a base64-encoded string.
/// </summary>
/// <param name="path">path of the file to encode</param>
/// <returns>the file's bytes encoded as base64</returns>
public string FileToBase64(string path)
{
    // Fixes two defects in the previous version: the FileStream was not disposed
    // if the read threw, and the return value of Stream.Read was ignored even
    // though Read may legally return fewer bytes than requested.
    using (FileStream fs = new FileStream(path, System.IO.FileMode.Open, FileAccess.Read))
    {
        byte[] binaryData = new byte[fs.Length];
        int totalRead = 0;
        while (totalRead < binaryData.Length)
        {
            int bytesRead = fs.Read(binaryData, totalRead, binaryData.Length - totalRead);
            if (bytesRead <= 0)
            {
                // File shrank between Length and Read, or stream ended early.
                throw new IOException("Unexpected end of file while reading: " + path);
            }
            totalRead += bytesRead;
        }
        return System.Convert.ToBase64String(binaryData, 0, binaryData.Length);
    }
}
/// <summary>
/// Downloads the resource at the given URI and returns its bytes as a
/// base64-encoded string.
/// </summary>
/// <param name="uri">the URI of the resource to download</param>
/// <returns>the downloaded bytes encoded as base64</returns>
public string UrlToBase64(Uri uri)
{
    // Fix: the WebClient was only disposed on the success path; a using block
    // guarantees disposal even when DownloadData throws.
    using (WebClient webClient = new WebClient())
    {
        byte[] binaryData = webClient.DownloadData(uri);
        return System.Convert.ToBase64String(binaryData, 0, binaryData.Length);
    }
}
/// <summary>
/// Fetches the feed entry holding the domain's CNAME verification status.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetCnameVerificationStatus()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CnameUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Marks the domain's CNAME record as verified.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateCnameVerificationStatus()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.CnameUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    // The API only accepts flipping the flag to "True".
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.Verified, Boolean.TrueString));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Fetches the feed entry holding the domain's MX verification status.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetMxVerificationStatus()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.MxUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Marks the domain's MX records as verified.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateMxVerificationStatus()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.MxUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    // The API only accepts flipping the flag to "True".
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.Verified, Boolean.TrueString));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Fetches the feed entry holding the domain's general SSO settings.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetSsoSettings()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.SsoGeneralUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Replaces the domain's SAML-based SSO configuration.
/// </summary>
/// <param name="enableSSO">Enable or Disable SSO for the domain</param>
/// <param name="samlSignonUri">http://www.example.com/sso/signon</param>
/// <param name="samlLogoutUri">http://www.example.com/sso/logout</param>
/// <param name="changePasswordUri">http://www.example.com/sso/changepassword</param>
/// <param name="ssoWhitelist">CIDR formated IP address</param>
/// <param name="useDomainSpecificIssuer">whether a domain-specific SAML issuer is used</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateSsoSettings(Boolean enableSSO, String samlSignonUri,
    String samlLogoutUri, String changePasswordUri, String ssoWhitelist, Boolean useDomainSpecificIssuer)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.SsoGeneralUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    // Every setting is sent in a single entry; the booleans are serialized as strings.
    updateEntry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.EnableSSO, enableSSO.ToString()));
    updateEntry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.SamlSignonUri, samlSignonUri));
    updateEntry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.SamlLogoutUri, samlLogoutUri));
    updateEntry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.ChangePasswordUri, changePasswordUri));
    updateEntry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.SsoWhitelist, ssoWhitelist));
    updateEntry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.UseDomainSpecificIssuer, useDomainSpecificIssuer.ToString()));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Fetches the feed entry holding the domain's SSO signing key.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetSsoSigningkey()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.SsoSigningkeyUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Writes a new SSO signing key for the domain.
/// </summary>
/// <param name="base64EncodedSigningKey">yourBase64EncodedPublicKey</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateSsoSigningkey(string base64EncodedSigningKey)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.SsoSigningkeyUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.SigningKey, base64EncodedSigningKey));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Fetches the feed entry holding the domain's user-migration access setting.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation, or null if the feed returned a different entry type</returns>
public AdminSettingsEntry GetMigrationAccess()
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.MigrationUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Enables or disables user email migration for the domain.
/// </summary>
/// <param name="enableUserMigration">Enable or Disable User migration for the domain</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateMigrationAccess(Boolean enableUserMigration)
{
    string requestUri = String.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.MigrationUriSuffix);
    AdminSettingsEntry updateEntry = new AdminSettingsEntry();
    updateEntry.EditUri = requestUri;
    updateEntry.Properties.Add(
        new PropertyElement(AppsDomainSettingsNameTable.EnableUserMigration, enableUserMigration.ToString()));
    return base.Update<AdminSettingsEntry>(updateEntry);
}
/// <summary>
/// Retrieves the domain's outbound email gateway settings.
/// </summary>
/// <returns>an <code>AdminSettingsEntry</code> holding the retrieved settings,
/// or null when the response is not an AdminSettingsEntry</returns>
public AdminSettingsEntry GetEmailGateway()
{
    string requestUri = string.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.GatewayUriSuffix);
    return Get(requestUri) as AdminSettingsEntry;
}
/// <summary>
/// Updates the domain's outbound email gateway settings.
/// </summary>
/// <param name="smartHost">Either the IP address or hostname of your SMTP server.
/// Google Apps routes outgoing mail to this server.</param>
/// <param name="smtpMode">The default value is SMTP. Another value, SMTP_TLS,
/// secures a connection with TLS when delivering the message.</param>
/// <returns>an <code>AdminSettingsEntry</code> containing the results of the
/// operation</returns>
public AdminSettingsEntry UpdateEmailGateway(string smartHost, string smtpMode)
{
    string requestUri = string.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.GatewayUriSuffix);
    AdminSettingsEntry entry = new AdminSettingsEntry { EditUri = requestUri };
    entry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.SmartHost, smartHost));
    entry.Properties.Add(new PropertyElement(AppsDomainSettingsNameTable.SmtpMode, smtpMode));
    return base.Update<AdminSettingsEntry>(entry);
}
/// <summary>
/// Retrieves the domain's email routing settings.
/// </summary>
/// <returns>an <code>AdminSettingsFeed</code> holding the retrieved settings,
/// or null when the response is not an AdminSettingsFeed</returns>
public AdminSettingsFeed GetEmailRouting()
{
    string requestUri = string.Concat(
        AppsDomainSettingsNameTable.AppsAdminSettingsBaseFeedUri,
        domain,
        AppsDomainSettingsNameTable.EmailroutingUriSuffix);
    return Query(new FeedQuery(requestUri)) as AdminSettingsFeed;
}
/// <summary>
/// Event handler. Called when a new Google Domain Settings entry is parsed;
/// substitutes a strongly-typed <code>AdminSettingsEntry</code> for the
/// generic entry being created.
/// </summary>
/// <param name="sender">the object that's sending the event</param>
/// <param name="e">FeedParserEventArguments, holds the feedentry</param>
/// <exception cref="ArgumentNullException">thrown when <paramref name="e"/> is null</exception>
protected void OnParsedNewGoogleMailSettingsItemEntry(object sender, FeedParserEventArgs e)
{
    if (e == null)
    {
        throw new ArgumentNullException("e");
    }
    // Idiomatic boolean test instead of comparing against 'true'.
    if (e.CreatingEntry)
    {
        e.Entry = new AdminSettingsEntry();
    }
}
/// <summary>
/// Overridden so that new feeds are returned as <code>AdminSettingsFeed</code>s
/// instead of base <code>AtomFeed</code>s.
/// (The previous doc comment incorrectly said <code>AppsExtendedFeed</code>.)
/// </summary>
/// <param name="sender">the object which sent the event</param>
/// <param name="e">ServiceEventArgs, holds the feed</param>
/// <exception cref="ArgumentNullException">thrown when <paramref name="e"/> is null</exception>
protected void OnNewFeed(object sender, ServiceEventArgs e)
{
    // Validate arguments before doing any work (the trace used to run first).
    if (e == null)
    {
        throw new ArgumentNullException("e");
    }
    Tracing.TraceMsg("Created new Google Mail Settings Item Feed");
    e.Feed = new AdminSettingsFeed(e.Uri, e.Service);
}
}
}
| |
//! \file ArcVF.cs
//! \date Wed Jun 08 00:27:36 2016
//! \brief LiveMaker resource archive.
//
// Copyright (C) 2016-2019 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.IO;
using System.Linq;
using GameRes.Compression;
namespace GameRes.Formats.LiveMaker
{
/// <summary>
/// Opens LiveMaker 'vf' resource archives.  The archive body usually lives in
/// game.dat; the index may reside in a separate game.ext file; extra data may
/// follow in numbered part files (game.001, game.002, ...) or be appended
/// after an executable image.
/// </summary>
[Export(typeof(ArchiveFormat))]
public class VffOpener : ArchiveFormat
{
public override string Tag { get { return "DAT/vf"; } }
public override string Description { get { return "LiveMaker resource archive"; } }
public override uint Signature { get { return 0x666676; } } // 'vff'
public override bool IsHierarchic { get { return true; } }
public override bool CanWrite { get { return false; } }
/// <summary>
/// Signature 0 in the list lets TryOpen probe files whose 'vff' magic is not
/// at offset 0 (e.g. archives appended to an .exe, or bodies with a detached index).
/// </summary>
public VffOpener ()
{
Extensions = new string[] { "dat" };
Signatures = new uint[] { 0x666676, 0 };
}
/// <summary>
/// Attempts to interpret <paramref name="file"/> as a LiveMaker archive.
/// Returns null when the file does not match the expected layout.
/// </summary>
public override ArcFile TryOpen (ArcView file)
{
uint base_offset = 0;
ArcView index_file = file;
try
{
// possible filesystem structure:
//   game.dat -- main archive body
//   game.ext -- [optional] separate index (could be included into the main body)
//   game.001 -- [optional] extra parts
//   game.002
//   ...
uint signature = index_file.View.ReadUInt32 (0);
if (file.Name.HasExtension (".exe")
&& (0x5A4D == (signature & 0xFFFF))) // 'MZ'
{
// archive is appended after the executable image; skip to the overlay
base_offset = SkipExeData (index_file);
signature = index_file.View.ReadUInt32 (base_offset);
}
else if (!file.Name.HasExtension (".dat"))
{
return null;
}
else if (0x666676 != signature)
{
// main body lacks the index -- look for a detached game.ext index file
var ext_filename = Path.ChangeExtension (file.Name, ".ext");
if (!VFS.FileExists (ext_filename))
return null;
index_file = VFS.OpenView (ext_filename);
signature = index_file.View.ReadUInt32 (0);
}
if (0x666676 != signature)
return null;
// entry count is stored 6 bytes past the start of the index
int count = index_file.View.ReadInt32 (base_offset+6);
if (!IsSaneCount (count))
return null;
var dir = ReadIndex (index_file, base_offset, count);
if (null == dir)
return null;
long max_offset = file.MaxOffset;
// collect optional numbered continuation parts (.001 .. .099)
var parts = new List<ArcView>();
try
{
for (int i = 1; i < 100; ++i)
{
var ext = string.Format (".{0:D3}", i);
var part_filename = Path.ChangeExtension (file.Name, ext);
if (!VFS.FileExists (part_filename))
break;
var arc_file = VFS.OpenView (part_filename);
max_offset += arc_file.MaxOffset;
parts.Add (arc_file);
}
}
catch
{
// dispose any parts opened so far before propagating the failure
foreach (var part in parts)
part.Dispose();
throw;
}
if (0 == parts.Count)
return new ArcFile (file, this, dir);
return new MultiFileArchive (file, this, dir, parts);
}
finally
{
// only dispose the index view when it came from a separate .ext file
if (index_file != file)
index_file.Dispose();
}
}
/// <summary>
/// Opens an entry for reading, undoing chunk-scrambling and zlib compression
/// according to the entry's flags.
/// </summary>
public override Stream OpenEntry (ArcFile arc, Entry entry)
{
var vff = arc as MultiFileArchive;
Stream input = null;
if (vff != null)
input = vff.OpenStream (entry);
else
input = arc.File.CreateStream (entry.Offset, entry.Size);
var pent = entry as VfEntry;
if (null == pent)
return input;
if (pent.IsScrambled)
{
byte[] data;
using (input)
{
// scrambled entries carry an 8-byte header; anything not larger is empty
if (entry.Size <= 8)
return Stream.Null;
data = ReshuffleStream (input);
}
input = new BinMemoryStream (data, entry.Name);
}
if (pent.IsPacked)
input = new ZLibStream (input, CompressionMode.Decompress);
return input;
}
/// <summary>
/// Reads the archive index: length-prefixed names XORed with a TpRandom
/// keystream, then 64-bit entry offsets XORed with a restarted keystream
/// (entry sizes are derived from consecutive offsets), then one flags byte
/// per entry.  Returns null when the index looks invalid.
/// </summary>
List<Entry> ReadIndex (ArcView file, uint base_offset, int count)
{
uint index_offset = base_offset+0xA;
var name_buffer = new byte[0x100];
var rnd = new TpRandom (0x75D6EE39u);
var dir = new List<Entry> (count);
for (int i = 0; i < count; ++i)
{
uint name_length = file.View.ReadUInt32 (index_offset);
index_offset += 4;
if (0 == name_length || name_length > name_buffer.Length)
return null;
if (name_length != file.View.Read (index_offset, name_buffer, 0, name_length))
return null;
index_offset += name_length;
var name = DecryptName (name_buffer, (int)name_length, rnd);
dir.Add (Create<VfEntry> (name));
}
// offsets are masked with the same keystream, restarted from the beginning
rnd.Reset();
long offset = base_offset + (file.View.ReadInt64 (index_offset) ^ (int)rnd.GetRand32());
foreach (var entry in dir)
{
index_offset += 8;
long next_offset = base_offset + (file.View.ReadInt64 (index_offset) ^ (int)rnd.GetRand32());
entry.Offset = offset;
entry.Size = (uint)(next_offset - offset);
offset = next_offset;
}
index_offset += 8;
// flags byte: 0 or 3 -> zlib-compressed, 2 or 3 -> chunk-scrambled
foreach (VfEntry entry in dir)
{
byte flags = file.View.ReadByte (index_offset++);
entry.IsPacked = 0 == flags || 3 == flags;
entry.IsScrambled = 2 == flags || 3 == flags;
}
return dir;
}
/// <summary>
/// Decrypts an entry name in place by XORing each byte with the keystream,
/// then decodes the result as Shift-JIS (cp932) text.
/// </summary>
string DecryptName (byte[] name_buf, int name_length, TpRandom key)
{
for (int i = 0; i < name_length; ++i)
{
name_buf[i] ^= (byte)key.GetRand32();
}
return Encodings.cp932.GetString (name_buf, 0, name_length);
}
/// <summary>
/// Returns the offset where the executable image ends, i.e. the start of its overlay.
/// </summary>
uint SkipExeData (ArcView file)
{
var exe = new ExeFile (file);
return (uint)exe.Overlay.Offset;
}
/// <summary>
/// Restores the original byte order of a scrambled entry.  The 8-byte header
/// holds the chunk size and a seed; chunks are copied back in the permutation
/// produced by RandomSequence.
/// NOTE(review): Stream.Read return values are ignored -- this assumes full
/// reads, which holds for the seekable in-archive streams used here; verify
/// before reusing with other stream types.
/// </summary>
byte[] ReshuffleStream (Stream input)
{
var header = new byte[8];
input.Read (header, 0, 8);
int chunk_size = header.ToInt32 (0);
uint seed = header.ToUInt32 (4) ^ 0xF8EAu;
int input_length = (int)input.Length - 8;
var output = new byte[input_length];
// number of chunks, rounding the final partial chunk up
int count = (input_length - 1) / chunk_size + 1;
int dst = 0;
foreach (int i in RandomSequence (count, seed))
{
int position = i * chunk_size;
input.Position = 8 + position;
// the final chunk may be shorter than chunk_size
int length = Math.Min (chunk_size, input_length - position);
input.Read (output, dst, length);
dst += length;
}
return output;
}
/// <summary>
/// Produces a deterministic permutation of [0, count) derived from the seed:
/// seq[k] is the output position of chunk k.
/// </summary>
static IEnumerable<int> RandomSequence (int count, uint seed)
{
var tp = new TpScramble (seed);
var order = Enumerable.Range (0, count).ToList<int>();
var seq = new int[order.Count];
for (int i = 0; order.Count > 1; ++i)
{
int n = tp.GetInt32 (0, order.Count - 2);
seq[order[n]] = i;
order.RemoveAt (n);
}
// last remaining chunk goes to the final position
seq[order[0]] = count - 1;
return seq;
}
}
/// <summary>
/// Archive entry for LiveMaker 'vf' archives; extends PackedEntry with a
/// scrambling flag.
/// </summary>
internal class VfEntry : PackedEntry
{
// true when the entry body is chunk-shuffled and must be reordered by
// VffOpener.ReshuffleStream before (optional) zlib decompression
public bool IsScrambled;
}
/// <summary>
/// Keystream generator used for index encryption: a linear congruential
/// sequence state = state * 5 + seed (mod 2^32), starting from zero.
/// </summary>
internal class TpRandom
{
    uint m_seed;
    uint m_state;

    public TpRandom (uint seed)
    {
        m_seed = seed;
        m_state = 0;
    }

    /// <summary>Advance the sequence and return the next keystream word.</summary>
    public uint GetRand32 ()
    {
        // (state << 2) + state + seed == state * 5 + seed, modulo 2^32
        m_state = unchecked (m_state * 5u + m_seed);
        return m_state;
    }

    /// <summary>Rewind the sequence to its initial state.</summary>
    public void Reset ()
    {
        m_state = 0;
    }
}
/// <summary>
/// Pseudo-random generator used to derive the chunk shuffle order:
/// an xorshift-seeded, five-word multiply-with-carry sequence.
/// </summary>
internal class TpScramble
{
    uint[] m_regs = new uint[5];
    const uint FactorA = 2111111111;
    const uint FactorB = 1492;
    const uint FactorC = 1776;
    const uint FactorD = 5115;

    public TpScramble (uint seed)
    {
        Init (seed);
    }

    /// <summary>Re-seed the generator; a zero seed maps to 0xFFFFFFFF.</summary>
    public void Init (uint seed)
    {
        uint h = 0 == seed ? 0xFFFFFFFFu : seed;
        // spread the seed over the state words with an xorshift scramble
        for (int i = 0; i < m_regs.Length; ++i)
        {
            h ^= h << 13;
            h ^= h >> 17;
            h ^= h << 5;
            m_regs[i] = h;
        }
        // discard the first 19 outputs to warm the generator up
        int warmup = 19;
        while (warmup-- > 0)
            NextUInt32();
    }

    /// <summary>Returns a pseudo-random integer in the range [first, last].</summary>
    public int GetInt32 (int first, int last)
    {
        // scale the next 32-bit output into [0, 1) exactly as the original did
        double scale = NextUInt32() / 4294967296.0;
        return (int)(first + (long)(scale * (last - first + 1)));
    }

    // Core MWC step: weighted sum of the four newest words plus the carry;
    // low half becomes the new word, high half the new carry.
    uint NextUInt32 ()
    {
        ulong acc = (ulong)m_regs[3] * FactorA
                  + (ulong)m_regs[2] * FactorB
                  + (ulong)m_regs[1] * FactorC
                  + (ulong)m_regs[0] * FactorD
                  + m_regs[4];
        m_regs[3] = m_regs[2];
        m_regs[2] = m_regs[1];
        m_regs[1] = m_regs[0];
        m_regs[4] = (uint)(acc >> 32);
        m_regs[0] = (uint)acc;
        return m_regs[0];
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.