context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
#region Apache Notice
/*****************************************************************************
* $Revision: 374175 $
* $LastChangedDate: 2006-05-20 23:56:36 +0200 (sam., 20 mai 2006) $
* $LastChangedBy: gbayon $
*
* iBATIS.NET Data Mapper
* Copyright (C) 2006/2005 - The Apache Software Foundation
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
********************************************************************************/
#endregion
using System;
using System.Collections;
//#if dotnet2
using System.Configuration;
//#endif
using System.Reflection;
using System.Xml.Serialization;
using IBatisNet.Common.Utilities;
using IBatisNet.DataMapper.MappedStatements.ArgumentStrategy;
using IBatisNet.DataMapper.Scope;
using IBatisNet.DataMapper.TypeHandlers;
namespace IBatisNet.DataMapper.Configuration.ResultMapping
{
/// <summary>
/// Describes a constructor argument mapping of a result map
/// (the &lt;argument&gt; element of a resultMap).
/// </summary>
[Serializable]
[XmlRoot("argument", Namespace = "http://ibatis.apache.org/mapping")]
public class ArgumentProperty : ResultProperty
{
    #region Fields
    [NonSerialized]
    private string _argumentName = string.Empty;
    // Resolved from the target constructor's parameter list in Initialize().
    [NonSerialized]
    private Type _argumentType = null;
    [NonSerialized]
    private IArgumentStrategy _argumentStrategy = null;
    #endregion

    #region Properties
    /// <summary>
    /// Sets or gets the <see cref="IArgumentStrategy"/> used to fill the object property.
    /// </summary>
    [XmlIgnore]
    public override IArgumentStrategy ArgumentStrategy
    {
        set { _argumentStrategy = value; }
        get { return _argumentStrategy; }
    }

    /// <summary>
    /// Specify the constructor argument name.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when set to null or an empty string.</exception>
    [XmlAttribute("argumentName")]
    public string ArgumentName
    {
        get { return _argumentName; }
        set
        {
            if (value == null || value.Length == 0)
            {
                // Fix: the original passed the message text to the paramName-only
                // constructor, which rendered it as "Parameter name: ...".
                throw new ArgumentNullException("value", "The name attribute is mandatory in a argument tag.");
            }
            _argumentName = value;
        }
    }

    /// <summary>
    /// Tell us if we must lazy load this property. Always false: constructor
    /// arguments are required at object creation time, so they cannot be lazy loaded.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown by the setter, which is unsupported.</exception>
    [XmlAttribute("lazyLoad")]
    public override bool IsLazyLoad
    {
        get { return false; }
        set { throw new InvalidOperationException("Argument property cannot be lazy load."); }
    }

    /// <summary>
    /// Get the argument type (null until <see cref="Initialize"/> has matched
    /// <see cref="ArgumentName"/> against a constructor parameter).
    /// </summary>
    [XmlIgnore]
    public override Type MemberType
    {
        get { return _argumentType; }
    }
    #endregion

    #region Constructor (s) / Destructor
    /// <summary>
    /// Do not use directly, only for serialization.
    /// </summary>
    public ArgumentProperty()
    {
    }
    #endregion

    #region Methods
    /// <summary>
    /// Initialize the argument property: resolve the argument type from the
    /// constructor parameter with the same name, then pick a type handler.
    /// </summary>
    /// <param name="configScope">Configuration scope (error context, factories).</param>
    /// <param name="constructorInfo">Constructor whose parameters are searched for <see cref="ArgumentName"/>.</param>
    public void Initialize(ConfigurationScope configScope, ConstructorInfo constructorInfo)
    {
        // Search the argument by name to set its type.
        foreach (ParameterInfo parameter in constructorInfo.GetParameters())
        {
            if (parameter.Name == _argumentName)
            {
                _argumentType = parameter.ParameterType;
                break;
            }
        }
        if (this.CallBackName != null && this.CallBackName.Length > 0)
        {
            // An explicit custom type handler callback was configured.
            configScope.ErrorContext.MoreInfo = "Argument property (" + _argumentName + "), check the typeHandler attribute '" + this.CallBackName + "' (must be a ITypeHandlerCallback implementation).";
            try
            {
                Type type = configScope.SqlMapper.TypeHandlerFactory.GetType(this.CallBackName);
                ITypeHandlerCallback typeHandlerCallback = (ITypeHandlerCallback)Activator.CreateInstance(type);
                this.TypeHandler = new CustomTypeHandler(typeHandlerCallback);
            }
            catch (Exception e)
            {
                throw new ConfigurationErrorsException("Error occurred during custom type handler configuration. Cause: " + e.Message, e);
            }
        }
        else
        {
            configScope.ErrorContext.MoreInfo = "Argument property (" + _argumentName + ") set the typeHandler attribute.";
            this.TypeHandler = this.ResolveTypeHandler(configScope, _argumentType, this.CLRType, this.DbType);
        }
    }

    /// <summary>
    /// Resolve the <see cref="ITypeHandler"/> for a constructor argument.
    /// </summary>
    /// <param name="configScope">Configuration scope giving access to the type handler factory.</param>
    /// <param name="argumenType">The argument type (may be null when the name matched no parameter).</param>
    /// <param name="clrType">Optional CLR type name hint from the mapping.</param>
    /// <param name="dbType">Optional database type name hint from the mapping.</param>
    /// <returns>The resolved type handler.</returns>
    public ITypeHandler ResolveTypeHandler(ConfigurationScope configScope, Type argumenType, string clrType, string dbType)
    {
        if (argumenType == null)
        {
            return configScope.DataExchangeFactory.TypeHandlerFactory.GetUnkownTypeHandler();
        }
        if (typeof(IDictionary).IsAssignableFrom(argumenType))
        {
            // IDictionary argument: only an explicit clrType hint can yield a precise handler.
            return ResolveHandlerFromClrType(configScope, clrType, dbType);
        }
        // Primitive / directly-known type. The original queried the factory twice
        // (once for the null test, once for the value); do the lookup once.
        ITypeHandler handler = configScope.DataExchangeFactory.TypeHandlerFactory.GetTypeHandler(argumenType, dbType);
        if (handler != null)
        {
            return handler;
        }
        // Plain .NET object: fall back to the clrType hint, as for IDictionary.
        return ResolveHandlerFromClrType(configScope, clrType, dbType);
    }

    /// <summary>
    /// Shared fallback extracted from two previously duplicated branches of
    /// <see cref="ResolveTypeHandler"/>: resolve a handler from an optional CLR
    /// type name, or return the unknown-type handler when no name is given.
    /// </summary>
    private ITypeHandler ResolveHandlerFromClrType(ConfigurationScope configScope, string clrType, string dbType)
    {
        if (clrType == null || clrType.Length == 0)
        {
            return configScope.DataExchangeFactory.TypeHandlerFactory.GetUnkownTypeHandler();
        }
        try
        {
            Type type = TypeUtils.ResolveType(clrType);
            return configScope.DataExchangeFactory.TypeHandlerFactory.GetTypeHandler(type, dbType);
        }
        catch (Exception e)
        {
            throw new ConfigurationErrorsException("Error. Could not set TypeHandler. Cause: " + e.Message, e);
        }
    }
    #endregion
}
}
| |
#if !DISABLE_PLAYFABENTITY_API && !DISABLE_PLAYFAB_STATIC_API
using System;
using System.Collections.Generic;
using PlayFab.GroupsModels;
using PlayFab.Internal;
namespace PlayFab
{
/// <summary>
/// The Groups API is designed for any permanent or semi-permanent collections of Entities (players, or non-players). If you
/// want to make Guilds/Clans/Corporations/etc., then you should use groups. Groups can also be used to make chatrooms,
/// parties, or any other persistent collection of entities.
/// </summary>
// NOTE(review): this class looks auto-generated (one identical wrapper per REST endpoint);
// if a generator template exists, prefer fixing it there over hand-editing these methods.
public static class PlayFabGroupsAPI
{
    static PlayFabGroupsAPI() {}

    /// <summary>
    /// Verify entity login.
    /// </summary>
    public static bool IsEntityLoggedIn()
    {
        return PlayFabSettings.staticPlayer.IsEntityLoggedIn();
    }

    /// <summary>
    /// Clear the Client SessionToken which allows this Client to call API calls requiring login.
    /// A new/fresh login will be required after calling this.
    /// </summary>
    public static void ForgetAllCredentials()
    {
        PlayFabSettings.staticPlayer.ForgetAllCredentials();
    }

    /// <summary>
    /// Accepts an outstanding invitation to join a group
    /// </summary>
    public static void AcceptGroupApplication(AcceptGroupApplicationRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/AcceptGroupApplication", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Accepts an invitation to join a group
    /// </summary>
    public static void AcceptGroupInvitation(AcceptGroupInvitationRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/AcceptGroupInvitation", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Adds members to a group or role.
    /// </summary>
    public static void AddMembers(AddMembersRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/AddMembers", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Applies to join a group
    /// </summary>
    public static void ApplyToGroup(ApplyToGroupRequest request, Action<ApplyToGroupResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ApplyToGroup", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Blocks a list of entities from joining a group.
    /// </summary>
    public static void BlockEntity(BlockEntityRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/BlockEntity", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Changes the role membership of a list of entities from one role to another.
    /// </summary>
    public static void ChangeMemberRole(ChangeMemberRoleRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ChangeMemberRole", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Creates a new group.
    /// </summary>
    public static void CreateGroup(CreateGroupRequest request, Action<CreateGroupResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/CreateGroup", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Creates a new group role.
    /// </summary>
    public static void CreateRole(CreateGroupRoleRequest request, Action<CreateGroupRoleResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/CreateRole", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Deletes a group and all roles, invitations, join requests, and blocks associated with it.
    /// </summary>
    public static void DeleteGroup(DeleteGroupRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/DeleteGroup", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Deletes an existing role in a group.
    /// </summary>
    public static void DeleteRole(DeleteRoleRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/DeleteRole", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Gets information about a group and its roles
    /// </summary>
    public static void GetGroup(GetGroupRequest request, Action<GetGroupResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/GetGroup", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Invites a player to join a group
    /// </summary>
    public static void InviteToGroup(InviteToGroupRequest request, Action<InviteToGroupResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/InviteToGroup", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Checks to see if an entity is a member of a group or role within the group
    /// </summary>
    public static void IsMember(IsMemberRequest request, Action<IsMemberResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/IsMember", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Lists all outstanding requests to join a group
    /// </summary>
    public static void ListGroupApplications(ListGroupApplicationsRequest request, Action<ListGroupApplicationsResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ListGroupApplications", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Lists all entities blocked from joining a group
    /// </summary>
    public static void ListGroupBlocks(ListGroupBlocksRequest request, Action<ListGroupBlocksResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ListGroupBlocks", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Lists all outstanding invitations for a group
    /// </summary>
    public static void ListGroupInvitations(ListGroupInvitationsRequest request, Action<ListGroupInvitationsResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ListGroupInvitations", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Lists all members for a group
    /// </summary>
    public static void ListGroupMembers(ListGroupMembersRequest request, Action<ListGroupMembersResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ListGroupMembers", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Lists all groups and roles for an entity
    /// </summary>
    public static void ListMembership(ListMembershipRequest request, Action<ListMembershipResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ListMembership", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Lists all outstanding invitations and group applications for an entity
    /// </summary>
    public static void ListMembershipOpportunities(ListMembershipOpportunitiesRequest request, Action<ListMembershipOpportunitiesResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/ListMembershipOpportunities", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Removes an application to join a group
    /// </summary>
    public static void RemoveGroupApplication(RemoveGroupApplicationRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/RemoveGroupApplication", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Removes an invitation to join a group
    /// </summary>
    public static void RemoveGroupInvitation(RemoveGroupInvitationRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/RemoveGroupInvitation", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Removes members from a group.
    /// </summary>
    public static void RemoveMembers(RemoveMembersRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/RemoveMembers", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Unblocks a list of entities from joining a group
    /// </summary>
    public static void UnblockEntity(UnblockEntityRequest request, Action<EmptyResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/UnblockEntity", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Updates non-membership data about a group.
    /// </summary>
    public static void UpdateGroup(UpdateGroupRequest request, Action<UpdateGroupResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/UpdateGroup", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }

    /// <summary>
    /// Updates metadata about a role.
    /// </summary>
    public static void UpdateRole(UpdateGroupRoleRequest request, Action<UpdateGroupRoleResponse> resultCallback, Action<PlayFabError> errorCallback, object customData = null, Dictionary<string, string> extraHeaders = null)
    {
        var context = (request == null ? null : request.AuthenticationContext) ?? PlayFabSettings.staticPlayer;
        var callSettings = PlayFabSettings.staticSettings;
        if (!context.IsEntityLoggedIn()) throw new PlayFabException(PlayFabExceptionCode.NotLoggedIn,"Must be logged in to call this method");
        PlayFabHttp.MakeApiCall("/Group/UpdateRole", request, AuthType.EntityToken, resultCallback, errorCallback, customData, extraHeaders, context, callSettings);
    }
}
}
#endif
| |
using System;
using System.Collections.Generic;
using UnityEngine.Rendering;
using UnityEngine.Serialization;
namespace UnityEngine.Experimental.Rendering.HDPipeline
{
/// <summary>Controls which shaders have their variants logged. Values are ordered
/// by verbosity; do not reorder (implicit numeric values may be serialized).</summary>
public enum ShaderVariantLogLevel
{
    /// <summary>No shader variant logging.</summary>
    Disabled,
    /// <summary>Log variants for HDRP shaders only.</summary>
    OnlyHDRPShaders,
    /// <summary>Log variants for every shader.</summary>
    AllShaders,
}
// The HDRenderPipeline assumes linear lighting. Doesn't work with gamma.
public partial class HDRenderPipelineAsset : RenderPipelineAsset
{
// Private (default-visibility) constructor: the asset is created through Unity's
// asset machinery, not via 'new'.
HDRenderPipelineAsset()
{
}
/// <summary>
/// Creates the HDRenderPipeline instance used for rendering. Returns null when
/// construction fails, in which case rendering simply does nothing.
/// </summary>
protected override UnityEngine.Rendering.RenderPipeline CreatePipeline()
{
    // Pipeline construction can throw while the package manager is still
    // importing assets HDRP depends on (e.g. during a package upgrade). Log the
    // error and fall back to a null pipeline rather than breaking HDRP
    // resources and causing more errors.
    try
    {
        return new HDRenderPipeline(this);
    }
    catch (Exception e)
    {
        UnityEngine.Debug.LogError(e);
        return null;
    }
}
protected override void OnValidate()
{
    // Do not reconstruct the pipeline when other assets are modified;
    // OnValidate also fires once on first selection of the asset.
    if (GraphicsSettings.renderPipelineAsset != this)
        return;
    base.OnValidate();
}
[SerializeField]
RenderPipelineResources m_RenderPipelineResources;

/// <summary>Serialized runtime resources required by HDRP.</summary>
public RenderPipelineResources renderPipelineResources
{
    get => m_RenderPipelineResources;
    set => m_RenderPipelineResources = value;
}
#if UNITY_EDITOR
// Intentionally NOT serialized (no [SerializeField]); see lazy init below.
HDRenderPipelineEditorResources m_RenderPipelineEditorResources;

/// <summary>Editor-only resources, lazily loaded from the HDRP package path.</summary>
public HDRenderPipelineEditorResources renderPipelineEditorResources
{
    get
    {
        // There is no clean way to load editor resources without having them serialized:
        // - impossible to load them at deserialization
        // - constructor only called at asset creation
        // - cannot rely on OnEnable
        // thus fall back to lazy init for them.
        // The extra Equals(null) also catches a destroyed object (Unity "fake null"),
        // not just a plain null reference.
        if (m_RenderPipelineEditorResources == null || m_RenderPipelineEditorResources.Equals(null))
            m_RenderPipelineEditorResources = UnityEditor.AssetDatabase.LoadAssetAtPath<HDRenderPipelineEditorResources>(HDUtils.GetHDRenderPipelinePath() + "Editor/RenderPipelineResources/HDRenderPipelineEditorResources.asset");
        return m_RenderPipelineEditorResources;
    }
    set { m_RenderPipelineEditorResources = value; }
}
#endif
// To be able to turn on/off FrameSettings properties at runtime for debugging purposes
// without affecting the originals, a runtime copy is used and any parametrization is
// done on these serialized frame settings.
// NOTE(review): a previous comment referenced "m_ActiveFrameSettings", which does not
// exist here — presumably the runtime copy lives elsewhere; verify against callers.
[SerializeField]
FrameSettings m_RenderingPathDefaultCameraFrameSettings = FrameSettings.defaultCamera;
[SerializeField]
FrameSettings m_RenderingPathDefaultBakedOrCustomReflectionFrameSettings = FrameSettings.defaultCustomOrBakeReflectionProbe;
[SerializeField]
FrameSettings m_RenderingPathDefaultRealtimeReflectionFrameSettings = FrameSettings.defaultRealtimeReflectionProbe;
/// <summary>
/// Returns a writable reference to the serialized default <see cref="FrameSettings"/>
/// for the given rendering path, so callers can read or mutate the stored defaults
/// in place.
/// </summary>
/// <param name="type">Which rendering path's defaults to fetch.</param>
/// <returns>A reference to the matching serialized field.</returns>
/// <exception cref="ArgumentException">When <paramref name="type"/> is not a known value.</exception>
public ref FrameSettings GetDefaultFrameSettings(FrameSettingsRenderType type)
{
    switch(type)
    {
        case FrameSettingsRenderType.Camera:
            return ref m_RenderingPathDefaultCameraFrameSettings;
        case FrameSettingsRenderType.CustomOrBakedReflection:
            return ref m_RenderingPathDefaultBakedOrCustomReflectionFrameSettings;
        case FrameSettingsRenderType.RealtimeReflection:
            return ref m_RenderingPathDefaultRealtimeReflectionFrameSettings;
        default:
            throw new ArgumentException("Unknown FrameSettingsRenderType");
    }
}
// Presumably a debugging toggle for recording frame-settings history — confirm with consumers.
public bool frameSettingsHistory { get; set; } = false;

/// <summary>
/// Parameters for the reflection system, derived from the current platform's
/// render pipeline settings (a fresh value is built on every access).
/// </summary>
public ReflectionSystemParameters reflectionSystemParameters => new ReflectionSystemParameters
{
    maxPlanarReflectionProbePerCamera = currentPlatformRenderPipelineSettings.lightLoopSettings.planarReflectionProbeCacheSize,
    maxActivePlanarReflectionProbe = 512,
    planarReflectionProbeSize = (int)currentPlatformRenderPipelineSettings.lightLoopSettings.planarReflectionTextureSize,
    maxActiveReflectionProbe = 512,
    reflectionProbeSize = (int)currentPlatformRenderPipelineSettings.lightLoopSettings.reflectionCubemapSize
};
// Note: having m_RenderPipelineSettings serializable allows it to be modified in editor.
// And having it private with a getter-only property forces a copy on access.
// As there is no setter, it thus cannot be modified by code.
// This ensures immutability at runtime.
// Store the various RenderPipelineSettings for each platform (for now only one)
[SerializeField, FormerlySerializedAs("renderPipelineSettings")]
RenderPipelineSettings m_RenderPipelineSettings = RenderPipelineSettings.@default;

// Return the currently used RenderPipelineSettings (i.e. for the current platform)
public RenderPipelineSettings currentPlatformRenderPipelineSettings => m_RenderPipelineSettings;

public bool allowShaderVariantStripping = true;
public bool enableSRPBatcher = true;
public ShaderVariantLogLevel shaderVariantLogLevel = ShaderVariantLogLevel.Disabled;

[SerializeField]
[Obsolete("Use diffusionProfileSettingsList instead")]
public DiffusionProfileSettings diffusionProfileSettings;
[SerializeField]
public DiffusionProfileSettings[] diffusionProfileSettingsList = new DiffusionProfileSettings[0];
// HDRP uses GetRenderingLayerMaskNames to build its light-linking system, so the
// light-layer names are defined here.
[System.NonSerialized]
string[] m_RenderingLayerNames = null;

// Lazily built, cached list of the 32 rendering-layer display names.
string[] renderingLayerNames
{
    get
    {
        if (m_RenderingLayerNames == null)
        {
            var names = new string[32];
            // By design slot 0 cannot be removed, only renamed.
            names[0] = "Light Layer default";
            // Only 7 configurable layers are supported in addition to the default.
            for (int i = 1; i < 8; ++i)
                names[i] = string.Format("Light Layer {0}", i);
            // Remaining slots are unused.
            for (int i = 8; i < names.Length; ++i)
                names[i] = string.Format("Unused {0}", i);
            m_RenderingLayerNames = names;
        }
        return m_RenderingLayerNames;
    }
}
// Exposes the cached light-layer names through the render-pipeline override point.
public override string[] renderingLayerMaskNames => renderingLayerNames;
// Default lit shader used when a material has no explicit shader assigned.
public override Shader defaultShader => m_RenderPipelineResources.shaders.defaultPS;
#if UNITY_EDITOR
// Default diffuse material from editor resources, or null when they are missing.
// NOTE: explicit '== null' is kept on purpose — UnityEngine.Object overloads the
// operator, so '?.' would not detect a destroyed/missing asset.
public override Material defaultMaterial =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.materials.defaultDiffuseMat;
// call to GetAutodeskInteractiveShaderXXX are only from within editor
// Autodesk Interactive (opaque) shader graph, or null when editor resources are missing.
public override Shader autodeskInteractiveShader =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.shaderGraphs.autodeskInteractive;
// Autodesk Interactive (transparent) shader graph, or null when editor resources are missing.
public override Shader autodeskInteractiveTransparentShader =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.shaderGraphs.autodeskInteractiveTransparent;
// Autodesk Interactive (alpha-masked) shader graph, or null when editor resources are missing.
public override Shader autodeskInteractiveMaskedShader =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.shaderGraphs.autodeskInteractiveMasked;
// Terrain detail lit shader, or null when editor resources are missing.
public override Shader terrainDetailLitShader =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.shaders.terrainDetailLitShader;
// Terrain detail grass shader, or null when editor resources are missing.
public override Shader terrainDetailGrassShader =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.shaders.terrainDetailGrassShader;
// Terrain detail grass billboard shader, or null when editor resources are missing.
public override Shader terrainDetailGrassBillboardShader =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.shaders.terrainDetailGrassBillboardShader;
// Note: This function is HD specific
// Default decal material from editor resources, or null when they are missing.
public Material GetDefaultDecalMaterial() =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.materials.defaultDecalMat;
// Note: This function is HD specific
// Default mirror material from editor resources, or null when they are missing.
public Material GetDefaultMirrorMaterial() =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.materials.defaultMirrorMat;
// Default terrain material from editor resources, or null when they are missing.
public override Material defaultTerrainMaterial =>
    renderPipelineEditorResources == null ? null : renderPipelineEditorResources.materials.defaultTerrainMat;
// Array structure that allow us to manipulate the set of defines that the HD render pipeline needs
List<string> defineArray = new List<string>();

// Ensures defineMacroValue is present in defineArray when flagValue is true and
// absent when it is false. Returns true only when the list was actually changed.
bool UpdateDefineList(bool flagValue, string defineMacroValue)
{
    bool alreadyPresent = defineArray.Contains(defineMacroValue);

    // Nothing to do when the list is already in the requested state.
    if (flagValue == alreadyPresent)
        return false;

    if (flagValue)
        defineArray.Add(defineMacroValue);
    else
        defineArray.Remove(defineMacroValue);

    return true;
}
// This function allows us to raise or remove some preprocessing defines based on the render pipeline settings
// Side effect: rewrites the Standalone scripting-define symbols in PlayerSettings.
// It is a no-op unless the build defines REALTIME_RAYTRACING_SUPPORT.
public void EvaluateSettings()
{
#if REALTIME_RAYTRACING_SUPPORT
    // Grab the current set of defines and split them
    string currentDefineList = UnityEditor.PlayerSettings.GetScriptingDefineSymbolsForGroup(UnityEditor.BuildTargetGroup.Standalone);
    defineArray.Clear();
    defineArray.AddRange(currentDefineList.Split(';'));

    // Update all the individual defines
    bool needUpdate = false;
    needUpdate |= UpdateDefineList(currentPlatformRenderPipelineSettings.supportRayTracing, "ENABLE_RAYTRACING");

    // Only set if it changed (SetScriptingDefineSymbolsForGroup triggers a recompile).
    if (needUpdate)
    {
        UnityEditor.PlayerSettings.SetScriptingDefineSymbolsForGroup(UnityEditor.BuildTargetGroup.Standalone, string.Join(";", defineArray.ToArray()));
    }
#endif
}
#endif
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Routing.Constraints;
using Microsoft.AspNetCore.Routing.Template;
using Moq;
using Xunit;
namespace Microsoft.AspNetCore.Routing.Patterns
{
/// <summary>
/// Tests for <see cref="RoutePatternFactory"/>: merging of explicit defaults and
/// constraints into parsed templates, conversion of strings/objects into parameter
/// policies, defensive copying of caller-supplied segment and policy arrays, and
/// required-value validation. Expected exception messages are pinned verbatim.
/// </summary>
public class RoutePatternFactoryTest
{
    [Fact]
    public void Pattern_MergesDefaultValues()
    {
        // Arrange
        var template = "{a}/{b}/{c=19}";
        var defaults = new { a = "15", b = 17 };
        var constraints = new { };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert
        Assert.Equal("15", actual.GetParameter("a").Default);
        Assert.Equal(17, actual.GetParameter("b").Default);
        Assert.Equal("19", actual.GetParameter("c").Default);

        Assert.Collection(
            actual.Defaults.OrderBy(kvp => kvp.Key),
            kvp => { Assert.Equal("a", kvp.Key); Assert.Equal("15", kvp.Value); },
            kvp => { Assert.Equal("b", kvp.Key); Assert.Equal(17, kvp.Value); },
            kvp => { Assert.Equal("c", kvp.Key); Assert.Equal("19", kvp.Value); });
    }

    [Fact]
    public void Pattern_ExtraDefaultValues()
    {
        // Arrange
        var template = "{a}/{b}/{c}";
        var defaults = new { d = "15", e = 17 };
        var constraints = new { };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert: defaults that match no parameter are still carried in Defaults.
        Assert.Collection(
            actual.Defaults.OrderBy(kvp => kvp.Key),
            kvp => { Assert.Equal("d", kvp.Key); Assert.Equal("15", kvp.Value); },
            kvp => { Assert.Equal("e", kvp.Key); Assert.Equal(17, kvp.Value); });
    }

    [Fact]
    public void Pattern_DifferentDuplicateDefaultValue_Throws()
    {
        // Arrange: inline default '13' conflicts with explicit default '15'.
        var template = "{a=13}/{b}/{c}";
        var defaults = new { a = "15", };
        var constraints = new { };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var ex = Assert.Throws<InvalidOperationException>(() => RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments));

        // Assert
        Assert.Equal(
            "The route parameter 'a' has both an inline default value and an explicit default " +
            "value specified. A route parameter cannot contain an inline default value when a " +
            "default value is specified explicitly. Consider removing one of them.",
            ex.Message);
    }

    [Fact]
    public void Pattern_SameDuplicateDefaultValue()
    {
        // Arrange: inline and explicit defaults agree, so no conflict is raised.
        var template = "{a=13}/{b}/{c}";
        var defaults = new { a = "13", };
        var constraints = new { };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert
        Assert.Collection(
            actual.Defaults,
            kvp => { Assert.Equal("a", kvp.Key); Assert.Equal("13", kvp.Value); });
    }

    [Fact]
    public void Pattern_OptionalParameterDefaultValue_Throws()
    {
        // Arrange
        var template = "{a}/{b}/{c?}";
        var defaults = new { c = "15", };
        var constraints = new { };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var ex = Assert.Throws<InvalidOperationException>(() => RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments));

        // Assert
        Assert.Equal(
            "An optional parameter cannot have default value.",
            ex.Message);
    }

    [Fact]
    public void Pattern_MergesConstraints()
    {
        // Arrange: 'a' has both an inline constraint and an explicit one.
        var template = "{a:int}/{b}/{c}";
        var defaults = new { };
        var constraints = new { a = new RegexRouteConstraint("foo"), b = new RegexRouteConstraint("bar") };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert
        Assert.Collection(
            actual.GetParameter("a").ParameterPolicies,
            c => Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy),
            c => Assert.Equal("int", c.Content));
        Assert.Collection(
            actual.GetParameter("b").ParameterPolicies,
            c => Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy));

        Assert.Collection(
            actual.ParameterPolicies.OrderBy(kvp => kvp.Key),
            kvp =>
            {
                Assert.Equal("a", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy),
                    c => Assert.Equal("int", c.Content));
            },
            kvp =>
            {
                Assert.Equal("b", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy));
            });
    }

    [Fact]
    public void Pattern_ExtraConstraints()
    {
        // Arrange
        var template = "{a}/{b}/{c}";
        var defaults = new { };
        var constraints = new { d = new RegexRouteConstraint("foo"), e = new RegexRouteConstraint("bar") };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert
        Assert.Collection(
            actual.ParameterPolicies.OrderBy(kvp => kvp.Key),
            kvp =>
            {
                Assert.Equal("d", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy));
            },
            kvp =>
            {
                Assert.Equal("e", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy));
            });
    }

    [Fact]
    public void Pattern_ExtraConstraints_MultipleConstraintsForKey()
    {
        // Arrange: an object[] mixes IRouteConstraint instances with a raw string,
        // which should be wrapped as an anchored regex ("^(baz)$").
        var template = "{a}/{b}/{c}";
        var defaults = new { };
        var constraints = new { d = new object[] { new RegexRouteConstraint("foo"), new RegexRouteConstraint("bar"), "baz" } };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert
        Assert.Collection(
            actual.ParameterPolicies.OrderBy(kvp => kvp.Key),
            kvp =>
            {
                Assert.Equal("d", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.Equal("foo", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()),
                    c => Assert.Equal("bar", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()),
                    c => Assert.Equal("^(baz)$", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()));
            });
    }

    [Fact]
    public void Pattern_ExtraConstraints_MergeMultipleConstraintsForKey()
    {
        // Arrange
        var template = "{a:int}/{b}/{c:int}";
        var defaults = new { };
        var constraints = new { b = "fizz", c = new object[] { new RegexRouteConstraint("foo"), new RegexRouteConstraint("bar"), "baz" } };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert: explicit constraints come first, inline ones ("int") last.
        Assert.Collection(
            actual.ParameterPolicies.OrderBy(kvp => kvp.Key),
            kvp =>
            {
                Assert.Equal("a", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.Equal("int", c.Content));
            },
            kvp =>
            {
                Assert.Equal("b", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.Equal("^(fizz)$", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()));
            },
            kvp =>
            {
                Assert.Equal("c", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.Equal("foo", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()),
                    c => Assert.Equal("bar", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()),
                    c => Assert.Equal("^(baz)$", Assert.IsType<RegexRouteConstraint>(c.ParameterPolicy).Constraint.ToString()),
                    c => Assert.Equal("int", c.Content));
            });
    }

    [Fact]
    public void Pattern_ExtraConstraints_NestedArray_Throws()
    {
        // Arrange: nested arrays inside a constraint list are not supported.
        var template = "{a}/{b}/{c:int}";
        var defaults = new { };
        var constraints = new { c = new object[] { new object[0] } };

        var original = RoutePatternFactory.Parse(template);

        // Act & Assert
        Assert.Throws<InvalidOperationException>(() =>
        {
            RoutePatternFactory.Pattern(
                original.RawText,
                defaults,
                constraints,
                original.PathSegments);
        });
    }

    [Fact]
    public void Pattern_ExtraConstraints_RouteConstraint()
    {
        // Arrange
        var template = "{a}/{b}/{c}";
        var defaults = new { };
        var constraints = new { d = Mock.Of<IRouteConstraint>(), e = Mock.Of<IRouteConstraint>(), };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert
        Assert.Collection(
            actual.ParameterPolicies.OrderBy(kvp => kvp.Key),
            kvp =>
            {
                Assert.Equal("d", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.NotNull(c.ParameterPolicy));
            },
            kvp =>
            {
                Assert.Equal("e", kvp.Key);
                Assert.Collection(
                    kvp.Value,
                    c => Assert.NotNull(c.ParameterPolicy));
            });
    }

    [Fact]
    public void Pattern_CreatesConstraintFromString()
    {
        // Arrange
        var template = "{a}/{b}/{c}";
        var defaults = new { };
        var constraints = new { d = "foo", };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var actual = RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments);

        // Assert: string constraints become anchored regexes.
        Assert.Collection(
            actual.ParameterPolicies.OrderBy(kvp => kvp.Key),
            kvp =>
            {
                Assert.Equal("d", kvp.Key);
                var regex = Assert.IsType<RegexRouteConstraint>(Assert.Single(kvp.Value).ParameterPolicy);
                Assert.Equal("^(foo)$", regex.Constraint.ToString());
            });
    }

    [Fact]
    public void Pattern_InvalidConstraintTypeThrows()
    {
        // Arrange: 17 is neither a string nor an IRouteConstraint.
        var template = "{a}/{b}/{c}";
        var defaults = new { };
        var constraints = new { d = 17, };

        var original = RoutePatternFactory.Parse(template);

        // Act
        var ex = Assert.Throws<InvalidOperationException>(() => RoutePatternFactory.Pattern(
            original.RawText,
            defaults,
            constraints,
            original.PathSegments));

        // Assert
        Assert.Equal(
            $"Invalid constraint '17'. A constraint must be of type 'string' or '{typeof(IRouteConstraint)}'.",
            ex.Message);
    }

    [Fact]
    public void Pattern_ArrayOfSegments_ShouldMakeCopyOfArrayOfSegments()
    {
        // Arrange
        var literalPartA = RoutePatternFactory.LiteralPart("A");
        var paramPartB = RoutePatternFactory.ParameterPart("B");
        var paramPartC = RoutePatternFactory.ParameterPart("C");
        var paramPartD = RoutePatternFactory.ParameterPart("D");
        var segments = new[]
        {
            RoutePatternFactory.Segment(literalPartA, paramPartB),
            RoutePatternFactory.Segment(paramPartC, literalPartA),
            RoutePatternFactory.Segment(paramPartD),
            RoutePatternFactory.Segment(literalPartA)
        };

        // Act: mutating the source array afterwards must not affect the pattern.
        var actual = RoutePatternFactory.Pattern(segments);
        segments[1] = RoutePatternFactory.Segment(RoutePatternFactory.ParameterPart("E"));
        Array.Resize(ref segments, 2);

        // Assert
        Assert.Equal(3, actual.Parameters.Count);
        Assert.Same(paramPartB, actual.Parameters[0]);
        Assert.Same(paramPartC, actual.Parameters[1]);
        Assert.Same(paramPartD, actual.Parameters[2]);
    }

    [Fact]
    public void Pattern_RawTextAndArrayOfSegments_ShouldMakeCopyOfArrayOfSegments()
    {
        // Arrange
        var rawText = "raw";
        var literalPartA = RoutePatternFactory.LiteralPart("A");
        var paramPartB = RoutePatternFactory.ParameterPart("B");
        var paramPartC = RoutePatternFactory.ParameterPart("C");
        var paramPartD = RoutePatternFactory.ParameterPart("D");
        var segments = new[]
        {
            RoutePatternFactory.Segment(literalPartA, paramPartB),
            RoutePatternFactory.Segment(paramPartC, literalPartA),
            RoutePatternFactory.Segment(paramPartD),
            RoutePatternFactory.Segment(literalPartA)
        };

        // Act: mutating the source array afterwards must not affect the pattern.
        var actual = RoutePatternFactory.Pattern(rawText, segments);
        segments[1] = RoutePatternFactory.Segment(RoutePatternFactory.ParameterPart("E"));
        Array.Resize(ref segments, 2);

        // Assert
        Assert.Equal(3, actual.Parameters.Count);
        Assert.Same(paramPartB, actual.Parameters[0]);
        Assert.Same(paramPartC, actual.Parameters[1]);
        Assert.Same(paramPartD, actual.Parameters[2]);
    }

    [Fact]
    public void Pattern_DefaultsAndParameterPoliciesAndArrayOfSegments_ShouldMakeCopyOfArrayOfSegments()
    {
        // Arrange
        object defaults = new { B = 12, C = 4 };
        object parameterPolicies = null;
        var literalPartA = RoutePatternFactory.LiteralPart("A");
        var paramPartB = RoutePatternFactory.ParameterPart("B");
        var paramPartC = RoutePatternFactory.ParameterPart("C");
        var paramPartD = RoutePatternFactory.ParameterPart("D");
        var segments = new[]
        {
            RoutePatternFactory.Segment(literalPartA, paramPartB),
            RoutePatternFactory.Segment(paramPartC, literalPartA),
            RoutePatternFactory.Segment(paramPartD),
            RoutePatternFactory.Segment(literalPartA)
        };

        // Act
        var actual = RoutePatternFactory.Pattern(defaults, parameterPolicies, segments);
        segments[1] = RoutePatternFactory.Segment(RoutePatternFactory.ParameterPart("E"));
        Array.Resize(ref segments, 2);

        // Assert: parts that received a default were replaced (NotSame), while the
        // untouched part D is reused as-is (Same). Originals stay unmodified.
        Assert.Equal(3, actual.Parameters.Count);
        Assert.Equal(paramPartB.Name, actual.Parameters[0].Name);
        Assert.Equal(12, actual.Parameters[0].Default);
        Assert.Null(paramPartB.Default);
        Assert.NotSame(paramPartB, actual.Parameters[0]);
        Assert.Equal(paramPartC.Name, actual.Parameters[1].Name);
        Assert.Equal(4, actual.Parameters[1].Default);
        Assert.NotSame(paramPartC, actual.Parameters[1]);
        Assert.Null(paramPartC.Default);
        Assert.Equal(paramPartD.Name, actual.Parameters[2].Name);
        Assert.Null(actual.Parameters[2].Default);
        Assert.Same(paramPartD, actual.Parameters[2]);
        Assert.Null(paramPartD.Default);
    }

    [Fact]
    public void Pattern_RawTextAndDefaultsAndParameterPoliciesAndArrayOfSegments_ShouldMakeCopyOfArrayOfSegments()
    {
        // Arrange
        var rawText = "raw";
        object defaults = new { B = 12, C = 4 };
        object parameterPolicies = null;
        var literalPartA = RoutePatternFactory.LiteralPart("A");
        var paramPartB = RoutePatternFactory.ParameterPart("B");
        var paramPartC = RoutePatternFactory.ParameterPart("C");
        var paramPartD = RoutePatternFactory.ParameterPart("D");
        var segments = new[]
        {
            RoutePatternFactory.Segment(literalPartA, paramPartB),
            RoutePatternFactory.Segment(paramPartC, literalPartA),
            RoutePatternFactory.Segment(paramPartD),
            RoutePatternFactory.Segment(literalPartA)
        };

        // Act
        var actual = RoutePatternFactory.Pattern(rawText, defaults, parameterPolicies, segments);
        segments[1] = RoutePatternFactory.Segment(RoutePatternFactory.ParameterPart("E"));
        Array.Resize(ref segments, 2);

        // Assert
        Assert.Equal(3, actual.Parameters.Count);
        Assert.Equal(paramPartB.Name, actual.Parameters[0].Name);
        Assert.Equal(12, actual.Parameters[0].Default);
        Assert.Null(paramPartB.Default);
        Assert.NotSame(paramPartB, actual.Parameters[0]);
        Assert.Equal(paramPartC.Name, actual.Parameters[1].Name);
        Assert.Equal(4, actual.Parameters[1].Default);
        Assert.NotSame(paramPartC, actual.Parameters[1]);
        Assert.Null(paramPartC.Default);
        Assert.Equal(paramPartD.Name, actual.Parameters[2].Name);
        Assert.Null(actual.Parameters[2].Default);
        Assert.Same(paramPartD, actual.Parameters[2]);
        Assert.Null(paramPartD.Default);
    }

    [Fact]
    public void Parse_WithRequiredValues()
    {
        // Arrange: 'area' is not a route parameter but matches a default.
        var template = "{controller=Home}/{action=Index}/{id?}";
        var defaults = new { area = "Admin", };
        var policies = new { };
        var requiredValues = new { area = "Admin", controller = "Store", action = "Index", };

        // Act
        var action = RoutePatternFactory.Parse(template, defaults, policies, requiredValues);

        // Assert
        Assert.Collection(
            action.RequiredValues.OrderBy(kvp => kvp.Key),
            kvp => { Assert.Equal("action", kvp.Key); Assert.Equal("Index", kvp.Value); },
            kvp => { Assert.Equal("area", kvp.Key); Assert.Equal("Admin", kvp.Value); },
            kvp => { Assert.Equal("controller", kvp.Key); Assert.Equal("Store", kvp.Value); });
    }

    [Fact]
    public void Parse_WithRequiredValues_AllowsNullRequiredValue()
    {
        // Arrange
        var template = "{controller=Home}/{action=Index}/{id?}";
        var defaults = new { };
        var policies = new { };
        var requiredValues = new { area = (string)null, controller = "Store", action = "Index", };

        // Act
        var action = RoutePatternFactory.Parse(template, defaults, policies, requiredValues);

        // Assert
        Assert.Collection(
            action.RequiredValues.OrderBy(kvp => kvp.Key),
            kvp => { Assert.Equal("action", kvp.Key); Assert.Equal("Index", kvp.Value); },
            kvp => { Assert.Equal("area", kvp.Key); Assert.Null(kvp.Value); },
            kvp => { Assert.Equal("controller", kvp.Key); Assert.Equal("Store", kvp.Value); });
    }

    [Fact]
    public void Parse_WithRequiredValues_AllowsEmptyRequiredValue()
    {
        // Arrange
        var template = "{controller=Home}/{action=Index}/{id?}";
        var defaults = new { };
        var policies = new { };
        var requiredValues = new { area = "", controller = "Store", action = "Index", };

        // Act
        var action = RoutePatternFactory.Parse(template, defaults, policies, requiredValues);

        // Assert
        Assert.Collection(
            action.RequiredValues.OrderBy(kvp => kvp.Key),
            kvp => { Assert.Equal("action", kvp.Key); Assert.Equal("Index", kvp.Value); },
            kvp => { Assert.Equal("area", kvp.Key); Assert.Equal("", kvp.Value); },
            kvp => { Assert.Equal("controller", kvp.Key); Assert.Equal("Store", kvp.Value); });
    }

    [Fact]
    public void Parse_WithRequiredValues_ThrowsForNonParameterNonDefault()
    {
        // Arrange: unlike Parse_WithRequiredValues above, there is no matching
        // default for 'area', so the required value cannot be satisfied.
        var template = "{controller=Home}/{action=Index}/{id?}";
        var defaults = new { };
        var policies = new { };
        var requiredValues = new { area = "Admin", controller = "Store", action = "Index", };

        // Act
        var exception = Assert.Throws<InvalidOperationException>(() =>
        {
            var action = RoutePatternFactory.Parse(template, defaults, policies, requiredValues);
        });

        // Assert
        Assert.Equal(
            "No corresponding parameter or default value could be found for the required value " +
            "'area=Admin'. A non-null required value must correspond to a route parameter or the " +
            "route pattern must have a matching default value.",
            exception.Message);
    }

    [Fact]
    public void ParameterPart_ParameterNameAndDefaultAndParameterKindAndArrayOfParameterPolicies_ShouldMakeCopyOfParameterPolicies()
    {
        // Arrange (going through hoops to get an array of RoutePatternParameterPolicyReference)
        const string name = "Id";
        var defaults = new { a = "13", };
        var x = new InlineConstraint("x");
        var y = new InlineConstraint("y");
        var z = new InlineConstraint("z");
        var constraints = new[] { x, y, z };
        var templatePart = TemplatePart.CreateParameter("t", false, false, null, constraints);
        var routePatternParameterPart = (RoutePatternParameterPart) templatePart.ToRoutePatternPart();
        var policies = routePatternParameterPart.ParameterPolicies.ToArray();

        // Act: mutating the policies array afterwards must not affect the part.
        var parameterPart = RoutePatternFactory.ParameterPart(name, defaults, RoutePatternParameterKind.Standard, policies);
        policies[0] = null;
        Array.Resize(ref policies, 2);

        // Assert
        Assert.NotNull(parameterPart.ParameterPolicies);
        Assert.Equal(3, parameterPart.ParameterPolicies.Count);
        Assert.NotNull(parameterPart.ParameterPolicies[0]);
        Assert.NotNull(parameterPart.ParameterPolicies[1]);
        Assert.NotNull(parameterPart.ParameterPolicies[2]);
    }

    [Fact]
    public void ParameterPart_ParameterNameAndDefaultAndParameterKindAndEnumerableOfParameterPolicies_ShouldMakeCopyOfParameterPolicies()
    {
        // Arrange (going through hoops to get an enumerable of RoutePatternParameterPolicyReference)
        const string name = "Id";
        var defaults = new { a = "13", };
        var x = new InlineConstraint("x");
        var y = new InlineConstraint("y");
        var z = new InlineConstraint("z");
        var constraints = new[] { x, y, z };
        var templatePart = TemplatePart.CreateParameter("t", false, false, null, constraints);
        var routePatternParameterPart = (RoutePatternParameterPart)templatePart.ToRoutePatternPart();
        var policies = routePatternParameterPart.ParameterPolicies.ToList();

        // Act: mutating the policies list afterwards must not affect the part.
        var parameterPart = RoutePatternFactory.ParameterPart(name, defaults, RoutePatternParameterKind.Standard, policies);
        policies[0] = null;
        policies.RemoveAt(1);

        // Assert
        Assert.NotNull(parameterPart.ParameterPolicies);
        Assert.Equal(3, parameterPart.ParameterPolicies.Count);
        Assert.NotNull(parameterPart.ParameterPolicies[0]);
        Assert.NotNull(parameterPart.ParameterPolicies[1]);
        Assert.NotNull(parameterPart.ParameterPolicies[2]);
    }

    [Fact]
    public void Segment_EnumerableOfParts()
    {
        // Arrange
        var paramPartB = RoutePatternFactory.ParameterPart("B");
        var paramPartC = RoutePatternFactory.ParameterPart("C");
        var paramPartD = RoutePatternFactory.ParameterPart("D");
        var parts = new[] { paramPartB, paramPartC, paramPartD };

        // Act: mutating the source afterwards must not affect the segment.
        var actual = RoutePatternFactory.Segment((IEnumerable<RoutePatternParameterPart>) parts);
        parts[1] = RoutePatternFactory.ParameterPart("E");
        Array.Resize(ref parts, 2);

        // Assert
        Assert.Equal(3, actual.Parts.Count);
        Assert.Same(paramPartB, actual.Parts[0]);
        Assert.Same(paramPartC, actual.Parts[1]);
        Assert.Same(paramPartD, actual.Parts[2]);
    }

    [Fact]
    public void Segment_ArrayOfParts()
    {
        // Arrange
        var paramPartB = RoutePatternFactory.ParameterPart("B");
        var paramPartC = RoutePatternFactory.ParameterPart("C");
        var paramPartD = RoutePatternFactory.ParameterPart("D");
        var parts = new[] { paramPartB, paramPartC, paramPartD };

        // Act: mutating the source afterwards must not affect the segment.
        var actual = RoutePatternFactory.Segment(parts);
        parts[1] = RoutePatternFactory.ParameterPart("E");
        Array.Resize(ref parts, 2);

        // Assert
        Assert.Equal(3, actual.Parts.Count);
        Assert.Same(paramPartB, actual.Parts[0]);
        Assert.Same(paramPartC, actual.Parts[1]);
        Assert.Same(paramPartD, actual.Parts[2]);
    }
}
}
| |
// ***********************************************************************
// Copyright (c) 2015 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using NUnit.Options;
using System.Text;
namespace NUnit.Common
{
/// <summary>
/// CommandLineOptions is the base class for the specific option classes
/// used for nunit3-console and nunitlite. It encapsulates all common
/// settings and features of both. This is done to ensure that common
/// features remain common and for the convenience of having the code
/// in a common location. The class inherits from the Mono
/// Options OptionSet class and provides a central location
/// for defining and parsing options.
/// </summary>
public class CommandLineOptions : OptionSet
{
// Default --work directory: the process' current directory at startup.
private static readonly string DEFAULT_WORK_DIRECTORY =
    Directory.GetCurrentDirectory();

private bool validated;  // set once Validate() has run, so validation happens at most once
private bool noresult;   // suppresses result output specifications; presumably set by a --noresult option defined outside this view — TODO confirm
#region Constructors
// Currently used only by tests
/// <summary>
/// Test-only constructor: takes an explicit provider for the default option
/// values instead of relying on the environment.
/// </summary>
/// <param name="defaultOptionsProvider">Supplies environment-derived defaults (e.g. TeamCity detection). Must not be null.</param>
/// <param name="requireInputFile">Whether an input file must be supplied on the command line.</param>
/// <param name="args">Raw command-line arguments; may be null to skip parsing.</param>
internal CommandLineOptions(IDefaultOptionsProvider defaultOptionsProvider, bool requireInputFile, params string[] args)
{
    // Use nameof so the exception's ParamName stays correct across renames.
    if (defaultOptionsProvider == null) throw new ArgumentNullException(nameof(defaultOptionsProvider));

    // Apply default options
    TeamCity = defaultOptionsProvider.TeamCity;

    ConfigureOptions(requireInputFile);
    if (args != null)
        Parse(PreParse(args));
}
/// <summary>
/// Standard constructor: registers all options, then parses the supplied
/// arguments (expanding any @file references) when args is non-null.
/// </summary>
public CommandLineOptions(bool requireInputFile, params string[] args)
{
    ConfigureOptions(requireInputFile);

    if (args != null)
    {
        Parse(PreParse(args));
    }
}
// Current @file expansion depth; guards against runaway/self-referential files.
private int _nesting = 0;

/// <summary>
/// Expands "@filename" arguments by splicing in the arguments read from the
/// named file (recursively, up to 3 levels deep). Problems are reported via
/// ErrorMessages rather than thrown; the offending argument is skipped.
/// </summary>
internal IEnumerable<string> PreParse(IEnumerable<string> args)
{
    if (++_nesting > 3)
    {
        ErrorMessages.Add("@ nesting exceeds maximum depth of 3.");
        --_nesting;
        // Return the args unexpanded; the depth error has already been recorded.
        return args;
    }

    var listArgs = new List<string>();

    foreach (var arg in args)
    {
        // Ordinary argument: pass through unchanged.
        if (arg.Length == 0 || arg[0] != '@')
        {
            listArgs.Add(arg);
            continue;
        }

        // A bare "@" has no file name to expand.
        if (arg.Length == 1)
        {
            ErrorMessages.Add("You must include a file name after @.");
            continue;
        }

        var filename = arg.Substring(1);
        if (!File.Exists(filename))
        {
            ErrorMessages.Add("The file \"" + filename + "\" was not found.");
            continue;
        }

        try
        {
            // Recurse so files may themselves contain @file references.
            listArgs.AddRange(PreParse(GetArgsFromFile(filename)));
        }
        catch (IOException ex)
        {
            ErrorMessages.Add("Error reading \"" + filename + "\": " + ex.Message);
        }
    }

    --_nesting;
    return listArgs;
}
// Tokenizer for a command line: each match is either a double-quoted token
// (group 2, with "" as an escaped quote) or a bare non-whitespace token
// (group 4), consumed contiguously (\G) with trailing spaces.
private static readonly Regex ArgsRegex = new Regex(@"\G(""((""""|[^""])+)""|(\S+)) *", RegexOptions.Compiled | RegexOptions.CultureInvariant);

// Get args from a string of args; collapses the "" escape back to a single quote.
internal static IEnumerable<string> GetArgs(string commandLine)
{
    foreach (Match m in ArgsRegex.Matches(commandLine))
        yield return Regex.Replace(m.Groups[2].Success ? m.Groups[2].Value : m.Groups[4].Value, @"""""", @"""");
}
// Get args from an included file: keep non-blank, non-comment ('#') lines,
// splice them into a single command line, and tokenize with GetArgs.
private static IEnumerable<string> GetArgsFromFile(string filename)
{
    var usableLines = File.ReadAllLines(filename)
        .Where(line => !string.IsNullOrEmpty(line) && line[0] != '#' && line.Trim().Length > 0);

    return GetArgs(string.Join(" ", usableLines));
}
#endregion
#region Properties
/// <summary>
/// Indicates whether an input file is required on the command-line.
/// Note that multiple input files are never allowed.
/// </summary>
public bool InputFileRequired { get; set; }

// Action to Perform
public bool Explore { get; private set; }
public bool ShowHelp { get; private set; }
public bool ShowVersion { get; private set; }

// Select tests
public string InputFile { get; private set; }

public IList<string> TestList { get; } = new List<string>();

public IDictionary<string, string> TestParameters { get; } = new Dictionary<string, string>();

public string WhereClause { get; private set; }
public bool WhereClauseSpecified { get { return WhereClause != null; } }

// -1 is the "not specified" sentinel for the numeric options below.
public int DefaultTimeout { get; private set; } = -1;
public bool DefaultTimeoutSpecified { get { return DefaultTimeout >= 0; } }

public int RandomSeed { get; private set; } = -1;
public bool RandomSeedSpecified { get { return RandomSeed >= 0; } }

public string DefaultTestNamePattern { get; private set; }

public int NumberOfTestWorkers { get; private set; } = -1;
public bool NumberOfTestWorkersSpecified { get { return NumberOfTestWorkers >= 0; } }

public bool StopOnError { get; private set; }

public bool WaitBeforeExit { get; private set; }

// Output Control
public bool NoHeader { get; private set; }
public bool NoColor { get; private set; }
public bool TeamCity { get; private set; }

public string OutFile { get; private set; }
public bool OutFileSpecified { get { return OutFile != null; } }

public string ErrFile { get; private set; }
public bool ErrFileSpecified { get { return ErrFile != null; } }

public string DisplayTestLabels { get; private set; }

// Backing field is null until --work is given; WorkDirectory falls back to the default.
private string workDirectory = null;
public string WorkDirectory
{
    get { return workDirectory ?? DEFAULT_WORK_DIRECTORY; }
}
public bool WorkDirectorySpecified { get { return workDirectory != null; } }

public string InternalTraceLevel { get; private set; }
public bool InternalTraceLevelSpecified { get { return InternalTraceLevel != null; } }

private List<OutputSpecification> resultOutputSpecifications = new List<OutputSpecification>();
// Result output specs; empty when results are suppressed, and defaulted to
// "TestResult.xml" when none were given explicitly.
public IList<OutputSpecification> ResultOutputSpecifications
{
    get
    {
        if (noresult)
            return new OutputSpecification[0];

        if (resultOutputSpecifications.Count == 0)
            resultOutputSpecifications.Add(new OutputSpecification("TestResult.xml"));

        return resultOutputSpecifications;
    }
}

public IList<OutputSpecification> ExploreOutputSpecifications { get; } = new List<OutputSpecification>();

// Error Processing
public IList<string> ErrorMessages { get; } = new List<string>();
#endregion
#region Public Methods
/// <summary>
/// Runs one-time cross-option validation (CheckOptionCombinations) and
/// reports whether parsing/validation produced any errors. Subsequent calls
/// only re-check ErrorMessages.
/// </summary>
public bool Validate()
{
    if (!validated)
    {
        CheckOptionCombinations();

        validated = true;
    }

    return ErrorMessages.Count == 0;
}
#endregion
#region Helper Methods
protected virtual void CheckOptionCombinations()
{
}
/// <summary>
/// Case is ignored when val is compared to validValues. When a match is found, the
/// returned value will be in the canonical case from validValues. Missing or
/// invalid values are reported via ErrorMessages and val is returned as given.
/// </summary>
protected string RequiredValue(string val, string option, params string[] validValues)
{
    if (string.IsNullOrEmpty(val))
        ErrorMessages.Add("Missing required value for option '" + option + "'.");

    // No restriction on the value: return it unchanged.
    if (validValues == null || validValues.Length == 0)
        return val;

    foreach (string valid in validValues)
        if (string.Equals(valid, val, StringComparison.OrdinalIgnoreCase))
            return valid;

    ErrorMessages.Add(string.Format("The value '{0}' is not valid for option '{1}'.", val, option));
    return val;
}
/// <summary>
/// Parses val as an int, reporting missing or malformed values through
/// ErrorMessages. Returns -1 when an error is reported; callers are expected
/// to ignore the value in that case.
/// </summary>
protected int RequiredInt(string val, string option)
{
    // We have to return something even though the value will
    // be ignored if an error is reported. The -1 value seems
    // like a safe bet in case it isn't ignored due to a bug.
    int result = -1;

    if (string.IsNullOrEmpty(val))
        ErrorMessages.Add("Missing required value for option '" + option + "'.");
    else
    {
        // NOTE: Don't replace this with TryParse or you'll break the CF build!
        try
        {
            result = int.Parse(val);
        }
        catch (Exception)
        {
            // BUG FIX: the message was previously added verbatim, so users saw
            // the raw '{0}'/'{1}' placeholders; it must go through string.Format.
            ErrorMessages.Add(string.Format(
                "An int value was expected for option '{0}' but a value of '{1}' was used", option, val));
        }
    }

    return result;
}
// Expands a possibly-relative path to an absolute path; null passes through.
private string ExpandToFullPath(string path)
{
    return path == null ? null : Path.GetFullPath(path);
}
/// <summary>
/// Registers every supported command-line option with its parsing callback.
/// </summary>
/// <param name="allowInputFile">Whether a bare (non-option) argument naming an
/// input file is accepted on the command line.</param>
protected virtual void ConfigureOptions(bool allowInputFile)
{
    InputFileRequired = allowInputFile;

    // NOTE: The order in which patterns are added
    // determines the display order for the help.

    // Select Tests
    this.Add("test=", "Comma-separated list of {NAMES} of tests to run or explore. This option may be repeated.",
        v => ((List<string>)TestList).AddRange(TestNameParser.Parse(RequiredValue(v, "--test"))));

    this.Add("testlist=", "File {PATH} containing a list of tests to run, one per line. This option may be repeated.",
        v =>
        {
            string testListFile = RequiredValue(v, "--testlist");

            var fullTestListPath = ExpandToFullPath(testListFile);

            if (!File.Exists(fullTestListPath))
                ErrorMessages.Add("Unable to locate file: " + testListFile);
            else
            {
                try
                {
                    using (var str = new FileStream(fullTestListPath, FileMode.Open))
                    using (var rdr = new StreamReader(str))
                    {
                        // One test name per line; blank lines and '#' comments are skipped.
                        while (!rdr.EndOfStream)
                        {
                            var line = rdr.ReadLine().Trim();

                            if (!string.IsNullOrEmpty(line) && line[0] != '#')
                                ((List<string>)TestList).Add(line);
                        }
                    }
                }
                catch (IOException)
                {
                    ErrorMessages.Add("Unable to read file: " + testListFile);
                }
            }
        });

    this.Add("where=", "Test selection {EXPRESSION} indicating what tests will be run. See description below.",
        v => WhereClause = RequiredValue(v, "--where"));

    this.Add("params|p=", "Define a test parameter.",
        v =>
        {
            string parameters = RequiredValue(v, "--params");

            // This can be changed without breaking backwards compatibility with frameworks.
            foreach (string param in parameters.Split(new[] { ';' }))
            {
                // Each entry must be NAME=VALUE with a non-empty value.
                int eq = param.IndexOf("=");

                if (eq == -1 || eq == param.Length - 1)
                {
                    ErrorMessages.Add("Invalid format for test parameter. Use NAME=VALUE.");
                }
                else
                {
                    string name = param.Substring(0, eq);
                    string val = param.Substring(eq + 1);

                    // Later duplicates of the same name overwrite earlier ones.
                    TestParameters[name] = val;
                }
            }
        });

#if !NETSTANDARD1_6
    this.Add("timeout=", "Set timeout for each test case in {MILLISECONDS}.",
        v => DefaultTimeout = RequiredInt(v, "--timeout"));
#endif

    this.Add("seed=", "Set the random {SEED} used to generate test cases.",
        v => RandomSeed = RequiredInt(v, "--seed"));

    this.Add("workers=", "Specify the {NUMBER} of worker threads to be used in running tests. If not specified, defaults to 2 or the number of processors, whichever is greater.",
        v => NumberOfTestWorkers = RequiredInt(v, "--workers"));

    this.Add("stoponerror", "Stop run immediately upon any test failure or error.",
        v => StopOnError = v != null);

    this.Add("wait", "Wait for input before closing console window.",
        v => WaitBeforeExit = v != null);

    // Output Control
    this.Add("work=", "{PATH} of the directory to use for output files. If not specified, defaults to the current directory.",
        v => workDirectory = RequiredValue(v, "--work"));

    this.Add("output|out=", "File {PATH} to contain text output from the tests.",
        v => OutFile = RequiredValue(v, "--output"));

    this.Add("err=", "File {PATH} to contain error output from the tests.",
        v => ErrFile = RequiredValue(v, "--err"));

    this.Add("result=", "An output {SPEC} for saving the test results. This option may be repeated.",
        v => ResolveOutputSpecification(RequiredValue(v, "--resultxml"), resultOutputSpecifications));

    this.Add("explore:", "Display or save test info rather than running tests. Optionally provide an output {SPEC} for saving the test info. This option may be repeated.", v =>
    {
        Explore = true;
        ResolveOutputSpecification(v, ExploreOutputSpecifications);
    });

    this.Add("noresult", "Don't save any test results.",
        v => noresult = v != null);

    this.Add("labels=", "Specify whether to write test case names to the output. Values: Off, On, All",
        v => DisplayTestLabels = RequiredValue(v, "--labels", "Off", "On", "Before", "After", "All"));

    this.Add("test-name-format=", "Non-standard naming pattern to use in generating test names.",
        v => DefaultTestNamePattern = RequiredValue(v, "--test-name-format"));

    this.Add("teamcity", "Turns on use of TeamCity service messages.",
        v => TeamCity = v != null);

    this.Add("trace=", "Set internal trace {LEVEL}.\nValues: Off, Error, Warning, Info, Verbose (Debug)",
        v => InternalTraceLevel = RequiredValue(v, "--trace", "Off", "Error", "Warning", "Info", "Verbose", "Debug"));

    this.Add("noheader|noh", "Suppress display of program information at start of run.",
        v => NoHeader = v != null);

    this.Add("nocolor|noc", "Displays console output without color.",
        v => NoColor = v != null);

    this.Add("help|h", "Display this message and exit.",
        v => ShowHelp = v != null);

    this.Add("version|V", "Display the header and exit.",
        v => ShowVersion = v != null);

    // Default: a bare argument is the (single) input file, when one is allowed.
    this.Add("<>", v =>
    {
        if (LooksLikeAnOption(v))
            ErrorMessages.Add("Invalid argument: " + v);
        else if (InputFileRequired)
            if (InputFile == null)
                InputFile = v;
            else
                ErrorMessages.Add("Multiple file names are not allowed on the command-line.\n  Invalid entry: " + v);
        else
            ErrorMessages.Add("Do not provide a file name when running a self-executing test.\n  Invalid entry: " + v);
    });
}
/// <summary>
/// Heuristic for whether a bare argument was really meant as an option:
/// "-x" always is; "/x" only counts on platforms where '/' is not the
/// directory separator (on Unix it is more likely a path).
/// </summary>
private bool LooksLikeAnOption(string v)
{
    // Parentheses make the intended precedence (&& before ||) explicit, and
    // Ordinal comparison avoids culture-sensitive prefix matching (CA1310).
    return v.StartsWith("-", StringComparison.Ordinal)
        || (v.StartsWith("/", StringComparison.Ordinal) && Path.DirectorySeparatorChar != '/');
}
// Parses an output {SPEC} string and appends it to the given list; a null value
// is ignored and a malformed spec is recorded as an error rather than thrown.
private void ResolveOutputSpecification(string value, IList<OutputSpecification> outputSpecifications)
{
    if (value == null)
        return;

    try
    {
        outputSpecifications.Add(new OutputSpecification(value));
    }
    catch (ArgumentException e)
    {
        // OutputSpecification reports parse failures via ArgumentException.
        ErrorMessages.Add(e.Message);
    }
}
#endregion
}
}
| |
using System;
using System.IO.Pipelines;
using System.Net.Security;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Connections.Features;
using Microsoft.Extensions.Logging;
using Orleans.Connections.Security.Internal;
namespace Orleans.Connections.Security
{
/// <summary>
/// Connection middleware that wraps an outbound connection's transport in a
/// client-side TLS stream before invoking the next middleware in the pipeline.
/// </summary>
internal class TlsClientConnectionMiddleware
{
    private readonly ConnectionDelegate _next;
    private readonly TlsOptions _options;
    private readonly ILogger _logger;
    // Client certificate captured at construction time (may be null).
    private readonly X509Certificate2 _certificate;

    public TlsClientConnectionMiddleware(ConnectionDelegate next, TlsOptions options, ILoggerFactory loggerFactory)
    {
        if (options == null)
        {
            throw new ArgumentNullException(nameof(options));
        }

        _next = next;

        // capture the certificate now so it can't be switched after validation
        _certificate = ValidateCertificate(options.LocalCertificate, options.ClientCertificateMode);

        _options = options;
        _logger = loggerFactory?.CreateLogger<TlsClientConnectionMiddleware>();
    }

    public Task OnConnectionAsync(ConnectionContext context)
    {
        // Offload to the thread pool so the caller is not blocked by the
        // synchronous portion of the handshake setup.
        return Task.Run(() => InnerOnConnectionAsync(context));
    }

    private async Task InnerOnConnectionAsync(ConnectionContext context)
    {
        // Expose TLS details of this connection through the feature collection.
        var feature = new TlsConnectionFeature();
        context.Features.Set<ITlsConnectionFeature>(feature);
        context.Features.Set<ITlsHandshakeFeature>(feature);

        // NOTE(review): if no IMemoryPoolFeature is present, memoryPool is null and the
        // GetMinimumSegmentSize()/GetMinimumAllocSize() calls below would throw — this
        // appears to assume the feature is always registered; confirm with the host setup.
        var memoryPool = context.Features.Get<IMemoryPoolFeature>()?.MemoryPool;

        var inputPipeOptions = new StreamPipeReaderOptions
        (
            pool: memoryPool,
            bufferSize: memoryPool.GetMinimumSegmentSize(),
            minimumReadSize: memoryPool.GetMinimumAllocSize(),
            leaveOpen: true
        );

        var outputPipeOptions = new StreamPipeWriterOptions
        (
            pool: memoryPool,
            leaveOpen: true
        );

        TlsDuplexPipe tlsDuplexPipe = null;

        if (_options.RemoteCertificateMode == RemoteCertificateMode.NoCertificate)
        {
            // No server certificate expected: default SslStream validation applies.
            tlsDuplexPipe = new TlsDuplexPipe(context.Transport, inputPipeOptions, outputPipeOptions);
        }
        else
        {
            tlsDuplexPipe = new TlsDuplexPipe(context.Transport, inputPipeOptions, outputPipeOptions, s => new SslStream(
                s,
                leaveInnerStreamOpen: false,
                userCertificateValidationCallback: (sender, certificate, chain, sslPolicyErrors) =>
                {
                    // Missing certificate: only acceptable when one is not required.
                    if (certificate == null)
                    {
                        return _options.RemoteCertificateMode != RemoteCertificateMode.RequireCertificate;
                    }

                    // With no custom validator, fall back to the standard policy check.
                    if (_options.RemoteCertificateValidation == null)
                    {
                        if (sslPolicyErrors != SslPolicyErrors.None)
                        {
                            return false;
                        }
                    }

                    var certificate2 = ConvertToX509Certificate2(certificate);
                    if (certificate2 == null)
                    {
                        return false;
                    }

                    // A custom validator, when supplied, has the final say.
                    if (_options.RemoteCertificateValidation != null)
                    {
                        if (!_options.RemoteCertificateValidation(certificate2, chain, sslPolicyErrors))
                        {
                            return false;
                        }
                    }

                    return true;
                }));
        }

        var sslStream = tlsDuplexPipe.Stream;

        // Abort the connection if the handshake does not finish within the timeout.
        using (var cancellationTokeSource = new CancellationTokenSource(_options.HandshakeTimeout))
        using (cancellationTokeSource.Token.UnsafeRegisterCancellation(state => ((ConnectionContext)state).Abort(), context))
        {
            try
            {
                var sslOptions = new TlsClientAuthenticationOptions
                {
                    ClientCertificates = _certificate == null ? null : new X509CertificateCollection { _certificate },
                    EnabledSslProtocols = _options.SslProtocols,
                };

                // Allow callers to customize the options before authenticating.
                _options.OnAuthenticateAsClient?.Invoke(context, sslOptions);

#if NETCOREAPP
                await sslStream.AuthenticateAsClientAsync(sslOptions.Value, cancellationTokeSource.Token);
#else
                await sslStream.AuthenticateAsClientAsync(
                    sslOptions.TargetHost,
                    sslOptions.ClientCertificates,
                    sslOptions.EnabledSslProtocols,
                    sslOptions.CertificateRevocationCheckMode == X509RevocationMode.Online);
#endif
            }
            catch (OperationCanceledException ex)
            {
                _logger?.LogWarning(2, ex, "Authentication timed out");
#if NETCOREAPP
                await sslStream.DisposeAsync();
#else
                sslStream.Dispose();
#endif
                // Handshake failed: give up on this connection without calling _next.
                return;
            }
            catch (Exception ex)
            {
                _logger?.LogWarning(1, ex, "Authentication failed");
#if NETCOREAPP
                await sslStream.DisposeAsync();
#else
                sslStream.Dispose();
#endif
                return;
            }
        }

        // Record the negotiated TLS parameters on the connection feature.
#if NETCOREAPP
        feature.ApplicationProtocol = sslStream.NegotiatedApplicationProtocol.Protocol;
#endif
        context.Features.Set<ITlsApplicationProtocolFeature>(feature);

        feature.LocalCertificate = ConvertToX509Certificate2(sslStream.LocalCertificate);
        feature.RemoteCertificate = ConvertToX509Certificate2(sslStream.RemoteCertificate);
        feature.CipherAlgorithm = sslStream.CipherAlgorithm;
        feature.CipherStrength = sslStream.CipherStrength;
        feature.HashAlgorithm = sslStream.HashAlgorithm;
        feature.HashStrength = sslStream.HashStrength;
        feature.KeyExchangeAlgorithm = sslStream.KeyExchangeAlgorithm;
        feature.KeyExchangeStrength = sslStream.KeyExchangeStrength;
        feature.Protocol = sslStream.SslProtocol;

        var originalTransport = context.Transport;

        try
        {
            // Swap in the TLS-wrapped transport for the rest of the pipeline.
            context.Transport = tlsDuplexPipe;

            // Disposing the stream will dispose the tlsDuplexPipe
#if NETCOREAPP
            await using (sslStream)
            await using (tlsDuplexPipe)
#else
            using (sslStream)
            using (tlsDuplexPipe)
#endif
            {
                await _next(context);
                // Dispose the inner stream (tlsDuplexPipe) before disposing the SslStream
                // as the duplex pipe can hit an ODE as it still may be writing.
            }
        }
        finally
        {
            // Restore the original so that it gets closed appropriately
            context.Transport = originalTransport;
        }
    }

    /// <summary>
    /// Validates (or drops) the configured local certificate according to the
    /// certificate mode; returns the certificate to use or null.
    /// </summary>
    private static X509Certificate2 ValidateCertificate(X509Certificate2 certificate, RemoteCertificateMode mode)
    {
        switch (mode)
        {
            case RemoteCertificateMode.NoCertificate:
                return null;
            case RemoteCertificateMode.AllowCertificate:
                //if certificate exists but can not be used for client authentication.
                if (certificate != null && CertificateLoader.IsCertificateAllowedForClientAuth(certificate))
                    return certificate;

                return null;
            case RemoteCertificateMode.RequireCertificate:
                EnsureCertificateIsAllowedForClientAuth(certificate);
                return certificate;
            default:
                throw new ArgumentOutOfRangeException(nameof(mode), mode, null);
        }
    }

    /// <summary>
    /// Throws when the certificate is missing or lacks the client-auth usage.
    /// </summary>
    protected static void EnsureCertificateIsAllowedForClientAuth(X509Certificate2 certificate)
    {
        if (certificate is null)
        {
            throw new InvalidOperationException("No certificate provided for client authentication.");
        }

        if (!CertificateLoader.IsCertificateAllowedForClientAuth(certificate))
        {
            throw new InvalidOperationException($"Invalid client certificate for client authentication: {certificate.Thumbprint}");
        }
    }

    // Upgrades an X509Certificate to X509Certificate2, avoiding a copy when possible.
    private static X509Certificate2 ConvertToX509Certificate2(X509Certificate certificate)
    {
        if (certificate is null)
        {
            return null;
        }

        return certificate as X509Certificate2 ?? new X509Certificate2(certificate);
    }
}
}
| |
// ==========================================================
// FreeImage 3 .NET wrapper
// Original FreeImage 3 functions and .NET compatible derived functions
//
// Design and implementation by
// - Jean-Philippe Goerke (jpgoerke@users.sourceforge.net)
// - Carsten Klein (cklein05@users.sourceforge.net)
//
// Contributors:
// - David Boland (davidboland@vodafone.ie)
//
// Main reference : MSDN Knowlede Base
//
// This file is part of FreeImage 3
//
// COVERED CODE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTY
// OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, WITHOUT LIMITATION, WARRANTIES
// THAT THE COVERED CODE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE
// OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE COVERED
// CODE IS WITH YOU. SHOULD ANY COVERED CODE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT
// THE INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY NECESSARY
// SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF WARRANTY CONSTITUTES AN ESSENTIAL
// PART OF THIS LICENSE. NO USE OF ANY COVERED CODE IS AUTHORIZED HEREUNDER EXCEPT UNDER
// THIS DISCLAIMER.
//
// Use at your own risk!
// ==========================================================
// ==========================================================
// CVS
// $Revision: 1.3 $
// $Date: 2009/02/20 07:41:08 $
// $Id: RGBTRIPLE.cs,v 1.3 2009/02/20 07:41:08 cklein05 Exp $
// ==========================================================
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace FreeImageAPI
{
/// <summary>
/// The <b>RGBTRIPLE</b> structure describes a color consisting of relative
/// intensities of red, green and blue value. Each single color component
/// consumes 8 bits and so, takes values in the range from 0 to 255.
/// </summary>
/// <remarks>
/// <para>
/// The <b>RGBTRIPLE</b> structure provides access to an underlying Win32 <b>RGBTRIPLE</b>
/// structure. To determine the red, green or blue component of a color, use the
/// rgbtRed, rgbtGreen or rgbtBlue fields, respectively.
/// </para>
/// <para>For easy integration of the underlying structure into the .NET framework,
/// the <b>RGBTRIPLE</b> structure implements implicit conversion operators to
/// convert the represented color to and from the <see cref="System.Drawing.Color"/>
/// type. This makes the <see cref="System.Drawing.Color"/> type a real replacement
/// for the <b>RGBTRIPLE</b> structure and my be used in all situations which require
/// an <b>RGBTRIPLE</b> type.
/// </para>
/// <para>
/// Each of the color components rgbtRed, rgbtGreen or rgbtBlue of <b>RGBTRIPLE</b> is
/// translated into it's corresponding color component R, G or B of
/// <see cref="System.Drawing.Color"/> by an one-to-one manner and vice versa.
/// When converting from <see cref="System.Drawing.Color"/> into <b>RGBTRIPLE</b>, the
/// color's alpha value is ignored and assumed to be 255 when converting from
/// <b>RGBTRIPLE</b> into <see cref="System.Drawing.Color"/>, creating a fully
/// opaque color.
/// </para>
/// <para>
/// <b>Conversion from System.Drawing.Color to RGBTRIPLE</b>
/// </para>
/// <c>RGBTRIPLE.component = Color.component</c>
/// <para>
/// <b>Conversion from RGBTRIPLE to System.Drawing.Color</b>
/// </para>
/// <c>Color.component = RGBTRIPLE.component</c>
/// <para>
/// The same conversion is also applied when the <see cref="FreeImageAPI.RGBTRIPLE.Color"/>
/// property or the <see cref="FreeImageAPI.RGBTRIPLE(System.Drawing.Color)"/> constructor
/// is invoked.
/// </para>
/// </remarks>
/// <example>
/// The following code example demonstrates the various conversions between the
/// <b>RGBTRIPLE</b> structure and the <see cref="System.Drawing.Color"/> structure.
/// <code>
/// RGBTRIPLE rgbt;
/// // Initialize the structure using a native .NET Color structure.
/// rgbt = new RGBTRIPLE(Color.Indigo);
/// // Initialize the structure using the implicit operator.
/// rgbt = Color.DarkSeaGreen;
/// // Convert the RGBTRIPLE instance into a native .NET Color
/// // using its implicit operator.
/// Color color = rgbt;
/// // Using the structure's Color property for converting it
/// // into a native .NET Color.
/// Color another = rgbt.Color;
/// </code>
/// </example>
[Serializable, StructLayout(LayoutKind.Sequential)]
public struct RGBTRIPLE : IComparable, IComparable<RGBTRIPLE>, IEquatable<RGBTRIPLE>
{
    /// <summary>
    /// The blue color component.
    /// </summary>
    public byte rgbtBlue;

    /// <summary>
    /// The green color component.
    /// </summary>
    public byte rgbtGreen;

    /// <summary>
    /// The red color component.
    /// </summary>
    public byte rgbtRed;

    /// <summary>
    /// Initializes a new instance based on the specified <see cref="System.Drawing.Color"/>.
    /// </summary>
    /// <param name="color"><see cref="System.Drawing.Color"/> to initialize with.</param>
    /// <remarks>The color's alpha channel is ignored.</remarks>
    public RGBTRIPLE(Color color)
    {
        rgbtBlue = color.B;
        rgbtGreen = color.G;
        rgbtRed = color.R;
    }

    /// <summary>
    /// Tests whether two specified <see cref="RGBTRIPLE"/> structures are equivalent.
    /// </summary>
    /// <param name="left">The <see cref="RGBTRIPLE"/> that is to the left of the equality operator.</param>
    /// <param name="right">The <see cref="RGBTRIPLE"/> that is to the right of the equality operator.</param>
    /// <returns>
    /// <b>true</b> if the two <see cref="RGBTRIPLE"/> structures are equal; otherwise, <b>false</b>.
    /// </returns>
    public static bool operator ==(RGBTRIPLE left, RGBTRIPLE right)
    {
        return
            left.rgbtBlue == right.rgbtBlue &&
            left.rgbtGreen == right.rgbtGreen &&
            left.rgbtRed == right.rgbtRed;
    }

    /// <summary>
    /// Tests whether two specified <see cref="RGBTRIPLE"/> structures are different.
    /// </summary>
    /// <param name="left">The <see cref="RGBTRIPLE"/> that is to the left of the inequality operator.</param>
    /// <param name="right">The <see cref="RGBTRIPLE"/> that is to the right of the inequality operator.</param>
    /// <returns>
    /// <b>true</b> if the two <see cref="RGBTRIPLE"/> structures are different; otherwise, <b>false</b>.
    /// </returns>
    public static bool operator !=(RGBTRIPLE left, RGBTRIPLE right)
    {
        return !(left == right);
    }

    /// <summary>
    /// Converts the value of a <see cref="System.Drawing.Color"/> structure to a <see cref="RGBTRIPLE"/> structure.
    /// </summary>
    /// <param name="value">A <see cref="System.Drawing.Color"/> structure.</param>
    /// <returns>A new instance of <see cref="RGBTRIPLE"/> initialized to <paramref name="value"/>.</returns>
    public static implicit operator RGBTRIPLE(Color value)
    {
        return new RGBTRIPLE(value);
    }

    /// <summary>
    /// Converts the value of a <see cref="RGBTRIPLE"/> structure to a <see cref="System.Drawing.Color"/> structure.
    /// </summary>
    /// <param name="value">A <see cref="RGBTRIPLE"/> structure.</param>
    /// <returns>A new instance of <see cref="System.Drawing.Color"/> initialized to <paramref name="value"/>.</returns>
    public static implicit operator Color(RGBTRIPLE value)
    {
        return value.Color;
    }

    /// <summary>
    /// Converts the value of an <see cref="UInt32"/> structure to a <see cref="RGBTRIPLE"/> structure.
    /// </summary>
    /// <param name="value">An <see cref="UInt32"/> structure.</param>
    /// <returns>A new instance of <see cref="RGBTRIPLE"/> initialized to <paramref name="value"/>.</returns>
    public static implicit operator RGBTRIPLE(uint value)
    {
        // Layout is 0x00RRGGBB; the top byte of the uint is ignored.
        RGBTRIPLE result = new RGBTRIPLE();
        result.rgbtBlue = (byte)(value & 0xFF);
        result.rgbtGreen = (byte)((value >> 8) & 0xFF);
        result.rgbtRed = (byte)((value >> 16) & 0xFF);
        return result;
    }

    /// <summary>
    /// Converts the value of a <see cref="RGBTRIPLE"/> structure to an <see cref="UInt32"/> structure.
    /// </summary>
    /// <param name="value">A <see cref="RGBTRIPLE"/> structure.</param>
    /// <returns>A new instance of <see cref="UInt32"/> initialized to <paramref name="value"/>.</returns>
    public static implicit operator uint(RGBTRIPLE value)
    {
        return (uint)((value.rgbtRed << 16) | (value.rgbtGreen << 8) | (value.rgbtBlue));
    }

    /// <summary>
    /// Gets or sets the <see cref="System.Drawing.Color"/> of the structure.
    /// Reading produces a fully opaque color; writing ignores the alpha channel.
    /// </summary>
    public Color Color
    {
        get
        {
            return Color.FromArgb(
                rgbtRed,
                rgbtGreen,
                rgbtBlue);
        }
        set
        {
            rgbtBlue = value.B;
            rgbtGreen = value.G;
            rgbtRed = value.R;
        }
    }

    /// <summary>
    /// Compares this instance with a specified <see cref="Object"/>.
    /// </summary>
    /// <param name="obj">An object to compare with this instance.</param>
    /// <returns>A 32-bit signed integer indicating the lexical relationship between the two comparands.</returns>
    /// <exception cref="ArgumentException"><paramref name="obj"/> is not a <see cref="RGBTRIPLE"/>.</exception>
    public int CompareTo(object obj)
    {
        if (obj == null)
        {
            return 1;
        }
        if (!(obj is RGBTRIPLE))
        {
            // BUG FIX: the parameter name was previously passed as the exception
            // message; use the (message, paramName) overload instead.
            throw new ArgumentException("obj is not a RGBTRIPLE.", "obj");
        }
        return CompareTo((RGBTRIPLE)obj);
    }

    /// <summary>
    /// Compares this instance with a specified <see cref="RGBTRIPLE"/> object.
    /// </summary>
    /// <param name="other">A <see cref="RGBTRIPLE"/> to compare.</param>
    /// <returns>A signed number indicating the relative values of this instance
    /// and <paramref name="other"/>.</returns>
    public int CompareTo(RGBTRIPLE other)
    {
        // Ordering is defined by the ARGB integer value of the equivalent color.
        return this.Color.ToArgb().CompareTo(other.Color.ToArgb());
    }

    /// <summary>
    /// Tests whether the specified object is a <see cref="RGBTRIPLE"/> structure
    /// and is equivalent to this <see cref="RGBTRIPLE"/> structure.
    /// </summary>
    /// <param name="obj">The object to test.</param>
    /// <returns><b>true</b> if <paramref name="obj"/> is a <see cref="RGBTRIPLE"/> structure
    /// equivalent to this <see cref="RGBTRIPLE"/> structure; otherwise, <b>false</b>.</returns>
    public override bool Equals(object obj)
    {
        return ((obj is RGBTRIPLE) && (this == ((RGBTRIPLE)obj)));
    }

    /// <summary>
    /// Tests whether the specified <see cref="RGBTRIPLE"/> structure is equivalent to this
    /// <see cref="RGBTRIPLE"/> structure.
    /// </summary>
    /// <param name="other">A <see cref="RGBTRIPLE"/> structure to compare to this instance.</param>
    /// <returns><b>true</b> if <paramref name="other"/> is a <see cref="RGBTRIPLE"/> structure
    /// equivalent to this <see cref="RGBTRIPLE"/> structure; otherwise, <b>false</b>.</returns>
    public bool Equals(RGBTRIPLE other)
    {
        return (this == other);
    }

    /// <summary>
    /// Returns a hash code for this <see cref="RGBTRIPLE"/> structure.
    /// </summary>
    /// <returns>An integer value that specifies the hash code for this <see cref="RGBTRIPLE"/>.</returns>
    public override int GetHashCode()
    {
        // BUG FIX: base.GetHashCode() (ValueType's reflection-based hash) was used
        // before. Combine the channels the same way the uint conversion does so
        // equal values (per ==) always hash equally, without reflection overhead.
        return (rgbtRed << 16) | (rgbtGreen << 8) | rgbtBlue;
    }

    /// <summary>
    /// Converts the numeric value of the <see cref="RGBTRIPLE"/> object
    /// to its equivalent string representation.
    /// </summary>
    /// <returns>The string representation of the value of this instance.</returns>
    public override string ToString()
    {
        return FreeImage.ColorToString(Color);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Xml;
using System.Windows.Forms;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Diagnostics;
using System.ComponentModel;
using System.Text;
namespace XmlNotepad {
//========================================================================================
/// <summary>
/// Displays the text of the attributes, comments, text, cdata and leaf element nodes and
/// provides type-to-find and editing of those values.
/// </summary>
public class NodeTextView : UserControl, IEditableView {
// Cached display text per tree node; cleared whenever the model or layout changes.
Dictionary<TreeNode, string> visibleTextCache;
// Background color for container rows; overridable via the "Colors" setting.
private Color containerBackground = Color.AliceBlue;
// Currently selected node, mirrored from the tree view.
private TreeNode selectedNode;
Settings settings;
// Type-to-find incremental search helper (disposed in Dispose).
private TypeToFindHandler ttf;
// Current scroll offset, applied when mapping node coordinates to the screen.
Point scrollPosition;
// Overlay control used for in-place editing of node values.
private TextEditorOverlay editor;
// The tree nodes whose text this view renders.
private TreeNodeCollection nodes;

// Raised after the selection changes (mirrors TreeView.AfterSelect semantics).
public event EventHandler<TreeViewEventArgs> AfterSelect;

/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Constructs the view: enables double-buffered owner painting, creates the
/// in-place text editor overlay and wires up type-to-find searching.
/// </summary>
public NodeTextView() {
    // Owner-drawn, double-buffered, focusable control.
    this.SetStyle(ControlStyles.ResizeRedraw,true);
    this.SetStyle(ControlStyles.UserPaint,true);
    this.SetStyle(ControlStyles.AllPaintingInWmPaint,true);
    this.SetStyle(ControlStyles.OptimizedDoubleBuffer, true);
    this.SetStyle(ControlStyles.Selectable,true);

    InitializeComponent();

    this.SuspendLayout();
    // Multi-line editor overlay for editing node values in place.
    this.editor = new TextEditorOverlay(this);
    this.editor.MultiLine = true;
    this.editor.CommitEdit += new EventHandler<TextEditorEventArgs>(OnCommitEdit);
    this.editor.LayoutEditor += new EventHandler<TextEditorLayoutEventArgs>(OnLayoutEditor);
    // Type-to-find search with a 2 second reset timeout.
    this.ttf = new TypeToFindHandler(this, 2000);
    this.ttf.FindString += new TypeToFindEventHandler(FindString);
    this.ResumeLayout();

    this.AccessibleRole=System.Windows.Forms.AccessibleRole.List;
    visibleTextCache = new Dictionary<TreeNode, string>();
}
#region Component Designer generated code
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing) {
    if (disposing && (components != null)) {
        components.Dispose();
    }
    // The type-to-find handler owns a timer; release it regardless of 'disposing'
    // and null the field so it is not disposed twice.
    if (this.ttf != null) {
        this.ttf.Dispose();
        this.ttf = null;
    }
    base.Dispose(disposing);
}
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent() {
    this.components = new System.ComponentModel.Container();
    this.SuspendLayout();
    // Accessibility metadata used by screen readers and UI automation.
    this.AccessibleRole = AccessibleRole.List;
    this.Name = this.AccessibleName = "NodeTextView";
    this.AccessibleDescription = "Right hand side of the XmlTreeView for editing node values";
    this.ResumeLayout(false);
}
#endregion
// Shuts the view down: releases the in-place editor and drops all cached text.
public void Close() {
    this.editor.Dispose();
    this.ClearCache();
}
// Drops every cached display string; they are recomputed on demand when painting.
void ClearCache() {
    if (visibleTextCache == null)
        return;
    visibleTextCache.Clear();
}
// The nodes to display. Assigning a new collection clears the selection and the
// text cache and triggers a fresh layout.
public TreeNodeCollection Nodes {
    get { return this.nodes; }
    set {
        this.selectedNode = null;
        this.nodes = value;
        ClearCache();
        PerformLayout();
    }
}
// Service lookups via the site. NOTE(review): these assume SetSite has been
// called and the services are registered; a null Site would throw here — confirm
// against the hosting form's initialization order.
[System.ComponentModel.Browsable(false)]
public UndoManager UndoManager {
    get { return (UndoManager)this.Site.GetService(typeof(UndoManager)); }
}

[System.ComponentModel.Browsable(false)]
public IIntellisenseProvider IntellisenseProvider {
    get { return (IIntellisenseProvider)this.Site.GetService(typeof(IIntellisenseProvider)); }
}
/// <summary>
/// Injects the service site: hooks up settings-change and model-change
/// notifications and forwards the site to the editor overlay.
/// </summary>
public void SetSite(ISite site) {
    // Overriding the Site property directly breaks the WinForms designer.
    this.Site = site;
    this.settings = (Settings)site.GetService(typeof(Settings));
    if (this.settings != null) {
        this.settings.Changed += new SettingsEventHandler(settings_Changed);
    }
    // Apply the current settings immediately.
    settings_Changed(this, "");
    this.editor.Site = site;
    XmlCache model = (XmlCache)site.GetService(typeof(XmlCache));
    model.ModelChanged += new EventHandler<ModelChangedEventArgs>(OnModelChanged);
}

// Any model edit may change node text, so discard the cached strings.
void OnModelChanged(object sender, ModelChangedEventArgs e) {
    ClearCache();
}
// Current scroll offset used to translate node coordinates into client coordinates.
public Point ScrollPosition {
    get { return this.scrollPosition; }
    set { this.scrollPosition = value; }
}

// Translates an (x, y) pair by the current scroll offset.
public Point ApplyScrollOffset(int x, int y) {
    return ApplyScrollOffset(new Point(x, y));
}

// Translates a point by the current scroll offset.
public Point ApplyScrollOffset(Point pt) {
    return new Point(pt.X - this.scrollPosition.X, pt.Y - this.scrollPosition.Y);
}
// Re-reads color settings and repaints whenever any setting changes.
private void settings_Changed(object sender, string name) {
    // change the colors.
    Invalidate();
    if (this.settings != null) {
        System.Collections.Hashtable colors = (System.Collections.Hashtable)this.settings["Colors"];
        if (colors != null) {
            object color = colors["ContainerBackground"];
            if (color != null) {
                this.containerBackground = (Color)color;
            }
        }
    }
}
// Repaint the selected row on focus changes so its highlight reflects focus state.
protected override void OnGotFocus(EventArgs e) {
    if (this.selectedNode != null){
        Invalidate(this.selectedNode);
    }
    base.OnGotFocus (e);
}

protected override void OnLostFocus(EventArgs e) {
    if (this.selectedNode != null){
        Invalidate(this.selectedNode);
    }
    base.OnLostFocus (e);
}

// Cancels any in-place edit and clears the selection.
public void Reset(){
    this.editor.EndEdit(false);
    this.selectedNode = null;
    Invalidate();
}
// The selected node. Setting a different node commits/ends any active edit,
// repaints, and raises AfterSelect.
public TreeNode SelectedNode {
    get { return this.selectedNode; }
    set {
        if (this.selectedNode != value) {
            this.editor.EndEdit(false);
            InternalSelect(value);
            if (AfterSelect != null) {
                AfterSelect(this, new TreeViewEventArgs(value, TreeViewAction.None));
            }
        }
    }
}

// Selection change without raising AfterSelect (used when the tree drives selection).
internal void InternalSelect(TreeNode node) {
    this.selectedNode = node;
    Invalidate();
}
// Element containers have no editable text value; every other node kind does.
static bool IsTextEditable(TreeNode node) {
    NodeImage image = (NodeImage)(node.ImageIndex + 1);
    return image != NodeImage.Element && image != NodeImage.OpenElement;
}
// Moving the control invalidates the editor's screen position, so end the edit.
protected override void OnMove(EventArgs e) {
    this.editor.EndEdit(false);
    base.OnMove (e);
}

// Layout changes invalidate cached text metrics and the editor's placement.
protected override void OnLayout(LayoutEventArgs levent) {
    base.OnLayout (levent);
    ClearCache();
    if (this.editor.IsEditing) {
        this.editor.PerformLayout();
    }
}
// Pins the editor overlay exactly over the selected node's text, adjusted for scrolling.
void OnLayoutEditor(object sender, TextEditorLayoutEventArgs args) {
    Rectangle bounds = this.GetTextBounds(this.selectedNode);
    bounds.Offset(this.scrollPosition);
    args.PreferredBounds = bounds;
    args.MaxBounds = bounds;
}
// Checks whether the text exceeds the configured MaximumLineLength and, depending
// on settings/user choice, returns the text unchanged, word-wrapped, or sets
// 'cancelled' when the user aborted the edit.
string CheckTextLength(string text, out bool cancelled) {
    cancelled = false;
    if (text == null) return "";
    int maxLine = GetMaxLineLength(text);
    if (maxLine > (int)this.settings["MaximumLineLength"]) {
        DialogResult rc = DialogResult.No;
        if ((bool)this.settings["AutoFormatLongLines"])
        {
            // Setting says always reformat without asking.
            rc = DialogResult.Yes;
        }
        else
        {
            // Yes = reformat, No = keep as-is, Cancel = abort the edit.
            rc = MessageBox.Show(this, SR.LongLinePrompt, SR.LongLineCaption,
                MessageBoxButtons.YesNoCancel, MessageBoxIcon.Exclamation);
        }
        if (rc == DialogResult.Cancel) {
            cancelled = true;
            return text;
        }
        if (rc == DialogResult.No) {
            return text;
        }
        return FormatLines(text);
    }
    return text;
}
/// <summary>
/// Word-wraps the text so that lines longer than 80 characters are split,
/// preferring whitespace boundaries; existing line breaks are preserved.
/// </summary>
/// <param name="text">The text to reformat.</param>
/// <returns>The reformatted text; no characters are lost.</returns>
private static string FormatLines(string text) {
    StringBuilder sb = new StringBuilder();
    int lineStart = 0;  // index of the first character of the current output line
    int j = 0;          // number of characters on the current output line
    int spaces = 0;     // whitespace seen so far (legacy behavior: never reset per line)
    for (int i = 0, len = text.Length; i < len; i++) {
        char c = text[i];
        bool lineEnd = false;
        if (c == '\r') {
            if (i + 1 < len && text[i + 1] == '\n') i++; // treat CRLF as one break
            lineEnd = true;
        } else if (c == '\n') {
            lineEnd = true;
        }
        else if (c == ' ' || c == '\t')
        {
            spaces++;
        }
        if (lineEnd) {
            // Copy the existing line through its original terminator.
            sb.Append(text.Substring(lineStart, i + 1 - lineStart));
            lineStart = i + 1;
            j = 0;
        }
        else if (++j >= 80 && (spaces == 0 || c == ' ' || c == '\t'))
        {
            // Break here: either we are at whitespace, or the text contains no
            // whitespace at all and we must split mid-word at the 80 column mark.
            sb.Append(text.Substring(lineStart, j));
            sb.Append("\r\n");
            lineStart = i + 1;
            j = 0;
        }
    }
    // BUG FIX: flush the remaining tail unconditionally. The previous version only
    // emitted the tail when the 80-column counter had not tripped, silently
    // dropping trailing text on long lines that could not be split at the
    // current character (e.g. a >80 char run following an earlier space).
    if (lineStart < text.Length) {
        sb.Append(text.Substring(lineStart));
    }
    return sb.ToString();
}
/// <summary>
/// Returns the length, in characters, of the longest line in the text.
/// Line terminators (\r, \n, \r\n) are not counted as part of a line.
/// </summary>
private static int GetMaxLineLength(string text) {
    int maxLine = 0;
    int lineStart = 0;
    for (int i = 0, len = text.Length; i < len; i++) {
        char c = text[i];
        if (c == '\r' || c == '\n') {
            // BUG FIX: measure up to (not including) the terminator. The previous
            // version advanced past "\r\n" first, so the '\r' was counted into
            // the line length.
            maxLine = Math.Max(maxLine, i - lineStart);
            if (c == '\r' && i + 1 < len && text[i + 1] == '\n') i++;
            lineStart = i + 1;
        } else if (i + 1 == len) {
            // BUG FIX: the final line (no trailing terminator) was previously
            // measured as i - lastLine, one character short.
            maxLine = Math.Max(maxLine, len - lineStart);
        }
    }
    return maxLine;
}
// Gives this view keyboard focus and then enters edit mode on the selected node.
public bool FocusBeginEdit(string value) {
    this.Focus();
    return this.BeginEdit(value);
}
#region IEditableView
// Enters in-place edit mode on the selected node.
// 'value' seeds the editor text; pass null to edit the node's current text.
// Returns true when editing actually started.
public bool BeginEdit(string value) {
    if (this.selectedNode != null) {
        if (string.IsNullOrEmpty(this.selectedNode.Label)) {
            // A node must be named before its value can be edited.
            MessageBox.Show(this, SR.NodeNameRequiredPrompt, SR.NodeNameRequiredCaption,
                MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        }
        else if (IsTextEditable(this.selectedNode)) {
            // see if control has possible values that cannot be known in xsd
            IIntellisenseProvider provider = this.IntellisenseProvider;
            string text = value != null ? value : GetNodeText(this.selectedNode);
            if (provider != null) {
                provider.SetContextNode(this.selectedNode);
                if (!provider.IsValueEditable) {
                    return false;
                }
            }
            // Offer to reformat over-long lines; the user may cancel the edit here.
            bool cancel = false;
            text = CheckTextLength(text, out cancel);
            if (cancel) return false;

            this.editor.BeginEdit(text, provider, EditMode.Value, this.selectedNode.ForeColor, this.Focused);
            return true;
        }
    }
    return false;
}
public bool IsEditing {
get { return this.editor.IsEditing; }
}
public void SelectText(int index, int length) {
if (this.editor.IsEditing) {
this.editor.Select(index, length);
}
}
// Replaces a range of text in the inline editor. Returns the editor's
// result, or false when no edit is in progress.
public bool ReplaceText(int index, int length, string replacement){
    // Short-circuit: Replace is only invoked while an edit is active.
    return this.editor.IsEditing && this.editor.Replace(index, length, replacement);
}
// Screen bounds of the inline editor overlay.
public Rectangle EditorBounds {
    get {
        return this.editor.Bounds;
    }
}
// The inline text editor overlay used for value editing.
public TextEditorOverlay Editor {
    get { return this.editor; }
}
// Ends the current inline edit; 'cancel' discards the typed text.
public bool EndEdit(bool cancel) {
    return this.editor.EndEdit(cancel);
}
// Current selection within the inline editor.
public int SelectionStart { get { return this.editor.SelectionStart; } }
public int SelectionLength { get { return this.editor.SelectionLength; } }
#endregion
// Editor commit callback: writes the edited text back to the node unless
// the edit was cancelled.
void OnCommitEdit(object sender, TextEditorEventArgs args) {
    if (args.Cancelled) {
        return;
    }
    SetNodeText(this.selectedNode, args.Text);
}
// Begins type-to-find incremental search (handled by the ttf helper).
public void StartIncrementalSearch() {
    ttf.StartIncrementalSearch();
}
// Declares which keys this control wants delivered to OnKeyDown rather
// than treated as dialog navigation keys. Modifier bits are stripped first.
protected override bool IsInputKey(Keys keyData) {
    Keys plainKey = keyData & ~Keys.Modifiers;
    bool wanted =
        plainKey == Keys.F2 || plainKey == Keys.Enter ||
        plainKey == Keys.Home || plainKey == Keys.End ||
        plainKey == Keys.Up || plainKey == Keys.PageDown ||
        plainKey == Keys.PageUp || plainKey == Keys.Right ||
        plainKey == Keys.Left;
    if (wanted) {
        return true;
    }
    return base.IsInputKey(keyData);
}
// Records the event for reentrancy checks, lets subscribers see it first,
// then applies the view's own key handling if nobody handled it.
protected override void OnKeyDown(KeyEventArgs e) {
    CurrentEvent.Event = e;
    base.OnKeyDown(e);
    if (!e.Handled) {
        HandleKeyDown(e);
    }
}
// Raises KeyDown on subscribers without re-running HandleKeyDown; used by
// collaborating controls to forward key events through this view.
public void BubbleKeyDown(KeyEventArgs e) {
    base.OnKeyDown(e);
}
// View-level key handling: F2/Enter start an edit, Left is swallowed
// (it would otherwise shift-tab), and a plain letter/digit starts an edit
// seeded with that character - unless incremental search is in progress.
public void HandleKeyDown(KeyEventArgs e) {
    // A..Z / 0..9 with no modifier other than Shift.
    bool isLetterOrDigit = ((e.KeyCode >= Keys.A && e.KeyCode <= Keys.Z) ||
        (e.KeyCode >= Keys.D0 && e.KeyCode <= Keys.D9)) &&
        (e.Modifiers == Keys.Shift || e.Modifiers == 0);
    switch (e.KeyCode) {
        case Keys.F2:
        case Keys.Enter:
            BeginEdit(null);
            e.Handled = true;
            break;
        case Keys.Left:
            // do not give to tree view, this will do a shift-tab instead.
            break;
        default:
            if (isLetterOrDigit && !e.Handled && this.ContainsFocus) {
                if (ttf.Started) {
                    e.Handled = true; // let ttf handle it!
                } else {
                    // Start editing with the typed character as initial text,
                    // placing the caret at the end.
                    char ch = Convert.ToChar(e.KeyValue);
                    if (!e.Shift) ch = Char.ToLower(ch);
                    if (this.BeginEdit(ch.ToString())) {
                        this.editor.SelectEnd();
                        e.Handled = true;
                    }
                }
            }
            break;
    }
}
// Translates the drawing surface by the current scroll offset, lets the
// base class paint, then paints the visible node text on top.
protected override void OnPaint(PaintEventArgs e) {
    Rectangle clip = e.ClipRectangle;
    Graphics g = e.Graphics;
    // Graphics.Transform returns a copy, and assigning the property copies
    // again - so the local Matrix must be disposed or a GDI+ handle leaks
    // on every paint.
    using (Matrix m = g.Transform) {
        m.Translate(this.scrollPosition.X, this.scrollPosition.Y);
        g.Transform = m;
    }
    base.OnPaint(e);
    if (this.nodes != null) {
        // Convert the clip rectangle into scrolled (virtual) coordinates.
        clip = new Rectangle(this.ApplyScrollOffset(clip.X, clip.Y), new Size(clip.Width, clip.Height));
        PaintNodes(this.nodes, g, ref clip);
    }
}
// Recursively paints the value text of every node intersecting the clip
// rectangle. Returns early once a node starts below the clip bottom,
// since nodes are laid out top-to-bottom.
void PaintNodes(TreeNodeCollection nodes, Graphics g, ref Rectangle clip) {
    if (nodes == null) return;
    g.SmoothingMode = SmoothingMode.AntiAlias;
    foreach (TreeNode n in nodes) {
        Rectangle r = GetTextBounds(n);
        if (r.Top > clip.Bottom) return;
        if (r.IntersectsWith(clip)){
            DrawItem(r, n, g);
        }
        if (n.IsExpanded){
            PaintNodes(n.Nodes, g, ref clip);
        }
    }
}
// Returns the node's text with line endings normalized to CRLF.
static string GetNodeText(TreeNode n) {
    return NormalizeNewLines(n.Text);
}
// Normalizes all line endings in 'text' to CRLF: "\r\n" stays "\r\n",
// and lone '\r' or '\n' become "\r\n". Returns null for null input.
public static string NormalizeNewLines(string text){
    if (text == null) return null;
    StringBuilder result = new StringBuilder(text.Length);
    int pos = 0;
    int count = text.Length;
    while (pos < count) {
        char current = text[pos];
        switch (current) {
            case '\r':
                // Swallow a following '\n' so "\r\n" is emitted once.
                if (pos + 1 < count && text[pos + 1] == '\n') pos++;
                result.Append("\r\n");
                break;
            case '\n':
                result.Append("\r\n");
                break;
            default:
                result.Append(current);
                break;
        }
        pos++;
    }
    return result.ToString();
}
// Writes an edited value back to the node.
static void SetNodeText(TreeNode n, string value) {
    n.Text = value;
}
// Invalidates just the given node's text area (adjusted for scrolling)
// so only that region is repainted.
public void Invalidate(TreeNode n) {
    if (n == null) {
        return;
    }
    Rectangle area = this.GetTextBounds(n);
    area.Offset(this.scrollPosition);
    Invalidate(area);
}
// Paints one node's value text into 'bounds': selection highlight when
// focused+selected, the text trimmed to a single line with an ellipsis,
// measured and word-wrapped to the available width (cached per node),
// and a dotted focus rectangle around the selected node.
private void DrawItem(Rectangle bounds, TreeNode tn, Graphics g) {
    //g.SmoothingMode = SmoothingMode.AntiAlias;
    Color c = tn.ForeColor;
    Brush myBrush = null;
    bool focusSelected = false;
    if (this.Focused && tn == this.SelectedNode) {
        focusSelected = true;
        g.FillRectangle(SystemBrushes.Highlight, bounds);
        // NOTE(review): myBrush is disposed at the end of this method; if
        // HighlightTextBrush returns a cached/shared brush this would
        // dispose it - confirm it allocates a fresh brush per call.
        myBrush = Utilities.HighlightTextBrush(c);
    } else {
        myBrush = new SolidBrush(c);
    }
    Font font = this.Font;
    // Leave a 3px gutter on the left of the text.
    Rectangle inset = new Rectangle(bounds.Left + 3, bounds.Top, bounds.Width - 3, bounds.Height);
    string value = null;
    if (this.visibleTextCache.ContainsKey(tn)) {
        // Reuse the previously computed truncated string for this node.
        value = this.visibleTextCache[tn];
    } else {
        value = GetNodeText(tn);
    }
    if (value == null && !focusSelected) {
        // No text: paint the container background.
        using (Brush b = new SolidBrush(containerBackground)) {
            g.FillRectangle(b, bounds);
        }
    }
    if (value != null && value.Length > 0) {
        //inset.Inflate(-3, -2);
        char ellipsis = Convert.ToChar(0x2026);
        //value = value.Trim();
        // Show only the first line of multi-line values, with an ellipsis.
        int i = value.IndexOfAny(new char[] { '\r', '\n' });
        if (i > 0) {
            value = value.Substring(0, i) + ellipsis;
        }
        // Figure out how much of the text we can display
        int width = inset.Width; ;
        //int height = inset.Height;
        string s = value;
        // NOTE(review): this early return skips myBrush.Dispose() below -
        // a (rare) brush leak when the inset width is negative.
        if (width < 0) return;
        int length = value.Length;
        SizeF size = SizeF.Empty;
        bool measurable = false;
        // MeasureString can throw on extremely long strings; halve until
        // it succeeds.
        while (!measurable) {
            try {
                if (s.Length >= 65536) {
                    // MeasureString tops out at 64kb strings.
                    s = s.Substring(0, 65535);
                }
                size = g.MeasureString(s, font, width + 1000, StringFormat.GenericTypographic);
                measurable = true;
            } catch (Exception) {
                // perhaps the string is just too long!
                s = s.Substring(0, s.Length / 2);
            }
        }
        int j = s.Length;
        // Vertically center the text within the row.
        int dy = (font.Height - (int)Math.Ceiling(size.Height)) / 2;
        if (dy < 0) dy = 0;
        char[] ws = new char[] { ' ', '\t' };
        if ((int)size.Width > width && j > 1) { // line wrap?
            // Grow the shown prefix word-by-word while it still fits.
            int start = 0;
            int w = 0;
            int k = value.IndexOfAny(ws);
            while (k > 0) {
                s = value.Substring(0, k) + ellipsis;
                size = g.MeasureString(s, font, width + 1000, StringFormat.GenericTypographic);
                if ((int)size.Width < width && k < length) {
                    start = k;
                    w = (int)size.Width;
                    while (start < length && (value[start] == ' ' || value[start] == '\t')) {
                        start++;
                    }
                    k = value.IndexOfAny(ws, start);
                } else {
                    break;
                }
            }
            j = start;
            if (w < width / 2) {
                // if we have a really long word (e.g. binhex) then just take characters
                // up to the end of the line.
                while ((int)w < width && j < length) {
                    j++;
                    s = value.Substring(0, j) + ellipsis;
                    size = g.MeasureString(s, font, width + 1000, StringFormat.GenericTypographic);
                    w = (int)size.Width;
                }
            }
            if (j <= 0) {
                s = "";
            } else if (j < length) {
                s = value.Substring(0, j - 1) + ellipsis;
            }
            // Cache the truncated string so repaints skip the measuring loop.
            this.visibleTextCache[tn] = s;
        }
        // Draw the current item text based on the current Font and the custom brush settings.
        g.DrawString(s, font, myBrush, inset.Left, dy + inset.Top, StringFormat.GenericTypographic);
    }
    // If the ListBox has focus, draw a focus rectangle around the selected item.
    if (tn == this.SelectedNode) {
        g.SmoothingMode = SmoothingMode.Default;
        Pen p = new Pen(Color.Black, 1);
        p.DashStyle = DashStyle.Dot;
        p.Alignment = PenAlignment.Inset;
        p.LineJoin = LineJoin.Round;
        bounds.Width--;
        bounds.Height--;
        g.DrawRectangle(p, bounds);
        p.Dispose();
    }
    myBrush.Dispose();
}
// Mouse handling: commits any pending inline edit, then either begins
// editing (left-click on the already-selected node while focused) or
// selects the clicked node. Always takes keyboard focus.
protected override void OnMouseDown(MouseEventArgs e) {
    base.OnMouseDown(e);
    CurrentEvent.Event = e;
    // Commit (not cancel) any in-progress edit before changing selection.
    this.editor.EndEdit(false);
    if (this.nodes != null) {
        Point p = this.ApplyScrollOffset(e.X, e.Y);
        TreeNode tn = this.FindNodeAt(this.nodes, p.X, p.Y);
        if (tn != null) {
            if (this.SelectedNode == tn && this.Focused) {
                // Second click on the selected node starts an edit.
                if (e.Button == MouseButtons.Left) {
                    this.BeginEdit(null);
                }
                return;
            } else {
                this.SelectedNode = tn;
            }
        }
    }
    this.Focus();
}
// Depth-first hit test: returns the node whose text bounds contain the
// point, descending into expanded nodes whose vertical span covers 'y'.
// Returns null when nothing is hit.
public TreeNode FindNodeAt(TreeNodeCollection nodes, int x, int y) {
    if (nodes == null) {
        return null;
    }
    foreach (TreeNode candidate in nodes) {
        if (GetTextBounds(candidate).Contains(x, y)) {
            return candidate;
        }
        // Only recurse when the point lies within this node's expanded span.
        if (candidate.IsExpanded && candidate.LabelBounds.Top <= y && candidate.bottom >= y) {
            TreeNode hit = FindNodeAt(candidate.Nodes, x, y);
            if (hit != null) {
                return hit;
            }
        }
    }
    return null;
}
// Bounds of the node's value-text row: full control width at the node's
// label height, in virtual (unscrolled) coordinates.
public Rectangle GetTextBounds(TreeNode n) {
    Rectangle r = new Rectangle(0, n.LabelBounds.Top, this.Width, n.LabelBounds.Height);
    return r;
}
// Incremental-search callback: selects the next visible node (starting at
// the current selection, wrapping past the end) whose text starts with
// 'toFind'. Case-insensitive in the current culture.
void FindString(object sender, string toFind) {
    TreeNode node = this.SelectedNode;
    if (node == null) node = this.FirstVisibleNode;
    TreeNode start = node;
    while (node != null) {
        string s = GetNodeText(node);
        if (s != null && s.StartsWith(toFind, StringComparison.CurrentCultureIgnoreCase)) {
            this.SelectedNode = node;
            return;
        }
        node = node.NextVisibleNode;
        if (node == null) node = this.FirstVisibleNode;
        // Wrapped all the way around without a match.
        if (node == start)
            break;
    }
}
// The first top-level node that is currently visible, or null when the
// view has no nodes or none are visible.
public TreeNode FirstVisibleNode {
    get {
        TreeNodeCollection topLevel = this.nodes;
        if (topLevel != null) {
            foreach (TreeNode candidate in topLevel) {
                if (candidate.IsVisible) {
                    return candidate;
                }
            }
        }
        return null;
    }
}
// Lazily-created accessibility wrapper for this view.
AccessibleNodeTextView acc;
protected override AccessibleObject CreateAccessibilityInstance() {
    if (this.acc == null) this.acc = new AccessibleNodeTextView(this);
    return this.acc;
}
}
// Accessibility wrapper for NodeTextView: exposes the view to assistive
// technology (screen readers, UI automation) and hands out a stable
// per-node AccessibleNodeTextViewNode wrapper for each tree node.
class AccessibleNodeTextView : Control.ControlAccessibleObject {
    NodeTextView view;
    // One stable AccessibleObject per TreeNode, so clients can compare refs.
    Dictionary<TreeNode, AccessibleObject> cache = new Dictionary<TreeNode, AccessibleObject>();
    public AccessibleNodeTextView(NodeTextView view)
        : base(view) {
        this.view = view;
    }
    public NodeTextView View { get { return this.view; } }
    public override Rectangle Bounds {
        get {
            return view.RectangleToScreen(view.ClientRectangle);
        }
    }
    public override string DefaultAction {
        get {
            return "Edit";
        }
    }
    // Default action: start editing the selected node's value.
    public override void DoDefaultAction() {
        if (view.SelectedNode != null) {
            view.FocusBeginEdit(null);
        }
    }
    public override int GetChildCount() {
        return view.Nodes.Count;
    }
    public override AccessibleObject GetChild(int index) {
        TreeNode node = view.Nodes[index];
        return Wrap(node);
    }
    // Returns (creating on demand) the accessible wrapper for a node.
    public AccessibleObject Wrap(TreeNode node) {
        if (node == null) return null;
        AccessibleObject a;
        cache.TryGetValue(node, out a);
        if (a == null){
            a = new AccessibleNodeTextViewNode(this, node);
            cache[node] = a;
        }
        return a;
    }
    public override AccessibleObject GetFocused() {
        return GetSelected();
    }
    public override int GetHelpTopic(out string fileName) {
        fileName = "TBD";
        return 0;
    }
    // The selected node's wrapper, or the view itself when nothing selected.
    public override AccessibleObject GetSelected() {
        if (view.SelectedNode != null) {
            return Wrap(view.SelectedNode);
        }
        return this;
    }
    // Screen coordinates in; the hit node's wrapper (or self) out.
    public override AccessibleObject HitTest(int x, int y) {
        Point pt = view.PointToClient(new Point(x, y));
        pt = view.ApplyScrollOffset(pt);
        TreeNode node = view.FindNodeAt(view.Nodes, pt.X, pt.Y);
        if (node != null) {
            return Wrap(node);
        }
        return this;
    }
    public override AccessibleObject Navigate(AccessibleNavigation navdir) {
        TreeNode node = null;
        TreeNodeCollection children = view.Nodes;
        int count = children.Count;
        switch (navdir) {
            case AccessibleNavigation.Left:
            case AccessibleNavigation.Down:
            case AccessibleNavigation.FirstChild:
                if (count > 0) node = children[0];
                break;
            case AccessibleNavigation.Next:
            case AccessibleNavigation.Previous:
            case AccessibleNavigation.Right:
            case AccessibleNavigation.Up:
                if (count > 0) node = children[count - 1];
                break;
            case AccessibleNavigation.LastChild:
                // special meaning for us, it means find the intellisense popup window!
                return view.Editor.CompletionSet.AccessibilityObject;
        }
        if (node != null) {
            return Wrap(node);
        }
        return this;
    }
    public override AccessibleObject Parent {
        get {
            return view.Parent.AccessibilityObject;
        }
    }
    public override AccessibleRole Role {
        get {
            return view.AccessibleRole;
        }
    }
    public override void Select(AccessibleSelection flags) {
        this.view.Focus();
    }
    public override AccessibleStates State {
        get {
            AccessibleStates result = AccessibleStates.Focusable | AccessibleStates.Selectable |
                AccessibleStates.Sizeable;
            if (view.Focused) result |= AccessibleStates.Focused;
            if (!view.Visible) result |= AccessibleStates.Invisible;
            return result;
        }
    }
    public override string Value {
        get {
            return "";
        }
        set {
            //???
        }
    }
}
// Accessible wrapper for one TreeNode within a NodeTextView: bounds,
// name/value access (routed through the undo manager), selection, and
// keyboard navigation between visible nodes.
class AccessibleNodeTextViewNode : AccessibleObject {
    TreeNode node;
    NodeTextView view;
    AccessibleNodeTextView acc;  // parent wrapper, owns the node->object cache
    public AccessibleNodeTextViewNode(AccessibleNodeTextView acc, TreeNode node) {
        this.acc = acc;
        this.view = acc.View;
        this.node = node;
    }
    public override Rectangle Bounds {
        get {
            // Node text bounds, scroll-adjusted and converted to screen.
            Rectangle bounds = view.GetTextBounds(node);
            bounds.Offset(view.ScrollPosition);
            return view.RectangleToScreen(bounds);
        }
    }
    public override string DefaultAction {
        get {
            return "Toggle";
        }
    }
    public override string Description {
        get {
            return "TextNode";
        }
    }
    // Default action: expand/collapse the node.
    public override void DoDefaultAction() {
        node.Toggle();
    }
    public override int GetChildCount() {
        return node.Nodes.Count;
    }
    public override AccessibleObject GetChild(int index) {
        TreeNode child = this.node.Nodes[index];
        return acc.Wrap(child);
    }
    public override AccessibleObject GetFocused() {
        return GetSelected();
    }
    public override int GetHelpTopic(out string fileName) {
        fileName = "TBD";
        return 0;
    }
    public override AccessibleObject GetSelected() {
        if (node.Selected) {
            return this;
        }
        return acc.GetSelected();
    }
    public override string Help {
        get {
            return "TBD";
        }
    }
    // Delegates to the view-level hit test.
    public override AccessibleObject HitTest(int x, int y) {
        return acc.HitTest(x, y);
    }
    public override string KeyboardShortcut {
        get {
            return "TBD";
        }
    }
    public override string Name {
        get {
            return node.Label;
        }
        set {
            // hack alert - this is breaking architectural layering!
            XmlTreeNode xnode = (XmlTreeNode)node;
            view.UndoManager.Push(new EditNodeName(xnode, value));
        }
    }
    public override AccessibleObject Navigate(AccessibleNavigation navdir) {
        TreeNode result = null;
        TreeNodeCollection children = node.Nodes;
        int count = children.Count;
        switch (navdir) {
            case AccessibleNavigation.Down:
            case AccessibleNavigation.Next:
                result = node.NextVisibleNode;
                if (result == null) {
                    // Past the last node: hand off to the intellisense popup.
                    return view.Editor.CompletionSet.AccessibilityObject;
                }
                break;
            case AccessibleNavigation.FirstChild:
                if (count > 0) result = children[0];
                if (!node.IsExpanded) node.Expand();
                break;
            case AccessibleNavigation.Left:
                return node.AccessibleObject;
            case AccessibleNavigation.Right:
                break;
            case AccessibleNavigation.LastChild:
                if (count > 0) result = children[count - 1];
                if (!node.IsExpanded) node.Expand();
                break;
            case AccessibleNavigation.Previous:
            case AccessibleNavigation.Up:
                result = node.PrevVisibleNode;
                break;
        }
        if (result != null) {
            return acc.Wrap(result);
        }
        return this;
    }
    public override AccessibleObject Parent {
        get {
            if (node.Parent != null) {
                return acc.Wrap(node.Parent);
            } else {
                return acc;
            }
        }
    }
    public override AccessibleRole Role {
        get {
            return AccessibleRole.ListItem;
        }
    }
    public override void Select(AccessibleSelection flags) {
        view.Focus();
        if ((flags & AccessibleSelection.TakeSelection) != 0 ||
            (flags & AccessibleSelection.AddSelection) != 0) {
            view.SelectedNode = node;
        } else if ((flags & AccessibleSelection.RemoveSelection) != 0) {
            if (view.SelectedNode == this.node) {
                view.SelectedNode = null;
            }
        }
    }
    public override AccessibleStates State {
        get {
            AccessibleStates result = AccessibleStates.Focusable | AccessibleStates.Selectable;
            if (node.Selected) result |= AccessibleStates.Focused | AccessibleStates.Selected;
            if (!node.IsVisible) result |= AccessibleStates.Invisible;
            if (node.IsExpanded) result |= AccessibleStates.Expanded;
            else result |= AccessibleStates.Collapsed;
            return result;
        }
    }
    public override string Value {
        get {
            string s = this.node.Text;
            if (s == null) s = "";
            return s;
        }
        set {
            // hack alert - this is breaking architectural layering!
            XmlTreeNode xnode = (XmlTreeNode)node;
            XmlTreeView xview = xnode.XmlTreeView;
            view.UndoManager.Push(new EditNodeValue(xview, xnode, value));
        }
    }
}
}
| |
using System;
using Gtk;
using Pango;
using System.Collections.Generic;
using System.IO;
namespace Moscrif.IDE.Controls
{
/// <summary>
/// GTK# output console pad: a text view with clear/save buttons,
/// tag-based coloring (bold, error, console-log) and indentation-aware
/// task logging. Writes are queued and flushed on a GLib timeout so
/// large bursts of output are batched onto the UI thread.
/// </summary>
public class OutputConsole : Gtk.HBox
{
	//Gtk.ScrolledWindow
	Gtk.TextBuffer buffer;
	Gtk.TextView textEditorControl;
	TextMark endMark;               // kept near the buffer end for auto-scroll
	FontDescription customFont;
	Gtk.ScrolledWindow sw;
	//Gtk.VButtonBox vbt;
	Gtk.VBox vbt;
	TextTag tag;                    // tag for the current indentation level
	TextTag bold;
	TextTag errorTag;
	TextTag consoleLogTag;
	int ident = 0;                  // current indent level; index into 'tags'
	List<TextTag> tags = new List<TextTag>();
	Stack<string> indents = new Stack<string>();
	Queue<QueuedUpdate> updates = new Queue<QueuedUpdate>();
	QueuedTextWrite lastTextWrite;  // last queued write, used to coalesce raw text
	GLib.TimeoutHandler outputDispatcher;
	bool outputDispatcherRunning = false;
	// Cap on buffer size; oldest text is trimmed beyond this.
	const int MAX_BUFFER_LENGTH = 200 * 1024;
	public OutputConsole()
	{
		buffer = new Gtk.TextBuffer(new Gtk.TextTagTable());
		textEditorControl = new Gtk.TextView(buffer);
		textEditorControl.Editable = true;
		sw = new ScrolledWindow();
		sw.ShadowType = ShadowType.Out;
		sw.Add(textEditorControl);
		this.PackStart(sw, true, true, 0);
		// Side button column: clear and save.
		vbt = new VBox();
		Gdk.Pixbuf clear_pixbuf = MainClass.Tools.GetIconFromStock("file-new.png", IconSize.SmallToolbar);
		Button btnClear = new Button(new Gtk.Image(clear_pixbuf));
		btnClear.TooltipText = MainClass.Languages.Translate("clear");
		btnClear.Relief = ReliefStyle.None;
		btnClear.CanFocus = false;
		btnClear.WidthRequest = btnClear.HeightRequest = 24;
		btnClear.Clicked += delegate(object sender, EventArgs e) {
			Clear();
		};
		Gdk.Pixbuf save_pixbuf = MainClass.Tools.GetIconFromStock("save.png", IconSize.SmallToolbar);
		Button btnSave = new Button(new Gtk.Image(save_pixbuf));
		btnSave.TooltipText = MainClass.Languages.Translate("save");
		btnSave.Relief = ReliefStyle.None;
		btnSave.CanFocus = false;
		btnSave.WidthRequest = btnSave.HeightRequest = 24;
		btnSave.Clicked += delegate(object sender, EventArgs e) {
			Save();
		};
		vbt.WidthRequest = 24;
		vbt.PackStart(btnClear, false, false, 0);
		vbt.PackStart(btnSave, false, false, 0);
		this.PackEnd(vbt, false, false, 0);
		// Formatting tags.
		bold = new TextTag("bold");
		bold.Weight = Pango.Weight.Bold;
		buffer.TagTable.Add(bold);
		errorTag = new TextTag("error");
		errorTag.Foreground = "red";
		buffer.TagTable.Add(errorTag);
		consoleLogTag = new TextTag("consoleLog");
		consoleLogTag.Foreground = "darkgrey";
		buffer.TagTable.Add(consoleLogTag);
		// Level-0 indentation tag.
		tag = new TextTag("0");
		tag.LeftMargin = 10;
		buffer.TagTable.Add(tag);
		tags.Add(tag);
		endMark = buffer.CreateMark("end-mark", buffer.EndIter, false);
		outputDispatcher = new GLib.TimeoutHandler(OutputDispatchHandler);
		customFont = Pango.FontDescription.FromString(MainClass.Settings.ConsoleTaskFont);
		textEditorControl.ModifyFont(customFont);
		textEditorControl.KeyPressEvent += HandleTextEditorControlKeyPressEvent;
		this.ShowAll();
	}
	/// <summary>Applies a new font (Pango description string) to the view.</summary>
	public void SetFont(string fontname){
		customFont = Pango.FontDescription.FromString(fontname);
		textEditorControl.ModifyFont(customFont);
	}
	/// <summary>Forces the view to repaint.</summary>
	public void Redraw(){
		while (Application.EventsPending ())
			Application.RunIteration ();
		textEditorControl.QueueDraw();
		textEditorControl.ShowAll();
		textEditorControl.GrabFocus();
		// NOTE(review): these marker writes look like leftover debug output;
		// confirm they are intentional before removing.
		WriteError("---------\n");
		WriteText("**********\n");
		textEditorControl.HideAll();
		textEditorControl.ShowAll();
		textEditorControl.QueueDraw();
		textEditorControl.ShowAll();
		textEditorControl.GrabFocus();
		//textEditorControl.
	}
	/// <summary>Prompts for a file name and saves the buffer contents.</summary>
	public void Save(){
		Gtk.FileChooserDialog fc = new Gtk.FileChooserDialog(MainClass.Languages.Translate("save_path"), MainClass.MainWindow, FileChooserAction.Save, MainClass.Languages.Translate("cancel"), ResponseType.Cancel, MainClass.Languages.Translate("save"), ResponseType.Accept);
		fc.SetCurrentFolder(MainClass.Workspace.RootDirectory);
		if (fc.Run() == (int)ResponseType.Accept) {
			// 'using' guarantees the file handle is released even if Write throws.
			using (System.IO.StreamWriter write = new System.IO.StreamWriter(fc.Filename)) {
				write.Write(buffer.Text);
			}
		}
		fc.Destroy();
	}
	void HandleTextEditorControlKeyPressEvent(object o, KeyPressEventArgs args)
	{
		if (args.Event.Key != Gdk.Key.Return)
			return;
		WriteText("OK");
	}
	/// <summary>Discards pending updates and clears the buffer.</summary>
	public void Clear()
	{
		lock (updates) {
			updates.Clear();
			lastTextWrite = null;
			outputDispatcherRunning = false;
		}
		buffer.Clear();
	}
	//mechanism to to batch copy text when large amounts are being dumped
	bool OutputDispatchHandler()
	{
		lock (updates) {
			lastTextWrite = null;
			if (updates.Count == 0) {
				// Nothing pending: stop the timeout.
				outputDispatcherRunning = false;
				return false;
			} else if (!outputDispatcherRunning) {
				// Clear() raced us: drop whatever was queued afterwards.
				updates.Clear();
				return false;
			} else {
				while (updates.Count > 0) {
					QueuedUpdate up = updates.Dequeue();
					up.Execute(this);
				}
			}
		}
		return true;
	}
	void AddQueuedUpdate(QueuedUpdate update)
	{
		lock (updates) {
			updates.Enqueue(update);
			if (!outputDispatcherRunning) {
				GLib.Timeout.Add(50, outputDispatcher);
				outputDispatcherRunning = true;
			}
			lastTextWrite = update as QueuedTextWrite;
		}
	}
	// Must run on the UI thread (called from the dispatch handler).
	protected void UnsafeBeginTask(string name, int totalWork)
	{
		if (name != null && name.Length > 0) {
			Indent();
			indents.Push(name);
		} else
			indents.Push(null);
		if (name != null) {
			UnsafeAddText(Environment.NewLine + name + Environment.NewLine, bold);
		}
	}
	/// <summary>Queues a named, indented task section.</summary>
	public void BeginTask(string name, int totalWork)
	{
		QueuedBeginTask bt = new QueuedBeginTask(name, totalWork);
		AddQueuedUpdate(bt);
	}
	/// <summary>Queues the end of the current task section.</summary>
	public void EndTask()
	{
		QueuedEndTask et = new QueuedEndTask();
		AddQueuedUpdate(et);
	}
	// Must run on the UI thread (called from the dispatch handler).
	protected void UnsafeEndTask()
	{
		if (indents.Count > 0 && indents.Pop() != null)
			Unindent();
	}
	/// <summary>Queues plain text output.</summary>
	public void WriteText(string text)
	{
		//raw text has an extra optimisation here, as we can append it to existing updates
		lock (updates) {
			if (lastTextWrite != null) {
				if (lastTextWrite.Tag == null) {
					lastTextWrite.Write(text);
					return;
				}
			}
		}
		QueuedTextWrite qtw = new QueuedTextWrite(text, null);
		AddQueuedUpdate(qtw);
	}
	/// <summary>Queues text rendered with the red error tag.</summary>
	public void WriteError(string text)
	{
		QueuedTextWrite w = new QueuedTextWrite(text, errorTag);
		AddQueuedUpdate(w);
	}
	// Must run on the UI thread. Appends text (trimming the oldest content
	// above MAX_BUFFER_LENGTH) and auto-scrolls when the view is near the end.
	protected void UnsafeAddText(string text, TextTag extraTag)
	{
		//don't allow the pad to hold more than MAX_BUFFER_LENGTH chars
		int overrun = (buffer.CharCount + text.Length) - MAX_BUFFER_LENGTH;
		if (overrun > 0) {
			TextIter start = buffer.StartIter;
			TextIter end = buffer.GetIterAtOffset(overrun);
			buffer.Delete(ref start, ref end);
		}
		TextIter it = buffer.EndIter;
		ScrolledWindow window = textEditorControl.Parent as ScrolledWindow;
		bool scrollToEnd = true;
		if (window != null) {
			// Only auto-scroll if the user hasn't scrolled far up.
			scrollToEnd = window.Vadjustment.Value >= window.Vadjustment.Upper - 2 * window.Vadjustment.PageSize;
		}
		if (extraTag != null)
			buffer.InsertWithTags(ref it, text, tag, extraTag);
		else
			buffer.InsertWithTags(ref it, text, tag);
		if (scrollToEnd) {
			it.LineOffset = 0;
			buffer.MoveMark(endMark, it);
			textEditorControl.ScrollToMark(endMark, 0, false, 0, 0);
		}
	}
	// Pushes one indentation level, creating its left-margin tag on demand.
	void Indent()
	{
		ident++;
		if (ident >= tags.Count) {
			tag = new TextTag(ident.ToString());
			tag.LeftMargin = 10 + 15 * (ident - 1);
			buffer.TagTable.Add(tag);
			tags.Add(tag);
		} else {
			tag = tags[ident];
		}
	}
	// Pops one indentation level.
	void Unindent()
	{
		// Guard must be '> 0': the old '>= 0' check let 'ident' drop to -1
		// and then indexed tags[-1], throwing ArgumentOutOfRangeException.
		if (ident > 0) {
			ident--;
			tag = tags[ident];
		}
	}
	protected override void OnDestroyed()
	{
		base.OnDestroyed();
		lock (updates) {
			updates.Clear();
			lastTextWrite = null;
		}
		//IdeApp.Preferences.CustomOutputPadFontChanged -= HandleCustomFontChanged;
		if (customFont != null) {
			customFont.Dispose();
			customFont = null;
		}
	}
	// Base class for updates applied on the UI thread by the dispatcher.
	private abstract class QueuedUpdate
	{
		public abstract void Execute(OutputConsole pad);
	}
	// A queued text write; raw (untagged) writes can be appended to.
	private class QueuedTextWrite : QueuedUpdate
	{
		private System.Text.StringBuilder Text;
		public TextTag Tag;
		public override void Execute(OutputConsole pad)
		{
			pad.UnsafeAddText(Text.ToString(), Tag);
		}
		public QueuedTextWrite(string text, TextTag tag)
		{
			Text = new System.Text.StringBuilder(text);
			Tag = tag;
		}
		public void Write(string s)
		{
			Text.Append(s);
			// Keep only the newest MAX_BUFFER_LENGTH characters.
			if (Text.Length > MAX_BUFFER_LENGTH)
				Text.Remove(0, Text.Length - MAX_BUFFER_LENGTH);
		}
	}
	// A queued "begin task" marker.
	private class QueuedBeginTask : QueuedUpdate
	{
		public string Name;
		public int TotalWork;
		public override void Execute(OutputConsole pad)
		{
			pad.UnsafeBeginTask(Name, TotalWork);
		}
		public QueuedBeginTask(string name, int totalWork)
		{
			TotalWork = totalWork;
			Name = name;
		}
	}
	// A queued "end task" marker.
	private class QueuedEndTask : QueuedUpdate
	{
		public override void Execute(OutputConsole pad)
		{
			pad.UnsafeEndTask();
		}
	}
}
}
| |
///////////////////////////////////////////////////////////////////////////////////
// Open 3D Model Viewer (open3mod) (v2.0)
// [Tab.cs]
// (c) 2012-2015, Open3Mod Contributors
//
// Licensed under the terms and conditions of the 3-clause BSD license. See
// the LICENSE file in the root folder of the repository for the details.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
///////////////////////////////////////////////////////////////////////////////////
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Linq;
using System.Text;
using OpenTK;
namespace open3mod
{
/// <summary>
/// Represents a single tab in the UI. A tab always contains exactly one scene
/// being rendered, a scene being loaded or no scene at all (the latter is the
/// dummy tab that is initially open).
///
/// A scene is thus coupled to a tab and therefore owned by TabState. The
/// list of all tabs is maintained by UIState, which also knows which tab
/// is active.
/// </summary>
public sealed class Tab
{
/// <summary>
/// Enum of all supported tab states.
/// </summary>
public enum TabState {
    Empty = 0,      // dummy tab, no scene attached
    Loading,        // a scene file is currently being loaded
    Rendering,      // a scene is loaded and being rendered
    Failed          // loading failed; see ErrorMessage
}
/// <summary>
/// Index all 3D views - there can be up to four 3D views at this time,
/// but the rest of the codebase always works with _Max so it can be
/// nicely adjusted simply by adding more indexes.
/// </summary>
public enum ViewIndex
{
    Index0 = 0,
    Index1,
    Index2,
    Index3,
    _Max            // sentinel: number of views / "no view"
}
/// <summary>
/// Enumerates all the separator bars that can be dragged in order to
/// resize viewports.
/// </summary>
public enum ViewSeparator
{
    Horizontal = 0,
    Vertical,
    _Max,           // sentinel: "no separator"
    Both            // the intersection point of both separators
}
/// <summary>
/// Supported arrangements of 3D views. Right now only the number of
/// 3d windows.
/// </summary>
public enum ViewMode
{
    // values pertain to CoreSettings:DefaultViewMode!
    Single = 0,
    Two = 1,
    Four = 2
}
/// <summary>
/// Current state of the tab. The state flag is maintained internally
/// and switched to "Rendering" as soon as a scene is set. The initial
/// state can be set using the constructor.
/// </summary>
public TabState State { get; private set; }
/// <summary>
/// Index of the currently active viewport
/// </summary>
public ViewIndex ActiveViewIndex = 0;
/// <summary>
/// Array of viewport objects. Entries are null until a viewport index is
/// at least used once with the tab. After a viewport has been enabled once,
/// the corresponding Viewport instance is retained so a viewport
/// keeps its state when the user hides it and shows it again.
///
/// Which viewport setup is currently active in the GUI is specified by the
/// ActiveViewMode property.
/// </summary>
public Viewport[] ActiveViews
{
    get
    {
        // Lazily recompute viewport rectangles after a split was dragged.
        if (_dirtySplit)
        {
            ValidateViewportBounds();
        }
        return _activeViews;
    }
    set{
        _activeViews = value;
    }
}
private Viewport[] _activeViews = new Viewport[(int)ViewIndex._Max];
/// <summary>
/// Current view mode. Setting it rebuilds the viewport array with the
/// default layout for that mode and persists the choice to CoreSettings.
/// </summary>
public ViewMode ActiveViewMode
{
    get { return _activeViewMode; }
    set
    {
        // hardcoded table of viewport sizes. This is the only location
        // so changing these constants is sufficient to adjust viewport defaults
        _activeViewMode = value;
        CoreSettings.CoreSettings.Default.DefaultViewMode = (int) value;
        switch(_activeViewMode)
        {
            case ViewMode.Single:
                ActiveViews = new []
                {
                    new Viewport(new Vector4(0.0f, 0.0f, 1.0f, 1.0f), CameraMode.Orbit),
                    null,
                    null,
                    null
                };
                break;
            case ViewMode.Two:
                ActiveViews = new []
                {
                    new Viewport(new Vector4(0.0f, 0.0f, 1.0f, 0.5f), CameraMode.Orbit),
                    null,
                    new Viewport(new Vector4(0.0f, 0.5f, 1.0f, 1.0f), CameraMode.X),
                    null
                };
                break;
            case ViewMode.Four:
                ActiveViews = new []
                {
                    new Viewport(new Vector4(0.0f, 0.0f, 0.5f, 0.5f), CameraMode.Orbit),
                    new Viewport(new Vector4(0.5f, 0.0f, 1.0f, 0.5f), CameraMode.Z),
                    new Viewport(new Vector4(0.0f, 0.5f, 0.5f, 1.0f), CameraMode.X),
                    new Viewport(new Vector4(0.5f, 0.5f, 1.0f, 1.0f), CameraMode.Y)
                };
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
        Debug.Assert(ActiveViews[0] != null);
        // Fall back to the first viewport if the active one got disabled.
        if (ActiveViews[(int)ActiveViewIndex] == null)
        {
            ActiveViewIndex = ViewIndex.Index0;
        }
    }
}
/// <summary>
/// Obtain an instance of the current active camera controller (i.e.
/// the controller for the current active view and current active camera
/// mode. This may be a null.
/// </summary>
public ICameraController ActiveCameraController {
    get { return ActiveCameraControllerForView(ActiveViewIndex); }
}
/// <summary>
/// Current active scene. Setting a new scene disposes the previous one
/// and switches State to Rendering (or Empty when set to null).
/// </summary>
public Scene ActiveScene
{
    get { return _activeScene; }
    set
    {
        Debug.Assert(State != TabState.Failed, "cannot recover from TabState.Failed");
        // make sure the previous scene instance is properly disposed
        if (_activeScene != null)
        {
            _activeScene.Dispose();
        }
        _activeScene = value;
        // switch state to "Rendering" if the new scene is non-null
        if (_activeScene == null)
        {
            State = TabState.Empty;
        }
        else
        {
            State = TabState.Rendering;
        }
    }
}
/// <summary>
/// File name of the scene in the tab. This member is already set while
/// the scene is loading and "ActiveScene" is null. This field is null
/// if the tab is in state TabState.Empty.
/// </summary>
public string File { get; private set; }
/// <summary>
/// If the tab is in a failed state this contains the error message
/// that describes the failure. Otherwise, this is an empty string.
/// </summary>
public string ErrorMessage
{
    get { return _errorMessage; }
}
/// <summary>
/// Unique ID of the tab. This is used to connect with the UI. The value
/// is set via the constructor and never changes.
/// </summary>
public readonly object Id;
private Scene _activeScene;
private string _errorMessage;
private ViewMode _activeViewMode = ViewMode.Four;
/// <summary>
/// Position of the horizontal and vertical splits
/// in [MinimumViewportSplit,1-MinimumViewportSplit]
/// </summary>
private float _verticalSplitPos = 0.5f;
private float _horizontalSplitPos = 0.5f;
/// <summary>
/// dirty flag for the recalculation of viewport bounds
/// </summary>
private bool _dirtySplit = true;
/// <summary>
/// Create an empty tab.
/// </summary>
/// <param name="id">Static id to associate with the tab, see "ID"</param>
/// <param name="fileBeingLoaded">Specifies the file that is being loaded
/// for this tab. If this is non-null, the state of the tab is set
/// to TabState.Loading and the file name is stored in the File field.</param>
public Tab(object id, string fileBeingLoaded)
{
    // Restore the persisted view mode; fall back to Four (and repair the
    // stored setting) when the value is out of range.
    var vm = CoreSettings.CoreSettings.Default.DefaultViewMode;
    if(vm <= 2 && vm >= 0)
    {
        ActiveViewMode = (ViewMode) vm;
    }
    else
    {
        ActiveViewMode = ViewMode.Four;
        CoreSettings.CoreSettings.Default.DefaultViewMode = (int) ViewMode.Four;
    }
    State = fileBeingLoaded == null ? TabState.Empty : TabState.Loading;
    File = fileBeingLoaded;
    Id = id;
}
/// <summary>
/// Gets the ICameraController responsible for a particular view
/// for the current active camera mode.
/// </summary>
/// <param name="targetView">View index</param>
/// <returns>ICameraController or null if there is no implementation</returns>
public ICameraController ActiveCameraControllerForView(ViewIndex targetView)
{
    var viewport = ActiveViews[(int)targetView];
    if (viewport == null)
    {
        return null;
    }
    return viewport.ActiveCameraControllerForView();
}
// Releases the active scene's GPU resources. NOTE(review): the visible
// part of the class does not declare IDisposable, and assigning
// ActiveScene = null routes through the setter, which disposes
// _activeScene a second time - presumably Scene.Dispose is idempotent;
// confirm.
public void Dispose()
{
    if (ActiveScene != null)
    {
        ActiveScene.Dispose();
        ActiveScene = null;
    }
    GC.SuppressFinalize(this);
}
#if DEBUG
~Tab()
{
    // OpenTk is unsafe from here, explicit Dispose() is required.
    Debug.Assert(false);
}
#endif
/// <summary>
/// Sets the tab to a permanent "failed to load" state. In this
/// state, the tab keeps displaying an error message but nothing
/// else.
/// </summary>
/// <param name="message"></param>
public void SetFailed(string message)
{
    State = TabState.Failed;
    // NOTE(review): assigns the field directly (bypassing the ActiveScene
    // setter), so any previous scene is not disposed here - confirm the
    // caller guarantees no scene is loaded when failing.
    _activeScene = null;
    _errorMessage = message;
}
/// <summary>
/// Changes the camera mode in the currently active view.
/// </summary>
/// <param name="cameraMode">New camera mode</param>
public void ChangeActiveCameraMode(CameraMode cameraMode)
{
    ChangeCameraModeForView(ActiveViewIndex, cameraMode);
}
/// <summary>
/// Changes the camera mode for a view. The viewport at the given
/// index must be enabled.
/// </summary>
/// <param name="viewIndex">index of the view.</param>
/// <param name="cameraMode">New camera mode.</param>
public void ChangeCameraModeForView(ViewIndex viewIndex, CameraMode cameraMode)
{
    Debug.Assert(ActiveViews[(int)viewIndex] != null);
    var view = ActiveViews[(int) viewIndex];
    view.ChangeCameraModeForView(cameraMode);
}
/// <summary>
/// Resets the camera in the currently active view. The active
/// viewport must be enabled.
/// </summary>
public void ResetActiveCameraController()
{
    Debug.Assert(ActiveViews[(int)ActiveViewIndex] != null);
    var view = ActiveViews[(int)ActiveViewIndex];
    view.ResetCameraController();
}
/// <summary>
/// Converts a (mouse) hit position to a viewport index - i.e. it
/// determines which viewport, if any, contains the given point.
/// </summary>
/// <param name="x">Hit position x, in normalized [0,1] range</param>
/// <param name="y">Hit position y, in normalized [0,1] range</param>
/// <returns>Tab.ViewIndex._Max if the hit coordinate doesn't hit a
/// viewport. If not, the ViewIndex of the tab that was hit.</returns>
public ViewIndex GetViewportIndexHit(float x, float y)
{
    var hit = ViewIndex.Index0;
    foreach (var viewport in ActiveViews)
    {
        if (viewport != null)
        {
            var bounds = viewport.Bounds;
            // Bounds packs (left, top, right, bottom) into (X, Y, Z, W).
            if (bounds.X <= x && x <= bounds.Z &&
                bounds.Y <= y && y <= bounds.W)
            {
                return hit;
            }
        }
        ++hit;
    }
    // Incremented past every slot - yields the "no hit" sentinel.
    return hit;
}
/// <summary>
/// Converts a mouse position to a viewport separator. It therefore
/// checks whether the mouse is in a region where dragging viewport
/// borders is possible.
/// </summary>
/// <param name="x">Mouse x, in relative coordinates</param>
/// <param name="y">Mouse y, in relative coordinates</param>
/// <returns>Tab.ViewSeparator._Max if the mouse coordinate doesn't hit a
/// viewport separator. If not, the separator that was hit.</returns>
public ViewSeparator GetViewportSeparatorHit(float x, float y)
{
    // In single-view mode there are no separators at all.
    if (_activeViewMode == ViewMode.Single)
    {
        return ViewSeparator._Max;
    }
    var reference = ActiveViews[0];
    Debug.Assert(reference != null);
    const float threshold = 0.01f;
    // The first viewport's right (Z) and bottom (W) edges coincide with
    // the split lines. Two-view mode has no vertical separator.
    var nearVertical = Math.Abs(x - reference.Bounds.Z) < threshold
        && _activeViewMode != ViewMode.Two;
    var nearHorizontal = Math.Abs(y - reference.Bounds.W) < threshold;
    if (nearVertical)
    {
        return nearHorizontal ? ViewSeparator.Both : ViewSeparator.Vertical;
    }
    return nearHorizontal ? ViewSeparator.Horizontal : ViewSeparator._Max;
}
private const float MinimumViewportSplit = 0.1f;

/// <summary>
/// Sets a new position for the horizontal split between viewports.
///
/// This is only possible (and otherwise ignored) if all the four viewports are enabled.
/// </summary>
/// <param name="f">New splitter bar position, in [0,1]. Positions outside
/// [MinimumViewportSplit,1-MinimumViewportSplit] are clamped.</param>
public void SetViewportSplitH(float f)
{
    // The horizontal splitter only exists in four-view mode.
    if (ActiveViewMode != ViewMode.Four)
    {
        return;
    }
    // Clamp into the allowed band (NaN passes through unchanged, as before).
    _horizontalSplitPos = Math.Max(MinimumViewportSplit,
        Math.Min(1.0f - MinimumViewportSplit, f));
    _dirtySplit = true;
}
/// <summary>
/// Sets a new position for the vertical split between viewports.
///
/// This is only possible (and otherwise ignored) if at least two viewports are enabled.
/// </summary>
/// <param name="f">New splitter bar position, in [0,1]. Positions outside
/// [MinimumViewportSplit,1-MinimumViewportSplit] are clamped.</param>
public void SetViewportSplitV(float f)
{
    // The vertical splitter exists in two- and four-view modes only.
    if (ActiveViewMode != ViewMode.Two && ActiveViewMode != ViewMode.Four)
    {
        return;
    }
    // Clamp into the allowed band (NaN passes through unchanged, as before).
    _verticalSplitPos = Math.Max(MinimumViewportSplit,
        Math.Min(1.0f - MinimumViewportSplit, f));
    _dirtySplit = true;
}
/// <summary>
/// Ensure every viewport bounds do not overlap the splitter in both directions
/// </summary>
private void ValidateViewportBounds()
{
    Debug.Assert(_dirtySplit);
    foreach (var viewport in _activeViews.Where(viewport => viewport != null))
    {
        // Bounds packs (left, top, right, bottom) into (X, Y, Z, W).
        var b = viewport.Bounds;
        //set vertical split
        // Snap whichever edge of this viewport lies on the vertical split
        // line to the new split position. NOTE(review): "Math.Abs(..) > 0.0f"
        // is an exact float inequality test - confirm no tolerance was meant.
        // The 0.99999f factor tolerates tiny float drift against the minimum.
        if (Math.Abs(b.Y - _verticalSplitPos) > 0.0f && b.Y >= MinimumViewportSplit * 0.99999f)
        {
            b.Y = _verticalSplitPos;
        }
        else if (b.W <= 1.0f - MinimumViewportSplit * 0.99999f)
        {
            b.W = _verticalSplitPos;
        }
        //set horizontal split
        // Same snapping for the horizontal split line.
        if (Math.Abs(b.X - _horizontalSplitPos) > 0.0f && b.X >= MinimumViewportSplit * 0.99999f)
        {
            b.X = _horizontalSplitPos;
        }
        else if (b.Z <= 1.0f - MinimumViewportSplit * 0.99999f)
        {
            b.Z = _horizontalSplitPos;
        }
        viewport.Bounds = b;
    }
    // All viewports now agree with the current split positions.
    _dirtySplit = false;
}
}
}
/* vi: set shiftwidth=4 tabstop=4: */
| |
using CrystalDecisions.CrystalReports.Engine;
using CrystalDecisions.Windows.Forms;
using DpSdkEngLib;
using DPSDKOPSLib;
using Microsoft.VisualBasic;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Diagnostics;
using System.Windows.Forms;
using System.Linq;
using System.Xml.Linq;
// ERROR: Not supported in C#: OptionDeclaration
namespace _4PosBackOffice.NET
{
internal partial class frmStockMultiBarcode : System.Windows.Forms.Form
{
    // Recordset bound to the barcode grid. The wrapper property keeps the ADO
    // event subscriptions in sync whenever the recordset reference is swapped
    // (this emulates VB6 "WithEvents", as produced by the upgrade wizard).
    private ADODB.Recordset withEventsField_adoPrimaryRS;
    public ADODB.Recordset adoPrimaryRS {
        get { return withEventsField_adoPrimaryRS; }
        set {
            if (withEventsField_adoPrimaryRS != null) {
                withEventsField_adoPrimaryRS.MoveComplete -= adoPrimaryRS_MoveComplete;
                withEventsField_adoPrimaryRS.WillChangeRecord -= adoPrimaryRS_WillChangeRecord;
            }
            withEventsField_adoPrimaryRS = value;
            if (withEventsField_adoPrimaryRS != null) {
                withEventsField_adoPrimaryRS.MoveComplete += adoPrimaryRS_MoveComplete;
                withEventsField_adoPrimaryRS.WillChangeRecord += adoPrimaryRS_WillChangeRecord;
            }
        }
    }
    bool mbChangedByCode;   // true while code (not the user) changes bound controls
    int mvBookMark;         // bookmark of the current record, restored on cancel
    bool mbEditFlag;        // an existing record is being edited
    bool mbAddNewFlag;      // a new record is being added
    bool mbDataChanged;     // unsaved changes exist in the grid
    string gFilter;         // stock-item filter definition (frmFilter format)
    string gFilterSQL;      // SQL criteria built by frmFilter from gFilter

    // Applies the configured language captions / right-to-left settings to the
    // form's buttons and hooks up the context-help id for this form.
    private void loadLanguage()
    {
        //frmStockMultiBarcode = No Code [Edit Stock Item Barcodes]
        //rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
        //If rsLang.RecordCount Then frmStockMultiBarcode.Caption = rsLang("LanguageLayoutLnk_Description"): frmStockMultiBarcode.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
        //lblHeading = No Code [Using the "Stock Item Selector"...]
        //rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
        //If rsLang.RecordCount Then lblHeading.Caption = rsLang("LanguageLayoutLnk_Description"): lblHeading.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
        //Note: _lbl_0 caption grammar wrong!
        //_lbl_0 = No Label [Which Shrink Quantity do you wish to edit]
        //rsLang.filter = "LanguageLayoutLnk_LanguageID=" & 0000
        //If rsLang.RecordCount Then _lbl_0.Caption = rsLang("LanguageLayoutLnk_Description"): _lbl_0.RightToLeft = rsLang("LanguageLayoutLnk_RightTL")
        modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1006;
        //Filter|Checked
        if (modRecordSet.rsLang.RecordCount){cmdFilter.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;cmdFilter.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;}
        modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1085;
        //Print|Checked
        if (modRecordSet.rsLang.RecordCount){cmdPrint.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;cmdPrint.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;}
        modRecordSet.rsLang.filter = "LanguageLayoutLnk_LanguageID=" + 1004;
        //Exit|Checked
        if (modRecordSet.rsLang.RecordCount){cmdClose.Text = modRecordSet.rsLang.Fields("LanguageLayoutLnk_Description").Value;cmdClose.RightToLeft = modRecordSet.rsLang.Fields("LanguageLayoutLnk_RightTL").Value;}
        modRecordSet.rsHelp.filter = "Help_Section=0 AND Help_Form='" + this.Name + "'";
        //UPGRADE_ISSUE: Form property frmStockMultiBarcode.ToolTip1 was not upgraded. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="CC4C7EC0-C903-48FC-ACCC-81861D12DA4A"'
        if (modRecordSet.rsHelp.RecordCount)
            this.ToolTip1 = modRecordSet.rsHelp.Fields("Help_ContextID").Value;
    }

    // Re-queries the grid whenever a new shrink quantity is picked.
    private void cmbShrink_ClickEvent(System.Object eventSender, MouseEventArgs eventArgs)
    {
        getNamespace();
    }

    // Lets the user edit the stock-item filter, then re-queries the grid.
    private void cmdFilter_Click(System.Object eventSender, System.EventArgs eventArgs)
    {
        My.MyProject.Forms.frmFilter.loadFilter(ref gFilter);
        getNamespace();
    }

    // Rebuilds the grid's recordset from the selected shrink quantity and the
    // current stock-item filter, then reconfigures the grid columns.
    private void getNamespace()
    {
        if (string.IsNullOrEmpty(cmbShrink.BoundText))
            return;
        string lString = null;
        // NOTE(review): this shrink-quantity criterion is overwritten below and
        // never reaches the final query - it looks like a condition lost during
        // the VB6 upgrade; confirm the intended SQL before restoring it.
        lString = " ((Catalogue.Catalogue_Quantity)=" + this.cmbShrink.BoundText + ") ";
        if (string.IsNullOrEmpty(gFilter)) {
            this.lblHeading.Text = "";
        } else {
            My.MyProject.Forms.frmFilter.buildCriteria(ref gFilter);
            this.lblHeading.Text = My.MyProject.Forms.frmFilter.gHeading;
        }
        gFilterSQL = My.MyProject.Forms.frmFilter.gCriteria;
        if (string.IsNullOrEmpty(gFilterSQL)) {
            lString = " WHERE StockItem.StockItem_Disabled=0 AND StockItem.StockItem_Discontinued=0 ";
        } else {
            lString = gFilterSQL + " AND StockItem.StockItem_Disabled=0 AND StockItem.StockItem_Discontinued=0 ";
        }
        // FIX: "ref" requires an assignable variable; the upgrade wizard passed
        // a string literal by ref, which does not compile in C#.
        string sql = "SELECT StockItem.StockItem_Name, Catalogue.Catalogue_Barcode, Catalogue.Catalogue_Disabled, Catalogue.Catalogue_StockItemID, Catalogue.Catalogue_Quantity FROM StockItem INNER JOIN Catalogue ON StockItem.StockItemID = Catalogue.Catalogue_StockItemID " + lString + " ORDER BY StockItem.StockItem_Name;";
        adoPrimaryRS = modRecordSet.getRS(ref sql);
        grdDataGrid.DataSource = adoPrimaryRS;
        grdDataGrid.Columns[0].DataPropertyName = "Stock Name";
        grdDataGrid.Columns[0].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgLeft;
        //grdDataGrid.Columns(0).Frozen = True
        grdDataGrid.Columns[1].DataPropertyName = "Barcode";
        grdDataGrid.Columns[1].DefaultCellStyle.Alignment = MSDataGridLib.AlignmentConstants.dbgRight;
        grdDataGrid.Columns[1].Width = sizeConvertors.twipsToPixels(1400, true);
        grdDataGrid.Columns[2].Visible = false;
        grdDataGrid.Columns[3].Visible = false;
        grdDataGrid.Columns[4].Visible = false;
        frmStockMultiBarcode_Resize(this, new System.EventArgs());
        mbDataChanged = false;
    }

    // Loads the Crystal report for the current grid contents and shows it in
    // the shared report viewer form.
    private void cmdPrint_Click(System.Object eventSender, System.EventArgs eventArgs)
    {
        // FIX: the report must be instantiated before Load(); the converted
        // code initialised it to null (default) and then dereferenced it.
        CrystalDecisions.CrystalReports.Engine.ReportDocument Report = new CrystalDecisions.CrystalReports.Engine.ReportDocument();
        ADODB.Recordset rs = default(ADODB.Recordset);
        Report.Load("cryStockItemBarcode.rpt");
        System.Windows.Forms.Cursor.Current = System.Windows.Forms.Cursors.WaitCursor;
        // FIX: route the SQL through a local so it can legally be passed by ref.
        string sql = "SELECT * FROM Company";
        rs = modRecordSet.getRS(ref sql);
        Report.SetParameterValue("txtCompanyName", rs.Fields("Company_Name"));
        Report.SetParameterValue("txtTitle", this.Text + " Where Shrink Quantity is " + this.cmbShrink.BoundText);
        Report.SetParameterValue("txtFilter", this.lblHeading.Text);
        rs.Close();
        //Report.Database.SetDataSource(adoPrimaryRS, 3)
        // NOTE(review): VB-style indexed access (Tables(1), ParameterFields("..."))
        // remains from the conversion - verify against the Crystal Reports API.
        Report.Database.Tables(1).SetDataSource(adoPrimaryRS);
        //Report.VerifyOnEveryPrint = True
        // FIX: ToString is a method; the converted code referenced the method
        // group without invoking it.
        My.MyProject.Forms.frmReportShow.Text = Report.ParameterFields("txtTitle").ToString();
        My.MyProject.Forms.frmReportShow.CRViewer1.ReportSource = Report;
        My.MyProject.Forms.frmReportShow.mReport = Report;
        My.MyProject.Forms.frmReportShow.sMode = "0";
        My.MyProject.Forms.frmReportShow.CRViewer1.Refresh();
        System.Windows.Forms.Cursor.Current = System.Windows.Forms.Cursors.Default;
        My.MyProject.Forms.frmReportShow.ShowDialog();
    }

    // Populates the shrink-quantity combo and preselects quantity 1.
    private void buildDataControls()
    {
        // FIX: locals instead of literals / parenthesized member access so the
        // by-ref arguments compile.
        string sql = "SELECT DISTINCT ShrinkItem.ShrinkItem_Quantity From ShrinkItem ORDER BY ShrinkItem.ShrinkItem_Quantity;";
        string boundColumn = "ShrinkItem_Quantity";
        string listField = "ShrinkItem_Quantity";
        doDataControl(ref cmbShrink, ref sql, ref boundColumn, ref listField);
        cmbShrink.BoundText = Convert.ToString(1);
    }

    // Binds a lookup recordset to a data-bound control.
    private void doDataControl(ref myDataGridView dataControl, ref string sql, ref string boundColumn, ref string listField)
    {
        ADODB.Recordset rs = default(ADODB.Recordset);
        rs = modRecordSet.getRS(ref sql);
        dataControl.DataSource = rs;
        dataControl.boundColumn = boundColumn;
        dataControl.listField = listField;
    }

    // Initial form setup: bind controls, apply default filter and captions.
    private void frmStockMultiBarcode_Load(System.Object eventSender, System.EventArgs eventArgs)
    {
        buildDataControls();
        gFilter = "stockitem";
        getNamespace();
        mbDataChanged = false;
        loadLanguage();
    }

    // ESC closes the form (VB6 KeyAscii pattern preserved by the upgrade).
    private void frmStockMultiBarcode_KeyPress(System.Object eventSender, System.Windows.Forms.KeyPressEventArgs eventArgs)
    {
        short KeyAscii = Strings.Asc(eventArgs.KeyChar);
        if (KeyAscii == 27) {
            KeyAscii = 0;
            cmdClose_Click(cmdClose, new System.EventArgs());
        }
        eventArgs.KeyChar = Strings.Chr(KeyAscii);
        if (KeyAscii == 0) {
            eventArgs.Handled = true;
        }
    }

    //UPGRADE_WARNING: Event frmStockMultiBarcode.Resize may fire when form is initialized. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="88B12AE1-6DE0-48A0-86F1-60C0686C026A"'
    private void frmStockMultiBarcode_Resize(System.Object eventSender, System.EventArgs eventArgs)
    {
        // ERROR: Not supported in C#: OnErrorStatement
        //This will resize the grid when the form is resized
        System.Windows.Forms.Application.DoEvents();
        grdDataGrid.Height = sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(this.ClientRectangle.Height, false) - 30 - sizeConvertors.pixelToTwips(picButtons.Height, false), false);
        grdDataGrid.Columns[0].Width = sizeConvertors.twipsToPixels(sizeConvertors.pixelToTwips(grdDataGrid.Width, true) - 1400 - 580, true);
    }

    private void frmStockMultiBarcode_FormClosed(System.Object eventSender, System.Windows.Forms.FormClosedEventArgs eventArgs)
    {
        //UPGRADE_WARNING: Screen property Screen.MousePointer has a new behavior. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6BA9B8D2-2A32-4B6E-8D36-44949974A5B4"'
        System.Windows.Forms.Cursor.Current = System.Windows.Forms.Cursors.Default;
    }

    private void adoPrimaryRS_MoveComplete(ADODB.EventReasonEnum adReason, ADODB.Error pError, ref ADODB.EventStatusEnum adStatus, ADODB.Recordset pRecordset)
    {
        //This will display the current record position for this recordset
    }

    private void adoPrimaryRS_WillChangeRecord(ADODB.EventReasonEnum adReason, int cRecords, ref ADODB.EventStatusEnum adStatus, ADODB.Recordset pRecordset)
    {
        //This is where you put validation code
        //This event gets called when the following actions occur
        bool bCancel = false;
        switch (adReason) {
            case ADODB.EventReasonEnum.adRsnAddNew:
                break;
            case ADODB.EventReasonEnum.adRsnClose:
                break;
            case ADODB.EventReasonEnum.adRsnDelete:
                break;
            case ADODB.EventReasonEnum.adRsnFirstChange:
                break;
            case ADODB.EventReasonEnum.adRsnMove:
                break;
            case ADODB.EventReasonEnum.adRsnRequery:
                break;
            case ADODB.EventReasonEnum.adRsnResynch:
                break;
            case ADODB.EventReasonEnum.adRsnUndoAddNew:
                break;
            case ADODB.EventReasonEnum.adRsnUndoDelete:
                break;
            case ADODB.EventReasonEnum.adRsnUndoUpdate:
                break;
            case ADODB.EventReasonEnum.adRsnUpdate:
                break;
        }
        //bCancel = True
        // adStatus = adStatusCantDeny
    }

    // Abandons any pending edit/add and repositions on the saved bookmark.
    private void cmdCancel_Click()
    {
        // ERROR: Not supported in C#: OnErrorStatement
        mbEditFlag = false;
        mbAddNewFlag = false;
        adoPrimaryRS.CancelUpdate();
        //UPGRADE_WARNING: Couldn't resolve default property of object mvBookMark. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
        if (mvBookMark > 0) {
            //UPGRADE_WARNING: Couldn't resolve default property of object mvBookMark. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="6A50421D-15FE-4896-8A1B-2EC21E9037B2"'
            adoPrimaryRS.Bookmark = mvBookMark;
        } else {
            adoPrimaryRS.MoveFirst();
        }
        mbDataChanged = false;
    }

    //UPGRADE_NOTE: update was upgraded to update_Renamed. Click for more: 'ms-help://MS.VSCC.v90/dv_commoner/local/redirect.htm?keyword="A9E4979A-37FA-4718-9994-97DD76ED70A7"'
    // Commits any pending batch changes to the database.
    private void update_Renamed()
    {
        // ERROR: Not supported in C#: OnErrorStatement
        adoPrimaryRS.UpdateBatch(ADODB.AffectEnum.adAffectAll);
        if (mbAddNewFlag) {
            adoPrimaryRS.MoveLast();
            //move to the new record
        }
        mbEditFlag = false;
        mbAddNewFlag = false;
        mbDataChanged = false;
        // FIX: removed the unreachable VB error-handler label (UpdateErr) left
        // behind by the upgrade wizard; Err() does not exist in C#.
    }

    // Saves pending changes, then closes the form.
    private void cmdClose_Click(System.Object eventSender, System.EventArgs eventArgs)
    {
        update_Renamed();
        this.Close();
    }

    private void goFirst()
    {
        // ERROR: Not supported in C#: OnErrorStatement
        adoPrimaryRS.MoveFirst();
        mbDataChanged = false;
        // FIX: removed the unreachable VB error-handler label (GoFirstError).
    }

    private void goLast()
    {
        // ERROR: Not supported in C#: OnErrorStatement
        adoPrimaryRS.MoveLast();
        mbDataChanged = false;
        // FIX: removed the unreachable VB error-handler label (GoLastError).
    }

    //Private Sub grdDataGrid_CellValueChanged(ByVal eventSender As System.Object, ByVal eventArgs As AxMSDataGridLib.DDataGridEvents_CellValueChangedEvent) Handles grdDataGrid.CellValueChanged
    //    If grdDataGrid.Columns(ColIndex).DataFormat.Format = "#,##0.00" Then
    //        grdDataGrid.Columns(ColIndex).DataFormat = 0
    //    End If
    //End Sub
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#if XMLSERIALIZERGENERATOR
namespace Microsoft.XmlSerializer.Generator
#else
namespace System.Xml.Serialization
#endif
{
using System.Reflection;
using System.Collections;
using System.IO;
using System.Xml.Schema;
using System;
using System.Text;
using System.Threading;
using System.Globalization;
using System.Security;
using System.Xml.Serialization.Configuration;
using System.Diagnostics;
using System.Collections.Generic;
using System.Runtime.Versioning;
using System.Xml;
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents"]/*' />
/// <devdoc>
///    <para>Bundles the callbacks an <see cref="XmlSerializer"/> raises when it
///    encounters unknown nodes, attributes, elements, or unreferenced objects
///    during deserialization.</para>
/// </devdoc>
public struct XmlDeserializationEvents
{
    private XmlNodeEventHandler _onUnknownNode;
    private XmlAttributeEventHandler _onUnknownAttribute;
    private XmlElementEventHandler _onUnknownElement;
    private UnreferencedObjectEventHandler _onUnreferencedObject;
#if !XMLSERIALIZERGENERATOR
    internal object sender;
#endif

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnknownNode"]/*' />
    public XmlNodeEventHandler OnUnknownNode
    {
        get { return _onUnknownNode; }
        set { _onUnknownNode = value; }
    }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnknownAttribute"]/*' />
    public XmlAttributeEventHandler OnUnknownAttribute
    {
        get { return _onUnknownAttribute; }
        set { _onUnknownAttribute = value; }
    }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnknownElement"]/*' />
    public XmlElementEventHandler OnUnknownElement
    {
        get { return _onUnknownElement; }
        set { _onUnknownElement = value; }
    }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnreferencedObject"]/*' />
    public UnreferencedObjectEventHandler OnUnreferencedObject
    {
        get { return _onUnreferencedObject; }
        set { _onUnreferencedObject = value; }
    }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation"]/*' />
///<internalonly/>
/// <devdoc>
///    <para>Base contract for generated serializer assemblies; every member
///    throws NotSupportedException unless overridden by generated code.</para>
/// </devdoc>
public abstract class XmlSerializerImplementation
{
    // All default implementations fail identically; centralize the exception.
    private static Exception NotSupported() { return new NotSupportedException(); }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.Reader"]/*' />
    public virtual XmlSerializationReader Reader { get { throw NotSupported(); } }
    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.Writer"]/*' />
    public virtual XmlSerializationWriter Writer { get { throw NotSupported(); } }
    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.ReadMethods"]/*' />
    public virtual Hashtable ReadMethods { get { throw NotSupported(); } }
    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.WriteMethods"]/*' />
    public virtual Hashtable WriteMethods { get { throw NotSupported(); } }
    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.TypedSerializers"]/*' />
    public virtual Hashtable TypedSerializers { get { throw NotSupported(); } }
    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.CanSerialize"]/*' />
    public virtual bool CanSerialize(Type type) { throw NotSupported(); }
    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.GetSerializer"]/*' />
    public virtual XmlSerializer GetSerializer(Type type) { throw NotSupported(); }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public class XmlSerializer
{
// Selects how serialization code is obtained: generated/compiled code only,
// pure reflection, or reflection as a fallback when no generated code exists.
internal enum SerializationMode
{
    CodeGenOnly,
    ReflectionOnly,
    ReflectionAsBackup
}

// Process-wide serialization strategy; defaults to generated code with
// reflection as a backup.
internal static SerializationMode Mode { get; set; } = SerializationMode.ReflectionAsBackup;

// True when the current Mode allows the reflection-based code path.
private static bool ReflectionMethodEnabled
{
    get
    {
        return Mode == SerializationMode.ReflectionOnly || Mode == SerializationMode.ReflectionAsBackup;
    }
}
private TempAssembly _tempAssembly;     // generated serialization assembly, if any
#pragma warning disable 0414
private bool _typedSerializer;          // set by generated, typed serializers
#pragma warning restore 0414
private Type _primitiveType;            // non-null when the root type is a known primitive
private XmlMapping _mapping;            // type mapping driving (de)serialization
private XmlDeserializationEvents _events = new XmlDeserializationEvents();
#if uapaot
private XmlSerializer innerSerializer;  // serializer obtained from the generated contract
public string DefaultNamespace = null;
#else
internal string DefaultNamespace = null;
#endif
private Type _rootType;                 // root CLR type this serializer handles
private static TempAssemblyCache s_cache = new TempAssemblyCache();
private static volatile XmlSerializerNamespaces s_defaultNamespaces;
// Lazily-built default namespace table (xsi/xsd prefixes). The benign race is
// intentional: the field is volatile and only ever transitions null -> built;
// the second null check merely avoids overwriting a table another thread
// published in the meantime.
private static XmlSerializerNamespaces DefaultNamespaces
{
    get
    {
        if (s_defaultNamespaces == null)
        {
            XmlSerializerNamespaces nss = new XmlSerializerNamespaces();
            nss.AddInternal("xsi", XmlSchema.InstanceNamespace);
            nss.AddInternal("xsd", XmlSchema.Namespace);
            if (s_defaultNamespaces == null)
            {
                s_defaultNamespaces = nss;
            }
        }
        return s_defaultNamespaces;
    }
}
// Cache of serializers keyed by contract type, then by mapping key.
private static readonly Dictionary<Type, Dictionary<XmlSerializerMappingKey, XmlSerializer>> s_xmlSerializerTable = new Dictionary<Type, Dictionary<XmlSerializerMappingKey, XmlSerializer>>();
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer8"]/*' />
///<internalonly/>
// Parameterless constructor reserved for generated/derived serializers.
protected XmlSerializer()
{
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer"]/*' />
/// <devdoc>
///    <para>Chains to the full constructor with no assembly location.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlAttributeOverrides overrides, Type[] extraTypes, XmlRootAttribute root, string defaultNamespace) :
    this(type, overrides, extraTypes, root, defaultNamespace, null)
{
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer2"]/*' />
/// <devdoc>
///    <para>Chains to the full constructor with only a root attribute.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlRootAttribute root) : this(type, null, Array.Empty<Type>(), root, null, null)
{
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer3"]/*' />
/// <devdoc>
///    <para>Chains to the full constructor with extra known types (uapaot
///    builds ignore the extra types and use the simple constructor).</para>
/// </devdoc>
#if !uapaot
public XmlSerializer(Type type, Type[] extraTypes) : this(type, null, extraTypes, null, null, null)
#else
public XmlSerializer(Type type, Type[] extraTypes) : this(type)
#endif
{
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer4"]/*' />
/// <devdoc>
///    <para>Chains to the full constructor with attribute overrides only.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlAttributeOverrides overrides) : this(type, overrides, Array.Empty<Type>(), null, null, null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer5"]/*' />
/// <devdoc>
///    <para>Creates a serializer directly from a pre-built type mapping.</para>
/// </devdoc>
public XmlSerializer(XmlTypeMapping xmlTypeMapping)
{
    if (xmlTypeMapping == null)
        throw new ArgumentNullException(nameof(xmlTypeMapping));
#if !uapaot
    // Generate the serialization assembly eagerly for the given mapping.
    _tempAssembly = GenerateTempAssembly(xmlTypeMapping);
#endif
    _mapping = xmlTypeMapping;
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer6"]/*' />
/// <devdoc>
///    <para>Chains to the (type, defaultNamespace) constructor with no namespace.</para>
/// </devdoc>
public XmlSerializer(Type type) : this(type, (string)null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer1"]/*' />
/// <devdoc>
///    <para>Creates a serializer for <paramref name="type"/>, reusing a cached
///    or pre-generated serialization assembly when one is available.</para>
/// </devdoc>
public XmlSerializer(Type type, string defaultNamespace)
{
    if (type == null)
        throw new ArgumentNullException(nameof(type));
    DefaultNamespace = defaultNamespace;
    _rootType = type;
    // Known primitives short-circuit: no assembly generation needed.
    _mapping = GetKnownMapping(type, defaultNamespace);
    if (_mapping != null)
    {
        _primitiveType = type;
        return;
    }
#if !uapaot
    // Double-checked cache lookup under the cache lock.
    _tempAssembly = s_cache[defaultNamespace, type];
    if (_tempAssembly == null)
    {
        lock (s_cache)
        {
            _tempAssembly = s_cache[defaultNamespace, type];
            if (_tempAssembly == null)
            {
                {
                    XmlSerializerImplementation contract = null;
                    Assembly assembly = TempAssembly.LoadGeneratedAssembly(type, null, out contract);
                    if (assembly == null)
                    {
                        // need to reflect and generate new serialization assembly
                        XmlReflectionImporter importer = new XmlReflectionImporter(defaultNamespace);
                        _mapping = importer.ImportTypeMapping(type, null, defaultNamespace);
                        _tempAssembly = GenerateTempAssembly(_mapping, type, defaultNamespace);
                    }
                    else
                    {
                        // we found the pre-generated assembly, now make sure that the assembly has the right serializer
                        // try to avoid the reflection step, need to get ElementName, namespace and the Key form the type
                        _mapping = XmlReflectionImporter.GetTopLevelMapping(type, defaultNamespace);
                        _tempAssembly = new TempAssembly(new XmlMapping[] { _mapping }, assembly, contract);
                    }
                }
            }
            s_cache.Add(defaultNamespace, type, _tempAssembly);
        }
    }
    // Cache hit above leaves _mapping unset; derive the top-level mapping now.
    if (_mapping == null)
    {
        _mapping = XmlReflectionImporter.GetTopLevelMapping(type, defaultNamespace);
    }
#else
    XmlSerializerImplementation contract = GetXmlSerializerContractFromGeneratedAssembly();
    if (contract != null)
    {
        this.innerSerializer = contract.GetSerializer(type);
    }
    else if (ReflectionMethodEnabled)
    {
        var importer = new XmlReflectionImporter(defaultNamespace);
        _mapping = importer.ImportTypeMapping(type, null, defaultNamespace);
        if (_mapping == null)
        {
            _mapping = XmlReflectionImporter.GetTopLevelMapping(type, defaultNamespace);
        }
    }
#endif
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer7"]/*' />
/// <devdoc>
///    <para>Full constructor: imports the mapping for the given type with
///    overrides, extra known types, root attribute, and default namespace,
///    then generates the serialization assembly.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlAttributeOverrides overrides, Type[] extraTypes, XmlRootAttribute root, string defaultNamespace, string location)
{
    if (type == null)
        throw new ArgumentNullException(nameof(type));

    DefaultNamespace = defaultNamespace;
    _rootType = type;
    XmlReflectionImporter importer = new XmlReflectionImporter(overrides, defaultNamespace);
    if (extraTypes != null)
    {
        // Register every additional known type before importing the root mapping.
        foreach (Type extraType in extraTypes)
        {
            importer.IncludeType(extraType);
        }
    }
    _mapping = importer.ImportTypeMapping(type, root, defaultNamespace);
#if !uapaot
    _tempAssembly = GenerateTempAssembly(_mapping, type, defaultNamespace, location);
#endif
}
// Convenience overload: no type, namespace, or location.
internal static TempAssembly GenerateTempAssembly(XmlMapping xmlMapping)
    => GenerateTempAssembly(xmlMapping, null, null);

// Convenience overload: no assembly location.
internal static TempAssembly GenerateTempAssembly(XmlMapping xmlMapping, Type type, string defaultNamespace)
    => GenerateTempAssembly(xmlMapping, type, defaultNamespace, null);

// Builds the temporary serialization assembly for a single mapping.
// SOAP-encoded mappings yield no assembly (null).
internal static TempAssembly GenerateTempAssembly(XmlMapping xmlMapping, Type type, string defaultNamespace, string location)
{
    if (xmlMapping == null)
    {
        throw new ArgumentNullException(nameof(xmlMapping));
    }

    xmlMapping.CheckShallow();
    return xmlMapping.IsSoap
        ? null
        : new TempAssembly(new XmlMapping[] { xmlMapping }, new Type[] { type }, defaultNamespace, location);
}
#if !XMLSERIALIZERGENERATOR
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize"]/*' />
/// <devdoc>
///    <para>Serializes to a TextWriter with no explicit namespaces.</para>
/// </devdoc>
public void Serialize(TextWriter textWriter, object o)
    => Serialize(textWriter, o, null);

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize1"]/*' />
/// <devdoc>
///    <para>Serializes to a TextWriter through an indented XmlTextWriter.</para>
/// </devdoc>
public void Serialize(TextWriter textWriter, object o, XmlSerializerNamespaces namespaces)
{
    XmlTextWriter xmlWriter = new XmlTextWriter(textWriter)
    {
        Formatting = Formatting.Indented,
        Indentation = 2
    };
    Serialize(xmlWriter, o, namespaces);
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize2"]/*' />
/// <devdoc>
///    <para>Serializes to a Stream with no explicit namespaces.</para>
/// </devdoc>
public void Serialize(Stream stream, object o)
    => Serialize(stream, o, null);

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize3"]/*' />
/// <devdoc>
///    <para>Serializes to a Stream through an indented XmlTextWriter
///    (encoding auto-detected from the stream).</para>
/// </devdoc>
public void Serialize(Stream stream, object o, XmlSerializerNamespaces namespaces)
{
    XmlTextWriter xmlWriter = new XmlTextWriter(stream, null)
    {
        Formatting = Formatting.Indented,
        Indentation = 2
    };
    Serialize(xmlWriter, o, namespaces);
}

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize4"]/*' />
/// <devdoc>
///    <para>Serializes to an XmlWriter with no explicit namespaces.</para>
/// </devdoc>
public void Serialize(XmlWriter xmlWriter, object o)
    => Serialize(xmlWriter, o, null);

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize5"]/*' />
/// <devdoc>
///    <para>Serializes to an XmlWriter with namespaces but no encoding style.</para>
/// </devdoc>
public void Serialize(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces)
    => Serialize(xmlWriter, o, namespaces, null);

/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize6"]/*' />
public void Serialize(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces, string encodingStyle)
    => Serialize(xmlWriter, o, namespaces, encodingStyle, null);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize6"]/*' />
// Core serialization entry point: dispatches to the primitive writer, the
// reflection-based writer, or the generated serialization assembly, then
// flushes the target writer. Any failure is wrapped in InvalidOperationException.
public void Serialize(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces, string encodingStyle, string id)
{
    try
    {
        if (_primitiveType != null)
        {
            // Primitive roots never support an encoded (SOAP) style.
            if (encodingStyle != null && encodingStyle.Length > 0)
            {
                throw new InvalidOperationException(SR.Format(SR.XmlInvalidEncodingNotEncoded1, encodingStyle));
            }
            SerializePrimitive(xmlWriter, o, namespaces);
        }
#if !uapaot
        else if (ShouldUseReflectionBasedSerialization(_mapping))
        {
            // Reuse the existing mapping when it can drive a serializer;
            // otherwise re-import the root type's mapping on the fly.
            XmlMapping mapping;
            if (_mapping != null && _mapping.GenerateSerializer)
            {
                mapping = _mapping;
            }
            else
            {
                XmlReflectionImporter importer = new XmlReflectionImporter(DefaultNamespace);
                mapping = importer.ImportTypeMapping(_rootType, null, DefaultNamespace);
            }

            var writer = new ReflectionXmlSerializationWriter(mapping, xmlWriter, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id);
            writer.WriteObject(o);
        }
        else if (_tempAssembly == null || _typedSerializer)
        {
            // The condition for the block is never true, thus the block is never hit.
            XmlSerializationWriter writer = CreateWriter();
            writer.Init(xmlWriter, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id, _tempAssembly);
            try
            {
                Serialize(o, writer);
            }
            finally
            {
                writer.Dispose();
            }
        }
        else
            _tempAssembly.InvokeWriter(_mapping, xmlWriter, o, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id);
#else
        else
        {
            if (this.innerSerializer != null)
            {
                // Delegate to the serializer supplied by the generated contract.
                if (!string.IsNullOrEmpty(this.DefaultNamespace))
                {
                    this.innerSerializer.DefaultNamespace = this.DefaultNamespace;
                }

                XmlSerializationWriter writer = this.innerSerializer.CreateWriter();
                writer.Init(xmlWriter, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id);
                try
                {
                    this.innerSerializer.Serialize(o, writer);
                }
                finally
                {
                    writer.Dispose();
                }
            }
            else if (ReflectionMethodEnabled)
            {
                XmlMapping mapping;
                if (_mapping != null && _mapping.GenerateSerializer)
                {
                    mapping = _mapping;
                }
                else
                {
                    XmlReflectionImporter importer = new XmlReflectionImporter(DefaultNamespace);
                    mapping = importer.ImportTypeMapping(_rootType, null, DefaultNamespace);
                }

                var writer = new ReflectionXmlSerializationWriter(mapping, xmlWriter, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id);
                writer.WriteObject(o);
            }
            else
            {
                throw new InvalidOperationException(SR.Format(SR.Xml_MissingSerializationCodeException, this._rootType, typeof(XmlSerializer).Name));
            }
        }
#endif
    }
    catch (Exception e)
    {
        // Surface the original failure (unwrapping reflection invocations)
        // as an InvalidOperationException, per the public contract.
        if (e is TargetInvocationException)
            e = e.InnerException;
        throw new InvalidOperationException(SR.XmlGenError, e);
    }
    xmlWriter.Flush();
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize"]/*' />
/// <devdoc>
///    <para>Deserializes an object graph from the given <see cref="Stream"/>.</para>
/// </devdoc>
public object Deserialize(Stream stream)
{
    // Significant whitespace handling, attribute-value normalization, and no
    // external resource resolution; then delegate to the reader-based overload.
    XmlTextReader xmlReader = new XmlTextReader(stream)
    {
        WhitespaceHandling = WhitespaceHandling.Significant,
        Normalization = true,
        XmlResolver = null
    };
    return Deserialize(xmlReader, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize1"]/*' />
/// <devdoc>
///    <para>Deserializes an object graph from the given <see cref="TextReader"/>.</para>
/// </devdoc>
public object Deserialize(TextReader textReader)
{
    // Same reader configuration as the Stream overload: significant
    // whitespace, attribute-value normalization, no external entities.
    XmlTextReader xmlReader = new XmlTextReader(textReader)
    {
        WhitespaceHandling = WhitespaceHandling.Significant,
        Normalization = true,
        XmlResolver = null
    };
    return Deserialize(xmlReader, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize2"]/*' />
/// <devdoc>
///    <para>Deserializes from a caller-supplied <see cref="XmlReader"/> with no encoding style.</para>
/// </devdoc>
public object Deserialize(XmlReader xmlReader) => Deserialize(xmlReader, (string)null);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize3"]/*' />
/// <summary>Deserializes with caller-supplied Unknown*-event callbacks and no encoding style.</summary>
public object Deserialize(XmlReader xmlReader, XmlDeserializationEvents events) =>
    Deserialize(xmlReader, null, events);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize4"]/*' />
/// <summary>Deserializes with the given encoding style using this serializer's own event set.</summary>
public object Deserialize(XmlReader xmlReader, string encodingStyle) =>
    Deserialize(xmlReader, encodingStyle, _events);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize5"]/*' />
/// <summary>
/// Core deserialization entry point. Dispatches to the primitive reader, the
/// reflection-based reader, the typed generated reader, or the temp-assembly
/// reader depending on how this serializer was constructed. Exceptions are
/// wrapped in <see cref="InvalidOperationException"/>, with line/position
/// details when the reader supports <see cref="IXmlLineInfo"/>.
/// </summary>
public object Deserialize(XmlReader xmlReader, string encodingStyle, XmlDeserializationEvents events)
{
    // Record this serializer as the sender for Unknown* callbacks.
    events.sender = this;
    try
    {
        if (_primitiveType != null)
        {
            // Primitives are always literal; an encoding style is invalid here.
            if (encodingStyle != null && encodingStyle.Length > 0)
            {
                throw new InvalidOperationException(SR.Format(SR.XmlInvalidEncodingNotEncoded1, encodingStyle));
            }
            return DeserializePrimitive(xmlReader, events);
        }
#if !uapaot
        // Reflection-only mode or SOAP mappings use the reflection-based reader.
        else if (ShouldUseReflectionBasedSerialization(_mapping))
        {
            XmlMapping mapping;
            if (_mapping != null && _mapping.GenerateSerializer)
            {
                mapping = _mapping;
            }
            else
            {
                // No usable mapping supplied; import one from the root type.
                XmlReflectionImporter importer = new XmlReflectionImporter(DefaultNamespace);
                mapping = importer.ImportTypeMapping(_rootType, null, DefaultNamespace);
            }

            var reader = new ReflectionXmlSerializationReader(mapping, xmlReader, events, encodingStyle);
            return reader.ReadObject();
        }
        else if (_tempAssembly == null || _typedSerializer)
        {
            // Typed (generated) serializer path: CreateReader/Deserialize are
            // overridden by the generated subclass.
            XmlSerializationReader reader = CreateReader();
            reader.Init(xmlReader, events, encodingStyle, _tempAssembly);
            try
            {
                return Deserialize(reader);
            }
            finally
            {
                reader.Dispose();
            }
        }
        else
        {
            // Dynamically generated temp assembly owns the reader.
            return _tempAssembly.InvokeReader(_mapping, xmlReader, events, encodingStyle);
        }
#else
        else
        {
            if (this.innerSerializer != null)
            {
                // Pre-generated (uapaot) serializer path.
                if (!string.IsNullOrEmpty(this.DefaultNamespace))
                {
                    this.innerSerializer.DefaultNamespace = this.DefaultNamespace;
                }

                XmlSerializationReader reader = this.innerSerializer.CreateReader();
                reader.Init(xmlReader, encodingStyle);
                try
                {
                    return this.innerSerializer.Deserialize(reader);
                }
                finally
                {
                    reader.Dispose();
                }
            }
            else if (ReflectionMethodEnabled)
            {
                XmlMapping mapping;
                if (_mapping != null && _mapping.GenerateSerializer)
                {
                    mapping = _mapping;
                }
                else
                {
                    XmlReflectionImporter importer = new XmlReflectionImporter(DefaultNamespace);
                    mapping = importer.ImportTypeMapping(_rootType, null, DefaultNamespace);
                }

                var reader = new ReflectionXmlSerializationReader(mapping, xmlReader, events, encodingStyle);
                return reader.ReadObject();
            }
            else
            {
                throw new InvalidOperationException(SR.Format(SR.Xml_MissingSerializationCodeException, this._rootType, typeof(XmlSerializer).Name));
            }
        }
#endif
    }
    catch (Exception e)
    {
        // Unwrap reflection-invocation wrappers so callers see the real cause.
        if (e is TargetInvocationException)
            e = e.InnerException;

        // Add line/position detail when the reader can provide it.
        if (xmlReader is IXmlLineInfo)
        {
            IXmlLineInfo lineInfo = (IXmlLineInfo)xmlReader;
            throw new InvalidOperationException(SR.Format(SR.XmlSerializeErrorDetails, lineInfo.LineNumber.ToString(CultureInfo.InvariantCulture), lineInfo.LinePosition.ToString(CultureInfo.InvariantCulture)), e);
        }
        else
        {
            throw new InvalidOperationException(SR.XmlSerializeError, e);
        }
    }
}
#endif
/// <summary>
/// True when serialization must go through the reflection-based path: either
/// the global mode forces reflection, or the mapping is SOAP-encoded.
/// </summary>
private static bool ShouldUseReflectionBasedSerialization(XmlMapping mapping) =>
    Mode == SerializationMode.ReflectionOnly || (mapping != null && mapping.IsSoap);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.CanDeserialize"]/*' />
/// <summary>
/// Returns whether this serializer can read the element the reader is
/// currently positioned on. Only primitive serializers and serializers backed
/// by a generated temp assembly can answer affirmatively on this build.
/// </summary>
public virtual bool CanDeserialize(XmlReader xmlReader)
{
    if (_primitiveType != null)
    {
        // Primitives: match the current element against the XSD data-type
        // name in the empty namespace.
        TypeDesc typeDesc = (TypeDesc)TypeScope.PrimtiveTypes[_primitiveType];
        return xmlReader.IsStartElement(typeDesc.DataType.Name, string.Empty);
    }
#if !uapaot
    else if (_tempAssembly != null)
    {
        return _tempAssembly.CanRead(_mapping, xmlReader);
    }
    else
    {
        // No primitive type and no generated assembly: cannot answer.
        return false;
    }
#else
    if (this.innerSerializer != null)
    {
        return this.innerSerializer.CanDeserialize(xmlReader);
    }
    else
    {
        return ReflectionMethodEnabled;
    }
#endif
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.FromMappings"]/*' />
/// <devdoc>
///    <para>Creates serializers for the given mappings with no anchor type.</para>
/// </devdoc>
public static XmlSerializer[] FromMappings(XmlMapping[] mappings) =>
    FromMappings(mappings, (Type)null);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.FromMappings1"]/*' />
/// <devdoc>
///    <para>Creates one serializer per mapping, reusing a single generated (or
///    pregenerated) serialization assembly where possible.</para>
/// </devdoc>
public static XmlSerializer[] FromMappings(XmlMapping[] mappings, Type type)
{
    if (mappings == null || mappings.Length == 0) return Array.Empty<XmlSerializer>();
#if uapaot
    XmlSerializer[] serializers = GetReflectionBasedSerializers(mappings, type);
    return serializers;
#else
    bool anySoapMapping = false;
    foreach (var mapping in mappings)
    {
        if (mapping.IsSoap)
        {
            anySoapMapping = true;
        }
    }

    // SOAP mappings (when reflection is permitted) and reflection-only mode
    // both bypass assembly generation entirely.
    if ((anySoapMapping && ReflectionMethodEnabled) || Mode == SerializationMode.ReflectionOnly)
    {
        XmlSerializer[] serializers = GetReflectionBasedSerializers(mappings, type);
        return serializers;
    }

    XmlSerializerImplementation contract = null;
    Assembly assembly = type == null ? null : TempAssembly.LoadGeneratedAssembly(type, null, out contract);
    TempAssembly tempAssembly = null;
    if (assembly == null)
    {
        if (XmlMapping.IsShallow(mappings))
        {
            // Shallow mappings carry too little information to generate code.
            return Array.Empty<XmlSerializer>();
        }
        else
        {
            if (type == null)
            {
                // NOTE(review): 'type' is null here, so the temp assembly is
                // built against a one-element array containing null — confirm
                // TempAssembly tolerates null entries before changing this.
                tempAssembly = new TempAssembly(mappings, new Type[] { type }, null, null);
                XmlSerializer[] serializers = new XmlSerializer[mappings.Length];

                contract = tempAssembly.Contract;

                for (int i = 0; i < serializers.Length; i++)
                {
                    serializers[i] = (XmlSerializer)contract.TypedSerializers[mappings[i].Key];
                    serializers[i].SetTempAssembly(tempAssembly, mappings[i]);
                }

                return serializers;
            }
            else
            {
                // Use XmlSerializer cache when the type is not null.
                return GetSerializersFromCache(mappings, type);
            }
        }
    }
    else
    {
        // A pregenerated assembly was found; hand out its typed serializers.
        XmlSerializer[] serializers = new XmlSerializer[mappings.Length];
        for (int i = 0; i < serializers.Length; i++)
            serializers[i] = (XmlSerializer)contract.TypedSerializers[mappings[i].Key];
        return serializers;
    }
#endif
}
/// <summary>
/// Builds one reflection-only <see cref="XmlSerializer"/> per mapping; no
/// serialization assembly is generated or loaded for these instances.
/// </summary>
private static XmlSerializer[] GetReflectionBasedSerializers(XmlMapping[] mappings, Type type)
{
    var result = new XmlSerializer[mappings.Length];
    for (int index = 0; index < result.Length; index++)
    {
        var serializer = new XmlSerializer();
        serializer._rootType = type;
        serializer._mapping = mappings[index];
        result[index] = serializer;
    }
    return result;
}
#if XMLSERIALIZERGENERATOR
/// <summary>
/// Generates serialization source code for <paramref name="types"/> into
/// <paramref name="codePath"/>. All types must live in the same assembly and
/// none may be dynamic. Returns false when there is nothing to generate.
/// </summary>
[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)]
[ResourceExposure(ResourceScope.None)]
public static bool GenerateSerializer(Type[] types, XmlMapping[] mappings, string codePath)
{
    if (types == null || types.Length == 0)
        return false;

    if (mappings == null)
        throw new ArgumentNullException(nameof(mappings));

    // NOTE(review): SR.XmlMelformMapping looks like the wrong resource string
    // for a missing output directory — confirm against the resource table.
    if(!Directory.Exists(codePath))
    {
        throw new ArgumentException(SR.Format(SR.XmlMelformMapping));
    }

    if (XmlMapping.IsShallow(mappings))
    {
        throw new InvalidOperationException(SR.Format(SR.XmlMelformMapping));
    }

    Assembly assembly = null;
    for (int i = 0; i < types.Length; i++)
    {
        Type type = types[i];
        if (DynamicAssemblies.IsTypeDynamic(type))
        {
            throw new InvalidOperationException(SR.Format(SR.XmlPregenTypeDynamic, type.FullName));
        }

        // All types must come from one assembly so a single serializer
        // assembly can be emitted for them.
        if (assembly == null)
        {
            assembly = type.Assembly;
        }
        else if (type.Assembly != assembly)
        {
            throw new ArgumentException(SR.Format(SR.XmlPregenOrphanType, type.FullName, assembly.Location), "types");
        }
    }

    return TempAssembly.GenerateSerializerFile(mappings, types, null, assembly, new Hashtable(), codePath);
}
#endif
/// <summary>
/// Returns serializers for <paramref name="mappings"/>, creating and caching a
/// shared temp assembly the first time each (type, mapping) pair is seen.
/// </summary>
private static XmlSerializer[] GetSerializersFromCache(XmlMapping[] mappings, Type type)
{
    XmlSerializer[] serializers = new XmlSerializer[mappings.Length];

    // Per-type cache table; the outer lock only guards table creation.
    Dictionary<XmlSerializerMappingKey, XmlSerializer> typedMappingTable = null;
    lock (s_xmlSerializerTable)
    {
        if (!s_xmlSerializerTable.TryGetValue(type, out typedMappingTable))
        {
            typedMappingTable = new Dictionary<XmlSerializerMappingKey, XmlSerializer>();
            s_xmlSerializerTable[type] = typedMappingTable;
        }
    }

    // The per-type lock serializes generation so each mapping's serializer is
    // built at most once.
    lock (typedMappingTable)
    {
        // Collect the cache misses, remembering their result slots.
        var pendingKeys = new Dictionary<XmlSerializerMappingKey, int>();
        for (int i = 0; i < mappings.Length; i++)
        {
            XmlSerializerMappingKey mappingKey = new XmlSerializerMappingKey(mappings[i]);
            if (!typedMappingTable.TryGetValue(mappingKey, out serializers[i]))
            {
                pendingKeys.Add(mappingKey, i);
            }
        }

        // Generate a single temp assembly covering all misses at once.
        if (pendingKeys.Count > 0)
        {
            XmlMapping[] pendingMappings = new XmlMapping[pendingKeys.Count];
            int index = 0;
            foreach (XmlSerializerMappingKey mappingKey in pendingKeys.Keys)
            {
                pendingMappings[index++] = mappingKey.Mapping;
            }

            TempAssembly tempAssembly = new TempAssembly(pendingMappings, new Type[] { type }, null, null);
            XmlSerializerImplementation contract = tempAssembly.Contract;

            foreach (XmlSerializerMappingKey mappingKey in pendingKeys.Keys)
            {
                index = pendingKeys[mappingKey];
                serializers[index] = (XmlSerializer)contract.TypedSerializers[mappingKey.Mapping.Key];
                serializers[index].SetTempAssembly(tempAssembly, mappingKey.Mapping);

                typedMappingTable[mappingKey] = serializers[index];
            }
        }
    }

    return serializers;
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.FromTypes"]/*' />
/// <devdoc>
///    <para>Creates one serializer per type; a single importer is shared on the
///    reflection-importing path so the mappings are generated consistently.</para>
/// </devdoc>
public static XmlSerializer[] FromTypes(Type[] types)
{
    if (types == null)
        return Array.Empty<XmlSerializer>();

#if uapaot
    var serializers = new XmlSerializer[types.Length];
    for (int i = 0; i < types.Length; i++)
    {
        serializers[i] = new XmlSerializer(types[i]);
    }
    return serializers;
#else
    XmlReflectionImporter importer = new XmlReflectionImporter();
    XmlTypeMapping[] mappings = new XmlTypeMapping[types.Length];
    for (int i = 0; i < types.Length; i++)
    {
        mappings[i] = importer.ImportTypeMapping(types[i]);
    }
    return FromMappings(mappings);
#endif
}
#if uapaot
// This is the global XML serializer contract introduced for multi-file builds.
private static XmlSerializerImplementation xmlSerializerContract;

internal static XmlSerializerImplementation GetXmlSerializerContractFromGeneratedAssembly()
{
    // Hack to pull in SetXmlSerializerContract, which is only referenced from
    // the code injected by the MainMethodInjector transform.
    // There is probably also a way to do this via [DependencyReductionRoot],
    // but the compiler cannot find that.
    if (xmlSerializerContract == null)
        SetXmlSerializerContract(null);

    // This method body used to be rewritten by an IL transform; with the
    // restructuring for multi-file builds it has become a regular method.
    return xmlSerializerContract;
}

/// <summary>Installs the generated serializer contract used by the uapaot tool chain.</summary>
public static void SetXmlSerializerContract(XmlSerializerImplementation xmlSerializerImplementation)
{
    xmlSerializerContract = xmlSerializerImplementation;
}
#endif
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.GetXmlSerializerAssemblyName"]/*' />
/// <devdoc>
///    <para>Returns the pregenerated serializer assembly name for
///    <paramref name="type"/> with no default namespace.</para>
/// </devdoc>
public static string GetXmlSerializerAssemblyName(Type type) =>
    GetXmlSerializerAssemblyName(type, null);
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.GetXmlSerializerAssemblyName"]/*' />
/// <devdoc>
///    <para>Returns the pregenerated serializer assembly name derived from the
///    type's assembly and the given default namespace.</para>
/// </devdoc>
public static string GetXmlSerializerAssemblyName(Type type, string defaultNamespace)
{
    if (type == null)
        throw new ArgumentNullException(nameof(type));

    AssemblyName assemblyName = type.Assembly.GetName();
    return Compiler.GetTempAssemblyName(assemblyName, defaultNamespace);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnknownNode"]/*' />
/// <devdoc>
///    <para>Raised during deserialization for any XML node that has no
///    corresponding member in the target type.</para>
/// </devdoc>
public event XmlNodeEventHandler UnknownNode
{
    add
    {
        _events.OnUnknownNode += value;
    }
    remove
    {
        _events.OnUnknownNode -= value;
    }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnknownAttribute"]/*' />
/// <devdoc>
///    <para>Raised during deserialization for any XML attribute that has no
///    corresponding member in the target type.</para>
/// </devdoc>
public event XmlAttributeEventHandler UnknownAttribute
{
    add
    {
        _events.OnUnknownAttribute += value;
    }
    remove
    {
        _events.OnUnknownAttribute -= value;
    }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnknownElement"]/*' />
/// <summary>Raised during deserialization for any XML element that has no
/// corresponding member in the target type.</summary>
public event XmlElementEventHandler UnknownElement
{
    add
    {
        _events.OnUnknownElement += value;
    }
    remove
    {
        _events.OnUnknownElement -= value;
    }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnreferencedObject"]/*' />
/// <summary>Raised (SOAP-encoded streams only) for a deserialized object that
/// is never referenced from the object graph.</summary>
public event UnreferencedObjectEventHandler UnreferencedObject
{
    add
    {
        _events.OnUnreferencedObject += value;
    }
    remove
    {
        _events.OnUnreferencedObject -= value;
    }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.CreateReader"]/*' />
///<internalonly/>
/// <summary>Overridden by generated typed serializers to supply their reader.</summary>
protected virtual XmlSerializationReader CreateReader() { throw new NotImplementedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize4"]/*' />
///<internalonly/>
/// <summary>Overridden by generated typed serializers to perform the actual read.</summary>
protected virtual object Deserialize(XmlSerializationReader reader) { throw new NotImplementedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.CreateWriter"]/*' />
///<internalonly/>
/// <summary>Overridden by generated typed serializers to supply their writer.</summary>
protected virtual XmlSerializationWriter CreateWriter() { throw new NotImplementedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize7"]/*' />
///<internalonly/>
/// <summary>Overridden by generated typed serializers to perform the actual write.</summary>
protected virtual void Serialize(object o, XmlSerializationWriter writer) { throw new NotImplementedException(); }
/// <summary>
/// Binds this instance to a generated temp assembly and mapping; subsequent
/// Serialize/Deserialize calls then take the typed-serializer path.
/// </summary>
internal void SetTempAssembly(TempAssembly tempAssembly, XmlMapping mapping)
{
    _tempAssembly = tempAssembly;
    _mapping = mapping;
    _typedSerializer = true;
}
/// <summary>
/// Returns a synthetic mapping for intrinsically supported primitive types in
/// the empty namespace, or null when the type/namespace pair is not known.
/// </summary>
private static XmlTypeMapping GetKnownMapping(Type type, string ns)
{
    // Known primitives only live in the empty namespace.
    if (!string.IsNullOrEmpty(ns))
        return null;

    TypeDesc typeDesc = (TypeDesc)TypeScope.PrimtiveTypes[type];
    if (typeDesc == null)
        return null;

    ElementAccessor element = new ElementAccessor { Name = typeDesc.DataType.Name };
    XmlTypeMapping mapping = new XmlTypeMapping(null, element);
    mapping.SetKeyInternal(XmlMapping.GenerateKey(type, null, null));
    return mapping;
}
#if !XMLSERIALIZERGENERATOR
/// <summary>
/// Serializes a value whose type is one of the intrinsically supported
/// primitives by dispatching on its <see cref="TypeCode"/> to the matching
/// Write_* method of <see cref="XmlSerializationPrimitiveWriter"/>.
/// </summary>
private void SerializePrimitive(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces)
{
    XmlSerializationPrimitiveWriter writer = new XmlSerializationPrimitiveWriter();
    writer.Init(xmlWriter, namespaces, null, null, null);
    switch (_primitiveType.GetTypeCode())
    {
        case TypeCode.String:
            writer.Write_string(o);
            break;
        case TypeCode.Int32:
            writer.Write_int(o);
            break;
        case TypeCode.Boolean:
            writer.Write_boolean(o);
            break;
        case TypeCode.Int16:
            writer.Write_short(o);
            break;
        case TypeCode.Int64:
            writer.Write_long(o);
            break;
        case TypeCode.Single:
            writer.Write_float(o);
            break;
        case TypeCode.Double:
            writer.Write_double(o);
            break;
        case TypeCode.Decimal:
            writer.Write_decimal(o);
            break;
        case TypeCode.DateTime:
            writer.Write_dateTime(o);
            break;
        case TypeCode.Char:
            writer.Write_char(o);
            break;
        case TypeCode.Byte:
            writer.Write_unsignedByte(o);
            break;
        case TypeCode.SByte:
            writer.Write_byte(o);
            break;
        case TypeCode.UInt16:
            writer.Write_unsignedShort(o);
            break;
        case TypeCode.UInt32:
            writer.Write_unsignedInt(o);
            break;
        case TypeCode.UInt64:
            writer.Write_unsignedLong(o);
            break;
        default:
            // Supported types that have no TypeCode of their own.
            if (_primitiveType == typeof(XmlQualifiedName))
            {
                writer.Write_QName(o);
            }
            else if (_primitiveType == typeof(byte[]))
            {
                writer.Write_base64Binary(o);
            }
            else if (_primitiveType == typeof(Guid))
            {
                writer.Write_guid(o);
            }
            else if (_primitiveType == typeof(TimeSpan))
            {
                writer.Write_TimeSpan(o);
            }
            else
            {
                throw new InvalidOperationException(SR.Format(SR.XmlUnxpectedType, _primitiveType.FullName));
            }
            break;
    }
}
/// <summary>
/// Deserializes a value whose type is one of the intrinsically supported
/// primitives by dispatching on its <see cref="TypeCode"/> to the matching
/// Read_* method of <see cref="XmlSerializationPrimitiveReader"/>.
/// </summary>
private object DeserializePrimitive(XmlReader xmlReader, XmlDeserializationEvents events)
{
    XmlSerializationPrimitiveReader reader = new XmlSerializationPrimitiveReader();
    reader.Init(xmlReader, events, null, null);

    // Each branch returns directly rather than funneling through a local.
    switch (_primitiveType.GetTypeCode())
    {
        case TypeCode.String:
            return reader.Read_string();
        case TypeCode.Int32:
            return reader.Read_int();
        case TypeCode.Boolean:
            return reader.Read_boolean();
        case TypeCode.Int16:
            return reader.Read_short();
        case TypeCode.Int64:
            return reader.Read_long();
        case TypeCode.Single:
            return reader.Read_float();
        case TypeCode.Double:
            return reader.Read_double();
        case TypeCode.Decimal:
            return reader.Read_decimal();
        case TypeCode.DateTime:
            return reader.Read_dateTime();
        case TypeCode.Char:
            return reader.Read_char();
        case TypeCode.Byte:
            return reader.Read_unsignedByte();
        case TypeCode.SByte:
            return reader.Read_byte();
        case TypeCode.UInt16:
            return reader.Read_unsignedShort();
        case TypeCode.UInt32:
            return reader.Read_unsignedInt();
        case TypeCode.UInt64:
            return reader.Read_unsignedLong();
        default:
            // Supported types that have no TypeCode of their own.
            if (_primitiveType == typeof(XmlQualifiedName))
                return reader.Read_QName();
            if (_primitiveType == typeof(byte[]))
                return reader.Read_base64Binary();
            if (_primitiveType == typeof(Guid))
                return reader.Read_guid();
            if (_primitiveType == typeof(TimeSpan))
                return reader.Read_TimeSpan();
            throw new InvalidOperationException(SR.Format(SR.XmlUnxpectedType, _primitiveType.FullName));
    }
}
#endif
/// <summary>
/// Dictionary key wrapping an <see cref="XmlMapping"/>; two keys are equal when
/// their mappings agree on Key, ElementName, Namespace and IsSoap.
/// </summary>
private class XmlSerializerMappingKey
{
    public XmlMapping Mapping;

    public XmlSerializerMappingKey(XmlMapping mapping)
    {
        Mapping = mapping;
    }

    public override bool Equals(object obj)
    {
        XmlSerializerMappingKey other = obj as XmlSerializerMappingKey;
        return other != null
            && Mapping.Key == other.Mapping.Key
            && Mapping.ElementName == other.Mapping.ElementName
            && Mapping.Namespace == other.Mapping.Namespace
            && Mapping.IsSoap == other.Mapping.IsSoap;
    }

    public override int GetHashCode()
    {
        // Same composition as the historical implementation: an IsSoap seed
        // XOR the hash of each non-null identifying string (XOR 0 is identity).
        int hashCode = Mapping.IsSoap ? 0 : 1;
        hashCode ^= Mapping.Key?.GetHashCode() ?? 0;
        hashCode ^= Mapping.ElementName?.GetHashCode() ?? 0;
        hashCode ^= Mapping.Namespace?.GetHashCode() ?? 0;
        return hashCode;
    }
}
}
}
| |
// Lucene version compatibility level 4.8.1
using J2N;
using YAF.Lucene.Net.Analysis.TokenAttributes;
using YAF.Lucene.Net.Diagnostics;
using YAF.Lucene.Net.Util;
using System.IO;
namespace YAF.Lucene.Net.Analysis.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// An abstract base class for simple, character-oriented tokenizers.
/// <para>
/// You must specify the required <see cref="LuceneVersion"/> compatibility
/// when creating <see cref="CharTokenizer"/>:
/// <list type="bullet">
/// <item><description>As of 3.1, <see cref="CharTokenizer"/> uses an int based API to normalize and
/// detect token codepoints. See <see cref="IsTokenChar(int)"/> and
/// <see cref="Normalize(int)"/> for details.</description></item>
/// </list>
/// </para>
/// <para>
/// A new <see cref="CharTokenizer"/> API has been introduced with Lucene 3.1. This API
/// moved from UTF-16 code units to UTF-32 codepoints to eventually add support
/// for <a href=
/// "http://java.sun.com/j2se/1.5.0/docs/api/java/lang/Character.html#supplementary"
/// >supplementary characters</a>. The old <i>char</i> based API has been
/// deprecated and should be replaced with the <i>int</i> based methods
/// <see cref="IsTokenChar(int)"/> and <see cref="Normalize(int)"/>.
/// </para>
/// <para>
/// As of Lucene 3.1 each <see cref="CharTokenizer"/> - constructor expects a
/// <see cref="LuceneVersion"/> argument. Based on the given <see cref="LuceneVersion"/> either the new
/// API or a backwards compatibility layer is used at runtime. For
/// <see cref="LuceneVersion"/> < 3.1 the backwards compatibility layer ensures correct
/// behavior even for indexes build with previous versions of Lucene. If a
/// <see cref="LuceneVersion"/> >= 3.1 is used <see cref="CharTokenizer"/> requires the new API to
/// be implemented by the instantiated class. Yet, the old <i>char</i> based API
/// is not required anymore even if backwards compatibility must be preserved.
/// <see cref="CharTokenizer"/> subclasses implementing the new API are fully backwards
/// compatible if instantiated with <see cref="LuceneVersion"/> < 3.1.
/// </para>
/// <para>
/// <strong>Note:</strong> If you use a subclass of <see cref="CharTokenizer"/> with <see cref="LuceneVersion"/> >=
/// 3.1 on an index build with a version < 3.1, created tokens might not be
/// compatible with the terms in your index.
/// </para>
/// </summary>
public abstract class CharTokenizer : Tokenizer
{
    /// <summary>
    /// Creates a new <see cref="CharTokenizer"/> instance
    /// </summary>
    /// <param name="matchVersion">
    ///          Lucene version to match </param>
    /// <param name="input">
    ///          the input to split up into tokens </param>
    public CharTokenizer(LuceneVersion matchVersion, TextReader input)
        : base(input)
    {
        Init(matchVersion);
    }

    /// <summary>
    /// Creates a new <see cref="CharTokenizer"/> instance
    /// </summary>
    /// <param name="matchVersion">
    ///          Lucene version to match </param>
    /// <param name="factory">
    ///          the attribute factory to use for this <see cref="Tokenizer"/> </param>
    /// <param name="input">
    ///          the input to split up into tokens </param>
    public CharTokenizer(LuceneVersion matchVersion, AttributeFactory factory, TextReader input)
        : base(factory, input)
    {
        Init(matchVersion);
    }

    /// <summary>
    /// LUCENENET specific - Added in the .NET version to assist with setting the attributes
    /// from multiple constructors.
    /// </summary>
    /// <param name="matchVersion">Lucene version to match; selects the <see cref="CharacterUtils"/> behavior.</param>
    private void Init(LuceneVersion matchVersion)
    {
        charUtils = CharacterUtils.GetInstance(matchVersion);
        termAtt = AddAttribute<ICharTermAttribute>();
        offsetAtt = AddAttribute<IOffsetAttribute>();
    }

    // Scan state: absolute offset of the current IO buffer within the stream,
    // position inside that buffer, number of valid chars in it, and the
    // corrected end offset reported by End().
    private int offset = 0, bufferIndex = 0, dataLen = 0, finalOffset = 0;
    private const int MAX_WORD_LEN = 255;    // tokens reaching this length are emitted and split
    private const int IO_BUFFER_SIZE = 4096;

    private ICharTermAttribute termAtt;
    private IOffsetAttribute offsetAtt;

    // Version-dependent helper that handles supplementary (surrogate-pair) codepoints.
    private CharacterUtils charUtils;
    private readonly CharacterUtils.CharacterBuffer ioBuffer = CharacterUtils.NewCharacterBuffer(IO_BUFFER_SIZE);

    /// <summary>
    /// Returns true iff a codepoint should be included in a token. This tokenizer
    /// generates as tokens adjacent sequences of codepoints which satisfy this
    /// predicate. Codepoints for which this is false are used to define token
    /// boundaries and are not included in tokens.
    /// </summary>
    protected abstract bool IsTokenChar(int c);

    /// <summary>
    /// Called on each token character to normalize it before it is added to the
    /// token. The default implementation does nothing. Subclasses may use this to,
    /// e.g., lowercase tokens.
    /// </summary>
    protected virtual int Normalize(int c)
    {
        return c;
    }

    /// <summary>
    /// Scans forward for the next maximal run of codepoints accepted by
    /// <see cref="IsTokenChar(int)"/>, normalizing each via <see cref="Normalize(int)"/>,
    /// and publishes it through the term and offset attributes.
    /// Returns false once the input is exhausted.
    /// </summary>
    public override sealed bool IncrementToken()
    {
        ClearAttributes();
        int length = 0;
        int start = -1; // this variable is always initialized
        int end = -1;
        char[] buffer = termAtt.Buffer;
        while (true)
        {
            // Refill the IO buffer once it has been fully consumed.
            if (bufferIndex >= dataLen)
            {
                offset += dataLen;
                charUtils.Fill(ioBuffer, m_input); // read supplementary char aware with CharacterUtils
                if (ioBuffer.Length == 0)
                {
                    dataLen = 0; // so next offset += dataLen won't decrement offset
                    if (length > 0)
                    {
                        // Emit the token accumulated so far.
                        break;
                    }
                    else
                    {
                        // End of input and no pending token.
                        finalOffset = CorrectOffset(offset);
                        return false;
                    }
                }
                dataLen = ioBuffer.Length;
                bufferIndex = 0;
            }
            // use CharacterUtils here to support < 3.1 UTF-16 code unit behavior if the char based methods are gone
            int c = charUtils.CodePointAt(ioBuffer.Buffer, bufferIndex, ioBuffer.Length);
            int charCount = Character.CharCount(c);
            bufferIndex += charCount;

            if (IsTokenChar(c)) // if it's a token char
            {
                if (length == 0) // start of token
                {
                    if (Debugging.AssertsEnabled) Debugging.Assert(start == -1);
                    start = offset + bufferIndex - charCount;
                    end = start;
                } // check if a supplementary could run out of bounds
                else if (length >= buffer.Length - 1)
                {
                    buffer = termAtt.ResizeBuffer(2 + length); // make sure a supplementary fits in the buffer
                }
                end += charCount;
                length += Character.ToChars(Normalize(c), buffer, length); // buffer it, normalized
                if (length >= MAX_WORD_LEN) // buffer overflow! make sure to check for >= surrogate pair could break == test
                {
                    break;
                }
            } // at non-Letter w/ chars
            else if (length > 0)
            {
                break; // return 'em
            }
        }

        termAtt.Length = length;
        if (Debugging.AssertsEnabled) Debugging.Assert(start != -1);
        offsetAtt.SetOffset(CorrectOffset(start), finalOffset = CorrectOffset(end));
        return true;
    }

    /// <summary>Reports the corrected final offset once the stream is exhausted.</summary>
    public override sealed void End()
    {
        base.End();
        // set final offset
        offsetAtt.SetOffset(finalOffset, finalOffset);
    }

    /// <summary>Resets all scan state so the tokenizer can consume a new reader.</summary>
    public override void Reset()
    {
        base.Reset();
        bufferIndex = 0;
        offset = 0;
        dataLen = 0;
        finalOffset = 0;
        ioBuffer.Reset(); // make sure to reset the IO buffer!!
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Windows.Forms;
using UOPDefine;
namespace UoKRUnpacker
{
class UopManager : IDisposable
{
private static UopManager sm_Istance;
public static UopManager getIstance()
{
return getIstance(false);
}
public static UopManager getIstance(bool bReset)
{
if (bReset && (sm_Istance != null))
{
sm_Istance.Dispose();
sm_Istance = null;
}
return (sm_Istance = (sm_Istance == null) ? new UopManager() : sm_Istance);
}
private string m_UopPath;
private UOPFile m_UopFile;
private void UnloadUop()
{
if (m_UopFile != null)
{
m_UopFile.Dispose();
m_UopFile = null;
}
}
public UopManager()
{
m_UopPath = null;
m_UopFile = null;
}
public UOPFile UopFile
{
get { return m_UopFile; }
}
public string UopPath
{
get { return m_UopPath; }
set { m_UopPath = value; }
}
public bool Load()
{
UnloadUop();
bool bReturn = true;
try
{
using (FileStream fsToParse = new FileStream(m_UopPath, FileMode.Open))
{
using (BinaryReader brToParse = new BinaryReader(fsToParse))
{
m_UopFile = UOPFile.FromBinary(brToParse);
}
}
}
catch
{
m_UopFile = null;
bReturn = false;
}
GC.Collect();
return bReturn;
}
public bool Load(string sPath)
{
m_UopPath = sPath;
return Load();
}
public bool Write(string sPath)
{
bool bReturn = true;
try
{
using (FileStream fsToParse = new FileStream(sPath, FileMode.Create))
{
using (BinaryWriter brToParse = new BinaryWriter(fsToParse))
{
m_UopFile.ToBinary(brToParse);
}
}
}
catch
{
bReturn = false;
}
GC.Collect();
return bReturn;
}
public bool UnPack(string sPath)
{
bool bReturn = true;
string fileName = Path.GetFileNameWithoutExtension(m_UopPath);
int i = 0, j = 0;
try
{
foreach (UOPIndexBlockHeader dumpTemp1 in UopFile.m_Content)
{
foreach (UOPPairData dumpTemp2 in dumpTemp1.m_ListData)
{
using (FileStream fsWrite = File.Create(sPath + @"\" + String.Format(StaticData.UNPACK_NAMEPATTERN, fileName, i, j, dumpTemp2.First.IsCompressed ? StaticData.UNPACK_EXT_COMP : StaticData.UNPACK_EXT_UCOMP)))
{
using (BinaryWriter bwWrite = new BinaryWriter(fsWrite))
{
bwWrite.Write(dumpTemp2.Second.Extract(dumpTemp2.First.IsCompressed, dumpTemp2.First.m_LenghtUncompressed));
}
}
j++;
}
i++;
j = 0;
}
}
catch
{
bReturn = false;
}
GC.Collect();
return bReturn;
}
public void Delete(UOPIndexBlockHeader toDelete)
{
for (int i = 0; i < UopFile.m_Content.Count; i++)
{
if (toDelete.Equals(UopFile.m_Content[i]))
{
UopFile.m_Content.RemoveAt(i);
}
}
}
public void Delete(UOPPairData toDelete)
{
foreach (UOPIndexBlockHeader ibhCurrent in UopFile.m_Content)
{
for (int i = 0; i < ibhCurrent.m_ListData.Count; i++)
{
if (toDelete.Equals(ibhCurrent.m_ListData[i]))
{
ibhCurrent.m_ListData.RemoveAt(i);
}
}
}
}
public void FixOffsets(int iIndex, int subIndex)
{
// Fix every IndexHeader
foreach (UOPIndexBlockHeader uopibhCurrent in m_UopFile.m_Content)
{
uopibhCurrent.m_Files = (uint)(uopibhCurrent.FilesDynamicCount);
}
// Fix total file count
int iCurrentFiles = m_UopFile.FilesDynamicCount;
if (iCurrentFiles != (int)(m_UopFile.m_Header.m_totalIndex))
{
m_UopFile.m_Header.m_totalIndex = (uint)(iCurrentFiles);
}
// Fix compression flag
foreach (UOPIndexBlockHeader uopibhCurrent in m_UopFile.m_Content)
{
foreach (UOPPairData uopPairCurrent in uopibhCurrent.m_ListData)
{
uopPairCurrent.First.IsCompressed = uopPairCurrent.First.IsReallyCompressed;
}
}
// Fix offsets starting from (iIndex,subIndex)
for (int outerIndex = iIndex; outerIndex < m_UopFile.m_Content.Count; outerIndex++)
{
for (int innerIndex = subIndex; innerIndex < m_UopFile.m_Content[outerIndex].m_ListData.Count; innerIndex++)
{
int outerIndexForCalc, innerIndexForCalc;
ulong newOffset;
if (innerIndex == 0)
{
if (outerIndex == 0)
{
innerIndexForCalc = outerIndexForCalc = 0;
}
else
{
outerIndexForCalc = outerIndex - 1;
innerIndexForCalc = m_UopFile.m_Content[outerIndexForCalc].m_ListData.Count - 1;
}
}
else
{
outerIndexForCalc = outerIndex;
innerIndexForCalc = innerIndex - 1;
}
newOffset = m_UopFile.m_Content[outerIndexForCalc].m_ListData[innerIndexForCalc].First.m_OffsetOfDataBlock;
newOffset += ((innerIndex == 0) && (outerIndex == 0)) ? 0 : (UOPFileData.SIZE + m_UopFile.m_Content[outerIndexForCalc].m_ListData[innerIndexForCalc].First.m_LenghtCompressed);
if (outerIndex != outerIndexForCalc)
{
newOffset += UOPIndexBlockHeader.SIZE + (UOPFileIndexDef.SIZE * 100);
}
m_UopFile.m_Content[outerIndex].m_ListData[innerIndex].First.m_OffsetOfDataBlock = newOffset;
}
// After the first ride, we start from the begin
subIndex = 0;
if ( m_UopFile.m_Content[outerIndex].m_OffsetNextIndex != 0 )
{
int outerIndexForCalc = outerIndex - 1;
m_UopFile.m_Content[outerIndex].m_OffsetNextIndex = (ulong)((outerIndexForCalc == -1) ? 0 : m_UopFile.m_Content[outerIndexForCalc].m_OffsetNextIndex);
m_UopFile.m_Content[outerIndex].m_OffsetNextIndex += (ulong)((outerIndexForCalc == -1) ? UOPGeneralHeader.SIZE : 0);
m_UopFile.m_Content[outerIndex].m_OffsetNextIndex += (ulong)(UOPIndexBlockHeader.SIZE) ;
m_UopFile.m_Content[outerIndex].m_OffsetNextIndex += (ulong)(UOPFileIndexDef.SIZE * 100);
m_UopFile.m_Content[outerIndex].m_OffsetNextIndex += (ulong)(UOPFileData.SIZE * m_UopFile.m_Content[outerIndex].m_ListData.Count);
m_UopFile.m_Content[outerIndex].m_OffsetNextIndex += m_UopFile.m_Content[outerIndex].TotalSizeOfCompressedData;
}
}
}
/// <summary>
/// Replaces the payload of the given UOP data pair with the content of a file on
/// disk, optionally zlib-compressing it first.
/// </summary>
/// <param name="sWhat">Path of the file whose bytes become the new payload.</param>
/// <param name="upData">Data pair inside the UOP archive to overwrite.</param>
/// <param name="bUncompressed">If true the file is stored as-is; otherwise it is zlib-compressed.</param>
/// <returns><see cref="UopPatchError.Okay"/> on success, otherwise the specific error code.</returns>
public UopPatchError Replace(string sWhat, UOPPairData upData, bool bUncompressed)
{
    if (upData == null)
    {
        return UopPatchError.IndexBlockError;
    }
    if ((sWhat == null) || (!File.Exists(sWhat)))
    {
        return UopPatchError.FileError;
    }
    // File.ReadAllBytes replaces the manual FileStream/BinaryReader dance of the
    // old code, which also allocated a byte[] only to immediately overwrite it
    // with the ReadBytes result (a wasted full-size allocation).
    byte[] fileContent = File.ReadAllBytes(sWhat);
    byte[] compressedStream;
    int iDestLength = -1;
    if (bUncompressed)
    {
        compressedStream = fileContent;
        iDestLength = fileContent.Length;
    }
    else
    {
        // CompressBound gives the worst-case zlib output size for this input.
        compressedStream = new byte[(int)Compressor.CompressBound((ulong)(fileContent.Length))];
        iDestLength = compressedStream.Length;
        if (ZLibError.Okay != Compressor.Compress(compressedStream, ref iDestLength, fileContent, fileContent.Length))
        {
            return UopPatchError.CompressionError;
        }
    }
    // Defensive sanity check before touching the archive.
    if ((compressedStream == null) || (iDestLength == -1) || (bUncompressed && (compressedStream.Length != iDestLength)))
    {
        return UopPatchError.BufferError;
    }
    // NOTE: the old code forced GC.Collect() on every path; the temporary
    // buffers are reclaimed naturally, so the forced collections were removed.
    bool bResultReplace = upData.ReplaceData(compressedStream, (uint)(iDestLength), (uint)(fileContent.Length));
    return bResultReplace ? UopPatchError.Okay : UopPatchError.ReplaceError;
}
/// <summary>
/// Index-based overload: resolves the (block, entry) pair at the given indices
/// and delegates to the <c>UOPPairData</c>-based <c>Replace</c>.
/// </summary>
public UopPatchError Replace(string sWhat, int iIndex, int subIndex, bool bUncompressed)
{
    // Validate the outer index block first, then the entry inside it.
    if (iIndex >= m_UopFile.m_Content.Count)
        return UopPatchError.IndexBlockError;

    var indexBlock = m_UopFile.m_Content[iIndex];
    if (subIndex >= indexBlock.m_ListData.Count)
        return UopPatchError.FileIndexError;

    return Replace(sWhat, indexBlock.m_ListData[subIndex], bUncompressed);
}
/// <summary>
/// Result codes for UOP patch operations. Negative values are failures,
/// zero is success.
/// </summary>
public enum UopPatchError : int
{
    /// <summary>Payload replacement inside the archive failed.</summary>
    ReplaceError = -7,
    /// <summary>Writing the patched archive failed.</summary>
    WriteError = -6,
    /// <summary>Buffer sanity check failed before replacing data.</summary>
    BufferError = -5,
    /// <summary>zlib compression of the new payload failed.</summary>
    CompressionError = -4,
    /// <summary>File-entry (inner) index out of range or invalid.</summary>
    FileIndexError = -3,
    /// <summary>Index-block (outer) index out of range or invalid.</summary>
    IndexBlockError = -2,
    /// <summary>Source file missing or unreadable.</summary>
    FileError = -1,
    /// <summary>Operation completed successfully.</summary>
    Okay = 0,
}
#region IDisposable Membri di
/// <summary>
/// <see cref="IDisposable"/> implementation: releases the loaded UOP file content.
/// </summary>
public void Dispose()
{
    UnloadUop();
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// System.Net.ListenerAsyncResult
//
// Authors:
// Gonzalo Paniagua Javier (gonzalo@ximian.com)
//
// Copyright (c) 2005 Ximian, Inc (http://www.ximian.com)
//
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System.Runtime.ExceptionServices;
using System.Threading;
namespace System.Net
{
/// <summary>
/// Managed <see cref="IAsyncResult"/> used by HttpListener.BeginGetContext.
/// A result may "forward" to a replacement result when authentication requires
/// a second round-trip (401 challenge); every member delegates to
/// <c>_forward</c> when it is set.
/// </summary>
internal class ListenerAsyncResult : IAsyncResult
{
    private ManualResetEvent _handle;       // lazily created by AsyncWaitHandle
    private bool _synch;
    private bool _completed;
    private AsyncCallback _cb;
    private object _state;
    private Exception _exception;
    private HttpListenerContext _context;
    private object _locker = new object();
    private ListenerAsyncResult _forward;   // replacement result after a 401 challenge
    internal readonly HttpListener _parent;
    internal bool _endCalled;
    internal bool _inGet;

    public ListenerAsyncResult(HttpListener parent, AsyncCallback cb, object state)
    {
        _parent = parent;
        _cb = cb;
        _state = state;
    }

    // Completes the operation with an error and signals any waiters/callback.
    internal void Complete(Exception exc)
    {
        if (_forward != null)
        {
            _forward.Complete(exc);
            return;
        }
        _exception = exc;
        // An ObjectDisposedException during GetContext means the listener was
        // closed; surface that as an HttpListenerException instead.
        if (_inGet && (exc is ObjectDisposedException))
            _exception = new HttpListenerException((int)HttpStatusCode.InternalServerError, SR.net_listener_close);
        lock (_locker)
        {
            _completed = true;
            if (_handle != null)
                _handle.Set();

            if (_cb != null)
                ThreadPool.UnsafeQueueUserWorkItem(s_invokeCB, this);
        }
    }

    private static WaitCallback s_invokeCB = new WaitCallback(InvokeCallback);

    private static void InvokeCallback(object o)
    {
        ListenerAsyncResult ares = (ListenerAsyncResult)o;
        if (ares._forward != null)
        {
            InvokeCallback(ares._forward);
            return;
        }
        try
        {
            ares._cb(ares);
        }
        catch
        {
            // User callbacks must not tear down the listener's worker thread.
        }
    }

    internal void Complete(HttpListenerContext context)
    {
        Complete(context, false);
    }

    // Completes the operation with a received context. If the selected
    // authentication scheme requires credentials and the request carried none,
    // sends a 401 challenge and forwards to a fresh BeginGetContext.
    internal void Complete(HttpListenerContext context, bool synch)
    {
        if (_forward != null)
        {
            _forward.Complete(context, synch);
            return;
        }
        _synch = synch;
        _context = context;
        lock (_locker)
        {
            AuthenticationSchemes schemes = context._listener.SelectAuthenticationScheme(context);
            // BUGFIX: both halves of this test now use the per-context scheme
            // returned by SelectAuthenticationScheme. The old code compared the
            // listener-wide AuthenticationSchemes property for Negotiate, which
            // ignored any AuthenticationSchemeSelectorDelegate in effect.
            if ((schemes == AuthenticationSchemes.Basic || schemes == AuthenticationSchemes.Negotiate) && context.Request.Headers["Authorization"] == null)
            {
                context.Response.StatusCode = (int)HttpStatusCode.Unauthorized;
                context.Response.Headers["WWW-Authenticate"] = schemes + " realm=\"" + context._listener.Realm + "\"";
                context.Response.OutputStream.Close();
                IAsyncResult ares = context._listener.BeginGetContext(_cb, _state);
                _forward = (ListenerAsyncResult)ares;
                lock (_forward._locker)
                {
                    // Hand our wait handle to the replacement so existing
                    // waiters are released when IT completes.
                    if (_handle != null)
                        _forward._handle = _handle;
                }
                // Guard against pathological chains of repeated 401 challenges.
                ListenerAsyncResult next = _forward;
                for (int i = 0; next._forward != null; i++)
                {
                    if (i > 20)
                        Complete(new HttpListenerException((int)HttpStatusCode.Unauthorized, SR.net_listener_auth_errors));
                    next = next._forward;
                }
            }
            else
            {
                _completed = true;
                _synch = false;

                if (_handle != null)
                    _handle.Set();

                if (_cb != null)
                    ThreadPool.UnsafeQueueUserWorkItem(s_invokeCB, this);
            }
        }
    }

    // Returns the completed context, rethrowing any stored exception with its
    // original stack trace preserved.
    internal HttpListenerContext GetContext()
    {
        if (_forward != null)
        {
            return _forward.GetContext();
        }
        if (_exception != null)
        {
            ExceptionDispatchInfo.Throw(_exception);
        }
        return _context;
    }

    public object AsyncState
    {
        get
        {
            if (_forward != null)
                return _forward.AsyncState;
            return _state;
        }
    }

    public WaitHandle AsyncWaitHandle
    {
        get
        {
            if (_forward != null)
                return _forward.AsyncWaitHandle;

            lock (_locker)
            {
                // Create pre-signaled if we already completed.
                if (_handle == null)
                    _handle = new ManualResetEvent(_completed);
            }
            return _handle;
        }
    }

    public bool CompletedSynchronously
    {
        get
        {
            if (_forward != null)
                return _forward.CompletedSynchronously;
            return _synch;
        }
    }

    public bool IsCompleted
    {
        get
        {
            if (_forward != null)
                return _forward.IsCompleted;

            lock (_locker)
            {
                return _completed;
            }
        }
    }
}
}
| |
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
// Per-squad phase of the active player's turn, driven by Controller.Update.
enum TurnStage
{
    None,       // no action in progress; squad selection and new actions allowed
    Moving,     // selected squad is being moved with the left stick
    Combat,     // selected squad is choosing/attacking a target
    InBetween   // an action finished but the squad still has actions left
}
/// <summary>
/// Top-level turn/input controller for a two-player squad game: tracks whose
/// turn it is, which squad is selected, and drives the Move/Combat/Skip actions
/// from gamepad input. Also owns the field-of-view wedge mesh.
/// </summary>
public class Controller : MonoBehaviour
{
    public const int NUM_PLAYERS = 2;
    public float turnSpeed = 20;
    public float zoomSpeed = 20;

    private GameObject[] squads;             // all squads belonging to the current player
    private List<GameObject> targetsInRange; // combat candidates for the selected squad
    private int selectedSquadIndex;
    private int selectedTargetIndex;
    private GameObject selectedLight;        // highlight marker hovering over the selected squad
    private Rigidbody selectedRB;            // used to move selected squad
    private Rigidbody selectedTarget;        // used to pick a target for combat
    private Text debugText;
    private Vector3 lightOffset = new Vector3(0, 2, 0);
    private TurnStage currentStage = TurnStage.None;

    //FoV
    private Mesh mesh;
    private Material materialFov;
    private const int fovQuality = 15;       // number of quads in the FoV wedge

    private int currentPlayersTurn = 0;

    // Called by LoadGame (instead of Start/Awake): wires up scene references and
    // pre-allocates the FoV mesh buffers.
    public void init()
    {
        selectedLight = GameObject.Find("SelectedLight");
        if (selectedLight == null) throw new MissingReferenceException("Need SelectedLight");
        targetsInRange = new List<GameObject>();
        GameObject g = GameObject.Find("DebugText");
        if (g == null) throw new MissingReferenceException("Need Debug text");
        debugText = g.GetComponent<Text>();

        //FoV
        materialFov = (Material)Resources.Load("Materials/FoV");
        if (materialFov == null)
            throw new MissingReferenceException("Need Resources/Materials/FoV");
        mesh = new Mesh();
        mesh.vertices = new Vector3[4 * fovQuality]; // Could be of size [2 * quality + 2] if circle segment is continuous
        mesh.triangles = new int[3 * 2 * fovQuality];
        Vector3[] normals = new Vector3[4 * fovQuality];
        Vector2[] uv = new Vector2[4 * fovQuality];
        for (int i = 0; i < uv.Length; i++)
            uv[i] = new Vector2(0, 0);
        for (int i = 0; i < normals.Length; i++)
            normals[i] = new Vector3(0, 1, 0); // every quad faces straight up
        mesh.uv = uv;
        mesh.normals = normals;

        //needed for unit movement to work out, no one should fly away
        Physics.IgnoreLayerCollision(LayerMask.NameToLayer("Squad"), LayerMask.NameToLayer("Squad"));
        Physics.IgnoreLayerCollision(LayerMask.NameToLayer("Unit"), LayerMask.NameToLayer("Squad"));
        Physics.IgnoreLayerCollision(LayerMask.NameToLayer("Unit"), LayerMask.NameToLayer("Unit"));
    }

    /// <summary>
    /// Function for getting targets within the range of an object.
    /// </summary>
    /// <param name="center">origin of attack</param>
    /// <param name="radius">radius of attack</param>
    /// <param name="activePlayer">player whose own squads are excluded</param>
    /// <param name="layer">bit-shifted layer mask; 0 means "everything except the Squad layer"</param>
    /// <returns>enemy squad GameObjects inside the sphere</returns>
    public List<GameObject> getTargets(Vector3 center, float radius, int activePlayer, int layer = 0)
    {
        if (layer == 0)
        {
            layer = 1 << 12; // bit 12 — the "Squad" layer (old comment said layer 8, which was wrong)
            layer = ~layer;  // everything EXCEPT that layer
        }
        // NOTE(review): 'layer' is computed but never passed to OverlapSphere, so
        // the sphere test currently ignores layers entirely — confirm whether
        // Physics.OverlapSphere(center, radius, layer) was intended.
        Collider[] hitColliders = Physics.OverlapSphere(center, radius);
        List<GameObject> targets = new List<GameObject>();
        Debug.Log("Number of objects in range: " + hitColliders.Length);
        // Keep any collider tagged as an enemy player's squad.
        for (int i = 0; i < hitColliders.Length; i++)
        {
            for (int j = 0; j < NUM_PLAYERS; j++)
            {
                string playerTarget = "Player" + j.ToString() + "Squad";
                if (j != activePlayer && hitColliders[i].tag == playerTarget)
                    targets.Add(hitColliders[i].gameObject);
            }
        }
        return targets;
    }

    // Re-collects the squads carrying the given tag (one tag per player) and
    // resets the selection to the first one.
    public void updateSquadList(string tag)
    {
        squads = GameObject.FindGameObjectsWithTag(tag);
        selectedSquadIndex = 0;
        if (squads.Length == 0) throw new UnityException("Failed to find squad.");
        setLight();
    }

    // Moves the highlight marker above the currently selected squad.
    private void setLight()
    {
        selectedLight.transform.position = squads[selectedSquadIndex].transform.position + lightOffset;
    }

    // R1/L1 cycle the selection through squads that are still active in the
    // scene; the camera follows and any residual velocity is zeroed.
    private void checkChangeSquad()
    {
        if (Input.GetButtonUp("R1"))
        {
            do
            {
                selectedSquadIndex++;
                selectedSquadIndex %= squads.Length;
            } while (!squads[selectedSquadIndex].activeInHierarchy);
            if (selectedRB != null) selectedRB.velocity = Vector3.zero;
            Camera.main.GetComponent<CameraController>().setCameraTarget(squads[selectedSquadIndex].transform.position);
        }
        if (Input.GetButtonUp("L1"))
        {
            do
            {
                selectedSquadIndex--;
                if (selectedSquadIndex < 0) selectedSquadIndex = squads.Length - 1;
            } while (!squads[selectedSquadIndex].activeInHierarchy);
            if (selectedRB != null) selectedRB.velocity = Vector3.zero;
            Camera.main.GetComponent<CameraController>().setCameraTarget(squads[selectedSquadIndex].transform.position);
        }
        selectedRB = squads[selectedSquadIndex].GetComponent<Rigidbody>();
        if (selectedRB != null) selectedRB.velocity = Vector3.zero;
    }

    private CameraController getMainCamController()
    {
        return Camera.main.GetComponent<CameraController>();
    }

    private SquadManager getSelectedManager()
    {
        return squads[selectedSquadIndex].GetComponent<SquadManager>();
    }

    // D-pad left = start moving, right = start combat, down = skip an action.
    // Only squads with remaining actions may start something.
    private void checkNewAction()
    {
        //start move
        if (Input.GetAxis("DpadH") == -1)
        {
            if (getSelectedManager().numActions > 0)
            {
                currentStage = TurnStage.Moving;
                getSelectedManager().startMovement();
            }
        }
        //start combat
        if (Input.GetAxis("DpadH") == 1)
        {
            if (getSelectedManager().numActions > 0)
            {
                currentStage = TurnStage.Combat;
                // 20 = attack radius; matches the hard-coded combat range.
                targetsInRange = getTargets(selectedRB.position, 20, currentPlayersTurn);
                selectedTargetIndex = 0;
                Debug.Log("Number of targets within range: " + targetsInRange.Count.ToString());
            }
        }
        //skip
        if (Input.GetAxis("DpadV") == -1)
        {
            if (getSelectedManager().numActions > 0)
            {
                currentStage = TurnStage.InBetween;
                getSelectedManager().skipAction();
            }
            if (getSelectedManager().numActions == 0)
            {
                currentStage = TurnStage.None;
            }
            checkStateEndOfAction();
        }
    }

    // Jumps the selection (and camera) to the first active squad that still has
    // actions left this turn.
    private void selectNextAvalibleSquad()
    {
        for (selectedSquadIndex = 0; selectedSquadIndex < squads.Length; selectedSquadIndex++)
        {
            if (squads[selectedSquadIndex].activeInHierarchy && getSelectedManager().numActions > 0)
            {
                getMainCamController().setCameraTarget(squads[selectedSquadIndex].transform.position);
                break;
            }
        }
    }

    // Clears target highlights and decides the next stage: turn handover when
    // every squad is spent, otherwise None/InBetween for the selected squad.
    private void checkStateEndOfAction()
    {
        if (targetsInRange.Count > 0)
        {
            foreach (GameObject target in targetsInRange)
            {
                if (target.activeInHierarchy)
                    target.SendMessage("disableLight");
            }
            targetsInRange.Clear();
        }
        if (getSelectedManager().numActions == SquadManager.MAX_ACTIONS || getSelectedManager().numActions == 0)
        {
            currentStage = TurnStage.None;
            if (checkTurnComplete())
                nextTurn();
            else selectNextAvalibleSquad();
        }
        else
            currentStage = TurnStage.InBetween;
    }

    float GetSquadAngle()
    {
        return 90 - Mathf.Rad2Deg * Mathf.Atan2(transform.forward.z, transform.forward.x); // Left handed CW. z = angle 0, x = angle 90
    }

    // True when no squad of the current player has actions left.
    bool checkTurnComplete()
    {
        foreach (GameObject g in squads)
        {
            if (g.GetComponent<SquadManager>().numActions > 0)
                return false;
        }
        return true;
    }

    //call at end of turn: refill actions and hand control to the next player
    void nextTurn()
    {
        foreach (GameObject g in squads)
        {
            g.GetComponent<SquadManager>().resetActions();
        }
        currentPlayersTurn = (currentPlayersTurn + 1) % NUM_PLAYERS;
        updateSquadList("Player" + currentPlayersTurn + "Squad");
        Debug.Log("Player #" + currentPlayersTurn);
    }

    // Update is called once per frame: refreshes the debug HUD then dispatches
    // input handling according to currentStage.
    void Update()
    {
        debugText.text = "Player:" + currentPlayersTurn;
        debugText.text += " Remaining Actions:" + getSelectedManager().numActions;
        debugText.text += " Current Stage: ";
        switch (currentStage)
        {
            case TurnStage.None: debugText.text += "None"; break;
            case TurnStage.Moving: debugText.text += "Moving"; break;
            case TurnStage.InBetween: debugText.text += "In Between"; break;
            case TurnStage.Combat: debugText.text += "Combat"; break;
        }
        if (squads.Length > 0)
        {
            if (currentStage == TurnStage.None)
            {
                //skip turn button
                if (Input.GetButtonDown("Select")) { nextTurn(); }
                checkChangeSquad();
                checkNewAction();
            }
            else if (currentStage == TurnStage.InBetween)
            {
                checkNewAction();
            }
            else if (currentStage == TurnStage.Moving)
            {
                //if the squad is no longer moving (triggered if max distance is met)
                if (!getSelectedManager().midMovement)
                {
                    //if we have another action
                    if (getSelectedManager().numActions > 0)
                    {
                        currentStage = TurnStage.InBetween;
                    }
                    else currentStage = TurnStage.None;
                }
                //user undo
                else if (Input.GetButtonDown("Circle"))
                {
                    getSelectedManager().undoMove();
                    checkStateEndOfAction();
                }
                //user ends early
                else if (Input.GetButtonDown("Cross"))
                {
                    getSelectedManager().endMovement();
                    checkStateEndOfAction();
                }
                else
                {
                    // Drive the squad with the left stick, relative to the camera.
                    selectedRB = squads[selectedSquadIndex].GetComponent<Rigidbody>();
                    float v = Input.GetAxis("JoystickLV");
                    float h = Input.GetAxis("JoystickLH");
                    selectedRB.velocity = (Quaternion.Euler(0, getMainCamController().angle, 0) * new Vector3(h, 0, v).normalized) * 20;
                    getMainCamController().setCameraTarget(squads[selectedSquadIndex].transform.position, true);
                }
            }
            else if (currentStage == TurnStage.Combat)
            {
                //TODO: enable combat in squad
                //skip
                if (Input.GetAxis("DpadV") == -1)
                {
                    getSelectedManager().skipAction();
                    checkStateEndOfAction();
                }
                // R1/L1 cycle the highlighted target.
                if (Input.GetButtonUp("R1") && targetsInRange.Count > 0)
                {
                    targetsInRange[selectedTargetIndex].SendMessage("disableLight");
                    selectedTargetIndex++;
                    selectedTargetIndex %= targetsInRange.Count;
                    targetsInRange[selectedTargetIndex].SendMessage("enableLight");
                }
                if (Input.GetButtonUp("L1") && targetsInRange.Count > 0)
                {
                    targetsInRange[selectedTargetIndex].SendMessage("disableLight");
                    selectedTargetIndex--;
                    if (selectedTargetIndex < 0) selectedTargetIndex = targetsInRange.Count - 1;
                    targetsInRange[selectedTargetIndex].SendMessage("enableLight");
                }
                if (Input.GetButtonDown("Cross") && targetsInRange.Count > 0) //A
                {
                    Debug.Log("I shot someone!");
                    targetsInRange[selectedTargetIndex].SendMessage("takeDamage", 5);
                    getSelectedManager().skipAction();
                    checkStateEndOfAction();
                }
                if (Input.GetButtonDown("Circle")) //B — cancel combat without spending the action
                {
                    if (getSelectedManager().numActions == 2) currentStage = TurnStage.None;
                    if (getSelectedManager().numActions == 1) currentStage = TurnStage.InBetween;
                    checkStateEndOfAction();
                }
                else
                {
                    //this is where aiming would happen
                }
            }
            setLight();
        }
    }

    // Shared geometry builder for the field-of-view wedge: fills 'mesh' with
    // fovQuality quads spanning [lookat - angle_fov, lookat + angle_fov] between
    // dist_min and dist_max from this object's position, and returns it.
    // (drawFoV and setupFoV previously duplicated this entire body.)
    private Mesh buildFoVMesh(float angle_fov, float dist_max)
    {
        const float dist_min = 5.0f;
        float angle_lookat = GetSquadAngle();
        float angle_start = angle_lookat - angle_fov;
        float angle_end = angle_lookat + angle_fov;
        float angle_delta = (angle_end - angle_start) / fovQuality;
        float angle_curr = angle_start;
        float angle_next = angle_start + angle_delta;
        Vector3[] vertices = new Vector3[4 * fovQuality]; // Could be of size [2 * quality + 2] if circle segment is continuous
        int[] triangles = new int[3 * 2 * fovQuality];
        for (int i = 0; i < fovQuality; i++)
        {
            Vector3 sphere_curr = new Vector3(
                Mathf.Sin(Mathf.Deg2Rad * (angle_curr)), 0, // Left handed CW
                Mathf.Cos(Mathf.Deg2Rad * (angle_curr)));
            Vector3 sphere_next = new Vector3(
                Mathf.Sin(Mathf.Deg2Rad * (angle_next)), 0,
                Mathf.Cos(Mathf.Deg2Rad * (angle_next)));
            Vector3 pos_curr_min = transform.position + sphere_curr * dist_min;
            Vector3 pos_curr_max = transform.position + sphere_curr * dist_max;
            Vector3 pos_next_min = transform.position + sphere_next * dist_min;
            Vector3 pos_next_max = transform.position + sphere_next * dist_max;
            int a = 4 * i;
            int b = 4 * i + 1;
            int c = 4 * i + 2;
            int d = 4 * i + 3;
            vertices[a] = pos_curr_min;
            vertices[b] = pos_curr_max;
            vertices[c] = pos_next_max;
            vertices[d] = pos_next_min;
            triangles[6 * i] = a;     // Triangle1: abc
            triangles[6 * i + 1] = b;
            triangles[6 * i + 2] = c;
            triangles[6 * i + 3] = c; // Triangle2: cda
            triangles[6 * i + 4] = d;
            triangles[6 * i + 5] = a;
            angle_curr += angle_delta;
            angle_next += angle_delta;
        }
        mesh.vertices = vertices;
        mesh.triangles = triangles;
        return mesh;
    }

    // Rebuilds the FoV wedge and renders it at the selected squad's position.
    private void drawFoV(Quaternion fovRotation, float angle_fov = 20, float dist_max = 15)
    {
        buildFoVMesh(angle_fov, dist_max);
        Graphics.DrawMesh(mesh, squads[selectedSquadIndex].transform.position, fovRotation, materialFov, 0);
    }

    // Rebuilds the FoV wedge and hands back the shared mesh without drawing it.
    Mesh setupFoV(float angle_fov = 20, float dist_max = 15)
    {
        return buildFoVMesh(angle_fov, dist_max);
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
**
**
** Purpose: Enumerates files and dirs
**
===========================================================*/
using System.Collections;
using System.Collections.Generic;
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System.Runtime.InteropServices;
using System.Globalization;
using System.Runtime.Versioning;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Threading;
namespace System.IO
{
// Overview:
// The key methods instantiate FileSystemEnumerableIterators. These compose the iterator with search result
// handlers that instantiate the FileInfo, DirectoryInfo, String, etc. The handlers then perform any
// additional required permission demands.
// Factory for file-system enumerables: composes the iterator with a result
// handler that materializes each match as a plain path string.
internal static class FileSystemEnumerableFactory
{
    internal static IEnumerable<String> CreateFileNameIterator(String path, String originalUserPath, String searchPattern,
                                                               bool includeFiles, bool includeDirs, SearchOption searchOption, bool checkHost)
    {
        Contract.Requires(path != null);
        Contract.Requires(originalUserPath != null);
        Contract.Requires(searchPattern != null);

        // The handler decides which WIN32_FIND_DATA entries are surfaced and
        // turns each one into a String.
        var handler = new StringResultHandler(includeFiles, includeDirs);
        return new FileSystemEnumerableIterator<String>(path, originalUserPath, searchPattern, searchOption, handler, checkHost);
    }
}
// Abstract Iterator, borrowed from Linq. Used in anticipation of need for similar enumerables
// in the future
// Minimal Linq-style iterator base (borrowed from Linq): the object serves as
// both the enumerable and — for the first enumeration on the creating thread —
// the enumerator itself. Any other GetEnumerator call receives a clone.
abstract internal class Iterator<TSource> : IEnumerable<TSource>, IEnumerator<TSource>
{
    private readonly int _threadId; // thread that created this iterator
    internal int state;             // 0 = fresh, 1 = enumerating, -1 = disposed
    internal TSource current;

    public Iterator()
    {
        _threadId = Thread.CurrentThread.ManagedThreadId;
    }

    public TSource Current
    {
        get { return current; }
    }

    // Derived classes produce an independent iterator over the same source.
    protected abstract Iterator<TSource> Clone();

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing)
    {
        current = default(TSource);
        state = -1;
    }

    public IEnumerator<TSource> GetEnumerator()
    {
        // Reuse this instance only for the first enumeration requested on the
        // owning thread; everything else gets its own clone.
        bool firstUseOnOwnerThread = state == 0 && _threadId == Thread.CurrentThread.ManagedThreadId;
        Iterator<TSource> enumerator = firstUseOnOwnerThread ? this : Clone();
        enumerator.state = 1;
        return enumerator;
    }

    public abstract bool MoveNext();

    object IEnumerator.Current
    {
        get { return Current; }
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }

    void IEnumerator.Reset()
    {
        throw new NotSupportedException();
    }
}
// Overview:
// Enumerates file system entries matching the search parameters. For recursive searches this
// searches through all the sub dirs and executes the search criteria against every dir.
//
// Generic implementation:
// FileSystemEnumerableIterator is generic. When it gets a WIN32_FIND_DATA, it calls the
// result handler to create an instance of the generic type.
//
// Usage:
// Use FileSystemEnumerableFactory to obtain FSEnumerables that can enumerate file system
// entries as String path names, FileInfos, DirectoryInfos, or FileSystemInfos.
//
// Security:
// For all the dirs/files returned, demands path discovery permission for their parent folders
internal class FileSystemEnumerableIterator<TSource> : Iterator<TSource>
{
private const int STATE_INIT = 1;
private const int STATE_SEARCH_NEXT_DIR = 2;
private const int STATE_FIND_NEXT_FILE = 3;
private const int STATE_FINISH = 4;
private SearchResultHandler<TSource> _resultHandler;
private List<Directory.SearchData> searchStack;
private Directory.SearchData searchData;
private String searchCriteria;
SafeFindHandle _hnd = null;
// empty means we know in advance that we won't find any search results, which can happen if:
// 1. we don't have a search pattern
// 2. we're enumerating only the top directory and found no matches during the first call
// This flag allows us to return early for these cases. We can't know this in advance for
// SearchOption.AllDirectories because we do a "*" search for subdirs and then use the
// searchPattern at each directory level.
bool empty;
private String userPath;
private SearchOption searchOption;
private String fullPath;
private String normalizedSearchPath;
#if !PLATFORM_UNIX
private int _oldMode;
private bool _setBackOldMode;
#endif
/// <summary>
/// Builds an iterator for the given search. When the normalized pattern is
/// empty the iterator is flagged empty up front and will yield nothing.
/// </summary>
internal FileSystemEnumerableIterator(String path, String originalUserPath, String searchPattern, SearchOption searchOption, SearchResultHandler<TSource> resultHandler, bool checkHost)
{
    Contract.Requires(path != null);
    Contract.Requires(originalUserPath != null);
    Contract.Requires(searchPattern != null);
    Contract.Requires(searchOption == SearchOption.AllDirectories || searchOption == SearchOption.TopDirectoryOnly);
    Contract.Requires(resultHandler != null);

#if !PLATFORM_UNIX
    // Suppress Windows critical-error dialogs while probing drives/directories;
    // Dispose restores the previous mode.
    _setBackOldMode = Interop.Kernel32.SetThreadErrorMode(Interop.Kernel32.SEM_FAILCRITICALERRORS, out _oldMode);
#endif

    searchStack = new List<Directory.SearchData>();
    String normalizedSearchPattern = NormalizeSearchPattern(searchPattern);
    if (normalizedSearchPattern.Length == 0)
    {
        empty = true;
    }
    else
    {
        _resultHandler = resultHandler;
        this.searchOption = searchOption;
        fullPath = Path.GetFullPath(path);
        String fullSearchString = GetFullSearchString(fullPath, normalizedSearchPattern);
        normalizedSearchPath = Path.GetDirectoryName(fullSearchString);

        // normalize search criteria
        searchCriteria = GetNormalizedSearchCriteria(fullSearchString, normalizedSearchPath);

        // fix up user path: if the pattern itself carried a directory part
        // (e.g. "sub\*.txt"), fold it into the user-visible path too.
        String searchPatternDirName = Path.GetDirectoryName(normalizedSearchPattern);
        String userPathTemp = originalUserPath;
        if (searchPatternDirName != null && searchPatternDirName.Length != 0)
        {
            userPathTemp = Path.Combine(userPathTemp, searchPatternDirName);
        }
        this.userPath = userPathTemp;

        searchData = new Directory.SearchData(normalizedSearchPath, this.userPath, searchOption);

        CommonInit();
    }
}
// Runs the search criteria against searchData's directory: opens the first find
// handle and either primes 'current' (TopDirectoryOnly fast path) or pushes
// searchData onto the stack for the recursive walk (AllDirectories).
private void CommonInit()
{
    Debug.Assert(searchCriteria != null && searchData != null, "searchCriteria and searchData should be initialized");

    // Execute searchCriteria against the current directory
    String searchPath = Path.Combine(searchData.fullPath, searchCriteria);

    Win32Native.WIN32_FIND_DATA data = new Win32Native.WIN32_FIND_DATA();

    // Open a Find handle
    _hnd = Win32Native.FindFirstFile(searchPath, data);

    if (_hnd.IsInvalid)
    {
        int hr = Marshal.GetLastWin32Error();
        if (hr != Win32Native.ERROR_FILE_NOT_FOUND && hr != Win32Native.ERROR_NO_MORE_FILES)
        {
            HandleError(hr, searchData.fullPath);
        }
        else
        {
            // flag this as empty only if we're searching just top directory
            // Used in fast path for top directory only
            empty = searchData.searchOption == SearchOption.TopDirectoryOnly;
        }
    }
    // fast path for TopDirectoryOnly. If we have a result, go ahead and set it to
    // current. If empty, dispose handle.
    if (searchData.searchOption == SearchOption.TopDirectoryOnly)
    {
        if (empty)
        {
            _hnd.Dispose();
        }
        else
        {
            SearchResult searchResult = CreateSearchResult(searchData, data);
            if (_resultHandler.IsResultIncluded(searchResult))
            {
                current = _resultHandler.CreateObject(searchResult);
            }
        }
    }
    // for AllDirectories, we first recurse into dirs, so cleanup and add searchData
    // to the stack
    else
    {
        _hnd.Dispose();
        searchStack.Add(searchData);
    }
}
// Used exclusively by Clone: the search parameters are already normalized, so
// the duplicate just restarts a fresh search of the same directory tree.
private FileSystemEnumerableIterator(String fullPath, String normalizedSearchPath, String searchCriteria, String userPath, SearchOption searchOption, SearchResultHandler<TSource> resultHandler)
{
    this.fullPath = fullPath;
    this.normalizedSearchPath = normalizedSearchPath;
    this.searchCriteria = searchCriteria;
    this._resultHandler = resultHandler;
    this.userPath = userPath;
    this.searchOption = searchOption;

    searchStack = new List<Directory.SearchData>();

    if (searchCriteria != null)
    {
        searchData = new Directory.SearchData(normalizedSearchPath, userPath, searchOption);

        CommonInit();
    }
    else
    {
        // Null criteria means the original was constructed empty; stay empty.
        empty = true;
    }
}
// Creates an independent iterator over the same search (used when GetEnumerator
// is called a second time or from a different thread).
protected override Iterator<TSource> Clone()
{
    return new FileSystemEnumerableIterator<TSource>(fullPath, normalizedSearchPath, searchCriteria, userPath, searchOption, _resultHandler);
}
// Closes the current find handle and, on Windows, restores the thread error
// mode the constructor changed; always chains to the base Dispose.
protected override void Dispose(bool disposing)
{
    try
    {
        if (_hnd != null)
        {
            _hnd.Dispose();
        }
    }
    finally
    {
#if !PLATFORM_UNIX
        if (_setBackOldMode)
        {
            uint _ignore;
            Interop.Kernel32.SetThreadErrorMode(_oldMode, out _ignore);
        }
#endif
        base.Dispose(disposing);
    }
}
/// <summary>
/// Advances to the next matching entry. State machine:
/// INIT -> FIND_NEXT_FILE (TopDirectoryOnly) or SEARCH_NEXT_DIR (AllDirectories)
/// -> FINISH. SEARCH_NEXT_DIR pops directories off searchStack and opens a find
/// handle in each; FIND_NEXT_FILE drains the current handle.
/// </summary>
public override bool MoveNext()
{
    Win32Native.WIN32_FIND_DATA data = new Win32Native.WIN32_FIND_DATA();
    switch (state)
    {
        case STATE_INIT:
            {
                if (empty)
                {
                    state = STATE_FINISH;
                    goto case STATE_FINISH;
                }
                if (searchData.searchOption == SearchOption.TopDirectoryOnly)
                {
                    state = STATE_FIND_NEXT_FILE;
                    // CommonInit may already have primed 'current' with the first match.
                    if (current != null)
                    {
                        return true;
                    }
                    else
                    {
                        goto case STATE_FIND_NEXT_FILE;
                    }
                }
                else
                {
                    state = STATE_SEARCH_NEXT_DIR;
                    goto case STATE_SEARCH_NEXT_DIR;
                }
            }
        case STATE_SEARCH_NEXT_DIR:
            {
                Debug.Assert(searchData.searchOption != SearchOption.TopDirectoryOnly, "should not reach this code path if searchOption == TopDirectoryOnly");
                // Traverse directory structure. We need to get '*'
                while (searchStack.Count > 0)
                {
                    searchData = searchStack[0];
                    Debug.Assert((searchData.fullPath != null), "fullpath can't be null!");
                    searchStack.RemoveAt(0);

                    // Traverse the subdirs
                    AddSearchableDirsToStack(searchData);

                    // Execute searchCriteria against the current directory
                    String searchPath = Path.Combine(searchData.fullPath, searchCriteria);

                    // Open a Find handle
                    _hnd = Win32Native.FindFirstFile(searchPath, data);
                    if (_hnd.IsInvalid)
                    {
                        int hr = Marshal.GetLastWin32Error();
                        // Missing/empty directories are not errors — move on to the next one.
                        if (hr == Win32Native.ERROR_FILE_NOT_FOUND || hr == Win32Native.ERROR_NO_MORE_FILES || hr == Win32Native.ERROR_PATH_NOT_FOUND)
                            continue;

                        _hnd.Dispose();
                        HandleError(hr, searchData.fullPath);
                    }

                    state = STATE_FIND_NEXT_FILE;

                    SearchResult searchResult = CreateSearchResult(searchData, data);
                    if (_resultHandler.IsResultIncluded(searchResult))
                    {
                        current = _resultHandler.CreateObject(searchResult);
                        return true;
                    }
                    else
                    {
                        goto case STATE_FIND_NEXT_FILE;
                    }
                }
                state = STATE_FINISH;
                goto case STATE_FINISH;
            }
        case STATE_FIND_NEXT_FILE:
            {
                if (searchData != null && _hnd != null)
                {
                    // Keep asking for more matching files/dirs, add it to the list
                    while (Win32Native.FindNextFile(_hnd, data))
                    {
                        SearchResult searchResult = CreateSearchResult(searchData, data);
                        if (_resultHandler.IsResultIncluded(searchResult))
                        {
                            current = _resultHandler.CreateObject(searchResult);
                            return true;
                        }
                    }

                    // Make sure we quit with a sensible error.
                    int hr = Marshal.GetLastWin32Error();

                    if (_hnd != null)
                        _hnd.Dispose();

                    // ERROR_FILE_NOT_FOUND is valid here because if the top level
                    // dir doesn't contain any subdirs and matching files then
                    // we will get here with this errorcode from the searchStack walk
                    if ((hr != 0) && (hr != Win32Native.ERROR_NO_MORE_FILES)
                        && (hr != Win32Native.ERROR_FILE_NOT_FOUND))
                    {
                        HandleError(hr, searchData.fullPath);
                    }
                }
                if (searchData.searchOption == SearchOption.TopDirectoryOnly)
                {
                    state = STATE_FINISH;
                    goto case STATE_FINISH;
                }
                else
                {
                    state = STATE_SEARCH_NEXT_DIR;
                    goto case STATE_SEARCH_NEXT_DIR;
                }
            }
        case STATE_FINISH:
            {
                Dispose();
                break;
            }
    }
    return false;
}
// Pairs the raw find data with the corresponding full and user-visible paths
// so the result handler can build its output object.
private SearchResult CreateSearchResult(Directory.SearchData localSearchData, Win32Native.WIN32_FIND_DATA findData)
{
    String userPathFinal = Path.Combine(localSearchData.userPath, findData.cFileName);
    String fullPathFinal = Path.Combine(localSearchData.fullPath, findData.cFileName);
    return new SearchResult(fullPathFinal, userPathFinal, findData);
}
// Tears the iterator down, then converts the Win32 error code into the
// appropriate IOException (throws; does not return normally).
private void HandleError(int hr, String path)
{
    Dispose();
    __Error.WinIOError(hr, path);
}
// Enumerates "*" in localSearchData's directory and inserts every subdirectory
// at the front of searchStack (in on-disk order) for later traversal.
private void AddSearchableDirsToStack(Directory.SearchData localSearchData)
{
    Contract.Requires(localSearchData != null);

    String searchPath = Path.Combine(localSearchData.fullPath, "*");
    SafeFindHandle hnd = null;
    Win32Native.WIN32_FIND_DATA data = new Win32Native.WIN32_FIND_DATA();
    try
    {
        // Get all files and dirs
        hnd = Win32Native.FindFirstFile(searchPath, data);

        if (hnd.IsInvalid)
        {
            int hr = Marshal.GetLastWin32Error();

            // This could happen if the dir doesn't contain any files.
            // Continue with the recursive search though, eventually
            // searchStack will become empty
            if (hr == Win32Native.ERROR_FILE_NOT_FOUND || hr == Win32Native.ERROR_NO_MORE_FILES || hr == Win32Native.ERROR_PATH_NOT_FOUND)
                return;

            HandleError(hr, localSearchData.fullPath);
        }

        // Add subdirs to searchStack. Exempt ReparsePoints as appropriate
        int incr = 0;
        do
        {
            if (FileSystemEnumerableHelpers.IsDir(data))
            {
                String tempFullPath = Path.Combine(localSearchData.fullPath, data.cFileName);
                String tempUserPath = Path.Combine(localSearchData.userPath, data.cFileName);
                SearchOption option = localSearchData.searchOption;

#if EXCLUDE_REPARSEPOINTS
                // Traverse reparse points depending on the searchoption specified
                if ((searchDataSubDir.searchOption == SearchOption.AllDirectories) && (0 != (data.dwFileAttributes & Win32Native.FILE_ATTRIBUTE_REPARSE_POINT)))
                    option = SearchOption.TopDirectoryOnly;
#endif
                // Setup search data for the sub directory and push it into the stack
                Directory.SearchData searchDataSubDir = new Directory.SearchData(tempFullPath, tempUserPath, option);

                searchStack.Insert(incr++, searchDataSubDir);
            }
        } while (Win32Native.FindNextFile(hnd, data));
        // We don't care about errors here
    }
    finally
    {
        if (hnd != null)
            hnd.Dispose();
    }
}
/// <summary>
/// Normalizes a user-supplied search pattern, mapping the lone "." to the match-all
/// wildcard and validating anything else.
/// </summary>
private static String NormalizeSearchPattern(String searchPattern)
{
    Contract.Requires(searchPattern != null);

    // Make this corner case more useful, like dir: "." means "everything here".
    if (searchPattern.Equals("."))
        return "*";

    PathInternal.CheckSearchPattern(searchPattern);
    return searchPattern;
}
/// <summary>
/// Extracts the search-pattern portion of <paramref name="fullSearchString"/> that follows
/// the directory prefix <paramref name="fullPathMod"/>.
/// </summary>
private static String GetNormalizedSearchCriteria(String fullSearchString, String fullPathMod)
{
    Contract.Requires(fullSearchString != null);
    Contract.Requires(fullPathMod != null);
    Contract.Requires(fullSearchString.Length >= fullPathMod.Length);

    char trailing = fullPathMod[fullPathMod.Length - 1];
    if (PathInternal.IsDirectorySeparator(trailing))
    {
        // Can happen if the path is C:\temp, in which case GetDirectoryName would return C:\
        return fullSearchString.Substring(fullPathMod.Length);
    }

    // No trailing separator: skip the separator character between prefix and criteria.
    Debug.Assert(fullSearchString.Length > fullPathMod.Length);
    return fullSearchString.Substring(fullPathMod.Length + 1);
}
/// <summary>
/// Joins a directory path and a search pattern into the string handed to FindFirstFile,
/// appending "*" when the combination ends in a separator or volume separator.
/// </summary>
private static String GetFullSearchString(String fullPath, String searchPattern)
{
    Contract.Requires(fullPath != null);
    Contract.Requires(searchPattern != null);

    String combined = Path.Combine(fullPath, searchPattern);

    // If path ends in a trailing slash (\), append a * or we'll get a "Cannot find the file specified" exception
    char last = combined[combined.Length - 1];
    if (PathInternal.IsDirectorySeparator(last) || last == Path.VolumeSeparatorChar)
        combined = combined + "*";

    return combined;
}
}
/// <summary>
/// Strategy for filtering raw find results and projecting the surviving ones into the
/// element type yielded by a file-system enumerator.
/// </summary>
internal abstract class SearchResultHandler<TSource>
{
    /// <summary>Decides whether <paramref name="result"/> should be surfaced to the caller.</summary>
    internal abstract bool IsResultIncluded(SearchResult result);
    /// <summary>Converts an included <paramref name="result"/> into the enumerated element.</summary>
    internal abstract TSource CreateObject(SearchResult result);
}
/// <summary>
/// <see cref="SearchResultHandler{TSource}"/> that yields the user-specified path string of
/// each matching entry, optionally filtering to files and/or directories.
/// </summary>
internal class StringResultHandler : SearchResultHandler<String>
{
    // Both flags are fixed at construction time, so mark them readonly.
    private readonly bool _includeFiles;
    private readonly bool _includeDirs;

    /// <param name="includeFiles">Whether plain files are included in the results.</param>
    /// <param name="includeDirs">Whether directories are included in the results.</param>
    internal StringResultHandler(bool includeFiles, bool includeDirs)
    {
        _includeFiles = includeFiles;
        _includeDirs = includeDirs;
    }

    /// <summary>
    /// Includes the entry when its kind (file vs. directory) matches the flags supplied at
    /// construction. An entry can never be both at once.
    /// </summary>
    internal override bool IsResultIncluded(SearchResult result)
    {
        bool includeFile = _includeFiles && FileSystemEnumerableHelpers.IsFile(result.FindData);
        bool includeDir = _includeDirs && FileSystemEnumerableHelpers.IsDir(result.FindData);
        Debug.Assert(!(includeFile && includeDir), result.FindData.cFileName + ": current item can't be both file and dir!");
        return (includeFile || includeDir);
    }

    /// <summary>Projects a search result to the path string returned to the caller.</summary>
    internal override String CreateObject(SearchResult result)
    {
        return result.UserPath;
    }
}
/// <summary>
/// Immutable triple describing one directory-enumeration hit: the fully-qualified path,
/// the path as the user specified it, and the raw Win32 find record.
/// </summary>
internal sealed class SearchResult
{
    // All three fields are assigned exactly once, in the constructor, so they are readonly.
    private readonly String fullPath;   // fully-qualified path
    private readonly String userPath;   // user-specified path
    private readonly Win32Native.WIN32_FIND_DATA findData;

    internal SearchResult(String fullPath, String userPath, Win32Native.WIN32_FIND_DATA findData)
    {
        Contract.Requires(fullPath != null);
        Contract.Requires(userPath != null);
        this.fullPath = fullPath;
        this.userPath = userPath;
        this.findData = findData;
    }

    /// <summary>Fully-qualified path of the entry.</summary>
    internal String FullPath
    {
        get { return fullPath; }
    }

    /// <summary>Path of the entry as derived from the user-supplied search path.</summary>
    internal String UserPath
    {
        get { return userPath; }
    }

    /// <summary>Raw Win32 find data (attributes, file name, etc.) for the entry.</summary>
    internal Win32Native.WIN32_FIND_DATA FindData
    {
        get { return findData; }
    }
}
/// <summary>
/// Classification helpers shared by the file-system enumerators: decide whether a raw
/// Win32 find record represents a real directory or a plain file.
/// </summary>
internal static class FileSystemEnumerableHelpers
{
    internal static bool IsDir(Win32Native.WIN32_FIND_DATA data)
    {
        // Not a directory at all?
        if ((data.dwFileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY) == 0)
            return false;

        // Don't add "." nor ".." - the pseudo-entries for the current and parent dirs.
        String name = data.cFileName;
        return !name.Equals(".") && !name.Equals("..");
    }

    internal static bool IsFile(Win32Native.WIN32_FIND_DATA data)
    {
        // Anything without the directory attribute counts as a file.
        return (data.dwFileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY) == 0;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Dynamic;
using Microsoft.AspNetCore.JsonPatch.Exceptions;
using Xunit;
namespace Microsoft.AspNetCore.JsonPatch.IntegrationTests
{
/// <summary>
/// Integration tests applying <see cref="JsonPatchDocument"/> operations to
/// <see cref="ExpandoObject"/> targets (including nested and typed containers).
/// </summary>
public class ExpandoObjectIntegrationTest
{
    [Fact]
    public void AddNewIntProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = 1;
        var patch = new JsonPatchDocument();
        patch.Add("NewInt", 1);

        // Act
        patch.ApplyTo(target);

        // Assert: the new member exists and the old one is untouched.
        Assert.Equal(1, target.NewInt);
        Assert.Equal(1, target.Test);
    }

    [Fact]
    public void AddNewProperty_ToTypedObject_InExpandoObject()
    {
        // Arrange
        dynamic dynamicValue = new ExpandoObject();
        dynamicValue.StringProperty = "A";
        var target = new NestedObject
        {
            DynamicProperty = dynamicValue
        };
        var patch = new JsonPatchDocument();
        patch.Add("DynamicProperty/StringProperty", "B");

        // Act
        patch.ApplyTo(target);

        // Assert
        Assert.Equal("B", target.DynamicProperty.StringProperty);
    }

    [Fact]
    public void AddReplaces_ExistingProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.StringProperty = "A";
        var patch = new JsonPatchDocument();
        patch.Add("StringProperty", "B");

        // Act
        patch.ApplyTo(target);

        // Assert: "add" on an existing member overwrites it.
        Assert.Equal("B", target.StringProperty);
    }

    [Fact]
    public void AddReplaces_ExistingProperty_InNestedExpandoObject()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.InBetweenFirst = new ExpandoObject();
        target.InBetweenFirst.InBetweenSecond = new ExpandoObject();
        target.InBetweenFirst.InBetweenSecond.StringProperty = "A";
        var patch = new JsonPatchDocument();
        patch.Add("/InBetweenFirst/InBetweenSecond/StringProperty", "B");

        // Act
        patch.ApplyTo(target);

        // Assert
        Assert.Equal("B", target.InBetweenFirst.InBetweenSecond.StringProperty);
    }

    [Fact]
    public void ShouldNotReplaceProperty_WithDifferentCase()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.StringProperty = "A";
        var patch = new JsonPatchDocument();
        patch.Add("stringproperty", "B");

        // Act
        patch.ApplyTo(target);

        // Assert: member lookup is case-sensitive, so a sibling member is created instead.
        Assert.Equal("A", target.StringProperty);
        Assert.Equal("B", target.stringproperty);
    }

    [Fact]
    public void TestIntegerProperty_IsSuccessful()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = 1;
        var patch = new JsonPatchDocument();
        patch.Test("Test", 1);

        // Act & Assert: a matching "test" op must not throw.
        patch.ApplyTo(target);
    }

    [Fact]
    public void TestEmptyProperty_IsSuccessful()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = "";
        var patch = new JsonPatchDocument();
        patch.Test("Test", "");

        // Act & Assert: a matching "test" op must not throw.
        patch.ApplyTo(target);
    }

    [Fact]
    public void TestValueAgainstEmptyProperty_ThrowsJsonPatchException_IsSuccessful()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = "";
        var patch = new JsonPatchDocument();
        patch.Test("Test", "TestValue");

        // Act
        var ex = Assert.Throws<JsonPatchException>(() =>
        {
            patch.ApplyTo(target);
        });

        // Assert
        Assert.Equal(
            "The current value '' at path 'Test' is not equal to the test value 'TestValue'.",
            ex.Message);
    }

    [Fact]
    public void TestStringProperty_ThrowsJsonPatchException_IfTestFails()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = "Value";
        var patch = new JsonPatchDocument();
        patch.Test("Test", "TestValue");

        // Act
        var ex = Assert.Throws<JsonPatchException>(() =>
        {
            patch.ApplyTo(target);
        });

        // Assert
        Assert.Equal(
            "The current value 'Value' at path 'Test' is not equal to the test value 'TestValue'.",
            ex.Message);
    }

    [Fact]
    public void CopyStringProperty_ToAnotherStringProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.StringProperty = "A";
        target.AnotherStringProperty = "B";
        var patch = new JsonPatchDocument();
        patch.Copy("StringProperty", "AnotherStringProperty");

        // Act
        patch.ApplyTo(target);

        // Assert
        Assert.Equal("A", target.AnotherStringProperty);
    }

    [Fact]
    public void CopyNullStringProperty_ToAnotherStringProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.StringProperty = null;
        target.AnotherStringProperty = "B";
        var patch = new JsonPatchDocument();
        patch.Copy("StringProperty", "AnotherStringProperty");

        // Act
        patch.ApplyTo(target);

        // Assert: copying a null value overwrites the destination with null.
        Assert.Null(target.AnotherStringProperty);
    }

    [Fact]
    public void MoveIntegerValue_ToAnotherIntegerProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.IntegerValue = 100;
        target.AnotherIntegerValue = 200;
        var patch = new JsonPatchDocument();
        patch.Move("IntegerValue", "AnotherIntegerValue");

        // Act
        patch.ApplyTo(target);
        Assert.Equal(100, target.AnotherIntegerValue);
        var dictionary = (IDictionary<string, object>)target;
        dictionary.TryGetValue("IntegerValue", out var movedFrom);

        // Assert: "move" removes the source member.
        Assert.Null(movedFrom);
    }

    [Fact]
    public void Move_ToNonExistingProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.StringProperty = "A";
        var patch = new JsonPatchDocument();
        patch.Move("StringProperty", "AnotherStringProperty");

        // Act
        patch.ApplyTo(target);
        Assert.Equal("A", target.AnotherStringProperty);
        var dictionary = (IDictionary<string, object>)target;
        dictionary.TryGetValue("StringProperty", out var movedFrom);

        // Assert: "move" removes the source member.
        Assert.Null(movedFrom);
    }

    [Fact]
    public void RemoveProperty_ShouldFail_IfItDoesntExist()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = 1;
        var patch = new JsonPatchDocument();
        patch.Remove("NonExisting");

        // Act
        var ex = Assert.Throws<JsonPatchException>(() =>
        {
            patch.ApplyTo(target);
        });

        // Assert
        Assert.Equal("The target location specified by path segment 'NonExisting' was not found.", ex.Message);
    }

    [Fact]
    public void RemoveStringProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = 1;
        var patch = new JsonPatchDocument();
        patch.Remove("Test");

        // Act
        patch.ApplyTo(target);
        var dictionary = (IDictionary<string, object>)target;
        dictionary.TryGetValue("Test", out var removed);

        // Assert
        Assert.Null(removed);
    }

    [Fact]
    public void RemoveProperty_MixedCase_ThrowsPathNotFoundException()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = 1;
        var patch = new JsonPatchDocument();
        patch.Remove("test");

        // Act
        var ex = Assert.Throws<JsonPatchException>(() =>
        {
            patch.ApplyTo(target);
        });

        // Assert: lookup is case-sensitive, so "test" does not match "Test".
        Assert.Equal("The target location specified by path segment 'test' was not found.", ex.Message);
    }

    [Fact]
    public void RemoveNestedProperty()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = new ExpandoObject();
        target.Test.AnotherTest = "A";
        var patch = new JsonPatchDocument();
        patch.Remove("Test");

        // Act
        patch.ApplyTo(target);
        var dictionary = (IDictionary<string, object>)target;
        dictionary.TryGetValue("Test", out var removed);

        // Assert
        Assert.Null(removed);
    }

    [Fact]
    public void RemoveNestedProperty_MixedCase_ThrowsPathNotFoundException()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.Test = new ExpandoObject();
        target.Test.AnotherTest = "A";
        var patch = new JsonPatchDocument();
        patch.Remove("test");

        // Act
        var ex = Assert.Throws<JsonPatchException>(() =>
        {
            patch.ApplyTo(target);
        });

        // Assert: lookup is case-sensitive, so "test" does not match "Test".
        Assert.Equal("The target location specified by path segment 'test' was not found.", ex.Message);
    }

    [Fact]
    public void ReplaceGuid()
    {
        // Arrange
        dynamic target = new ExpandoObject();
        target.GuidValue = Guid.NewGuid();
        var replacement = Guid.NewGuid();
        var patch = new JsonPatchDocument();
        patch.Replace("GuidValue", replacement);

        // Act
        patch.ApplyTo(target);

        // Assert
        Assert.Equal(replacement, target.GuidValue);
    }
}
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
// This source file is machine generated. Please do not change the code manually.
using System;
using System.Collections.Generic;
using System.IO.Packaging;
using DocumentFormat.OpenXml.Packaging;
using DocumentFormat.OpenXml;
using DocumentFormat.OpenXml.Drawing;
using DocumentFormat.OpenXml.Drawing.ChartDrawing;
using DocumentFormat.OpenXml.Drawing.Charts;
namespace DocumentFormat.OpenXml.Office2010.Drawing.Charts
{
/// <summary>
/// <para>Defines the PivotOptions Class.</para>
///<para>This class is available in Office2010 or above.</para>
/// <para> When the object is serialized out as xml, its qualified name is c14:pivotOptions.</para>
/// </summary>
/// <remarks>
/// The following table lists the possible child types:
/// <list type="bullet">
///<item><description>DropZoneFilter <c14:dropZoneFilter></description></item>
///<item><description>DropZoneCategories <c14:dropZoneCategories></description></item>
///<item><description>DropZoneData <c14:dropZoneData></description></item>
///<item><description>DropZoneSeries <c14:dropZoneSeries></description></item>
///<item><description>DropZonesVisible <c14:dropZonesVisible></description></item>
/// </list>
/// </remarks>
// NOTE(review): machine-generated type (see the file header) - change the generator, not this code.
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[ChildElementInfo(typeof(DropZoneFilter),(FileFormatVersions)6)]
[ChildElementInfo(typeof(DropZoneCategories),(FileFormatVersions)6)]
[ChildElementInfo(typeof(DropZoneData),(FileFormatVersions)6)]
[ChildElementInfo(typeof(DropZoneSeries),(FileFormatVersions)6)]
[ChildElementInfo(typeof(DropZonesVisible),(FileFormatVersions)6)]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class PivotOptions : OpenXmlCompositeElement
{
    // Local (unqualified) element name; the c14 prefix comes from NamespaceId below.
    private const string tagName = "pivotOptions";
    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName
    {
        get { return tagName; }
    }
    // Numeric id of the c14 namespace used in this element's qualified name.
    private const byte tagNsId = 46;
    internal override byte NamespaceId
    {
        get { return tagNsId; }
    }
    internal const int ElementTypeIdConst = 12762;
    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId
    {
        get { return ElementTypeIdConst; }
    }
    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version)
    {
        // 6 is the bitmask of FileFormatVersions flags in which this element is defined
        // (per the class docs: Office2010 and above).
        if((6 & (int)version) > 0)
        {
            return true;
        }
        return false;
    }
    /// <summary>
    /// Initializes a new instance of the PivotOptions class.
    /// </summary>
    public PivotOptions():base(){}
    /// <summary>
    ///Initializes a new instance of the PivotOptions class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public PivotOptions(System.Collections.Generic.IEnumerable<OpenXmlElement> childElements)
        : base(childElements)
    {
    }
    /// <summary>
    /// Initializes a new instance of the PivotOptions class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public PivotOptions(params OpenXmlElement[] childElements) : base(childElements)
    {
    }
    /// <summary>
    /// Initializes a new instance of the PivotOptions class from outer XML.
    /// </summary>
    /// <param name="outerXml">Specifies the outer XML of the element.</param>
    public PivotOptions(string outerXml)
        : base(outerXml)
    {
    }
    // Parse-time factory: maps a (namespace id, local name) pair to the typed child element.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
    internal override OpenXmlElement ElementFactory(byte namespaceId, string name)
    {
        if( 46 == namespaceId && "dropZoneFilter" == name)
            return new DropZoneFilter();
        if( 46 == namespaceId && "dropZoneCategories" == name)
            return new DropZoneCategories();
        if( 46 == namespaceId && "dropZoneData" == name)
            return new DropZoneData();
        if( 46 == namespaceId && "dropZoneSeries" == name)
            return new DropZoneSeries();
        if( 46 == namespaceId && "dropZonesVisible" == name)
            return new DropZonesVisible();
        return null;
    }
    // Order matters: these positions are the sequence indices used by the
    // GetElement/SetElement calls in the typed child properties below.
    private static readonly string[] eleTagNames = { "dropZoneFilter","dropZoneCategories","dropZoneData","dropZoneSeries","dropZonesVisible" };
    private static readonly byte[] eleNamespaceIds = { 46,46,46,46,46 };
    internal override string[] ElementTagNames {
        get{
            return eleTagNames;
        }
    }
    internal override byte[] ElementNamespaceIds {
        get{
            return eleNamespaceIds;
        }
    }
    // OneSequence: children appear exactly in the fixed order given by eleTagNames.
    internal override OpenXmlCompositeType OpenXmlCompositeType
    {
        get {return OpenXmlCompositeType.OneSequence;}
    }
    /// <summary>
    /// <para> DropZoneFilter.</para>
    /// <para> Represents the following element tag in the schema: c14:dropZoneFilter </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public DropZoneFilter DropZoneFilter
    {
        get
        {
            return GetElement<DropZoneFilter>(0);
        }
        set
        {
            SetElement(0, value);
        }
    }
    /// <summary>
    /// <para> DropZoneCategories.</para>
    /// <para> Represents the following element tag in the schema: c14:dropZoneCategories </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public DropZoneCategories DropZoneCategories
    {
        get
        {
            return GetElement<DropZoneCategories>(1);
        }
        set
        {
            SetElement(1, value);
        }
    }
    /// <summary>
    /// <para> DropZoneData.</para>
    /// <para> Represents the following element tag in the schema: c14:dropZoneData </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public DropZoneData DropZoneData
    {
        get
        {
            return GetElement<DropZoneData>(2);
        }
        set
        {
            SetElement(2, value);
        }
    }
    /// <summary>
    /// <para> DropZoneSeries.</para>
    /// <para> Represents the following element tag in the schema: c14:dropZoneSeries </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public DropZoneSeries DropZoneSeries
    {
        get
        {
            return GetElement<DropZoneSeries>(3);
        }
        set
        {
            SetElement(3, value);
        }
    }
    /// <summary>
    /// <para> DropZonesVisible.</para>
    /// <para> Represents the following element tag in the schema: c14:dropZonesVisible </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public DropZonesVisible DropZonesVisible
    {
        get
        {
            return GetElement<DropZonesVisible>(4);
        }
        set
        {
            SetElement(4, value);
        }
    }
    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep)
    {
        return CloneImp<PivotOptions>(deep);
    }
}
/// <summary>
/// <para>Defines the SketchOptions Class.</para>
///<para>This class is available in Office2010 or above.</para>
/// <para> When the object is serialized out as xml, its qualified name is c14:sketchOptions.</para>
/// </summary>
/// <remarks>
/// The following table lists the possible child types:
/// <list type="bullet">
///<item><description>InSketchMode <c14:inSketchMode></description></item>
///<item><description>ShowSketchButton <c14:showSketchBtn></description></item>
/// </list>
/// </remarks>
// NOTE(review): machine-generated type (see the file header) - change the generator, not this code.
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[ChildElementInfo(typeof(InSketchMode),(FileFormatVersions)6)]
[ChildElementInfo(typeof(ShowSketchButton),(FileFormatVersions)6)]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class SketchOptions : OpenXmlCompositeElement
{
    // Local (unqualified) element name; the c14 prefix comes from NamespaceId below.
    private const string tagName = "sketchOptions";
    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName
    {
        get { return tagName; }
    }
    // Numeric id of the c14 namespace used in this element's qualified name.
    private const byte tagNsId = 46;
    internal override byte NamespaceId
    {
        get { return tagNsId; }
    }
    internal const int ElementTypeIdConst = 12763;
    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId
    {
        get { return ElementTypeIdConst; }
    }
    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version)
    {
        // 6 is the bitmask of FileFormatVersions flags in which this element is defined
        // (per the class docs: Office2010 and above).
        if((6 & (int)version) > 0)
        {
            return true;
        }
        return false;
    }
    /// <summary>
    /// Initializes a new instance of the SketchOptions class.
    /// </summary>
    public SketchOptions():base(){}
    /// <summary>
    ///Initializes a new instance of the SketchOptions class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public SketchOptions(System.Collections.Generic.IEnumerable<OpenXmlElement> childElements)
        : base(childElements)
    {
    }
    /// <summary>
    /// Initializes a new instance of the SketchOptions class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public SketchOptions(params OpenXmlElement[] childElements) : base(childElements)
    {
    }
    /// <summary>
    /// Initializes a new instance of the SketchOptions class from outer XML.
    /// </summary>
    /// <param name="outerXml">Specifies the outer XML of the element.</param>
    public SketchOptions(string outerXml)
        : base(outerXml)
    {
    }
    // Parse-time factory: maps a (namespace id, local name) pair to the typed child element.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
    internal override OpenXmlElement ElementFactory(byte namespaceId, string name)
    {
        if( 46 == namespaceId && "inSketchMode" == name)
            return new InSketchMode();
        if( 46 == namespaceId && "showSketchBtn" == name)
            return new ShowSketchButton();
        return null;
    }
    // Order matters: these positions are the sequence indices used by the
    // GetElement/SetElement calls in the typed child properties below.
    private static readonly string[] eleTagNames = { "inSketchMode","showSketchBtn" };
    private static readonly byte[] eleNamespaceIds = { 46,46 };
    internal override string[] ElementTagNames {
        get{
            return eleTagNames;
        }
    }
    internal override byte[] ElementNamespaceIds {
        get{
            return eleNamespaceIds;
        }
    }
    // OneSequence: children appear exactly in the fixed order given by eleTagNames.
    internal override OpenXmlCompositeType OpenXmlCompositeType
    {
        get {return OpenXmlCompositeType.OneSequence;}
    }
    /// <summary>
    /// <para> InSketchMode.</para>
    /// <para> Represents the following element tag in the schema: c14:inSketchMode </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public InSketchMode InSketchMode
    {
        get
        {
            return GetElement<InSketchMode>(0);
        }
        set
        {
            SetElement(0, value);
        }
    }
    /// <summary>
    /// <para> ShowSketchButton.</para>
    /// <para> Represents the following element tag in the schema: c14:showSketchBtn </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public ShowSketchButton ShowSketchButton
    {
        get
        {
            return GetElement<ShowSketchButton>(1);
        }
        set
        {
            SetElement(1, value);
        }
    }
    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep)
    {
        return CloneImp<SketchOptions>(deep);
    }
}
/// <summary>
/// <para>Defines the InvertSolidFillFormat Class.</para>
///<para>This class is available in Office2010 or above.</para>
/// <para> When the object is serialized out as xml, its qualified name is c14:invertSolidFillFmt.</para>
/// </summary>
/// <remarks>
/// The following table lists the possible child types:
/// <list type="bullet">
///<item><description>ShapeProperties <c14:spPr></description></item>
/// </list>
/// </remarks>
// NOTE(review): machine-generated type (see the file header) - change the generator, not this code.
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[ChildElementInfo(typeof(ShapeProperties),(FileFormatVersions)6)]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class InvertSolidFillFormat : OpenXmlCompositeElement
{
    // Local (unqualified) element name; the c14 prefix comes from NamespaceId below.
    private const string tagName = "invertSolidFillFmt";
    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName
    {
        get { return tagName; }
    }
    // Numeric id of the c14 namespace used in this element's qualified name.
    private const byte tagNsId = 46;
    internal override byte NamespaceId
    {
        get { return tagNsId; }
    }
    internal const int ElementTypeIdConst = 12764;
    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId
    {
        get { return ElementTypeIdConst; }
    }
    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version)
    {
        // 6 is the bitmask of FileFormatVersions flags in which this element is defined
        // (per the class docs: Office2010 and above).
        if((6 & (int)version) > 0)
        {
            return true;
        }
        return false;
    }
    /// <summary>
    /// Initializes a new instance of the InvertSolidFillFormat class.
    /// </summary>
    public InvertSolidFillFormat():base(){}
    /// <summary>
    ///Initializes a new instance of the InvertSolidFillFormat class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public InvertSolidFillFormat(System.Collections.Generic.IEnumerable<OpenXmlElement> childElements)
        : base(childElements)
    {
    }
    /// <summary>
    /// Initializes a new instance of the InvertSolidFillFormat class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public InvertSolidFillFormat(params OpenXmlElement[] childElements) : base(childElements)
    {
    }
    /// <summary>
    /// Initializes a new instance of the InvertSolidFillFormat class from outer XML.
    /// </summary>
    /// <param name="outerXml">Specifies the outer XML of the element.</param>
    public InvertSolidFillFormat(string outerXml)
        : base(outerXml)
    {
    }
    // Parse-time factory: maps a (namespace id, local name) pair to the typed child element.
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
    internal override OpenXmlElement ElementFactory(byte namespaceId, string name)
    {
        if( 46 == namespaceId && "spPr" == name)
            return new ShapeProperties();
        return null;
    }
    // Order matters: these positions are the sequence indices used by the
    // GetElement/SetElement calls in the typed child property below.
    private static readonly string[] eleTagNames = { "spPr" };
    private static readonly byte[] eleNamespaceIds = { 46 };
    internal override string[] ElementTagNames {
        get{
            return eleTagNames;
        }
    }
    internal override byte[] ElementNamespaceIds {
        get{
            return eleNamespaceIds;
        }
    }
    // OneSequence: children appear exactly in the fixed order given by eleTagNames.
    internal override OpenXmlCompositeType OpenXmlCompositeType
    {
        get {return OpenXmlCompositeType.OneSequence;}
    }
    /// <summary>
    /// <para> ShapeProperties.</para>
    /// <para> Represents the following element tag in the schema: c14:spPr </para>
    /// </summary>
    /// <remark>
    /// xmlns:c14 = http://schemas.microsoft.com/office/drawing/2007/8/2/chart
    /// </remark>
    public ShapeProperties ShapeProperties
    {
        get
        {
            return GetElement<ShapeProperties>(0);
        }
        set
        {
            SetElement(0, value);
        }
    }
    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep)
    {
        return CloneImp<InvertSolidFillFormat>(deep);
    }
}
/// <summary>
/// <para>Defines the Style Class.</para>
///<para>This class is available in Office2010 or above.</para>
/// <para> When the object is serialized out as xml, its qualified name is c14:style.</para>
/// </summary>
// NOTE(review): machine-generated type (see the file header) - change the generator, not this code.
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class Style : OpenXmlLeafElement
{
    // Local (unqualified) element name; the c14 prefix comes from NamespaceId below.
    private const string tagName = "style";
    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName
    {
        get { return tagName; }
    }
    // Numeric id of the c14 namespace used in this element's qualified name.
    private const byte tagNsId = 46;
    internal override byte NamespaceId
    {
        get { return tagNsId; }
    }
    internal const int ElementTypeIdConst = 12765;
    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId
    {
        get { return ElementTypeIdConst; }
    }
    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version)
    {
        // 6 is the bitmask of FileFormatVersions flags in which this element is defined
        // (per the class docs: Office2010 and above).
        if((6 & (int)version) > 0)
        {
            return true;
        }
        return false;
    }
    // Order matters: index 0 here is the slot used by the Val property below.
    private static string[] attributeTagNames = { "val" };
    private static byte[] attributeNamespaceIds = { 0 };
    internal override string[] AttributeTagNames {
        get{
            return attributeTagNames;
        }
    }
    internal override byte[] AttributeNamespaceIds {
        get{
            return attributeNamespaceIds;
        }
    }
    /// <summary>
    /// <para> val.</para>
    /// <para>Represents the following attribute in the schema: val </para>
    /// </summary>
    [SchemaAttr(0, "val")]
    public ByteValue Val
    {
        get { return (ByteValue)Attributes[0]; }
        set { Attributes[0] = value; }
    }
    /// <summary>
    /// Initializes a new instance of the Style class.
    /// </summary>
    public Style():base(){}
    // Parse-time factory for the element's attributes: "val" lives in the empty (id 0) namespace.
    internal override OpenXmlSimpleType AttributeFactory(byte namespaceId, string name)
    {
        if( 0 == namespaceId && "val" == name)
            return new ByteValue();
        return base.AttributeFactory(namespaceId, name);
    }
    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep)
    {
        return CloneImp<Style>(deep);
    }
}
/// <summary>
/// <para>Defines the ShapeProperties Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:spPr.</para>
/// </summary>
/// <remarks>
/// The following table lists the possible child types:
/// <list type="bullet">
///<item><description>DocumentFormat.OpenXml.Drawing.Transform2D &lt;a:xfrm&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.CustomGeometry &lt;a:custGeom&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.PresetGeometry &lt;a:prstGeom&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.NoFill &lt;a:noFill&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.SolidFill &lt;a:solidFill&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.GradientFill &lt;a:gradFill&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.BlipFill &lt;a:blipFill&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.PatternFill &lt;a:pattFill&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.GroupFill &lt;a:grpFill&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.Outline &lt;a:ln&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.EffectList &lt;a:effectLst&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.EffectDag &lt;a:effectDag&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.Scene3DType &lt;a:scene3d&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.Shape3DType &lt;a:sp3d&gt;</description></item>
///<item><description>DocumentFormat.OpenXml.Drawing.ShapePropertiesExtensionList &lt;a:extLst&gt;</description></item>
/// </list>
/// </remarks>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.Transform2D))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.CustomGeometry))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.PresetGeometry))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.NoFill))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.SolidFill))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.GradientFill))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.BlipFill))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.PatternFill))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.GroupFill))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.Outline))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.EffectList))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.EffectDag))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.Scene3DType))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.Shape3DType))]
[ChildElementInfo(typeof(DocumentFormat.OpenXml.Drawing.ShapePropertiesExtensionList))]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class ShapeProperties : OpenXmlCompositeElement
{
    private const string tagName = "spPr";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12766;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    private static string[] attributeTagNames = { "bwMode" };
    private static byte[] attributeNamespaceIds = { 0 };

    internal override string[] AttributeTagNames => attributeTagNames;

    internal override byte[] AttributeNamespaceIds => attributeNamespaceIds;

    /// <summary>
    /// <para> Black and White Mode.</para>
    /// <para>Represents the following attribute in the schema: bwMode </para>
    /// </summary>
    [SchemaAttr(0, "bwMode")]
    public EnumValue<DocumentFormat.OpenXml.Drawing.BlackWhiteModeValues> BlackWhiteMode
    {
        get => (EnumValue<DocumentFormat.OpenXml.Drawing.BlackWhiteModeValues>)Attributes[0];
        set => Attributes[0] = value;
    }

    /// <summary>
    /// Initializes a new instance of the ShapeProperties class.
    /// </summary>
    public ShapeProperties()
    {
    }

    /// <summary>
    /// Initializes a new instance of the ShapeProperties class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public ShapeProperties(System.Collections.Generic.IEnumerable<OpenXmlElement> childElements)
        : base(childElements)
    {
    }

    /// <summary>
    /// Initializes a new instance of the ShapeProperties class with the specified child elements.
    /// </summary>
    /// <param name="childElements">Specifies the child elements.</param>
    public ShapeProperties(params OpenXmlElement[] childElements)
        : base(childElements)
    {
    }

    /// <summary>
    /// Initializes a new instance of the ShapeProperties class from outer XML.
    /// </summary>
    /// <param name="outerXml">Specifies the outer XML of the element.</param>
    public ShapeProperties(string outerXml)
        : base(outerXml)
    {
    }

    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
    internal override OpenXmlElement ElementFactory(byte namespaceId, string name)
    {
        // All legal children of c14:spPr live in the DrawingML main namespace (id 10).
        if (namespaceId != 10)
        {
            return null;
        }
        switch (name)
        {
            case "xfrm": return new DocumentFormat.OpenXml.Drawing.Transform2D();
            case "custGeom": return new DocumentFormat.OpenXml.Drawing.CustomGeometry();
            case "prstGeom": return new DocumentFormat.OpenXml.Drawing.PresetGeometry();
            case "noFill": return new DocumentFormat.OpenXml.Drawing.NoFill();
            case "solidFill": return new DocumentFormat.OpenXml.Drawing.SolidFill();
            case "gradFill": return new DocumentFormat.OpenXml.Drawing.GradientFill();
            case "blipFill": return new DocumentFormat.OpenXml.Drawing.BlipFill();
            case "pattFill": return new DocumentFormat.OpenXml.Drawing.PatternFill();
            case "grpFill": return new DocumentFormat.OpenXml.Drawing.GroupFill();
            case "ln": return new DocumentFormat.OpenXml.Drawing.Outline();
            case "effectLst": return new DocumentFormat.OpenXml.Drawing.EffectList();
            case "effectDag": return new DocumentFormat.OpenXml.Drawing.EffectDag();
            case "scene3d": return new DocumentFormat.OpenXml.Drawing.Scene3DType();
            case "sp3d": return new DocumentFormat.OpenXml.Drawing.Shape3DType();
            case "extLst": return new DocumentFormat.OpenXml.Drawing.ShapePropertiesExtensionList();
            default: return null;
        }
    }

    private static readonly string[] eleTagNames = { "xfrm", "custGeom", "prstGeom", "noFill", "solidFill", "gradFill", "blipFill", "pattFill", "grpFill", "ln", "effectLst", "effectDag", "scene3d", "sp3d", "extLst" };
    private static readonly byte[] eleNamespaceIds = { 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10 };

    internal override string[] ElementTagNames => eleTagNames;

    internal override byte[] ElementNamespaceIds => eleNamespaceIds;

    internal override OpenXmlCompositeType OpenXmlCompositeType => OpenXmlCompositeType.OneSequence;

    /// <summary>
    /// <para> 2D Transform for Individual Objects.</para>
    /// <para> Represents the following element tag in the schema: a:xfrm </para>
    /// </summary>
    /// <remark>
    /// xmlns:a = http://schemas.openxmlformats.org/drawingml/2006/main
    /// </remark>
    public DocumentFormat.OpenXml.Drawing.Transform2D Transform2D
    {
        get => GetElement<DocumentFormat.OpenXml.Drawing.Transform2D>(0);
        set => SetElement(0, value);
    }

    internal override OpenXmlSimpleType AttributeFactory(byte namespaceId, string name)
    {
        if (namespaceId == 0 && name == "bwMode")
        {
            return new EnumValue<DocumentFormat.OpenXml.Drawing.BlackWhiteModeValues>();
        }
        return base.AttributeFactory(namespaceId, name);
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<ShapeProperties>(deep);
}
/// <summary>
/// <para>Defines the DropZoneFilter Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:dropZoneFilter.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class DropZoneFilter : BooleanFalseType
{
    private const string tagName = "dropZoneFilter";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12767;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    /// <summary>
    /// Initializes a new instance of the DropZoneFilter class.
    /// </summary>
    public DropZoneFilter()
    {
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<DropZoneFilter>(deep);
}
/// <summary>
/// <para>Defines the DropZoneCategories Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:dropZoneCategories.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class DropZoneCategories : BooleanFalseType
{
    private const string tagName = "dropZoneCategories";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12768;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    /// <summary>
    /// Initializes a new instance of the DropZoneCategories class.
    /// </summary>
    public DropZoneCategories()
    {
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<DropZoneCategories>(deep);
}
/// <summary>
/// <para>Defines the DropZoneData Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:dropZoneData.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class DropZoneData : BooleanFalseType
{
    private const string tagName = "dropZoneData";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12769;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    /// <summary>
    /// Initializes a new instance of the DropZoneData class.
    /// </summary>
    public DropZoneData()
    {
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<DropZoneData>(deep);
}
/// <summary>
/// <para>Defines the DropZoneSeries Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:dropZoneSeries.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class DropZoneSeries : BooleanFalseType
{
    private const string tagName = "dropZoneSeries";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12770;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    /// <summary>
    /// Initializes a new instance of the DropZoneSeries class.
    /// </summary>
    public DropZoneSeries()
    {
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<DropZoneSeries>(deep);
}
/// <summary>
/// <para>Defines the DropZonesVisible Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:dropZonesVisible.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class DropZonesVisible : BooleanFalseType
{
    private const string tagName = "dropZonesVisible";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12771;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    /// <summary>
    /// Initializes a new instance of the DropZonesVisible class.
    /// </summary>
    public DropZonesVisible()
    {
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<DropZonesVisible>(deep);
}
/// <summary>
/// <para>Defines the InSketchMode Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:inSketchMode.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class InSketchMode : BooleanFalseType
{
    private const string tagName = "inSketchMode";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12772;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    /// <summary>
    /// Initializes a new instance of the InSketchMode class.
    /// </summary>
    public InSketchMode()
    {
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<InSketchMode>(deep);
}
/// <summary>
/// Defines the BooleanFalseType class: the shared base for c14 leaf elements
/// that carry a single optional boolean "val" attribute.
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
public abstract partial class BooleanFalseType : OpenXmlLeafElement
{
    private static string[] attributeTagNames = { "val" };
    private static byte[] attributeNamespaceIds = { 0 };

    internal override string[] AttributeTagNames => attributeTagNames;

    internal override byte[] AttributeNamespaceIds => attributeNamespaceIds;

    /// <summary>
    /// <para> val.</para>
    /// <para>Represents the following attribute in the schema: val </para>
    /// </summary>
    [SchemaAttr(0, "val")]
    public BooleanValue Val
    {
        get => (BooleanValue)Attributes[0];
        set => Attributes[0] = value;
    }

    internal override OpenXmlSimpleType AttributeFactory(byte namespaceId, string name)
    {
        if (namespaceId == 0 && name == "val")
        {
            return new BooleanValue();
        }
        return base.AttributeFactory(namespaceId, name);
    }

    /// <summary>
    /// Initializes a new instance of the BooleanFalseType class.
    /// </summary>
    protected BooleanFalseType()
    {
    }
}
/// <summary>
/// <para>Defines the ShowSketchButton Class.</para>
/// <para>This class is available in Office2010 or above.</para>
/// <para>When the object is serialized out as xml, its qualified name is c14:showSketchBtn.</para>
/// </summary>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix")]
[System.CodeDom.Compiler.GeneratedCode("DomGen", "2.0")]
[OfficeAvailability(FileFormatVersions.Office2010)]
public partial class ShowSketchButton : OpenXmlLeafElement
{
    private const string tagName = "showSketchBtn";
    private const byte tagNsId = 46;
    internal const int ElementTypeIdConst = 12773;

    /// <summary>
    /// Gets the local name of the element.
    /// </summary>
    public override string LocalName => tagName;

    internal override byte NamespaceId => tagNsId;

    /// <summary>
    /// Gets the type ID of the element.
    /// </summary>
    internal override int ElementTypeId => ElementTypeIdConst;

    /// <summary>
    /// Whether this element is available in a specific version of Office Application.
    /// </summary>
    /// <param name="version">The Office file format version.</param>
    /// <returns>Returns true if the element is defined in the specified version.</returns>
    internal override bool IsInVersion(FileFormatVersions version) => (6 & (int)version) > 0;

    private static string[] attributeTagNames = { "val" };
    private static byte[] attributeNamespaceIds = { 0 };

    internal override string[] AttributeTagNames => attributeTagNames;

    internal override byte[] AttributeNamespaceIds => attributeNamespaceIds;

    /// <summary>
    /// <para> val.</para>
    /// <para>Represents the following attribute in the schema: val </para>
    /// </summary>
    [SchemaAttr(0, "val")]
    public BooleanValue Val
    {
        get => (BooleanValue)Attributes[0];
        set => Attributes[0] = value;
    }

    /// <summary>
    /// Initializes a new instance of the ShowSketchButton class.
    /// </summary>
    public ShowSketchButton()
    {
    }

    internal override OpenXmlSimpleType AttributeFactory(byte namespaceId, string name)
    {
        if (namespaceId == 0 && name == "val")
        {
            return new BooleanValue();
        }
        return base.AttributeFactory(namespaceId, name);
    }

    /// <summary>
    /// Creates a duplicate of this node.
    /// </summary>
    /// <param name="deep">True to recursively clone the subtree under the specified node; false to clone only the node itself. </param>
    /// <returns>Returns the cloned node. </returns>
    public override OpenXmlElement CloneNode(bool deep) => CloneImp<ShowSketchButton>(deep);
}
}
| |
#define USE_TRACING
#define DEBUG
using System;
using System.Collections;
using NUnit.Framework;
namespace Google.GData.Client.UnitTests
{
// dummy class to make porting autogenerated tests easier
/// <summary>
/// Empty stand-in type: autogenerated tests reference a TestContext, and
/// declaring this dummy class lets them be ported to NUnit without change.
/// It intentionally carries no state or behavior.
/// </summary>
public class TestContext
{
}
[TestFixture]
public class CoreTestSuite : BaseTestClass
{
//////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] queries the remote feed, saves it, loads it and compares it</summary>
/// <param name="uriToQuery">the host to access, including query parameters</param>
//////////////////////////////////////////////////////////////////////
/// <summary>
/// Queries the given remote feed, dumps it to a log file, reloads that dump
/// through a second service, and verifies the two feeds match entry by entry.
/// </summary>
/// <param name="uriToQuery">the host to access, including query parameters</param>
internal void RemoteHostQueryAndCompare(Uri uriToQuery)
{
    Tracing.TraceMsg("Entering RemoteHostQueryAndCompare");

    // Fetch the remote feed and persist it to disk.
    FeedQuery remoteQuery = new FeedQuery();
    remoteQuery.Uri = uriToQuery;
    Service remoteService = new Service();
    remoteService.RequestFactory = factory;
    AtomFeed remoteFeed = remoteService.Query(remoteQuery);
    ObjectModelHelper.DumpAtomObject(remoteFeed, CreateDumpFileName("QueryRemoteHost"));
    int iCount = remoteFeed.Entries.Count;

    // Reload the persisted dump through a fresh service instance.
    Service localService = new Service();
    FeedQuery localQuery = new FeedQuery();
    localQuery.Uri = new Uri(CreateUriLogFileName("QueryRemoteHost"));
    AtomFeed loadedFeed = localService.Query(localQuery);

    Assert.AreEqual(iCount, loadedFeed.Entries.Count, "loaded feed has different number of entries");

    Tracing.TraceInfo("Comparing feed objects as source");
    Assert.IsTrue(ObjectModelHelper.IsSourceIdentical(remoteFeed, loadedFeed), "Feeds are not identical");

    if (loadedFeed.Entries != null)
    {
        Tracing.TraceInfo("Comparing Entries");
        for (int i = 0; i < loadedFeed.Entries.Count; i++)
        {
            AtomEntry loadedEntry = loadedFeed.Entries[i];
            Assert.IsTrue(ObjectModelHelper.IsEntryIdentical(remoteFeed.Entries[i], loadedEntry),
                "Entries are not identical");
        }
    }
    Tracing.TraceInfo("Leaving RemoteHostQueryAndCompare for : " + uriToQuery.AbsoluteUri);
}
/////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
/// <summary>[Test] creates a new entry, saves and loads it back</summary>
//////////////////////////////////////////////////////////////////////
[Test]
/// <summary>[Test] creates an entry with empty text content, saves it and loads it back</summary>
public void CreateEmptyEntrySaveAndLoad()
{
    Tracing.TraceMsg("Entering Create/Save/Load test");

    // Build an entry whose content is deliberately empty text, then dump it.
    AtomEntry sourceEntry = ObjectModelHelper.CreateAtomEntry(1);
    sourceEntry.Content.Type = "text";
    sourceEntry.Content.Content = "";
    ObjectModelHelper.DumpAtomObject(sourceEntry, CreateDumpFileName("CreateEmptyEntrySaveAndLoad"));

    // Reload the dump via a service query.
    Service service = new Service();
    service.RequestFactory = factory;
    FeedQuery query = new FeedQuery();
    query.Uri = new Uri(CreateUriLogFileName("CreateEmptyEntrySaveAndLoad"));
    AtomFeed loadedFeed = service.Query(query);

    // The reloaded feed should contain exactly the one entry we saved.
    Assert.IsTrue(loadedFeed.Entries != null, "Feed.Entries should not be null");
    Assert.AreEqual(1, loadedFeed.Entries.Count, "Feed.Entries should have ONE element");
    if (loadedFeed.Entries != null)
    {
        AtomEntry loadedEntry = loadedFeed.Entries[0];
        Assert.IsTrue(ObjectModelHelper.IsEntryIdentical(sourceEntry, loadedEntry), "Entries should be identical");
    }
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] creates a new entry, saves and loads it back</summary>
//////////////////////////////////////////////////////////////////////
[Test]
/// <summary>[Test] creates a new entry, saves and loads it back</summary>
public void CreateEntrySaveAndLoad()
{
    Tracing.TraceMsg("Entering Create/Save/Load test");

    // Generate a canonical entry and dump it to disk.
    AtomEntry sourceEntry = ObjectModelHelper.CreateAtomEntry(1);
    ObjectModelHelper.DumpAtomObject(sourceEntry, CreateDumpFileName("CreateEntrySaveAndLoad"));

    // Reload the dump via a service query.
    Service service = new Service();
    service.RequestFactory = factory;
    FeedQuery query = new FeedQuery();
    query.Uri = new Uri(CreateUriLogFileName("CreateEntrySaveAndLoad"));
    AtomFeed loadedFeed = service.Query(query);

    // The reloaded feed should contain exactly the one entry we saved.
    Assert.IsTrue(loadedFeed.Entries != null, "Feed.Entries should not be null");
    Assert.AreEqual(1, loadedFeed.Entries.Count, "Feed.Entries should have ONE element");
    if (loadedFeed.Entries != null)
    {
        AtomEntry loadedEntry = loadedFeed.Entries[0];
        Assert.IsTrue(ObjectModelHelper.IsEntryIdentical(sourceEntry, loadedEntry));
    }
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] creates a feed object from scratch</summary>
//////////////////////////////////////////////////////////////////////
[Test]
/// <summary>
/// [Test] creates a feed object from scratch with iIterations generated
/// entries, persists it, reloads it and verifies each entry round-trips.
/// </summary>
public void CreateFeed()
{
    Tracing.TraceInfo("Entering Create Feed Test");

    // Build a feed with iIterations generated entries.
    AtomFeed feed = new AtomFeed(new Uri("http://dummy"), null);
    AtomEntry entry;
    for (int i = 1; i <= iIterations; i++)
    {
        entry = ObjectModelHelper.CreateAtomEntry(i);
        feed.Entries.Add(entry);
    }
    Tracing.TraceInfo("now persisting feed");
    ObjectModelHelper.DumpAtomObject(feed, CreateDumpFileName("CreateFeed"));

    // FIX: trace message previously read "now loadiing feed from disk" (typo).
    Tracing.TraceInfo("now loading feed from disk");
    Service service = new Service();
    service.RequestFactory = factory;
    FeedQuery query = new FeedQuery();
    query.Uri = new Uri(CreateUriLogFileName("CreateFeed"));
    feed = service.Query(query);

    Assert.IsTrue(feed.Entries != null, "Feed.Entries should not be null");
    // FIX: failure message previously hard-coded "50 elements" even though the
    // expected count is iIterations; report the actual expected count instead.
    Assert.AreEqual(iIterations, feed.Entries.Count,
        "Feed.Entries should have " + iIterations + " elements");
    if (feed.Entries != null)
    {
        // Regenerate each entry deterministically and compare to the loaded one.
        for (int i = 1; i <= iIterations; i++)
        {
            entry = ObjectModelHelper.CreateAtomEntry(i);
            AtomEntry theOtherEntry = feed.Entries[i - 1];
            Assert.IsTrue(ObjectModelHelper.IsEntryIdentical(entry, theOtherEntry));
        }
    }
}
/////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
/// <summary>[Test] creates a new feed, saves and loads it back</summary>
//////////////////////////////////////////////////////////////////////
[Test]
/// <summary>[Test] creates a new feed with all navigation links set, saves and loads it back</summary>
public void CreateFeedObjectSaveAndLoad()
{
    Tracing.TraceMsg("Entering CreateFeedObjectSaveAndLoad test");

    // Build a feed and populate every navigation link before dumping it.
    Service service = new Service();
    AtomFeed feed = new AtomFeed(new Uri("http://www.atomfeed.com/"), service);
    feed.Self = "http://www.atomfeed.com/self";
    feed.Feed = "http://www.atomfeed.com/feed";
    feed.NextChunk = "http://www.atomfeed.com/next";
    feed.PrevChunk = "http://www.atomfeed.com/prev";
    feed.Post = "http://www.atomfeed.com/post";
    ObjectModelHelper.DumpAtomObject(feed, CreateDumpFileName("CreateFeedSaveAndLoad"));

    // Reload the dump through the same service and verify every link survived.
    service.RequestFactory = factory;
    FeedQuery feedQuery = new FeedQuery();
    feedQuery.Uri = new Uri(CreateUriLogFileName("CreateFeedSaveAndLoad"));
    feed = service.Query(feedQuery);

    Assert.AreEqual("http://www.atomfeed.com/self", feed.Self, "Feed.Self is not correct");
    Assert.AreEqual("http://www.atomfeed.com/feed", feed.Feed, "Feed.Feed is not correct");
    Assert.AreEqual("http://www.atomfeed.com/next", feed.NextChunk, "Feed.Next is not correct");
    Assert.AreEqual("http://www.atomfeed.com/prev", feed.PrevChunk, "Feed.Prev is not correct");
    Assert.AreEqual("http://www.atomfeed.com/post", feed.Post, "Feed.Post is not correct");
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] creates a new entry, saves and loads it back
/// uses HTML content to test the persistence/encoding code
/// </summary>
//////////////////////////////////////////////////////////////////////
[Test]
/// <summary>
/// [Test] creates a new entry, saves and loads it back;
/// uses HTML content to exercise the persistence/encoding code.
/// </summary>
public void CreateHTMLEntrySaveAndLoad()
{
    Tracing.TraceMsg("Entering CreateHTMLEntrySaveAndLoad");

    // Build an entry whose content is decoded HTML, then dump it.
    AtomEntry sourceEntry = ObjectModelHelper.CreateAtomEntry(1);
    sourceEntry.Content.Type = "html";
    sourceEntry.Content.Content = HttpUtility.HtmlDecode("&lt;b&gt;this is a &lt;test&gt;&lt;/b&gt;");
    Tracing.TraceMsg("Content: " + sourceEntry.Content.Content);
    ObjectModelHelper.DumpAtomObject(sourceEntry, CreateDumpFileName("CreateHTMLEntrySaveAndLoad"));

    // Reload the dump via a service query.
    Service service = new Service();
    service.RequestFactory = factory;
    FeedQuery query = new FeedQuery();
    query.Uri = new Uri(CreateUriLogFileName("CreateHTMLEntrySaveAndLoad"));
    AtomFeed loadedFeed = service.Query(query);

    // The reloaded feed should contain exactly the one entry we saved.
    Assert.IsTrue(loadedFeed.Entries != null, "Feed.Entries should not be null");
    Assert.AreEqual(1, loadedFeed.Entries.Count, "Feed.Entries should have ONE element");
    if (loadedFeed.Entries != null)
    {
        AtomEntry loadedEntry = loadedFeed.Entries[0];
        Tracing.TraceMsg("Loaded Content: " + loadedEntry.Content.Content);
        Assert.IsTrue(ObjectModelHelper.IsEntryIdentical(sourceEntry, loadedEntry));
    }
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] creates a new entry, saves and loads it back
/// uses XHTML content to test the persistence/encoding code
/// </summary>
//////////////////////////////////////////////////////////////////////
[Test]
/// <summary>
/// [Test] creates a new entry, saves and loads it back;
/// uses XHTML content to exercise the persistence/encoding code.
/// </summary>
public void CreateXHTMLEntrySaveAndLoad()
{
    Tracing.TraceMsg("Entering CreateXHTMLEntrySaveAndLoad");

    // Build an entry whose content is a decoded XHTML div, then dump it.
    AtomEntry sourceEntry = ObjectModelHelper.CreateAtomEntry(1);
    sourceEntry.Content.Type = "xhtml";
    sourceEntry.Content.Content =
        HttpUtility.HtmlDecode("&lt;div xmlns=\"http://www.w3.org/2005/Atom\"&gt;&lt;b&gt;this is a test&lt;/b&gt;&lt;/div&gt;");
    Tracing.TraceMsg("Content: " + sourceEntry.Content.Content);
    ObjectModelHelper.DumpAtomObject(sourceEntry, CreateDumpFileName("CreateXHTMLEntrySaveAndLoad"));
    Tracing.TraceMsg("saved in... CreateXHTMLEntrySaveAndLoad");

    // Reload the dump via a service query.
    Service service = new Service();
    service.RequestFactory = factory;
    FeedQuery query = new FeedQuery();
    query.Uri = new Uri(CreateUriLogFileName("CreateXHTMLEntrySaveAndLoad"));
    AtomFeed loadedFeed = service.Query(query);
    Tracing.TraceMsg("loaded in... CreateXHTMLEntrySaveAndLoad");

    // The reloaded feed should contain exactly the one entry we saved,
    // with the xhtml content type and non-empty content intact.
    Assert.IsTrue(loadedFeed.Entries != null, "Feed.Entries should not be null");
    Assert.AreEqual(1, loadedFeed.Entries.Count, "Feed.Entries should have ONE element");
    if (loadedFeed.Entries != null)
    {
        Tracing.TraceMsg("checking entries... CreateXHTMLEntrySaveAndLoad");
        AtomEntry loadedEntry = loadedFeed.Entries[0];
        Assert.IsTrue(loadedEntry.Content != null, "the entry should have a content element");
        Assert.IsTrue(loadedEntry.Content.Type.Equals("xhtml"),
            "the entry should have a content element of type xhtml");
        Assert.IsTrue(loadedEntry.Content.Content != null,
            "the entry should have a content element that is not empty");
        Tracing.TraceMsg("Loaded Content: " + loadedEntry.Content.Content);
        Assert.IsTrue(ObjectModelHelper.IsEntryIdentical(sourceEntry, loadedEntry));
        Tracing.TraceMsg("done comparing entries... CreateXHTMLEntrySaveAndLoad");
    }
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>queries the remote host from the test configuration (if set) and dumps the resulting extension feed</summary>
//////////////////////////////////////////////////////////////////////
/// <summary>Queries the host configured in strRemoteHost (when present) with the
/// extension-element parsing callbacks attached, then dumps the returned feed.</summary>
[Test]
public void DefaultHostExtensionTest()
{
    Tracing.TraceMsg("Entering DefaultHostExtensionTest");

    // Only run when an external host was configured for this test run.
    if (strRemoteHost != null)
    {
        FeedQuery query = new FeedQuery();
        Service service = new Service();
        // Hook the parsing events so extension elements get exercised.
        service.NewAtomEntry += OnParsedNewEntry;
        service.NewExtensionElement += OnNewExtensionElement;
        service.RequestFactory = new GDataLoggingRequestFactory(ServiceName, ApplicationName);
        query.Uri = new Uri(strRemoteHost);
        AtomFeed returnFeed = service.Query(query);
        ObjectModelHelper.DumpAtomObject(returnFeed, CreateDumpFileName("ExtensionFeed"));
    }
}
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] public QueryObjectTest()</summary>
//////////////////////////////////////////////////////////////////////
/// <summary>Builds a FeedQuery carrying a mix of included/excluded categories and
/// verifies the generated query URI survives a parse round trip, both in path
/// form and in category-as-parameter form.</summary>
[Test]
public void QueryObjectTest()
{
    Tracing.TraceInfo("Entering QueryObject Test");

    FeedQuery query = new FeedQuery();
    query.Uri = new Uri(defaultHost);

    // Included category with a scheme.
    QueryCategory includedCategory =
        new QueryCategory(new AtomCategory("Test", new AtomUri("urn:test.com")));
    query.Categories.Add(includedCategory);

    // Excluded category, OR-joined.
    QueryCategory excludedOrCategory =
        new QueryCategory(new AtomCategory("TestNotAndOr", new AtomUri("urn:test.com")));
    excludedOrCategory.Operator = QueryCategoryOperator.OR;
    excludedOrCategory.Excluded = true;
    query.Categories.Add(excludedOrCategory);

    // Included category with an empty scheme.
    QueryCategory emptySchemeCategory =
        new QueryCategory(new AtomCategory("ANDTHISONE", new AtomUri("")));
    query.Categories.Add(emptySchemeCategory);

    // Excluded OR category with no scheme at all.
    QueryCategory schemelessCategory =
        new QueryCategory(new AtomCategory("AnotherOrWithoutCategory"));
    schemelessCategory.Operator = QueryCategoryOperator.OR;
    schemelessCategory.Excluded = true;
    query.Categories.Add(schemelessCategory);

    query.Query = "Hospital";
    query.NumberToRetrieve = 20;
    Tracing.TraceInfo("query: " + query.Uri);

    // Round trip 1: categories encoded in the URI path.
    Uri generatedUri = query.Uri;
    Tracing.TraceInfo("Uri: query= " + generatedUri.Query);
    query.Uri = generatedUri;
    Tracing.TraceInfo("Parsed Query URI: " + query.Uri);
    Assert.IsTrue(generatedUri.AbsolutePath.Equals(query.Uri.AbsolutePath),
        "both query URIs should be identical, uri: " + generatedUri.AbsolutePath + " compared to query: " +
        query.Uri.AbsolutePath);

    // Round trip 2: categories encoded as URI parameters.
    query.CategoryQueriesAsParameter = true;
    generatedUri = query.Uri;
    Tracing.TraceInfo("Uri: query= " + generatedUri.Query);
    query.Uri = generatedUri;
    Tracing.TraceInfo("Parsed Query URI: " + query.Uri.AbsoluteUri);
    Assert.IsTrue(generatedUri.AbsolutePath.Equals(query.Uri.AbsolutePath),
        "both query URIs should be identical, uri: " + generatedUri.AbsolutePath + " compared to query: " +
        query.Uri.AbsolutePath);
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] Creates a feedquery and checks if the SSL translation works</summary>
//////////////////////////////////////////////////////////////////////
/// <summary>Verifies that toggling FeedQuery.UseSSL rewrites the URI scheme
/// between http and https, regardless of which scheme the query started with.</summary>
[Test]
public void QuerySSLTest()
{
    Tracing.TraceMsg("Entering QuerySSLTest test");

    // Start from a plain http URI and flip SSL on and off again.
    FeedQuery feedQuery = new FeedQuery();
    feedQuery.Uri = new Uri("http://www.google.com/");
    Assert.AreEqual("http://www.google.com/", feedQuery.Uri.ToString(), "both uris should be http now");
    feedQuery.UseSSL = true;
    Assert.AreEqual("https://www.google.com/", feedQuery.Uri.ToString(), "both uris should be https now");
    feedQuery.UseSSL = false;
    Assert.AreEqual("http://www.google.com/", feedQuery.Uri.ToString(), "both uris should be http now");

    // now construct the other way round
    feedQuery = new FeedQuery();
    feedQuery.Uri = new Uri("https://www.google.com/");
    Assert.IsTrue(feedQuery.UseSSL, "Use SSL should be true due to detection of the https string");
    Assert.AreEqual("https://www.google.com/", feedQuery.Uri.ToString(), "both uris should be https now");
    feedQuery.UseSSL = false;
    Assert.AreEqual("http://www.google.com/", feedQuery.Uri.ToString(), "both uris should be http now");
    feedQuery.UseSSL = true;
    Assert.AreEqual("https://www.google.com/", feedQuery.Uri.ToString(), "both uris should be https now");
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>[Test] walks over the list of remotehosts out of the
/// unitTestExternalHosts
/// add key="Host1" value="http://www.franklinmint.fm/2005/09/26/test_entry2.xml"
/// section in the config file and queries and compares the object model
/// </summary>
//////////////////////////////////////////////////////////////////////
/// <summary>Walks the unitTestExternalHosts configuration section and, for every
/// configured host URI, queries it and compares the resulting object model.
/// Repeats the whole sweep iIterations times.</summary>
[Test]
public void RemoteHostQueryTest()
{
    Tracing.TraceInfo("Entering RemoteHostQueryTest()");

    // Nothing to do when no external hosts are configured.
    if (externalHosts == null)
    {
        return;
    }

    for (int pass = 0; pass < iIterations; pass++)
    {
        Tracing.TraceInfo("Having a dictionary RemoteHostQueryTest()");
        foreach (DictionaryEntry hostEntry in externalHosts)
        {
            Tracing.TraceInfo("Using DictionaryEntry for external Query: " + hostEntry.Value);
            Uri remoteUri = new Uri((string) hostEntry.Value);
            RemoteHostQueryAndCompare(remoteUri);
        }
    }
}
//////////////////////////////////////////////////////////////////////
/// <summary>checks if the feedquery object is stable</summary>
//////////////////////////////////////////////////////////////////////
/// <summary>Checks that generating a FeedQuery URI without a configured base host
/// fails, at worst, with a UriFormatException and never with any other exception.</summary>
[Test]
public void TestFeedQuery()
{
    // BUGFIX: this trace previously said "Entering DefaultHostExtensionTest"
    // (copy/paste from another test), which made the trace log misleading.
    Tracing.TraceMsg("Entering TestFeedQuery");
    try
    {
        FeedQuery query = new FeedQuery();
        query.NumberToRetrieve = 20;
        // Reading Uri forces query-string generation even though no base URI was set.
        Uri u = query.Uri;
    }
    catch (UriFormatException)
    {
        // An unset/invalid base URI is an acceptable outcome here; ignore it.
    }
    catch (Exception)
    {
        Assert.Fail("That should not have crashed");
    }
}
/////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////
/// <summary>tests the tokenizer collection</summary>
//////////////////////////////////////////////////////////////////////
/// <summary>Tests the TokenCollection tokenizer: a plain multi-delimiter split on
/// '\n' and '=' versus the "smart" split on '=' that caps tokens per line, so the
/// remainder of each line stays intact.</summary>
[Test]
public void TestTokenCollection()
{
    Tracing.TraceMsg("Entering TestTokenCollection");
    string toTest = "Test=Test?other=whatever\nTest2=Line2?other=whatishere";
    TokenCollection tokens = new TokenCollection(toTest, new[] {'\n', '='});
    TokenCollection tokenSmart = new TokenCollection(toTest, '=', true, 2);

    int iTokens = 0;
    foreach (string token in tokens)
    {
        // Plain split: every '=' and '\n' is a delimiter, so the fourth token
        // is the key of the second line.
        iTokens++;
        if (iTokens == 1)
        {
            Assert.IsTrue(token.Equals("Test"), "The first token should be Test, but it is: " + token);
        }
        if (iTokens == 4)
        {
            Assert.IsTrue(token.Equals("Test2"), "The fourth token should be Test2 but it is: " + token);
        }
    }

    iTokens = 0;
    foreach (string token in tokenSmart)
    {
        // Smart split: at most 2 tokens per line, so everything after a line's
        // first '=' stays together as a single token.
        iTokens++;
        if (iTokens == 1)
        {
            Assert.IsTrue(token.Equals("Test"), "The first smart token should be Test, but it is: " + token);
        }
        if (iTokens == 4)
        {
            // BUGFIX: the failure message previously claimed the expected value
            // was "whatishere", which did not match the value actually asserted.
            Assert.IsTrue(token.Equals("Line2?other=whatishere"),
                "The fourth smart token should be Line2?other=whatishere, but it is: " + token);
        }
    }
}
/////////////////////////////////////////////////////////////////////////////
} /// end of CoreTestSuite
}
| |
/*
* Copyright (C) 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Modified for use with the Chartboost Unity plugin
*/
using System;
using System.Collections;
using System.IO;
using UnityEngine;
using UnityEditor;
namespace ChartboostSDK {
[CustomEditor(typeof(CBSettings))]
public class CBSettingEditor : Editor {

    // Inspector labels; the "[?]" entries carry dashboard-URL tooltips.
    GUIContent iOSAppIdLabel = new GUIContent("App Id [?]:", "Chartboost App Ids can be found at https://dashboard.chartboost.com/app");
    GUIContent iOSAppSecretLabel = new GUIContent("App Signature [?]:", "Chartboost App Signature can be found at https://dashboard.chartboost.com/app");
    GUIContent androidAppIdLabel = new GUIContent("App Id [?]:", "Chartboost App Ids can be found at https://dashboard.chartboost.com/app");
    GUIContent androidAppSecretLabel = new GUIContent("App Signature [?]:", "Chartboost App Signature can be found at https://dashboard.chartboost.com/app");
    GUIContent amazonAppIdLabel = new GUIContent("App Id [?]:", "Chartboost App Ids can be found at https://dashboard.chartboost.com/app");
    GUIContent amazonAppSecretLabel = new GUIContent("App Signature [?]:", "Chartboost App Signature can be found at https://dashboard.chartboost.com/app");
    GUIContent selectorLabel = new GUIContent("Android Platform [?]:", "Select if building for Google Play or Amazon Store");
    GUIContent iOSLabel = new GUIContent("iOS");
    GUIContent androidLabel = new GUIContent("Google Play");
    GUIContent amazonLabel = new GUIContent("Amazon");
    GUIContent enableLoggingLabel = new GUIContent("Enable Logging");
    GUIContent enableLoggingToggle = new GUIContent("isLoggingEnabled");

    // minimum version of the Google Play Services library project
    private long MinGmsCoreVersionCode = 4030530;

    // Dialog button captions and message texts.
    private string sError = "Error";
    private string sOk = "OK";
    private string sCancel = "Cancel";
    private string sSuccess = "Success";
    private string sWarning = "Warning";
    private string sSdkNotFound = "Android SDK Not found";
    private string sSdkNotFoundBlurb = "The Android SDK path was not found. " +
        "Please configure it in the Unity preferences window (under External Tools).";
    private string sLibProjNotFound = "Google Play Services Library Project Not Found";
    // BUGFIX: added the missing word "in" ("could not be found in your SDK installation").
    private string sLibProjNotFoundBlurb = "Google Play Services library project " +
        "could not be found in your SDK installation. Make sure it is installed (open " +
        "the SDK manager and go to Extras, and select Google Play Services).";
    private string sLibProjVerNotFound = "The version of your copy of the Google Play " +
        "Services Library Project could not be determined. Please make sure it is " +
        "at least version {0}. Continue?";
    private string sLibProjVerTooOld = "Your copy of the Google Play " +
        "Services Library Project is out of date. Please launch the Android SDK manager " +
        "and upgrade your Google Play Services bundle to the latest version (your version: " +
        "{0}; required version: {1}). Proceeding may cause problems. Proceed anyway?";
    private string sSetupComplete = "Chartboost configured successfully.";

    // The settings asset currently shown in the inspector.
    private CBSettings instance;

    /// Custom-inspector entry point: caches the inspected CBSettings asset and draws the UI.
    public override void OnInspectorGUI() {
        instance = (CBSettings)target;
        SetupUI();
    }

    /// Draws app id / app signature fields for iOS, Google Play and Amazon, the
    /// Android platform selector, the logging toggle, and the SDK setup button.
    private void SetupUI() {
        EditorGUILayout.HelpBox("Add the Chartboost App Id and App Secret associated with this game", MessageType.None);

        // iOS
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(iOSLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(iOSAppIdLabel);
        EditorGUILayout.LabelField(iOSAppSecretLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        instance.SetIOSAppId(EditorGUILayout.TextField(instance.iOSAppId));
        instance.SetIOSAppSecret(EditorGUILayout.TextField(instance.iOSAppSecret));
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.Space();
        EditorGUILayout.Space();

        // Android
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(androidLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(androidAppIdLabel);
        EditorGUILayout.LabelField(androidAppSecretLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        instance.SetAndroidAppId(EditorGUILayout.TextField(instance.androidAppId));
        instance.SetAndroidAppSecret(EditorGUILayout.TextField(instance.androidAppSecret));
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.Space();
        EditorGUILayout.Space();

        // Amazon
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(amazonLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(amazonAppIdLabel);
        EditorGUILayout.LabelField(amazonAppSecretLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        instance.SetAmazonAppId(EditorGUILayout.TextField(instance.amazonAppId));
        instance.SetAmazonAppSecret(EditorGUILayout.TextField(instance.amazonAppSecret));
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.Space();
        EditorGUILayout.Space();

        // Android Selector
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(selectorLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        instance.SetAndroidPlatformIndex(EditorGUILayout.Popup("Android Platform", instance.SelectedAndroidPlatformIndex, instance.AndroidPlatformLabels));
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.Space();
        EditorGUILayout.Space();

        // Logging toggle.
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField(enableLoggingLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        CBSettings.enableLogging(EditorGUILayout.Toggle(enableLoggingToggle, instance.isLoggingEnabled));
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.Space();
        EditorGUILayout.Space();

        EditorGUILayout.BeginHorizontal();
        if (GUILayout.Button("Setup Android SDK"))
        {
            DoSetup();
        }
        EditorGUILayout.EndHorizontal();
    }

    /// Copies the Google Play Services library project from the Android SDK into
    /// Assets/Plugins/Android, after validating the SDK path and library version.
    private void DoSetup() {
        string sdkPath = GetAndroidSdkPath();
        string libProjPath = sdkPath +
            FixSlashes("/extras/google/google_play_services/libproject/google-play-services_lib");
        string libProjAM = libProjPath + FixSlashes("/AndroidManifest.xml");
        string libProjDestDir = FixSlashes("Assets/Plugins/Android/google-play-services_lib");

        // check that Android SDK is there
        if (!HasAndroidSdk()) {
            Debug.LogError("Android SDK not found.");
            EditorUtility.DisplayDialog(sSdkNotFound,
                sSdkNotFoundBlurb, sOk);
            return;
        }

        // check that the Google Play Services lib project is there
        if (!System.IO.Directory.Exists(libProjPath) || !System.IO.File.Exists(libProjAM)) {
            Debug.LogError("Google Play Services lib project not found at: " + libProjPath);
            EditorUtility.DisplayDialog(sLibProjNotFound,
                sLibProjNotFoundBlurb, sOk);
            return;
        }

        // check lib project version
        if (!CheckAndWarnAboutGmsCoreVersion(libProjAM)) {
            return;
        }

        // create needed directories
        EnsureDirExists("Assets/Plugins");
        EnsureDirExists("Assets/Plugins/Android");

        // clear out the destination library project
        DeleteDirIfExists(libProjDestDir);

        // Copy Google Play Services library
        FileUtil.CopyFileOrDirectory(libProjPath, libProjDestDir);

        // refresh assets, and we're done
        AssetDatabase.Refresh();
        EditorUtility.DisplayDialog(sSuccess,
            sSetupComplete, sOk);
    }

    /// Extracts android:versionCode from the library project's manifest and warns
    /// (with a continue/cancel dialog) when it is missing or older than
    /// MinGmsCoreVersionCode. Returns true when setup may proceed.
    private bool CheckAndWarnAboutGmsCoreVersion(string libProjAMFile) {
        string manifestContents = ReadFile(libProjAMFile);
        // BUGFIX: ReadFile returns null when the file is missing (after showing its
        // own error dialog); previously this fell through to a NullReferenceException.
        if (manifestContents == null) {
            return false;
        }

        // Naive attribute scan: the value follows the attribute name in the
        // quote-split token stream.
        string[] fields = manifestContents.Split('\"');
        int i;
        long vercode = 0;
        for (i = 0; i < fields.Length; i++) {
            if (fields[i].Contains("android:versionCode") && i + 1 < fields.Length) {
                vercode = System.Convert.ToInt64(fields[i + 1]);
            }
        }
        if (vercode == 0) {
            return EditorUtility.DisplayDialog(sWarning, string.Format(
                sLibProjVerNotFound,
                MinGmsCoreVersionCode),
                sOk, sCancel);
        } else if (vercode < MinGmsCoreVersionCode) {
            return EditorUtility.DisplayDialog(sWarning, string.Format(
                sLibProjVerTooOld, vercode,
                MinGmsCoreVersionCode),
                sOk, sCancel);
        }
        return true;
    }

    /// Creates the directory (platform-native separators) if it does not exist yet.
    private void EnsureDirExists(string dir) {
        dir = dir.Replace("/", System.IO.Path.DirectorySeparatorChar.ToString());
        if (!System.IO.Directory.Exists(dir)) {
            System.IO.Directory.CreateDirectory(dir);
        }
    }

    /// Recursively deletes the directory if it exists.
    private void DeleteDirIfExists(string dir) {
        if (System.IO.Directory.Exists(dir)) {
            System.IO.Directory.Delete(dir, true);
        }
    }

    /// Converts forward slashes to the platform's directory separator.
    private string FixSlashes(string path) {
        return path.Replace("/", System.IO.Path.DirectorySeparatorChar.ToString());
    }

    /// Reads a whole file; shows an error dialog and returns null when it is missing.
    private string ReadFile(string filePath) {
        filePath = FixSlashes(filePath);
        if (!File.Exists(filePath)) {
            EditorUtility.DisplayDialog(sError, "Plugin error: file not found: " + filePath, sOk);
            return null;
        }
        StreamReader sr = new StreamReader(filePath);
        string body = sr.ReadToEnd();
        sr.Close();
        return body;
    }

    /// Returns Unity's configured Android SDK root, without a trailing slash.
    private string GetAndroidSdkPath() {
        string sdkPath = EditorPrefs.GetString("AndroidSdkRoot");
        if (sdkPath != null && (sdkPath.EndsWith("/") || sdkPath.EndsWith("\\"))) {
            sdkPath = sdkPath.Substring(0, sdkPath.Length - 1);
        }
        return sdkPath;
    }

    /// True when an Android SDK path is configured and actually exists on disk.
    private bool HasAndroidSdk() {
        string sdkPath = GetAndroidSdkPath();
        return sdkPath != null && sdkPath.Trim() != "" && System.IO.Directory.Exists(sdkPath);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Microsoft.AspNetCore.Internal;
using Microsoft.AspNetCore.SignalR.Protocol;
using Xunit;
namespace Microsoft.AspNetCore.SignalR.Common.Tests.Internal.Protocol
{
using static HubMessageHelpers;
/// <summary>MessagePack-specific protocol round-trip tests; the shared cases live
/// in <see cref="MessagePackHubProtocolTestBase"/>.</summary>
public class MessagePackHubProtocolTests : MessagePackHubProtocolTestBase
{
    // Concrete protocol instance under test.
    protected override IHubProtocol HubProtocol => new MessagePackHubProtocol();

    [Fact]
    public void SerializerCanSerializeTypesWithNoDefaultCtor()
    {
        // ReadOnlyCollection<int> has no parameterless ctor; writing it must still succeed.
        var result = Write(CompletionMessage.WithResult("0", new List<int> { 42 }.AsReadOnly()));
        AssertMessages(new byte[] { ArrayBytes(5), 3, 0x80, StringBytes(1), (byte)'0', 0x03, ArrayBytes(1), 42 }, result);
    }

    [Theory]
    [InlineData(DateTimeKind.Utc)]
    [InlineData(DateTimeKind.Local)]
    [InlineData(DateTimeKind.Unspecified)]
    public void WriteAndParseDateTimeConvertsToUTC(DateTimeKind dateTimeKind)
    {
        // The messagepack Timestamp format always converts input DateTime to Utc if they are passed as "DateTimeKind.Local" :
        // https://github.com/neuecc/MessagePack-CSharp/pull/520/files#diff-ed970b3daebc708ce49f55d418075979
        var originalDateTime = new DateTime(2018, 4, 9, 0, 0, 0, dateTimeKind);
        var writer = MemoryBufferWriter.Get();

        try
        {
            HubProtocol.WriteMessage(CompletionMessage.WithResult("xyz", originalDateTime), writer);
            var bytes = new ReadOnlySequence<byte>(writer.ToArray());
            HubProtocol.TryParseMessage(ref bytes, new TestBinder(typeof(DateTime)), out var hubMessage);
            var completionMessage = Assert.IsType<CompletionMessage>(hubMessage);
            var resultDateTime = (DateTime)completionMessage.Result;
            // The messagepack Timestamp format specifies that time is stored as seconds since 1970-01-01 UTC
            // so the library has no choice but to store the time as UTC
            // https://github.com/msgpack/msgpack/blob/master/spec.md#timestamp-extension-type
            // So if the original DateTime was a "Local" one, we create a new DateTime equivalent to the original one but converted to Utc
            var expectedUtcDateTime = (originalDateTime.Kind == DateTimeKind.Local) ? originalDateTime.ToUniversalTime() : originalDateTime;

            Assert.Equal(expectedUtcDateTime, resultDateTime);
        }
        finally
        {
            // Writer comes from a pool; always return it.
            MemoryBufferWriter.Return(writer);
        }
    }

    [Fact]
    public void WriteAndParseDateTimeOffset()
    {
        // DateTimeOffset must round-trip with its offset (+10h here) intact.
        var dateTimeOffset = new DateTimeOffset(new DateTime(2018, 4, 9), TimeSpan.FromHours(10));
        var writer = MemoryBufferWriter.Get();

        try
        {
            HubProtocol.WriteMessage(CompletionMessage.WithResult("xyz", dateTimeOffset), writer);
            var bytes = new ReadOnlySequence<byte>(writer.ToArray());
            HubProtocol.TryParseMessage(ref bytes, new TestBinder(typeof(DateTimeOffset)), out var hubMessage);
            var completionMessage = Assert.IsType<CompletionMessage>(hubMessage);
            var resultDateTimeOffset = (DateTimeOffset)completionMessage.Result;
            Assert.Equal(dateTimeOffset, resultDateTimeOffset);
        }
        finally
        {
            MemoryBufferWriter.Return(writer);
        }
    }

    // Feeds the case names into the [MemberData] theories below.
    public static IEnumerable<object[]> TestDataNames
    {
        get
        {
            foreach (var k in TestData.Keys)
            {
                yield return new object[] { k };
            }
        }
    }

    // TestData that requires object serialization
    // (name -> message + its expected base64-encoded MessagePack framing).
    public static IDictionary<string, MessagePackHubProtocolTestBase.ProtocolTestData> TestData => new[]
    {
        // Completion messages
        new ProtocolTestData(
            name: "CompletionWithNoHeadersAndNullResult",
            message: CompletionMessage.WithResult("xyz", payload: null),
            binary: "lQOAo3h5egPA"),
        new ProtocolTestData(
            name: "CompletionWithNoHeadersAndCustomObjectResult",
            message: CompletionMessage.WithResult("xyz", payload: new CustomObject()),
            binary: "lQOAo3h5egOGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgM="),
        new ProtocolTestData(
            name: "CompletionWithNoHeadersAndCustomObjectArrayResult",
            message: CompletionMessage.WithResult("xyz", payload: new[] { new CustomObject(), new CustomObject() }),
            binary: "lQOAo3h5egOShqpTdHJpbmdQcm9wqFNpZ25hbFIhqkRvdWJsZVByb3DLQBkh+1RCzxKnSW50UHJvcCqsRGF0ZVRpbWVQcm9w1v9Y7ByAqE51bGxQcm9wwKtCeXRlQXJyUHJvcMQDAQIDhqpTdHJpbmdQcm9wqFNpZ25hbFIhqkRvdWJsZVByb3DLQBkh+1RCzxKnSW50UHJvcCqsRGF0ZVRpbWVQcm9w1v9Y7ByAqE51bGxQcm9wwKtCeXRlQXJyUHJvcMQDAQID"),
        new ProtocolTestData(
            name: "CompletionWithHeadersAndCustomObjectArrayResult",
            message: AddHeaders(TestHeaders, CompletionMessage.WithResult("xyz", payload: new[] { new CustomObject(), new CustomObject() })),
            binary: "lQODo0Zvb6NCYXKyS2V5V2l0aApOZXcNCkxpbmVzq1N0aWxsIFdvcmtzsVZhbHVlV2l0aE5ld0xpbmVzsEFsc28KV29ya3MNCkZpbmWjeHl6A5KGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgM="),
        new ProtocolTestData(
            name: "CompletionWithNoHeadersAndEnumResult",
            message: CompletionMessage.WithResult("xyz", payload: TestEnum.One),
            binary: "lQOAo3h5egOjT25l"),

        // Invocation messages
        new ProtocolTestData(
            name: "InvocationWithNoHeadersNoIdAndSingleNullArg",
            message: new InvocationMessage("method", new object[] { null }),
            binary: "lgGAwKZtZXRob2SRwJA="),
        new ProtocolTestData(
            name: "InvocationWithNoHeadersNoIdIntAndEnumArgs",
            message: new InvocationMessage("method", new object[] { 42, TestEnum.One }),
            binary: "lgGAwKZtZXRob2SSKqNPbmWQ"),
        new ProtocolTestData(
            name: "InvocationWithNoHeadersNoIdAndCustomObjectArg",
            message: new InvocationMessage("method", new object[] { 42, "string", new CustomObject() }),
            binary: "lgGAwKZtZXRob2STKqZzdHJpbmeGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOQ"),
        new ProtocolTestData(
            name: "InvocationWithNoHeadersNoIdAndArrayOfCustomObjectArgs",
            message: new InvocationMessage("method", new object[] { new CustomObject(), new CustomObject() }),
            binary: "lgGAwKZtZXRob2SShqpTdHJpbmdQcm9wqFNpZ25hbFIhqkRvdWJsZVByb3DLQBkh+1RCzxKnSW50UHJvcCqsRGF0ZVRpbWVQcm9w1v9Y7ByAqE51bGxQcm9wwKtCeXRlQXJyUHJvcMQDAQIDhqpTdHJpbmdQcm9wqFNpZ25hbFIhqkRvdWJsZVByb3DLQBkh+1RCzxKnSW50UHJvcCqsRGF0ZVRpbWVQcm9w1v9Y7ByAqE51bGxQcm9wwKtCeXRlQXJyUHJvcMQDAQIDkA=="),
        new ProtocolTestData(
            name: "InvocationWithHeadersNoIdAndArrayOfCustomObjectArgs",
            message: AddHeaders(TestHeaders, new InvocationMessage("method", new object[] { new CustomObject(), new CustomObject() })),
            binary: "lgGDo0Zvb6NCYXKyS2V5V2l0aApOZXcNCkxpbmVzq1N0aWxsIFdvcmtzsVZhbHVlV2l0aE5ld0xpbmVzsEFsc28KV29ya3MNCkZpbmXApm1ldGhvZJKGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOQ"),

        // StreamItem Messages
        new ProtocolTestData(
            name: "StreamItemWithNoHeadersAndNullItem",
            message: new StreamItemMessage("xyz", item: null),
            binary: "lAKAo3h5esA="),
        new ProtocolTestData(
            name: "StreamItemWithNoHeadersAndEnumItem",
            message: new StreamItemMessage("xyz", item: TestEnum.One),
            binary: "lAKAo3h5eqNPbmU="),
        new ProtocolTestData(
            name: "StreamItemWithNoHeadersAndCustomObjectItem",
            message: new StreamItemMessage("xyz", item: new CustomObject()),
            binary: "lAKAo3h5eoaqU3RyaW5nUHJvcKhTaWduYWxSIapEb3VibGVQcm9wy0AZIftUQs8Sp0ludFByb3AqrERhdGVUaW1lUHJvcNb/WOwcgKhOdWxsUHJvcMCrQnl0ZUFyclByb3DEAwECAw=="),
        new ProtocolTestData(
            name: "StreamItemWithNoHeadersAndCustomObjectArrayItem",
            message: new StreamItemMessage("xyz", item: new[] { new CustomObject(), new CustomObject() }),
            binary: "lAKAo3h5epKGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgM="),
        new ProtocolTestData(
            name: "StreamItemWithHeadersAndCustomObjectArrayItem",
            message: AddHeaders(TestHeaders, new StreamItemMessage("xyz", item: new[] { new CustomObject(), new CustomObject() })),
            binary: "lAKDo0Zvb6NCYXKyS2V5V2l0aApOZXcNCkxpbmVzq1N0aWxsIFdvcmtzsVZhbHVlV2l0aE5ld0xpbmVzsEFsc28KV29ya3MNCkZpbmWjeHl6koaqU3RyaW5nUHJvcKhTaWduYWxSIapEb3VibGVQcm9wy0AZIftUQs8Sp0ludFByb3AqrERhdGVUaW1lUHJvcNb/WOwcgKhOdWxsUHJvcMCrQnl0ZUFyclByb3DEAwECA4aqU3RyaW5nUHJvcKhTaWduYWxSIapEb3VibGVQcm9wy0AZIftUQs8Sp0ludFByb3AqrERhdGVUaW1lUHJvcNb/WOwcgKhOdWxsUHJvcMCrQnl0ZUFyclByb3DEAwECAw=="),

        // StreamInvocation Messages
        new ProtocolTestData(
            name: "StreamInvocationWithNoHeadersAndEnumArg",
            message: new StreamInvocationMessage("xyz", "method", new object[] { TestEnum.One }),
            binary: "lgSAo3h5eqZtZXRob2SRo09uZZA="),
        new ProtocolTestData(
            name: "StreamInvocationWithNoHeadersAndNullArg",
            message: new StreamInvocationMessage("xyz", "method", new object[] { null }),
            binary: "lgSAo3h5eqZtZXRob2SRwJA="),
        new ProtocolTestData(
            name: "StreamInvocationWithNoHeadersAndIntStringAndCustomObjectArgs",
            message: new StreamInvocationMessage("xyz", "method", new object[] { 42, "string", new CustomObject() }),
            binary: "lgSAo3h5eqZtZXRob2STKqZzdHJpbmeGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOQ"),
        new ProtocolTestData(
            name: "StreamInvocationWithNoHeadersAndCustomObjectArrayArg",
            message: new StreamInvocationMessage("xyz", "method", new object[] { new CustomObject(), new CustomObject() }),
            binary: "lgSAo3h5eqZtZXRob2SShqpTdHJpbmdQcm9wqFNpZ25hbFIhqkRvdWJsZVByb3DLQBkh+1RCzxKnSW50UHJvcCqsRGF0ZVRpbWVQcm9w1v9Y7ByAqE51bGxQcm9wwKtCeXRlQXJyUHJvcMQDAQIDhqpTdHJpbmdQcm9wqFNpZ25hbFIhqkRvdWJsZVByb3DLQBkh+1RCzxKnSW50UHJvcCqsRGF0ZVRpbWVQcm9w1v9Y7ByAqE51bGxQcm9wwKtCeXRlQXJyUHJvcMQDAQIDkA=="),
        new ProtocolTestData(
            name: "StreamInvocationWithHeadersAndCustomObjectArrayArg",
            message: AddHeaders(TestHeaders, new StreamInvocationMessage("xyz", "method", new object[] { new CustomObject(), new CustomObject() })),
            binary: "lgSDo0Zvb6NCYXKyS2V5V2l0aApOZXcNCkxpbmVzq1N0aWxsIFdvcmtzsVZhbHVlV2l0aE5ld0xpbmVzsEFsc28KV29ya3MNCkZpbmWjeHl6pm1ldGhvZJKGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOGqlN0cmluZ1Byb3CoU2lnbmFsUiGqRG91YmxlUHJvcMtAGSH7VELPEqdJbnRQcm9wKqxEYXRlVGltZVByb3DW/1jsHICoTnVsbFByb3DAq0J5dGVBcnJQcm9wxAMBAgOQ"),
    }.ToDictionary(t => t.Name);

    [Theory]
    [MemberData(nameof(TestDataNames))]
    public void ParseMessages(string testDataName)
    {
        // Parse side of the round trip for every named case above.
        var testData = TestData[testDataName];
        TestParseMessages(testData);
    }

    [Theory]
    [MemberData(nameof(TestDataNames))]
    public void WriteMessages(string testDataName)
    {
        // Write side of the round trip for every named case above.
        var testData = TestData[testDataName];
        TestWriteMessages(testData);
    }
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="NetworkStream.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Net.Sockets {
using System.IO;
using System.Runtime.InteropServices;
using System.Threading;
using System.Security.Permissions;
using System.Threading.Tasks;
/// <devdoc>
/// <para>
/// Provides the underlying stream of data for network access.
/// </para>
/// </devdoc>
public class NetworkStream : Stream {
/// <devdoc>
/// <para>
/// Used by the class to hold the underlying socket the stream uses.
/// </para>
/// </devdoc>
private Socket m_StreamSocket;
/// <devdoc>
/// <para>
/// Used by the class to indicate that the stream is m_Readable.
/// </para>
/// </devdoc>
private bool m_Readable;
/// <devdoc>
/// <para>
/// Used by the class to indicate that the stream is writable.
/// </para>
/// </devdoc>
private bool m_Writeable;
private bool m_OwnsSocket;
/// <devdoc>
///    <para>Creates a new instance of the <see cref='System.Net.Sockets.NetworkStream'/> without initialization.</para>
/// </devdoc>
internal NetworkStream() {
    // Internally-created instances own the socket attached to them later.
    m_OwnsSocket = true;
}
// Can be constructed directly out of a socket
/// <devdoc>
///    <para>Creates a new instance of the <see cref='System.Net.Sockets.NetworkStream'/> class for the specified <see cref='System.Net.Sockets.Socket'/>.</para>
/// </devdoc>
public NetworkStream(Socket socket) {
#if DEBUG
    using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
        if (socket == null) {
            throw new ArgumentNullException("socket");
        }
        // Read/write access; m_OwnsSocket keeps its default (false), so
        // disposing the stream does not close the caller's socket.
        InitNetworkStream(socket, FileAccess.ReadWrite);
#if DEBUG
    }
#endif
}
//UEUE (see FileStream)
// ownsSocket: true if the socket will be owned by this NetworkStream instance; otherwise, false.
public NetworkStream(Socket socket, bool ownsSocket) {
#if DEBUG
    using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
        if (socket == null) {
            throw new ArgumentNullException("socket");
        }
        // Read/write access; when ownsSocket is true, disposing the stream
        // also closes the socket.
        InitNetworkStream(socket, FileAccess.ReadWrite);
        m_OwnsSocket = ownsSocket;
#if DEBUG
    }
#endif
}
// Creates a stream over the same socket as an existing NetworkStream.
// The ArgumentNullException deliberately names "networkStream", since the
// null socket comes from that argument.
internal NetworkStream(NetworkStream networkStream, bool ownsSocket) {
    Socket socket = networkStream.Socket;
    if (socket == null) {
        throw new ArgumentNullException("networkStream");
    }
    InitNetworkStream(socket, FileAccess.ReadWrite);
    m_OwnsSocket = ownsSocket;
}
// Create with a socket and access mode
/// <devdoc>
///    <para>Creates a new instance of the <see cref='System.Net.Sockets.NetworkStream'/> class for the specified <see cref='System.Net.Sockets.Socket'/> with the specified access rights.</para>
/// </devdoc>
public NetworkStream(Socket socket, FileAccess access) {
#if DEBUG
    using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
        if (socket == null) {
            throw new ArgumentNullException("socket");
        }
        // access controls whether the stream is readable and/or writable.
        InitNetworkStream(socket, access);
#if DEBUG
    }
#endif
}
// Creates a stream with explicit access rights and explicit socket ownership.
public NetworkStream(Socket socket, FileAccess access, bool ownsSocket) {
#if DEBUG
    using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
        if (socket == null) {
            throw new ArgumentNullException("socket");
        }
        InitNetworkStream(socket, access);
        m_OwnsSocket = ownsSocket;
#if DEBUG
    }
#endif
}
//
// Socket - provides access to socket for stream closing
//
// Exposed to derived classes only; returns the field without any disposal check.
protected Socket Socket {
    get {
        return m_StreamSocket;
    }
}
// Like Socket, but throws ObjectDisposedException when the stream has been
// cleaned up or the socket is already gone.
internal Socket InternalSocket {
    get {
        Socket chkSocket = m_StreamSocket;
        if (m_CleanedUp || chkSocket == null) {
            throw new ObjectDisposedException(this.GetType().FullName);
        }
        return chkSocket;
    }
}
// Hard-aborts the owned socket. Only legal when this stream owns the socket;
// a silent no-op when the stream is already cleaned up or has no socket.
internal void InternalAbortSocket()
{
    if (!m_OwnsSocket)
    {
        throw new InvalidOperationException();
    }

    Socket chkSocket = m_StreamSocket;
    if (m_CleanedUp || chkSocket == null)
    {
        return;
    }

    try
    {
        // Close with a zero timeout: drop the connection immediately.
        chkSocket.Close(0);
    }
    catch (ObjectDisposedException)
    {
        // Racing with Dispose is fine - the socket is already closed.
    }
}
// Relinquishes ownership of the socket so disposing/finalizing this stream
// will not close it.
internal void ConvertToNotSocketOwner() {
    m_OwnsSocket = false;
    // Suppress finalization but still allow requests to proceed.
    GC.SuppressFinalize(this);
}
/// <summary>Used by the class to indicate that the stream is readable.</summary>
protected bool Readable
{
    get { return m_Readable; }
    set { m_Readable = value; }
}

/// <summary>Used by the class to indicate that the stream is writable.</summary>
protected bool Writeable
{
    get { return m_Writeable; }
    set { m_Writeable = value; }
}

/// <summary>Indicates whether data can be read from the stream. Read-only.</summary>
public override bool CanRead
{
    get { return m_Readable; }
}

/// <summary>Network streams are never seekable; always returns false.</summary>
public override bool CanSeek
{
    get { return false; }
}

/// <summary>Indicates whether data can be written to the stream. Read-only.</summary>
public override bool CanWrite
{
    get { return m_Writeable; }
}

/// <summary>Socket streams support read/write timeouts; always returns true.</summary>
public override bool CanTimeout
{
    get { return true; } // should we check for Connected state?
}
/// <devdoc>
/// <para>Set/Get ReadTimeout, note of a strange behavior, 0 timeout == infinite for sockets,
/// so we map this to -1, and if you set 0, we cannot support it</para>
/// </devdoc>
public override int ReadTimeout {
get {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
// Winsock reports "no timeout" as 0; surface that as Timeout.Infinite (-1)
// to match the Stream contract.
int timeout = (int)m_StreamSocket.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout);
if (timeout == 0) {
return -1;
}
return timeout;
#if DEBUG
}
#endif
}
set {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
// 0 is rejected because winsock would interpret it as infinite; only
// positive values or Timeout.Infinite are meaningful here.
if (value<=0 && value!=System.Threading.Timeout.Infinite) {
throw new ArgumentOutOfRangeException("value", SR.GetString(SR.net_io_timeout_use_gt_zero));
}
SetSocketTimeoutOption(SocketShutdown.Receive, value, false);
#if DEBUG
}
#endif
}
}
/// <devdoc>
/// <para>Set/Get WriteTimeout, note of a strange behavior, 0 timeout == infinite for sockets,
/// so we map this to -1, and if you set 0, we cannot support it</para>
/// </devdoc>
public override int WriteTimeout {
get {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
// Same 0 -> -1 mapping as ReadTimeout, but for the send direction.
int timeout = (int)m_StreamSocket.GetSocketOption(SocketOptionLevel.Socket, SocketOptionName.SendTimeout);
if (timeout == 0) {
return -1;
}
return timeout;
#if DEBUG
}
#endif
}
set {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
if (value <= 0 && value != System.Threading.Timeout.Infinite) {
throw new ArgumentOutOfRangeException("value", SR.GetString(SR.net_io_timeout_use_gt_zero));
}
SetSocketTimeoutOption(SocketShutdown.Send, value, false);
#if DEBUG
}
#endif
}
}
/// <devdoc>
/// <para>
/// Indicates data is available on the stream to be read.
/// This property checks to see if at least one byte of data is currently available
/// </para>
/// </devdoc>
public virtual bool DataAvailable {
get {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_readfailure, SR.GetString(SR.net_io_connectionclosed)));
}
// Ask the socket how many bytes are available. If it's
// not zero, return true.
return chkStreamSocket.Available != 0;
#if DEBUG
}
#endif
}
}
/// <summary>The length of the stream. Not supported; always throws.</summary>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override long Length
{
    get { throw new NotSupportedException(SR.GetString(SR.net_noseek)); }
}

/// <summary>The position in the stream. Not supported; always throws.</summary>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override long Position
{
    get { throw new NotSupportedException(SR.GetString(SR.net_noseek)); }
    set { throw new NotSupportedException(SR.GetString(SR.net_noseek)); }
}

/// <summary>Seeking is not supported by <see cref='NetworkStream'/>; always throws.</summary>
/// <exception cref="NotSupportedException">Always thrown.</exception>
public override long Seek(long offset, SeekOrigin origin)
{
    throw new NotSupportedException(SR.GetString(SR.net_noseek));
}
/// <summary>
/// Common initializer called by every constructor: validates that the socket
/// is blocking, connected and stream-oriented, stores it, and derives the
/// stream's readability/writability from the requested access.
/// </summary>
/// <param name="socket">Connected, blocking stream socket to wrap.</param>
/// <param name="Access">Requested access; anything other than pure Read or pure Write is treated as ReadWrite.</param>
/// <exception cref="IOException">The socket is non-blocking, not connected, or not a stream socket.</exception>
internal void InitNetworkStream(Socket socket, FileAccess Access)
{
    if (!socket.Blocking)
    {
        throw new IOException(SR.GetString(SR.net_sockets_blocking));
    }
    if (!socket.Connected)
    {
        throw new IOException(SR.GetString(SR.net_notconnected));
    }
    if (socket.SocketType != SocketType.Stream)
    {
        throw new IOException(SR.GetString(SR.net_notstream));
    }

    m_StreamSocket = socket;

    // Read  -> readable only; Write -> writable only;
    // ReadWrite or any other value -> both directions.
    m_Readable = Access != FileAccess.Write;
    m_Writeable = Access != FileAccess.Read;
}
/// <summary>
/// Non-blocking probe: true when the socket reports readability via
/// Poll(0, SelectRead). Unlike <see cref="Poll"/>, a disposed stream or a
/// missing socket yields false instead of throwing.
/// </summary>
internal bool PollRead()
{
    if (m_CleanedUp)
    {
        return false;
    }
    Socket current = m_StreamSocket;
    return current != null && current.Poll(0, SelectMode.SelectRead);
}

/// <summary>
/// Polls the underlying socket for the given mode, throwing when the stream
/// has been disposed or the connection is closed.
/// </summary>
/// <param name="microSeconds">Time to wait, in microseconds.</param>
/// <param name="mode">Status mode to poll for.</param>
/// <exception cref="ObjectDisposedException">The stream was disposed.</exception>
/// <exception cref="IOException">The connection is closed.</exception>
internal bool Poll(int microSeconds, SelectMode mode)
{
    if (m_CleanedUp)
    {
        throw new ObjectDisposedException(GetType().FullName);
    }

    Socket current = m_StreamSocket;
    if (current == null)
    {
        throw new IOException(SR.GetString(SR.net_io_readfailure, SR.GetString(SR.net_io_connectionclosed)));
    }

    return current.Poll(microSeconds, mode);
}
/*++
Read - provide core Read functionality.
Provide core read functionality. All we do is call through to the
socket Receive functionality.
Input:
Buffer - Buffer to read into.
Offset - Offset into the buffer where we're to read.
Count - Number of bytes to read.
Returns:
Number of bytes we read, or 0 if the socket is closed.
--*/
/// <devdoc>
/// <para>
/// Reads data from the stream.
/// </para>
/// </devdoc>
//UEUE
public override int Read([In, Out] byte[] buffer, int offset, int size) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Sync)) {
#endif
// Snapshot CanRead before the disposal check so a concurrent Dispose
// (which clears m_Readable) cannot race us into the wrong exception.
bool canRead = CanRead; // Prevent race with Dispose.
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
if (!canRead) {
throw new InvalidOperationException(SR.GetString(SR.net_writeonlystream));
}
//
// parameter validation
//
if (buffer==null) {
throw new ArgumentNullException("buffer");
}
if (offset<0 || offset>buffer.Length) {
throw new ArgumentOutOfRangeException("offset");
}
if (size<0 || size>buffer.Length-offset) {
throw new ArgumentOutOfRangeException("size");
}
Socket chkStreamSocket = m_StreamSocket;
if (chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_readfailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
// Blocking receive; returns 0 when the remote side shut down gracefully.
int bytesTransferred = chkStreamSocket.Receive(buffer, offset, size, 0);
return bytesTransferred;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_readfailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
/*++
Write - provide core Write functionality.
Provide core write functionality. All we do is call through to the
socket Send method..
Input:
Buffer - Buffer to write from.
Offset - Offset into the buffer from where we'll start writing.
Count - Number of bytes to write.
Returns:
Number of bytes written. We'll throw an exception if we
can't write everything. It's brutal, but there's no other
way to indicate an error.
--*/
/// <devdoc>
/// <para>
/// Writes data to the stream..
/// </para>
/// </devdoc>
public override void Write(byte[] buffer, int offset, int size) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Sync)) {
#endif
// Same race-avoidance snapshot as in Read, but for writability.
bool canWrite = CanWrite; // Prevent race with Dispose.
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
if (!canWrite) {
throw new InvalidOperationException(SR.GetString(SR.net_readonlystream));
}
//
// parameter validation
//
if (buffer==null) {
throw new ArgumentNullException("buffer");
}
if (offset<0 || offset>buffer.Length) {
throw new ArgumentOutOfRangeException("offset");
}
if (size<0 || size>buffer.Length-offset) {
throw new ArgumentOutOfRangeException("size");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
//
// since the socket is in blocking mode this will always complete
// after ALL the requested number of bytes was transferred
//
chkStreamSocket.Send(buffer, offset, size, SocketFlags.None);
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
private int m_CloseTimeout = Socket.DefaultCloseTimeout; // 1 ms; -1 = respect linger options
// Closes the stream, waiting up to 'timeout' ms for pending data to be sent.
// The timeout is recorded and then consumed by Dispose via the parameterless Close().
public void Close(int timeout) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Sync)) {
#endif
if (timeout < -1) {
throw new ArgumentOutOfRangeException("timeout");
}
m_CloseTimeout = timeout;
Close();
#if DEBUG
}
#endif
}
// volatile: read/written from user threads and the finalizer without a lock.
private volatile bool m_CleanedUp = false;
protected override void Dispose(bool disposing) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
// Mark this as disposed before changing anything else.
bool cleanedUp = m_CleanedUp;
m_CleanedUp = true;
// Second and later calls are no-ops (cleanedUp already true).
if (!cleanedUp && disposing) {
//
// only resource we need to free is the network stream, since this
// is based on the client socket, closing the stream will cause us
// to flush the data to the network, close the stream and (in the
// NetoworkStream code) close the socket as well.
//
if (m_StreamSocket!=null) {
m_Readable = false;
m_Writeable = false;
if (m_OwnsSocket) {
//
// if we own the Socket (false by default), close it
// ignoring possible exceptions (eg: the user told us
// that we own the Socket but it closed at some point of time,
// here we would get an ObjectDisposedException)
//
Socket chkStreamSocket = m_StreamSocket;
if (chkStreamSocket!=null) {
chkStreamSocket.InternalShutdown(SocketShutdown.Both);
chkStreamSocket.Close(m_CloseTimeout);
}
}
}
}
#if DEBUG
}
#endif
base.Dispose(disposing);
}
// Finalizer: release the socket (when owned) if the user never disposed.
~NetworkStream() {
#if DEBUG
GlobalLog.SetThreadSource(ThreadKinds.Finalization);
// using (GlobalLog.SetThreadKind(ThreadKinds.System | ThreadKinds.Async)) {
#endif
Dispose(false);
#if DEBUG
// }
#endif
}
/// <summary>
/// True while the stream is undisposed, still has a socket, and that socket
/// reports itself connected.
/// </summary>
internal bool Connected
{
    get
    {
        Socket current = m_StreamSocket;
        return !m_CleanedUp && current != null && current.Connected;
    }
}
/*++
BeginRead - provide async read functionality.
This method provides async read functionality. All we do is
call through to the underlying socket async read.
Input:
buffer - Buffer to read into.
offset - Offset into the buffer where we're to read.
size - Number of bytes to read.
Returns:
An IASyncResult, representing the read.
--*/
/// <devdoc>
/// <para>
/// Begins an asychronous read from a stream.
/// </para>
/// </devdoc>
[HostProtection(ExternalThreading=true)]
public override IAsyncResult BeginRead(byte[] buffer, int offset, int size, AsyncCallback callback, Object state) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
// Snapshot CanRead before the disposal check to avoid racing with Dispose.
bool canRead = CanRead; // Prevent race with Dispose.
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
if (!canRead) {
throw new InvalidOperationException(SR.GetString(SR.net_writeonlystream));
}
//
// parameter validation
//
if (buffer==null) {
throw new ArgumentNullException("buffer");
}
if (offset<0 || offset>buffer.Length) {
throw new ArgumentOutOfRangeException("offset");
}
if (size<0 || size>buffer.Length-offset) {
throw new ArgumentOutOfRangeException("size");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_readfailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
IAsyncResult asyncResult =
chkStreamSocket.BeginReceive(
buffer,
offset,
size,
SocketFlags.None,
callback,
state);
return asyncResult;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_readfailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
/// <summary>
/// Begins an async receive for trusted internal callers, skipping the buffer
/// validation performed by BeginRead. Socket failures are wrapped in an
/// IOException; fatal exceptions (per NclUtilities.IsFatal) propagate as-is.
/// </summary>
internal virtual IAsyncResult UnsafeBeginRead(byte[] buffer, int offset, int size, AsyncCallback callback, Object state)
{
    // Snapshot CanRead before the disposal check to avoid racing with Dispose.
    bool readable = CanRead;
    if (m_CleanedUp)
    {
        throw new ObjectDisposedException(GetType().FullName);
    }
    if (!readable)
    {
        throw new InvalidOperationException(SR.GetString(SR.net_writeonlystream));
    }

    Socket current = m_StreamSocket;
    if (current == null)
    {
        throw new IOException(SR.GetString(SR.net_io_readfailure, SR.GetString(SR.net_io_connectionclosed)));
    }

    try
    {
        return current.UnsafeBeginReceive(buffer, offset, size, SocketFlags.None, callback, state);
    }
    catch (Exception exception)
    {
        if (NclUtilities.IsFatal(exception))
        {
            throw;
        }
        // Surface socket failures as IOException with the original as InnerException.
        throw new IOException(SR.GetString(SR.net_io_readfailure, exception.Message), exception);
    }
}
/*++
EndRead - handle the end of an async read.
This method is called when an async read is completed. All we
do is call through to the core socket EndReceive functionality.
Input:
buffer - Buffer to read into.
offset - Offset into the buffer where we're to read.
size - Number of bytes to read.
Returns:
The number of bytes read. May throw an exception.
--*/
/// <devdoc>
/// <para>
/// Handle the end of an asynchronous read.
/// </para>
/// </devdoc>
public override int EndRead(IAsyncResult asyncResult) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
//
// parameter validation
//
if (asyncResult==null) {
throw new ArgumentNullException("asyncResult");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_readfailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
// Blocks until the pending receive completes, then reports its byte count.
int bytesTransferred = chkStreamSocket.EndReceive(asyncResult);
return bytesTransferred;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_readfailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
/*++
BeginWrite - provide async write functionality.
This method provides async write functionality. All we do is
call through to the underlying socket async send.
Input:
buffer - Buffer to write into.
offset - Offset into the buffer where we're to write.
size - Number of bytes to written.
Returns:
An IASyncResult, representing the write.
--*/
/// <devdoc>
/// <para>
/// Begins an asynchronous write to a stream.
/// </para>
/// </devdoc>
[HostProtection(ExternalThreading=true)]
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int size, AsyncCallback callback, Object state) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
// Snapshot CanWrite before the disposal check to avoid racing with Dispose.
bool canWrite = CanWrite; // Prevent race with Dispose.
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
if (!canWrite) {
throw new InvalidOperationException(SR.GetString(SR.net_readonlystream));
}
//
// parameter validation
//
if (buffer==null) {
throw new ArgumentNullException("buffer");
}
if (offset<0 || offset>buffer.Length) {
throw new ArgumentOutOfRangeException("offset");
}
if (size<0 || size>buffer.Length-offset) {
throw new ArgumentOutOfRangeException("size");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
//
// call BeginSend on the Socket.
//
IAsyncResult asyncResult =
chkStreamSocket.BeginSend(
buffer,
offset,
size,
SocketFlags.None,
callback,
state);
return asyncResult;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
// Begins an async send for trusted internal callers, skipping the buffer
// validation performed by BeginWrite.
internal virtual IAsyncResult UnsafeBeginWrite(byte[] buffer, int offset, int size, AsyncCallback callback, Object state) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User | ThreadKinds.Async)) {
#endif
// Snapshot CanWrite before the disposal check to avoid racing with Dispose.
bool canWrite = CanWrite; // Prevent race with Dispose.
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
if (!canWrite) {
throw new InvalidOperationException(SR.GetString(SR.net_readonlystream));
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
//
// call BeginSend on the Socket.
//
IAsyncResult asyncResult =
chkStreamSocket.UnsafeBeginSend(
buffer,
offset,
size,
SocketFlags.None,
callback,
state);
return asyncResult;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
/// <devdoc>
/// <para>
/// Handle the end of an asynchronous write.
/// This method is called when an async write is completed. All we
/// do is call through to the core socket EndSend functionality.
/// Returns: The number of bytes read. May throw an exception.
/// </para>
/// </devdoc>
public override void EndWrite(IAsyncResult asyncResult) {
#if DEBUG
using (GlobalLog.SetThreadKind(ThreadKinds.User)) {
#endif
if (m_CleanedUp){
throw new ObjectDisposedException(this.GetType().FullName);
}
//
// parameter validation
//
if (asyncResult==null) {
throw new ArgumentNullException("asyncResult");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
// Blocks until the pending send completes; the byte count is discarded
// because a blocking-socket send transfers everything or throws.
chkStreamSocket.EndSend(asyncResult);
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
/// <summary>
/// Performs a synchronous gathered send of an array of buffers. Socket
/// failures are wrapped in an IOException; ThreadAbort/StackOverflow/
/// OutOfMemory exceptions propagate unchanged.
/// </summary>
/// <param name="buffers">Buffers to send, in order.</param>
internal virtual void MultipleWrite(BufferOffsetSize[] buffers)
{
    GlobalLog.ThreadContract(ThreadKinds.Sync, "NetworkStream#" + ValidationHelper.HashString(this) + "::MultipleWrite");

    if (buffers == null)
    {
        throw new ArgumentNullException("buffers");
    }

    Socket current = m_StreamSocket;
    if (current == null)
    {
        throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
    }

    try
    {
        current.MultipleSend(buffers, SocketFlags.None);
    }
    catch (Exception exception)
    {
        if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException)
        {
            throw;
        }
        // Surface socket failures as IOException with the original as InnerException.
        throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
    }
}
/// <devdoc>
/// <para>
/// Starts off an async Write of an array of buffers.
/// </para>
/// </devdoc>
internal virtual IAsyncResult BeginMultipleWrite(
BufferOffsetSize[] buffers,
AsyncCallback callback,
Object state)
{
#if DEBUG
GlobalLog.ThreadContract(ThreadKinds.Unknown, "NetworkStream#" + ValidationHelper.HashString(this) + "::BeginMultipleWrite");
using (GlobalLog.SetThreadKind(ThreadKinds.Async)) {
#endif
//
// parameter validation
//
if (buffers == null) {
throw new ArgumentNullException("buffers");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
//
// call BeginMultipleSend on the Socket.
//
IAsyncResult asyncResult =
chkStreamSocket.BeginMultipleSend(
buffers,
SocketFlags.None,
callback,
state);
return asyncResult;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
// Unsafe variant of BeginMultipleWrite for trusted internal callers.
internal virtual IAsyncResult UnsafeBeginMultipleWrite(
BufferOffsetSize[] buffers,
AsyncCallback callback,
Object state)
{
#if DEBUG
// NOTE(review): the contract label below says "BeginMultipleWrite" - looks
// like a copy/paste of the safe variant's label; debug-only, left as-is.
GlobalLog.ThreadContract(ThreadKinds.Unknown, "NetworkStream#" + ValidationHelper.HashString(this) + "::BeginMultipleWrite");
using (GlobalLog.SetThreadKind(ThreadKinds.Async)) {
#endif
//
// parameter validation
//
if (buffers == null) {
throw new ArgumentNullException("buffers");
}
Socket chkStreamSocket = m_StreamSocket;
if(chkStreamSocket == null) {
throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
}
try {
//
// call BeginMultipleSend on the Socket.
//
IAsyncResult asyncResult =
chkStreamSocket.UnsafeBeginMultipleSend(
buffers,
SocketFlags.None,
callback,
state);
return asyncResult;
}
catch (Exception exception) {
// Fatal runtime exceptions must not be wrapped.
if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException) {
throw;
}
//
// some sort of error occured on the socket call,
// set the SocketException as InnerException and throw
//
throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
}
#if DEBUG
}
#endif
}
/// <summary>
/// Completes an async gathered send started by (Unsafe)BeginMultipleWrite.
/// Socket failures are wrapped in an IOException; ThreadAbort/StackOverflow/
/// OutOfMemory exceptions propagate unchanged.
/// </summary>
/// <param name="asyncResult">The IAsyncResult returned by the begin call.</param>
internal virtual void EndMultipleWrite(IAsyncResult asyncResult)
{
    GlobalLog.ThreadContract(ThreadKinds.Unknown, "NetworkStream#" + ValidationHelper.HashString(this) + "::EndMultipleWrite");

    if (asyncResult == null)
    {
        throw new ArgumentNullException("asyncResult");
    }

    Socket current = m_StreamSocket;
    if (current == null)
    {
        throw new IOException(SR.GetString(SR.net_io_writefailure, SR.GetString(SR.net_io_connectionclosed)));
    }

    try
    {
        current.EndMultipleSend(asyncResult);
    }
    catch (Exception exception)
    {
        if (exception is ThreadAbortException || exception is StackOverflowException || exception is OutOfMemoryException)
        {
            throw;
        }
        // Surface socket failures as IOException with the original as InnerException.
        throw new IOException(SR.GetString(SR.net_io_writefailure, exception.Message), exception);
    }
}
/// <devdoc>
/// <para>
/// Flushes data from the stream. This is meaningless for us, so it does nothing.
/// </para>
/// </devdoc>
public override void Flush() {
}
// Async flush: sockets have no user-level buffer here, so complete
// synchronously. The cancellation token is intentionally ignored.
public override Task FlushAsync(CancellationToken cancellationToken)
{
return Task.CompletedTask;
}
/// <devdoc>
/// <para>
/// Sets the length of the stream. Always throws <see cref='NotSupportedException'/>
/// </para>
/// </devdoc>
public override void SetLength(long value) {
throw new NotSupportedException(SR.GetString(SR.net_noseek));
}
// Last timeout values pushed to the socket, cached per direction; -1 means
// "never set", so the first SetSocketTimeoutOption call always writes through.
int m_CurrentReadTimeout = -1;
int m_CurrentWriteTimeout = -1;
// Applies a send and/or receive timeout to the underlying socket, skipping the
// SetSocketOption call when the value is unchanged. A negative timeout is
// normalized to 0, which the winsock stack treats as infinite.
internal void SetSocketTimeoutOption(SocketShutdown mode, int timeout, bool silent) {
GlobalLog.Print("NetworkStream#" + ValidationHelper.HashString(this) + "::SetSocketTimeoutOption() mode:" + mode + " silent:" + silent + " timeout:" + timeout + " m_CurrentReadTimeout:" + m_CurrentReadTimeout + " m_CurrentWriteTimeout:" + m_CurrentWriteTimeout);
GlobalLog.ThreadContract(ThreadKinds.Unknown, "NetworkStream#" + ValidationHelper.HashString(this) + "::SetSocketTimeoutOption");
if (timeout < 0) {
timeout = 0; // -1 becomes 0 for the winsock stack
}
Socket chkStreamSocket = m_StreamSocket;
if (chkStreamSocket==null) {
return;
}
// SocketShutdown doubles as a direction selector: Send -> send timeout,
// Receive -> receive timeout, Both -> both.
if (mode==SocketShutdown.Send || mode==SocketShutdown.Both) {
if (timeout!=m_CurrentWriteTimeout) {
chkStreamSocket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.SendTimeout, timeout, silent);
m_CurrentWriteTimeout = timeout;
}
}
if (mode==SocketShutdown.Receive || mode==SocketShutdown.Both) {
if (timeout!=m_CurrentReadTimeout) {
chkStreamSocket.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout, timeout, silent);
m_CurrentReadTimeout = timeout;
}
}
}
#if TRAVE
[System.Diagnostics.Conditional("TRAVE")]
// Debug-only dump of the underlying socket's state (TRAVE builds only).
internal void DebugMembers() {
if (m_StreamSocket != null) {
GlobalLog.Print("m_StreamSocket:");
m_StreamSocket.DebugMembers();
}
}
#endif
}
}
| |
// $Id: mxGraphView.cs,v 1.2 2014/02/19 09:40:59 gaudenz Exp $
// Copyright (c) 2007-2008, Gaudenz Alder
using System;
using System.Diagnostics;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.Drawing;
using System.Drawing.Imaging;
namespace com.mxgraph
{
/// <summary>
/// Implements a view for the graph. This class is in charge of computing the
/// absolute coordinates for the relative child geometries, the points for
/// perimeters and edge styles and keeping them cached in mxCellStates for
/// faster retrieval. The states are updated whenever the model or the view
/// state (translate, scale) changes. The scale and translate are honoured in
/// the bounds.
/// </summary>
public class mxGraphView
{
/// <summary>
/// Shared instance of an empty point.
/// </summary>
// NOTE(review): not referenced in this portion of the class - presumably used
// by edge/perimeter routines further down; confirm before removing.
private static mxPoint EMPTY_POINT = new mxPoint();
/// <summary>
/// Reference to the enclosing graph.
/// </summary>
protected mxGraph graph;
/// <summary>
/// Caches the current bounds of the graph.
/// </summary>
protected mxRectangle graphBounds = new mxRectangle();
/// <summary>
/// Specifies the scale. Default is 1 (100%).
/// </summary>
protected double scale = 1;
/// <summary>
/// Point that specifies the current translation. Default is a new
/// empty point.
/// </summary>
protected mxPoint translate = new mxPoint(0, 0);
/// <summary>
/// Maps from cells to cell states.
/// </summary>
protected Dictionary<Object, mxCellState> states = new Dictionary<Object, mxCellState>();
/// <summary>
/// Specifies if the view should be revalidated if the scale or
/// translation changes.
/// </summary>
// NOTE(review): the name suggests this gates event firing rather than
// revalidation; the summary above may be stale - confirm against usage.
protected bool eventsEnabled = true;
/// <summary>
/// Constructs a new view for the given graph.
/// </summary>
/// <param name="graph">Reference to the enclosing graph.</param>
public mxGraphView(mxGraph graph)
{
this.graph = graph;
}
/// <summary>Returns the enclosing graph.</summary>
public mxGraph Graph
{
    get { return graph; }
}

/// <summary>Gets or sets the cached diagram bounds.</summary>
public mxRectangle GraphBounds
{
    get { return graphBounds; }
    set { graphBounds = value; }
}

/// <summary>
/// Gets or sets the current scale; assigning a different value revalidates
/// the whole view.
/// </summary>
public double Scale
{
    get { return scale; }
    set
    {
        if (scale == value)
        {
            return;
        }
        scale = value;
        Revalidate();
    }
}

/// <summary>
/// Gets or sets the current translation; assigning a point with different
/// coordinates revalidates the whole view.
/// </summary>
public mxPoint Translate
{
    get { return translate; }
    set
    {
        if (translate.X == value.X && translate.Y == value.Y)
        {
            return;
        }
        translate = value;
        Revalidate();
    }
}

/// <summary>Gets or sets the mapping from cells to their cell states.</summary>
public Dictionary<Object, mxCellState> States
{
    get { return states; }
    set { states = value; }
}

/// <summary>Gets or sets the eventsEnabled flag.</summary>
public bool IsEventsEnabled
{
    get { return eventsEnabled; }
    set { eventsEnabled = value; }
}
/// <summary>
/// Returns the combined bounds (not the bounding boxes) for an array of
/// cells or null, if no cells are specified.
/// </summary>
/// <param name="cells">Cells whose combined bounds should be returned.</param>
/// <returns>Combined bounds, or null.</returns>
public mxRectangle GetBounds(Object[] cells)
{
return GetBounds(cells, false);
}
/// <summary>
/// Returns the combined bounding box for an array of cells or null, if no
/// cells are specified. Delegates to GetBounds with boundingBox = true.
/// </summary>
/// <param name="cells">Cells whose combined bounding box should be returned.</param>
/// <returns>Combined bounding box, or null.</returns>
public mxRectangle GetBoundingBox(Object[] cells)
{
return GetBounds(cells, true);
}
/// <summary>
/// Returns the combined bounds or bounding boxes of the cached states of the
/// given vertices and edges, or null if no usable state was found.
/// </summary>
/// <param name="cells">Cells to combine; non-vertex/non-edge entries are ignored.</param>
/// <param name="boundingBox">If true uses each state's BoundingBox, otherwise the state rectangle itself.</param>
public mxRectangle GetBounds(Object[] cells, bool boundingBox)
{
    if (cells == null || cells.Length == 0)
    {
        return null;
    }

    mxIGraphModel model = graph.Model;
    mxRectangle result = null;

    foreach (Object cell in cells)
    {
        if (!model.IsVertex(cell) && !model.IsEdge(cell))
        {
            continue;
        }

        mxCellState state = GetState(cell);
        if (state == null)
        {
            continue;
        }

        mxRectangle rect = (boundingBox) ? state.BoundingBox : state;
        if (rect == null)
        {
            continue;
        }

        if (result == null)
        {
            result = new mxRectangle(rect);
        }
        else
        {
            result.Add(rect);
        }
    }

    return result;
}
/// <summary>First invalidates, then validates all cell states.</summary>
public void Revalidate()
{
    Invalidate();
    Validate();
}

/// <summary>Invalidates all cell states by clearing the state cache.</summary>
public void Invalidate()
{
    // LATER: Invalidate cell states recursively
    states.Clear();
}

/// <summary>
/// Validates all bounds and points of the visible cells, starting at the
/// model root, and updates the cached graph bounds.
/// </summary>
public void Validate()
{
    Object root = graph.Model.Root;

    // Only rebuild when the state cache is empty (i.e. after Invalidate).
    if (root == null || states.Count != 0)
    {
        return;
    }

    mxRectangle bounds = GetBoundingBox(ValidateCellState(ValidateCell(root)));
    GraphBounds = (bounds != null) ? bounds : new mxRectangle();
}
/// <summary>
/// Shortcut to GetBoundingBox(state, recurse) with recurse set to true.
/// </summary>
public mxRectangle GetBoundingBox(mxCellState state)
{
    return GetBoundingBox(state, true);
}

/// <summary>
/// Returns the bounding box of the shape and the label for the given cell
/// state, including its children when recurse is true.
/// </summary>
/// <param name="state">Cell state whose bounding box should be returned.</param>
/// <param name="recurse">Boolean indicating if the children should be included.</param>
public mxRectangle GetBoundingBox(mxCellState state, Boolean recurse)
{
    if (state == null)
    {
        return null;
    }

    // Start from a clone of the state's own box so child boxes can be merged in.
    mxRectangle result = (state.BoundingBox != null)
        ? (mxRectangle)state.BoundingBox.Clone()
        : null;

    if (recurse)
    {
        mxIGraphModel model = graph.Model;
        int childCount = model.GetChildCount(state.Cell);

        for (int i = 0; i < childCount; i++)
        {
            mxRectangle childBox = GetBoundingBox(
                GetState(model.GetChildAt(state.Cell, i)), true);

            if (childBox == null)
            {
                continue;
            }

            if (result == null)
            {
                result = childBox;
            }
            else
            {
                result.Add(childBox);
            }
        }
    }

    return result;
}
/// <summary>
/// Shortcut to ValidateCell with visible set to true.
/// </summary>
public Object ValidateCell(Object cell)
{
    // Cells are assumed visible at the root of the recursion.
    return ValidateCell(cell, true);
}
/// <summary>
/// Recursively creates the cell state for the given cell if visible is true
/// and the cell itself is visible. If the cell is not visible but a state
/// exists then the state is removed.
/// </summary>
/// <param name="cell">Cell whose cell state should be created.</param>
/// <param name="visible">Boolean indicating if the cell should be visible.</param>
public Object ValidateCell(Object cell, Boolean visible)
{
    if (cell == null)
    {
        return cell;
    }

    // A cell is only effectively visible if all ancestors were visible too.
    visible = visible && graph.IsCellVisible(cell);
    mxCellState state = GetState(cell, visible);

    if (state != null && !visible)
    {
        // The cell became invisible: drop its stale state.
        RemoveState(cell);
    }
    else
    {
        mxIGraphModel model = graph.Model;
        int count = model.GetChildCount(cell);

        // Children of a collapsed cell are hidden.
        bool childrenVisible = visible && !graph.IsCellCollapsed(cell);

        for (int i = 0; i < count; i++)
        {
            ValidateCell(model.GetChildAt(cell, i), childrenVisible);
        }
    }

    return cell;
}
/// <summary>
/// Shortcut to ValidateCellState with recurse set to true.
/// </summary>
public mxCellState ValidateCellState(Object cell)
{
    // Recurse into children by default.
    return ValidateCellState(cell, true);
}
/// <summary>
/// Validates the cell state for the given cell: if the state is marked
/// invalid, the parent and visible terminal states are validated first,
/// then the state's geometry, label bounds and bounding box are recomputed.
/// Children are validated recursively when recurse is true.
/// </summary>
/// <param name="cell">Cell whose cell state should be validated.</param>
/// <param name="recurse">Boolean indicating if the children of the cell should be
/// validated.</param>
/// <returns>The (possibly updated) state for the cell, or null if the cell
/// is null or has no cached state.</returns>
public mxCellState ValidateCellState(Object cell, Boolean recurse)
{
    mxCellState state = null;

    if (cell != null)
    {
        state = GetState(cell);

        if (state != null)
        {
            mxIGraphModel model = graph.Model;

            if (state.Invalid)
            {
                // Clear the flag first so cyclic references terminate.
                state.Invalid = false;

                // Parent and terminal states must be up to date before this
                // state's absolute geometry can be computed.
                ValidateCellState(model.GetParent(cell), false);
                mxCellState source = ValidateCellState(GetVisibleTerminal(cell, true), false);
                mxCellState target = ValidateCellState(GetVisibleTerminal(cell, false), false);

                UpdateCellState(state, source, target);

                if (model.IsEdge(cell) || model.IsVertex(cell))
                {
                    // Label bounds depend on the updated geometry; the
                    // bounding box depends on the label bounds.
                    UpdateLabelBounds(state);
                    UpdateBoundingBox(state);
                }
            }

            if (recurse)
            {
                int childCount = model.GetChildCount(cell);

                for (int i = 0; i < childCount; i++)
                {
                    ValidateCellState(model.GetChildAt(cell, i));
                }
            }
        }
    }

    return state;
}
/// <summary>
/// Updates the given cell state's absolute geometry: resets the derived
/// values, accumulates the parent origin and child offset, applies the
/// cell geometry (relative or absolute) and finally delegates to the
/// vertex- or edge-specific update.
/// </summary>
/// <param name="state">Cell state to be updated.</param>
/// <param name="source">State of the visible source terminal (edges).</param>
/// <param name="target">State of the visible target terminal (edges).</param>
public void UpdateCellState(mxCellState state, mxCellState source, mxCellState target)
{
    // Reset all derived values before recomputing them.
    state.AbsoluteOffset.X = 0;
    state.AbsoluteOffset.Y = 0;
    state.Origin.X = 0;
    state.Origin.Y = 0;
    state.Length = 0;

    mxIGraphModel model = graph.Model;
    mxCellState pState = GetState(model.GetParent(state.Cell));

    // The origin accumulates down the parent chain.
    if (pState != null)
    {
        state.Origin.X += pState.Origin.X;
        state.Origin.Y += pState.Origin.Y;
    }

    // Additional per-cell offset supplied by the graph.
    mxPoint offset = graph.GetChildOffsetForCell(state.Cell);

    if (offset != null)
    {
        state.Origin.X += offset.X;
        state.Origin.Y += offset.Y;
    }

    mxGeometry geo = graph.GetCellGeometry(state.Cell);

    if (geo != null)
    {
        if (!model.IsEdge(state.Cell))
        {
            mxPoint origin = state.Origin;
            offset = geo.Offset;

            if (offset == null)
            {
                offset = EMPTY_POINT;
            }

            if (geo.Relative && pState != null)
            {
                if (model.IsEdge(pState.Cell))
                {
                    // Relative to an edge: position along the edge path.
                    mxPoint orig = GetPoint(pState, geo);

                    if (orig != null)
                    {
                        origin.X += (orig.X / scale) - pState.Origin.X - translate.X;
                        origin.Y += (orig.Y / scale) - pState.Origin.Y - translate.Y;
                    }
                }
                else
                {
                    // Relative to a vertex: fraction of the parent's size
                    // plus the geometry offset.
                    origin.X += geo.X * pState.Width / scale + offset.X;
                    origin.Y += geo.Y * pState.Height / scale + offset.Y;
                }
            }
            else
            {
                // Absolute geometry: the offset becomes the label offset.
                state.AbsoluteOffset = new mxPoint(scale * offset.X,
                    scale * offset.Y);
                origin.X += geo.X;
                origin.Y += geo.Y;
            }
        }

        // Transform model coordinates into screen coordinates.
        state.X = scale * (translate.X + state.Origin.X);
        state.Y = scale * (translate.Y + state.Origin.Y);
        state.Width = scale * geo.Width;
        state.Height = scale * geo.Height;

        if (model.IsVertex(state.Cell))
        {
            UpdateVertexState(state, geo);
        }

        if (model.IsEdge(state.Cell))
        {
            UpdateEdgeState(state, geo, source, target);
        }
    }
}
/// <summary>
/// Updates the given vertex state. Currently only the label offset is
/// computed; the geometry parameter is unused until rotation is supported.
/// </summary>
public void UpdateVertexState(mxCellState state, mxGeometry geo)
{
    // LATER: Add support for rotation
    UpdateVertexLabelOffset(state);
}
/// <summary>
/// Updates the given edge state: computes fixed terminal points, the
/// routed intermediate points and the floating terminal points. Removes
/// the state entirely when the edge cannot be rendered (missing terminals
/// or terminal points).
/// </summary>
public void UpdateEdgeState(mxCellState state, mxGeometry geo, mxCellState source, mxCellState target)
{
    // This will remove edges with no terminals and no terminal points
    // as such edges are invalid and produce NPEs in the edge styles.
    // Also removes connected edges that have no visible terminals.
    if ((graph.Model.GetTerminal(state.Cell, true) != null && source == null) ||
        (source == null && geo.GetTerminalPoint(true) == null) ||
        (graph.Model.GetTerminal(state.Cell, false) != null && target == null) ||
        (target == null && geo.GetTerminalPoint(false) == null))
    {
        RemoveState(state.Cell, true);
    }
    else
    {
        // Order matters: fixed ends first, then routing, then the floating
        // ends which depend on the routed points.
        UpdateFixedTerminalPoints(state, source, target);
        UpdatePoints(state, geo.Points, source, target);
        UpdateFloatingTerminalPoints(state, source, target);

        if (state.AbsolutePointCount() < 2 || state.AbsolutePoints[0] == null || state
            .AbsolutePoints[state.AbsolutePointCount() - 1] == null)
        {
            // This will remove edges with invalid points from the list of states in the view.
            // Happens if the one of the terminals and the corresponding terminal point is null.
            RemoveState(state.Cell, true);
        }
        else
        {
            UpdateEdgeBounds(state);
            state.AbsoluteOffset = GetPoint(state, geo);
        }
    }
}
/// <summary>
/// Updates the absoluteOffset of the given vertex cell state according to
/// the horizontal and vertical label position styles.
/// </summary>
/// <param name="state">Cell state whose absolute offset should be updated.</param>
public void UpdateVertexLabelOffset(mxCellState state)
{
    string hpos = mxUtils.GetString(state.Style,
        mxConstants.STYLE_LABEL_POSITION,
        mxConstants.ALIGN_CENTER);

    // Shift the label left or right of the vertex; center needs no shift.
    if (hpos.Equals(mxConstants.ALIGN_LEFT))
    {
        state.AbsoluteOffset.X -= state.Width;
    }
    else if (hpos.Equals(mxConstants.ALIGN_RIGHT))
    {
        state.AbsoluteOffset.X += state.Width;
    }

    string vpos = mxUtils.GetString(state.Style,
        mxConstants.STYLE_VERTICAL_LABEL_POSITION,
        mxConstants.ALIGN_MIDDLE);

    // Shift the label above or below the vertex; middle needs no shift.
    if (vpos.Equals(mxConstants.ALIGN_TOP))
    {
        state.AbsoluteOffset.Y -= state.Height;
    }
    else if (vpos.Equals(mxConstants.ALIGN_BOTTOM))
    {
        state.AbsoluteOffset.Y += state.Height;
    }
}
/// <summary>
/// Updates the label bounds in the given state. With overflow=fill the
/// label fills the cell bounds; otherwise the paint bounds are computed
/// from the label text and style.
/// </summary>
/// <param name="state"></param>
public void UpdateLabelBounds(mxCellState state)
{
    Dictionary<string, Object> style = state.Style;

    if (mxUtils.GetString(style, mxConstants.STYLE_OVERFLOW, "").Equals("fill"))
    {
        // The label occupies the entire cell area.
        state.LabelBounds = new mxRectangle(state);
        return;
    }

    Object cell = state.Cell;
    string label = graph.GetLabel(cell);

    // Edges have no vertex bounds to position the label against.
    mxRectangle vertexBounds = graph.Model.IsEdge(cell) ? null : state;

    state.LabelBounds = mxUtils.GetLabelPaintBounds(label,
        style, false, state.AbsoluteOffset, vertexBounds, scale);
}
/// <summary>
/// Updates the bounding box in the given cell state: grows the cell
/// rectangle by stroke width and markers, shadow, oversize label images
/// and rotation, then unions it with the label bounds.
/// </summary>
/// <param name="state">Cell state whose bounding box should be
/// updated.</param>
/// <returns>The computed bounding box (also stored on the state).</returns>
public mxRectangle UpdateBoundingBox(mxCellState state)
{
    // Gets the cell bounds and adds shadows and markers
    mxRectangle rect = new mxRectangle(state.GetRectangle());
    Dictionary<string, Object> style = state.Style;

    // Adds extra pixels for the marker and stroke assuming
    // that the border stroke is centered around the bounds
    // and the first pixel is drawn inside the bounds
    double strokeWidth = Math.Max(1, Math.Round(mxUtils.GetInt(style,
        mxConstants.STYLE_STROKEWIDTH, 1)
        * scale));
    strokeWidth -= Math.Max(1, strokeWidth / 2);

    if (graph.Model.IsEdge(state.Cell))
    {
        int ms = 0;

        // Marker size only applies when an arrow head is configured.
        if (style.ContainsKey(mxConstants.STYLE_ENDARROW)
            || style.ContainsKey(mxConstants.STYLE_STARTARROW))
        {
            ms = (int)Math.Round(mxConstants.DEFAULT_MARKERSIZE * scale);
        }

        // Adds the strokewidth
        rect.Grow(ms + strokeWidth);

        // Adds worst case border for an arrow shape
        if (mxUtils.GetString(style, mxConstants.STYLE_SHAPE, "").Equals(
            mxConstants.SHAPE_ARROW))
        {
            rect.Grow(mxConstants.ARROW_WIDTH / 2);
        }
    }
    else
    {
        rect.Grow(strokeWidth);
    }

    // Adds extra pixels for the shadow
    if (mxUtils.IsTrue(style, mxConstants.STYLE_SHADOW))
    {
        rect.Width += mxConstants.SHADOW_OFFSETX;
        rect.Height += mxConstants.SHADOW_OFFSETY;
    }

    // Adds oversize images in labels
    if (mxUtils.GetString(style, mxConstants.STYLE_SHAPE, "").Equals(
        mxConstants.SHAPE_LABEL))
    {
        if (mxUtils.GetString(style, mxConstants.STYLE_IMAGE) != null)
        {
            // Image size in screen units.
            double w = mxUtils.GetInt(style,
                mxConstants.STYLE_IMAGE_WIDTH,
                mxConstants.DEFAULT_IMAGESIZE) * scale;
            double h = mxUtils.GetInt(style,
                mxConstants.STYLE_IMAGE_HEIGHT,
                mxConstants.DEFAULT_IMAGESIZE) * scale;

            double x = state.X;
            double y = 0;

            string imgAlign = mxUtils
                .GetString(style, mxConstants.STYLE_IMAGE_ALIGN,
                    mxConstants.ALIGN_LEFT);
            string imgValign = mxUtils.GetString(style,
                mxConstants.STYLE_IMAGE_VERTICAL_ALIGN,
                mxConstants.ALIGN_MIDDLE);

            // Horizontal placement of the image within the cell.
            if (imgAlign.Equals(mxConstants.ALIGN_RIGHT))
            {
                x += state.Width - w;
            }
            else if (imgAlign.Equals(mxConstants.ALIGN_CENTER))
            {
                x += (state.Width - w) / 2;
            }

            // Vertical placement of the image within the cell.
            if (imgValign.Equals(mxConstants.ALIGN_TOP))
            {
                y = state.Y;
            }
            else if (imgValign.Equals(mxConstants.ALIGN_BOTTOM))
            {
                y = state.Y + state.Height - h;
            }
            else
            {
                y = state.Y + (state.Height - h) / 2;
            }

            rect.Add(new mxRectangle(x, y, w, h));
        }
    }

    // Adds the rotated bounds to the bounding box if the
    // shape is rotated
    double rotation = mxUtils.GetDouble(style, mxConstants.STYLE_ROTATION);
    mxRectangle bbox = mxUtils.GetBoundingBox(rect, rotation);

    // Add the rotated bounding box to the non-rotated so
    // that all handles are also covered
    if (bbox != null)
    {
        rect.Add(bbox);
    }

    // Unifies the cell bounds and the label bounds
    if (!mxUtils.GetString(style, mxConstants.STYLE_OVERFLOW, "").Equals("hidden"))
    {
        rect.Add(state.LabelBounds);
    }

    state.BoundingBox = rect;

    return rect;
}
/// <summary>
/// Sets the initial absolute terminal points in the given state before the
/// edge style is computed.
/// </summary>
/// <param name="edge">Cell state whose initial terminal points should be updated.</param>
/// <param name="source">Cell state which represents the source terminal.</param>
/// <param name="target">Cell state which represents the target terminal.</param>
public void UpdateFixedTerminalPoints(mxCellState edge, mxCellState source, mxCellState target)
{
    // Resolve each end against its connection constraint, if one is set.
    mxConnectionConstraint srcConstraint = graph.GetConnectionConstraint(edge, source, true);
    UpdateFixedTerminalPoint(edge, source, true, srcConstraint);

    mxConnectionConstraint trgConstraint = graph.GetConnectionConstraint(edge, target, false);
    UpdateFixedTerminalPoint(edge, target, false, trgConstraint);
}
/// <summary>
/// Sets the fixed source or target terminal point on the given edge.
/// </summary>
/// <param name="edge">State whose terminal point should be updated.</param>
/// <param name="terminal">State which represents the actual terminal.</param>
/// <param name="source">Boolean that specifies if the terminal is the source.</param>
/// <param name="constraint">Constraint that specifies the connection.</param>
public void UpdateFixedTerminalPoint(mxCellState edge, mxCellState terminal,
    bool source, mxConnectionConstraint constraint)
{
    mxPoint point = (constraint != null)
        ? graph.GetConnectionPoint(terminal, constraint)
        : null;

    // With no terminal and no constraint point, fall back to the fixed
    // point stored in the edge geometry, transformed to absolute coords.
    if (point == null && terminal == null)
    {
        mxGeometry geometry = graph.GetCellGeometry(edge.Cell);
        point = geometry.GetTerminalPoint(source);

        if (point != null)
        {
            mxPoint origin = edge.Origin;
            point = new mxPoint(scale * (translate.X + point.X + origin.X),
                scale * (translate.Y + point.Y + origin.Y));
        }
    }

    edge.SetAbsoluteTerminalPoint(point, source);
}
/// <summary>
/// Updates the absolute points in the given state using the specified array
/// of points as the relative points. Keeps the fixed first and last points
/// and either lets the edge style route the middle, or transforms the
/// user-defined control points.
/// </summary>
/// <param name="edge">Cell state whose absolute points should be updated.</param>
/// <param name="points">Array of points that constitute the relative points.</param>
/// <param name="source">Cell that represents the source terminal.</param>
/// <param name="target">Cell that represents the target terminal.</param>
public void UpdatePoints(mxCellState edge, List<mxPoint> points, mxCellState source, mxCellState target)
{
    if (edge == null)
    {
        return;
    }

    // Always keep the fixed start point computed earlier.
    List<mxPoint> absolute = new List<mxPoint>();
    absolute.Add(edge.AbsolutePoints[0]);

    mxEdgeStyleFunction edgeStyle = GetEdgeStyle(edge, points, source, target);

    if (edgeStyle != null)
    {
        // Let the edge style compute the intermediate points.
        mxCellState sourcePort = GetTerminalPort(edge, source, true);
        mxCellState targetPort = GetTerminalPort(edge, target, false);
        edgeStyle(edge, sourcePort, targetPort, points, absolute);
    }
    else if (points != null)
    {
        // No edge style: transform the user-defined control points.
        foreach (mxPoint point in points)
        {
            if (point is mxPoint)
            {
                absolute.Add(TransformControlPoint(edge, point.Clone()));
            }
        }
    }

    // And keep the fixed end point.
    List<mxPoint> previous = edge.AbsolutePoints;
    absolute.Add(previous[previous.Count - 1]);

    edge.AbsolutePoints = absolute;
}
/// <summary>
/// Transforms the given control point to an absolute point by applying the
/// state's origin, the view translation and the view scale.
/// </summary>
public mxPoint TransformControlPoint(mxCellState state, mxPoint pt)
{
    mxPoint origin = state.Origin;
    double absX = scale * (pt.X + translate.X + origin.X);
    double absY = scale * (pt.Y + translate.Y + origin.Y);

    return new mxPoint(absX, absY);
}
/// <summary>
/// Returns the edge style function to be used to render the given edge
/// state. Self-loops use STYLE_LOOP (falling back to the graph's default
/// loop style); other edges use STYLE_EDGE unless STYLE_NOEDGESTYLE is
/// set. String values are resolved via the style registry or evaluated.
/// </summary>
public mxEdgeStyleFunction GetEdgeStyle(mxCellState edge, List<mxPoint> points,
    Object source, Object target)
{
    object style = null;

    if (source != null && source == target)
    {
        // Self-loop: prefer the explicit loop style, else the default.
        edge.Style.TryGetValue(mxConstants.STYLE_LOOP, out style);
        style = style ?? graph.DefaultLoopStyle;
    }
    else if (!mxUtils.IsTrue(edge.Style,
        mxConstants.STYLE_NOEDGESTYLE, false))
    {
        edge.Style.TryGetValue(mxConstants.STYLE_EDGE, out style);
    }

    // Converts string values to objects via registry lookup or evaluation.
    if (style is String)
    {
        string name = style.ToString();
        Object resolved = mxStyleRegistry.GetValue(name);
        style = resolved ?? mxUtils.Eval(name);
    }

    // Anything that is not an edge style function yields null.
    return style as mxEdgeStyleFunction;
}
/// <summary>
/// Updates the terminal points in the given state after the edge style was
/// computed for the edge. An end point is only computed when it is still
/// missing and a terminal state exists for that end.
/// </summary>
/// <param name="state">State whose terminal points should be updated.</param>
/// <param name="source">State that represents the source terminal.</param>
/// <param name="target">State that represents the target terminal.</param>
public void UpdateFloatingTerminalPoints(mxCellState state, mxCellState source, mxCellState target)
{
    int last = state.AbsolutePointCount() - 1;
    mxPoint startPoint = state.AbsolutePoints[0];
    mxPoint endPoint = state.AbsolutePoints[last];

    // The target end is resolved before the source end, matching the
    // original ordering.
    if (endPoint == null && target != null)
    {
        UpdateFloatingTerminalPoint(state, target, source, false);
    }

    if (startPoint == null && source != null)
    {
        UpdateFloatingTerminalPoint(state, source, target, true);
    }
}
/// <summary>
/// Updates the absolute terminal point in the given state for the given
/// start and end state, where start is the source if source is true.
/// </summary>
/// <param name="edge">State whose terminal point should be updated.</param>
/// <param name="start">for the terminal on "this" side of the edge.</param>
/// <param name="end">for the terminal on the other side of the edge.</param>
/// <param name="source">Boolean indicating if start is the source terminal state.</param>
public void UpdateFloatingTerminalPoint(mxCellState edge, mxCellState start,
    mxCellState end, bool source)
{
    // The actual terminal may be replaced by a port defined in the style.
    start = GetTerminalPort(edge, start, source);

    mxPoint next = GetNextPoint(edge, end, source);

    // Total spacing = general perimeter spacing plus the end-specific one.
    string spacingKey = source
        ? mxConstants.STYLE_SOURCE_PERIMETER_SPACING
        : mxConstants.STYLE_TARGET_PERIMETER_SPACING;
    double border = mxUtils.GetDouble(edge.Style, mxConstants.STYLE_PERIMETER_SPACING)
        + mxUtils.GetDouble(edge.Style, spacingKey);

    mxPoint point = GetPerimeterPoint(start, next, graph.IsOrthogonal(edge), border);
    edge.SetAbsoluteTerminalPoint(point, source);
}
/// <summary>
/// Returns the given terminal or the port defined in the given edge state
/// if a cell state exists for that port.
/// </summary>
public mxCellState GetTerminalPort(mxCellState state, mxCellState terminal, bool source)
{
    string portKey = source
        ? mxConstants.STYLE_SOURCE_PORT
        : mxConstants.STYLE_TARGET_PORT;
    string portId = mxUtils.GetString(state.Style, portKey);

    // Port lookup by id requires an mxGraphModel.
    if (portId != null && graph.Model is mxGraphModel)
    {
        Object portCell = ((mxGraphModel)graph.Model).GetCell(portId);
        mxCellState portState = GetState(portCell);

        // Only uses ports where a cell state exists
        if (portState != null)
        {
            terminal = portState;
        }
    }

    return terminal;
}
/// <summary>
/// Returns the intersection of the terminal's perimeter with the line from
/// the terminal's center to the given point, with no extra border.
/// </summary>
public mxPoint GetPerimeterPoint(mxCellState terminal, mxPoint next, bool orthogonal)
{
    return GetPerimeterPoint(terminal, next, orthogonal, 0);
}
/// <summary>
/// Returns a point that defines the location of the intersection point between
/// the perimeter and the line between the center of the shape and the given point.
/// </summary>
/// <param name="terminal">State for the source or target terminal.</param>
/// <param name="next">Point that lies outside of the given terminal.</param>
/// <param name="orthogonal">Specifies if the orthogonal projection onto
/// the perimeter should be returned. If this is false then the intersection
/// of the perimeter and the line between the next and the center point is
/// returned.</param>
/// <param name="border">Optional border between the perimeter and the shape.</param>
public mxPoint GetPerimeterPoint(mxCellState terminal, mxPoint next, bool orthogonal, double border)
{
    if (terminal == null)
    {
        return null;
    }

    mxPoint result = null;
    mxPerimeterFunction perimeter = GetPerimeterFunction(terminal);

    if (perimeter != null && next != null)
    {
        mxRectangle bounds = GetPerimeterBounds(terminal, border);

        // Degenerate (zero-size) bounds have no usable perimeter.
        if (bounds.Width > 0 || bounds.Height > 0)
        {
            result = perimeter(bounds, terminal, next, orthogonal);
        }
    }

    // Fall back to the center of the terminal.
    return result ?? GetPoint(terminal);
}
/// <summary>
/// Returns the x-coordinate of the center point for automatic routing.
/// STYLE_ROUTING_CENTER_X shifts the center as a fraction of the width.
/// </summary>
/// <returns>Returns the x-coordinate of the routing center point.</returns>
public double GetRoutingCenterX(mxCellState state)
{
    float factor = 0;

    if (state.Style != null)
    {
        factor = mxUtils.GetFloat(state.Style, mxConstants.STYLE_ROUTING_CENTER_X);
    }

    return state.GetCenterX() + factor * state.Width;
}
/// <summary>
/// Returns the y-coordinate of the center point for automatic routing.
/// STYLE_ROUTING_CENTER_Y shifts the center as a fraction of the height.
/// </summary>
/// <returns>Returns the y-coordinate of the routing center point.</returns>
public double GetRoutingCenterY(mxCellState state)
{
    float factor = 0;

    if (state.Style != null)
    {
        factor = mxUtils.GetFloat(state.Style, mxConstants.STYLE_ROUTING_CENTER_Y);
    }

    return state.GetCenterY() + factor * state.Height;
}
/// <summary>
/// Returns the perimeter bounds for the given terminal, edge pair.
/// </summary>
/// <param name="terminal">Cell state whose perimeter bounds are requested;
/// may be null.</param>
/// <param name="border">Base border, added to the terminal's
/// STYLE_PERIMETER_SPACING and scaled by the view scale.</param>
/// <returns>The scaled perimeter bounds, or null if terminal is null.</returns>
public mxRectangle GetPerimeterBounds(mxCellState terminal, double border)
{
    // Bug fix: the original dereferenced a null terminal after the null
    // check, throwing NullReferenceException. Return null instead.
    if (terminal == null)
    {
        return null;
    }

    border += mxUtils.GetDouble(terminal.Style, mxConstants.STYLE_PERIMETER_SPACING);

    return terminal.GetPerimeterBounds(border * scale);
}
/// <summary>
/// Returns the perimeter function for the given state. String values in
/// STYLE_PERIMETER are resolved via the style registry or evaluated.
/// </summary>
public mxPerimeterFunction GetPerimeterFunction(mxCellState state)
{
    object perimeter;
    state.Style.TryGetValue(mxConstants.STYLE_PERIMETER, out perimeter);

    // Converts string values to objects via registry lookup or evaluation.
    if (perimeter is String)
    {
        string name = perimeter.ToString();
        Object resolved = mxStyleRegistry.GetValue(name);
        perimeter = resolved ?? mxUtils.Eval(name);
    }

    // Anything that is not a perimeter function yields null.
    return perimeter as mxPerimeterFunction;
}
/// <summary>
/// Returns the nearest point in the list of absolute points, or the center
/// of the opposite terminal.
/// </summary>
/// <param name="edge">State that represents the edge.</param>
/// <param name="opposite">State that represents the opposite terminal.</param>
/// <param name="source">Boolean indicating if the next point for the source or target
/// should be returned.</param>
public mxPoint GetNextPoint(mxCellState edge, mxCellState opposite, bool source)
{
    mxPoint next = null;
    List<mxPoint> points = edge.AbsolutePoints;

    if (points != null && points.Count >= 2)
    {
        // Second point from the start for the source end, second from the
        // end for the target end.
        int count = points.Count;
        next = source
            ? points[Math.Min(1, count - 1)]
            : points[Math.Max(0, count - 2)];
    }

    if (next == null && opposite != null)
    {
        next = new mxPoint(opposite.GetCenterX(), opposite.GetCenterY());
    }

    return next;
}
/// <summary>
/// Returns the nearest ancestor terminal that is visible. The edge appears
/// to be connected to this terminal on the display.
/// </summary>
/// <param name="edge">Cell whose visible terminal should be returned.</param>
/// <param name="source">Boolean that specifies if the source or target terminal
/// should be returned.</param>
/// <returns>Returns the visible source or target terminal.</returns>
public Object GetVisibleTerminal(Object edge, bool source)
{
    mxIGraphModel model = graph.Model;
    Object result = model.GetTerminal(edge, source);
    Object best = result;

    // Walk up the parent chain: while the current best is hidden, or the
    // current ancestor is collapsed (hiding its children), promote the
    // ancestor to be the visible terminal.
    while (result != null)
    {
        if (!graph.IsCellVisible(best) ||
            graph.IsCellCollapsed(result))
        {
            best = result;
        }

        result = model.GetParent(result);
    }

    // Checks if the result is not a layer: cells directly under the root
    // are layers and cannot act as terminals.
    if (model.GetParent(best) == model.Root)
    {
        best = null;
    }

    return best;
}
/// <summary>
/// Updates the given state using the bounding box of the absolute points.
/// Also updates terminal distance, length and segments.
/// </summary>
/// <param name="state">Cell state whose bounds should be updated.</param>
public void UpdateEdgeBounds(mxCellState state)
{
    List<mxPoint> points = state.AbsolutePoints;
    mxPoint p0 = points[0];
    mxPoint pe = points[points.Count - 1];

    if (p0 == null || pe == null)
    {
        // Note: This is an error that normally occurs
        // if a connected edge has a null-terminal, ie.
        // edge.source == null or edge.target == null.
        states.Remove(state.Cell);
    }
    else
    {
        // Straight-line distance between the two end points.
        if (p0.X != pe.X || p0.Y != pe.Y)
        {
            double dx = pe.X - p0.X;
            double dy = pe.Y - p0.Y;
            state.TerminalDistance = Math.Sqrt(dx * dx + dy * dy);
        }
        else
        {
            state.TerminalDistance = 0;
        }

        double length = 0;
        double[] segments = new double[points.Count - 1];
        mxPoint pt = p0;

        // NOTE(review): p0 was null-checked above, so pt can never be null
        // here and the else-branch below looks unreachable — confirm
        // before simplifying.
        if (pt != null)
        {
            double minX = pt.X;
            double minY = pt.Y;
            double maxX = minX;
            double maxY = minY;

            // Accumulate per-segment lengths and the bounding box of all
            // non-null points. Null points leave a zero-length segment.
            for (int i = 1; i < points.Count; i++)
            {
                mxPoint tmp = points[i];

                if (tmp != null)
                {
                    double dx = pt.X - tmp.X;
                    double dy = pt.Y - tmp.Y;

                    double segment = Math.Sqrt(dx * dx + dy * dy);
                    segments[i - 1] = segment;
                    length += segment;
                    pt = tmp;

                    minX = Math.Min(pt.X, minX);
                    minY = Math.Min(pt.Y, minY);
                    maxX = Math.Max(pt.X, maxX);
                    maxY = Math.Max(pt.Y, maxY);
                }
            }

            state.Length = length;
            state.Segments = segments;
            double markerSize = 1; // TODO: include marker size

            state.X = minX;
            state.Y = minY;
            state.Width = Math.Max(markerSize, maxX - minX);
            state.Height = Math.Max(markerSize, maxY - minY);
        }
        else
        {
            state.Length = 0;
        }
    }
}
/// <summary>
/// Returns the absolute center point along the given edge.
/// </summary>
public mxPoint GetPoint(mxCellState state)
{
    // A null geometry yields the midpoint of the edge.
    return GetPoint(state, null);
}
/// <summary>
/// Returns the absolute point on the edge for the given relative
/// geometry as a point. The edge is represented by the given cell state.
/// </summary>
/// <param name="state">Represents the state of the parent edge.</param>
/// <param name="geometry">Represents the relative location; null selects
/// the midpoint of the edge.</param>
public mxPoint GetPoint(mxCellState state, mxGeometry geometry)
{
    // Default to the center of the state's bounds.
    double x = state.GetCenterX();
    double y = state.GetCenterY();

    if (state.Segments != null && (geometry == null || geometry.Relative))
    {
        // geometry.X maps to a position along the edge length; a null
        // geometry gives gx = 0, i.e. dist = 0.5 * length (the midpoint).
        double gx = (geometry != null) ? geometry.X / 2 : 0;
        int pointCount = state.AbsolutePoints.Count;
        double dist = (gx + 0.5) * state.Length;
        double[] segments = state.Segments;

        // Walk the segments to find the one containing the target distance.
        double segment = segments[0];
        double length = 0;
        int index = 1;

        while (dist > length + segment && index < pointCount - 1)
        {
            length += segment;
            segment = segments[index++];
        }

        // Fractional position within the located segment (0 for a
        // zero-length segment to avoid division by zero).
        double factor = (segment == 0) ? 0 : (dist - length) / segment;
        mxPoint p0 = state.AbsolutePoints[index - 1];
        mxPoint pe = state.AbsolutePoints[index];

        if (p0 != null &&
            pe != null)
        {
            double gy = 0;
            double offsetX = 0;
            double offsetY = 0;

            if (geometry != null)
            {
                gy = geometry.Y;
                mxPoint offset = geometry.Offset;

                if (offset != null)
                {
                    offsetX = offset.X;
                    offsetY = offset.Y;
                }
            }

            // Interpolate along the segment, shift perpendicular to it by
            // gy, and apply the scaled offset.
            double dx = pe.X - p0.X;
            double dy = pe.Y - p0.Y;
            double nx = (segment == 0) ? 0 : dy / segment;
            double ny = (segment == 0) ? 0 : dx / segment;

            x = p0.X + dx * factor + (nx * gy + offsetX) * scale;
            y = p0.Y + dy * factor - (ny * gy - offsetY) * scale;
        }
    }
    else if (geometry != null)
    {
        // Non-relative geometry: just apply the offset to the center.
        mxPoint offset = geometry.Offset;

        if (offset != null)
        {
            x += offset.X;
            y += offset.Y;
        }
    }

    return new mxPoint(x, y);
}
/// <summary>
/// Returns the state for the given cell or null if no state is defined for
/// the cell.
/// </summary>
/// <param name="cell">Cell whose state should be returned.</param>
/// <returns>Returns the state for the given cell.</returns>
public mxCellState GetState(Object cell)
{
    // Never create a state implicitly here.
    return GetState(cell, false);
}
/// <summary>
/// Returns the object that maps from cells to states.
/// NOTE(review): the cells parameter is ignored — the full state map is
/// returned regardless; confirm this matches callers' expectations.
/// </summary>
public Dictionary<Object, mxCellState> GetStates(Object[] cells)
{
    return states;
}
/// <summary>
/// Returns the states for the given array of cells. The result contains
/// only the states that are not null, so it may have fewer elements than
/// the given array.
/// </summary>
/// <param name="cells">Cells whose states should be returned; a null array
/// yields an empty result.</param>
public mxCellState[] GetCellStates(Object[] cells)
{
    // Robustness: a null input yields an empty result instead of an NRE.
    if (cells == null)
    {
        return new mxCellState[0];
    }

    List<mxCellState> result = new List<mxCellState>(cells.Length);

    foreach (Object cell in cells)
    {
        mxCellState state = GetState(cell);

        if (state != null)
        {
            result.Add(state);
        }
    }

    return result.ToArray();
}
/// <summary>
/// Returns the cell state for the given cell. If create is true, then
/// the state is created (and cached) if it does not yet exist and the
/// cell is visible.
/// </summary>
/// <param name="cell">Cell for which a new state should be returned.</param>
/// <param name="create">Boolean indicating if a new state should be created if it
/// does not yet exist.</param>
/// <returns>Returns the state for the given cell, or null.</returns>
public mxCellState GetState(Object cell, bool create)
{
    mxCellState state = null;

    if (cell != null)
    {
        // Single dictionary lookup instead of ContainsKey + indexer.
        if (!states.TryGetValue(cell, out state)
            && create && graph.IsCellVisible(cell))
        {
            state = CreateState(cell);
            states[cell] = state;
        }
    }

    return state;
}
/// <summary>
/// Shortcut to RemoveState with recurse set to false.
/// </summary>
/// <param name="cell">Cell whose state should be removed.</param>
/// <returns>Returns the removed state, or null if none was cached.</returns>
public mxCellState RemoveState(Object cell)
{
    // Delegate to the two-argument overload instead of duplicating the
    // lookup-and-remove logic.
    return RemoveState(cell, false);
}
/// <summary>
/// Removes and returns the mxCellState for the given cell, optionally
/// removing the states of all descendants first.
/// </summary>
/// <param name="cell">mxCell for which the mxCellState should be removed.</param>
/// <param name="recurse">Whether the states of all children should also be removed.</param>
/// <returns>Returns the mxCellState that has been removed.</returns>
public mxCellState RemoveState(Object cell, Boolean recurse)
{
    if (recurse)
    {
        mxIGraphModel model = graph.Model;
        int childCount = model.GetChildCount(cell);

        for (int i = 0; i < childCount; i++)
        {
            RemoveState(model.GetChildAt(cell, i), true);
        }
    }

    // Single dictionary lookup instead of ContainsKey + indexer + Remove.
    mxCellState state;

    if (states.TryGetValue(cell, out state))
    {
        states.Remove(cell);
    }

    return state;
}
/// <summary>
/// Creates and returns a new cell state for the given cell, resolving the
/// cell's style via the graph.
/// </summary>
/// <param name="cell">Cell for which a new state should be created.</param>
/// <returns>Returns a new state for the given cell.</returns>
public mxCellState CreateState(Object cell)
{
    return new mxCellState(this, cell, graph.GetCellStyle(cell));
}
}
}
| |
// -----------------------------------------------------------------------
// <copyright file="AddInstrumentIbViewModel.cs" company="">
// Copyright 2017 Alexander Soffronow Pagonidis
// </copyright>
// -----------------------------------------------------------------------
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Data.Entity;
using System.Linq;
using System.Reactive;
using System.Reactive.Linq;
using System.Threading.Tasks;
using System.Windows;
using EntityData;
using Krs.Ats.IBNet;
using MahApps.Metro.Controls.Dialogs;
using NLog;
using QDMS;
using ReactiveUI;
namespace QDMSServer.ViewModels
{
public class AddInstrumentIbViewModel : ReactiveObject, IDisposable
{
/// <summary>
/// Builds the view-model: connects a dedicated IBClient, wires up its
/// contract-details events and Rx subscriptions, and loads exchanges,
/// datasource and instrument types from the local database.
/// </summary>
/// <param name="dialogService">Dialog coordinator used to show dialogs.</param>
/// <param name="client">QDMS data client used to persist instruments.</param>
public AddInstrumentIbViewModel(IDialogCoordinator dialogService, IDataClient client)
{
    _dialogService = dialogService;
    _qdmsClient = client;
    CreateCommands();

    Random r = new Random();
    _client = new IBClient();
    //random connection id for this one...
    _client.Connect(Properties.Settings.Default.ibClientHost, Properties.Settings.Default.ibClientPort, r.Next(1000, 200000));

    AddedInstruments = new List<Instrument>();

    // Interactive-search handlers; batch mode swaps these out temporarily.
    _client.ContractDetails += _client_ContractDetails;
    _client.ContractDetailsEnd += _client_ContractDetailsEnd;

    Observable
        .FromEventPattern<ConnectionClosedEventArgs>(_client, "ConnectionClosed")
        .Subscribe(e => _logger.Warn("IB Instrument Adder connection closed."));

    Observable
        .FromEventPattern<NextValidIdEventArgs>(_client, "NextValidId")
        .Subscribe(e => _nextRequestID = e.EventArgs.OrderId);

    // IB reports "no security definition" as an error; that case is an
    // expected empty search result, so it is surfaced but not logged.
    Observable
        .FromEventPattern<ErrorEventArgs>(_client, "Error")
        .Subscribe(e =>
        {
            if (e.EventArgs.ErrorMsg != "No security definition has been found for the request")
            {
                _logger.Error($"{e.EventArgs.ErrorCode} - {e.EventArgs.ErrorMsg}");
            }
            Status = e.EventArgs.ErrorMsg;
            SearchUnderway = false;
        });

    Exchanges = new ObservableCollection<string> { "All" };
    _exchanges = new Dictionary<string, Exchange>();

    // Cache the IB datasource row and all exchanges for later lookups.
    using (var context = new MyDBContext())
    {
        _thisDS = context.Datasources.First(x => x.Name == "Interactive Brokers");

        foreach (Exchange e in context.Exchanges.Include(x => x.Sessions))
        {
            Exchanges.Add(e.Name);
            _exchanges.Add(e.Name, e);
        }
    }

    Instruments = new ObservableCollection<Instrument>();
    InstrumentTypes = new ObservableCollection<InstrumentType>();

    //list the available types from our enum
    var values = MyUtils.GetEnumValues<InstrumentType>();
    foreach (var val in values)
    {
        InstrumentTypes.Add(val);
    }
}
/// <summary>
/// Unsubscribes the event handlers registered in the constructor and
/// disposes the underlying IB client.
/// </summary>
public void Dispose()
{
    // Unhook the handlers wired up in the constructor so the client's
    // event list does not keep this view-model alive.
    _client.ContractDetails -= _client_ContractDetails;
    _client.ContractDetailsEnd -= _client_ContractDetailsEnd;
    _client.Dispose();
}
/// <summary>
/// Collects contract details arriving during a batch add and updates the
/// status line with progress.
/// </summary>
private void _client_BatchContractDetail(object sender, ContractDetailsEventArgs e)
{
    int received = _totalSymbols - _queuedSymbols.Count;
    Status = string.Format("{0}/{1} symbols received", received, _totalSymbols);
    _tmpContractDetails.Add(e);
}
/// <summary>
/// Called when all contract details for the current batch symbol have
/// arrived: adds the instrument if exactly one unambiguous contract was
/// returned, then continues with the next queued symbol or finishes the
/// batch. NOTE: async void is required by the event-handler signature;
/// exceptions thrown here cannot be observed by callers.
/// </summary>
private async void _client_BatchContractDetailsEnd(object sender, ContractDetailsEndEventArgs e)
{
    if (_tmpContractDetails.Count == 1) //we only want one because otherwise there is ambiguity in the contracts
    {
        Instrument instrument = ContractToInstrument(_tmpContractDetails[0]);

        if (instrument != null && await TryAddInstrument(instrument).ConfigureAwait(true) != null)
        {
            //successfully added the symbol
            _symbolsAdded.Add(instrument.Symbol);
        }
    }
    else
    {
        // Zero or multiple contracts: the symbol cannot be added safely.
        _logger.Info("Could not batch add " + _tmpContractDetails.FirstOrDefault()?.ContractDetails.Summary.Symbol + ", " + _tmpContractDetails.Count + " ambiguous contracts found.");
    }

    // Reset the buffer for the next symbol's contract details.
    _tmpContractDetails.Clear();

    if (_queuedSymbols.Count == 0)
    {
        //in this case, we have completed all the requests
        BatchRequestJobCompleted();
    }
    else
    {
        //we're not done, send the next request
        SendNextRequestInBatch();
    }
}
/// <summary>
/// Handles contract details arriving for an interactive search and adds
/// the resulting instrument to the grid on the UI thread.
/// </summary>
private void _client_ContractDetails(object sender, ContractDetailsEventArgs e)
{
    Instrument instrument = ContractToInstrument(e);

    if (instrument != null)
    {
        // Instruments is bound to the UI, so mutate it on the dispatcher.
        Application.Current.Dispatcher.Invoke(() => Instruments.Add(instrument));
    }
}
/// <summary>
/// Signals the end of an interactive contract search: re-enables the
/// search commands and reports how many contracts arrived.
/// </summary>
void _client_ContractDetailsEnd(object sender, ContractDetailsEndEventArgs e)
{
    SearchUnderway = false; //re-enables the search commands
    Status = string.Format("{0} contracts arrived", Instruments.Count);
}
/// <summary>
/// Finishes a batch-add job: reports the result, restores the interactive
/// event handlers and returns any failed symbols to the input textbox.
/// </summary>
private void BatchRequestJobCompleted()
{
    Status = string.Format("Batch addition complete: {0} of {1} successfully added",
        _symbolsAdded.Count, _totalSymbols);
    SearchUnderway = false; //re-enables the search commands

    // Swap the batch handlers back out for the interactive ones.
    _client.ContractDetails -= _client_BatchContractDetail;
    _client.ContractDetails += _client_ContractDetails;
    _client.ContractDetailsEnd -= _client_BatchContractDetailsEnd;
    _client.ContractDetailsEnd += _client_ContractDetailsEnd;
    _client.Error -= _client_BatchAddingError;

    // Some symbols may not have been added; put them back in the textbox
    // so the user can see which ones failed.
    MultiSymbolText = string.Join(", ", _batchAllSymbols.Except(_symbolsAdded));
}
/// <summary>
/// Converts TWS contract details into an Instrument tied to this datasource,
/// resolving exchange and primary exchange against the local exchange dictionary.
/// </summary>
/// <param name="e">Event args holding the contract details received from TWS.</param>
/// <returns>The populated instrument, or null if a referenced exchange is not in the database.</returns>
private Instrument ContractToInstrument(ContractDetailsEventArgs e)
{
var instrument = TWSUtils.ContractDetailsToInstrument(e.ContractDetails);
instrument.Datasource = _thisDS;
instrument.DatasourceID = _thisDS.ID;
var summary = e.ContractDetails.Summary;
// TryGetValue avoids the ContainsKey + indexer double dictionary lookup.
if (summary.Exchange != null && _exchanges.TryGetValue(summary.Exchange, out var exchange))
{
instrument.Exchange = exchange;
instrument.ExchangeID = exchange.ID;
}
else
{
_logger.Error("Could not find exchange in database: " + summary.Exchange);
return null;
}
if (summary.PrimaryExchange != null && _exchanges.TryGetValue(summary.PrimaryExchange, out var primaryExchange))
{
instrument.PrimaryExchange = primaryExchange;
instrument.PrimaryExchangeID = primaryExchange.ID;
}
else if (!string.IsNullOrEmpty(summary.PrimaryExchange))
{
// A primary exchange was specified but is unknown locally: treat as a failure,
// matching the handling of the main exchange above.
_logger.Error("Could not find exchange in database: " + summary.PrimaryExchange);
return null;
}
return instrument;
}
/// <summary>
/// Wires up the ReactiveUI commands. Search and BatchAddMultipleSymbols are
/// disabled while SearchUnderway is true.
/// </summary>
private void CreateCommands()
{
AddSelectedInstruments = ReactiveCommand.CreateFromTask<IList>(async instruments =>
await ExecuteAddSelectedInstruments(instruments).ConfigureAwait(true));
Search = ReactiveCommand.Create(ExecuteSearch, this.WhenAny(x => x.SearchUnderway, x => !x.Value).ObserveOnDispatcher());
BatchAddMultipleSymbols = ReactiveCommand.Create(ExecuteBatchAddSymbols, this.WhenAny(x => x.SearchUnderway, x => !x.Value).ObserveOnDispatcher());
}
/// <summary>
/// Tries to add every instrument the user selected in the grid, reporting how many succeeded.
/// </summary>
/// <param name="selectedInstruments">Instruments selected in the UI; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when selectedInstruments is null.</exception>
private async Task ExecuteAddSelectedInstruments(IList selectedInstruments)
{
if (selectedInstruments == null) throw new ArgumentNullException(nameof(selectedInstruments));
if (selectedInstruments.Count == 0) return;
int count = 0;
foreach (Instrument newInstrument in selectedInstruments)
{
// TryAddInstrument returns null on failure, non-null on success.
if (await TryAddInstrument(newInstrument).ConfigureAwait(true) != null)
{
count++;
}
}
Status = string.Format("{0}/{1} instruments added.", count, selectedInstruments.Count);
}
/// <summary>
/// Starts a batch addition from the comma-separated symbol list in MultiSymbolText:
/// swaps in the batch event handlers, queues the symbols, and kicks off the first request.
/// </summary>
private void ExecuteBatchAddSymbols()
{
if (string.IsNullOrEmpty(MultiSymbolText)) return;
List<string> symbols = MultiSymbolText
.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)
.Select(x => x.Trim())
.ToList();
if (symbols.Count == 0) return;
_totalSymbols = symbols.Count;
SearchUnderway = true; //disables the search commands
//we set up a queue and progressively empty it by querying the symbols one by one and adding what we can
// Swap the interactive handlers for the batch handlers; BatchRequestJobCompleted reverses this.
_client.ContractDetails += _client_BatchContractDetail;
_client.ContractDetails -= _client_ContractDetails;
_client.ContractDetailsEnd += _client_BatchContractDetailsEnd;
_client.ContractDetailsEnd -= _client_ContractDetailsEnd;
_client.Error += _client_BatchAddingError;
_symbolsAdded.Clear();
_batchAllSymbols.Clear();
_batchAllSymbols.AddRange(symbols);
foreach (string s in symbols)
{
_queuedSymbols.Enqueue(s);
}
//start the process of sending the queries
SendNextRequestInBatch();
}
/// <summary>
/// Error handler active during batch addition. A "no security definition" error means
/// the current symbol produced no ContractDetailsEnd event, so we must advance the
/// queue here or the batch would stall.
/// </summary>
// NOTE(review): matching on the exact error message text is fragile if the
// TWS API wording changes — confirm against the IB error-code list.
private void _client_BatchAddingError(object sender, ErrorEventArgs e)
{
if (e.ErrorMsg == "No security definition has been found for the request")
{
SendNextRequestInBatch();
}
}
/// <summary>
/// Runs an interactive search for the symbol currently entered in the Symbol box.
/// </summary>
private void ExecuteSearch()
{
Instruments.Clear();
SendContractDetailsRequest(Symbol);
}
/// <summary>
/// Builds a TWS Contract from the current UI filter values (type, exchange, currency,
/// expiry, strike) and requests its contract details. Disables searching until the
/// reply arrives.
/// </summary>
/// <param name="symbol">The ticker symbol to look up.</param>
private void SendContractDetailsRequest(string symbol)
{
var contract = new Contract
{
Symbol = symbol,
SecurityType = TWSUtils.SecurityTypeConverter(SelectedType),
Exchange = SelectedExchange == "All" ? "" : SelectedExchange,
IncludeExpired = IncludeExpired,
Currency = Currency
};
if (ExpirationDate.HasValue)
contract.Expiry = ExpirationDate.Value.ToString("yyyyMM");
if(Strike.HasValue)
{
contract.Strike = Strike.Value;
}
SearchUnderway = true; //disables the search commands
// NOTE(review): _nextRequestID is never incremented anywhere visible here, so every
// request appears to reuse the same ID — confirm whether IBClient requires unique IDs.
_client.RequestContractDetails(_nextRequestID, contract);
}
/// <summary>
/// During a batch symbol addition, sends the contract-details request for the next
/// queued symbol. Does nothing once the queue is empty.
/// </summary>
private void SendNextRequestInBatch()
{
if (_queuedSymbols.Count > 0)
{
SendContractDetailsRequest(_queuedSymbols.Dequeue());
}
}
/// <summary>
/// Sends the instrument to the QDMS server and records it in AddedInstruments on success.
/// </summary>
/// <param name="instrument">The instrument to add.</param>
/// <returns>The server-side instrument on success; null if addition failed</returns>
private async Task<Instrument> TryAddInstrument(Instrument instrument)
{
var result = await _qdmsClient.AddInstrument(instrument).ConfigureAwait(true);
// DisplayErrors returns true when the server response contained errors.
if (await result.DisplayErrors(this, _dialogService).ConfigureAwait(true))
{
//request failed
_logger.Error("IB add instrument failure: " + string.Join(",", result.Errors));
return null;
}
var addedInstrument = result.Result;
AddedInstruments.Add(addedInstrument);
return addedInstrument;
}
// Instruments successfully added to the server during this session.
public List<Instrument> AddedInstruments { get; }
// Command: add the instruments currently selected in the results grid.
public ReactiveCommand<IList, Unit> AddSelectedInstruments { get; private set; }
// Command: start a batch addition from MultiSymbolText.
public ReactiveCommand<Unit, Unit> BatchAddMultipleSymbols { get; private set; }
// Currency filter applied to contract-details requests.
public string Currency
{
get => _currency;
set => this.RaiseAndSetIfChanged(ref _currency, value);
}
// Exchange names available in the exchange filter dropdown.
public ObservableCollection<string> Exchanges { get; set; }
// Optional expiry filter; formatted as yyyyMM when sent to TWS.
public DateTime? ExpirationDate
{
get => _expirationDate;
set => this.RaiseAndSetIfChanged(ref _expirationDate, value);
}
// Whether expired contracts should be included in search results.
public bool IncludeExpired
{
get => _includeExpired;
set => this.RaiseAndSetIfChanged(ref _includeExpired, value);
}
// Search results shown in the UI grid.
public ObservableCollection<Instrument> Instruments { get; set; }
public ObservableCollection<InstrumentType> InstrumentTypes { get; set; }
/// <summary>
/// Used to add multiple symbols in a batch.
/// </summary>
public string MultiSymbolText
{
get => _multiSymbolText;
set => this.RaiseAndSetIfChanged(ref _multiSymbolText, value);
}
// Command: run an interactive search for the current Symbol.
public ReactiveCommand<Unit, Unit> Search { get; private set; }
// True while a search or batch addition is in progress; gates the commands above.
public bool SearchUnderway
{
get => _searchUnderway;
private set => this.RaiseAndSetIfChanged(ref _searchUnderway, value);
}
// Exchange filter; the literal "All" means no exchange restriction.
public string SelectedExchange
{
get => _selectedExchange;
set => this.RaiseAndSetIfChanged(ref _selectedExchange, value);
}
// Security-type filter for searches.
public InstrumentType SelectedType
{
get => _selectedType;
set => this.RaiseAndSetIfChanged(ref _selectedType, value);
}
// Status-bar text shown to the user.
public string Status
{
get => _status;
set => this.RaiseAndSetIfChanged(ref _status, value);
}
// Optional option-strike filter.
public double? Strike
{
get => _strike;
set => this.RaiseAndSetIfChanged(ref _strike, value);
}
// Symbol used for interactive searches.
public string Symbol
{
get => _symbol;
set => this.RaiseAndSetIfChanged(ref _symbol, value);
}
// TWS client used to request contract details.
private readonly IBClient _client;
// QDMS server client used to persist added instruments.
private readonly IDataClient _qdmsClient;
// Dialog coordinator used to surface server errors to the user.
private readonly IDialogCoordinator _dialogService;
// Exchange lookup keyed by exchange name, used by ContractToInstrument.
private readonly Dictionary<string, Exchange> _exchanges;
private readonly Logger _logger = LogManager.GetCurrentClassLogger();
/// <summary>
/// Used to hold the symbols when doing batch additions
/// </summary>
private readonly Queue<string> _queuedSymbols = new Queue<string>();
/// <summary>
/// When doing a batch addition, symbols successfully added are kept here
/// </summary>
private readonly List<string> _symbolsAdded = new List<string>();
/// <summary>
/// When doing batch addition, all symbols being processed are held here.
/// </summary>
private readonly List<string> _batchAllSymbols = new List<string>();
// The datasource this view model adds instruments for.
private readonly Datasource _thisDS;
/// <summary>
/// Holds ContractDetailsEventArgs which are then processed in _client_BatchContractDetailsEnd.
/// Can't process them one by one because we need there to only be one.
/// </summary>
private readonly List<ContractDetailsEventArgs> _tmpContractDetails = new List<ContractDetailsEventArgs>();
// Backing fields for the bindable properties above.
private string _currency;
private DateTime? _expirationDate;
private bool _includeExpired;
private int _nextRequestID;
private bool _searchUnderway;
private string _selectedExchange;
private InstrumentType _selectedType;
private string _status;
private double? _strike;
private string _symbol;
/// <summary>
/// Holds the total number of symbols to be done in a batch process
/// </summary>
private int _totalSymbols = 0;
private string _multiSymbolText;
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace System.Reflection.Metadata.Ecma335
{
internal class NamespaceCache
{
// Reader whose metadata this cache describes.
private readonly MetadataReader metadataReader;
// Guards lazy population of namespaceTable/rootNamespace (and the namespace list).
private readonly object namespaceTableAndListLock = new object();
// Lazily built: maps every known namespace handle (including aliases) to its data.
private Dictionary<NamespaceDefinitionHandle, NamespaceData> namespaceTable;
// Cached entry for the root (empty-name) namespace.
private NamespaceData rootNamespace;
private ImmutableArray<NamespaceDefinitionHandle> namespaceList;
/// <summary>
/// Creates an (unpopulated) cache over the given reader; tables are built lazily on first use.
/// </summary>
internal NamespaceCache(MetadataReader reader)
{
DebugCorlib.Assert(reader != null);
this.metadataReader = reader;
}
/// <summary>
/// Returns whether the namespaceTable has been created. If it hasn't, calling a GetXXX method
/// on this will probably have a very high amount of overhead.
/// </summary>
internal bool CacheIsRealized
{
get { return this.namespaceTable != null; }
}
/// <summary>
/// Returns the full dotted name of the namespace behind a handle that does not
/// itself carry a full name, by consulting the (lazily built) cache.
/// </summary>
internal string GetFullName(NamespaceDefinitionHandle handle)
{
DebugCorlib.Assert(!handle.HasFullName); // we should not hit the cache in this case.
NamespaceData data = GetNamespaceData(handle);
return data.FullName;
}
/// <summary>
/// Returns the data for the root (global) namespace, populating the cache first if needed.
/// </summary>
internal NamespaceData GetRootNamespace()
{
EnsureNamespaceTableIsPopulated();
DebugCorlib.Assert(rootNamespace != null);
return rootNamespace;
}
/// <summary>
/// Looks up the cached data for a namespace handle, populating the cache first if needed.
/// </summary>
/// <exception cref="BadImageFormatException">Thrown (via ThrowInvalidHandle) when the handle is unknown.</exception>
internal NamespaceData GetNamespaceData(NamespaceDefinitionHandle handle)
{
EnsureNamespaceTableIsPopulated();
NamespaceData result;
if (!namespaceTable.TryGetValue(handle, out result))
{
ThrowInvalidHandle();
}
return result;
}
// TODO: move throw helpers to common place.
// Non-inlined throw helper so the happy path in callers stays small and inlinable.
[MethodImplAttribute(MethodImplOptions.NoInlining)]
private static void ThrowInvalidHandle()
{
throw new BadImageFormatException(MetadataResources.InvalidHandle);
}
/// <summary>
/// This will return a StringHandle for the simple name of a namespace name at the given segment index.
/// If no segment index is passed explicitly or the "segment" index is greater than or equal to the number
/// of segments, then the last segment is used. "Segment" in this context refers to part of a namespace
/// name between dots.
///
/// Example: Given a NamespaceDefinitionHandle to "System.Collections.Generic.Test" called 'handle':
///
/// reader.GetString(GetSimpleName(handle)) == "Test"
/// reader.GetString(GetSimpleName(handle, 0)) == "System"
/// reader.GetString(GetSimpleName(handle, 1)) == "Collections"
/// reader.GetString(GetSimpleName(handle, 2)) == "Generic"
/// reader.GetString(GetSimpleName(handle, 3)) == "Test"
/// reader.GetString(GetSimpleName(handle, 1000)) == "Test"
/// </summary>
private StringHandle GetSimpleName(NamespaceDefinitionHandle fullNamespaceHandle, int segmentIndex = Int32.MaxValue)
{
StringHandle handleContainingSegment = fullNamespaceHandle.GetFullName();
DebugCorlib.Assert(!handleContainingSegment.IsVirtual);
// Scan the raw string heap for '.' separators, stopping after segmentIndex dots
// or at the end of the dotted name, whichever comes first.
int lastFoundIndex = fullNamespaceHandle.Index - 1;
int currentSegment = 0;
while (currentSegment < segmentIndex)
{
int currentIndex = this.metadataReader.StringStream.IndexOfRaw(lastFoundIndex + 1, '.');
if (currentIndex == -1)
{
break;
}
lastFoundIndex = currentIndex;
++currentSegment;
}
DebugCorlib.Assert(lastFoundIndex >= 0 || currentSegment == 0);
// + 1 because lastFoundIndex will either "point" to a '.', or will be -1. Either way,
// we want the next char.
uint resultIndex = (uint)(lastFoundIndex + 1);
return StringHandle.FromIndex(resultIndex).WithDotTermination();
}
/// <summary>
/// Two distinct namespace handles represent the same namespace if their full names are the same. This
/// method merges builders corresponding to such namespace handles.
/// </summary>
// Builds namespaceTable and rootNamespace under the lock; idempotent (early-outs if
// another thread already populated the table).
private void PopulateNamespaceTable()
{
lock (namespaceTableAndListLock)
{
if (this.namespaceTable != null)
{
return;
}
var namespaceBuilderTable = new Dictionary<NamespaceDefinitionHandle, NamespaceDataBuilder>();
// Make sure to add entry for root namespace. The root namespace is special in that even
// though it might not have types of its own it always has an equivalent representation
// as a nil handle and we don't want to handle it below as dot-terminated synthetic namespace.
// We use NamespaceDefinitionHandle.FromIndexOfFullName(0) instead of default(NamespaceDefinitionHandle) so
// that we never hand back a handle to the user that doesn't have a typeid as that prevents
// round-trip conversion to Handle and back. (We may discover other handle aliases for the
// root namespace (any nil/empty string will do), but we need this one to always be there.
NamespaceDefinitionHandle rootNamespace = NamespaceDefinitionHandle.FromIndexOfFullName(0);
namespaceBuilderTable.Add(
rootNamespace,
new NamespaceDataBuilder(
rootNamespace,
rootNamespace.GetFullName(),
String.Empty));
// Gather builders from type definitions and forwarders, then collapse duplicates
// and wire up the parent/child namespace tree (creating synthetic parents as needed).
PopulateTableWithTypeDefinitions(namespaceBuilderTable);
PopulateTableWithExportedTypes(namespaceBuilderTable);
Dictionary<string, NamespaceDataBuilder> stringTable;
MergeDuplicateNamespaces(namespaceBuilderTable, out stringTable);
List<NamespaceDataBuilder> syntheticNamespaces;
ResolveParentChildRelationships(stringTable, out syntheticNamespaces);
var namespaceTable = new Dictionary<NamespaceDefinitionHandle, NamespaceData>();
foreach (var group in namespaceBuilderTable)
{
// Freeze() caches the result, so any many-to-one relationships
// between keys and values will be preserved and efficiently handled.
namespaceTable.Add(group.Key, group.Value.Freeze());
}
if (syntheticNamespaces != null)
{
foreach (var syntheticNamespace in syntheticNamespaces)
{
namespaceTable.Add(syntheticNamespace.Handle, syntheticNamespace.Freeze());
}
}
// Publish the fully built table last so readers never see a partial table.
this.namespaceTable = namespaceTable;
this.rootNamespace = namespaceTable[rootNamespace];
}
}
/// <summary>
/// This will take 'table' and merge all of the NamespaceData instances that point to the same
/// namespace. It has to create 'stringTable' as an intermediate dictionary, so it will hand it
/// back to the caller should the caller want to use it.
/// </summary>
/// <param name="table">Handle-keyed builders; duplicate entries are remapped in place to the surviving builder.</param>
/// <param name="stringTable">Out: full-name-keyed map containing exactly one builder per distinct namespace.</param>
private void MergeDuplicateNamespaces(Dictionary<NamespaceDefinitionHandle, NamespaceDataBuilder> table, out Dictionary<string, NamespaceDataBuilder> stringTable)
{
var namespaces = new Dictionary<string, NamespaceDataBuilder>();
// Remaps are recorded here and applied after the loop (can't mutate 'table' mid-enumeration).
List<KeyValuePair<NamespaceDefinitionHandle, NamespaceDataBuilder>> remaps = null;
foreach (var group in table)
{
NamespaceDataBuilder data = group.Value;
NamespaceDataBuilder existingRecord;
if (namespaces.TryGetValue(data.FullName, out existingRecord))
{
// Children should not exist until the next step.
DebugCorlib.Assert(data.Namespaces.Count == 0);
data.MergeInto(existingRecord);
if (remaps == null)
{
remaps = new List<KeyValuePair<NamespaceDefinitionHandle, NamespaceDataBuilder>>();
}
remaps.Add(new KeyValuePair<NamespaceDefinitionHandle, NamespaceDataBuilder>(group.Key, existingRecord));
}
else
{
namespaces.Add(data.FullName, data);
}
}
// Needs to be done outside of foreach (var group in table) to avoid modifying the dictionary while foreach'ing over it.
if (remaps != null)
{
foreach (var tuple in remaps)
{
table[tuple.Key] = tuple.Value;
}
}
stringTable = namespaces;
}
/// <summary>
/// Creates a NamespaceDataBuilder instance that contains a synthesized NamespaceDefinitionHandle,
/// as well as the name provided.
/// </summary>
/// <param name="fullName">Full dotted name of the namespace to synthesize.</param>
/// <param name="realChild">A real (non-synthetic) descendant whose full-name string contains fullName as a prefix.</param>
private NamespaceDataBuilder SynthesizeNamespaceData(string fullName, NamespaceDefinitionHandle realChild)
{
DebugCorlib.Assert(realChild.HasFullName);
// Count the dots to find which segment of the child's name is this namespace's simple name.
int numberOfSegments = 0;
foreach (char c in fullName)
{
if (c == '.')
{
numberOfSegments++;
}
}
StringHandle simpleName = GetSimpleName(realChild, numberOfSegments);
var namespaceHandle = NamespaceDefinitionHandle.FromIndexOfSimpleName((uint)simpleName.Index);
return new NamespaceDataBuilder(namespaceHandle, simpleName, fullName);
}
/// <summary>
/// Quick convenience method that handles linking together child + parent
/// </summary>
private void LinkChildDataToParentData(NamespaceDataBuilder child, NamespaceDataBuilder parent)
{
DebugCorlib.Assert(child != null && parent != null);
DebugCorlib.Assert(!child.Handle.IsNil);
// Link both directions: child records its parent handle, parent records the child.
child.Parent = parent.Handle;
parent.Namespaces.Add(child.Handle);
}
/// <summary>
/// Links a child to its parent namespace. If the parent namespace doesn't exist, this will create a
/// synthetic one. This will automatically link any synthetic namespaces it creates up to its parents.
/// </summary>
/// <param name="existingNamespaces">Full-name-keyed map of real (non-synthetic) namespace builders.</param>
/// <param name="realChild">The real namespace whose ancestry is being linked.</param>
/// <param name="syntheticNamespaces">Accumulator for synthesized ancestors; created lazily on first need.</param>
private void LinkChildToParentNamespace(Dictionary<string, NamespaceDataBuilder> existingNamespaces,
NamespaceDataBuilder realChild,
ref List<NamespaceDataBuilder> syntheticNamespaces)
{
DebugCorlib.Assert(realChild.Handle.HasFullName);
string childName = realChild.FullName;
var child = realChild;
// The condition for this loop is very complex -- essentially, we keep going
// until we:
// A. Encounter the root namespace as 'child'
// B. Find a preexisting namespace as 'parent'
while (true)
{
// Derive the parent's full name by dropping the last dotted segment.
int lastIndex = childName.LastIndexOf('.');
string parentName;
if (lastIndex == -1)
{
if (childName.Length == 0)
{
// 'child' is the root namespace itself; nothing above it (case A).
return;
}
else
{
parentName = String.Empty;
}
}
else
{
parentName = childName.Substring(0, lastIndex);
}
NamespaceDataBuilder parentData;
if (existingNamespaces.TryGetValue(parentName, out parentData))
{
// Found a real parent; link and stop (case B).
LinkChildDataToParentData(child, parentData);
return;
}
if (syntheticNamespaces != null)
{
// A synthetic ancestor may already exist from linking an earlier sibling.
foreach (var data in syntheticNamespaces)
{
if (data.FullName == parentName)
{
LinkChildDataToParentData(child, data);
return;
}
}
}
else
{
syntheticNamespaces = new List<NamespaceDataBuilder>();
}
// No parent found at all: synthesize one and continue walking up from it.
var syntheticParent = SynthesizeNamespaceData(parentName, realChild.Handle);
LinkChildDataToParentData(child, syntheticParent);
syntheticNamespaces.Add(syntheticParent);
childName = syntheticParent.FullName;
child = syntheticParent;
}
}
/// <summary>
/// This will link all parents/children in the given namespaces dictionary up to each other.
///
/// In some cases, we need to synthesize namespaces that do not have any type definitions or forwarders
/// of their own, but do have child namespaces. These are returned via the syntheticNamespaces out
/// parameter.
/// </summary>
private void ResolveParentChildRelationships(Dictionary<string, NamespaceDataBuilder> namespaces, out List<NamespaceDataBuilder> syntheticNamespaces)
{
// null until the first synthetic namespace is actually needed.
syntheticNamespaces = null;
foreach (var namespaceData in namespaces.Values)
{
LinkChildToParentNamespace(namespaces, namespaceData, ref syntheticNamespaces);
}
}
/// <summary>
/// Loops through all type definitions in metadata, adding them to the given table
/// </summary>
private void PopulateTableWithTypeDefinitions(Dictionary<NamespaceDefinitionHandle, NamespaceDataBuilder> table)
{
DebugCorlib.Assert(table != null);
foreach (var typeHandle in this.metadataReader.TypeDefinitions)
{
TypeDefinition type = this.metadataReader.GetTypeDefinition(typeHandle);
if (type.Attributes.IsNested())
{
// Nested types belong to their declaring type, not directly to a namespace.
continue;
}
NamespaceDefinitionHandle namespaceHandle = this.metadataReader.TypeDefTable.GetNamespace(typeHandle);
NamespaceDataBuilder builder;
if (table.TryGetValue(namespaceHandle, out builder))
{
builder.TypeDefinitions.Add(typeHandle);
}
else
{
// First type seen in this namespace: create its builder.
StringHandle name = GetSimpleName(namespaceHandle);
string fullName = this.metadataReader.GetString(namespaceHandle);
var newData = new NamespaceDataBuilder(namespaceHandle, name, fullName);
newData.TypeDefinitions.Add(typeHandle);
table.Add(namespaceHandle, newData);
}
}
}
/// <summary>
/// Loops through all type forwarders in metadata, adding them to the given table
/// </summary>
private void PopulateTableWithExportedTypes(Dictionary<NamespaceDefinitionHandle, NamespaceDataBuilder> table)
{
DebugCorlib.Assert(table != null);
foreach (var exportedTypeHandle in this.metadataReader.ExportedTypes)
{
ExportedType exportedType = this.metadataReader.GetExportedType(exportedTypeHandle);
if (exportedType.Implementation.Kind == HandleKind.ExportedType)
{
continue; // skip nested exported types.
}
NamespaceDefinitionHandle namespaceHandle = exportedType.Namespace;
NamespaceDataBuilder builder;
if (table.TryGetValue(namespaceHandle, out builder))
{
builder.ExportedTypes.Add(exportedTypeHandle);
}
else
{
// First exported type seen in this namespace: create its builder.
DebugCorlib.Assert(namespaceHandle.HasFullName);
StringHandle simpleName = GetSimpleName(namespaceHandle);
string fullName = this.metadataReader.GetString(namespaceHandle);
var newData = new NamespaceDataBuilder(namespaceHandle, simpleName, fullName);
newData.ExportedTypes.Add(exportedTypeHandle);
table.Add(namespaceHandle, newData);
}
}
}
/// <summary>
/// If the namespace table doesn't exist, populates it!
/// </summary>
private void EnsureNamespaceTableIsPopulated()
{
// PERF: Branch will rarely be taken; do work in PopulateNamespaceTable() so this can be inlined easily.
if (this.namespaceTable == null)
{
PopulateNamespaceTable();
}
DebugCorlib.Assert(this.namespaceTable != null);
}
/// <summary>
/// An intermediate class used to build NamespaceData instances. This was created because we wanted to
/// use ImmutableArrays in NamespaceData, but having ArrayBuilders and ImmutableArrays that served the
/// same purpose in NamespaceData got ugly. With the current design of how we create our Namespace
/// dictionary, this needs to be a class because we have a many-to-one mapping between NamespaceHandles
/// and NamespaceData. So, the pointer semantics must be preserved.
///
/// This class assumes that the builders will not be modified in any way after the first call to
/// Freeze().
/// </summary>
private class NamespaceDataBuilder
{
public readonly NamespaceDefinitionHandle Handle;
public readonly StringHandle Name;
public readonly string FullName;
public NamespaceDefinitionHandle Parent;
// Mutable builders; set to null once Freeze() captures their contents.
public ImmutableArray<NamespaceDefinitionHandle>.Builder Namespaces;
public ImmutableArray<TypeDefinitionHandle>.Builder TypeDefinitions;
public ImmutableArray<ExportedTypeHandle>.Builder ExportedTypes;
// Cached immutable result of Freeze(); null until first frozen.
private NamespaceData frozen;
public NamespaceDataBuilder(NamespaceDefinitionHandle handle, StringHandle name, string fullName)
{
Handle = handle;
Name = name;
FullName = fullName;
Namespaces = ImmutableArray.CreateBuilder<NamespaceDefinitionHandle>();
TypeDefinitions = ImmutableArray.CreateBuilder<TypeDefinitionHandle>();
ExportedTypes = ImmutableArray.CreateBuilder<ExportedTypeHandle>();
}
/// <summary>
/// Returns a NamespaceData that represents this NamespaceDataBuilder instance. After calling
/// this method, it is an error to use any methods or fields except Freeze() on the target
/// NamespaceDataBuilder.
/// </summary>
public NamespaceData Freeze()
{
// It is not an error to call this function multiple times. We cache the result
// because it's immutable.
if (frozen == null)
{
var namespaces = Namespaces.ToImmutable();
Namespaces = null;
var typeDefinitions = TypeDefinitions.ToImmutable();
TypeDefinitions = null;
var exportedTypes = ExportedTypes.ToImmutable();
ExportedTypes = null;
frozen = new NamespaceData(Name, FullName, Parent, namespaces, typeDefinitions, exportedTypes);
}
return frozen;
}
// Moves this builder's contents into 'other' (used when two handles name the same namespace).
public void MergeInto(NamespaceDataBuilder other)
{
Parent = default(NamespaceDefinitionHandle);
other.Namespaces.AddRange(this.Namespaces);
other.TypeDefinitions.AddRange(this.TypeDefinitions);
other.ExportedTypes.AddRange(this.ExportedTypes);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.IO.Pipelines;
using System.Linq;
using System.Net;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Hosting.Server;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.AspNetCore.Internal;
using Microsoft.AspNetCore.Routing;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Infrastructure;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Primitives;
using Microsoft.Net.Http.Headers;
namespace Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http
{
using BadHttpRequestException = Microsoft.AspNetCore.Http.BadHttpRequestException;
internal abstract partial class HttpProtocol : IHttpResponseControl
{
// Pre-encoded ASCII header fragments appended to response headers.
private static readonly byte[] _bytesConnectionClose = Encoding.ASCII.GetBytes("\r\nConnection: close");
private static readonly byte[] _bytesConnectionKeepAlive = Encoding.ASCII.GetBytes("\r\nConnection: keep-alive");
private static readonly byte[] _bytesTransferEncodingChunked = Encoding.ASCII.GetBytes("\r\nTransfer-Encoding: chunked");
private static readonly byte[] _bytesServer = Encoding.ASCII.GetBytes("\r\nServer: " + Constants.ServerName);
internal const string SchemeHttp = "http";
internal const string SchemeHttps = "https";
protected BodyControl? _bodyControl;
// OnStarting/OnCompleted callbacks, run LIFO (hence stacks).
private Stack<KeyValuePair<Func<object, Task>, object>>? _onStarting;
private Stack<KeyValuePair<Func<object, Task>, object>>? _onCompleted;
// Guards abort-related state below (see RequestAborted getter).
private readonly object _abortLock = new object();
protected volatile bool _connectionAborted;
private bool _preventRequestAbortedCancellation;
private CancellationTokenSource? _abortedCts;
private CancellationToken? _manuallySetRequestAbortToken;
protected RequestProcessingStatus _requestProcessingStatus;
// Keep-alive is default for HTTP/1.1 and HTTP/2; parsing and errors will change its value
// volatile, see: https://msdn.microsoft.com/en-us/library/x13ttww7.aspx
protected volatile bool _keepAlive = true;
// _canWriteResponseBody is set in CreateResponseHeaders.
// If we are writing with GetMemory/Advance before calling StartAsync, assume we can write and throw away contents if we can't.
private bool _canWriteResponseBody = true;
private bool _hasAdvanced;
private bool _isLeasedMemoryInvalid = true;
private bool _autoChunk;
protected Exception? _applicationException;
private BadHttpRequestException? _requestRejectedException;
protected HttpVersion _httpVersion;
// This should only be used by the application, not the server. This is settable on HttpRequest but we don't want that to affect
// how Kestrel processes requests/responses.
private string? _httpProtocol;
// Lazily generated trace identifier; see TraceIdentifier.
private string? _requestId;
private int _requestHeadersParsed;
private long _responseBytesWritten;
private HttpConnectionContext _context = default!;
private RouteValueDictionary? _routeValues;
private Endpoint? _endpoint;
protected string? _methodText;
private string? _scheme;
// Original body streams captured in InitializeBodyControl so they can be restored later.
private Stream? _requestStreamInternal;
private Stream? _responseStreamInternal;
/// <summary>
/// Binds this protocol instance to a connection context and resets all per-request state.
/// </summary>
public void Initialize(HttpConnectionContext context)
{
_context = context;
// Cache server options before Reset(), which reads limits from them.
ServerOptions = ServiceContext.ServerOptions;
Reset();
HttpResponseControl = this;
}
public IHttpResponseControl HttpResponseControl { get; set; } = default!;
// Convenience accessors that delegate to the bound connection context.
public ServiceContext ServiceContext => _context.ServiceContext;
private IPEndPoint? LocalEndPoint => _context.LocalEndPoint;
private IPEndPoint? RemoteEndPoint => _context.RemoteEndPoint;
public ITimeoutControl TimeoutControl => _context.TimeoutControl;
public IFeatureCollection ConnectionFeatures => _context.ConnectionFeatures;
public IHttpOutputProducer Output { get; protected set; } = default!;
protected KestrelTrace Log => ServiceContext.Log;
private DateHeaderValueManager DateHeaderValueManager => ServiceContext.DateHeaderValueManager;
// Hold direct reference to ServerOptions since this is used very often in the request processing path
protected KestrelServerOptions ServerOptions { get; set; } = default!;
protected string ConnectionId => _context.ConnectionId;
public string ConnectionIdFeature { get; set; } = default!;
public bool HasStartedConsumingRequestBody { get; set; }
// Per-request limits; re-seeded from ServerOptions in Reset().
public long? MaxRequestBodySize { get; set; }
public MinDataRate? MinRequestBodyDataRate { get; set; }
public bool AllowSynchronousIO { get; set; }
/// <summary>
/// The request id. <seealso cref="HttpContext.TraceIdentifier"/>
/// </summary>
public string TraceIdentifier
{
set => _requestId = value;
get
{
// don't generate an ID until it is requested
if (_requestId == null)
{
_requestId = CreateRequestId();
}
return _requestId;
}
}
public bool IsUpgradableRequest { get; private set; }
public bool IsUpgraded { get; set; }
// Endpoint info copied from the connection context in Reset().
public IPAddress? RemoteIpAddress { get; set; }
public int RemotePort { get; set; }
public IPAddress? LocalIpAddress { get; set; }
public int LocalPort { get; set; }
public string? Scheme { get; set; }
public HttpMethod Method { get; set; }
public string MethodText => ((IHttpRequestFeature)this).Method;
// Request target components parsed from the request line.
public string? PathBase { get; set; }
public string? Path { get; set; }
public string? QueryString { get; set; }
public string? RawTarget { get; set; }
/// <summary>
/// The request's HTTP version as a string ("HTTP/1.0".."HTTP/3"); empty when unknown.
/// The setter fast-paths reference-equal interned constants and falls back to
/// HttpVersionSetSlow for content comparison.
/// </summary>
public string HttpVersion
{
get
{
if (_httpVersion == Http.HttpVersion.Http3)
{
return AspNetCore.Http.HttpProtocol.Http3;
}
if (_httpVersion == Http.HttpVersion.Http2)
{
return AspNetCore.Http.HttpProtocol.Http2;
}
if (_httpVersion == Http.HttpVersion.Http11)
{
return AspNetCore.Http.HttpProtocol.Http11;
}
if (_httpVersion == Http.HttpVersion.Http10)
{
return AspNetCore.Http.HttpProtocol.Http10;
}
return string.Empty;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
set
{
// GetKnownVersion returns versions which ReferenceEquals interned string
// As most common path, check for this only in fast-path and inline
if (ReferenceEquals(value, AspNetCore.Http.HttpProtocol.Http3))
{
_httpVersion = Http.HttpVersion.Http3;
}
else if (ReferenceEquals(value, AspNetCore.Http.HttpProtocol.Http2))
{
_httpVersion = Http.HttpVersion.Http2;
}
else if (ReferenceEquals(value, AspNetCore.Http.HttpProtocol.Http11))
{
_httpVersion = Http.HttpVersion.Http11;
}
else if (ReferenceEquals(value, AspNetCore.Http.HttpProtocol.Http10))
{
_httpVersion = Http.HttpVersion.Http10;
}
else
{
HttpVersionSetSlow(value);
}
}
}
// Slow path for the HttpVersion setter: the value was not reference-equal to any
// interned version constant, so compare by content and map unrecognized strings
// to Unknown. Kept non-inlined so the fast path in the setter stays small.
[MethodImpl(MethodImplOptions.NoInlining)]
private void HttpVersionSetSlow(string value)
{
_httpVersion =
AspNetCore.Http.HttpProtocol.IsHttp3(value) ? Http.HttpVersion.Http3 :
AspNetCore.Http.HttpProtocol.IsHttp2(value) ? Http.HttpVersion.Http2 :
AspNetCore.Http.HttpProtocol.IsHttp11(value) ? Http.HttpVersion.Http11 :
AspNetCore.Http.HttpProtocol.IsHttp10(value) ? Http.HttpVersion.Http10 :
Http.HttpVersion.Unknown;
}
public IHeaderDictionary RequestHeaders { get; set; } = default!;
public IHeaderDictionary RequestTrailers { get; } = new HeaderDictionary();
public bool RequestTrailersAvailable { get; set; }
public Stream RequestBody { get; set; } = default!;
public PipeReader RequestBodyPipeReader { get; set; } = default!;
public HttpResponseTrailers? ResponseTrailers { get; set; }
// Backing field for StatusCode; reset to 200 per request in Reset().
private int _statusCode;
/// <summary>
/// Response status code; cannot be changed once the response has started.
/// </summary>
public int StatusCode
{
get => _statusCode;
set
{
if (HasResponseStarted)
{
ThrowResponseAlreadyStartedException(nameof(StatusCode));
}
_statusCode = value;
}
}
private string? _reasonPhrase;
/// <summary>
/// Response reason phrase; cannot be changed once the response has started.
/// </summary>
public string? ReasonPhrase
{
get => _reasonPhrase;
set
{
if (HasResponseStarted)
{
ThrowResponseAlreadyStartedException(nameof(ReasonPhrase));
}
_reasonPhrase = value;
}
}
// Response body surfaces handed out by InitializeBodyControl.
public IHeaderDictionary ResponseHeaders { get; set; } = default!;
public Stream ResponseBody { get; set; } = default!;
public PipeWriter ResponseBodyPipeWriter { get; set; } = default!;
/// <summary>
/// Token that is signaled when the request is aborted. A manually set token (via
/// IHttpRequestLifetimeFeature) takes precedence; otherwise a token reflecting the
/// connection's abort state is returned, created lazily under _abortLock.
/// </summary>
public CancellationToken RequestAborted
{
get
{
// If a request abort token was previously explicitly set, return it.
if (_manuallySetRequestAbortToken.HasValue)
{
return _manuallySetRequestAbortToken.Value;
}
lock (_abortLock)
{
if (_preventRequestAbortedCancellation)
{
// Abort notifications are suppressed: hand out a token that never fires.
return new CancellationToken(false);
}
if (_connectionAborted)
{
// Already aborted: hand out an already-canceled token.
return new CancellationToken(true);
}
if (_abortedCts == null)
{
_abortedCts = new CancellationTokenSource();
}
return _abortedCts.Token;
}
}
set
{
// Set an abort token, overriding one we create internally. This setter and associated
// field exist purely to support IHttpRequestLifetimeFeature.set_RequestAborted.
_manuallySetRequestAbortToken = value;
}
}
// Response lifecycle flags derived from _requestProcessingStatus ordering.
public bool HasResponseStarted => _requestProcessingStatus >= RequestProcessingStatus.HeadersCommitted;
public bool HasFlushedHeaders => _requestProcessingStatus >= RequestProcessingStatus.HeadersFlushed;
public bool HasResponseCompleted => _requestProcessingStatus == RequestProcessingStatus.ResponseCompleted;
protected HttpRequestHeaders HttpRequestHeaders { get; set; } = new HttpRequestHeaders();
protected HttpResponseHeaders HttpResponseHeaders { get; } = new HttpResponseHeaders();
// Wires the request/response body streams and pipes for the current request.
// The BodyControl instance is created once per connection and reused.
public void InitializeBodyControl(MessageBody messageBody)
{
    _bodyControl ??= new BodyControl(bodyControl: this, this);

    (RequestBody, ResponseBody, RequestBodyPipeReader, ResponseBodyPipeWriter) =
        _bodyControl.Start(messageBody);

    _requestStreamInternal = RequestBody;
    _responseStreamInternal = ResponseBody;
}
// For testing
internal void ResetState()
{
    _requestProcessingStatus = RequestProcessingStatus.RequestPending;
}

// Returns this instance to a pristine per-request state so the connection can
// process the next request without reallocating. Ordering here mirrors the
// fields' lifetimes; do not reorder casually.
public void Reset()
{
    _onStarting?.Clear();
    _onCompleted?.Clear();
    _routeValues?.Clear();

    _requestProcessingStatus = RequestProcessingStatus.RequestPending;
    _autoChunk = false;
    _applicationException = null;
    _requestRejectedException = null;

    ResetFeatureCollection();

    HasStartedConsumingRequestBody = false;
    // Re-read limits each request so per-request overrides don't leak into the next one.
    MaxRequestBodySize = ServerOptions.Limits.MaxRequestBodySize;
    MinRequestBodyDataRate = ServerOptions.Limits.MinRequestBodyDataRate;
    AllowSynchronousIO = ServerOptions.AllowSynchronousIO;
    TraceIdentifier = null!;
    Method = HttpMethod.None;
    _methodText = null;
    _endpoint = null;
    PathBase = null;
    Path = null;
    RawTarget = null;
    QueryString = null;
    _httpVersion = Http.HttpVersion.Unknown;
    _httpProtocol = null;
    _statusCode = StatusCodes.Status200OK;
    _reasonPhrase = null;

    var remoteEndPoint = RemoteEndPoint;
    RemoteIpAddress = remoteEndPoint?.Address;
    RemotePort = remoteEndPoint?.Port ?? 0;
    var localEndPoint = LocalEndPoint;
    LocalIpAddress = localEndPoint?.Address;
    LocalPort = localEndPoint?.Port ?? 0;
    ConnectionIdFeature = ConnectionId;

    HttpRequestHeaders.Reset();
    HttpRequestHeaders.EncodingSelector = ServerOptions.RequestHeaderEncodingSelector;
    HttpRequestHeaders.ReuseHeaderValues = !ServerOptions.DisableStringReuse;
    HttpResponseHeaders.Reset();
    HttpResponseHeaders.EncodingSelector = ServerOptions.ResponseHeaderEncodingSelector;
    RequestHeaders = HttpRequestHeaders;
    ResponseHeaders = HttpResponseHeaders;
    RequestTrailers.Clear();
    ResponseTrailers?.Reset();
    RequestTrailersAvailable = false;

    _isLeasedMemoryInvalid = true;
    _hasAdvanced = false;
    _canWriteResponseBody = true;

    if (_scheme == null)
    {
        // Scheme is a per-connection property; compute it once and cache.
        var tlsFeature = ConnectionFeatures?[typeof(ITlsConnectionFeature)];
        _scheme = tlsFeature != null ? SchemeHttps : SchemeHttp;
    }

    Scheme = _scheme;

    _manuallySetRequestAbortToken = null;

    // Lock to prevent CancelRequestAbortedToken from attempting to cancel a disposed CTS.
    CancellationTokenSource? localAbortCts = null;

    lock (_abortLock)
    {
        _preventRequestAbortedCancellation = false;

        // Reuse the CTS when possible; if TryReset fails (already canceled or
        // has registrations), hand it off for disposal and start fresh.
        if (_abortedCts?.TryReset() == false)
        {
            localAbortCts = _abortedCts;
            _abortedCts = null;
        }
    }

    // Dispose outside the lock so registration callbacks don't run under it.
    localAbortCts?.Dispose();

    Output?.Reset();

    _requestHeadersParsed = 0;
    _responseBytesWritten = 0;

    OnReset();
}
// Protocol-version-specific hooks implemented/overridden by derived classes
// (HTTP/1.x, HTTP/2, HTTP/3 streams).
protected abstract void OnReset();

protected abstract void ApplicationAbort();

protected virtual void OnRequestProcessingEnding()
{
}

protected virtual void OnRequestProcessingEnded()
{
}

protected virtual void BeginRequestProcessing()
{
}

protected virtual void OnErrorAfterResponseStarted()
{
}

// Returns false when there is nothing to await (no read was started).
protected virtual bool BeginRead(out ValueTask<ReadResult> awaitable)
{
    awaitable = default;
    return false;
}

protected abstract string CreateRequestId();

protected abstract MessageBody CreateMessageBody();

protected abstract bool TryParseRequest(ReadResult result, out bool endConnection);
// Runs (scheduled by CancelRequestAbortedToken) to actually cancel the
// request-aborted CTS, potentially invoking user registrations.
private void CancelRequestAbortedTokenCallback()
{
    try
    {
        CancellationTokenSource? localAbortCts = null;

        lock (_abortLock)
        {
            // Detach the CTS under the lock so Reset() can't dispose it concurrently.
            if (_abortedCts != null && !_preventRequestAbortedCancellation)
            {
                localAbortCts = _abortedCts;
                _abortedCts = null;
            }
        }

        // If we cancel the cts, we don't dispose as people may still be using
        // the cts. It also isn't necessary to dispose a canceled cts.
        localAbortCts?.Cancel();
    }
    catch (Exception ex)
    {
        Log.ApplicationError(ConnectionId, TraceIdentifier, ex);
    }
}

// Marks the connection aborted and, if anyone is observing RequestAborted,
// schedules the cancellation callback off the current thread.
protected void CancelRequestAbortedToken()
{
    var shouldScheduleCancellation = false;

    lock (_abortLock)
    {
        if (_connectionAborted)
        {
            // Already aborted; cancellation was scheduled (or suppressed) the first time.
            return;
        }

        shouldScheduleCancellation = _abortedCts != null && !_preventRequestAbortedCancellation;
        _connectionAborted = true;
    }

    if (shouldScheduleCancellation)
    {
        // Potentially calling user code. CancelRequestAbortedToken logs any exceptions.
        ServiceContext.Scheduler.Schedule(state => ((HttpProtocol)state!).CancelRequestAbortedTokenCallback(), this);
    }
}
// Fails any in-progress or future body reads/writes with the given reason.
protected void PoisonBody(Exception abortReason) => _bodyControl?.Abort(abortReason);
// Prevents the RequestAborted token from firing for the duration of the request.
private void PreventRequestAbortedCancellation()
{
    lock (_abortLock)
    {
        if (_connectionAborted)
        {
            // Too late — the abort already happened; nothing to suppress.
            return;
        }

        _preventRequestAbortedCancellation = true;
    }
}
// Called by the parser for each request header; enforces the header-count
// limit, then appends the raw name/value to the typed header collection.
public virtual void OnHeader(ReadOnlySpan<byte> name, ReadOnlySpan<byte> value, bool checkForNewlineChars)
{
    IncrementRequestHeadersCount();

    HttpRequestHeaders.Append(name, value, checkForNewlineChars);
}

// Indexed (e.g. HPACK/QPACK static-table) variant of OnHeader.
public virtual void OnHeader(int index, bool indexOnly, ReadOnlySpan<byte> name, ReadOnlySpan<byte> value)
{
    IncrementRequestHeadersCount();

    // This method should be overridden in specific implementations and the base should be
    // called to validate the header count.
}
// Called by the parser for each request trailer. Trailers count toward the
// same header-count limit as regular headers.
public void OnTrailer(ReadOnlySpan<byte> name, ReadOnlySpan<byte> value)
{
    IncrementRequestHeadersCount();

    var headerName = name.GetHeaderName();
    var headerValue = value.GetRequestHeaderString(headerName, HttpRequestHeaders.EncodingSelector, checkForNewlineChars: false);
    RequestTrailers.Append(headerName, headerValue);
}
// Counts a parsed header/trailer and rejects the request once the configured
// MaxRequestHeaderCount limit is exceeded.
private void IncrementRequestHeadersCount()
{
    if (++_requestHeadersParsed > ServerOptions.Limits.MaxRequestHeaderCount)
    {
        KestrelBadHttpRequestException.Throw(RequestRejectionReason.TooManyHeaders);
    }
}
// Signals that all request headers have been parsed.
public void OnHeadersComplete() => HttpRequestHeaders.OnHeadersComplete();

// Signals that all request trailers have been parsed and may be observed.
public void OnTrailersComplete() => RequestTrailersAvailable = true;
// Top-level per-connection entry point: runs the request loop and maps each
// terminal exception category to the appropriate logging/response behavior.
public async Task ProcessRequestsAsync<TContext>(IHttpApplication<TContext> application) where TContext : notnull
{
    try
    {
        // We run the request processing loop in a separate async method so per connection
        // exception handling doesn't complicate the generated asm for the loop.
        await ProcessRequests(application);
    }
    catch (BadHttpRequestException ex)
    {
        // Handle BadHttpRequestException thrown during request line or header parsing.
        // SetBadRequestState logs the error.
        SetBadRequestState(ex);
    }
    catch (ConnectionResetException ex)
    {
        // Don't log ECONNRESET errors made between requests. Browsers like IE will reset connections regularly.
        if (_requestProcessingStatus != RequestProcessingStatus.RequestPending)
        {
            Log.RequestProcessingError(ConnectionId, ex);
        }
    }
    catch (IOException ex)
    {
        Log.RequestProcessingError(ConnectionId, ex);
    }
    catch (ConnectionAbortedException ex)
    {
        Log.RequestProcessingError(ConnectionId, ex);
    }
    catch (Exception ex)
    {
        // Unexpected failure — logged at warning since it escaped all known categories.
        Log.LogWarning(0, ex, CoreStrings.RequestProcessingEndError);
    }
    finally
    {
        try
        {
            // Write a 4XX for a rejected request if the response never started.
            await TryProduceInvalidRequestResponse();
        }
        catch (Exception ex)
        {
            Log.LogWarning(0, ex, CoreStrings.ConnectionShutdownError);
        }
        finally
        {
            OnRequestProcessingEnded();
        }
    }
}
// The keep-alive request loop: parse a request, run the application, finish
// the response, and drain the body — once per request until the connection ends.
private async Task ProcessRequests<TContext>(IHttpApplication<TContext> application) where TContext : notnull
{
    while (_keepAlive)
    {
        if (_context.InitialExecutionContext is null)
        {
            // If this is a first request on a non-Http2Connection, capture a clean ExecutionContext.
            _context.InitialExecutionContext = ExecutionContext.Capture();
        }
        else
        {
            // Clear any AsyncLocals set during the request; back to a clean state ready for next request
            // And/or reset to Http2Connection's ExecutionContext giving access to the connection logging scope
            // and any other AsyncLocals set by connection middleware.
            ExecutionContext.Restore(_context.InitialExecutionContext);
        }

        BeginRequestProcessing();

        var result = default(ReadResult);
        bool endConnection;
        do
        {
            // Keep reading until the parser has a complete request (or signals end).
            if (BeginRead(out var awaitable))
            {
                result = await awaitable;
            }
        } while (!TryParseRequest(result, out endConnection));

        if (endConnection)
        {
            // Connection finished, stop processing requests
            return;
        }

        var messageBody = CreateMessageBody();
        if (!messageBody.RequestKeepAlive)
        {
            _keepAlive = false;
        }

        IsUpgradableRequest = messageBody.RequestUpgrade;

        InitializeBodyControl(messageBody);

        var context = application.CreateContext(this);

        try
        {
            KestrelEventSource.Log.RequestStart(this);

            // Run the application code for this request
            await application.ProcessRequestAsync(context);

            // Trigger OnStarting if it hasn't been called yet and the app hasn't
            // already failed. If an OnStarting callback throws we can go through
            // our normal error handling in ProduceEnd.
            // https://github.com/aspnet/KestrelHttpServer/issues/43
            if (!HasResponseStarted && _applicationException == null && _onStarting?.Count > 0)
            {
                await FireOnStarting();
            }

            if (!_connectionAborted && !VerifyResponseContentLength(out var lengthException))
            {
                ReportApplicationError(lengthException);
            }
        }
        catch (BadHttpRequestException ex)
        {
            // Capture BadHttpRequestException for further processing
            // This has to be caught here so StatusCode is set properly before disposing the HttpContext
            // (DisposeContext logs StatusCode).
            SetBadRequestState(ex);
            ReportApplicationError(ex);
        }
        catch (Exception ex)
        {
            ReportApplicationError(ex);
        }

        KestrelEventSource.Log.RequestStop(this);

        // At this point all user code that needs to use the request or response streams has completed.
        // Using these streams in the OnCompleted callback is not allowed.
        try
        {
            Debug.Assert(_bodyControl != null);
            await _bodyControl.StopAsync();
        }
        catch (Exception ex)
        {
            // BodyControl.StopAsync() can throw if the PipeWriter was completed prior to the application writing
            // enough bytes to satisfy the specified Content-Length. This risks double-logging the exception,
            // but this scenario generally indicates an app bug, so I don't want to risk not logging it.
            ReportApplicationError(ex);
        }

        // 4XX responses are written by TryProduceInvalidRequestResponse during connection tear down.
        if (_requestRejectedException == null)
        {
            if (!_connectionAborted)
            {
                // Call ProduceEnd() before consuming the rest of the request body to prevent
                // delaying clients waiting for the chunk terminator:
                //
                // https://github.com/dotnet/corefx/issues/17330#issuecomment-288248663
                //
                // This also prevents the 100 Continue response from being sent if the app
                // never tried to read the body.
                // https://github.com/aspnet/KestrelHttpServer/issues/2102
                //
                // ProduceEnd() must be called before _application.DisposeContext(), to ensure
                // HttpContext.Response.StatusCode is correctly set when
                // IHttpContextFactory.Dispose(HttpContext) is called.
                await ProduceEnd();
            }
            else if (!HasResponseStarted)
            {
                // If the request was aborted and no response was sent, there's no
                // meaningful status code to log.
                StatusCode = 0;
            }
        }

        if (_onCompleted?.Count > 0)
        {
            await FireOnCompleted();
        }

        application.DisposeContext(context, _applicationException);

        // Even for non-keep-alive requests, try to consume the entire body to avoid RSTs.
        if (!_connectionAborted && _requestRejectedException == null && !messageBody.IsEmpty)
        {
            await messageBody.ConsumeAsync();
        }

        if (HasStartedConsumingRequestBody)
        {
            await messageBody.StopAsync();
        }
    }
}
// Registers a callback to run just before response headers are sent.
// Callbacks run in LIFO order (stack semantics). Throws once the response has started.
public void OnStarting(Func<object, Task> callback, object state)
{
    if (HasResponseStarted)
    {
        ThrowResponseAlreadyStartedException(nameof(OnStarting));
    }

    _onStarting ??= new Stack<KeyValuePair<Func<object, Task>, object>>();
    _onStarting.Push(KeyValuePair.Create(callback, state));
}
// Registers a callback to run after the response has fully completed.
// Callbacks run in LIFO order (stack semantics).
public void OnCompleted(Func<object, Task> callback, object state)
{
    _onCompleted ??= new Stack<KeyValuePair<Func<object, Task>, object>>();
    _onCompleted.Push(KeyValuePair.Create(callback, state));
}
// Runs all registered OnStarting callbacks (LIFO). A callback failure fails
// the request via ReportApplicationError and stops further callbacks.
protected Task FireOnStarting()
{
    var onStarting = _onStarting;
    if (onStarting?.Count > 0)
    {
        return ProcessEvents(this, onStarting);
    }

    return Task.CompletedTask;

    static async Task ProcessEvents(HttpProtocol protocol, Stack<KeyValuePair<Func<object, Task>, object>> events)
    {
        // Try/Catch is outside the loop as any error that occurs is before the request starts.
        // So we want to report it as an ApplicationError to fail the request and not process more events.
        try
        {
            while (events.TryPop(out var entry))
            {
                await entry.Key.Invoke(entry.Value);
            }
        }
        catch (Exception ex)
        {
            protocol.ReportApplicationError(ex);
        }
    }
}
// Runs all registered OnCompleted callbacks (LIFO). Failures are logged but
// do not stop the remaining callbacks, since the response is already done.
protected Task FireOnCompleted()
{
    var onCompleted = _onCompleted;
    if (onCompleted?.Count > 0)
    {
        return ProcessEvents(this, onCompleted);
    }

    return Task.CompletedTask;

    static async Task ProcessEvents(HttpProtocol protocol, Stack<KeyValuePair<Func<object, Task>, object>> events)
    {
        // Try/Catch is inside the loop as any error that occurs is after the request has finished.
        // So we will just log it and keep processing the events, as the completion has already happened.
        while (events.TryPop(out var entry))
        {
            try
            {
                await entry.Key.Invoke(entry.Value);
            }
            catch (Exception ex)
            {
                protocol.Log.ApplicationError(protocol.ConnectionId, protocol.TraceIdentifier, ex);
            }
        }
    }
}
// Accounts for `count` response-body bytes and throws if the write would
// exceed the declared Content-Length (Transfer-Encoding responses are exempt).
private void VerifyAndUpdateWrite(int count)
{
    var responseHeaders = HttpResponseHeaders;

    if (responseHeaders != null &&
        !responseHeaders.HasTransferEncoding &&
        responseHeaders.ContentLength.HasValue &&
        _responseBytesWritten + count > responseHeaders.ContentLength.Value)
    {
        // The response is now unusable from the client's perspective; don't reuse the connection.
        _keepAlive = false;
        ThrowTooManyBytesWritten(count);
    }

    _responseBytesWritten += count;
}
// Throw helper for Content-Length overruns; split from the exception factory
// so the hot write path stays small and inlinable.
[StackTraceHidden]
private void ThrowTooManyBytesWritten(int count)
{
    throw GetTooManyBytesWrittenException(count);
}

// NoInlining keeps the exception allocation out of callers' fast paths.
[MethodImpl(MethodImplOptions.NoInlining)]
private InvalidOperationException GetTooManyBytesWrittenException(int count)
{
    var responseHeaders = HttpResponseHeaders;
    return new InvalidOperationException(
        CoreStrings.FormatTooManyBytesWritten(_responseBytesWritten + count, responseHeaders.ContentLength!.Value));
}
// Suppresses the RequestAborted token once the final Content-Length byte has
// been written, so a graceful client close after the last byte doesn't look
// like an abort to the application.
private void CheckLastWrite()
{
    var responseHeaders = HttpResponseHeaders;

    // Prevent firing request aborted token if this is the last write, to avoid
    // aborting the request if the app is still running when the client receives
    // the final bytes of the response and gracefully closes the connection.
    //
    // Called after VerifyAndUpdateWrite(), so _responseBytesWritten has already been updated.
    if (responseHeaders != null &&
        !responseHeaders.HasTransferEncoding &&
        responseHeaders.ContentLength.HasValue &&
        _responseBytesWritten == responseHeaders.ContentLength.Value)
    {
        PreventRequestAbortedCancellation();
    }
}
// Returns false (with an explanatory exception) when fewer bytes were written
// than the declared Content-Length. HEAD and 304 responses are exempt.
protected bool VerifyResponseContentLength([NotNullWhen(false)] out Exception? ex)
{
    var responseHeaders = HttpResponseHeaders;

    if (Method != HttpMethod.Head &&
        StatusCode != StatusCodes.Status304NotModified &&
        !responseHeaders.HasTransferEncoding &&
        responseHeaders.ContentLength.HasValue &&
        _responseBytesWritten < responseHeaders.ContentLength.Value)
    {
        // We need to close the connection if any bytes were written since the client
        // cannot be certain of how many bytes it will receive.
        if (_responseBytesWritten > 0)
        {
            _keepAlive = false;
        }

        ex = new InvalidOperationException(
            CoreStrings.FormatTooFewBytesWritten(_responseBytesWritten, responseHeaders.ContentLength.Value));
        return false;
    }

    ex = null;
    return true;
}
// Writes an interim "100 Continue" when the request carried
// "Expect: 100-continue". No-op for HTTP/1.0 or once the response has started.
public ValueTask<FlushResult> ProduceContinueAsync()
{
    if (HasResponseStarted)
    {
        return default;
    }

    if (_httpVersion != Http.HttpVersion.Http10 &&
        ((IHeaderDictionary)HttpRequestHeaders).TryGetValue(HeaderNames.Expect, out var expect) &&
        (expect.FirstOrDefault() ?? "").Equals("100-continue", StringComparison.OrdinalIgnoreCase))
    {
        return Output.Write100ContinueAsync();
    }

    return default;
}
// Fires OnStarting callbacks and commits response headers before the first
// body write. The sync fast path avoids the async state machine when all
// callbacks complete synchronously.
public Task InitializeResponseAsync(int firstWriteByteCount)
{
    var startingTask = FireOnStarting();
    if (!startingTask.IsCompletedSuccessfully)
    {
        return InitializeResponseAwaited(startingTask, firstWriteByteCount);
    }

    VerifyInitializeState(firstWriteByteCount);

    ProduceStart(appCompleted: false);

    return Task.CompletedTask;
}

// Slow path of InitializeResponseAsync: awaits pending OnStarting callbacks first.
[MethodImpl(MethodImplOptions.NoInlining)]
public async Task InitializeResponseAwaited(Task startingTask, int firstWriteByteCount)
{
    await startingTask;

    VerifyInitializeState(firstWriteByteCount);

    ProduceStart(appCompleted: false);
}
// Variant of response initialization used by FirstWriteAsyncInternal: creates
// the headers but leaves writing them to the caller, which flushes immediately.
private HttpResponseHeaders InitializeResponseFirstWrite(int firstWriteByteCount)
{
    VerifyInitializeState(firstWriteByteCount);

    var responseHeaders = CreateResponseHeaders(appCompleted: false);

    // InitializeResponse can only be called if we are just about to Flush the headers
    _requestProcessingStatus = RequestProcessingStatus.HeadersFlushed;

    return responseHeaders;
}

// Commits the response: finalizes headers and queues the status line + headers
// for output. Idempotent once the response has started.
private void ProduceStart(bool appCompleted)
{
    if (HasResponseStarted)
    {
        return;
    }

    _isLeasedMemoryInvalid = true;

    _requestProcessingStatus = RequestProcessingStatus.HeadersCommitted;

    var responseHeaders = CreateResponseHeaders(appCompleted);

    Output.WriteResponseHeaders(StatusCode, ReasonPhrase, responseHeaders, _autoChunk, appCompleted);
}
// Validates that the response may start: fails fast if the application already
// errored, then accounts for the first write's bytes.
private void VerifyInitializeState(int firstWriteByteCount)
{
    if (_applicationException is not null)
    {
        ThrowResponseAbortedException();
    }

    VerifyAndUpdateWrite(firstWriteByteCount);
}
// Writes the 4XX response for a rejected request during connection teardown.
// If the connection was already aborted there is nothing useful to send.
protected Task TryProduceInvalidRequestResponse()
{
    var shouldProduce = _requestRejectedException != null && !_connectionAborted;
    return shouldProduce ? ProduceEnd() : Task.CompletedTask;
}
// Finishes the response: applies error status if the request failed before
// headers were sent, commits headers if needed, then writes the body suffix
// (chunk terminator / trailers). Idempotent once the response has completed.
protected Task ProduceEnd()
{
    if (HasResponseCompleted)
    {
        return Task.CompletedTask;
    }

    _isLeasedMemoryInvalid = true;

    if (_requestRejectedException != null || _applicationException != null)
    {
        if (HasResponseStarted)
        {
            // We can no longer change the response, so we simply close the connection.
            _keepAlive = false;
            OnErrorAfterResponseStarted();
            return Task.CompletedTask;
        }

        // If the request was rejected, the error state has already been set by SetBadRequestState and
        // that should take precedence.
        if (_requestRejectedException != null)
        {
            SetErrorResponseException(_requestRejectedException);
        }
        else
        {
            // 500 Internal Server Error
            SetErrorResponseHeaders(statusCode: StatusCodes.Status500InternalServerError);
        }
    }

    if (!HasResponseStarted)
    {
        ProduceStart(appCompleted: true);
    }

    return WriteSuffix();
}
// Writes the response terminator (final chunk / stream end) and moves the
// request to ResponseCompleted, logging keep-alive and HEAD-write diagnostics.
private Task WriteSuffix()
{
    if (_autoChunk || _httpVersion >= Http.HttpVersion.Http2)
    {
        // For the same reason we call CheckLastWrite() in Content-Length responses.
        PreventRequestAbortedCancellation();
    }

    var writeTask = Output.WriteStreamSuffixAsync();

    if (!writeTask.IsCompletedSuccessfully)
    {
        return WriteSuffixAwaited(writeTask);
    }

    // Completed synchronously; observe the result to propagate any exception.
    writeTask.GetAwaiter().GetResult();

    _requestProcessingStatus = RequestProcessingStatus.ResponseCompleted;

    if (_keepAlive)
    {
        Log.ConnectionKeepAlive(ConnectionId);
    }

    if (Method == HttpMethod.Head && _responseBytesWritten > 0)
    {
        Log.ConnectionHeadResponseBodyWrite(ConnectionId, _responseBytesWritten);
    }

    return Task.CompletedTask;
}

// Slow path of WriteSuffix: marks headers flushed while the suffix write is in
// flight, then completes the response once it finishes.
private async Task WriteSuffixAwaited(ValueTask<FlushResult> writeTask)
{
    _requestProcessingStatus = RequestProcessingStatus.HeadersFlushed;

    await writeTask;

    _requestProcessingStatus = RequestProcessingStatus.ResponseCompleted;

    if (_keepAlive)
    {
        Log.ConnectionKeepAlive(ConnectionId);
    }

    if (Method == HttpMethod.Head && _responseBytesWritten > 0)
    {
        Log.ConnectionHeadResponseBodyWrite(ConnectionId, _responseBytesWritten);
    }
}
// Finalizes response headers just before they are committed: strips headers
// illegal for HTTP/2+, resolves keep-alive, chooses between Content-Length and
// chunked framing, and stamps Connection/Alt-Svc/Server/Date. The returned
// collection is made read-only.
private HttpResponseHeaders CreateResponseHeaders(bool appCompleted)
{
    var responseHeaders = HttpResponseHeaders;
    var hasConnection = responseHeaders.HasConnection;
    var hasTransferEncoding = responseHeaders.HasTransferEncoding;

    // We opt to remove the following headers from an HTTP/2+ response since their presence would be considered a protocol violation.
    // This is done quietly because these headers are valid in other contexts and this saves the app from being broken by
    // low level protocol details. Http.Sys also removes these headers silently.
    //
    // https://tools.ietf.org/html/rfc7540#section-8.1.2.2
    // "This means that an intermediary transforming an HTTP/1.x message to HTTP/2 will need to remove any header fields
    // nominated by the Connection header field, along with the Connection header field itself.
    // Such intermediaries SHOULD also remove other connection-specific header fields, such as Keep-Alive,
    // Proxy-Connection, Transfer-Encoding, and Upgrade, even if they are not nominated by the Connection header field."
    //
    // Http/3 has a similar requirement: https://quicwg.org/base-drafts/draft-ietf-quic-http.html#name-field-formatting-and-compre
    if (_httpVersion > Http.HttpVersion.Http11 && responseHeaders.HasInvalidH2H3Headers)
    {
        responseHeaders.ClearInvalidH2H3Headers();
        hasTransferEncoding = false;
        hasConnection = false;

        Log.InvalidResponseHeaderRemoved();
    }

    // An explicit "Connection" header without "keep-alive" overrides our keep-alive decision.
    if (_keepAlive &&
        hasConnection &&
        (HttpHeaders.ParseConnection(responseHeaders) & ConnectionOptions.KeepAlive) == 0)
    {
        _keepAlive = false;
    }

    // https://tools.ietf.org/html/rfc7230#section-3.3.1
    // If any transfer coding other than
    // chunked is applied to a response payload body, the sender MUST either
    // apply chunked as the final transfer coding or terminate the message
    // by closing the connection.
    if (hasTransferEncoding &&
        HttpHeaders.GetFinalTransferCoding(responseHeaders.HeaderTransferEncoding) != TransferCoding.Chunked)
    {
        _keepAlive = false;
    }

    // Set whether response can have body
    _canWriteResponseBody = CanWriteResponseBody();

    if (!_canWriteResponseBody && hasTransferEncoding)
    {
        RejectNonBodyTransferEncodingResponse(appCompleted);
    }
    else if (StatusCode == StatusCodes.Status101SwitchingProtocols)
    {
        // An upgraded connection takes over the transport; it can't be reused.
        _keepAlive = false;
    }
    else if (!hasTransferEncoding && !responseHeaders.ContentLength.HasValue)
    {
        if ((appCompleted || !_canWriteResponseBody) && !_hasAdvanced) // Avoid setting contentLength of 0 if we wrote data before calling CreateResponseHeaders
        {
            // Don't set the Content-Length header automatically for HEAD requests, 204 responses, or 304 responses.
            if (CanAutoSetContentLengthZeroResponseHeader())
            {
                // Since the app has completed writing or cannot write to the response, we can safely set the Content-Length to 0.
                responseHeaders.ContentLength = 0;
            }
        }
        // Note for future reference: never change this to set _autoChunk to true on HTTP/1.0
        // connections, even if we were to infer the client supports it because an HTTP/1.0 request
        // was received that used chunked encoding. Sending a chunked response to an HTTP/1.0
        // client would break compliance with RFC 7230 (section 3.3.1):
        //
        // A server MUST NOT send a response containing Transfer-Encoding unless the corresponding
        // request indicates HTTP/1.1 (or later).
        //
        // This also covers HTTP/2, which forbids chunked encoding in RFC 7540 (section 8.1):
        //
        // The chunked transfer encoding defined in Section 4.1 of [RFC7230] MUST NOT be used in HTTP/2.
        else if (_httpVersion == Http.HttpVersion.Http11)
        {
            _autoChunk = true;
            responseHeaders.SetRawTransferEncoding("chunked", _bytesTransferEncodingChunked);
        }
        else
        {
            // No framing available (e.g. HTTP/1.0 with unknown length): close to delimit the body.
            _keepAlive = false;
        }
    }

    responseHeaders.SetReadOnly();

    // "Connection" is an HTTP/1.x-only concept.
    if (!hasConnection && _httpVersion < Http.HttpVersion.Http2)
    {
        if (!_keepAlive)
        {
            responseHeaders.SetRawConnection("close", _bytesConnectionClose);
        }
        else if (_httpVersion == Http.HttpVersion.Http10)
        {
            responseHeaders.SetRawConnection("keep-alive", _bytesConnectionKeepAlive);
        }
    }

    if (_context.AltSvcHeader != null && !responseHeaders.HasAltSvc)
    {
        responseHeaders.SetRawAltSvc(_context.AltSvcHeader.Value, _context.AltSvcHeader.RawBytes);
    }

    if (ServerOptions.AddServerHeader && !responseHeaders.HasServer)
    {
        responseHeaders.SetRawServer(Constants.ServerName, _bytesServer);
    }

    if (!responseHeaders.HasDate)
    {
        var dateHeaderValues = DateHeaderValueManager.GetDateHeaderValues();
        responseHeaders.SetRawDate(dateHeaderValues.String, dateHeaderValues.Bytes);
    }

    return responseHeaders;
}
// True when this response is permitted to carry a body.
// List of status codes taken from Microsoft.Net.Http.Server.Response.
private bool CanWriteResponseBody()
{
    if (Method == HttpMethod.Head)
    {
        return false;
    }

    return StatusCode is not (StatusCodes.Status204NoContent
        or StatusCodes.Status205ResetContent
        or StatusCodes.Status304NotModified);
}
// True when it is safe to synthesize "Content-Length: 0" for an empty response
// (never for HEAD, 204, or 304, where the header has special semantics).
private bool CanAutoSetContentLengthZeroResponseHeader()
{
    if (Method == HttpMethod.Head)
    {
        return false;
    }

    return StatusCode is not (StatusCodes.Status204NoContent
        or StatusCodes.Status304NotModified);
}
// Throw helper for setters that become read-only once the response has started.
private static void ThrowResponseAlreadyStartedException(string value)
    => throw new InvalidOperationException(CoreStrings.FormatParameterReadOnlyAfterResponseStarted(value));
// A Transfer-Encoding header is invalid on a body-less response (HEAD/204/205/304).
// If the app is still running, throw so it sees the error; if it already
// completed, log and convert the response to a 500.
private void RejectNonBodyTransferEncodingResponse(bool appCompleted)
{
    var ex = new InvalidOperationException(CoreStrings.FormatHeaderNotAllowedOnResponse("Transfer-Encoding", StatusCode));
    if (!appCompleted)
    {
        // Back out of header creation; surface the exception in user code.
        _requestProcessingStatus = RequestProcessingStatus.AppStarted;
        throw ex;
    }
    else
    {
        ReportApplicationError(ex);

        // 500 Internal Server Error
        SetErrorResponseHeaders(statusCode: StatusCodes.Status500InternalServerError);
    }
}
// Applies the status from a rejected request, plus an "Allow" header when the
// Kestrel-specific exception carries one (e.g. 405 Method Not Allowed).
private void SetErrorResponseException(BadHttpRequestException ex)
{
    SetErrorResponseHeaders(ex.StatusCode);

#pragma warning disable CS0618 // Type or member is obsolete
    if (ex is Microsoft.AspNetCore.Server.Kestrel.Core.BadHttpRequestException kestrelEx && !StringValues.IsNullOrEmpty(kestrelEx.AllowedHeader))
#pragma warning restore CS0618 // Type or member is obsolete
    {
        HttpResponseHeaders.HeaderAllow = kestrelEx.AllowedHeader;
    }
}

// Replaces any app-set response state with a minimal error response:
// given status, no reason phrase, empty body, Date (and optional Server) headers.
private void SetErrorResponseHeaders(int statusCode)
{
    Debug.Assert(!HasResponseStarted, $"{nameof(SetErrorResponseHeaders)} called after response had already started.");

    StatusCode = statusCode;
    ReasonPhrase = null;

    var responseHeaders = HttpResponseHeaders;
    responseHeaders.Reset();
    ResponseTrailers?.Reset();
    var dateHeaderValues = DateHeaderValueManager.GetDateHeaderValues();

    responseHeaders.SetRawDate(dateHeaderValues.String, dateHeaderValues.Bytes);

    responseHeaders.ContentLength = 0;

    if (ServerOptions.AddServerHeader)
    {
        responseHeaders.SetRawServer(Constants.ServerName, _bytesServer);
    }
}
// Called when the app writes to a response that cannot have a body.
// HEAD writes are tolerated (and logged at end of request); 204/205/304 throw.
public void HandleNonBodyResponseWrite()
{
    if (Method == HttpMethod.Head)
    {
        // Writes to HEAD response are ignored and logged at the end of the request.
        return;
    }

    ThrowWritingToResponseBodyNotSupported();
}

// Throw helper: writing a body is not supported for this status code.
[StackTraceHidden]
private void ThrowWritingToResponseBodyNotSupported()
    => throw new InvalidOperationException(CoreStrings.FormatWritingToResponseBodyNotSupported(StatusCode));
// Throw helper used when the app touches the response after an unhandled
// application exception; wraps the original exception as the inner cause.
[StackTraceHidden]
private void ThrowResponseAbortedException()
{
    throw new ObjectDisposedException(CoreStrings.UnhandledApplicationException, _applicationException);
}

// Rejects a malformed request-target from the parser.
[StackTraceHidden]
[DoesNotReturn]
public void ThrowRequestTargetRejected(Span<byte> target)
    => throw GetInvalidRequestTargetException(target);

// NoInlining keeps exception construction (and the escaped-target string,
// only built at Information level or above) off the parsing fast path.
[MethodImpl(MethodImplOptions.NoInlining)]
private BadHttpRequestException GetInvalidRequestTargetException(ReadOnlySpan<byte> target)
    => KestrelBadHttpRequestException.GetException(
        RequestRejectionReason.InvalidRequestTarget,
        Log.IsEnabled(LogLevel.Information)
            ? target.GetAsciiStringEscaped(Constants.MaxExceptionDetailSize)
            : string.Empty);
// Records a rejected request: logs it, stores the exception for
// TryProduceInvalidRequestResponse, applies the error status if headers
// haven't been sent, raises a DiagnosticSource event, and disables keep-alive.
public void SetBadRequestState(BadHttpRequestException ex)
{
    Log.ConnectionBadRequest(ConnectionId, ex);
    _requestRejectedException = ex;

    if (!HasResponseStarted)
    {
        SetErrorResponseException(ex);
    }

    const string badRequestEventName = "Microsoft.AspNetCore.Server.Kestrel.BadRequest";
    if (ServiceContext.DiagnosticSource?.IsEnabled(badRequestEventName) == true)
    {
        ServiceContext.DiagnosticSource.Write(badRequestEventName, this);
    }

    _keepAlive = false;
}
// Records an application-level failure for this request and logs it.
// Multiple errors are folded into a flattened AggregateException so none are lost.
public void ReportApplicationError(Exception? ex)
{
    // ReportApplicationError can be called with a null exception from MessageBody.
    if (ex is null)
    {
        return;
    }

    _applicationException = _applicationException switch
    {
        null => ex,
        AggregateException => new AggregateException(_applicationException, ex).Flatten(),
        _ => new AggregateException(_applicationException, ex),
    };

    Log.ApplicationError(ConnectionId, TraceIdentifier, ex);
}
// IBufferWriter-style commit of `bytes` previously leased via GetMemory/GetSpan.
public void Advance(int bytes)
{
    if (bytes < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(bytes));
    }
    else if (bytes > 0)
    {
        // Remember that data was written so CreateResponseHeaders won't
        // synthesize "Content-Length: 0".
        _hasAdvanced = true;
    }

    if (_isLeasedMemoryInvalid)
    {
        throw new InvalidOperationException("Invalid ordering of calling StartAsync or CompleteAsync and Advance.");
    }

    if (_canWriteResponseBody)
    {
        VerifyAndUpdateWrite(bytes);
        Output.Advance(bytes);
    }
    else
    {
        HandleNonBodyResponseWrite();
        // For HEAD requests, we still use the number of bytes written for logging
        // how many bytes were written.
        VerifyAndUpdateWrite(bytes);
    }
}
// Leases output buffer space for the response body. Leasing marks the lease
// valid again so a subsequent Advance() is allowed.
public Memory<byte> GetMemory(int sizeHint = 0)
{
    _isLeasedMemoryInvalid = false;
    return Output.GetMemory(sizeHint);
}

// Span variant of GetMemory.
public Span<byte> GetSpan(int sizeHint = 0)
{
    _isLeasedMemoryInvalid = false;
    return Output.GetSpan(sizeHint);
}
// Flushes the response pipe, first initializing the response (headers +
// OnStarting callbacks) if it hasn't started yet.
public ValueTask<FlushResult> FlushPipeAsync(CancellationToken cancellationToken)
{
    if (!HasResponseStarted)
    {
        var initializeTask = InitializeResponseAsync(0);
        if (!initializeTask.IsCompletedSuccessfully)
        {
            // Initialization went async (pending OnStarting callbacks); await it first.
            return FlushAsyncAwaited(initializeTask, cancellationToken);
        }
    }

    return Output.FlushAsync(cancellationToken);
}
// Cancels the currently pending (or next) flush on the response output.
public void CancelPendingFlush() => Output.CancelPendingFlush();
// Implements IHttpResponseBodyFeature.CompleteAsync: finalizes headers, body,
// and trailers. A non-null exception records an application error and, if the
// response already started, aborts the connection.
public Task CompleteAsync(Exception? exception = null)
{
    if (exception != null)
    {
        var wrappedException = new ConnectionAbortedException("The BodyPipe was completed with an exception.", exception);
        ReportApplicationError(wrappedException);

        if (HasResponseStarted)
        {
            // The response can't be cleanly replaced with an error; tear the connection down.
            ApplicationAbort();
        }
    }

    // Finalize headers
    if (!HasResponseStarted)
    {
        var onStartingTask = FireOnStarting();
        if (!onStartingTask.IsCompletedSuccessfully)
        {
            return CompleteAsyncAwaited(onStartingTask);
        }
    }

    // Flush headers, body, trailers...
    if (!HasResponseCompleted)
    {
        if (!VerifyResponseContentLength(out var lengthException))
        {
            // Try to throw this exception from CompleteAsync() instead of CompleteAsyncAwaited() if possible,
            // so it can be observed by BodyWriter.Complete(). If this isn't possible because an
            // async OnStarting callback hadn't yet run, it's OK, since the Exception will be observed with
            // the call to _bodyControl.StopAsync() in ProcessRequests().
            ThrowException(lengthException);
        }

        return ProduceEnd();
    }

    return Task.CompletedTask;
}
// Slow path of CompleteAsync: awaits pending OnStarting callbacks before
// verifying Content-Length and finishing the response.
private async Task CompleteAsyncAwaited(Task onStartingTask)
{
    await onStartingTask;

    if (!HasResponseCompleted)
    {
        if (!VerifyResponseContentLength(out var lengthException))
        {
            ThrowException(lengthException);
        }

        await ProduceEnd();
    }
}
// Throw helper so callers can stay inlinable and keep their own frames
// out of the exception's stack trace.
[StackTraceHidden]
private static void ThrowException(Exception exception) => throw exception;
// Writes response-body data, initializing the response on the first write and
// choosing chunked vs. raw framing.
public ValueTask<FlushResult> WritePipeAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken)
{
    // For the first write, ensure headers are flushed if WriteDataAsync isn't called.
    if (!HasResponseStarted)
    {
        return FirstWriteAsync(data, cancellationToken);
    }
    else
    {
        VerifyAndUpdateWrite(data.Length);
    }

    if (_canWriteResponseBody)
    {
        if (_autoChunk)
        {
            if (data.Length == 0)
            {
                // An empty chunk would terminate the body prematurely; skip it.
                return default;
            }

            return Output.WriteChunkAsync(data.Span, cancellationToken);
        }
        else
        {
            CheckLastWrite();
            return Output.WriteDataToPipeAsync(data.Span, cancellationToken: cancellationToken);
        }
    }
    else
    {
        HandleNonBodyResponseWrite();
        return default;
    }
}
// First body write: runs OnStarting callbacks before the response is committed,
// taking the async path only when a callback is still pending.
private ValueTask<FlushResult> FirstWriteAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken)
{
    Debug.Assert(!HasResponseStarted);

    var startingTask = FireOnStarting();
    if (!startingTask.IsCompletedSuccessfully)
    {
        return FirstWriteAsyncAwaited(startingTask, data, cancellationToken);
    }

    return FirstWriteAsyncInternal(data, cancellationToken);
}

// Slow path of FirstWriteAsync: awaits pending OnStarting callbacks first.
private async ValueTask<FlushResult> FirstWriteAsyncAwaited(Task initializeTask, ReadOnlyMemory<byte> data, CancellationToken cancellationToken)
{
    await initializeTask;

    return await FirstWriteAsyncInternal(data, cancellationToken);
}
// Commits the response headers together with the first body bytes, picking the
// combined header+data write that matches the framing in use.
private ValueTask<FlushResult> FirstWriteAsyncInternal(ReadOnlyMemory<byte> data, CancellationToken cancellationToken)
{
    var responseHeaders = InitializeResponseFirstWrite(data.Length);

    if (_canWriteResponseBody)
    {
        if (_autoChunk)
        {
            if (data.Length == 0)
            {
                // Nothing to chunk yet; just write and flush the headers.
                Output.WriteResponseHeaders(StatusCode, ReasonPhrase, responseHeaders, _autoChunk, appCompleted: false);
                return Output.FlushAsync(cancellationToken);
            }

            return Output.FirstWriteChunkedAsync(StatusCode, ReasonPhrase, responseHeaders, _autoChunk, data.Span, cancellationToken);
        }
        else
        {
            CheckLastWrite();
            return Output.FirstWriteAsync(StatusCode, ReasonPhrase, responseHeaders, _autoChunk, data.Span, cancellationToken);
        }
    }
    else
    {
        // Body-less response: emit headers only; the write itself is ignored or throws.
        Output.WriteResponseHeaders(StatusCode, ReasonPhrase, responseHeaders, _autoChunk, appCompleted: false);
        HandleNonBodyResponseWrite();
        return Output.FlushAsync(cancellationToken);
    }
}
// Task-returning wrapper over FlushPipeAsync for callers that expect a Task.
public Task FlushAsync(CancellationToken cancellationToken = default)
{
return FlushPipeAsync(cancellationToken).GetAsTask();
}
// Awaits response initialization, then flushes the output. NoInlining keeps this rare
// async path from being inlined into the common synchronous caller.
[MethodImpl(MethodImplOptions.NoInlining)]
private async ValueTask<FlushResult> FlushAsyncAwaited(Task initializeTask, CancellationToken cancellationToken)
{
await initializeTask;
return await Output.FlushAsync(cancellationToken);
}
// Task-returning wrapper over WritePipeAsync for callers that expect a Task.
public Task WriteAsync(ReadOnlyMemory<byte> data, CancellationToken cancellationToken = default)
{
return WritePipeAsync(data, cancellationToken).GetAsTask();
}
// Completes the first body write after an asynchronous initialization task has finished.
public async ValueTask<FlushResult> WriteAsyncAwaited(Task initializeTask, ReadOnlyMemory<byte> data, CancellationToken cancellationToken)
{
await initializeTask;
// WriteAsyncAwaited is only called for the first write to the body.
// Ensure headers are flushed if Write(Chunked)Async isn't called.
if (_canWriteResponseBody)
{
if (_autoChunk)
{
if (data.Length == 0)
{
// No data: just make sure the headers reach the wire.
return await Output.FlushAsync(cancellationToken);
}
return await Output.WriteChunkAsync(data.Span, cancellationToken);
}
else
{
CheckLastWrite();
return await Output.WriteDataToPipeAsync(data.Span, cancellationToken: cancellationToken);
}
}
else
{
// Body writes are not allowed for this response; record the attempt and flush headers.
HandleNonBodyResponseWrite();
return await Output.FlushAsync(cancellationToken);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
// Driver for the Avx.TestC Vector256<sbyte> test: on supported hardware it runs every
// scenario (direct, reflection, static/local/field operands); otherwise it verifies
// that PlatformNotSupportedException is thrown. Throws if any scenario failed.
private static void TestCSByte()
{
var test = new BooleanBinaryOpTest__TestCSByte();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local works
test.RunLclFldScenario();
// Validates passing an instance member works
test.RunFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
// Auto-generated test harness for Avx.TestC over Vector256<sbyte>. Each RunXxx scenario
// invokes the intrinsic with operands sourced a different way and validates the boolean
// result against a managed reference computation.
public sealed unsafe class BooleanBinaryOpTest__TestCSByte
{
// 32 bytes = size of a Vector256; element counts derive from it.
private const int VectorSize = 32;
private const int Op1ElementCount = VectorSize / sizeof(SByte);
private const int Op2ElementCount = VectorSize / sizeof(SByte);
private static SByte[] _data1 = new SByte[Op1ElementCount];
private static SByte[] _data2 = new SByte[Op2ElementCount];
private static Vector256<SByte> _clsVar1;
private static Vector256<SByte> _clsVar2;
private Vector256<SByte> _fld1;
private Vector256<SByte> _fld2;
private BooleanBinaryOpTest__DataTable<SByte, SByte> _dataTable;
// Populates the static vector operands with random data once per type.
static BooleanBinaryOpTest__TestCSByte()
{
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<SByte>, byte>(ref _clsVar1), ref Unsafe.As<SByte, byte>(ref _data1[0]), VectorSize);
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<SByte>, byte>(ref _clsVar2), ref Unsafe.As<SByte, byte>(ref _data2[0]), VectorSize);
}
// Populates the instance fields and the pointer-backed data table with random data.
public BooleanBinaryOpTest__TestCSByte()
{
Succeeded = true;
var random = new Random();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<SByte>, byte>(ref _fld1), ref Unsafe.As<SByte, byte>(ref _data1[0]), VectorSize);
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<SByte>, byte>(ref _fld2), ref Unsafe.As<SByte, byte>(ref _data2[0]), VectorSize);
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (sbyte)(random.Next(sbyte.MinValue, sbyte.MaxValue)); }
_dataTable = new BooleanBinaryOpTest__DataTable<SByte, SByte>(_data1, _data2, VectorSize);
}
public bool IsSupported => Avx.IsSupported;
// Set to false by ValidateResult on mismatch; checked by the driver.
public bool Succeeded { get; set; }
// Operands read from raw memory via Unsafe.Read.
public void RunBasicScenario_UnsafeRead()
{
var result = Avx.TestC(
Unsafe.Read<Vector256<SByte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<SByte>>(_dataTable.inArray2Ptr)
);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
}
// Operands loaded with the unaligned Load intrinsic.
public void RunBasicScenario_Load()
{
var result = Avx.TestC(
Avx.LoadVector256((SByte*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((SByte*)(_dataTable.inArray2Ptr))
);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
}
// Operands loaded with the aligned Load intrinsic (data table provides aligned storage).
public void RunBasicScenario_LoadAligned()
{
var result = Avx.TestC(
Avx.LoadAlignedVector256((SByte*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((SByte*)(_dataTable.inArray2Ptr))
);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
}
// Same scenarios again, but dispatching the intrinsic via reflection.
public void RunReflectionScenario_UnsafeRead()
{
var method = typeof(Avx).GetMethod(nameof(Avx.TestC), new Type[] { typeof(Vector256<SByte>), typeof(Vector256<SByte>) });
if (method != null)
{
var result = method.Invoke(null, new object[] {
Unsafe.Read<Vector256<SByte>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<SByte>>(_dataTable.inArray2Ptr)
});
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
}
}
public void RunReflectionScenario_Load()
{
var method = typeof(Avx).GetMethod(nameof(Avx.TestC), new Type[] { typeof(Vector256<SByte>), typeof(Vector256<SByte>) });
if (method != null)
{
var result = method.Invoke(null, new object[] {
Avx.LoadVector256((SByte*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((SByte*)(_dataTable.inArray2Ptr))
});
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
}
}
public void RunReflectionScenario_LoadAligned()
{
var method = typeof(Avx).GetMethod(nameof(Avx.TestC), new Type[] { typeof(Vector256<SByte>), typeof(Vector256<SByte>) });
if (method != null)
{
var result = method.Invoke(null, new object[] {
Avx.LoadAlignedVector256((SByte*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((SByte*)(_dataTable.inArray2Ptr))
});
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
}
}
// Operands passed as static fields.
public void RunClsVarScenario()
{
var result = Avx.TestC(
_clsVar1,
_clsVar2
);
ValidateResult(_clsVar1, _clsVar2, result);
}
// Operands passed as locals, sourced three different ways.
public void RunLclVarScenario_UnsafeRead()
{
var left = Unsafe.Read<Vector256<SByte>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector256<SByte>>(_dataTable.inArray2Ptr);
var result = Avx.TestC(left, right);
ValidateResult(left, right, result);
}
public void RunLclVarScenario_Load()
{
var left = Avx.LoadVector256((SByte*)(_dataTable.inArray1Ptr));
var right = Avx.LoadVector256((SByte*)(_dataTable.inArray2Ptr));
var result = Avx.TestC(left, right);
ValidateResult(left, right, result);
}
public void RunLclVarScenario_LoadAligned()
{
var left = Avx.LoadAlignedVector256((SByte*)(_dataTable.inArray1Ptr));
var right = Avx.LoadAlignedVector256((SByte*)(_dataTable.inArray2Ptr));
var result = Avx.TestC(left, right);
ValidateResult(left, right, result);
}
// Operands read from the fields of a freshly created local instance.
public void RunLclFldScenario()
{
var test = new BooleanBinaryOpTest__TestCSByte();
var result = Avx.TestC(test._fld1, test._fld2);
ValidateResult(test._fld1, test._fld2, result);
}
// Operands read from this instance's fields.
public void RunFldScenario()
{
var result = Avx.TestC(_fld1, _fld2);
ValidateResult(_fld1, _fld2, result);
}
// On unsupported hardware the intrinsic must throw PlatformNotSupportedException.
public void RunUnsupportedScenario()
{
Succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
Succeeded = true;
}
}
// NOTE(review): Unsafe.Write to an unpinned managed array relies on the arrays not
// moving during the copy; this matches the generated-template pattern — confirm
// against the template before changing.
private void ValidateResult(Vector256<SByte> left, Vector256<SByte> right, bool result, [CallerMemberName] string method = "")
{
SByte[] inArray1 = new SByte[Op1ElementCount];
SByte[] inArray2 = new SByte[Op2ElementCount];
Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
ValidateResult(inArray1, inArray2, result, method);
}
private void ValidateResult(void* left, void* right, bool result, [CallerMemberName] string method = "")
{
SByte[] inArray1 = new SByte[Op1ElementCount];
SByte[] inArray2 = new SByte[Op2ElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<SByte, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
ValidateResult(inArray1, inArray2, result, method);
}
// Reference check: TestC is expected to be true iff (~left & right) is all-zero
// across every element (the CF semantics mirrored by this loop).
private void ValidateResult(SByte[] left, SByte[] right, bool result, [CallerMemberName] string method = "")
{
var expectedResult = true;
for (var i = 0; i < Op1ElementCount; i++)
{
expectedResult &= ((~left[i] & right[i]) == 0);
}
if (expectedResult != result)
{
Succeeded = false;
Console.WriteLine($"{nameof(Avx)}.{nameof(Avx.TestC)}<SByte>(Vector256<SByte>, Vector256<SByte>): {method} failed:");
Console.WriteLine($"  left: ({string.Join(", ", left)})");
Console.WriteLine($" right: ({string.Join(", ", right)})");
Console.WriteLine($"  result: ({string.Join(", ", result)})");
Console.WriteLine();
}
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Timers;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Diagnostics;
using System.Reflection;
using System.Threading;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Physics.Manager;
using OpenSim.Region.Framework.Scenes.Serialization;
using System.Runtime.Serialization.Formatters.Binary;
using System.Runtime.Serialization;
using Timer = System.Timers.Timer;
using log4net;
namespace OpenSim.Region.Framework.Scenes
{
// Per-scene shared timer that drives all KeyframeMotion instances in that scene with a
// single 50 ms tick, instead of one timer per motion. Concurrency-sensitive: m_timers
// guards the static scene->timer map, m_lockObject guards m_motions, and m_timerLock
// serializes tick processing.
public class KeyframeTimer
{
// One timer per scene, shared by every motion in that scene.
private static Dictionary<Scene, KeyframeTimer> m_timers =
new Dictionary<Scene, KeyframeTimer>();
private Timer m_timer;
// Used as a set: only the keys matter, values are always null.
private Dictionary<KeyframeMotion, object> m_motions = new Dictionary<KeyframeMotion, object>();
private object m_lockObject = new object();
private object m_timerLock = new object();
// Tick period in milliseconds.
private const double m_tickDuration = 50.0;
public double TickDuration
{
get { return m_tickDuration; }
}
// NOTE(review): the scene parameter is unused here; the scene association lives in
// the static m_timers map managed by Add/Remove.
public KeyframeTimer(Scene scene)
{
m_timer = new Timer();
m_timer.Interval = TickDuration;
m_timer.AutoReset = true;
m_timer.Elapsed += OnTimer;
}
// Starts the underlying timer if it is not already running.
public void Start()
{
lock (m_timer)
{
if (!m_timer.Enabled)
m_timer.Start();
}
}
// Tick handler: fans the tick out to every registered motion. TryEnter makes
// overlapping ticks drop out instead of queueing (timer events can still fire
// after Stop or while a previous tick is in progress).
private void OnTimer(object sender, ElapsedEventArgs ea)
{
if (!Monitor.TryEnter(m_timerLock))
return;
try
{
List<KeyframeMotion> motions;
lock (m_lockObject)
{
// Snapshot the keys so motions can be added/removed while we iterate.
motions = new List<KeyframeMotion>(m_motions.Keys);
}
foreach (KeyframeMotion m in motions)
{
try
{
m.OnTimer(TickDuration);
}
catch (Exception)
{
// Don't stop processing
}
}
}
catch (Exception)
{
// Keep running no matter what
}
finally
{
Monitor.Exit(m_timerLock);
}
}
// Registers a motion with its scene's shared timer, creating (and, once all regions
// are ready, starting) the timer on first use for that scene.
public static void Add(KeyframeMotion motion)
{
KeyframeTimer timer;
if (motion.Scene == null)
return;
lock (m_timers)
{
if (!m_timers.TryGetValue(motion.Scene, out timer))
{
timer = new KeyframeTimer(motion.Scene);
m_timers[motion.Scene] = timer;
if (!SceneManager.Instance.AllRegionsReady)
{
// Start the timers only once all the regions are ready. This is required
// when using megaregions, because the megaregion is correctly configured
// only after all the regions have been loaded. (If we don't do this then
// when the prim moves it might think that it crossed into a region.)
SceneManager.Instance.OnRegionsReadyStatusChange += delegate(SceneManager sm)
{
if (sm.AllRegionsReady)
timer.Start();
};
}
// Check again, in case the regions were started while we were adding the event handler
if (SceneManager.Instance.AllRegionsReady)
{
timer.Start();
}
}
}
lock (timer.m_lockObject)
{
timer.m_motions[motion] = null;
}
}
// Unregisters a motion from its scene's shared timer. The timer itself is left
// running even when its motion set becomes empty.
public static void Remove(KeyframeMotion motion)
{
KeyframeTimer timer;
if (motion.Scene == null)
return;
lock (m_timers)
{
if (!m_timers.TryGetValue(motion.Scene, out timer))
{
return;
}
}
lock (timer.m_lockObject)
{
timer.m_motions.Remove(motion);
}
}
}
// Drives a scene object along a list of keyframes (position/rotation over time),
// ticked by the scene's shared KeyframeTimer. Persisted with BinaryFormatter, so the
// serialized field set must stay stable; [NonSerialized] marks runtime-only state.
// NOTE(review): BinaryFormatter deserialization of stored data is a known security
// liability; kept here for wire/storage compatibility with existing regions.
[Serializable]
public class KeyframeMotion
{
//private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
public enum PlayMode : int
{
Forward = 0,
Reverse = 1,
Loop = 2,
PingPong = 3
};
// Which channels a keyframe list animates.
[Flags]
public enum DataFormat : int
{
Translation = 2,
Rotation = 1
}
// A single keyframe. Position/Rotation are targets (nullable = channel unused);
// StartPosition/StartRotation are filled in by GetNextList when the frame list is built.
[Serializable]
public struct Keyframe
{
public Vector3? Position;
public Quaternion? Rotation;
public Quaternion StartRotation;
public int TimeMS;
public int TimeTotal;
public Vector3 AngularVelocity;
public Vector3 StartPosition;
};
// Position the group had when this motion was serialized; used to re-base absolute
// positions after the object is moved/restored (see UpdateSceneObject).
private Vector3 m_serializedPosition;
private Vector3 m_basePosition;
private Quaternion m_baseRotation;
private Keyframe m_currentFrame;
// Frames remaining in the current iteration (rebuilt from m_keyframes by GetNextList).
private List<Keyframe> m_frames = new List<Keyframe>();
// The authored keyframe list, as set by SetKeyframes.
private Keyframe[] m_keyframes;
// skip timer events.
//timer.stop doesn't assure there aren't event threads still being fired
[NonSerialized()]
private bool m_timerStopped;
[NonSerialized()]
private bool m_isCrossing;
[NonSerialized()]
private bool m_waitingCrossing;
// retry position for cross fail
[NonSerialized()]
private Vector3 m_nextPosition;
[NonSerialized()]
private SceneObjectGroup m_group;
private PlayMode m_mode = PlayMode.Forward;
private DataFormat m_data = DataFormat.Translation | DataFormat.Rotation;
private bool m_running = false;
[NonSerialized()]
private bool m_selected = false;
private int m_iterations = 0;
// When > 0, OnTimer decrements and returns, effectively pausing the motion.
private int m_skipLoops = 0;
[NonSerialized()]
private Scene m_scene;
public Scene Scene
{
get { return m_scene; }
}
public DataFormat Data
{
get { return m_data; }
}
// Selection pauses the motion: on select the timer stops and the position is saved;
// on deselect positions are recomputed relative to wherever the user dragged the object.
public bool Selected
{
set
{
if (m_group != null)
{
if (!value)
{
// Once we're let go, recompute positions
if (m_selected)
UpdateSceneObject(m_group);
}
else
{
// Save selection position in case we get moved
if (!m_selected)
{
StopTimer();
m_serializedPosition = m_group.AbsolutePosition;
}
}
}
m_isCrossing = false;
m_waitingCrossing = false;
m_selected = value;
}
}
private void StartTimer()
{
KeyframeTimer.Add(this);
m_timerStopped = false;
}
private void StopTimer()
{
// Flag first: stray timer events already in flight check m_timerStopped.
m_timerStopped = true;
KeyframeTimer.Remove(this);
}
// Deserializes a motion from stored bytes and re-attaches it to the given group.
// Returns null if deserialization fails.
public static KeyframeMotion FromData(SceneObjectGroup grp, Byte[] data)
{
KeyframeMotion newMotion = null;
try
{
using (MemoryStream ms = new MemoryStream(data))
{
BinaryFormatter fmt = new BinaryFormatter();
newMotion = (KeyframeMotion)fmt.Deserialize(ms);
}
newMotion.m_group = grp;
if (grp != null)
{
newMotion.m_scene = grp.Scene;
if (grp.IsSelected)
newMotion.m_selected = true;
}
newMotion.m_timerStopped = false;
newMotion.m_running = true;
newMotion.m_isCrossing = false;
newMotion.m_waitingCrossing = false;
}
catch
{
newMotion = null;
}
return newMotion;
}
// Re-attaches the motion to a group and shifts all stored positions by however far
// the group moved since the positions were recorded (m_serializedPosition).
public void UpdateSceneObject(SceneObjectGroup grp)
{
m_isCrossing = false;
m_waitingCrossing = false;
StopTimer();
if (grp == null)
return;
m_group = grp;
m_scene = grp.Scene;
Vector3 grppos = grp.AbsolutePosition;
Vector3 offset = grppos - m_serializedPosition;
// avoid doing it more than once
// currently this will happen when dragging a prim to another region
m_serializedPosition = grppos;
m_basePosition += offset;
m_nextPosition += offset;
m_currentFrame.StartPosition += offset;
m_currentFrame.Position += offset;
for (int i = 0; i < m_frames.Count; i++)
{
Keyframe k = m_frames[i];
k.StartPosition += offset;
k.Position += offset;
m_frames[i]=k;
}
if (m_running)
Start();
}
public KeyframeMotion(SceneObjectGroup grp, PlayMode mode, DataFormat data)
{
m_mode = mode;
m_data = data;
m_group = grp;
if (grp != null)
{
m_basePosition = grp.AbsolutePosition;
m_baseRotation = grp.GroupRotation;
m_scene = grp.Scene;
}
m_timerStopped = true;
m_isCrossing = false;
m_waitingCrossing = false;
}
public void SetKeyframes(Keyframe[] frames)
{
m_keyframes = frames;
}
// Deep-ish copy of this motion bound to a new group (used when duplicating objects).
// NOTE(review): this also restarts OUR timer (StopTimer/StartTimer around the copy)
// when the motion is running — confirm against callers before changing.
public KeyframeMotion Copy(SceneObjectGroup newgrp)
{
StopTimer();
KeyframeMotion newmotion = new KeyframeMotion(null, m_mode, m_data);
newmotion.m_group = newgrp;
newmotion.m_scene = newgrp.Scene;
if (m_keyframes != null)
{
newmotion.m_keyframes = new Keyframe[m_keyframes.Length];
m_keyframes.CopyTo(newmotion.m_keyframes, 0);
}
newmotion.m_frames = new List<Keyframe>(m_frames);
newmotion.m_basePosition = m_basePosition;
newmotion.m_baseRotation = m_baseRotation;
if (m_selected)
newmotion.m_serializedPosition = m_serializedPosition;
else
{
if (m_group != null)
newmotion.m_serializedPosition = m_group.AbsolutePosition;
else
newmotion.m_serializedPosition = m_serializedPosition;
}
newmotion.m_currentFrame = m_currentFrame;
newmotion.m_iterations = m_iterations;
newmotion.m_running = m_running;
if (m_running && !m_waitingCrossing)
StartTimer();
return newmotion;
}
public void Delete()
{
m_running = false;
StopTimer();
m_isCrossing = false;
m_waitingCrossing = false;
m_frames.Clear();
m_keyframes = null;
}
// Starts playback; only runs when there is a group and at least one keyframe.
public void Start()
{
m_isCrossing = false;
m_waitingCrossing = false;
if (m_keyframes != null && m_group != null && m_keyframes.Length > 0)
{
StartTimer();
m_running = true;
}
else
{
m_running = false;
StopTimer();
}
}
// Stops playback, zeroes velocities, and rebases on the current pose.
public void Stop()
{
m_running = false;
m_isCrossing = false;
m_waitingCrossing = false;
StopTimer();
m_basePosition = m_group.AbsolutePosition;
m_baseRotation = m_group.GroupRotation;
m_group.RootPart.Velocity = Vector3.Zero;
m_group.RootPart.AngularVelocity = Vector3.Zero;
m_group.SendGroupRootTerseUpdate();
//            m_group.RootPart.ScheduleTerseUpdate();
m_frames.Clear();
}
// Pauses playback without clearing frames, so Start can resume where it left off.
public void Pause()
{
m_running = false;
StopTimer();
m_group.RootPart.Velocity = Vector3.Zero;
m_group.RootPart.AngularVelocity = Vector3.Zero;
m_group.SendGroupRootTerseUpdate();
//            m_group.RootPart.ScheduleTerseUpdate();
}
// Rebuilds m_frames for the next iteration from m_keyframes, converting the authored
// relative position/rotation deltas into absolute targets chained from the current
// base pose. Reverse/PingPong iterate the keyframes backwards with negated deltas.
private void GetNextList()
{
m_frames.Clear();
Vector3 pos = m_basePosition;
Quaternion rot = m_baseRotation;
if (m_mode == PlayMode.Loop || m_mode == PlayMode.PingPong || m_iterations == 0)
{
int direction = 1;
if (m_mode == PlayMode.Reverse || ((m_mode == PlayMode.PingPong) && ((m_iterations & 1) != 0)))
direction = -1;
int start = 0;
int end = m_keyframes.Length;
if (direction < 0)
{
start = m_keyframes.Length - 1;
end = -1;
}
for (int i = start; i != end ; i += direction)
{
Keyframe k = m_keyframes[i];
k.StartPosition = pos;
if (k.Position.HasValue)
{
k.Position = (k.Position * direction);
//                        k.Velocity = (Vector3)k.Position / (k.TimeMS / 1000.0f);
k.Position += pos;
}
else
{
k.Position = pos;
//                        k.Velocity = Vector3.Zero;
}
k.StartRotation = rot;
if (k.Rotation.HasValue)
{
if (direction == -1)
k.Rotation = Quaternion.Conjugate((Quaternion)k.Rotation);
k.Rotation = rot * k.Rotation;
}
else
{
k.Rotation = rot;
}
/* ang vel not in use for now
float angle = 0;
float aa = k.StartRotation.X * k.StartRotation.X + k.StartRotation.Y * k.StartRotation.Y + k.StartRotation.Z * k.StartRotation.Z + k.StartRotation.W * k.StartRotation.W;
float bb = ((Quaternion)k.Rotation).X * ((Quaternion)k.Rotation).X + ((Quaternion)k.Rotation).Y * ((Quaternion)k.Rotation).Y + ((Quaternion)k.Rotation).Z * ((Quaternion)k.Rotation).Z + ((Quaternion)k.Rotation).W * ((Quaternion)k.Rotation).W;
float aa_bb = aa * bb;
if (aa_bb == 0)
{
angle = 0;
}
else
{
float ab = k.StartRotation.X * ((Quaternion)k.Rotation).X +
k.StartRotation.Y * ((Quaternion)k.Rotation).Y +
k.StartRotation.Z * ((Quaternion)k.Rotation).Z +
k.StartRotation.W * ((Quaternion)k.Rotation).W;
float q = (ab * ab) / aa_bb;
if (q > 1.0f)
{
angle = 0;
}
else
{
angle = (float)Math.Acos(2 * q - 1);
}
}
k.AngularVelocity = (new Vector3(0, 0, 1) * (Quaternion)k.Rotation) * (angle / (k.TimeMS / 1000));
*/
k.TimeTotal = k.TimeMS;
m_frames.Add(k);
pos = (Vector3)k.Position;
rot = (Quaternion)k.Rotation;
}
m_basePosition = pos;
m_baseRotation = rot;
m_iterations++;
}
}
// Per-tick state machine: handles skip/stopped/selected/crossing states, then either
// finishes the current frame (snap to target) or interpolates one step towards it.
public void OnTimer(double tickDuration)
{
if (m_skipLoops > 0)
{
m_skipLoops--;
return;
}
if (m_timerStopped) // trap events still in air even after a timer.stop
return;
if (m_group == null)
return;
bool update = false;
if (m_selected)
{
// While selected we only make sure the object is not left drifting.
if (m_group.RootPart.Velocity != Vector3.Zero)
{
m_group.RootPart.Velocity = Vector3.Zero;
m_group.SendGroupRootTerseUpdate();
}
return;
}
if (m_isCrossing)
{
// if crossing and timer running then cross failed
// wait some time then
// retry to set the position that eventually caused the outbound
// if still outside region this will call startCrossing below
m_isCrossing = false;
m_group.AbsolutePosition = m_nextPosition;
if (!m_isCrossing)
{
StopTimer();
StartTimer();
}
return;
}
if (m_frames.Count == 0)
{
GetNextList();
if (m_frames.Count == 0)
{
// Iteration exhausted and no new list (non-loop mode): stop and notify scripts.
Stop();
Scene scene = m_group.Scene;
IScriptModule[] scriptModules = scene.RequestModuleInterfaces<IScriptModule>();
foreach (IScriptModule m in scriptModules)
{
if (m == null)
continue;
m.PostObjectEvent(m_group.RootPart.UUID, "moving_end", new object[0]);
}
return;
}
m_currentFrame = m_frames[0];
m_currentFrame.TimeMS += (int)tickDuration;
//force a update on a keyframe transition
update = true;
}
m_currentFrame.TimeMS -= (int)tickDuration;
// Do the frame processing
double remainingSteps = (double)m_currentFrame.TimeMS / tickDuration;
if (remainingSteps <= 0.0)
{
// Frame time elapsed: snap exactly to the frame's target pose and advance.
m_group.RootPart.Velocity = Vector3.Zero;
m_group.RootPart.AngularVelocity = Vector3.Zero;
m_nextPosition = (Vector3)m_currentFrame.Position;
m_group.AbsolutePosition = m_nextPosition;
// we are sending immediate updates, so don't force an extra terse update
//            m_group.UpdateGroupRotationR((Quaternion)m_currentFrame.Rotation);
m_group.RootPart.RotationOffset = (Quaternion)m_currentFrame.Rotation;
m_frames.RemoveAt(0);
if (m_frames.Count > 0)
m_currentFrame = m_frames[0];
update = true;
}
else
{
float completed = ((float)m_currentFrame.TimeTotal - (float)m_currentFrame.TimeMS) / (float)m_currentFrame.TimeTotal;
bool lastStep = m_currentFrame.TimeMS <= tickDuration;
Vector3 positionThisStep = m_currentFrame.StartPosition + (m_currentFrame.Position.Value - m_currentFrame.StartPosition) * completed;
Vector3 motionThisStep = positionThisStep - m_group.AbsolutePosition;
float mag = Vector3.Mag(motionThisStep);
// Skip sub-2cm moves (except the last step) to avoid flooding viewers with updates.
if ((mag >= 0.02f) || lastStep)
{
m_nextPosition = m_group.AbsolutePosition + motionThisStep;
m_group.AbsolutePosition = m_nextPosition;
update = true;
}
//int totalSteps = m_currentFrame.TimeTotal / (int)tickDuration;
//m_log.DebugFormat("KeyframeMotion.OnTimer: step {0}/{1}, curPosition={2}, finalPosition={3}, motionThisStep={4} (scene {5})",
//                  totalSteps - remainingSteps + 1, totalSteps, m_group.AbsolutePosition, m_currentFrame.Position, motionThisStep, m_scene.RegionInfo.RegionName);
if ((Quaternion)m_currentFrame.Rotation != m_group.GroupRotation)
{
Quaternion current = m_group.GroupRotation;
Quaternion step = Quaternion.Slerp(m_currentFrame.StartRotation, (Quaternion)m_currentFrame.Rotation, completed);
step.Normalize();
/* use simpler change detection
* float angle = 0;
float aa = current.X * current.X + current.Y * current.Y + current.Z * current.Z + current.W * current.W;
float bb = step.X * step.X + step.Y * step.Y + step.Z * step.Z + step.W * step.W;
float aa_bb = aa * bb;
if (aa_bb == 0)
{
angle = 0;
}
else
{
float ab = current.X * step.X +
current.Y * step.Y +
current.Z * step.Z +
current.W * step.W;
float q = (ab * ab) / aa_bb;
if (q > 1.0f)
{
angle = 0;
}
else
{
angle = (float)Math.Acos(2 * q - 1);
}
}
if (angle > 0.01f)
*/
if(Math.Abs(step.X - current.X) > 0.001f
|| Math.Abs(step.Y - current.Y) > 0.001f
|| Math.Abs(step.Z - current.Z) > 0.001f
|| lastStep)
// assuming w is a dependent var
{
//                    m_group.UpdateGroupRotationR(step);
m_group.RootPart.RotationOffset = step;
//m_group.RootPart.UpdateAngularVelocity(m_currentFrame.AngularVelocity / 2);
update = true;
}
}
}
if (update)
{
m_group.SendGroupRootTerseUpdate();
}
}
// Serializes this motion with BinaryFormatter. m_group is temporarily nulled so the
// whole scene graph is not dragged into the payload; the current position is saved
// so positions can be re-based on deserialize.
public Byte[] Serialize()
{
StopTimer();
SceneObjectGroup tmp = m_group;
m_group = null;
if (!m_selected && tmp != null)
m_serializedPosition = tmp.AbsolutePosition;
using (MemoryStream ms = new MemoryStream())
{
BinaryFormatter fmt = new BinaryFormatter();
fmt.Serialize(ms, this);
m_group = tmp;
if (m_running && !m_waitingCrossing)
StartTimer();
return ms.ToArray();
}
}
// Entering a region-crossing attempt: stop ticking until the cross succeeds (object
// handed off) or fails (CrossingFailure restarts us).
public void StartCrossingCheck()
{
// timer will be restarted by crossingFailure
// or never, since the crossing worked and this
// instance should be deleted
StopTimer();
m_isCrossing = true;
m_waitingCrossing = true;
// to remove / retune to smooth crossings
if (m_group.RootPart.Velocity != Vector3.Zero)
{
m_group.RootPart.Velocity = Vector3.Zero;
m_group.SendGroupRootTerseUpdate();
//                m_group.RootPart.ScheduleTerseUpdate();
}
}
// Cross failed: halt the object and back off for ~60s (1200 ticks) before retrying.
public void CrossingFailure()
{
m_waitingCrossing = false;
if (m_group != null)
{
m_group.RootPart.Velocity = Vector3.Zero;
m_group.SendGroupRootTerseUpdate();
//                m_group.RootPart.ScheduleTerseUpdate();
if (m_running)
{
StopTimer();
m_skipLoops = 1200; // 60 seconds
StartTimer();
}
}
}
}
}
| |
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Text;
using System;
#if XAMARIN
#else
using Divan.Lucene;
#endif
using Newtonsoft.Json.Linq;
using System.IO;
namespace Divan
{
/// <summary>
/// A CouchDatabase corresponds to a named CouchDB database in a specific CouchServer.
/// This is the main API to work with CouchDB. One useful approach is to create your own subclasses
/// for your different databases.
/// </summary>
public class CouchDatabase : ICouchDatabase
{
private string name;
public readonly IList<CouchDesignDocument> DesignDocuments = new List<CouchDesignDocument>();
// Creates a database proxy with the default name and no server attached yet.
public CouchDatabase()
{
Name = "default";
}
// Creates a database proxy named "default" attached to the given server.
public CouchDatabase(ICouchServer server)
: this()
{
Server = server;
}
// Creates a database proxy with an explicit name attached to the given server.
public CouchDatabase(string name, ICouchServer server)
{
Name = name;
Server = server;
}
// Database name as used against CouchDB. When a server is attached, its
// DatabasePrefix is prepended on read; the setter always stores the raw,
// unprefixed name.
public string Name
{
get
{
if (Server == null)
return name;
return Server.DatabasePrefix + name;
}
set
{
name = value;
}
}
public ICouchServer Server { get; set; }
// True when the attached server detected it is running on the Mono runtime.
public bool RunningOnMono()
{
return Server.RunningOnMono;
}
/// <summary>
/// Creates a design document bound to this database, registers it in
/// <see cref="DesignDocuments"/>, and returns it.
/// </summary>
public CouchDesignDocument NewDesignDocument(string aName)
{
    var designDocument = new CouchDesignDocument(aName, this);
    DesignDocuments.Add(designDocument);
    return designDocument;
}
/// <summary>
/// Only to be used when developing.
/// </summary>
public ICouchViewDefinition NewTempView(string designDoc, string viewName, string mapText)
{
    // Wrap the supplied body in a complete map function before registering the view.
    var mapFunction = "function (doc) {" + mapText + "}";
    var designDocument = NewDesignDocument(designDoc);
    var view = designDocument.AddView(viewName, mapFunction);
    designDocument.Synch();
    return view;
}
/// <summary>
/// Currently the logic is that the code is always the master.
/// And we also do not remove design documents in the database that
/// we no longer have in code.
/// </summary>
public void SynchDesignDocuments()
{
    // Push each code-defined design document to the database.
    for (var i = 0; i < DesignDocuments.Count; i++)
    {
        DesignDocuments[i].Synch();
    }
}
// Creates a new HTTP request scoped to this database.
public ICouchRequest Request()
{
return new CouchRequest(this);
}
// Creates a new HTTP request scoped to this database with the given path appended.
public ICouchRequest Request(string path)
{
return (new CouchRequest(this)).Path(path);
}
/// <summary>
/// Number of documents in the database, read from the "doc_count" field of the
/// database info document.
/// </summary>
public int CountDocuments()
{
    var databaseInfo = Request().Parse();
    return databaseInfo["doc_count"].Value<int>();
}
// Request against this database's _all_docs view.
public ICouchRequest RequestAllDocuments()
{
return Request("_all_docs");
}
/// <summary>
/// Return all documents in the database as CouchJsonDocuments.
/// This method is only practical for testing purposes.
/// </summary>
/// <returns>A list of all documents.</returns>
public IEnumerable<CouchJsonDocument> GetAllDocuments()
{
// IncludeDocuments() asks CouchDB for full document bodies, not just id/rev.
return QueryAllDocuments().IncludeDocuments().GetResult().Documents<CouchJsonDocument>();
}
/// <summary>
/// Return all documents in the database using a supplied
/// document type implementing ICouchDocument.
/// This method is only practical for testing purposes.
/// </summary>
/// <typeparam name="T">The document type to use.</typeparam>
/// <returns>A list of all documents.</returns>
public IEnumerable<T> GetAllDocuments<T>() where T : ICouchDocument, new()
{
// IncludeDocuments() asks CouchDB for full document bodies, deserialized as T.
return QueryAllDocuments().IncludeDocuments().GetResult().Documents<T>();
}
/// <summary>
/// Return all documents in the database, but only with id and revision.
/// CouchDocument does not contain the actual content.
/// </summary>
/// <returns>List of documents</returns>
public IEnumerable<CouchDocument> GetAllDocumentsWithoutContent()
{
    // NOTE(review): the original first ran QueryAllDocuments().GetResult()
    // .ValueDocuments<CouchDocument>() and discarded the result — a full extra
    // round trip to the server for nothing. Removed; only the _all_docs request
    // below is used.
    var list = new List<CouchDocument>();

    // _all_docs rows look like { "id": ..., "key": ..., "value": { "rev": ... } };
    // keep only id + rev for each document.
    JObject json = RequestAllDocuments().Parse();
    foreach (JObject row in json["rows"])
    {
        list.Add(new CouchDocument(row["id"].ToString(), (row["value"])["rev"].ToString()));
    }
    return list;
}
/// <summary>
/// Initialize CouchDB database by saving new or changed design documents into it.
/// Override if needed in subclasses.
/// </summary>
public virtual void Initialize()
{
// Currently initialization only means pushing the design documents.
SynchDesignDocuments();
}
// True if the named database exists on the server.
public bool Exists()
{
return Server.HasDatabase(Name);
}
/// <summary>
/// Create and initialize the database on the server unless it already exists.
/// </summary>
public void Create()
{
    if (Exists())
    {
        return; // Nothing to do.
    }
    Server.CreateDatabase(Name);
    Initialize();
}
/// <summary>Delete the database from the server if it exists.</summary>
public void Delete()
{
    // Deleting a non-existent database would fail server-side, so check first.
    if (!Exists())
    {
        return;
    }
    Server.DeleteDatabase(Name);
}
/// <summary>
/// Write a document given as plain JSON and a document id. A document may
/// already exist in the database and will then be overwritten.
/// </summary>
/// <param name="json">Document as a JSON string</param>
/// <param name="documentId">Document identifier</param>
/// <returns>A new CouchJsonDocument</returns>
public ICouchDocument WriteDocument(string json, string documentId)
{
    var doc = new CouchJsonDocument(json, documentId);
    return WriteDocument(doc);
}
/// <summary>
/// Write a CouchDocument or ICouchDocument; if it already exists in the
/// database it is overwritten.
/// </summary>
/// <param name="document">Couch document</param>
/// <returns>Couch Document with new Rev set.</returns>
/// <remarks>This relies on the document to already have an id.</remarks>
public ICouchDocument WriteDocument(ICouchDocument document)
{
    return WriteDocument(document, false); // non-batch: wait for commit
}
/// <summary>Save a plain object by wrapping it as an ICouchDocument, then unwrap it again.</summary>
public T SaveArbitraryDocument<T>(T document)
{
    var wrapper = new CouchDocumentWrapper<T>(document);
    var saved = (CouchDocumentWrapper<T>)SaveDocument(wrapper);
    return saved.Instance;
}
/// <summary>
/// This is a convenience method that creates or writes a ICouchDocument depending on if
/// it has an id or not. If it does not have an id we create the document and let CouchDB allocate
/// an id. If it has an id we use WriteDocument which will overwrite the existing document in CouchDB.
/// On a revision conflict, documents implementing IReconcilingDocument get one
/// reconcile-and-retry attempt; all other documents rethrow the conflict.
/// </summary>
/// <param name="document">ICouchDocument</param>
/// <returns>ICouchDocument with new Rev set.</returns>
public ICouchDocument SaveDocument(ICouchDocument document)
{
    // Non-null only when the document opted into conflict reconciliation.
    var reconcilingDoc = document as IReconcilingDocument;
    ICouchDocument savedDoc;
    try
    {
        // No id yet -> POST (CouchDB allocates one); otherwise PUT overwrite.
        savedDoc = document.Id == null ?
            CreateDocument(document) :
            WriteDocument(document);
    }
    catch (CouchConflictException)
    {
        if (reconcilingDoc == null)
            throw;
        // can't handle a brand-new document
        if (String.IsNullOrEmpty(reconcilingDoc.Rev))
            throw;
        switch (reconcilingDoc.ReconcileBy)
        {
            case ReconcileStrategy.None:
                throw;
            default:
                // Merge against the server's current copy, then retry the save
                // (recursion; a second conflict will reconcile again).
                reconcilingDoc.Reconcile(reconcilingDoc.GetDatabaseCopy(this));
                SaveDocument(reconcilingDoc);
                break;
        }
        // The recursive save mutated reconcilingDoc in place (same reference).
        savedDoc = reconcilingDoc;
    }
    // Let reconciling documents snapshot their last-committed state.
    if (reconcilingDoc != null)
        reconcilingDoc.SaveCommited();
    return savedDoc;
}
/// <summary>
/// Write a CouchDocument or ICouchDocument; if it already exists in the
/// database it is overwritten.
/// </summary>
/// <param name="document">Couch document</param>
/// <param name="batch">True if we don't want to wait for flush (commit).</param>
/// <returns>Couch Document with new Rev set.</returns>
/// <remarks>This relies on the document to already have an id.</remarks>
public ICouchDocument WriteDocument(ICouchDocument document, bool batch)
{
    if (document.Id == null)
    {
        throw CouchException.Create(
            "Failed to write document using PUT because it lacks an id, use CreateDocument instead to let CouchDB generate an id");
    }
    var request = Request(document.Id)
        .Query(batch ? "?batch=ok" : null)
        .Data(CouchDocument.WriteJson(document))
        .Put()
        .Check("Failed to write document");
    JObject result = request.Result();
    document.Id = result["id"].Value<string>(); // Not really needed
    document.Rev = result["rev"].Value<string>();
    return document;
}
/// <summary>
/// Add a text attachment to an existing ICouchDocument; an existing attachment
/// with the same name is overwritten.
/// </summary>
/// <param name="document">Couch document</param>
/// <param name="attachmentName">Name of the attachment.</param>
/// <param name="attachmentData">The attachment data.</param>
/// <param name="mimeType">The MIME type for the attachment.</param>
/// <returns>The document.</returns>
/// <remarks>This relies on the document to already have an id.</remarks>
public ICouchDocument WriteAttachment(ICouchDocument document, string attachmentName, string attachmentData, string mimeType)
{
    // Encode the text as UTF-8 and delegate to the byte[] overload.
    return WriteAttachment(document, attachmentName, Encoding.UTF8.GetBytes(attachmentData), mimeType);
}
/// <summary>
/// Add a binary attachment to an existing ICouchDocument; an existing
/// attachment with the same name is overwritten.
/// </summary>
/// <param name="document">Couch document</param>
/// <param name="attachmentName">Name of the attachment.</param>
/// <param name="attachmentData">The attachment data.</param>
/// <param name="mimeType">The MIME type for the attachment.</param>
/// <returns>The document.</returns>
/// <remarks>This relies on the document to already have an id.</remarks>
public ICouchDocument WriteAttachment(ICouchDocument document, string attachmentName, byte[] attachmentData, string mimeType)
{
    if (document.Id == null)
    {
        throw CouchException.Create(
            "Failed to add attachment to document using PUT because it lacks an id");
    }
    // Attachments are PUT to <docid>/<name> with the current revision as a query arg.
    var request = Request(document.Id + "/" + attachmentName)
        .Query("?rev=" + document.Rev)
        .Data(attachmentData)
        .MimeType(mimeType)
        .Put()
        .Check("Failed to write attachment");
    JObject result = request.Result();
    document.Id = result["id"].Value<string>(); // Not really needed
    document.Rev = result["rev"].Value<string>();
    return document;
}
/// <summary>
/// Add a stream-sourced attachment to an existing ICouchDocument; an existing
/// attachment with the same name is overwritten.
/// </summary>
/// <param name="document">The document.</param>
/// <param name="attachmentName">Name of the attachment.</param>
/// <param name="attachmentData">The attachment data.</param>
/// <param name="mimeType">Type of the MIME.</param>
/// <returns>The document.</returns>
/// <remarks>This relies on the document to already have an id.</remarks>
public ICouchDocument WriteAttachment(ICouchDocument document, string attachmentName, Stream attachmentData, string mimeType)
{
    if (document.Id == null)
    {
        throw CouchException.Create(
            "Failed to add attachment to document using PUT because it lacks an id");
    }
    // Attachments are PUT to <docid>/<name> with the current revision as a query arg.
    var request = Request(document.Id + "/" + attachmentName)
        .Query("?rev=" + document.Rev)
        .Data(attachmentData)
        .MimeType(mimeType)
        .Put()
        .Check("Failed to write attachment");
    JObject result = request.Result();
    document.Id = result["id"].Value<string>(); // Not really needed
    document.Rev = result["rev"].Value<string>();
    return document;
}
/// <summary>
/// Read a ICouchDocument with an id even if it has not changed revision.
/// </summary>
/// <param name="document">Document to fill.</param>
public void ReadDocument(ICouchDocument document)
{
    // Fetch by id and let the document parse its own JSON.
    var json = ReadDocument(document.Id);
    document.ReadJson(json);
}
/// <summary>
/// Read the attachment for an ICouchDocument.
/// </summary>
/// <param name="document">Document to read.</param>
/// <param name="attachmentName">Name of the attachment.</param>
/// <returns>The raw web response carrying the attachment.</returns>
public WebResponse ReadAttachment(ICouchDocument document, string attachmentName)
{
    return ReadAttachment(document.Id, attachmentName);
}
/// <summary>
/// First use HEAD to see if the document has indeed changed, and only then
/// download and parse its body.
/// </summary>
/// <param name="document">Document to fill.</param>
public void FetchDocumentIfChanged(ICouchDocument document)
{
    if (!HasDocumentChanged(document))
    {
        return;
    }
    ReadDocument(document);
}
/// <summary>
/// Read a CouchDocument or ICouchDocument; this relies on the document to have an id.
/// The current revision is sent as an Etag so we can skip JSON parsing when the
/// document is unchanged (the request then yields null).
/// </summary>
/// <param name="document">Document to fill.</param>
public void ReadDocumentIfChanged(ICouchDocument document)
{
    JObject result = Request(document.Id).Etag(document.Rev).Parse();
    if (result != null)
    {
        document.ReadJson(result);
    }
}
/// <summary>
/// Read a couch document given an id; this method does not have enough
/// information to do caching.
/// </summary>
/// <param name="documentId">Document identifier</param>
/// <returns>Document Json as JObject</returns>
public JObject ReadDocument(string documentId)
{
    try
    {
        var request = Request(documentId);
        return request.Parse();
    }
    catch (WebException e)
    {
        // Wrap transport failures in the library's own exception type.
        throw CouchException.Create("Failed to read document", e);
    }
}
/// <summary>
/// Read a couch document given an id, returning the raw JSON text; this method
/// does not have enough information to do caching.
/// </summary>
/// <param name="documentId">Document identifier</param>
/// <returns>Document Json as string</returns>
public string ReadDocumentString(string documentId)
{
    try
    {
        var request = Request(documentId);
        return request.String();
    }
    catch (WebException e)
    {
        throw CouchException.Create("Failed to read document: " + e.Message, e);
    }
}
/// <summary>
/// Read a couch attachment given a document id; this method does not have
/// enough information to do caching.
/// </summary>
/// <param name="documentId">Document identifier</param>
/// <param name="attachmentName">Name of the attachment.</param>
/// <returns>Document attachment</returns>
public WebResponse ReadAttachment(string documentId, string attachmentName)
{
    try
    {
        var request = Request(documentId + "/" + attachmentName);
        return request.Response();
    }
    catch (WebException e)
    {
        throw CouchException.Create("Failed to read document: " + e.Message, e);
    }
}
/// <summary>
/// Create a CouchDocument given JSON as a string. Uses POST and CouchDB will
/// allocate a new id.
/// </summary>
/// <param name="json">Json data to store.</param>
/// <returns>Couch document with data, id and rev set.</returns>
/// <remarks>POST which may be problematic in some environments.</remarks>
public CouchJsonDocument CreateDocument(string json)
{
    var doc = new CouchJsonDocument(json);
    return (CouchJsonDocument)CreateDocument(doc);
}
/// <summary>
/// Create a given ICouchDocument in CouchDB. Uses POST and CouchDB will
/// allocate a new id and overwrite any existing id.
/// </summary>
/// <param name="document">Document to store.</param>
/// <returns>Document with Id and Rev set.</returns>
/// <remarks>POST which may be problematic in some environments.</remarks>
public ICouchDocument CreateDocument(ICouchDocument document)
{
    try
    {
        var request = Request()
            .Data(CouchDocument.WriteJson(document))
            .PostJson()
            .Check("Failed to create document");
        JObject result = request.Result();
        // Adopt the server-assigned id and revision.
        document.Id = result["id"].Value<string>();
        document.Rev = result["rev"].Value<string>();
        return document;
    }
    catch (WebException e)
    {
        throw CouchException.Create("Failed to create document", e);
    }
}
/// <summary>Bulk-save plain objects by wrapping each one as an ICouchDocument.</summary>
public void SaveArbitraryDocuments<T>(IEnumerable<T> documents, bool allOrNothing)
{
    var wrapped = documents.Select(doc => new CouchDocumentWrapper<T>(doc)).Cast<ICouchDocument>();
    SaveDocuments(wrapped, allOrNothing);
}
/// <summary>
/// Create or update a list of ICouchDocuments in CouchDB. Uses POST and CouchDB will
/// allocate new ids if the documents lack them.
/// </summary>
/// <param name="documents">List of documents to store.</param>
/// <param name="allOrNothing">True to ask CouchDB for all-or-nothing bulk semantics.</param>
/// <remarks>POST may be problematic in some environments.</remarks>
public void SaveDocuments(IEnumerable<ICouchDocument> documents, bool allOrNothing)
{
    var bulk = new CouchBulkDocuments(documents);
    try
    {
        var result = Request("_bulk_docs")
            .Data(CouchDocument.WriteJson(bulk))
            .Query("?all_or_nothing=" + allOrNothing.ToString().ToLower())
            .PostJson()
            .Parse<JArray>();
        // The response array is positionally aligned with the posted documents,
        // so walk both in lockstep to copy back ids and revisions.
        // NOTE(review): `documents` is enumerated here a second time (the first
        // pass serialized it via CouchBulkDocuments); callers passing a lazy or
        // one-shot IEnumerable may get surprising results - confirm callers
        // always pass a materialized collection.
        int index = 0;
        foreach (var document in documents)
        {
            document.Id = (result[index])["id"].Value<string>();
            document.Rev = (result[index])["rev"].Value<string>();
            ++index;
        }
    }
    catch (WebException e)
    {
        throw CouchException.Create("Failed to create bulk documents", e);
    }
}
/// <summary>Chunked bulk-save of plain objects, wrapping each one as an ICouchDocument.</summary>
public void SaveArbitraryDocuments<T>(IEnumerable<T> documents, int chunkCount, IEnumerable<ICouchViewDefinition> views, bool allOrNothing)
{
    var wrapped = documents.Select(doc => new CouchDocumentWrapper<T>(doc)).Cast<ICouchDocument>();
    SaveDocuments(wrapped, chunkCount, views, allOrNothing);
}
/// <summary>
/// Create or updates documents in bulk fashion, chunk wise. Optionally access given view
/// after each chunk to trigger reindexing.
/// </summary>
/// <param name="documents">List of documents to store.</param>
/// <param name="chunkCount">Number of documents to store per "POST"</param>
/// <param name="views">List of views to touch per chunk.</param>
/// <param name="allOrNothing">True to ask CouchDB for all-or-nothing bulk semantics per chunk.</param>
public void SaveDocuments(IEnumerable<ICouchDocument> documents, int chunkCount, IEnumerable<ICouchViewDefinition> views, bool allOrNothing)
{
    var chunk = new List<ICouchDocument>(chunkCount);
    int counter = 0;
    foreach (ICouchDocument doc in documents)
    {
        // Do we have a chunk ready to create?
        if (counter == chunkCount)
        {
            counter = 0;
            // Flush the full chunk, then poke the views so indexing keeps pace.
            SaveDocuments(chunk, allOrNothing);
            TouchViews(views);
            /* Skipping separate thread for now, ASP.Net goes bonkers...
            (new Thread(
            () => GetView<CouchPermanentViewResult>(designDocumentName, viewName, ""))
            {
            Name = "View access in background", Priority = ThreadPriority.BelowNormal
            }).Start(); */
            chunk = new List<ICouchDocument>(chunkCount);
        }
        counter++;
        chunk.Add(doc);
    }
    // Flush the final (possibly partial, possibly empty) chunk.
    SaveDocuments(chunk, allOrNothing);
    TouchViews(views);
}
/// <summary>
/// Access each given view so CouchDB updates its index. Null view lists and
/// null entries are tolerated and skipped.
/// </summary>
public void TouchViews(IEnumerable<ICouchViewDefinition> views)
{
    if (views == null)
    {
        return;
    }
    foreach (var view in views)
    {
        if (view != null)
        {
            view.Touch();
        }
    }
}
/// <summary>
/// Create documents in bulk fashion, chunk wise, without touching any views.
/// </summary>
/// <param name="documents">List of documents to store.</param>
/// <param name="chunkCount">Number of documents to store per "POST"</param>
/// <param name="allOrNothing">True to ask CouchDB for all-or-nothing bulk semantics per chunk.</param>
public void SaveDocuments(IEnumerable<ICouchDocument> documents, int chunkCount, bool allOrNothing)
{
    SaveDocuments(documents, chunkCount, null, allOrNothing);
}
/// <summary>Chunked bulk-save of plain objects without touching any views.</summary>
public void SaveArbitraryDocuments<T>(IEnumerable<T> documents, int chunkCount, bool allOrNothing)
{
    SaveArbitraryDocuments(documents, chunkCount, null, allOrNothing);
}
/// <summary>Fetch the documents with the given ids as CouchJsonDocuments.</summary>
public IEnumerable<CouchJsonDocument> GetDocuments(IEnumerable<string> documentIds)
{
    return GetDocuments<CouchJsonDocument>(documentIds);
}
/// <summary>Fetch the documents with the given ids, materialized as T.</summary>
public IEnumerable<T> GetDocuments<T>(IEnumerable<string> documentIds) where T : ICouchDocument, new()
{
    // POST the ids as bulk keys and ask for the document bodies inline.
    var keys = new CouchBulkKeys(documentIds.Cast<object>());
    var query = QueryAllDocuments().Data(CouchDocument.WriteJson(keys)).IncludeDocuments();
    return query.GetResult().Documents<T>();
}
/// <summary>
/// Fetch a single document by id as T, or default(T) when it does not exist.
/// </summary>
public T GetDocument<T>(string documentId) where T : ICouchDocument, new()
{
    var document = new T { Id = documentId };
    try
    {
        ReadDocument(document);
        return document;
    }
    catch (CouchNotFoundException)
    {
        // A missing document is an expected outcome, not an error.
        return default(T);
    }
}
/// <summary>
/// Fetch a plain object by id via a wrapper, or default(T) when it does not exist.
/// </summary>
/// <param name="documentId">Document identifier.</param>
/// <param name="ctor">Factory producing an empty instance to fill.</param>
public T GetArbitraryDocument<T>(string documentId, Func<T> ctor)
{
    var wrapper = new CouchDocumentWrapper<T>(ctor);
    wrapper.Id = documentId;
    try
    {
        ReadDocument(wrapper);
    }
    catch (CouchNotFoundException)
    {
        // A missing document is an expected outcome, not an error.
        return default(T);
    }
    return wrapper.Instance;
}
/// <summary>Fetch the documents with the given ids as plain objects built by <paramref name="ctor"/>.</summary>
public IEnumerable<T> GetArbitraryDocuments<T>(IEnumerable<string> documentIds, Func<T> ctor)
{
    var keys = new CouchBulkKeys(documentIds.Cast<object>());
    var query = QueryAllDocuments().Data(CouchDocument.WriteJson(keys)).IncludeDocuments();
    return query.GetResult().ArbitraryDocuments(ctor);
}
/// <summary>
/// Fetch a single document by id as a CouchJsonDocument, or null when it does
/// not exist.
/// </summary>
public CouchJsonDocument GetDocument(string documentId)
{
    try
    {
        try
        {
            return new CouchJsonDocument(Request(documentId).Parse());
        }
        catch (WebException e)
        {
            // NOTE(review): the nested try is deliberate - CouchException.Create
            // presumably returns a CouchNotFoundException for 404s, which the
            // OUTER catch then maps to null while other failures propagate.
            // Confirm against CouchException.Create before flattening this.
            throw CouchException.Create("Failed to get document", e);
        }
    }
    catch (CouchNotFoundException)
    {
        return null;
    }
}
/// <summary>
/// Query a view by name (that we know exists in CouchDB). This method then creates
/// a CouchViewDefinition on the fly. Better to use existing CouchViewDefinitions.
/// </summary>
public CouchQuery Query(string designName, string viewName)
{
    var definition = new CouchViewDefinition(viewName, NewDesignDocument(designName));
    return Query(definition);
}
/// <summary>Build a query against an existing view definition.</summary>
public CouchQuery Query(ICouchViewDefinition view)
{
    return new CouchQuery(view);
}
#if XAMARIN
#else
// Lucene-backed full text queries are unavailable on Xamarin builds.
/// <summary>Build a full text query against a couchdb-lucene view definition.</summary>
public CouchLuceneQuery Query(CouchLuceneViewDefinition view)
{
    return new CouchLuceneQuery(view);
}
#endif
/// <summary>Query over the built-in _all_docs view.</summary>
public CouchQuery QueryAllDocuments()
{
    return Query(null, "_all_docs");
}
/// <summary>Access a view with limit=0 so CouchDB updates its index without returning rows.</summary>
public void TouchView(string designDocumentId, string viewName)
{
    Query(designDocumentId, viewName).Limit(0).GetResult();
}
/// <summary>Delete the given document using its id and current revision.</summary>
public void DeleteDocument(ICouchDocument document)
{
    DeleteDocument(document.Id, document.Rev);
}
/// <summary>Delete a named attachment from the document, updating its revision.</summary>
public ICouchDocument DeleteAttachment(ICouchDocument document, string attachmentName)
{
    var request = Request(document.Id + "/" + attachmentName)
        .Query("?rev=" + document.Rev)
        .Delete()
        .Check("Failed to delete attachment");
    JObject result = request.Result();
    document.Id = result["id"].Value<string>(); // Not really needed
    document.Rev = result["rev"].Value<string>();
    return document;
}
/// <summary>Delete a named attachment given a document id and revision.</summary>
public void DeleteAttachment(string id, string rev, string attachmentName)
{
    var request = Request(id + "/" + attachmentName).Query("?rev=" + rev);
    request.Delete().Check("Failed to delete attachment");
}
/// <summary>Delete a document given its id and revision.</summary>
public void DeleteDocument(string id, string rev)
{
    var request = Request(id).Query("?rev=" + rev);
    request.Delete().Check("Failed to delete document");
}
/// <summary>
/// Delete documents in key range. This method needs to retrieve
/// revisions and then use them to post a bulk delete. Couch can not
/// delete documents without being told about their revisions.
/// </summary>
public void DeleteDocuments(string startKey, string endKey)
{
    var rows = QueryAllDocuments().StartKey(startKey).EndKey(endKey).GetResult().RowDocuments();
    DeleteDocuments(rows.Cast<ICouchDocument>());
}
/// <summary>
/// Delete documents in bulk fashion.
/// </summary>
/// <param name="documents">Documents to delete; each must carry id and revision.</param>
public void DeleteDocuments(IEnumerable<ICouchDocument> documents)
{
    var bulk = new CouchBulkDeleteDocuments(documents);
    DeleteDocuments(bulk);
}
/// <summary>
/// Delete documents in bulk fashion. The payload (typically a
/// CouchBulkDeleteDocuments) is POSTed to _bulk_docs; every row of the
/// response is checked for a per-document "error" member, since CouchDB can
/// partially succeed.
/// </summary>
/// <param name="bulk">Serializable bulk-delete payload.</param>
public void DeleteDocuments(ICanJson bulk)
{
    try
    {
        var result = Request("_bulk_docs").Data(CouchDocument.WriteJson(bulk)).PostJson().Parse<JArray>();
        for (int i = 0; i < result.Count(); i++)
        {
            //documents[i].id = (result[i])["id"].Value<string>();
            //documents[i].rev = (result[i])["rev"].Value<string>();
            // A row with an "error" member means that one document failed.
            if ((result[i])["error"] != null)
            {
                throw CouchException.Create(string.Format(CultureInfo.InvariantCulture,
                    "Document with id {0} was not deleted: {1}: {2}",
                    (result[i])["id"].Value<string>(), (result[i])["error"], (result[i])["reason"]));
            }
        }
    }
    catch (WebException e)
    {
        throw CouchException.Create("Failed to bulk delete documents", e);
    }
}
/// <summary>True if a document with this document's id exists.</summary>
public bool HasDocument(ICouchDocument document)
{
    return HasDocument(document.Id);
}
/// <summary>True if the document has an attachment with the given name.</summary>
public bool HasAttachment(ICouchDocument document, string attachmentName)
{
    return HasAttachment(document.Id, attachmentName);
}
/// <summary>True if the server revision differs from this document's revision.</summary>
public bool HasDocumentChanged(ICouchDocument document)
{
    return HasDocumentChanged(document.Id, document.Rev);
}
/// <summary>Cheap change check: HEAD the document and compare its Etag to the known revision.</summary>
public bool HasDocumentChanged(string documentId, string rev)
{
    var serverEtag = Request(documentId).Head().Send().Etag();
    return serverEtag != rev;
}
/// <summary>True if a document with the given id exists at the given revision (via HEAD).</summary>
public bool HasDocument(string documentId, string revision)
{
    try
    {
        var request = Request(documentId).QueryOptions(new Dictionary<string, string> {{"Rev", revision}});
        request.Head().Send();
        return true;
    }
    catch (WebException)
    {
        // HEAD failing is how the server says "no such document/revision".
        return false;
    }
}
/// <summary>True if a document with the given id exists (via HEAD).</summary>
public bool HasDocument(string documentId)
{
    try
    {
        var request = Request(documentId);
        request.Head().Send();
        return true;
    }
    catch (WebException)
    {
        // HEAD failing is how the server says "no such document".
        return false;
    }
}
/// <summary>True if the document has an attachment with the given name (via HEAD).</summary>
public bool HasAttachment(string documentId, string attachmentName)
{
    try
    {
        var request = Request(documentId + "/" + attachmentName);
        request.Head().Send();
        return true;
    }
    catch (WebException)
    {
        // HEAD failing is how the server says "no such attachment".
        return false;
    }
}
/// <summary>
/// Copies a document based on its document id, using CouchDB's COPY verb with
/// a Destination header.
/// </summary>
/// <param name="sourceDocumentId">The source document id.</param>
/// <param name="destinationDocumentId">The destination document id.</param>
/// <remarks>Use this method when the destination document does not exist.</remarks>
public void Copy(string sourceDocumentId, string destinationDocumentId)
{
    try
    {
        Request(sourceDocumentId)
            .AddHeader("Destination", destinationDocumentId)
            .Copy()
            .Send()
            .Parse();
        // TODO add the following check statement.
        // Currently on Windows the COPY command does not return an ok=true pair. This might be
        // a bug in the implementation, but once it is sorted out the check should be added.
        //.Check("Error copying document");
    }
    catch (WebException e)
    {
        throw new CouchException(e.Message, e);
    }
}
/// <summary>
/// Copies a document based on its document id and replaces another existing
/// document, by appending ?rev= to the Destination header.
/// </summary>
/// <param name="sourceDocumentId">The source document id.</param>
/// <param name="destinationDocumentId">The destination document id.</param>
/// <param name="destinationRev">The destination rev.</param>
/// <remarks>Use this method when the destination document already exists</remarks>
public void Copy(string sourceDocumentId, string destinationDocumentId, string destinationRev)
{
    try
    {
        // NOTE(review): unlike the two-argument Copy overload, this chain has no
        // .Send() between .Copy() and .Parse(). If Parse() does not implicitly
        // send the request, this overload never hits the server - confirm
        // against ICouchRequest and align with the other overload.
        Request(sourceDocumentId)
            .AddHeader("Destination", destinationDocumentId + "?rev=" + destinationRev)
            .Copy()
            .Parse();
        // TODO add the following check statement.
        // Currently on Windows the COPY command does not return an ok=true pair. This might be
        // a bug in the implementation, but once it is sorted out the check should be added.
        //.Check("Error copying document");
    }
    catch (WebException e)
    {
        throw new CouchException(e.Message, e);
    }
}
/// <summary>
/// Copies the specified source document over the destination document, replacing it.
/// </summary>
/// <param name="sourceDocument">The source document.</param>
/// <param name="destinationDocument">The destination document.</param>
/// <remarks>This method does not update the destinationDocument object.</remarks>
public void Copy(ICouchDocument sourceDocument, ICouchDocument destinationDocument)
{
    Copy(sourceDocument.Id, destinationDocument.Id, destinationDocument.Rev);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using Server.Targeting;
using Server.Spells;
using Server.Mobiles;
using Server.Misc;
namespace Server.Items
{
/// <summary>
/// Base class for throwable "clumsy" potions: when thrown and exploded they
/// apply a temporary Dexterity reduction. Concrete subclasses supply
/// <see cref="DexOffset"/> and <see cref="Duration"/>.
/// </summary>
public abstract class BaseClumsyPotion : BasePotion
{
    // NOTE(review): CIT/CIS are flags declared on BasePotion; their semantics
    // are not visible from this file - confirm before documenting further.
    public override bool CIT { get { return true; } }
    public override bool CIS { get { return true; } }

    // Blast radius (in tiles) used for both the visual ring and the mobile search.
    private int Radius = 4;

    // Magnitude of the Dexterity change; the player path applies it negated.
    public abstract int DexOffset { get; }

    // Base duration of the stat offset.
    public abstract TimeSpan Duration { get; }

    public BaseClumsyPotion(PotionEffect effect)
        : base(0xF08, effect)
    {
    }

    // Deserialization constructor.
    public BaseClumsyPotion( Serial serial ) : base( serial )
    {
    }

    /// <summary>
    /// Apply the clumsy (Dex) effect to <paramref name="from"/>. Players go
    /// through the addiction system first; non-players fall through to the
    /// plain stat-offset path at the bottom.
    /// </summary>
    public void DoClumsy(Mobile from)
    {
        //Plume : Addiction
        if (from is PlayerMobile)
        {
            PlayerMobile drinker = from as PlayerMobile;
            // Presumably index 0 = addiction for this potion type and
            // index 1 = overall addiction - TODO confirm against
            // PlayerMobile.CalculateAgilityAddiction.
            double CurrentAddiction = drinker.CalculateAgilityAddiction(this)[0];
            double GlobalAddiction = drinker.CalculateAgilityAddiction(this)[1];
            // Addiction scales both the Dex penalty and the extra duration.
            int DexScalar = (int)Math.Floor(Math.Sqrt(CurrentAddiction/2));
            double DurationScalar = GlobalAddiction * 0.95;
            if (GlobalAddiction > 100)
            {
                // Over the addiction cap: permanent Dex loss instead of the effect.
                drinker.SendMessage("Votre corps ne supporte plus ce traitement");
                drinker.Dex --;
                return;
            }
            // Negative offset (note the *-1): this path reduces Dexterity.
            if ( Spells.SpellHelper.AddStatOffset( from, StatType.Dex, (Scale( from, DexOffset+Math.Min(DexOffset,DexScalar))*-1),Duration+ TimeSpan.FromSeconds(DurationScalar )) )
            {
                from.FixedEffect( 0x375A, 10, 15 );
                from.PlaySound( 0x1E7 );
                return;
            }
            // NOTE(review): addiction is only incremented when AddStatOffset
            // FAILS (i.e. a similar effect is already active), after which the
            // generic path below is attempted as well - confirm this is intended.
            drinker.IncAddiction(this);
        }
        // Non-player (or failed player) path.
        // NOTE(review): this offset is NOT negated, unlike the player path above
        // - for a "clumsy" effect a positive Dex offset looks suspicious; confirm.
        if ( Spells.SpellHelper.AddStatOffset( from, StatType.Dex, Scale( from, DexOffset), Duration ) )
        {
            from.FixedEffect( 0x375A, 10, 15 );
            from.PlaySound( 0x1E7 );
            return;
        }
        from.SendLocalizedMessage( 502173 ); // You are already under a similar effect.
        return;
    }

    /// <summary>
    /// Drinking arms the potion for throwing: after validation the player gets
    /// a throw target cursor instead of an immediate effect.
    /// </summary>
    public override void Drink(Mobile from)
    {
        if (Core.AOS && (from.Paralyzed || from.Frozen || (from.Spell != null && from.Spell.IsCasting)))
        {
            from.SendLocalizedMessage(1062725); // You can not use that potion while paralyzed.
            return;
        }
        // Per-mobile reuse delay (see the Delay region below).
        int delay = GetDelay(from);
        if (delay > 0)
        {
            from.SendLocalizedMessage(1072529, String.Format("{0}\t{1}", delay, delay > 1 ? "seconds." : "second.")); // You cannot use that for another ~1_NUM~ ~2_TIMEUNITS~
            return;
        }
        // Already aiming this very potion: nothing to do.
        ThrowTarget targ = from.Target as ThrowTarget;
        if (targ != null && targ.Potion == this)
            return;
        from.RevealingAction();
        // Track everyone aiming this potion so targets can be cancelled on explode.
        if (!m_Users.Contains(from))
            m_Users.Add(from);
        from.Target = new ThrowTarget(this);
    }

    public override void Serialize(GenericWriter writer)
    {
        base.Serialize(writer);
        writer.Write((int)0); // version
    }

    public override void Deserialize(GenericReader reader)
    {
        base.Deserialize(reader);
        int version = reader.ReadInt();
    }

    // Mobiles currently holding a throw cursor for this potion instance.
    private List<Mobile> m_Users = new List<Mobile>();

    // Timer trampoline: unpacks (thrower, location, map) and explodes.
    public void Explode_Callback(object state)
    {
        object[] states = (object[])state;
        Explode((Mobile)states[0], (Point3D)states[1], (Map)states[2]);
    }

    /// <summary>
    /// Detonate at the given location: consume the potion, cancel other users'
    /// throw cursors, play the blast ring, and apply the clumsy effect.
    /// </summary>
    public virtual void Explode(Mobile from, Point3D loc, Map map)
    {
        if (Deleted || map == null)
            return;
        Consume();
        // Check if any other players are using this potion
        for (int i = 0; i < m_Users.Count; i++)
        {
            ThrowTarget targ = m_Users[i].Target as ThrowTarget;
            if (targ != null && targ.Potion == this)
                Target.Cancel(from);
        }
        // Effects
        Effects.PlaySound(loc, map, 0x207);
        Geometry.Circle2D(loc, map, Radius, new DoEffect_Callback(BlastEffect), 270, 90);
        Timer.DelayCall(TimeSpan.FromSeconds(0.3), new TimerStateCallback(CircleEffect2), new object[] { loc, map });
        foreach (Mobile mobile in map.GetMobilesInRange(loc, Radius))
        {
            // NOTE(review): this applies the effect to `from` (the thrower) once
            // per mobile in range, rather than to `mobile` - looks like a bug;
            // confirm intent before changing behavior.
            this.DoClumsy(from);
        }
    }

    #region Effects
    // Draws one segment of the blast ring, if the tile can host the effect.
    public virtual void BlastEffect(Point3D p, Map map)
    {
        if (map.CanFit(p, 12, true, false))
            Effects.SendLocationEffect(p, map, 0x376A, 4, 9);
    }

    // Second half of the blast ring, fired slightly later for a sweep effect.
    public void CircleEffect2(object state)
    {
        object[] states = (object[])state;
        Geometry.Circle2D((Point3D)states[0], (Map)states[1], Radius, new DoEffect_Callback(BlastEffect), 90, 270);
    }
    #endregion

    #region Delay
    // Per-mobile reuse timers (mobile -> pending expiry Timer).
    private static Hashtable m_Delay = new Hashtable();

    // Start (or restart) the 60-second reuse delay for the given mobile.
    public static void AddDelay(Mobile m)
    {
        Timer timer = m_Delay[m] as Timer;
        if (timer != null)
            timer.Stop();
        m_Delay[m] = Timer.DelayCall(TimeSpan.FromSeconds(60), new TimerStateCallback(EndDelay_Callback), m);
    }

    // Remaining reuse delay in whole seconds; 0 when the mobile may throw again.
    public static int GetDelay(Mobile m)
    {
        Timer timer = m_Delay[m] as Timer;
        if (timer != null && timer.Next > DateTime.Now)
            return (int)(timer.Next - DateTime.Now).TotalSeconds;
        return 0;
    }

    private static void EndDelay_Callback(object obj)
    {
        if (obj is Mobile)
            EndDelay((Mobile)obj);
    }

    // Cancel and forget the reuse timer for the given mobile.
    public static void EndDelay(Mobile m)
    {
        Timer timer = m_Delay[m] as Timer;
        if (timer != null)
        {
            timer.Stop();
            m_Delay.Remove(m);
        }
    }
    #endregion

    /// <summary>
    /// Target cursor shown after drinking: picks the landing point, animates the
    /// throw, and schedules the explosion one second later.
    /// </summary>
    private class ThrowTarget : Target
    {
        private BaseClumsyPotion m_Potion;

        public BaseClumsyPotion Potion
        {
            get { return m_Potion; }
        }

        public ThrowTarget(BaseClumsyPotion potion)
            : base(12, true, TargetFlags.None)
        {
            m_Potion = potion;
        }

        protected override void OnTarget(Mobile from, object targeted)
        {
            if (m_Potion.Deleted || m_Potion.Map == Map.Internal)
                return;
            IPoint3D p = targeted as IPoint3D;
            if (p == null || from.Map == null)
                return;
            // Add delay
            BaseClumsyPotion.AddDelay(from);
            SpellHelper.GetSurfaceTop(ref p);
            from.RevealingAction();
            IEntity to;
            if (p is Mobile)
                to = (Mobile)p;
            else
                to = new Entity(Serial.Zero, new Point3D(p), from.Map);
            // Animate the bottle flying to the target, then explode after 1s.
            Effects.SendMovingEffect(from, to, 0xF0D, 7, 0, false, false, m_Potion.Hue, 0);
            Timer.DelayCall(TimeSpan.FromSeconds(1.0), new TimerStateCallback(m_Potion.Explode_Callback), new object[] { from, new Point3D(p), from.Map });
        }
    }
}
}
| |
//
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog.UnitTests.Targets.Wrappers
{
using System;
using System.Collections.Generic;
using System.Threading;
using NLog.Common;
using NLog.Targets;
using NLog.Targets.Wrappers;
using Xunit;
public class FallbackGroupTargetTests : NLogTestBase
{
[Fact]
public void FirstTargetWorks_Write_AllEventsAreWrittenToFirstTarget()
{
var myTarget1 = new MyTarget();
var myTarget2 = new MyTarget();
var myTarget3 = new MyTarget();
var wrapper = CreateAndInitializeFallbackGroupTarget(false, myTarget1, myTarget2, myTarget3);
WriteAndAssertNoExceptions(wrapper);
Assert.Equal(10, myTarget1.WriteCount);
Assert.Equal(0, myTarget2.WriteCount);
Assert.Equal(0, myTarget3.WriteCount);
AssertNoFlushException(wrapper);
}
[Fact]
public void FirstTargetFails_Write_SecondTargetWritesAllEvents()
{
var myTarget1 = new MyTarget { FailCounter = 1 };
var myTarget2 = new MyTarget();
var myTarget3 = new MyTarget();
var wrapper = CreateAndInitializeFallbackGroupTarget(false, myTarget1, myTarget2, myTarget3);
WriteAndAssertNoExceptions(wrapper);
Assert.Equal(1, myTarget1.WriteCount);
Assert.Equal(10, myTarget2.WriteCount);
Assert.Equal(0, myTarget3.WriteCount);
AssertNoFlushException(wrapper);
}
[Fact]
public void FirstTwoTargetsFails_Write_ThirdTargetWritesAllEvents()
{
var myTarget1 = new MyTarget { FailCounter = 1 };
var myTarget2 = new MyTarget { FailCounter = 1 };
var myTarget3 = new MyTarget();
var wrapper = CreateAndInitializeFallbackGroupTarget(false, myTarget1, myTarget2, myTarget3);
WriteAndAssertNoExceptions(wrapper);
Assert.Equal(1, myTarget1.WriteCount);
Assert.Equal(1, myTarget2.WriteCount);
Assert.Equal(10, myTarget3.WriteCount);
AssertNoFlushException(wrapper);
}
[Fact]
public void ReturnToFirstOnSuccessAndSecondTargetSucceeds_Write_ReturnToFirstTargetOnSuccess()
{
var myTarget1 = new MyTarget { FailCounter = 1 };
var myTarget2 = new MyTarget();
var myTarget3 = new MyTarget();
var wrapper = CreateAndInitializeFallbackGroupTarget(true, myTarget1, myTarget2, myTarget3);
WriteAndAssertNoExceptions(wrapper);
Assert.Equal(10, myTarget1.WriteCount);
Assert.Equal(1, myTarget2.WriteCount);
Assert.Equal(0, myTarget3.WriteCount);
AssertNoFlushException(wrapper);
}
[Fact]
public void FallbackGroupTargetSyncTest5()
{
// fail once
var myTarget1 = new MyTarget { FailCounter = 3 };
var myTarget2 = new MyTarget { FailCounter = 3 };
var myTarget3 = new MyTarget { FailCounter = 3 };
var wrapper = CreateAndInitializeFallbackGroupTarget(true, myTarget1, myTarget2, myTarget3);
var exceptions = new List<Exception>();
// no exceptions
for (var i = 0; i < 10; ++i)
{
wrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
}
Assert.Equal(10, exceptions.Count);
for (var i = 0; i < 10; ++i)
{
if (i < 3)
{
Assert.NotNull(exceptions[i]);
}
else
{
Assert.Null(exceptions[i]);
}
}
Assert.Equal(10, myTarget1.WriteCount);
Assert.Equal(3, myTarget2.WriteCount);
Assert.Equal(3, myTarget3.WriteCount);
AssertNoFlushException(wrapper);
}
[Fact]
public void FallbackGroupTargetSyncTest6()
{
// fail once
var myTarget1 = new MyTarget { FailCounter = 10 };
var myTarget2 = new MyTarget { FailCounter = 3 };
var myTarget3 = new MyTarget { FailCounter = 3 };
var wrapper = CreateAndInitializeFallbackGroupTarget(true, myTarget1, myTarget2, myTarget3);
var exceptions = new List<Exception>();
// no exceptions
for (var i = 0; i < 10; ++i)
{
wrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(exceptions.Add));
}
Assert.Equal(10, exceptions.Count);
for (var i = 0; i < 10; ++i)
{
if (i < 3)
{
// for the first 3 rounds, no target is available
Assert.NotNull(exceptions[i]);
Assert.IsType(typeof(InvalidOperationException), exceptions[i]);
Assert.Equal("Some failure.", exceptions[i].Message);
}
else
{
Assert.Null(exceptions[i]);
}
}
Assert.Equal(10, myTarget1.WriteCount);
Assert.Equal(10, myTarget2.WriteCount);
Assert.Equal(3, myTarget3.WriteCount);
AssertNoFlushException(wrapper);
Assert.Equal(1, myTarget1.FlushCount);
Assert.Equal(1, myTarget2.FlushCount);
Assert.Equal(1, myTarget3.FlushCount);
}
// Builds a FallbackGroupTarget over the given targets and initializes the whole group.
private static FallbackGroupTarget CreateAndInitializeFallbackGroupTarget(bool returnToFirstOnSuccess, params Target[] targets)
{
var fallbackGroup = new FallbackGroupTarget(targets);
fallbackGroup.ReturnToFirstOnSuccess = returnToFirstOnSuccess;
// wrapped targets must be initialized before the wrapper itself
for (var i = 0; i < targets.Length; ++i)
{
targets[i].Initialize(null);
}
fallbackGroup.Initialize(null);
return fallbackGroup;
}
// Writes 10 events through the wrapper and asserts every continuation completed without error.
private static void WriteAndAssertNoExceptions(FallbackGroupTarget wrapper)
{
var continuationExceptions = new List<Exception>();
for (var written = 0; written < 10; ++written)
{
wrapper.WriteAsyncLogEvent(LogEventInfo.CreateNullEvent().WithContinuation(continuationExceptions.Add));
}
// every write must have invoked its continuation, and with a null exception
Assert.Equal(10, continuationExceptions.Count);
continuationExceptions.ForEach(ex => Assert.Null(ex));
}
// Flushes the wrapper and fails the test if the flush continuation reported an exception.
private static void AssertNoFlushException(FallbackGroupTarget wrapper)
{
Exception flushException = null;
// dispose the wait handle once the flush continuation has signaled (was leaked before)
using (var flushHit = new ManualResetEvent(false))
{
wrapper.Flush(ex =>
{
flushException = ex;
flushHit.Set();
});
flushHit.WaitOne();
}
if (flushException != null)
Assert.True(false, flushException.ToString());
}
// Test double: counts writes/flushes and throws for the first FailCounter writes.
private class MyTarget : Target
{
public int FlushCount { get; set; }
public int WriteCount { get; set; }
public int FailCounter { get; set; }

protected override void Write(LogEventInfo logEvent)
{
// a target can never have flushed more often than it has written
Assert.True(FlushCount <= WriteCount);
WriteCount += 1;
if (FailCounter <= 0)
return;
FailCounter -= 1;
throw new InvalidOperationException("Some failure.");
}

protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
FlushCount += 1;
asyncContinuation(null);
}
}
}
}
| |
/**
* (C) Copyright IBM Corp. 2018, 2021.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
* IBM OpenAPI SDK Code Generator Version: 99-SNAPSHOT-902c9336-20210513-140138
*/
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using IBM.Cloud.SDK.Core.Authentication;
using IBM.Cloud.SDK.Core.Http;
using IBM.Cloud.SDK.Core.Service;
using IBM.Watson.Assistant.v2.Model;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
namespace IBM.Watson.Assistant.v2
{
public partial class AssistantService : IBMService, IAssistantService
{
// Service name used to resolve external configuration/credentials.
const string defaultServiceName = "assistant";
// Endpoint used when no service URL is supplied via configuration.
private const string defaultServiceUrl = "https://api.us-south.assistant.watson.cloud.ibm.com";
/// <summary>
/// Release date of the API version to use; sent as the `version` query parameter on every request.
/// </summary>
public string Version { get; set; }
/// <summary>
/// Creates a service instance using credentials resolved from external configuration for the default service name.
/// </summary>
public AssistantService(string version) : this(version, defaultServiceName, ConfigBasedAuthenticatorFactory.GetAuthenticator(defaultServiceName)) { }
/// <summary>
/// Creates a service instance with an explicit authenticator and the default service name.
/// </summary>
public AssistantService(string version, IAuthenticator authenticator) : this(version, defaultServiceName, authenticator) {}
/// <summary>
/// Creates a named service instance; credentials are resolved from external configuration for the given service name.
/// </summary>
public AssistantService(string version, string serviceName) : this(version, serviceName, ConfigBasedAuthenticatorFactory.GetAuthenticator(serviceName)) { }
/// <summary>
/// Creates a service instance that sends requests through the supplied HTTP client.
/// NOTE(review): this overload does not set <see cref="Version"/>; presumably the caller sets it afterwards — confirm.
/// </summary>
public AssistantService(IClient httpClient) : base(defaultServiceName, httpClient) { }
/// <summary>
/// Designated constructor: validates the required version and falls back to the default
/// service URL when none was provided via configuration.
/// </summary>
public AssistantService(string version, string serviceName, IAuthenticator authenticator) : base(serviceName, authenticator)
{
if (string.IsNullOrEmpty(version))
{
throw new ArgumentNullException("`version` is required");
}
Version = version;
// Use the default public endpoint unless a URL was supplied via configuration.
if (string.IsNullOrEmpty(ServiceUrl))
{
SetServiceUrl(defaultServiceUrl);
}
}
/// <summary>
/// Create a session.
///
/// Create a new session. A session is used to send user input to a skill and receive responses. It also
/// maintains the state of the conversation. A session persists until it is deleted, or until it times out
/// because of inactivity. (For more information, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-settings)).
/// </summary>
/// <param name="assistantId">Unique identifier of the assistant. To find the assistant ID in the Watson
/// Assistant user interface, open the assistant settings and click **API Details**. For information about
/// creating assistants, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-add#assistant-add-task).
///
/// **Note:** Currently, the v2 API does not support creating assistants.</param>
/// <returns><see cref="SessionResponse" />SessionResponse</returns>
public DetailedResponse<SessionResponse> CreateSession(string assistantId)
{
if (string.IsNullOrEmpty(assistantId))
{
throw new ArgumentNullException("`assistantId` is required for `CreateSession`");
}
else
{
// escape so the id is safe to embed in the URL path
assistantId = Uri.EscapeDataString(assistantId);
}
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
DetailedResponse<SessionResponse> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.PostAsync($"{this.Endpoint}/v2/assistants/{assistantId}/sessions");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "CreateSession"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
// synchronous wait on the async request; failures surface as AggregateException below
result = restRequest.As<SessionResponse>().Result;
if (result == null)
{
result = new DetailedResponse<SessionResponse>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
/// <summary>
/// Delete session.
///
/// Deletes a session explicitly before it times out. (For more information about the session inactivity
/// timeout, see the [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-settings)).
/// </summary>
/// <param name="assistantId">Unique identifier of the assistant. To find the assistant ID in the Watson
/// Assistant user interface, open the assistant settings and click **API Details**. For information about
/// creating assistants, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-add#assistant-add-task).
///
/// **Note:** Currently, the v2 API does not support creating assistants.</param>
/// <param name="sessionId">Unique identifier of the session.</param>
/// <returns><see cref="object" />object</returns>
public DetailedResponse<object> DeleteSession(string assistantId, string sessionId)
{
if (string.IsNullOrEmpty(assistantId))
{
throw new ArgumentNullException("`assistantId` is required for `DeleteSession`");
}
else
{
assistantId = Uri.EscapeDataString(assistantId);
}
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
if (string.IsNullOrEmpty(sessionId))
{
throw new ArgumentNullException("`sessionId` is required for `DeleteSession`");
}
else
{
sessionId = Uri.EscapeDataString(sessionId);
}
DetailedResponse<object> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.DeleteAsync($"{this.Endpoint}/v2/assistants/{assistantId}/sessions/{sessionId}");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "DeleteSession"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
result = restRequest.As<object>().Result;
if (result == null)
{
result = new DetailedResponse<object>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
/// <summary>
/// Send user input to assistant (stateful).
///
/// Send user input to an assistant and receive a response, with conversation state (including context data)
/// stored by Watson Assistant for the duration of the session.
/// </summary>
/// <param name="assistantId">Unique identifier of the assistant. To find the assistant ID in the Watson
/// Assistant user interface, open the assistant settings and click **API Details**. For information about
/// creating assistants, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-add#assistant-add-task).
///
/// **Note:** Currently, the v2 API does not support creating assistants.</param>
/// <param name="sessionId">Unique identifier of the session.</param>
/// <param name="input">An input object that includes the input text. (optional)</param>
/// <param name="context">Context data for the conversation. You can use this property to set or modify context
/// variables, which can also be accessed by dialog nodes. The context is stored by the assistant on a
/// per-session basis.
///
/// **Note:** The total size of the context data stored for a stateful session cannot exceed 100KB.
/// (optional)</param>
/// <param name="userId">A string value that identifies the user who is interacting with the assistant. The
/// client must provide a unique identifier for each individual end user who accesses the application. For
/// user-based plans, this user ID is used to identify unique users for billing purposes. This string cannot
/// contain carriage return, newline, or tab characters. If no value is specified in the input, **user_id** is
/// automatically set to the value of **context.global.session_id**.
///
/// **Note:** This property is the same as the **user_id** property in the global system context. If **user_id**
/// is specified in both locations, the value specified at the root is used. (optional)</param>
/// <returns><see cref="MessageResponse" />MessageResponse</returns>
public DetailedResponse<MessageResponse> Message(string assistantId, string sessionId, MessageInput input = null, MessageContext context = null, string userId = null)
{
if (string.IsNullOrEmpty(assistantId))
{
throw new ArgumentNullException("`assistantId` is required for `Message`");
}
else
{
assistantId = Uri.EscapeDataString(assistantId);
}
if (string.IsNullOrEmpty(sessionId))
{
throw new ArgumentNullException("`sessionId` is required for `Message`");
}
else
{
sessionId = Uri.EscapeDataString(sessionId);
}
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
DetailedResponse<MessageResponse> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.PostAsync($"{this.Endpoint}/v2/assistants/{assistantId}/sessions/{sessionId}/message");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
restRequest.WithHeader("Content-Type", "application/json");
// build the JSON request body, omitting properties the caller did not supply
JObject bodyObject = new JObject();
if (input != null)
{
bodyObject["input"] = JToken.FromObject(input);
}
if (context != null)
{
bodyObject["context"] = JToken.FromObject(context);
}
if (!string.IsNullOrEmpty(userId))
{
bodyObject["user_id"] = userId;
}
var httpContent = new StringContent(JsonConvert.SerializeObject(bodyObject), Encoding.UTF8, HttpMediaType.APPLICATION_JSON);
restRequest.WithBodyContent(httpContent);
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "Message"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
result = restRequest.As<MessageResponse>().Result;
if (result == null)
{
result = new DetailedResponse<MessageResponse>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
/// <summary>
/// Send user input to assistant (stateless).
///
/// Send user input to an assistant and receive a response, with conversation state (including context data)
/// managed by your application.
/// </summary>
/// <param name="assistantId">Unique identifier of the assistant. To find the assistant ID in the Watson
/// Assistant user interface, open the assistant settings and click **API Details**. For information about
/// creating assistants, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-add#assistant-add-task).
///
/// **Note:** Currently, the v2 API does not support creating assistants.</param>
/// <param name="input">An input object that includes the input text. (optional)</param>
/// <param name="context">Context data for the conversation. You can use this property to set or modify context
/// variables, which can also be accessed by dialog nodes. The context is not stored by the assistant. To
/// maintain session state, include the context from the previous response.
///
/// **Note:** The total size of the context data for a stateless session cannot exceed 250KB. (optional)</param>
/// <param name="userId">A string value that identifies the user who is interacting with the assistant. The
/// client must provide a unique identifier for each individual end user who accesses the application. For
/// user-based plans, this user ID is used to identify unique users for billing purposes. This string cannot
/// contain carriage return, newline, or tab characters. If no value is specified in the input, **user_id** is
/// automatically set to the value of **context.global.session_id**.
///
/// **Note:** This property is the same as the **user_id** property in the global system context. If **user_id**
/// is specified in both locations in a message request, the value specified at the root is used.
/// (optional)</param>
/// <returns><see cref="MessageResponseStateless" />MessageResponseStateless</returns>
public DetailedResponse<MessageResponseStateless> MessageStateless(string assistantId, MessageInputStateless input = null, MessageContextStateless context = null, string userId = null)
{
if (string.IsNullOrEmpty(assistantId))
{
throw new ArgumentNullException("`assistantId` is required for `MessageStateless`");
}
else
{
assistantId = Uri.EscapeDataString(assistantId);
}
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
DetailedResponse<MessageResponseStateless> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.PostAsync($"{this.Endpoint}/v2/assistants/{assistantId}/message");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
restRequest.WithHeader("Content-Type", "application/json");
JObject bodyObject = new JObject();
if (input != null)
{
bodyObject["input"] = JToken.FromObject(input);
}
if (context != null)
{
bodyObject["context"] = JToken.FromObject(context);
}
if (!string.IsNullOrEmpty(userId))
{
bodyObject["user_id"] = userId;
}
var httpContent = new StringContent(JsonConvert.SerializeObject(bodyObject), Encoding.UTF8, HttpMediaType.APPLICATION_JSON);
restRequest.WithBodyContent(httpContent);
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "MessageStateless"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
result = restRequest.As<MessageResponseStateless>().Result;
if (result == null)
{
result = new DetailedResponse<MessageResponseStateless>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
/// <summary>
/// Identify intents and entities in multiple user utterances.
///
/// Send multiple user inputs to a dialog skill in a single request and receive information about the intents
/// and entities recognized in each input. This method is useful for testing and comparing the performance of
/// different skills or skill versions.
///
/// This method is available only with Enterprise with Data Isolation plans.
/// </summary>
/// <param name="skillId">Unique identifier of the skill. To find the skill ID in the Watson Assistant user
/// interface, open the skill settings and click **API Details**.</param>
/// <param name="input">An array of input utterances to classify. (optional)</param>
/// <returns><see cref="BulkClassifyResponse" />BulkClassifyResponse</returns>
public DetailedResponse<BulkClassifyResponse> BulkClassify(string skillId, List<BulkClassifyUtterance> input = null)
{
if (string.IsNullOrEmpty(skillId))
{
throw new ArgumentNullException("`skillId` is required for `BulkClassify`");
}
else
{
skillId = Uri.EscapeDataString(skillId);
}
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
DetailedResponse<BulkClassifyResponse> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.PostAsync($"{this.Endpoint}/v2/skills/{skillId}/workspace/bulk_classify");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
restRequest.WithHeader("Content-Type", "application/json");
JObject bodyObject = new JObject();
// only serialize "input" when the caller provided at least one utterance
if (input != null && input.Count > 0)
{
bodyObject["input"] = JToken.FromObject(input);
}
var httpContent = new StringContent(JsonConvert.SerializeObject(bodyObject), Encoding.UTF8, HttpMediaType.APPLICATION_JSON);
restRequest.WithBodyContent(httpContent);
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "BulkClassify"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
result = restRequest.As<BulkClassifyResponse>().Result;
if (result == null)
{
result = new DetailedResponse<BulkClassifyResponse>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
/// <summary>
/// List log events for an assistant.
///
/// List the events from the log of an assistant.
///
/// This method requires Manager access, and is available only with Enterprise plans.
/// </summary>
/// <param name="assistantId">Unique identifier of the assistant. To find the assistant ID in the Watson
/// Assistant user interface, open the assistant settings and click **API Details**. For information about
/// creating assistants, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-assistant-add#assistant-add-task).
///
/// **Note:** Currently, the v2 API does not support creating assistants.</param>
/// <param name="sort">How to sort the returned log events. You can sort by **request_timestamp**. To reverse
/// the sort order, prefix the parameter value with a minus sign (`-`). (optional)</param>
/// <param name="filter">A cacheable parameter that limits the results to those matching the specified filter.
/// For more information, see the
/// [documentation](https://cloud.ibm.com/docs/assistant?topic=assistant-filter-reference#filter-reference).
/// (optional)</param>
/// <param name="pageLimit">The number of records to return in each page of results. (optional)</param>
/// <param name="cursor">A token identifying the page of results to retrieve. (optional)</param>
/// <returns><see cref="LogCollection" />LogCollection</returns>
public DetailedResponse<LogCollection> ListLogs(string assistantId, string sort = null, string filter = null, long? pageLimit = null, string cursor = null)
{
if (string.IsNullOrEmpty(assistantId))
{
throw new ArgumentNullException("`assistantId` is required for `ListLogs`");
}
else
{
assistantId = Uri.EscapeDataString(assistantId);
}
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
DetailedResponse<LogCollection> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.GetAsync($"{this.Endpoint}/v2/assistants/{assistantId}/logs");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
// optional query parameters are only attached when supplied
if (!string.IsNullOrEmpty(sort))
{
restRequest.WithArgument("sort", sort);
}
if (!string.IsNullOrEmpty(filter))
{
restRequest.WithArgument("filter", filter);
}
if (pageLimit != null)
{
restRequest.WithArgument("page_limit", pageLimit);
}
if (!string.IsNullOrEmpty(cursor))
{
restRequest.WithArgument("cursor", cursor);
}
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "ListLogs"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
result = restRequest.As<LogCollection>().Result;
if (result == null)
{
result = new DetailedResponse<LogCollection>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
/// <summary>
/// Delete labeled data.
///
/// Deletes all data associated with a specified customer ID. The method has no effect if no data is associated
/// with the customer ID.
///
/// You associate a customer ID with data by passing the `X-Watson-Metadata` header with a request that passes
/// data. For more information about personal data and customer IDs, see [Information
/// security](https://cloud.ibm.com/docs/assistant?topic=assistant-information-security#information-security).
///
/// **Note:** This operation is intended only for deleting data associated with a single specific customer, not
/// for deleting data associated with multiple customers or for any other purpose. For more information, see
/// [Labeling and deleting data in Watson
/// Assistant](https://cloud.ibm.com/docs/assistant?topic=assistant-information-security#information-security-gdpr-wa).
/// </summary>
/// <param name="customerId">The customer ID for which all data is to be deleted.</param>
/// <returns><see cref="object" />object</returns>
public DetailedResponse<object> DeleteUserData(string customerId)
{
if (string.IsNullOrEmpty(Version))
{
throw new ArgumentNullException("`Version` is required");
}
if (string.IsNullOrEmpty(customerId))
{
throw new ArgumentNullException("`customerId` is required for `DeleteUserData`");
}
DetailedResponse<object> result = null;
try
{
IClient client = this.Client;
SetAuthentication();
var restRequest = client.DeleteAsync($"{this.Endpoint}/v2/user_data");
restRequest.WithHeader("Accept", "application/json");
if (!string.IsNullOrEmpty(Version))
{
restRequest.WithArgument("version", Version);
}
if (!string.IsNullOrEmpty(customerId))
{
restRequest.WithArgument("customer_id", customerId);
}
restRequest.WithHeaders(Common.GetSdkHeaders("conversation", "v2", "DeleteUserData"));
restRequest.WithHeaders(customRequestHeaders);
ClearCustomRequestHeaders();
result = restRequest.As<object>().Result;
if (result == null)
{
result = new DetailedResponse<object>();
}
}
catch (AggregateException ae)
{
throw ae.Flatten();
}
return result;
}
}
}
| |
using ClosedXML.Excel;
using NUnit.Framework;
using System;
using System.Linq;
namespace ClosedXML_Tests
{
[TestFixture]
public class XLRangeBaseTests
{
// A range with no values and no formatting is empty by default.
[Test]
public void IsEmpty1()
{
// workbooks are disposable; dispose them consistently with the later tests
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLRange range = ws.Range("A1:B2");
Assert.IsTrue(range.IsEmpty());
}
}
// A range with no values and no formatting is empty even when formatting is considered.
[Test]
public void IsEmpty2()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLRange range = ws.Range("A1:B2");
Assert.IsTrue(range.IsEmpty(true));
}
}
// Formatting alone does not make a range non-empty with the default overload.
[Test]
public void IsEmpty3()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLCell cell = ws.Cell(1, 1);
cell.Style.Fill.BackgroundColor = XLColor.Red;
IXLRange range = ws.Range("A1:B2");
Assert.IsTrue(range.IsEmpty());
}
}
// Formatting alone does not make a range non-empty when formatting is explicitly ignored.
[Test]
public void IsEmpty4()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLCell cell = ws.Cell(1, 1);
cell.Style.Fill.BackgroundColor = XLColor.Red;
IXLRange range = ws.Range("A1:B2");
Assert.IsTrue(range.IsEmpty(false));
}
}
// Formatting makes a range non-empty when formatting is considered.
[Test]
public void IsEmpty5()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLCell cell = ws.Cell(1, 1);
cell.Style.Fill.BackgroundColor = XLColor.Red;
IXLRange range = ws.Range("A1:B2");
Assert.IsFalse(range.IsEmpty(true));
}
}
// A cell value makes a range non-empty.
[Test]
public void IsEmpty6()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLCell cell = ws.Cell(1, 1);
cell.Value = "X";
IXLRange range = ws.Range("A1:B2");
Assert.IsFalse(range.IsEmpty());
}
}
// A workbook-scoped named range can point at a single cell.
[Test]
public void SingleCell()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
ws.Cell(1, 1).Value = "Hello World!";
wb.NamedRanges.Add("SingleCell", "Sheet1!$A$1");
IXLRange range = wb.Range("SingleCell");
Assert.AreEqual(1, range.CellsUsed().Count());
Assert.AreEqual("Hello World!", range.CellsUsed().Single().GetString());
}
}
// A named range can reference a structured table column; the header row is excluded.
[Test]
public void TableRange()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
IXLRangeColumn rangeColumn = ws.Column(1).Column(1, 4);
rangeColumn.Cell(1).Value = "FName";
rangeColumn.Cell(2).Value = "John";
rangeColumn.Cell(3).Value = "Hank";
rangeColumn.Cell(4).Value = "Dagny";
IXLTable table = rangeColumn.CreateTable();
wb.NamedRanges.Add("FNameColumn", String.Format("{0}[{1}]", table.Name, "FName"));
IXLRange namedRange = wb.Range("FNameColumn");
Assert.AreEqual(3, namedRange.Cells().Count());
Assert.IsTrue(
namedRange.CellsUsed().Select(cell => cell.GetString()).SequenceEqual(new[] { "John", "Hank", "Dagny" }));
}
}
// A worksheet-scoped named cell is addressable by name.
[Test]
public void WsNamedCell()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
ws.Cell(1, 1).SetValue("Test").AddToNamed("TestCell", XLScope.Worksheet);
Assert.AreEqual("Test", ws.Cell("TestCell").GetString());
}
}
// Cells() accepts a mixed list of names and addresses.
[Test]
public void WsNamedCells()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
ws.Cell(1, 1).SetValue("Test").AddToNamed("TestCell", XLScope.Worksheet);
ws.Cell(2, 1).SetValue("B");
IXLCells cells = ws.Cells("TestCell, A2");
Assert.AreEqual("Test", cells.First().GetString());
Assert.AreEqual("B", cells.Last().GetString());
}
}
// A worksheet-scoped named range resolves back to the original (fixed) address.
[Test]
public void WsNamedRange()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
ws.Cell(1, 1).SetValue("A");
ws.Cell(2, 1).SetValue("B");
IXLRange original = ws.Range("A1:A2");
original.AddToNamed("TestRange", XLScope.Worksheet);
IXLRange named = ws.Range("TestRange");
Assert.AreEqual(original.RangeAddress.ToStringFixed(), named.RangeAddress.ToString());
}
}
// Ranges() accepts a mixed list of names and addresses.
[Test]
public void WsNamedRanges()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
ws.Cell(1, 1).SetValue("A");
ws.Cell(2, 1).SetValue("B");
ws.Cell(3, 1).SetValue("C");
IXLRange original = ws.Range("A1:A2");
original.AddToNamed("TestRange", XLScope.Worksheet);
IXLRanges namedRanges = ws.Ranges("TestRange, A3");
Assert.AreEqual(original.RangeAddress.ToStringFixed(), namedRanges.First().RangeAddress.ToString());
Assert.AreEqual("$A$3:$A$3", namedRanges.Last().RangeAddress.ToStringFixed());
}
}
// A single named range may refer to multiple disjoint areas.
[Test]
public void WsNamedRangesOneString()
{
using (var wb = new XLWorkbook())
{
IXLWorksheet ws = wb.Worksheets.Add("Sheet1");
ws.NamedRanges.Add("TestRange", "Sheet1!$A$1,Sheet1!$A$3");
IXLRanges namedRanges = ws.Ranges("TestRange");
Assert.AreEqual("$A$1:$A$1", namedRanges.First().RangeAddress.ToStringFixed());
Assert.AreEqual("$A$3:$A$3", namedRanges.Last().RangeAddress.ToStringFixed());
}
}
// Disabled: named-range literal round-trip; kept for reference until save/load support is verified.
//[Test]
//public void WsNamedRangeLiteral()
//{
// var wb = new XLWorkbook();
// var ws = wb.Worksheets.Add("Sheet1");
// ws.NamedRanges.Add("TestRange", "\"Hello\"");
// using (MemoryStream memoryStream = new MemoryStream())
// {
// wb.SaveAs(memoryStream, true);
// var wb2 = new XLWorkbook(memoryStream);
// var text = wb2.Worksheet("Sheet1").NamedRanges.First()
// memoryStream.Close();
// }
//}
// Grow() expands a range by n cells in every direction, clamped at the sheet edges.
[Test]
public void GrowRange()
{
using (var wb = new XLWorkbook())
{
var ws = wb.AddWorksheet("Sheet1");
Assert.AreEqual("A1:B2", ws.Cell("A1").AsRange().Grow().RangeAddress.ToString());
Assert.AreEqual("A1:B3", ws.Cell("A2").AsRange().Grow().RangeAddress.ToString());
Assert.AreEqual("A1:C2", ws.Cell("B1").AsRange().Grow().RangeAddress.ToString());
Assert.AreEqual("E4:G6", ws.Cell("F5").AsRange().Grow().RangeAddress.ToString());
Assert.AreEqual("D3:H7", ws.Cell("F5").AsRange().Grow(2).RangeAddress.ToString());
Assert.AreEqual("A1:DB105", ws.Cell("F5").AsRange().Grow(100).RangeAddress.ToString());
}
}
// Shrink() contracts a range by n cells per side; null when nothing remains.
[Test]
public void ShrinkRange()
{
using (var wb = new XLWorkbook())
{
var ws = wb.AddWorksheet("Sheet1");
Assert.Null(ws.Cell("A1").AsRange().Shrink());
Assert.Null(ws.Range("B2:C3").Shrink());
Assert.AreEqual("C3:C3", ws.Range("B2:D4").Shrink().RangeAddress.ToString());
Assert.AreEqual("K11:P16", ws.Range("A1:Z26").Shrink(10).RangeAddress.ToString());
// Grow and shrink back
Assert.AreEqual("Z26:Z26", ws.Cell("Z26").AsRange().Grow(10).Shrink(10).RangeAddress.ToString());
}
}
// Intersection() returns the overlapping area, or null when ranges do not overlap
// or live on different worksheets.
[Test]
public void Intersection()
{
using (var wb = new XLWorkbook())
{
var ws = wb.AddWorksheet("Sheet1");
Assert.AreEqual("D9:G11", ws.Range("B9:I11").Intersection(ws.Range("D4:G16")).RangeAddress.ToString());
Assert.AreEqual("E9:G11", ws.Range("E9:I11").Intersection(ws.Range("D4:G16")).RangeAddress.ToString());
Assert.AreEqual("E9:E9", ws.Cell("E9").AsRange().Intersection(ws.Range("D4:G16")).RangeAddress.ToString());
Assert.AreEqual("E9:E9", ws.Range("D4:G16").Intersection(ws.Cell("E9").AsRange()).RangeAddress.ToString());
Assert.Null(ws.Cell("A1").AsRange().Intersection(ws.Cell("C3").AsRange()));
Assert.Null(ws.Range("A1:C3").Intersection(null));
var otherWs = wb.AddWorksheet("Sheet2");
Assert.Null(ws.Intersection(otherWs));
Assert.Null(ws.Cell("A1").AsRange().Intersection(otherWs.Cell("A2").AsRange()));
}
}
// Union() yields the distinct cells of both ranges.
[Test]
public void Union()
{
using (var wb = new XLWorkbook())
{
var ws = wb.AddWorksheet("Sheet1");
Assert.AreEqual(64, ws.Range("B9:I11").Union(ws.Range("D4:G16")).Count());
Assert.AreEqual(58, ws.Range("E9:I11").Union(ws.Range("D4:G16")).Count());
Assert.AreEqual(52, ws.Cell("E9").AsRange().Union(ws.Range("D4:G16")).Count());
Assert.AreEqual(52, ws.Range("D4:G16").Union(ws.Cell("E9").AsRange()).Count());
Assert.AreEqual(2, ws.Cell("A1").AsRange().Union(ws.Cell("C3").AsRange()).Count());
Assert.AreEqual(9, ws.Range("A1:C3").Union(null).Count());
var otherWs = wb.AddWorksheet("Sheet2");
Assert.False(ws.Union(otherWs).Any());
Assert.False(ws.Cell("A1").AsRange().Union(otherWs.Cell("A2").AsRange()).Any());
}
}
// Difference() yields the cells of the first range not contained in the second.
[Test]
public void Difference()
{
using (var wb = new XLWorkbook())
{
var ws = wb.AddWorksheet("Sheet1");
Assert.AreEqual(12, ws.Range("B9:I11").Difference(ws.Range("D4:G16")).Count());
Assert.AreEqual(6, ws.Range("E9:I11").Difference(ws.Range("D4:G16")).Count());
Assert.AreEqual(0, ws.Cell("E9").AsRange().Difference(ws.Range("D4:G16")).Count());
Assert.AreEqual(51, ws.Range("D4:G16").Difference(ws.Cell("E9").AsRange()).Count());
Assert.AreEqual(1, ws.Cell("A1").AsRange().Difference(ws.Cell("C3").AsRange()).Count());
Assert.AreEqual(9, ws.Range("A1:C3").Difference(null).Count());
var otherWs = wb.AddWorksheet("Sheet2");
Assert.False(ws.Difference(otherWs).Any());
Assert.False(ws.Cell("A1").AsRange().Difference(otherWs.Cell("A2").AsRange()).Any());
}
}
// SurroundingCells() returns the ring of cells around a range, clipped to the sheet.
[Test]
public void SurroundingCells()
{
using (var wb = new XLWorkbook())
{
var ws = wb.AddWorksheet("Sheet1");
Assert.AreEqual(3, ws.FirstCell().AsRange().SurroundingCells().Count());
Assert.AreEqual(8, ws.Cell("C3").AsRange().SurroundingCells().Count());
Assert.AreEqual(16, ws.Range("C3:D6").AsRange().SurroundingCells().Count());
Assert.AreEqual(0, ws.Range("C3:D6").AsRange().SurroundingCells(c => !c.IsEmpty()).Count());
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// Don't override IsAlwaysNormalized because it is just a Unicode Transformation and could be confused.
//
using System;
using System.Runtime.Serialization;
using System.Diagnostics;
using System.Diagnostics.Contracts;
namespace System.Text
{
public class UTF7Encoding : Encoding
{
// The modified base 64 alphabet used by UTF-7 (RFC 2152/1642).
private const String base64Chars =
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
//   0123456789111111111122222222223333333333444444444455555555556666
//             012345678901234567890123456789012345678901234567890123

// These are the characters that can be directly encoded in UTF7.
private const String directChars =
    "\t\n\r '(),-./0123456789:?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";

// These are the characters that can be optionally directly encoded in UTF7.
private const String optionalChars =
    "!\"#$%&*;<=>@[]^_`{|}";

// Used by Encoding.UTF7 for lazy initialization
// The initialization code will not be run until a static member of the class is referenced
internal static readonly UTF7Encoding s_default = new UTF7Encoding();

// The set of base 64 characters.
private byte[] _base64Bytes;
// The decoded bits for every base64 values. This array has a size of 128 elements.
// The index is the code point value of the base 64 characters. The value is -1 if
// the code point is not a valid base 64 character. Otherwise, the value is a value
// from 0 ~ 63.
private sbyte[] _base64Values;
// The array to decide if a Unicode code point below 0x80 can be directly encoded in UTF7.
// This array has a size of 128.
private bool[] _directEncode;

// True when the optionalChars set is also direct-encoded (set by the ctor).
private bool _allowOptionals;

// Windows code page number for UTF-7.
private const int UTF7_CODEPAGE = 65000;
// Default constructor: optional characters are NOT direct-encoded.
public UTF7Encoding()
    : this(false)
{
}

// allowOptionals: when true, the optionalChars set is emitted directly
// instead of being base 64 encoded.
public UTF7Encoding(bool allowOptionals)
    : base(UTF7_CODEPAGE) //Set the data item.
{
    // Allowing optionals?
    _allowOptionals = allowOptionals;

    // Make our tables
    MakeTables();
}
// Builds the per-instance lookup tables used by the encode/decode loops:
// the base 64 alphabet as bytes, the reverse base 64 lookup, and the set
// of characters that may be emitted without base 64 encoding.
private void MakeTables()
{
    // Byte form of the base 64 alphabet.
    _base64Bytes = new byte[64];
    for (int i = 0; i < 64; i++)
        _base64Bytes[i] = (byte)base64Chars[i];

    // Reverse lookup: ASCII code point -> base 64 value, -1 when not base 64.
    _base64Values = new sbyte[128];
    for (int i = 0; i < _base64Values.Length; i++)
        _base64Values[i] = -1;
    for (sbyte v = 0; v < 64; v++)
        _base64Values[_base64Bytes[v]] = v;

    // Characters below 0x80 that are emitted directly (no base 64 mode).
    _directEncode = new bool[128];
    foreach (char c in directChars)
        _directEncode[c] = true;
    if (_allowOptionals)
    {
        foreach (char c in optionalChars)
            _directEncode[c] = true;
    }
}
// We go ahead and set this because Encoding expects it, however nothing can fall back in UTF7.
internal override void SetDefaultFallbacks()
{
    // UTF7 had an odd decoderFallback behavior, and the Encoder fallback
    // is irrelevant because we encode surrogates individually and never check for unmatched ones
    // (so nothing can fallback during encoding)
    this.encoderFallback = new EncoderReplacementFallback(String.Empty);
    this.decoderFallback = new DecoderUTF7Fallback();
}
// Two UTF7Encoding instances are equal when they agree on optional-char
// handling and on both fallback objects.
public override bool Equals(Object value)
{
    UTF7Encoding other = value as UTF7Encoding;
    if (other == null)
        return false;

    return (_allowOptionals == other._allowOptionals) &&
           EncoderFallback.Equals(other.EncoderFallback) &&
           DecoderFallback.Equals(other.DecoderFallback);
}
// Compared to all the other encodings, variations of UTF7 are unlikely
public override int GetHashCode()
{
    // Same additive scheme as the other encodings: code page plus both fallbacks.
    int hash = this.CodePage;
    hash += this.EncoderFallback.GetHashCode();
    hash += this.DecoderFallback.GetHashCode();
    return hash;
}
// The following methods are copied from EncodingNLS.cs.
// Unfortunately EncodingNLS.cs is internal and we're public, so we have to reimplement them here.
// These should be kept in sync for the following classes:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding

// Returns the number of bytes required to encode a range of characters in
// a character array.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetByteCount(char[] chars, int index, int count)
{
    // Validate input parameters (nameof keeps ParamName refactor-safe).
    if (chars == null)
        throw new ArgumentNullException(nameof(chars), SR.ArgumentNull_Array);

    if (index < 0 || count < 0)
        throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum);

    if (chars.Length - index < count)
        throw new ArgumentOutOfRangeException(nameof(chars), SR.ArgumentOutOfRange_IndexCountBuffer);
    Contract.EndContractBlock();

    // If no input, return 0, avoid fixed empty array problem
    if (count == 0)
        return 0;

    // Just call the pointer version
    fixed (char* pChars = chars)
        return GetByteCount(pChars + index, count, null);
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetByteCount(string s)
{
    // Validate input (nameof keeps ParamName refactor-safe).
    if (s == null)
        throw new ArgumentNullException(nameof(s));
    Contract.EndContractBlock();

    fixed (char* pChars = s)
        return GetByteCount(pChars, s.Length, null);
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public override unsafe int GetByteCount(char* chars, int count)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (chars == null)
        throw new ArgumentNullException(nameof(chars), SR.ArgumentNull_Array);

    if (count < 0)
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
    Contract.EndContractBlock();

    // Call it with empty encoder
    return GetByteCount(chars, count, null);
}
// Parent method is safe.
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
public override unsafe int GetBytes(string s, int charIndex, int charCount,
                                    byte[] bytes, int byteIndex)
{
    // Validate parameters (nameof keeps ParamName refactor-safe).
    if (s == null || bytes == null)
        throw new ArgumentNullException((s == null ? nameof(s) : nameof(bytes)), SR.ArgumentNull_Array);

    if (charIndex < 0 || charCount < 0)
        throw new ArgumentOutOfRangeException((charIndex < 0 ? nameof(charIndex) : nameof(charCount)), SR.ArgumentOutOfRange_NeedNonNegNum);

    if (s.Length - charIndex < charCount)
        throw new ArgumentOutOfRangeException(nameof(s), SR.ArgumentOutOfRange_IndexCount);

    if (byteIndex < 0 || byteIndex > bytes.Length)
        throw new ArgumentOutOfRangeException(nameof(byteIndex), SR.ArgumentOutOfRange_Index);
    Contract.EndContractBlock();

    int byteCount = bytes.Length - byteIndex;

    // Fixed doesn't like empty arrays
    if (bytes.Length == 0)
        bytes = new byte[1];

    fixed (char* pChars = s) fixed (byte* pBytes = &bytes[0])
        return GetBytes(pChars + charIndex, charCount, pBytes + byteIndex, byteCount, null);
}
// Encodes a range of characters in a character array into a range of bytes
// in a byte array. An exception occurs if the byte array is not large
// enough to hold the complete encoding of the characters. The
// GetByteCount method can be used to determine the exact number of
// bytes that will be produced for a given range of characters.
// Alternatively, the GetMaxByteCount method can be used to
// determine the maximum number of bytes that will be produced for a given
// number of characters, regardless of the actual character values.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetBytes(char[] chars, int charIndex, int charCount,
                                    byte[] bytes, int byteIndex)
{
    // Validate parameters (nameof keeps ParamName refactor-safe).
    if (chars == null || bytes == null)
        throw new ArgumentNullException((chars == null ? nameof(chars) : nameof(bytes)), SR.ArgumentNull_Array);

    if (charIndex < 0 || charCount < 0)
        throw new ArgumentOutOfRangeException((charIndex < 0 ? nameof(charIndex) : nameof(charCount)), SR.ArgumentOutOfRange_NeedNonNegNum);

    if (chars.Length - charIndex < charCount)
        throw new ArgumentOutOfRangeException(nameof(chars), SR.ArgumentOutOfRange_IndexCountBuffer);

    if (byteIndex < 0 || byteIndex > bytes.Length)
        throw new ArgumentOutOfRangeException(nameof(byteIndex), SR.ArgumentOutOfRange_Index);
    Contract.EndContractBlock();

    // If nothing to encode return 0, avoid fixed problem
    if (charCount == 0)
        return 0;

    // Just call pointer version
    int byteCount = bytes.Length - byteIndex;

    // Fixed doesn't like empty arrays
    if (bytes.Length == 0)
        bytes = new byte[1];

    fixed (char* pChars = chars) fixed (byte* pBytes = &bytes[0])
        // Remember that byteCount is # to decode, not size of array.
        return GetBytes(pChars + charIndex, charCount, pBytes + byteIndex, byteCount, null);
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public override unsafe int GetBytes(char* chars, int charCount, byte* bytes, int byteCount)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (bytes == null || chars == null)
        throw new ArgumentNullException(bytes == null ? nameof(bytes) : nameof(chars), SR.ArgumentNull_Array);

    if (charCount < 0 || byteCount < 0)
        throw new ArgumentOutOfRangeException((charCount < 0 ? nameof(charCount) : nameof(byteCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
    Contract.EndContractBlock();

    return GetBytes(chars, charCount, bytes, byteCount, null);
}
// Returns the number of characters produced by decoding a range of bytes
// in a byte array.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetCharCount(byte[] bytes, int index, int count)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (bytes == null)
        throw new ArgumentNullException(nameof(bytes), SR.ArgumentNull_Array);

    if (index < 0 || count < 0)
        throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum);

    if (bytes.Length - index < count)
        throw new ArgumentOutOfRangeException(nameof(bytes), SR.ArgumentOutOfRange_IndexCountBuffer);
    Contract.EndContractBlock();

    // If no input just return 0, fixed doesn't like 0 length arrays.
    if (count == 0)
        return 0;

    // Just call pointer version
    fixed (byte* pBytes = bytes)
        return GetCharCount(pBytes + index, count, null);
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public override unsafe int GetCharCount(byte* bytes, int count)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (bytes == null)
        throw new ArgumentNullException(nameof(bytes), SR.ArgumentNull_Array);

    if (count < 0)
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
    Contract.EndContractBlock();

    return GetCharCount(bytes, count, null);
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe int GetChars(byte[] bytes, int byteIndex, int byteCount,
                                    char[] chars, int charIndex)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (bytes == null || chars == null)
        throw new ArgumentNullException(bytes == null ? nameof(bytes) : nameof(chars), SR.ArgumentNull_Array);

    if (byteIndex < 0 || byteCount < 0)
        throw new ArgumentOutOfRangeException((byteIndex < 0 ? nameof(byteIndex) : nameof(byteCount)), SR.ArgumentOutOfRange_NeedNonNegNum);

    if (bytes.Length - byteIndex < byteCount)
        throw new ArgumentOutOfRangeException(nameof(bytes), SR.ArgumentOutOfRange_IndexCountBuffer);

    if (charIndex < 0 || charIndex > chars.Length)
        throw new ArgumentOutOfRangeException(nameof(charIndex), SR.ArgumentOutOfRange_Index);
    Contract.EndContractBlock();

    // If no input, return 0 & avoid fixed problem
    if (byteCount == 0)
        return 0;

    // Just call pointer version
    int charCount = chars.Length - charIndex;

    // Fixed doesn't like empty arrays
    if (chars.Length == 0)
        chars = new char[1];

    fixed (byte* pBytes = bytes) fixed (char* pChars = &chars[0])
        // Remember that charCount is # to decode, not size of array
        return GetChars(pBytes + byteIndex, byteCount, pChars + charIndex, charCount, null);
}
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
[CLSCompliant(false)]
public unsafe override int GetChars(byte* bytes, int byteCount, char* chars, int charCount)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (bytes == null || chars == null)
        throw new ArgumentNullException(bytes == null ? nameof(bytes) : nameof(chars), SR.ArgumentNull_Array);

    if (charCount < 0 || byteCount < 0)
        throw new ArgumentOutOfRangeException((charCount < 0 ? nameof(charCount) : nameof(byteCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
    Contract.EndContractBlock();

    return GetChars(bytes, byteCount, chars, charCount, null);
}
// Returns a string containing the decoded representation of a range of
// bytes in a byte array.
//
// All of our public Encodings that don't use EncodingNLS must have this (including EncodingNLS)
// So if you fix this, fix the others. Currently those include:
// EncodingNLS, UTF7Encoding, UTF8Encoding, UTF32Encoding, ASCIIEncoding, UnicodeEncoding
// parent method is safe
public override unsafe String GetString(byte[] bytes, int index, int count)
{
    // Validate Parameters (nameof keeps ParamName refactor-safe).
    if (bytes == null)
        throw new ArgumentNullException(nameof(bytes), SR.ArgumentNull_Array);

    if (index < 0 || count < 0)
        throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum);

    if (bytes.Length - index < count)
        throw new ArgumentOutOfRangeException(nameof(bytes), SR.ArgumentOutOfRange_IndexCountBuffer);
    Contract.EndContractBlock();

    // Avoid problems with empty input buffer
    if (count == 0) return String.Empty;

    fixed (byte* pBytes = bytes)
        return String.CreateStringFromEncoding(
            pBytes + index, count, this);
}
//
// End of standard methods copied from EncodingNLS.cs
//

// Counting is implemented as a dry-run encode: GetBytes with bytes == null
// walks the input and tallies the bytes it would emit.
internal override unsafe int GetByteCount(char* chars, int count, EncoderNLS baseEncoder)
{
    Debug.Assert(chars != null, "[UTF7Encoding.GetByteCount]chars!=null");
    Debug.Assert(count >= 0, "[UTF7Encoding.GetByteCount]count >=0");

    // Just call GetBytes with bytes == null
    return GetBytes(chars, count, null, 0, baseEncoder);
}
// Core UTF-7 encode loop (also used for counting when bytes == null).
// State machine: 'bits' accumulates pending Unicode bits; 'bitCount' is the
// number of pending bits, or -1 when NOT in base 64 (shifted) mode. State is
// carried across calls via the (optional) encoder.
internal override unsafe int GetBytes(char* chars, int charCount,
                                      byte* bytes, int byteCount, EncoderNLS baseEncoder)
{
    Debug.Assert(byteCount >= 0, "[UTF7Encoding.GetBytes]byteCount >=0");
    Debug.Assert(chars != null, "[UTF7Encoding.GetBytes]chars!=null");
    Debug.Assert(charCount >= 0, "[UTF7Encoding.GetBytes]charCount >=0");

    // Get encoder info
    UTF7Encoding.Encoder encoder = (UTF7Encoding.Encoder)baseEncoder;

    // Default bits & count (no base 64 mode, nothing pending)
    int bits = 0;
    int bitCount = -1;

    // prepare our helpers
    Encoding.EncodingByteBuffer buffer = new Encoding.EncodingByteBuffer(
        this, encoder, bytes, byteCount, chars, charCount);

    if (encoder != null)
    {
        bits = encoder.bits;
        bitCount = encoder.bitCount;

        // May have had too many left over from the previous call; flush whole
        // 6-bit groups now.
        while (bitCount >= 6)
        {
            bitCount -= 6;
            // If we fail we'll never really have enough room
            if (!buffer.AddByte(_base64Bytes[(bits >> bitCount) & 0x3F]))
                ThrowBytesOverflow(encoder, buffer.Count == 0);
        }
    }

    while (buffer.MoreData)
    {
        char currentChar = buffer.GetNextChar();

        if (currentChar < 0x80 && _directEncode[currentChar])
        {
            // Direct-encodable char: leave base 64 mode first if we're in it.
            if (bitCount >= 0)
            {
                if (bitCount > 0)
                {
                    // Try to add the next byte.
                    // NOTE: by C# precedence this parses as
                    // _base64Bytes[(bits << (6 - bitCount)) & 0x3F].
                    if (!buffer.AddByte(_base64Bytes[bits << 6 - bitCount & 0x3F]))
                        break;                          // Stop here, didn't throw
                    bitCount = 0;
                }

                // Need to emit '-' and our char, 2 bytes total
                if (!buffer.AddByte((byte)'-'))
                    break;                              // Stop here, didn't throw
                bitCount = -1;
            }

            // Need to emit our char
            if (!buffer.AddByte((byte)currentChar))
                break;                                  // Stop here, didn't throw
        }
        else if (bitCount < 0 && currentChar == '+')
        {
            // Literal '+' outside base 64 mode is escaped as "+-".
            if (!buffer.AddByte((byte)'+', (byte)'-'))
                break;                                  // Stop here, didn't throw
        }
        else
        {
            if (bitCount < 0)
            {
                // Need to emit a + and 12 bits (3 bytes)
                // Only 12 of the 16 bits will be emitted this time, the other 4 wait 'til next time
                if (!buffer.AddByte((byte)'+'))
                    break;                              // Stop here, didn't throw

                // We're now in bit mode, but haven't stored data yet
                bitCount = 0;
            }

            // Add our bits (16 bits of the UTF-16 code unit)
            bits = bits << 16 | currentChar;
            bitCount += 16;

            // Emit as many complete 6-bit groups as there is room for.
            while (bitCount >= 6)
            {
                bitCount -= 6;
                if (!buffer.AddByte(_base64Bytes[(bits >> bitCount) & 0x3F]))
                {
                    bitCount += 6;                      // We didn't use these bits
                    currentChar = buffer.GetNextChar(); // We're processing this char still, but AddByte
                                                        // --'d it when we ran out of space
                    break;                              // Stop here, not enough room for bytes
                }
            }

            if (bitCount >= 6)
                break;                                  // Didn't have room to encode enough bits
        }
    }

    // Now if we have bits left over we have to encode them.
    // MustFlush may have been cleared by encoding.ThrowBytesOverflow earlier if converting
    if (bitCount >= 0 && (encoder == null || encoder.MustFlush))
    {
        // Do we have bits we have to stick in?
        if (bitCount > 0)
        {
            if (buffer.AddByte(_base64Bytes[(bits << (6 - bitCount)) & 0x3F]))
            {
                // Emitted spare bits, 0 bits left
                bitCount = 0;
            }
        }

        // If converting and failed bitCount above, then we'll fail this too
        if (buffer.AddByte((byte)'-'))
        {
            // turned off bit mode
            bits = 0;
            bitCount = -1;
        }
        else
            // If not successful, convert will maintain state for next time, also
            // AddByte will have decremented our char count, however we need it to remain the same
            buffer.GetNextChar();
    }

    // Do we have an encoder we're allowed to use?
    // bytes == null if counting, so don't use encoder then
    if (bytes != null && encoder != null)
    {
        // We already cleared bits & bitcount for mustflush case
        encoder.bits = bits;
        encoder.bitCount = bitCount;
        encoder.m_charsUsed = buffer.CharsUsed;
    }

    return buffer.Count;
}
// Counting is implemented as a dry-run decode: GetChars with chars == null
// walks the input and tallies the chars it would emit.
internal override unsafe int GetCharCount(byte* bytes, int count, DecoderNLS baseDecoder)
{
    Debug.Assert(count >= 0, "[UTF7Encoding.GetCharCount]count >=0");
    Debug.Assert(bytes != null, "[UTF7Encoding.GetCharCount]bytes!=null");

    // Just call GetChars with null char* to do counting
    return GetChars(bytes, count, null, 0, baseDecoder);
}
// Core UTF-7 decode loop (also used for counting when chars == null).
// State machine: 'bits'/'bitCount' accumulate base 64 bits (bitCount == -1
// when not in shifted mode); 'firstByte' is true right after a '+' so that
// "+-" can be decoded to a literal '+'. State is carried across calls via
// the (optional) decoder.
internal override unsafe int GetChars(byte* bytes, int byteCount,
                                      char* chars, int charCount, DecoderNLS baseDecoder)
{
    Debug.Assert(byteCount >= 0, "[UTF7Encoding.GetChars]byteCount >=0");
    Debug.Assert(bytes != null, "[UTF7Encoding.GetChars]bytes!=null");
    Debug.Assert(charCount >= 0, "[UTF7Encoding.GetChars]charCount >=0");

    // Might use a decoder
    UTF7Encoding.Decoder decoder = (UTF7Encoding.Decoder)baseDecoder;

    // Get our output buffer info.
    Encoding.EncodingCharBuffer buffer = new Encoding.EncodingCharBuffer(
        this, decoder, chars, charCount, bytes, byteCount);

    // Get decoder info
    int bits = 0;
    int bitCount = -1;
    bool firstByte = false;

    if (decoder != null)
    {
        bits = decoder.bits;
        bitCount = decoder.bitCount;
        firstByte = decoder.firstByte;

        Debug.Assert(firstByte == false || decoder.bitCount <= 0,
            "[UTF7Encoding.GetChars]If remembered bits, then first byte flag shouldn't be set");
    }

    // We may have had bits in the decoder that we couldn't output last time, so do so now
    if (bitCount >= 16)
    {
        // Check our decoder buffer
        if (!buffer.AddChar((char)((bits >> (bitCount - 16)) & 0xFFFF)))
            ThrowCharsOverflow(decoder, true); // Always throw, they need at least 1 char even in Convert

        // Used this one, clean up extra bits
        bitCount -= 16;
    }

    // Loop through the input
    while (buffer.MoreData)
    {
        byte currentByte = buffer.GetNextByte();
        int c;

        if (bitCount >= 0)
        {
            //
            // Modified base 64 encoding.
            //
            sbyte v;
            if (currentByte < 0x80 && ((v = _base64Values[currentByte]) >= 0))
            {
                firstByte = false;
                bits = (bits << 6) | ((byte)v);
                bitCount += 6;
                if (bitCount >= 16)
                {
                    // A full UTF-16 code unit has accumulated.
                    c = (bits >> (bitCount - 16)) & 0xFFFF;
                    bitCount -= 16;
                }
                // If not enough bits just continue
                else continue;
            }
            else
            {
                // If it wasn't a base 64 byte, everything's going to turn off base 64 mode
                bitCount = -1;

                if (currentByte != '-')
                {
                    // >= 0x80 (because of 1st if statement)
                    // We need this check since the _base64Values[b] check below need b <= 0x7f.
                    // This is not a valid base 64 byte. Terminate the shifted-sequence and
                    // emit this byte.

                    // not in base 64 table
                    // According to the RFC 1642 and the example code of UTF-7
                    // in Unicode 2.0, we should just zero-extend the invalid UTF7 byte

                    // Chars won't be updated unless this works, try to fallback
                    if (!buffer.Fallback(currentByte))
                        break;                          // Stop here, didn't throw

                    // Used that byte, we're done with it
                    continue;
                }

                //
                // The encoding for '+' is "+-".
                //
                if (firstByte) c = '+';
                // We just turn it off if not emitting a +, so we're done.
                else continue;
            }
            //
            // End of modified base 64 encoding block.
            //
        }
        else if (currentByte == '+')
        {
            //
            // Found the start of a modified base 64 encoding block or a plus sign.
            //
            bitCount = 0;
            firstByte = true;
            continue;
        }
        else
        {
            // Normal character
            if (currentByte >= 0x80)
            {
                // Try to fallback
                if (!buffer.Fallback(currentByte))
                    break;                              // Stop here, didn't throw

                // Done falling back
                continue;
            }

            // Use the normal character
            c = currentByte;
        }

        if (c >= 0)
        {
            // Check our buffer
            if (!buffer.AddChar((char)c))
            {
                // No room. If it was a plain char we'll try again later.
                // Note, we'll consume this byte and stick it in decoder, even if we can't output it
                if (bitCount >= 0)                      // Can we remember this byte (char)
                {
                    buffer.AdjustBytes(+1);             // Need to readd the byte that AddChar subtracted when it failed
                    bitCount += 16;                     // We'll still need that char we have in our bits
                }
                break;                                  // didn't throw, stop
            }
        }
    }

    // Stick stuff in the decoder if we can (chars == null if counting, so don't store decoder)
    if (chars != null && decoder != null)
    {
        // MustFlush? (Could've been cleared by ThrowCharsOverflow if Convert & didn't reach end of buffer)
        if (decoder.MustFlush)
        {
            // RFC doesn't specify what would happen if we have non-0 leftover bits, we just drop them
            decoder.bits = 0;
            decoder.bitCount = -1;
            decoder.firstByte = false;
        }
        else
        {
            decoder.bits = bits;
            decoder.bitCount = bitCount;
            decoder.firstByte = firstByte;
        }
        decoder.m_bytesUsed = buffer.BytesUsed;
    }
    // else ignore any hanging bits.

    // Return our count
    return buffer.Count;
}
// Returns a stateful decoder that carries leftover base 64 bits between calls.
public override System.Text.Decoder GetDecoder()
{
    return new UTF7Encoding.Decoder(this);
}

// Returns a stateful encoder that carries pending bits between calls.
public override System.Text.Encoder GetEncoder()
{
    return new UTF7Encoding.Encoder(this);
}
public override int GetMaxByteCount(int charCount)
{
    if (charCount < 0)
        throw new ArgumentOutOfRangeException(nameof(charCount),
            SR.ArgumentOutOfRange_NeedNonNegNum);
    Contract.EndContractBlock();

    // Worst case: no char is direct-encodable. Each base 64 byte carries 6 bits
    // of the 16-bit code unit, plus shift-in ('+') and shift-out ('-') marks:
    //     byteCount = 2 + ceil(charCount * 16 / 6)  <=  2 + 3 * charCount
    // The alternating encoded/direct/encoded pattern (5 + 1 + 5... bytes/char)
    // is also covered: 3 bytes per char (18 bits of encoding) leaves room to
    // turn base 64 mode off and back on around a direct char. Even with no
    // input we may still need one '-' to close the mode when MustFlush is set.
    // UTF7 encodes surrogates individually and never worries about mismatches,
    // so every code point is encodable in UTF7.
    long byteCount = 2 + 3L * charCount;

    // Guard against int overflow before narrowing.
    if (byteCount > 0x7fffffff)
        throw new ArgumentOutOfRangeException(nameof(charCount), SR.ArgumentOutOfRange_GetByteCountOverflow);

    return (int)byteCount;
}
public override int GetMaxCharCount(int byteCount)
{
    if (byteCount < 0)
        throw new ArgumentOutOfRangeException(nameof(byteCount),
            SR.ArgumentOutOfRange_NeedNonNegNum);
    Contract.EndContractBlock();

    // Worst case is one char per byte. Return at least 1 so a flushing decoder
    // with leftover bits has room; extra bits are ignored per the spec, so
    // there is no unknown expansion in this direction.
    return byteCount == 0 ? 1 : byteCount;
}
// Of all the amazing things... This MUST be Decoder so that our com name
// for System.Text.Decoder doesn't change
private sealed class Decoder : DecoderNLS, ISerializable
{
    // Accumulated base 64 bits not yet emitted as a char.
    /*private*/
    internal int bits;
    // Number of valid bits in 'bits'; -1 when not in base 64 (shifted) mode.
    /*private*/
    internal int bitCount;
    // True when the last byte seen was the '+' that opened a shifted block.
    /*private*/
    internal bool firstByte;

    public Decoder(UTF7Encoding encoding) : base(encoding)
    {
        // base calls reset
    }

    // ISerializable implementation, get data for this object
    void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
    {
        throw new PlatformNotSupportedException();
    }

    // Clears all carried state (and the fallback buffer, if any).
    public override void Reset()
    {
        this.bits = 0;
        this.bitCount = -1;
        this.firstByte = false;
        if (m_fallbackBuffer != null)
            m_fallbackBuffer.Reset();
    }

    // Anything left in our decoder?
    internal override bool HasState
    {
        get
        {
            // NOTE: This forces the last -, which some encoder might not encode. If we
            // don't see it we don't think we're done reading.
            return (this.bitCount != -1);
        }
    }
}
// Of all the amazing things... This MUST be Encoder so that our com name
// for System.Text.Encoder doesn't change
private sealed class Encoder : EncoderNLS, ISerializable
{
    // Pending Unicode bits not yet emitted as base 64 bytes.
    /*private*/
    internal int bits;
    // Number of valid bits in 'bits'; -1 when not in base 64 (shifted) mode.
    /*private*/
    internal int bitCount;

    public Encoder(UTF7Encoding encoding) : base(encoding)
    {
        // base calls reset
    }

    // ISerializable implementation
    void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
    {
        throw new PlatformNotSupportedException();
    }

    // Clears all carried state (and the fallback buffer, if any).
    public override void Reset()
    {
        this.bitCount = -1;
        this.bits = 0;
        if (m_fallbackBuffer != null)
            m_fallbackBuffer.Reset();
    }

    // Anything left in our encoder?
    internal override bool HasState
    {
        get
        {
            return (this.bits != 0 || this.bitCount != -1);
        }
    }
}
// Preexisting UTF7 behavior for bad bytes was just to spit out the byte as the next char
// and turn off base64 mode if it was in that mode. We still exit the mode, but now we fallback.
private sealed class DecoderUTF7Fallback : DecoderFallback
{
    // Construction. Default replacement fallback uses no best fit and ? replacement string
    public DecoderUTF7Fallback()
    {
    }

    public override DecoderFallbackBuffer CreateFallbackBuffer()
    {
        return new DecoderUTF7FallbackBuffer(this);
    }

    // Maximum number of characters that this instance of this fallback could return
    public override int MaxCharCount
    {
        get
        {
            // returns 1 char per bad byte
            return 1;
        }
    }

    // All instances are interchangeable (no configuration), so any two are equal.
    public override bool Equals(Object value)
    {
        DecoderUTF7Fallback that = value as DecoderUTF7Fallback;
        if (that != null)
        {
            return true;
        }
        return (false);
    }

    // Arbitrary constant: every instance hashes the same, matching Equals above.
    public override int GetHashCode()
    {
        return 984;
    }
}
// Buffer that replays a single bad byte as a single char (zero-extended).
private sealed class DecoderUTF7FallbackBuffer : DecoderFallbackBuffer
{
    // Store our default string
    private char cFallback = (char)0;
    // Chars remaining to hand out; -1 means no fallback is in progress.
    private int iCount = -1;
    // Size of the current fallback (always 1 when active); used by MovePrevious.
    private int iSize;

    // Construction
    public DecoderUTF7FallbackBuffer(DecoderUTF7Fallback fallback)
    {
    }

    // Fallback Methods
    public override bool Fallback(byte[] bytesUnknown, int index)
    {
        // We expect no previous fallback in our buffer
        Debug.Assert(iCount < 0, "[DecoderUTF7FallbackBuffer.Fallback] Can't have recursive fallbacks");
        Debug.Assert(bytesUnknown.Length == 1, "[DecoderUTF7FallbackBuffer.Fallback] Only possible fallback case should be 1 unknown byte");

        // Go ahead and get our fallback
        cFallback = (char)bytesUnknown[0];

        // Any of the fallback characters can be handled except for 0
        if (cFallback == 0)
        {
            return false;
        }

        iCount = iSize = 1;

        return true;
    }

    public override char GetNextChar()
    {
        if (iCount-- > 0)
            return cFallback;

        // Note: this means that 0 in UTF7 stream will never be emitted.
        return (char)0;
    }

    public override bool MovePrevious()
    {
        if (iCount >= 0)
        {
            iCount++;
        }

        // return true if we were allowed to do this
        return (iCount >= 0 && iCount <= iSize);
    }

    // Return # of chars left in this fallback
    public override int Remaining
    {
        get
        {
            return (iCount > 0) ? iCount : 0;
        }
    }

    // Clear the buffer
    public override unsafe void Reset()
    {
        iCount = -1;
        byteStart = null;
    }

    // This version just counts the fallback and doesn't actually copy anything.
    internal unsafe override int InternalFallback(byte[] bytes, byte* pBytes)
    // Right now this has both bytes and bytes[], since we might have extra bytes, hence the
    // array, and we might need the index, hence the byte*
    {
        // We expect no previous fallback in our buffer
        Debug.Assert(iCount < 0, "[DecoderUTF7FallbackBuffer.InternalFallback] Can't have recursive fallbacks");
        if (bytes.Length != 1)
        {
            throw new ArgumentException(SR.Argument_InvalidCharSequenceNoIndex);
        }

        // Can't fallback a byte 0, so return for that case, 1 otherwise.
        return bytes[0] == 0 ? 0 : 1;
    }
}
}
}
| |
using System;
using System.Xml;
using System.Web.Caching;
using System.Text;
using System.IO;
using System.Text.RegularExpressions;
using System.Data;
using System.Web.UI;
using System.Collections;
using System.Collections.Generic;
using Umbraco.Core;
using Umbraco.Core.Cache;
using Umbraco.Core.Configuration;
using Umbraco.Web;
using Umbraco.Web.Cache;
using umbraco.DataLayer;
using umbraco.BusinessLogic;
using Umbraco.Core.IO;
using System.Web;
namespace umbraco
{
/// <summary>
/// Holds methods for parsing and building umbraco templates
/// </summary>
[Obsolete("Do not use this class, use Umbraco.Core.Service.IFileService to work with templates")]
public class template
{
#region private variables
// Accumulates the raw template markup (appended via TemplateContent's setter).
readonly StringBuilder _templateOutput = new StringBuilder();
private string _templateDesign = "";
// Id of the master (parent) template; -1 when there is none.
int _masterTemplate = -1;
private string _templateName = "";
private string _templateAlias = "";
#endregion
#region public properties
// The raw template markup. NOTE: the setter APPENDS to the existing content
// rather than replacing it — assigning twice concatenates both values.
public String TemplateContent
{
    set
    {
        _templateOutput.Append(value);
    }
    get
    {
        return _templateOutput.ToString();
    }
}
// Id of the master (parent) template; -1 when there is none.
public int MasterTemplate
{
    get { return _masterTemplate; }
}
//added fallback to the default template to avoid nasty .net errors.
//This is referenced in /default.aspx.cs during page rendering.
public string MasterPageFile
{
    get
    {
        // Derive the .master file name from the alias (spaces stripped).
        string file = TemplateAlias.Replace(" ", "") + ".master";
        string path = SystemDirectories.Masterpages + "/" + file;

        // Fall back to umbraco's built-in default master page when the
        // alias-specific file does not exist on disk.
        if (System.IO.File.Exists(IOHelper.MapPath(VirtualPathUtility.ToAbsolute(path))))
            return path;
        else
            return SystemDirectories.Umbraco + "/masterPages/default.master";
    }
}
//Support for template folders: if an alternative skin folder is requested
//we will try to look for template files in another folder
public string AlternateMasterPageFile(string templateFolder)
{
    string file = TemplateAlias.Replace(" ", "") + ".master";
    string path = SystemDirectories.Masterpages + "/" + templateFolder + "/" + file;

    // If the skin-specific master page doesn't exist yet, create it by copying
    // the default master page and re-pointing its MasterPageFile reference so
    // it resolves relative to the skin folder.
    if (!System.IO.File.Exists(IOHelper.MapPath(VirtualPathUtility.ToAbsolute(path))))
    {
        string originalPath = IOHelper.MapPath(VirtualPathUtility.ToAbsolute(MasterPageFile));
        string copyPath = IOHelper.MapPath(VirtualPathUtility.ToAbsolute(path));

        // Read-only access is sufficient here; the previous FileAccess.ReadWrite
        // could fail needlessly on read-only files or locked deployments.
        string newFile;
        using (var fs = new FileStream(originalPath, FileMode.Open, FileAccess.Read))
        using (var f = new StreamReader(fs))
        {
            newFile = f.ReadToEnd();
        }

        newFile = newFile.Replace("MasterPageFile=\"~/masterpages/", "MasterPageFile=\"");

        using (var fs = new FileStream(copyPath, FileMode.Create, FileAccess.Write))
        using (var replacement = new StreamWriter(fs))
        {
            replacement.Write(newFile);
        }
    }

    return path;
}
// Alias of this template; also used to derive the master page file name.
public string TemplateAlias
{
    get { return _templateAlias; }
}
#endregion
#region public methods
// Identify the template by its display name.
public override string ToString()
{
    return _templateName;
}
/// <summary>
/// Parses the template output into a control tree. If the template contains an
/// &lt;?aspnet_form&gt; processing instruction, the output is split into a header,
/// an ASP.NET form (optionally with a ScriptManager) and a footer; otherwise the
/// whole output is parsed as a single fragment.
/// </summary>
/// <param name="umbPage">The current page, supplying elements for items and macros.</param>
/// <returns>The root control for the parsed template.</returns>
public Control ParseWithControls(page umbPage)
{
    System.Web.HttpContext.Current.Trace.Write("umbracoTemplate", "Start parsing");
    // Marker used elsewhere to track how many macros have been added to the page.
    if (System.Web.HttpContext.Current.Items["macrosAdded"] == null)
        System.Web.HttpContext.Current.Items.Add("macrosAdded", 0);
    // NOTE(review): tempOutput aliases _templateOutput (same StringBuilder instance),
    // so the parsing below mutates the template's own buffer.
    StringBuilder tempOutput = _templateOutput;
    Control pageLayout = new Control();
    Control pageHeader = new Control();
    Control pageFooter = new Control();
    Control pageContent = new Control();
    System.Web.UI.HtmlControls.HtmlForm pageForm = new System.Web.UI.HtmlControls.HtmlForm();
    System.Web.UI.HtmlControls.HtmlHead pageAspNetHead = new System.Web.UI.HtmlControls.HtmlHead();
    // Find header and footer of page if there is an aspnet-form on page.
    // NOTE(review): IndexOf(...) > 0 means a form tag at the very start of the
    // template (index 0) would not be detected — presumably never the case in practice.
    if (_templateOutput.ToString().ToLower().IndexOf("<?aspnet_form>") > 0 ||
        _templateOutput.ToString().ToLower().IndexOf("<?aspnet_form disablescriptmanager=\"true\">") > 0)
    {
        pageForm.Attributes.Add("method", "post");
        pageForm.Attributes.Add("action", Convert.ToString(System.Web.HttpContext.Current.Items["VirtualUrl"]));
        // Find header and footer from tempOutput.
        // 14 = length of "<?aspnet_form>"; 15 = length of "</?aspnet_form>".
        int aspnetFormTagBegin = tempOutput.ToString().ToLower().IndexOf("<?aspnet_form>");
        int aspnetFormTagLength = 14;
        int aspnetFormTagEnd = tempOutput.ToString().ToLower().IndexOf("</?aspnet_form>") + 15;
        // check if we should disable the script manager
        if (aspnetFormTagBegin == -1)
        {
            // Only the disablescriptmanager variant is present (42 = its tag length);
            // no ScriptManager is added in this case.
            aspnetFormTagBegin =
                _templateOutput.ToString().ToLower().IndexOf("<?aspnet_form disablescriptmanager=\"true\">");
            aspnetFormTagLength = 42;
        }
        else
        {
            // Plain form tag: inject a ScriptManager for ajax support.
            ScriptManager sm = new ScriptManager();
            sm.ID = "umbracoScriptManager";
            pageForm.Controls.Add(sm);
        }
        StringBuilder header = new StringBuilder(tempOutput.ToString().Substring(0, aspnetFormTagBegin));
        // Check if there's an asp.net head element in the header
        if (header.ToString().ToLower().Contains("<?aspnet_head>"))
        {
            // Split the header into: before <?aspnet_head>, the head content itself,
            // and after </?aspnet_head> (14/15 = tag lengths as above).
            StringBuilder beforeHeader = new StringBuilder(header.ToString().Substring(0, header.ToString().ToLower().IndexOf("<?aspnet_head>")));
            header.Remove(0, header.ToString().ToLower().IndexOf("<?aspnet_head>") + 14);
            StringBuilder afterHeader = new StringBuilder(header.ToString().Substring(header.ToString().ToLower().IndexOf("</?aspnet_head>") + 15, header.Length - header.ToString().ToLower().IndexOf("</?aspnet_head>") - 15));
            header.Remove(header.ToString().ToLower().IndexOf("</?aspnet_head>"), header.Length - header.ToString().ToLower().IndexOf("</?aspnet_head>"));
            // Find the title from head: parse it separately so umbraco tags inside
            // <title> are resolved, then assign the rendered text to the HtmlHead.
            MatchCollection matches = Regex.Matches(header.ToString(), @"<title>(.*?)</title>", RegexOptions.IgnoreCase | RegexOptions.Multiline);
            if (matches.Count > 0)
            {
                StringBuilder titleText = new StringBuilder();
                HtmlTextWriter titleTextTw = new HtmlTextWriter(new System.IO.StringWriter(titleText));
                parseStringBuilder(new StringBuilder(matches[0].Groups[1].Value), umbPage).RenderControl(titleTextTw);
                pageAspNetHead.Title = titleText.ToString();
                header = new StringBuilder(header.ToString().Replace(matches[0].Value, ""));
            }
            pageAspNetHead.Controls.Add(parseStringBuilder(header, umbPage));
            pageAspNetHead.ID = "head1";
            // build the whole header part
            pageHeader.Controls.Add(parseStringBuilder(beforeHeader, umbPage));
            pageHeader.Controls.Add(pageAspNetHead);
            pageHeader.Controls.Add(parseStringBuilder(afterHeader, umbPage));
        }
        else
            pageHeader.Controls.Add(parseStringBuilder(header, umbPage));
        // Footer = everything after the closing form tag; then trim tempOutput down
        // to just the form body before parsing it into the HtmlForm.
        pageFooter.Controls.Add(parseStringBuilder(new StringBuilder(tempOutput.ToString().Substring(aspnetFormTagEnd, tempOutput.Length - aspnetFormTagEnd)), umbPage));
        tempOutput.Remove(0, aspnetFormTagBegin + aspnetFormTagLength);
        aspnetFormTagEnd = tempOutput.ToString().ToLower().IndexOf("</?aspnet_form>");
        tempOutput.Remove(aspnetFormTagEnd, tempOutput.Length - aspnetFormTagEnd);
        //throw new ArgumentException(tempOutput.ToString());
        pageForm.Controls.Add(parseStringBuilder(tempOutput, umbPage));
        // Assemble header / form / footer in document order.
        pageContent.Controls.Add(pageHeader);
        pageContent.Controls.Add(pageForm);
        pageContent.Controls.Add(pageFooter);
        return pageContent;
    }
    else
        return parseStringBuilder(tempOutput, umbPage);
}
/// <summary>
/// Parses a template fragment, turning &lt;?umbraco...&gt; processing instructions
/// (UMBRACO_MACRO / UMBRACO_GETITEM) into controls and all other markup into
/// literal controls.
/// </summary>
/// <param name="tempOutput">The markup to parse; consumed destructively (the builder is emptied).</param>
/// <param name="umbPage">The current page, supplying page elements for items and macros.</param>
/// <returns>A container control holding the parsed output.</returns>
public Control parseStringBuilder(StringBuilder tempOutput, page umbPage)
{
    Control pageContent = new Control();
    // NOTE(review): 'stop' is never set to true; the loop exits via the break in
    // the else-branch once no more "<?umbraco" tags remain.
    bool stop = false;
    bool debugMode = umbraco.presentation.UmbracoContext.Current.Request.IsDebug;
    while (!stop)
    {
        System.Web.HttpContext.Current.Trace.Write("template", "Begining of parsing rutine...");
        int tagIndex = tempOutput.ToString().ToLower().IndexOf("<?umbraco");
        if (tagIndex > -1)
        {
            String tempElementContent = "";
            // Everything before the tag is plain markup.
            pageContent.Controls.Add(new LiteralControl(tempOutput.ToString().Substring(0, tagIndex)));
            tempOutput.Remove(0, tagIndex);
            String tag = tempOutput.ToString().Substring(0, tempOutput.ToString().IndexOf(">") + 1);
            Hashtable attributes = helper.ReturnAttributes(tag);
            // Check whether it's a single tag (<?.../>) or a tag with children (<?..>...</?...>)
            if (tag.Substring(tag.Length - 2, 1) != "/" && tag.IndexOf(" ") > -1)
            {
                String closingTag = "</" + (tag.Substring(1, tag.IndexOf(" ") - 1)) + ">";
                // Tag with children are only used when a macro is inserted by the umbraco-editor, in the
                // following format: "<?UMBRACO_MACRO ...><IMG SRC="..."..></?UMBRACO_MACRO>", so we
                // need to delete extra information inserted which is the image-tag and the closing
                // umbraco_macro tag
                if (tempOutput.ToString().IndexOf(closingTag) > -1)
                {
                    tempOutput.Remove(0, tempOutput.ToString().IndexOf(closingTag));
                }
            }
            System.Web.HttpContext.Current.Trace.Write("umbTemplate", "Outputting item: " + tag);
            // Handle umbraco macro tags
            if (tag.ToString().ToLower().IndexOf("umbraco_macro") > -1)
            {
                // In debug mode, wrap the macro output in a bordered div for inspection.
                if (debugMode)
                    pageContent.Controls.Add(new LiteralControl("<div title=\"Macro Tag: '" + System.Web.HttpContext.Current.Server.HtmlEncode(tag) + "'\" style=\"border: 1px solid #009;\">"));
                // NH: Switching to custom controls for macros
                if (UmbracoConfig.For.UmbracoSettings().Templates.UseAspNetMasterPages)
                {
                    // Master-page mode: emit a Macro control carrying all tag attributes.
                    umbraco.presentation.templateControls.Macro macroControl = new umbraco.presentation.templateControls.Macro();
                    macroControl.Alias = helper.FindAttribute(attributes, "macroalias");
                    IDictionaryEnumerator ide = attributes.GetEnumerator();
                    while (ide.MoveNext())
                        if (macroControl.Attributes[ide.Key.ToString()] == null)
                            macroControl.Attributes.Add(ide.Key.ToString(), ide.Value.ToString());
                    pageContent.Controls.Add(macroControl);
                }
                else
                {
                    // Legacy mode: resolve the macro by id (preferred) or alias and
                    // render it immediately.
                    macro tempMacro;
                    String macroID = helper.FindAttribute(attributes, "macroid");
                    if (macroID != String.Empty)
                        tempMacro = getMacro(macroID);
                    else
                        tempMacro = macro.GetMacro(helper.FindAttribute(attributes, "macroalias"));
                    if (tempMacro != null)
                    {
                        try
                        {
                            Control c = tempMacro.renderMacro(attributes, umbPage.Elements, umbPage.PageID);
                            if (c != null)
                                pageContent.Controls.Add(c);
                            else
                                System.Web.HttpContext.Current.Trace.Warn("Template", "Result of macro " + tempMacro.Name + " is null");
                        }
                        catch (Exception e)
                        {
                            // A failing macro must not break the whole page; log and continue.
                            System.Web.HttpContext.Current.Trace.Warn("Template", "Error adding macro " + tempMacro.Name, e);
                        }
                    }
                }
                if (debugMode)
                    pageContent.Controls.Add(new LiteralControl("</div>"));
            }
            else
            {
                if (tag.ToLower().IndexOf("umbraco_getitem") > -1)
                {
                    // NH: Switching to custom controls for items
                    if (UmbracoConfig.For.UmbracoSettings().Templates.UseAspNetMasterPages)
                    {
                        // Master-page mode: emit an Item control carrying all tag attributes.
                        umbraco.presentation.templateControls.Item itemControl = new umbraco.presentation.templateControls.Item();
                        itemControl.Field = helper.FindAttribute(attributes, "field");
                        IDictionaryEnumerator ide = attributes.GetEnumerator();
                        while (ide.MoveNext())
                            if (itemControl.Attributes[ide.Key.ToString()] == null)
                                itemControl.Attributes.Add(ide.Key.ToString(), ide.Value.ToString());
                        pageContent.Controls.Add(itemControl);
                    }
                    else
                    {
                        try
                        {
                            // A nodeId attribute means the field comes from another node;
                            // otherwise read it from the current page's elements.
                            if (helper.FindAttribute(attributes, "nodeId") != "" && int.Parse(helper.FindAttribute(attributes, "nodeId")) != 0)
                            {
                                cms.businesslogic.Content c = new umbraco.cms.businesslogic.Content(int.Parse(helper.FindAttribute(attributes, "nodeId")));
                                item umbItem = new item(c.getProperty(helper.FindAttribute(attributes, "field")).Value.ToString(), attributes);
                                tempElementContent = umbItem.FieldContent;
                                // Check if the content is published
                                if (c.nodeObjectType == cms.businesslogic.web.Document._objectType)
                                {
                                    try
                                    {
                                        cms.businesslogic.web.Document d = (cms.businesslogic.web.Document)c;
                                        // Suppress unpublished document content.
                                        if (!d.Published)
                                            tempElementContent = "";
                                    }
                                    catch { }
                                }
                            }
                            else
                            {
                                // NH adds Live Editing test stuff
                                item umbItem = new item(umbPage.Elements, attributes);
                                // item umbItem = new item(umbPage.PageElements[helper.FindAttribute(attributes, "field")].ToString(), attributes);
                                tempElementContent = umbItem.FieldContent;
                            }
                            if (debugMode)
                                tempElementContent =
                                    "<div title=\"Field Tag: '" + System.Web.HttpContext.Current.Server.HtmlEncode(tag) + "'\" style=\"border: 1px solid #fc6;\">" + tempElementContent + "</div>";
                        }
                        catch (Exception e)
                        {
                            System.Web.HttpContext.Current.Trace.Warn("umbracoTemplate", "Error reading element (" + helper.FindAttribute(attributes, "field") + ")", e);
                        }
                    }
                }
            }
            // Drop the processed tag and splice the resolved content (empty for macros,
            // which were added as controls above) back in front; the next iteration
            // re-scans it for nested umbraco tags.
            tempOutput.Remove(0, tempOutput.ToString().IndexOf(">") + 1);
            tempOutput.Insert(0, tempElementContent);
        }
        else
        {
            // No more umbraco tags: emit the remainder as a literal and stop.
            pageContent.Controls.Add(new LiteralControl(tempOutput.ToString()));
            break;
        }
    }
    return pageContent;
}
/// <summary>
/// Converts internal {localLink:...} references in the supplied markup to real URLs.
/// Kept only for backwards compatibility; all work is delegated to the new API.
/// </summary>
/// <param name="pageContents">The markup to process.</param>
/// <returns>The markup with internal links resolved.</returns>
[Obsolete("Use Umbraco.Web.Templates.TemplateUtilities.ParseInternalLinks instead")]
public static string ParseInternalLinks(string pageContents)
{
    string parsed = Umbraco.Web.Templates.TemplateUtilities.ParseInternalLinks(pageContents);
    return parsed;
}
/// <summary>
/// Parses the content of the templateOutput stringbuilder, and matches any tags given in the
/// XML-file /umbraco/config/umbracoTemplateTags.xml.
/// Replaces the found tags in the StringBuilder object, with "real content".
/// Unlike <see cref="ParseWithControls"/>, this performs pure string substitution
/// on the template buffer instead of building a control tree.
/// </summary>
/// <param name="umbPage">The current page, supplying the element values substituted for getitem tags.</param>
public void Parse(page umbPage)
{
    System.Web.HttpContext.Current.Trace.Write("umbracoTemplate", "Start parsing");
    // First parse for known umbraco tags
    // <?UMBRACO_MACRO/> - macros
    // <?UMBRACO_GETITEM/> - print item from page, level, or recursive
    MatchCollection tags = Regex.Matches(_templateOutput.ToString(), "<\\?UMBRACO_MACRO[^>]*/>|<\\?UMBRACO_GETITEM[^>]*/>|<\\?(?<tagName>[\\S]*)[^>]*/>", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace);
    foreach (Match tag in tags)
    {
        Hashtable attributes = helper.ReturnAttributes(tag.Value.ToString());
        if (tag.ToString().ToLower().IndexOf("umbraco_macro") > -1)
        {
            // Macro tag: replace it with the rendered macro content.
            // NOTE(review): a macro tag without a macroid attribute is silently left in place.
            String macroID = helper.FindAttribute(attributes, "macroid");
            if (macroID != "")
            {
                macro tempMacro = getMacro(macroID);
                _templateOutput.Replace(tag.Value.ToString(), tempMacro.MacroContent.ToString());
            }
        }
        else
        {
            if (tag.ToString().ToLower().IndexOf("umbraco_getitem") > -1)
            {
                try
                {
                    // Look up the page element named by the tag's "field" attribute.
                    String tempElementContent = umbPage.Elements[helper.FindAttribute(attributes, "field")].ToString();
                    // Editor-inserted macros inside the element content appear as
                    // "<?UMBRACO_MACRO ...><img ...></?UMBRACO_MACRO>"; render those too.
                    MatchCollection tempMacros = Regex.Matches(tempElementContent, "<\\?UMBRACO_MACRO(?<attributes>[^>]*)><img[^>]*><\\/\\?UMBRACO_MACRO>", RegexOptions.IgnoreCase | RegexOptions.IgnorePatternWhitespace);
                    foreach (Match tempMacro in tempMacros)
                    {
                        Hashtable tempAttributes = helper.ReturnAttributes(tempMacro.Groups["attributes"].Value.ToString());
                        String macroID = helper.FindAttribute(tempAttributes, "macroid");
                        if (Convert.ToInt32(macroID) > 0)
                        {
                            macro tempContentMacro = getMacro(macroID);
                            // NOTE(review): this replaces the outer getitem tag, not the
                            // embedded macro markup — presumably intentional legacy behavior.
                            _templateOutput.Replace(tag.Value.ToString(), tempContentMacro.MacroContent.ToString());
                        }
                    }
                    _templateOutput.Replace(tag.Value.ToString(), tempElementContent);
                }
                catch (Exception e)
                {
                    // A missing/unreadable element must not break parsing; log and continue.
                    System.Web.HttpContext.Current.Trace.Warn("umbracoTemplate", "Error reading element (" + helper.FindAttribute(attributes, "field") + ")", e);
                }
            }
        }
    }
    System.Web.HttpContext.Current.Trace.Write("umbracoTemplate", "Done parsing");
}
#endregion
#region private methods
/// <summary>
/// Loads a macro by its id, writing a trace entry first.
/// </summary>
/// <param name="macroID">The macro id as a string; must parse as a non-negative integer.</param>
/// <returns>The macro with the given id.</returns>
private macro getMacro(String macroID)
{
    // macroID is already a string, so no ToString() call is needed in the trace message.
    System.Web.HttpContext.Current.Trace.Write("umbracoTemplate", "Starting macro (" + macroID + ")");
    // Convert.ToInt32 instead of Convert.ToInt16: ids above short.MaxValue (32767)
    // would overflow an Int16 and throw an OverflowException.
    return macro.GetMacro(Convert.ToInt32(macroID));
}
/// <summary>
/// Looks up an attribute value by key, returning an empty string when the key is absent.
/// </summary>
/// <param name="attributes">The attribute table parsed from a template tag.</param>
/// <param name="key">The attribute name to look up.</param>
/// <returns>The attribute value, or "" when the key is not present.</returns>
private String FindAttribute(Hashtable attributes, String key)
{
    // Hashtable's indexer yields null for a missing key; normalise that to "".
    object value = attributes[key];
    return value == null ? "" : value.ToString();
}
#endregion
/// <summary>
/// Unused, please do not use.
/// Legacy accessor for the shared SQL helper; retained only so existing
/// subclasses keep compiling.
/// </summary>
[Obsolete("Obsolete, For querying the database use the new UmbracoDatabase object ApplicationContext.Current.DatabaseContext.Database", false)]
protected static ISqlHelper SqlHelper
{
    get { return Application.SqlHelper; }
}
#region constructors
/// <summary>
/// Resolves the master page path for a template using the default masterpages location.
/// </summary>
/// <param name="templateID">Id of the template to resolve.</param>
/// <returns>The virtual path of the template's master page.</returns>
public static string GetMasterPageName(int templateID)
{
    // No alternate skin folder requested: delegate with a null folder.
    return GetMasterPageName(templateID, templateFolder: null);
}
/// <summary>
/// Resolves the master page path for a template, optionally using an alternate
/// skin/template folder.
/// </summary>
/// <param name="templateID">Id of the template to resolve.</param>
/// <param name="templateFolder">Optional alternate folder; null or empty uses the default master page.</param>
/// <returns>The virtual path of the template's master page.</returns>
public static string GetMasterPageName(int templateID, string templateFolder)
{
    var resolved = new template(templateID);
    // Without a folder, the normal master page file is used.
    if (string.IsNullOrEmpty(templateFolder))
        return resolved.MasterPageFile;
    // Otherwise resolve (and, if needed, generate) the folder-specific copy.
    return resolved.AlternateMasterPageFile(templateFolder);
}
/// <summary>
/// Loads a template by id, using the runtime cache to avoid re-querying the
/// database, and copies the cached instance's state into this instance.
/// </summary>
/// <param name="templateID">Id of the template to load.</param>
/// <exception cref="InvalidOperationException">Thrown when no template with the given id exists.</exception>
public template(int templateID)
{
    var tId = templateID;
    // On a cache miss the factory lambda loads this instance from the database and
    // caches it; on a hit, 't' is the previously cached instance whose fields we copy.
    var t = ApplicationContext.Current.ApplicationCache.RuntimeCache.GetCacheItem<template>(
        string.Format("{0}{1}", CacheKeys.TemplateFrontEndCacheKey, tId), () =>
        {
            using (var sqlHelper = Application.SqlHelper)
            using (var templateData = sqlHelper.ExecuteReader(@"select nodeId, alias, node.parentID as master, text, design
from cmsTemplate
inner join umbracoNode node on (node.id = cmsTemplate.nodeId)
where nodeId = @templateID",
                sqlHelper.CreateParameter("@templateID", templateID)))
            {
                if (templateData.Read())
                {
                    // Get template master and replace content where the template
                    if (!templateData.IsNull("master"))
                        _masterTemplate = templateData.GetInt("master");
                    if (!templateData.IsNull("alias"))
                        _templateAlias = templateData.GetString("alias");
                    if (!templateData.IsNull("text"))
                        _templateName = templateData.GetString("text");
                    if (!templateData.IsNull("design"))
                        _templateDesign = templateData.GetString("design");
                }
            }
            return this;
        });
    if (t == null)
        throw new InvalidOperationException("Could not find a template with id " + templateID);
    // Copy state from the cached instance (a no-op when 't' is this instance).
    // The original code assigned _masterTemplate twice; once is enough.
    _masterTemplate = t._masterTemplate;
    _templateAlias = t._templateAlias;
    _templateDesign = t._templateDesign;
    _templateName = t._templateName;
    // Only check for master on legacy templates - can show error when using master pages.
    if (!UmbracoConfig.For.UmbracoSettings().Templates.UseAspNetMasterPages)
    {
        checkForMaster(tId);
    }
}
// Resolves this template against its master (legacy, non-masterpage mode):
// if a master exists and contains a LOAD_CHILD placeholder, the child design is
// spliced into the master's content; otherwise the child design is used as-is.
// A self-referencing master is detected and only warned about.
// NOTE(review): loading the master recursively invokes this check; an indirect
// cycle (A -> B -> A) would presumably recurse indefinitely — only direct
// self-reference is guarded. TODO confirm upstream data prevents cycles.
private void checkForMaster(int templateID) {
    // Get template design
    if (_masterTemplate != 0 && _masterTemplate != templateID) {
        // Load the master template (recursive: its own master is resolved too).
        template masterTemplateDesign = new template(_masterTemplate);
        if (masterTemplateDesign.TemplateContent.IndexOf("<?UMBRACO_TEMPLATE_LOAD_CHILD/>") > -1
            || masterTemplateDesign.TemplateContent.IndexOf("<?UMBRACO_TEMPLATE_LOAD_CHILD />") > -1) {
            // Replace both placeholder spellings with this template's design.
            _templateOutput.Append(
                masterTemplateDesign.TemplateContent.Replace("<?UMBRACO_TEMPLATE_LOAD_CHILD/>",
                    _templateDesign).Replace("<?UMBRACO_TEMPLATE_LOAD_CHILD />", _templateDesign)
            );
        } else
            // Master has no placeholder: the child design stands alone.
            _templateOutput.Append(_templateDesign);
    } else {
        if (_masterTemplate == templateID)
        {
            // Direct self-reference: warn and fall back to the child design to
            // avoid an endless loop.
            cms.businesslogic.template.Template t = cms.businesslogic.template.Template.GetTemplate(templateID);
            string templateName = (t != null) ? t.Text : string.Format("'Template with id: '{0}", templateID);
            System.Web.HttpContext.Current.Trace.Warn("template",
                String.Format("Master template is the same as the current template. It would cause an endless loop! Make sure that the current template '{0}' has another Master Template than itself. You can change this in the template editor under 'Settings'", templateName));
            _templateOutput.Append(_templateDesign);
        }
    }
}
/// <summary>
/// Evicts the cached instance of the given template.
/// </summary>
/// <param name="templateID">Id of the template whose cache entry should be refreshed.</param>
[Obsolete("Use ApplicationContext.Current.ApplicationCache.ClearCacheForTemplate instead")]
public static void ClearCachedTemplate(int templateID)
{
    // Distributed call so all servers in a load-balanced setup refresh their cache.
    DistributedCache.Instance.RefreshTemplateCache(templateID);
}
/// <summary>
/// Creates a template directly from raw content, bypassing the database.
/// </summary>
/// <param name="templateContent">The raw template markup.</param>
public template(String templateContent)
{
    // A template built from raw content has no master template.
    _masterTemplate = 0;
    _templateOutput.Append(templateContent);
}
#endregion
}
}
| |
// Copyright 2010-2021 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Linq;
using Google.OrTools.Sat;
/// <summary>
/// Creates a shift scheduling problem and solves it with CP-SAT.
/// Shifts are "O" (off), "M" (morning), "A" (afternoon) and "N" (night);
/// soft constraints are turned into weighted penalty terms in the objective.
/// </summary>
public class ShiftSchedulingSat
{
    // Entry point: build and solve the fixed example instance.
    static void Main(string[] args)
    {
        SolveShiftScheduling();
    }

    /// <summary>
    /// Builds the scheduling model (hard and soft constraints), solves it, and
    /// prints the schedule plus the violated penalty terms.
    /// </summary>
    static void SolveShiftScheduling()
    {
        int numEmployees = 8;
        int numWeeks = 3;
        // Shift codes: index 0 is the "off" shift; indices 1..3 are work shifts.
        var shifts = new[] { "O", "M", "A", "N" };

        // Fixed assignment: (employee, shift, day).
        // This fixes the first 2 days of the schedule.
        var fixedAssignments = new(int Employee, int Shift, int Day)[] {
            (0, 0, 0), (1, 0, 0), (2, 1, 0), (3, 1, 0), (4, 2, 0), (5, 2, 0), (6, 2, 3), (7, 3, 0),
            (0, 1, 1), (1, 1, 1), (2, 2, 1), (3, 2, 1), (4, 2, 1), (5, 0, 1), (6, 0, 1), (7, 3, 1),
        };

        // Request: (employee, shift, day, weight)
        // A negative weight indicates that the employee desires this assignment.
        var requests = new(int Employee, int Shift, int Day,
            int Weight)[] {// Employee 3 wants the first Saturday off.
                           (3, 0, 5, -2),
                           // Employee 5 wants a night shift on the second Thursday.
                           (5, 3, 10, -2),
                           // Employee 2 does not want a night shift on the first Friday.
                           (2, 3, 4, 4)
        };

        // Shift constraints on continuous sequence :
        //     (shift, hard_min, soft_min, min_penalty,
        //             soft_max, hard_max, max_penalty)
        var shiftConstraints =
            new(int Shift, int HardMin, int SoftMin, int MinPenalty, int SoftMax, int HardMax, int MaxPenalty)[] {
                // One or two consecutive days of rest, this is a hard constraint.
                (0, 1, 1, 0, 2, 2, 0),
                // Between 2 and 3 consecutive days of night shifts, 1 and 4 are
                // possible but penalized.
                (3, 1, 2, 20, 3, 4, 5),
            };

        // Weekly sum constraints on shifts days:
        //     (shift, hardMin, softMin, minPenalty,
        //             softMax, hardMax, maxPenalty)
        var weeklySumConstraints =
            new(int Shift, int HardMin, int SoftMin, int MinPenalty, int SoftMax, int HardMax, int MaxPenalty)[] {
                // Constraints on rests per week.
                (0, 1, 2, 7, 2, 3, 4),
                // At least 1 night shift per week (penalized). At most 4 (hard).
                (3, 0, 1, 3, 4, 4, 0),
            };

        // Penalized transitions:
        //     (previous_shift, next_shift, penalty (0 means forbidden))
        var penalizedTransitions = new(int PreviousShift, int NextShift, int Penalty)[] {
            // Afternoon to night has a penalty of 4.
            (2, 3, 4),
            // Night to morning is forbidden.
            (3, 1, 0),
        };

        // daily demands for work shifts (morning, afternoon, night) for each day
        // of the week starting on Monday.
        var weeklyCoverDemands = new int[][] {
            new[] { 2, 3, 1 }, // Monday
            new[] { 2, 3, 1 }, // Tuesday
            new[] { 2, 2, 2 }, // Wednesday
            new[] { 2, 3, 1 }, // Thursday
            new[] { 2, 2, 2 }, // Friday
            new[] { 1, 2, 3 }, // Saturday
            new[] { 1, 3, 1 }, // Sunday
        };

        // Penalty for exceeding the cover constraint per shift type.
        var excessCoverPenalties = new[] { 2, 2, 5 };

        var numDays = numWeeks * 7;
        var numShifts = shifts.Length;

        var model = new CpModel();

        // work[e, s, d] == 1 iff employee e works shift s on day d.
        IntVar[,,] work = new IntVar[numEmployees, numShifts, numDays];
        foreach (int e in Range(numEmployees))
        {
            foreach (int s in Range(numShifts))
            {
                foreach (int d in Range(numDays))
                {
                    work[e, s, d] = model.NewBoolVar($"work{e}_{s}_{d}");
                }
            }
        }

        // Linear terms of the objective in a minimization context.
        var objIntVars = new List<IntVar>();
        var objIntCoeffs = new List<int>();
        var objBoolVars = new List<IntVar>();
        var objBoolCoeffs = new List<int>();

        // Exactly one shift per day (the "off" shift counts as a shift).
        foreach (int e in Range(numEmployees))
        {
            foreach (int d in Range(numDays))
            {
                var temp = new IntVar[numShifts];
                foreach (int s in Range(numShifts))
                {
                    temp[s] = work[e, s, d];
                }
                model.Add(LinearExpr.Sum(temp) == 1);
            }
        }

        // Fixed assignments.
        foreach (var (e, s, d) in fixedAssignments)
        {
            model.Add(work[e, s, d] == 1);
        }

        // Employee requests: the request weight is added to the objective when the
        // corresponding assignment is made (negative weights reward it).
        foreach (var (e, s, d, w) in requests)
        {
            objBoolVars.Add(work[e, s, d]);
            objBoolCoeffs.Add(w);
        }

        // Shift constraints on consecutive sequences of the same shift.
        foreach (var constraint in shiftConstraints)
        {
            foreach (int e in Range(numEmployees))
            {
                var works = new IntVar[numDays];
                foreach (int d in Range(numDays))
                {
                    works[d] = work[e, constraint.Shift, d];
                }
                var (variables, coeffs) = AddSoftSequenceConstraint(
                    model, works, constraint.HardMin, constraint.SoftMin, constraint.MinPenalty, constraint.SoftMax,
                    constraint.HardMax, constraint.MaxPenalty,
                    $"shift_constraint(employee {e}, shift {constraint.Shift}");
                objBoolVars.AddRange(variables);
                objBoolCoeffs.AddRange(coeffs);
            }
        }

        // Weekly sum constraints: bound how often each shift occurs per week.
        foreach (var constraint in weeklySumConstraints)
        {
            foreach (int e in Range(numEmployees))
            {
                foreach (int w in Range(numWeeks))
                {
                    var works = new IntVar[7];
                    foreach (int d in Range(7))
                    {
                        works[d] = work[e, constraint.Shift, d + w * 7];
                    }
                    var (variables, coeffs) = AddSoftSumConstraint(
                        model, works, constraint.HardMin, constraint.SoftMin, constraint.MinPenalty, constraint.SoftMax,
                        constraint.HardMax, constraint.MaxPenalty,
                        $"weekly_sum_constraint(employee {e}, shift {constraint.Shift}, week {w}");
                    objBoolVars.AddRange(variables);
                    objBoolCoeffs.AddRange(coeffs);
                }
            }
        }

        // Penalized transitions between consecutive days. A penalty of 0 makes the
        // transition a hard "forbidden" clause; otherwise a penalty literal is added.
        foreach (var penalizedTransition in penalizedTransitions)
        {
            foreach (int e in Range(numEmployees))
            {
                foreach (int d in Range(numDays - 1))
                {
                    var transition = new List<ILiteral>() { work[e, penalizedTransition.PreviousShift, d].Not(),
                                                            work[e, penalizedTransition.NextShift, d + 1].Not() };
                    if (penalizedTransition.Penalty == 0)
                    {
                        model.AddBoolOr(transition);
                    }
                    else
                    {
                        var transVar = model.NewBoolVar($"transition (employee {e}, day={d}");
                        transition.Add(transVar);
                        model.AddBoolOr(transition);
                        objBoolVars.Add(transVar);
                        objBoolCoeffs.Add(penalizedTransition.Penalty);
                    }
                }
            }
        }

        // Cover constraints: meet the daily demand per work shift, penalizing excess.
        foreach (int s in Range(1, numShifts))
        {
            foreach (int w in Range(numWeeks))
            {
                foreach (int d in Range(7))
                {
                    var works = new IntVar[numEmployees];
                    foreach (int e in Range(numEmployees))
                    {
                        works[e] = work[e, s, w * 7 + d];
                    }
                    // Ignore off shift (s starts at 1, so demand index is s - 1).
                    var minDemand = weeklyCoverDemands[d][s - 1];
                    var worked = model.NewIntVar(minDemand, numEmployees, "");
                    model.Add(LinearExpr.Sum(works) == worked);
                    var overPenalty = excessCoverPenalties[s - 1];
                    if (overPenalty > 0)
                    {
                        var name = $"excess_demand(shift={s}, week={w}, day={d}";
                        var excess = model.NewIntVar(0, numEmployees - minDemand, name);
                        model.Add(excess == worked - minDemand);
                        objIntVars.Add(excess);
                        objIntCoeffs.Add(overPenalty);
                    }
                }
            }
        }

        // Objective: minimize the weighted sum of all penalty terms.
        var objBoolSum = LinearExpr.ScalProd(objBoolVars, objBoolCoeffs);
        var objIntSum = LinearExpr.ScalProd(objIntVars, objIntCoeffs);
        model.Minimize(objBoolSum + objIntSum);

        // Solve model
        var solver = new CpSolver();
        solver.StringParameters = "num_search_workers:8, log_search_progress: true, max_time_in_seconds:30";

        var status = solver.Solve(model);

        // Print solution: the per-employee schedule, then violated penalty terms.
        if (status == CpSolverStatus.Optimal || status == CpSolverStatus.Feasible)
        {
            Console.WriteLine();
            var header = " ";
            for (int w = 0; w < numWeeks; w++)
            {
                header += "M T W T F S S ";
            }
            Console.WriteLine(header);

            foreach (int e in Range(numEmployees))
            {
                var schedule = "";
                foreach (int d in Range(numDays))
                {
                    foreach (int s in Range(numShifts))
                    {
                        if (solver.BooleanValue(work[e, s, d]))
                        {
                            schedule += shifts[s] + " ";
                        }
                    }
                }
                Console.WriteLine($"worker {e}: {schedule}");
            }
            Console.WriteLine();
            Console.WriteLine("Penalties:");
            foreach (var (i, var) in objBoolVars.Select((x, i) => (i, x)))
            {
                if (solver.BooleanValue(var))
                {
                    var penalty = objBoolCoeffs[i];
                    if (penalty > 0)
                    {
                        Console.WriteLine($"  {var.Name()} violated, penalty={penalty}");
                    }
                    else
                    {
                        // Negative coefficients are fulfilled requests (rewards).
                        Console.WriteLine($"  {var.Name()} fulfilled, gain={-penalty}");
                    }
                }
            }

            foreach (var (i, var) in objIntVars.Select((x, i) => (i, x)))
            {
                if (solver.Value(var) > 0)
                {
                    Console.WriteLine(
                        $"  {var.Name()} violated by {solver.Value(var)}, linear penalty={objIntCoeffs[i]}");
                }
            }

            Console.WriteLine();
            Console.WriteLine("Statistics");
            Console.WriteLine($"  - status          : {status}");
            Console.WriteLine($"  - conflicts       : {solver.NumConflicts()}");
            Console.WriteLine($"  - branches        : {solver.NumBranches()}");
            Console.WriteLine($"  - wall time       : {solver.WallTime()}");
        }
    }

    /// <summary>
    /// Filters an isolated sub-sequence of variables assigned to True.
    /// Extract the span of Boolean variables [start, start + length), negate them,
    /// and if there are variables to the left / right of this span, surround the
    /// span by them in non negated form.
    /// </summary>
    /// <param name="works">A list of variables to extract the span from.</param>
    /// <param name="start">The start to the span.</param>
    /// <param name="length">The length of the span.</param>
    /// <returns>An array of variables whose conjunction will be false if the
    /// sub-list is assigned to True, and correctly bounded by variables assigned
    /// to False, or by the start or end of works.</returns>
    static ILiteral[] NegatedBoundedSpan(IntVar[] works, int start, int length)
    {
        var sequence = new List<ILiteral>();
        // Left border (start of works or works[start - 1]).
        if (start > 0)
            sequence.Add(works[start - 1]);
        foreach (var i in Range(length))
            sequence.Add(works[start + i].Not());
        // Right border (end of works or works[start + length]).
        if (start + length < works.Length)
            sequence.Add(works[start + length]);
        return sequence.ToArray();
    }

    /// <summary>
    /// Sequence constraint on true variables with soft and hard bounds.
    /// This constraint looks at every maximal contiguous sequence of variables
    /// assigned to true. It forbids sequences of length &lt; hardMin or &gt;
    /// hardMax. Then it creates penalty terms if the length is &lt; softMin or
    /// &gt; softMax.
    /// </summary>
    /// <param name="model">The sequence constraint is built on this
    /// model.</param> <param name="works">A list of Boolean variables.</param>
    /// <param name="hardMin">Any sequence of true variables must have a length of
    /// at least hardMin.</param> <param name="softMin">Any sequence should have a
    /// length of at least softMin, or a linear penalty on the delta will be added
    /// to the objective.</param> <param name="minCost">The coefficient of the
    /// linear penalty if the length is less than softMin.</param> <param
    /// name="softMax">Any sequence should have a length of at most softMax, or a
    /// linear penalty on the delta will be added to the objective.</param> <param
    /// name="hardMax">Any sequence of true variables must have a length of at
    /// most hardMax.</param> <param name="maxCost">The coefficient of the linear
    /// penalty if the length is more than softMax.</param> <param name="prefix">A
    /// base name for penalty literals.</param> <returns>A tuple (costLiterals,
    /// costCoefficients) containing the different penalties created by the
    /// sequence constraint.</returns>
    static (IntVar[] costLiterals, int[] costCoefficients)
        AddSoftSequenceConstraint(CpModel model, IntVar[] works, int hardMin, int softMin, int minCost, int softMax,
                                  int hardMax, int maxCost, string prefix)
    {
        var costLiterals = new List<IntVar>();
        var costCoefficients = new List<int>();

        // Forbid sequences that are too short.
        foreach (var length in Range(1, hardMin))
        {
            foreach (var start in Range(works.Length - length + 1))
            {
                model.AddBoolOr(NegatedBoundedSpan(works, start, length));
            }
        }

        // Penalize sequences that are below the soft limit.
        if (minCost > 0)
        {
            foreach (var length in Range(hardMin, softMin))
            {
                foreach (var start in Range(works.Length - length + 1))
                {
                    var span = NegatedBoundedSpan(works, start, length).ToList();
                    var name = $": under_span(start={start}, length={length})";
                    var lit = model.NewBoolVar(prefix + name);
                    span.Add(lit);
                    model.AddBoolOr(span);
                    costLiterals.Add(lit);
                    // We filter exactly the sequence with a short length.
                    // The penalty is proportional to the delta with softMin.
                    costCoefficients.Add(minCost * (softMin - length));
                }
            }
        }

        // Penalize sequences that are above the soft limit.
        if (maxCost > 0)
        {
            foreach (var length in Range(softMax + 1, hardMax + 1))
            {
                foreach (var start in Range(works.Length - length + 1))
                {
                    var span = NegatedBoundedSpan(works, start, length).ToList();
                    var name = $": over_span(start={start}, length={length})";
                    var lit = model.NewBoolVar(prefix + name);
                    span.Add(lit);
                    model.AddBoolOr(span);
                    costLiterals.Add(lit);
                    // Cost paid is max_cost * excess length.
                    costCoefficients.Add(maxCost * (length - softMax));
                }
            }
        }

        // Just forbid any sequence of true variables with length hardMax + 1
        foreach (var start in Range(works.Length - hardMax))
        {
            var temp = new List<ILiteral>();
            foreach (var i in Range(start, start + hardMax + 1))
            {
                temp.Add(works[i].Not());
            }
            model.AddBoolOr(temp);
        }
        return (costLiterals.ToArray(), costCoefficients.ToArray());
    }

    /// <summary>
    /// Sum constraint with soft and hard bounds.
    /// This constraint counts the variables assigned to true from works.
    /// It forbids sum &lt; hardMin or &gt; hardMax.
    /// Then it creates penalty terms if the sum is &lt; softMin or &gt; softMax.
    /// </summary>
    /// <param name="model">The sequence constraint is built on this
    /// model.</param> <param name="works">A list of Boolean variables.</param>
    /// <param name="hardMin">The sum of true variables must be at least
    /// hardMin.</param> <param name="softMin">The sum should be at least softMin,
    /// or a linear penalty on the delta will be added to the
    /// objective.</param> <param name="minCost">The coefficient of the
    /// linear penalty if the sum is less than softMin.</param> <param
    /// name="softMax">The sum should be at most softMax, or a
    /// linear penalty on the delta will be added to the objective.</param> <param
    /// name="hardMax">The sum of true variables must be at most
    /// hardMax.</param> <param name="maxCost">The coefficient of the linear
    /// penalty if the sum is more than softMax.</param> <param name="prefix">A
    /// base name for penalty literals.</param> <returns>A tuple (costVariables,
    /// costCoefficients) containing the different penalties created by the
    /// sequence constraint.</returns>
    static (IntVar[] costVariables, int[] costCoefficients)
        AddSoftSumConstraint(CpModel model, IntVar[] works, int hardMin, int softMin, int minCost, int softMax,
                             int hardMax, int maxCost, string prefix)
    {
        var costVariables = new List<IntVar>();
        var costCoefficients = new List<int>();
        // The variable's domain [hardMin, hardMax] enforces the hard bounds.
        var sumVar = model.NewIntVar(hardMin, hardMax, "");
        // This adds the hard constraints on the sum.
        model.Add(sumVar == LinearExpr.Sum(works));
        var zero = model.NewConstant(0);

        // Penalize sums below the soft_min target.
        if (softMin > hardMin && minCost > 0)
        {
            var delta = model.NewIntVar(-works.Length, works.Length, "");
            model.Add(delta == (softMin - sumVar));
            // excess = max(delta, 0): only a shortfall is penalized.
            var excess = model.NewIntVar(0, works.Length, prefix + ": under_sum");
            model.AddMaxEquality(excess, new[] { delta, zero });
            costVariables.Add(excess);
            costCoefficients.Add(minCost);
        }

        // Penalize sums above the soft_max target.
        if (softMax < hardMax && maxCost > 0)
        {
            var delta = model.NewIntVar(-works.Length, works.Length, "");
            model.Add(delta == sumVar - softMax);
            // excess = max(delta, 0): only an overage is penalized.
            var excess = model.NewIntVar(0, works.Length, prefix + ": over_sum");
            model.AddMaxEquality(excess, new[] { delta, zero });
            costVariables.Add(excess);
            costCoefficients.Add(maxCost);
        }

        return (costVariables.ToArray(), costCoefficients.ToArray());
    }

    /// <summary>
    /// C# equivalent of Python range (start, stop)
    /// </summary>
    /// <param name="start">The inclusive start.</param>
    /// <param name="stop">The exclusive stop.</param>
    /// <returns>A sequence of integers.</returns>
    static IEnumerable<int> Range(int start, int stop)
    {
        foreach (var i in Enumerable.Range(start, stop - start))
            yield return i;
    }

    /// <summary>
    /// C# equivalent of Python range (stop)
    /// </summary>
    /// <param name="stop">The exclusive stop.</param>
    /// <returns>A sequence of integers.</returns>
    static IEnumerable<int> Range(int stop)
    {
        return Range(0, stop);
    }
}
| |
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Reflection;
using System.Windows.Forms;
using System.IO;
using WeifenLuo.WinFormsUI.Docking;
using DockSample.Customization;
namespace DockSample
{
public partial class MainForm : Form
{
private bool m_bSaveLayout = true;
private DeserializeDockContent m_deserializeDockContent;
private DummySolutionExplorer m_solutionExplorer = new DummySolutionExplorer();
private DummyPropertyWindow m_propertyWindow = new DummyPropertyWindow();
private DummyToolbox m_toolbox = new DummyToolbox();
private DummyOutputWindow m_outputWindow = new DummyOutputWindow();
private DummyTaskList m_taskList = new DummyTaskList();
/// <summary>
/// Initializes the main form, syncing the right-to-left UI state and preparing
/// the dock-content deserialization callback used when restoring layouts.
/// </summary>
public MainForm()
{
    InitializeComponent();

    showRightToLeft.Checked = (RightToLeft == RightToLeft.Yes);
    RightToLeftLayout = showRightToLeft.Checked;
    // m_solutionExplorer is already created by its field initializer; the old code
    // allocated a second DummySolutionExplorer here and discarded the first.
    m_solutionExplorer.RightToLeftLayout = RightToLeftLayout;
    m_deserializeDockContent = new DeserializeDockContent(GetContentFromPersistString);
}
#region Methods
private IDockContent FindDocument(string text)
{
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
{
foreach (Form form in MdiChildren)
if (form.Text == text)
return form as IDockContent;
return null;
}
else
{
foreach (IDockContent content in dockPanel.Documents)
if (content.DockHandler.TabText == text)
return content;
return null;
}
}
private DummyDoc CreateNewDocument()
{
DummyDoc dummyDoc = new DummyDoc();
int count = 1;
//string text = "C:\\MADFDKAJ\\ADAKFJASD\\ADFKDSAKFJASD\\ASDFKASDFJASDF\\ASDFIJADSFJ\\ASDFKDFDA" + count.ToString();
string text = "Document" + count.ToString();
while (FindDocument(text) != null)
{
count++;
//text = "C:\\MADFDKAJ\\ADAKFJASD\\ADFKDSAKFJASD\\ASDFKASDFJASDF\\ASDFIJADSFJ\\ASDFKDFDA" + count.ToString();
text = "Document" + count.ToString();
}
dummyDoc.Text = text;
return dummyDoc;
}
private DummyDoc CreateNewDocument(string text)
{
DummyDoc dummyDoc = new DummyDoc();
dummyDoc.Text = text;
return dummyDoc;
}
private void CloseAllDocuments()
{
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
{
foreach (Form form in MdiChildren)
form.Close();
}
else
{
for (int index = dockPanel.Contents.Count - 1; index >= 0; index--)
{
if (dockPanel.Contents[index] is IDockContent)
{
IDockContent content = (IDockContent)dockPanel.Contents[index];
content.DockHandler.Close();
}
}
}
}
private IDockContent GetContentFromPersistString(string persistString)
{
if (persistString == typeof(DummySolutionExplorer).ToString())
return m_solutionExplorer;
else if (persistString == typeof(DummyPropertyWindow).ToString())
return m_propertyWindow;
else if (persistString == typeof(DummyToolbox).ToString())
return m_toolbox;
else if (persistString == typeof(DummyOutputWindow).ToString())
return m_outputWindow;
else if (persistString == typeof(DummyTaskList).ToString())
return m_taskList;
else
{
// DummyDoc overrides GetPersistString to add extra information into persistString.
// Any DockContent may override this value to add any needed information for deserialization.
string[] parsedStrings = persistString.Split(new char[] { ',' });
if (parsedStrings.Length != 3)
return null;
if (parsedStrings[0] != typeof(DummyDoc).ToString())
return null;
DummyDoc dummyDoc = new DummyDoc();
if (parsedStrings[1] != string.Empty)
dummyDoc.FileName = parsedStrings[1];
if (parsedStrings[2] != string.Empty)
dummyDoc.Text = parsedStrings[2];
return dummyDoc;
}
}
private void CloseAllContents()
{
// we don't want to create another instance of tool window, set DockPanel to null
m_solutionExplorer.DockPanel = null;
m_propertyWindow.DockPanel = null;
m_toolbox.DockPanel = null;
m_outputWindow.DockPanel = null;
m_taskList.DockPanel = null;
// Close all other document windows
CloseAllDocuments();
}
private void SetSchema(object sender, System.EventArgs e)
{
CloseAllContents();
if (sender == menuItemSchemaVS2005)
Extender.SetSchema(dockPanel, Extender.Schema.VS2005);
else if (sender == menuItemSchemaVS2003)
Extender.SetSchema(dockPanel, Extender.Schema.VS2003);
menuItemSchemaVS2005.Checked = (sender == menuItemSchemaVS2005);
menuItemSchemaVS2003.Checked = (sender == menuItemSchemaVS2003);
}
private void SetDocumentStyle(object sender, System.EventArgs e)
{
DocumentStyle oldStyle = dockPanel.DocumentStyle;
DocumentStyle newStyle;
if (sender == menuItemDockingMdi)
newStyle = DocumentStyle.DockingMdi;
else if (sender == menuItemDockingWindow)
newStyle = DocumentStyle.DockingWindow;
else if (sender == menuItemDockingSdi)
newStyle = DocumentStyle.DockingSdi;
else
newStyle = DocumentStyle.SystemMdi;
if (oldStyle == newStyle)
return;
if (oldStyle == DocumentStyle.SystemMdi || newStyle == DocumentStyle.SystemMdi)
CloseAllDocuments();
dockPanel.DocumentStyle = newStyle;
menuItemDockingMdi.Checked = (newStyle == DocumentStyle.DockingMdi);
menuItemDockingWindow.Checked = (newStyle == DocumentStyle.DockingWindow);
menuItemDockingSdi.Checked = (newStyle == DocumentStyle.DockingSdi);
menuItemSystemMdi.Checked = (newStyle == DocumentStyle.SystemMdi);
menuItemLayoutByCode.Enabled = (newStyle != DocumentStyle.SystemMdi);
menuItemLayoutByXml.Enabled = (newStyle != DocumentStyle.SystemMdi);
toolBarButtonLayoutByCode.Enabled = (newStyle != DocumentStyle.SystemMdi);
toolBarButtonLayoutByXml.Enabled = (newStyle != DocumentStyle.SystemMdi);
}
private void SetDockPanelSkinOptions(bool isChecked)
{
if (isChecked)
{
// All of these options may be set in the designer.
// This is not a complete list of possible options available in the skin.
AutoHideStripSkin autoHideSkin = new AutoHideStripSkin();
autoHideSkin.DockStripGradient.StartColor = Color.AliceBlue;
autoHideSkin.DockStripGradient.EndColor = Color.Blue;
autoHideSkin.DockStripGradient.LinearGradientMode = System.Drawing.Drawing2D.LinearGradientMode.ForwardDiagonal;
autoHideSkin.TabGradient.StartColor = SystemColors.Control;
autoHideSkin.TabGradient.EndColor = SystemColors.ControlDark;
autoHideSkin.TabGradient.TextColor = SystemColors.ControlText;
autoHideSkin.TextFont = new Font("Showcard Gothic", 10);
dockPanel.Skin.AutoHideStripSkin = autoHideSkin;
DockPaneStripSkin dockPaneSkin = new DockPaneStripSkin();
dockPaneSkin.DocumentGradient.DockStripGradient.StartColor = Color.Red;
dockPaneSkin.DocumentGradient.DockStripGradient.EndColor = Color.Pink;
dockPaneSkin.DocumentGradient.ActiveTabGradient.StartColor = Color.Green;
dockPaneSkin.DocumentGradient.ActiveTabGradient.EndColor = Color.Green;
dockPaneSkin.DocumentGradient.ActiveTabGradient.TextColor = Color.White;
dockPaneSkin.DocumentGradient.InactiveTabGradient.StartColor = Color.Gray;
dockPaneSkin.DocumentGradient.InactiveTabGradient.EndColor = Color.Gray;
dockPaneSkin.DocumentGradient.InactiveTabGradient.TextColor = Color.Black;
dockPaneSkin.TextFont = new Font("SketchFlow Print", 10);
dockPanel.Skin.DockPaneStripSkin = dockPaneSkin;
}
else
{
dockPanel.Skin = new DockPanelSkin();
}
menuItemLayoutByXml_Click(menuItemLayoutByXml, EventArgs.Empty);
}
#endregion
#region Event Handlers
private void menuItemExit_Click(object sender, System.EventArgs e)
{
Close();
}
private void menuItemSolutionExplorer_Click(object sender, System.EventArgs e)
{
m_solutionExplorer.Show(dockPanel);
}
private void menuItemPropertyWindow_Click(object sender, System.EventArgs e)
{
m_propertyWindow.Show(dockPanel);
}
private void menuItemToolbox_Click(object sender, System.EventArgs e)
{
m_toolbox.Show(dockPanel);
}
private void menuItemOutputWindow_Click(object sender, System.EventArgs e)
{
m_outputWindow.Show(dockPanel);
}
private void menuItemTaskList_Click(object sender, System.EventArgs e)
{
m_taskList.Show(dockPanel);
}
private void menuItemAbout_Click(object sender, System.EventArgs e)
{
AboutDialog aboutDialog = new AboutDialog();
aboutDialog.ShowDialog(this);
}
private void menuItemNew_Click(object sender, System.EventArgs e)
{
DummyDoc dummyDoc = CreateNewDocument();
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
{
dummyDoc.MdiParent = this;
dummyDoc.Show();
}
else
dummyDoc.Show(dockPanel);
}
private void menuItemOpen_Click(object sender, System.EventArgs e)
{
OpenFileDialog openFile = new OpenFileDialog();
openFile.InitialDirectory = Application.ExecutablePath;
openFile.Filter = "rtf files (*.rtf)|*.rtf|txt files (*.txt)|*.txt|All files (*.*)|*.*";
openFile.FilterIndex = 1;
openFile.RestoreDirectory = true;
if (openFile.ShowDialog() == DialogResult.OK)
{
string fullName = openFile.FileName;
string fileName = Path.GetFileName(fullName);
if (FindDocument(fileName) != null)
{
MessageBox.Show("The document: " + fileName + " has already opened!");
return;
}
DummyDoc dummyDoc = new DummyDoc();
dummyDoc.Text = fileName;
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
{
dummyDoc.MdiParent = this;
dummyDoc.Show();
}
else
dummyDoc.Show(dockPanel);
try
{
dummyDoc.FileName = fullName;
}
catch (Exception exception)
{
dummyDoc.Close();
MessageBox.Show(exception.Message);
}
}
}
private void menuItemFile_Popup(object sender, System.EventArgs e)
{
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
{
menuItemClose.Enabled = menuItemCloseAll.Enabled = (ActiveMdiChild != null);
}
else
{
menuItemClose.Enabled = (dockPanel.ActiveDocument != null);
menuItemCloseAll.Enabled = (dockPanel.DocumentsCount > 0);
}
}
private void menuItemClose_Click(object sender, System.EventArgs e)
{
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
ActiveMdiChild.Close();
else if (dockPanel.ActiveDocument != null)
dockPanel.ActiveDocument.DockHandler.Close();
}
private void menuItemCloseAll_Click(object sender, System.EventArgs e)
{
CloseAllDocuments();
}
private void MainForm_Load(object sender, System.EventArgs e)
{
string configFile = Path.Combine(Path.GetDirectoryName(Application.ExecutablePath), "DockPanel.config");
if (File.Exists(configFile))
dockPanel.LoadFromXml(configFile, m_deserializeDockContent);
}
private void MainForm_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
string configFile = Path.Combine(Path.GetDirectoryName(Application.ExecutablePath), "DockPanel.config");
if (m_bSaveLayout)
dockPanel.SaveAsXml(configFile);
else if (File.Exists(configFile))
File.Delete(configFile);
}
private void menuItemToolBar_Click(object sender, System.EventArgs e)
{
toolBar.Visible = menuItemToolBar.Checked = !menuItemToolBar.Checked;
}
private void menuItemStatusBar_Click(object sender, System.EventArgs e)
{
statusBar.Visible = menuItemStatusBar.Checked = !menuItemStatusBar.Checked;
}
private void toolBar_ButtonClick(object sender, System.Windows.Forms.ToolStripItemClickedEventArgs e)
{
if (e.ClickedItem == toolBarButtonNew)
menuItemNew_Click(null, null);
else if (e.ClickedItem == toolBarButtonOpen)
menuItemOpen_Click(null, null);
else if (e.ClickedItem == toolBarButtonSolutionExplorer)
menuItemSolutionExplorer_Click(null, null);
else if (e.ClickedItem == toolBarButtonPropertyWindow)
menuItemPropertyWindow_Click(null, null);
else if (e.ClickedItem == toolBarButtonToolbox)
menuItemToolbox_Click(null, null);
else if (e.ClickedItem == toolBarButtonOutputWindow)
menuItemOutputWindow_Click(null, null);
else if (e.ClickedItem == toolBarButtonTaskList)
menuItemTaskList_Click(null, null);
else if (e.ClickedItem == toolBarButtonLayoutByCode)
menuItemLayoutByCode_Click(null, null);
else if (e.ClickedItem == toolBarButtonLayoutByXml)
menuItemLayoutByXml_Click(null, null);
else if (e.ClickedItem == toolBarButtonDockPanelSkinDemo)
SetDockPanelSkinOptions(!toolBarButtonDockPanelSkinDemo.Checked);
}
private void menuItemNewWindow_Click(object sender, System.EventArgs e)
{
MainForm newWindow = new MainForm();
newWindow.Text = newWindow.Text + " - New";
newWindow.Show();
}
private void menuItemTools_Popup(object sender, System.EventArgs e)
{
menuItemLockLayout.Checked = !this.dockPanel.AllowEndUserDocking;
}
private void menuItemLockLayout_Click(object sender, System.EventArgs e)
{
dockPanel.AllowEndUserDocking = !dockPanel.AllowEndUserDocking;
}
private void menuItemLayoutByCode_Click(object sender, System.EventArgs e)
{
dockPanel.SuspendLayout(true);
CloseAllDocuments();
m_solutionExplorer = new DummySolutionExplorer();
m_propertyWindow = new DummyPropertyWindow();
m_toolbox = new DummyToolbox();
m_outputWindow = new DummyOutputWindow();
m_taskList = new DummyTaskList();
m_solutionExplorer.Show(dockPanel, DockState.DockRight);
m_propertyWindow.Show(m_solutionExplorer.Pane, m_solutionExplorer);
m_toolbox.Show(dockPanel, new Rectangle(98, 133, 200, 383));
m_outputWindow.Show(m_solutionExplorer.Pane, DockAlignment.Bottom, 0.35);
m_taskList.Show(m_toolbox.Pane, DockAlignment.Left, 0.4);
DummyDoc doc1 = CreateNewDocument("Document1");
DummyDoc doc2 = CreateNewDocument("Document2");
DummyDoc doc3 = CreateNewDocument("Document3");
DummyDoc doc4 = CreateNewDocument("Document4");
doc1.Show(dockPanel, DockState.Document);
doc2.Show(doc1.Pane, null);
doc3.Show(doc1.Pane, DockAlignment.Bottom, 0.5);
doc4.Show(doc3.Pane, DockAlignment.Right, 0.5);
dockPanel.ResumeLayout(true, true);
}
private void menuItemLayoutByXml_Click(object sender, System.EventArgs e)
{
dockPanel.SuspendLayout(true);
// In order to load layout from XML, we need to close all the DockContents
CloseAllContents();
Assembly assembly = Assembly.GetAssembly(typeof(MainForm));
Stream xmlStream = assembly.GetManifestResourceStream("DockSample.Resources.DockPanel.xml");
dockPanel.LoadFromXml(xmlStream, m_deserializeDockContent);
xmlStream.Close();
dockPanel.ResumeLayout(true, true);
}
private void menuItemCloseAllButThisOne_Click(object sender, System.EventArgs e)
{
if (dockPanel.DocumentStyle == DocumentStyle.SystemMdi)
{
Form activeMdi = ActiveMdiChild;
foreach (Form form in MdiChildren)
{
if (form != activeMdi)
form.Close();
}
}
else
{
foreach (IDockContent document in dockPanel.DocumentsToArray())
{
if (!document.DockHandler.IsActivated)
document.DockHandler.Close();
}
}
}
private void menuItemShowDocumentIcon_Click(object sender, System.EventArgs e)
{
dockPanel.ShowDocumentIcon = menuItemShowDocumentIcon.Checked = !menuItemShowDocumentIcon.Checked;
}
private void showRightToLeft_Click(object sender, EventArgs e)
{
CloseAllContents();
if (showRightToLeft.Checked)
{
this.RightToLeft = RightToLeft.No;
this.RightToLeftLayout = false;
}
else
{
this.RightToLeft = RightToLeft.Yes;
this.RightToLeftLayout = true;
}
m_solutionExplorer.RightToLeftLayout = this.RightToLeftLayout;
showRightToLeft.Checked = !showRightToLeft.Checked;
}
private void exitWithoutSavingLayout_Click(object sender, EventArgs e)
{
m_bSaveLayout = false;
Close();
m_bSaveLayout = true;
}
#endregion
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text.RegularExpressions;
using DiscUtils.Internal;
using DiscUtils.Streams;
namespace DiscUtils.Nfs
{
/// <summary>
/// A file system backed by an NFS server.
/// </summary>
/// <remarks>NFS is a common storage protocol for Virtual Machines. Currently, only NFS v3 is supported.</remarks>
public class NfsFileSystem : DiscFileSystem
{
private Nfs3Client _client;
/// <summary>
/// Initializes a new instance of the NfsFileSystem class.
/// </summary>
/// <param name="address">The address of the NFS server (IP or DNS address).</param>
/// <param name="mountPoint">The mount point on the server to root the file system.</param>
/// <remarks>
/// The created instance uses default credentials.
/// </remarks>
public NfsFileSystem(string address, string mountPoint)
    : base(new NfsFileSystemOptions())
{
    // Connect using the library's default unix RPC credentials.
    _client = new Nfs3Client(address, RpcUnixCredential.Default, mountPoint);
}
/// <summary>
/// Initializes a new instance of the NfsFileSystem class.
/// </summary>
/// <param name="address">The address of the NFS server (IP or DNS address).</param>
/// <param name="credentials">The credentials to use when accessing the NFS server.</param>
/// <param name="mountPoint">The mount point on the server to root the file system.</param>
public NfsFileSystem(string address, RpcCredentials credentials, string mountPoint)
    : base(new NfsFileSystemOptions())
{
    // Connect with caller-supplied RPC credentials.
    _client = new Nfs3Client(address, credentials, mountPoint);
}
/// <summary>
/// Gets whether this file system supports modification (true for NFS).
/// </summary>
public override bool CanWrite
{
    get { return true; }
}
/// <summary>
/// Gets the friendly name for this file system (NFS).
/// </summary>
public override string FriendlyName
{
    get { return "NFS"; }
}
/// <summary>
/// Gets the options controlling this instance.
/// </summary>
public NfsFileSystemOptions NfsOptions
{
    // The base class stores the options passed to the constructor, which are
    // always an NfsFileSystemOptions instance for this type.
    get { return (NfsFileSystemOptions)Options; }
}
/// <summary>
/// Gets the preferred NFS read size, in bytes, as reported by the server
/// (0 when no client connection exists).
/// </summary>
public int PreferredReadSize
{
    get { return _client == null ? 0 : (int)_client.FileSystemInfo.ReadPreferredBytes; }
}
/// <summary>
/// Gets the preferred NFS write size, in bytes, as reported by the server
/// (0 when no client connection exists).
/// </summary>
public int PreferredWriteSize
{
    get { return _client == null ? 0 : (int)_client.FileSystemInfo.WritePreferredBytes; }
}
/// <summary>
/// Gets the folders exported by a server.
/// </summary>
/// <param name="address">The address of the server.</param>
/// <returns>An enumeration of exported folders.</returns>
/// <remarks>
/// NOTE(review): this is an iterator, so the RPC connection is only opened on
/// first enumeration and is held open until enumeration completes or is
/// disposed — callers should fully consume or dispose the enumerator.
/// </remarks>
public static IEnumerable<string> GetExports(string address)
{
    using (RpcClient rpcClient = new RpcClient(address, null))
    {
        Nfs3Mount mountClient = new Nfs3Mount(rpcClient);
        foreach (Nfs3Export export in mountClient.Exports())
        {
            yield return export.DirPath;
        }
    }
}
/// <summary>
/// Copies a file from one location to another.
/// </summary>
/// <param name="sourceFile">The source file to copy.</param>
/// <param name="destinationFile">The destination path.</param>
/// <param name="overwrite">Whether to overwrite any existing file (true), or fail if such a file exists.</param>
/// <exception cref="FileNotFoundException">The source does not exist or is not a regular file.</exception>
/// <exception cref="IOException">The destination exists and <paramref name="overwrite"/> is false.</exception>
public override void CopyFile(string sourceFile, string destinationFile, bool overwrite)
{
    try
    {
        Nfs3FileHandle sourceParent = GetParentDirectory(sourceFile);
        Nfs3FileHandle destParent = GetParentDirectory(destinationFile);

        string sourceFileName = Utilities.GetFileFromPath(sourceFile);
        string destFileName = Utilities.GetFileFromPath(destinationFile);

        Nfs3FileHandle sourceFileHandle = _client.Lookup(sourceParent, sourceFileName);
        if (sourceFileHandle == null)
        {
            throw new FileNotFoundException(
                string.Format(CultureInfo.InvariantCulture, "The file '{0}' does not exist", sourceFile),
                sourceFile);
        }

        Nfs3FileAttributes sourceAttrs = _client.GetAttributes(sourceFileHandle);
        // BUGFIX: Nfs3FileType is not a flags enum, so the old bitwise test
        // '(Type & Directory) != 0' matched unrelated file types; compare for
        // equality, as the other methods of this class do.
        if (sourceAttrs.Type == Nfs3FileType.Directory)
        {
            throw new FileNotFoundException(
                string.Format(CultureInfo.InvariantCulture, "The path '{0}' is not a file", sourceFile),
                sourceFile);
        }

        Nfs3FileHandle destFileHandle = _client.Lookup(destParent, destFileName);
        if (destFileHandle != null)
        {
            if (overwrite == false)
            {
                throw new IOException(string.Format(CultureInfo.InvariantCulture,
                    "The destination file '{0}' already exists", destinationFile));
            }
        }

        // Create the file, with temporary permissions
        Nfs3SetAttributes setAttrs = new Nfs3SetAttributes();
        setAttrs.Mode = UnixFilePermissions.OwnerRead | UnixFilePermissions.OwnerWrite;
        setAttrs.SetMode = true;
        setAttrs.Size = sourceAttrs.Size;
        setAttrs.SetSize = true;
        destFileHandle = _client.Create(destParent, destFileName, !overwrite, setAttrs);

        // Copy the file contents
        using (Nfs3FileStream sourceFs = new Nfs3FileStream(_client, sourceFileHandle, FileAccess.Read))
        using (Nfs3FileStream destFs = new Nfs3FileStream(_client, destFileHandle, FileAccess.Write))
        {
            // Use at least 1 MiB per transfer, but no more than the smaller of
            // the server's preferred read/write sizes allows.
            int bufferSize =
                (int)
                Math.Max(1 * Sizes.OneMiB,
                    Math.Min(_client.FileSystemInfo.WritePreferredBytes,
                        _client.FileSystemInfo.ReadPreferredBytes));
            byte[] buffer = new byte[bufferSize];

            int numRead = sourceFs.Read(buffer, 0, bufferSize);
            while (numRead > 0)
            {
                destFs.Write(buffer, 0, numRead);
                numRead = sourceFs.Read(buffer, 0, bufferSize);
            }
        }

        // Set the new file's attributes based on the source file
        setAttrs = new Nfs3SetAttributes();
        setAttrs.Mode = sourceAttrs.Mode;
        setAttrs.SetMode = true;
        setAttrs.AccessTime = sourceAttrs.AccessTime;
        setAttrs.SetAccessTime = Nfs3SetTimeMethod.ClientTime;
        setAttrs.ModifyTime = sourceAttrs.ModifyTime;
        setAttrs.SetModifyTime = Nfs3SetTimeMethod.ClientTime;
        setAttrs.Gid = sourceAttrs.Gid;
        setAttrs.SetGid = true;
        _client.SetAttributes(destFileHandle, setAttrs);
    }
    catch (Nfs3Exception ne)
    {
        // Surface NFS protocol failures as standard IO exception types.
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Creates a directory at the specified path.
/// </summary>
/// <param name="path">The path of the directory to create.</param>
public override void CreateDirectory(string path)
{
    try
    {
        Nfs3FileHandle parentHandle = GetParentDirectory(path);

        // New directories receive the permissions configured on NfsOptions.
        Nfs3SetAttributes attributes = new Nfs3SetAttributes
        {
            Mode = NfsOptions.NewDirectoryPermissions,
            SetMode = true
        };

        _client.MakeDirectory(parentHandle, Utilities.GetFileFromPath(path), attributes);
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Deletes a directory at the specified path.
/// </summary>
/// <param name="path">The directory to delete.</param>
public override void DeleteDirectory(string path)
{
    try
    {
        Nfs3FileHandle dirHandle = GetFile(path);

        // A path that exists but is not a directory cannot be deleted here.
        if (dirHandle != null && _client.GetAttributes(dirHandle).Type != Nfs3FileType.Directory)
        {
            throw new DirectoryNotFoundException("No such directory: " + path);
        }

        Nfs3FileHandle parentHandle = GetParentDirectory(path);

        // Deleting a non-existent directory is a silent no-op.
        if (dirHandle != null)
        {
            _client.RemoveDirectory(parentHandle, Utilities.GetFileFromPath(path));
        }
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Deletes a file at the specified path.
/// </summary>
/// <param name="path">The path of the file to delete.</param>
public override void DeleteFile(string path)
{
    try
    {
        Nfs3FileHandle fileHandle = GetFile(path);

        // A directory cannot be removed via DeleteFile.
        if (fileHandle != null && _client.GetAttributes(fileHandle).Type == Nfs3FileType.Directory)
        {
            throw new FileNotFoundException("No such file", path);
        }

        Nfs3FileHandle parentHandle = GetParentDirectory(path);

        // Deleting a non-existent file is a silent no-op.
        if (fileHandle != null)
        {
            _client.Remove(parentHandle, Utilities.GetFileFromPath(path));
        }
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Indicates whether a specified path exists, and refers to a directory.
/// </summary>
/// <param name="path">The path to inspect.</param>
/// <returns><c>true</c> if the path is a directory, else <c>false</c>.</returns>
public override bool DirectoryExists(string path)
{
    FileAttributes attributes = GetAttributes(path);
    return (attributes & FileAttributes.Directory) == FileAttributes.Directory;
}
/// <summary>
/// Indicates whether a specified path exists, and refers to a file.
/// </summary>
/// <param name="path">The path to inspect.</param>
/// <returns><c>true</c> if the path is a file, else <c>false</c>.</returns>
public override bool FileExists(string path)
{
    // GetAttributes sets FileAttributes.Normal only for entries that are
    // neither directories nor devices, so this selects regular files.
    return (GetAttributes(path) & FileAttributes.Normal) != 0;
}
/// <summary>
/// Gets the names of subdirectories in a specified directory matching a specified
/// search pattern, using a value to determine whether to search subdirectories.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against.</param>
/// <param name="searchOption">Indicates whether to search subdirectories.</param>
/// <returns>Array of directories matching the search pattern.</returns>
public override string[] GetDirectories(string path, string searchPattern, SearchOption searchOption)
{
    try
    {
        bool recurse = searchOption == SearchOption.AllDirectories;
        Regex matcher = Utilities.ConvertWildcardsToRegEx(searchPattern);

        // Collect directories only (dirs: true, files: false).
        List<string> matches = new List<string>();
        DoSearch(matches, path, matcher, recurse, true, false);
        return matches.ToArray();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Gets the names of files in a specified directory matching a specified
/// search pattern, using a value to determine whether to search subdirectories.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against.</param>
/// <param name="searchOption">Indicates whether to search subdirectories.</param>
/// <returns>Array of files matching the search pattern.</returns>
public override string[] GetFiles(string path, string searchPattern, SearchOption searchOption)
{
    try
    {
        bool recurse = searchOption == SearchOption.AllDirectories;
        Regex matcher = Utilities.ConvertWildcardsToRegEx(searchPattern);

        // Collect files only (dirs: false, files: true).
        List<string> matches = new List<string>();
        DoSearch(matches, path, matcher, recurse, false, true);
        return matches.ToArray();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Gets the names of all files and subdirectories in a specified directory.
/// </summary>
/// <param name="path">The path to search.</param>
/// <returns>Array of files and subdirectories matching the search pattern.</returns>
public override string[] GetFileSystemEntries(string path)
{
    try
    {
        // Match everything in the directory; no recursion into subdirectories.
        Regex matchAll = Utilities.ConvertWildcardsToRegEx("*.*");

        List<string> entries = new List<string>();
        DoSearch(entries, path, matchAll, false, true, true);
        return entries.ToArray();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Gets the names of files and subdirectories in a specified directory matching a specified
/// search pattern.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against.</param>
/// <returns>Array of files and subdirectories matching the search pattern.</returns>
public override string[] GetFileSystemEntries(string path, string searchPattern)
{
    try
    {
        Regex matcher = Utilities.ConvertWildcardsToRegEx(searchPattern);

        // Both directories and files are included; no recursion.
        List<string> entries = new List<string>();
        DoSearch(entries, path, matcher, false, true, true);
        return entries.ToArray();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Moves a directory.
/// </summary>
/// <param name="sourceDirectoryName">The directory to move.</param>
/// <param name="destinationDirectoryName">The target directory name.</param>
/// <exception cref="DirectoryNotFoundException">The source does not exist or is not a directory.</exception>
public override void MoveDirectory(string sourceDirectoryName, string destinationDirectoryName)
{
    try
    {
        Nfs3FileHandle sourceParent = GetParentDirectory(sourceDirectoryName);
        Nfs3FileHandle destParent = GetParentDirectory(destinationDirectoryName);

        string sourceName = Utilities.GetFileFromPath(sourceDirectoryName);
        string destName = Utilities.GetFileFromPath(destinationDirectoryName);

        Nfs3FileHandle fileHandle = _client.Lookup(sourceParent, sourceName);
        if (fileHandle == null)
        {
            throw new DirectoryNotFoundException(string.Format(CultureInfo.InvariantCulture,
                "The directory '{0}' does not exist", sourceDirectoryName));
        }

        Nfs3FileAttributes sourceAttrs = _client.GetAttributes(fileHandle);
        // BUGFIX: Nfs3FileType is not a flags enum, so the old bitwise test
        // '(Type & Directory) == 0' was unreliable; use equality, matching
        // DeleteDirectory's check.
        if (sourceAttrs.Type != Nfs3FileType.Directory)
        {
            throw new DirectoryNotFoundException(string.Format(CultureInfo.InvariantCulture,
                "The path '{0}' is not a directory", sourceDirectoryName));
        }

        _client.Rename(sourceParent, sourceName, destParent, destName);
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Moves a file, allowing an existing file to be overwritten.
/// </summary>
/// <param name="sourceName">The file to move.</param>
/// <param name="destinationName">The target file name.</param>
/// <param name="overwrite">Whether to permit a destination file to be overwritten.</param>
/// <exception cref="FileNotFoundException">The source does not exist or is not a regular file.</exception>
/// <exception cref="IOException">The destination exists and <paramref name="overwrite"/> is false.</exception>
public override void MoveFile(string sourceName, string destinationName, bool overwrite)
{
    try
    {
        Nfs3FileHandle sourceParent = GetParentDirectory(sourceName);
        Nfs3FileHandle destParent = GetParentDirectory(destinationName);

        string sourceFileName = Utilities.GetFileFromPath(sourceName);
        string destFileName = Utilities.GetFileFromPath(destinationName);

        Nfs3FileHandle sourceFileHandle = _client.Lookup(sourceParent, sourceFileName);
        if (sourceFileHandle == null)
        {
            throw new FileNotFoundException(
                string.Format(CultureInfo.InvariantCulture, "The file '{0}' does not exist", sourceName),
                sourceName);
        }

        Nfs3FileAttributes sourceAttrs = _client.GetAttributes(sourceFileHandle);
        // BUGFIX: Nfs3FileType is not a flags enum, so the old bitwise test
        // '(Type & Directory) != 0' matched unrelated file types; compare for
        // equality, as DeleteFile does.
        if (sourceAttrs.Type == Nfs3FileType.Directory)
        {
            throw new FileNotFoundException(
                string.Format(CultureInfo.InvariantCulture, "The path '{0}' is not a file", sourceName),
                sourceName);
        }

        Nfs3FileHandle destFileHandle = _client.Lookup(destParent, destFileName);
        if (destFileHandle != null && overwrite == false)
        {
            throw new IOException(string.Format(CultureInfo.InvariantCulture,
                "The destination file '{0}' already exists", destinationName));
        }

        _client.Rename(sourceParent, sourceFileName, destParent, destFileName);
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Opens the specified file.
/// </summary>
/// <param name="path">The full path of the file to open.</param>
/// <param name="mode">The file mode for the created stream.</param>
/// <param name="access">The access permissions for the created stream.</param>
/// <returns>The new stream.</returns>
/// <exception cref="UnauthorizedAccessException">The server does not grant the requested permissions.</exception>
public override SparseStream OpenFile(string path, FileMode mode, FileAccess access)
{
    try
    {
        // Translate the .NET FileAccess into the NFS permission set we must
        // hold on the file.
        Nfs3AccessPermissions requested;
        if (access == FileAccess.Read)
        {
            requested = Nfs3AccessPermissions.Read;
        }
        else if (access == FileAccess.ReadWrite)
        {
            requested = Nfs3AccessPermissions.Read | Nfs3AccessPermissions.Modify;
        }
        else
        {
            requested = Nfs3AccessPermissions.Modify;
        }

        // Creation path: Create/CreateNew always create; OpenOrCreate creates
        // only when the file is missing.
        if (mode == FileMode.Create || mode == FileMode.CreateNew ||
            (mode == FileMode.OpenOrCreate && !FileExists(path)))
        {
            Nfs3FileHandle parent = GetParentDirectory(path);

            Nfs3SetAttributes setAttrs = new Nfs3SetAttributes();
            setAttrs.Mode = NfsOptions.NewFilePermissions;
            setAttrs.SetMode = true;
            setAttrs.Size = 0;
            setAttrs.SetSize = true;
            // Third argument is "createNew" exclusivity: only FileMode.Create
            // tolerates an already-existing file.
            Nfs3FileHandle handle = _client.Create(parent, Utilities.GetFileFromPath(path),
                mode != FileMode.Create, setAttrs);

            return new Nfs3FileStream(_client, handle, access);
        }
        else
        {
            // Open path: verify the server actually grants what was requested.
            Nfs3FileHandle handle = GetFile(path);

            Nfs3AccessPermissions actualPerms = _client.Access(handle, requested);

            if (actualPerms != requested)
            {
                throw new UnauthorizedAccessException(string.Format(CultureInfo.InvariantCulture,
                    "Access denied opening '{0}'. Requested permission '{1}', got '{2}'", path, requested,
                    actualPerms));
            }

            Nfs3FileStream result = new Nfs3FileStream(_client, handle, access);
            if (mode == FileMode.Append)
            {
                result.Seek(0, SeekOrigin.End);
            }
            else if (mode == FileMode.Truncate)
            {
                result.SetLength(0);
            }

            return result;
        }
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Gets the attributes of a file or directory.
/// </summary>
/// <param name="path">The file or directory to inspect.</param>
/// <returns>The attributes of the file or directory.</returns>
public override FileAttributes GetAttributes(string path)
{
    try
    {
        Nfs3FileHandle handle = GetFile(path);
        Nfs3FileAttributes nfsAttrs = _client.GetAttributes(handle);

        // Map the NFS file type onto the closest .NET FileAttributes flag.
        FileAttributes result;
        switch (nfsAttrs.Type)
        {
            case Nfs3FileType.Directory:
                result = FileAttributes.Directory;
                break;
            case Nfs3FileType.BlockDevice:
            case Nfs3FileType.CharacterDevice:
                result = FileAttributes.Device;
                break;
            default:
                result = FileAttributes.Normal;
                break;
        }

        // Unix convention: dot-prefixed names are hidden.
        if (Utilities.GetFileFromPath(path).StartsWith(".", StringComparison.Ordinal))
        {
            result |= FileAttributes.Hidden;
        }

        return result;
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Sets the attributes of a file or directory.
/// </summary>
/// <param name="path">The file or directory to change.</param>
/// <param name="newValue">The new attributes of the file or directory.</param>
/// <exception cref="NotSupportedException">The requested attributes differ from the current ones.</exception>
public override void SetAttributes(string path, FileAttributes newValue)
{
    // Only a no-op "change" to the current attributes is accepted.
    if (newValue == GetAttributes(path))
    {
        return;
    }

    throw new NotSupportedException("Unable to change file attributes over NFS");
}
/// <summary>
/// Gets the creation time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <returns>The creation time.</returns>
public override DateTime GetCreationTimeUtc(string path)
{
    try
    {
        // NFS does not expose a creation time; the last modification time is
        // the closest available substitute.
        Nfs3FileHandle fileHandle = GetFile(path);
        return _client.GetAttributes(fileHandle).ModifyTime.ToDateTime();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Sets the creation time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <param name="newTime">The new time to set.</param>
public override void SetCreationTimeUtc(string path, DateTime newTime)
{
    // Intentional no-op: NFS does not expose a creation timestamp, so there is
    // nothing to update.  Callers are not failed, mirroring common file-system
    // behavior for unsupported metadata.
}
/// <summary>
/// Gets the last access time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <returns>The last access time.</returns>
public override DateTime GetLastAccessTimeUtc(string path)
{
    try
    {
        Nfs3FileHandle fileHandle = GetFile(path);
        return _client.GetAttributes(fileHandle).AccessTime.ToDateTime();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Sets the last access time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <param name="newTime">The new time to set.</param>
public override void SetLastAccessTimeUtc(string path, DateTime newTime)
{
    try
    {
        Nfs3FileHandle fileHandle = GetFile(path);

        // Only the access time field is flagged for update; other attributes
        // are left untouched on the server.
        Nfs3SetAttributes update = new Nfs3SetAttributes();
        update.SetAccessTime = Nfs3SetTimeMethod.ClientTime;
        update.AccessTime = new Nfs3FileTime(newTime);

        _client.SetAttributes(fileHandle, update);
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Gets the last modification time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <returns>The last write time.</returns>
public override DateTime GetLastWriteTimeUtc(string path)
{
    try
    {
        Nfs3FileHandle fileHandle = GetFile(path);
        return _client.GetAttributes(fileHandle).ModifyTime.ToDateTime();
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Sets the last modification time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <param name="newTime">The new time to set.</param>
public override void SetLastWriteTimeUtc(string path, DateTime newTime)
{
    try
    {
        Nfs3FileHandle fileHandle = GetFile(path);

        // Only the modification time field is flagged for update.
        Nfs3SetAttributes update = new Nfs3SetAttributes();
        update.SetModifyTime = Nfs3SetTimeMethod.ClientTime;
        update.ModifyTime = new Nfs3FileTime(newTime);

        _client.SetAttributes(fileHandle, update);
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Gets the length of a file.
/// </summary>
/// <param name="path">The path to the file.</param>
/// <returns>The length in bytes.</returns>
public override long GetFileLength(string path)
{
    try
    {
        Nfs3FileHandle fileHandle = GetFile(path);
        Nfs3FileAttributes fileAttrs = _client.GetAttributes(fileHandle);
        return fileAttrs.Size;
    }
    catch (Nfs3Exception ne)
    {
        throw ConvertNfsException(ne);
    }
}
/// <summary>
/// Size of the Filesystem in bytes
/// </summary>
public override long Size
{
    get
    {
        // Queried live from the server each time; not cached.
        var stats = _client.FsStat(_client.RootHandle);
        return (long)stats.TotalSizeBytes;
    }
}
/// <summary>
/// Used space of the Filesystem in bytes
/// </summary>
public override long UsedSpace
{
    get
    {
        // Derived from the other two statistics rather than queried directly.
        long used = Size - AvailableSpace;
        return used;
    }
}
/// <summary>
/// Available space of the Filesystem in bytes
/// </summary>
public override long AvailableSpace
{
    get
    {
        // Queried live from the server each time; not cached.
        var stats = _client.FsStat(_client.RootHandle);
        return (long)stats.FreeSpaceBytes;
    }
}
/// <summary>
/// Disposes of this instance, freeing up any resources used.
/// </summary>
/// <param name="disposing"><c>true</c> if called from Dispose, else <c>false</c>.</param>
protected override void Dispose(bool disposing)
{
    // Only managed state (the NFS client) needs tearing down, and only on an
    // explicit Dispose; nulling the field makes double-dispose safe.
    if (disposing && _client != null)
    {
        _client.Dispose();
        _client = null;
    }

    base.Dispose(disposing);
}
/// <summary>
/// Converts an NFS protocol failure into the <see cref="IOException"/> surfaced to callers.
/// </summary>
/// <param name="ne">The NFS-level exception to convert.</param>
/// <returns>An <see cref="IOException"/> wrapping <paramref name="ne"/>.</returns>
private static Exception ConvertNfsException(Nfs3Exception ne)
{
    // Return (rather than throw) the converted exception: the method is declared
    // to return Exception and every call site already does
    // "throw ConvertNfsException(ne);".  Previously the method threw internally,
    // leaving its declared return value unreachable and rooting the stack trace
    // here instead of at the caller.
    return new IOException("NFS Status: " + ne.Message, ne);
}
// Recursively walks the directory at 'path', adding the full path of every
// entry whose name matches 'regex' to 'results'.  'dirs'/'files' select which
// entry kinds are reported; 'subFolders' enables recursion.
private void DoSearch(List<string> results, string path, Regex regex, bool subFolders, bool dirs, bool files)
{
    Nfs3FileHandle dirHandle = GetDirectory(path);
    foreach (Nfs3DirectoryEntry entry in _client.ReadDirectory(dirHandle, true))
    {
        // Skip the self / parent pseudo-entries.
        if (entry.Name == "." || entry.Name == "..")
        {
            continue;
        }

        bool entryIsDir = entry.FileAttributes.Type == Nfs3FileType.Directory;
        bool reportable = entryIsDir ? dirs : files;

        if (reportable)
        {
            // DOS-style wildcard regexes expect an extension separator, so an
            // extension-less name is matched as "name.".
            string searchName = (entry.Name.IndexOf('.') == -1) ? entry.Name + "." : entry.Name;

            if (regex.IsMatch(searchName))
            {
                results.Add(Utilities.CombinePaths(path, entry.Name));
            }
        }

        if (entryIsDir && subFolders)
        {
            DoSearch(results, Utilities.CombinePaths(path, entry.Name), regex, subFolders, dirs, files);
        }
    }
}
// Resolves 'path' to the NFS handle of the named file or directory,
// throwing FileNotFoundException when the final component does not exist.
private Nfs3FileHandle GetFile(string path)
{
    string fileName = Utilities.GetFileFromPath(path);
    Nfs3FileHandle parentDir = GetParentDirectory(path);

    Nfs3FileHandle result = _client.Lookup(parentDir, fileName);
    if (result == null)
    {
        throw new FileNotFoundException("No such file or directory", path);
    }

    return result;
}
// Resolves the directory portion of 'path' (relative to the NFS root) to a handle.
private Nfs3FileHandle GetParentDirectory(string path)
{
    string dirPart = Utilities.GetDirectoryFromPath(path);
    string[] components = dirPart.Split(new[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
    return GetDirectory(_client.RootHandle, components);
}
// Resolves a backslash-separated path (relative to the NFS root) to a directory handle.
private Nfs3FileHandle GetDirectory(string path)
{
    string[] components = path.Split(new[] { '\\' }, StringSplitOptions.RemoveEmptyEntries);
    return GetDirectory(_client.RootHandle, components);
}
// Walks the directory components in 'dirs' starting from 'parent', returning the
// handle of the final directory.  Throws DirectoryNotFoundException when any
// component is missing or is not a directory.
private Nfs3FileHandle GetDirectory(Nfs3FileHandle parent, string[] dirs)
{
    if (dirs == null)
    {
        return parent;
    }

    Nfs3FileHandle current = parent;
    foreach (string name in dirs)
    {
        current = _client.Lookup(current, name);

        if (current == null || _client.GetAttributes(current).Type != Nfs3FileType.Directory)
        {
            throw new DirectoryNotFoundException();
        }
    }

    return current;
}
}
}
| |
#if (UNITY_WINRT || UNITY_WP_8_1) && !UNITY_EDITOR && !UNITY_WP8
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace Newtonsoft.Json.Linq
{
/// <summary>
/// A collection of <see cref="JToken"/> items, each expected to be a
/// <see cref="JProperty"/>, indexed by property name with ordinal comparison.
/// </summary>
internal class JPropertyKeyedCollection : Collection<JToken>
{
    private static readonly IEqualityComparer<string> Comparer = StringComparer.Ordinal;

    // Name -> property lookup; created lazily on the first key addition.
    private Dictionary<string, JToken> _dictionary;

    private void AddKey(string key, JToken item)
    {
        EnsureDictionary();
        _dictionary[key] = item;
    }

    protected void ChangeItemKey(JToken item, string newKey)
    {
        if (!ContainsItem(item))
            throw new ArgumentException("The specified item does not exist in this KeyedCollection.");

        string oldKey = GetKeyForItem(item);
        if (Comparer.Equals(oldKey, newKey))
            return;

        if (newKey != null)
            AddKey(newKey, item);

        if (oldKey != null)
            RemoveKey(oldKey);
    }

    protected override void ClearItems()
    {
        base.ClearItems();

        if (_dictionary != null)
            _dictionary.Clear();
    }

    public bool Contains(string key)
    {
        if (key == null)
            throw new ArgumentNullException("key");

        return (_dictionary != null) && _dictionary.ContainsKey(key);
    }

    private bool ContainsItem(JToken item)
    {
        if (_dictionary == null)
            return false;

        JToken ignored;
        return _dictionary.TryGetValue(GetKeyForItem(item), out ignored);
    }

    private void EnsureDictionary()
    {
        if (_dictionary == null)
            _dictionary = new Dictionary<string, JToken>(Comparer);
    }

    private string GetKeyForItem(JToken item)
    {
        // Items are JProperty instances; the property name is the key.
        return ((JProperty)item).Name;
    }

    protected override void InsertItem(int index, JToken item)
    {
        AddKey(GetKeyForItem(item), item);
        base.InsertItem(index, item);
    }

    public bool Remove(string key)
    {
        if (key == null)
            throw new ArgumentNullException("key");

        if (_dictionary == null)
            return false;

        // Removing via the item keeps the ordered list and the lookup in sync.
        return _dictionary.ContainsKey(key) && Remove(_dictionary[key]);
    }

    protected override void RemoveItem(int index)
    {
        RemoveKey(GetKeyForItem(Items[index]));
        base.RemoveItem(index);
    }

    private void RemoveKey(string key)
    {
        if (_dictionary != null)
            _dictionary.Remove(key);
    }

    protected override void SetItem(int index, JToken item)
    {
        string newKey = GetKeyForItem(item);
        string oldKey = GetKeyForItem(Items[index]);

        if (Comparer.Equals(oldKey, newKey))
        {
            // Same key: just repoint the lookup entry (if the lookup exists).
            if (_dictionary != null)
                _dictionary[newKey] = item;
        }
        else
        {
            AddKey(newKey, item);

            if (oldKey != null)
                RemoveKey(oldKey);
        }

        base.SetItem(index, item);
    }

    public JToken this[string key]
    {
        get
        {
            if (key == null)
                throw new ArgumentNullException("key");

            if (_dictionary == null)
                throw new KeyNotFoundException();

            return _dictionary[key];
        }
    }

    public bool TryGetValue(string key, out JToken value)
    {
        if (_dictionary != null)
            return _dictionary.TryGetValue(key, out value);

        value = null;
        return false;
    }

    public ICollection<string> Keys
    {
        get
        {
            EnsureDictionary();
            return _dictionary.Keys;
        }
    }

    public ICollection<JToken> Values
    {
        get
        {
            EnsureDictionary();
            return _dictionary.Values;
        }
    }

    public bool Compare(JPropertyKeyedCollection other)
    {
        if (this == other)
            return true;

        // dictionaries in JavaScript aren't ordered
        // ignore order when comparing properties
        Dictionary<string, JToken> first = _dictionary;
        Dictionary<string, JToken> second = other._dictionary;

        if (first == null && second == null)
            return true;
        if (first == null)
            return (second.Count == 0);
        if (second == null)
            return (first.Count == 0);
        if (first.Count != second.Count)
            return false;

        foreach (KeyValuePair<string, JToken> pair in first)
        {
            JToken otherToken;
            if (!second.TryGetValue(pair.Key, out otherToken))
                return false;

            JProperty firstProperty = (JProperty)pair.Value;
            JProperty secondProperty = (JProperty)otherToken;
            if (!firstProperty.Value.DeepEquals(secondProperty.Value))
                return false;
        }

        return true;
    }
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
** Purpose: Unsafe code that uses pointers should use
** SafePointer to fix subtle lifetime problems with the
** underlying resource.
**
===========================================================*/
// Design points:
// *) Avoid handle-recycling problems (including ones triggered via
// resurrection attacks) for all accesses via pointers. This requires tying
// together the lifetime of the unmanaged resource with the code that reads
// from that resource, in a package that uses synchronization to enforce
// the correct semantics during finalization. We're using SafeHandle's
// ref count as a gate on whether the pointer can be dereferenced because that
// controls the lifetime of the resource.
//
// *) Keep the penalties for using this class small, both in terms of space
// and time. Having multiple threads reading from a memory mapped file
// will already require 2 additional interlocked operations. If we add in
// a "current position" concept, that requires additional space in memory and
// synchronization. Since the position in memory is often (but not always)
// something that can be stored on the stack, we can save some memory by
// excluding it from this object. However, avoiding the need for
// synchronization is a more significant win. This design allows multiple
// threads to read and write memory simultaneously without locks (as long as
// you don't write to a region of memory that overlaps with what another
// thread is accessing).
//
// *) Space-wise, we use the following memory, including SafeHandle's fields:
// Object Header MT* handle int bool bool <2 pad bytes> length
// On 32 bit platforms: 24 bytes. On 64 bit platforms: 40 bytes.
// (We can safe 4 bytes on x86 only by shrinking SafeHandle)
//
// *) Wrapping a SafeHandle would have been a nice solution, but without an
// ordering between critical finalizable objects, it would have required
// changes to each SafeHandle subclass to opt in to being usable from a
// SafeBuffer (or some clever exposure of SafeHandle's state fields and a
// way of forcing ReleaseHandle to run even after the SafeHandle has been
// finalized with a ref count > 1). We can use less memory and create fewer
// objects by simply inserting a SafeBuffer into the class hierarchy.
//
// *) In an ideal world, we could get marshaling support for SafeBuffer that
// would allow us to annotate a P/Invoke declaration, saying this parameter
// specifies the length of the buffer, and the units of that length are X.
// P/Invoke would then pass that size parameter to SafeBuffer.
// [DllImport(...)]
// static extern SafeMemoryHandle AllocCharBuffer(int numChars);
// If we could put an attribute on the SafeMemoryHandle saying numChars is
// the element length, and it must be multiplied by 2 to get to the byte
// length, we can simplify the usage model for SafeBuffer.
//
// *) This class could benefit from a constraint saying T is a value type
// containing no GC references.
// Implementation notes:
// *) The Initialize method must be called before you use any instance of
// a SafeBuffer. To avoid race conditions when storing SafeBuffers in statics,
// you either need to take a lock when publishing the SafeBuffer, or you
// need to create a local, initialize the SafeBuffer, then assign to the
// static variable (perhaps using Interlocked.CompareExchange). Of course,
// assignments in a static class constructor are under a lock implicitly.
using System;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Runtime.ConstrainedExecution;
using System.Runtime.Versioning;
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Diagnostics.Contracts;
namespace System.Runtime.InteropServices
{
public abstract unsafe class SafeBuffer : SafeHandleZeroOrMinusOneIsInvalid
{
    // Steal UIntPtr.MaxValue as our uninitialized value.
    private static readonly UIntPtr Uninitialized = (UIntPtr.Size == 4) ?
        ((UIntPtr)UInt32.MaxValue) : ((UIntPtr)UInt64.MaxValue);

    // Total number of bytes in the buffer; equals Uninitialized until one of the
    // Initialize overloads has been called.
    private UIntPtr _numBytes;

    protected SafeBuffer(bool ownsHandle) : base(ownsHandle)
    {
        _numBytes = Uninitialized;
    }

    /// <summary>
    /// Specifies the size of the region of memory, in bytes.  Must be
    /// called before using the SafeBuffer.
    /// </summary>
    /// <param name="numBytes">Number of valid bytes in memory.</param>
    [CLSCompliant(false)]
    public void Initialize(ulong numBytes)
    {
        if (IntPtr.Size == 4 && numBytes > UInt32.MaxValue)
            throw new ArgumentOutOfRangeException(nameof(numBytes), SR.ArgumentOutOfRange_AddressSpace);
        Contract.EndContractBlock();

        if (numBytes >= (ulong)Uninitialized)
            throw new ArgumentOutOfRangeException(nameof(numBytes), SR.ArgumentOutOfRange_UIntPtrMax);

        _numBytes = (UIntPtr)numBytes;
    }

    /// <summary>
    /// Specifies the size of the region in memory, as the number of
    /// elements in an array.  Must be called before using the SafeBuffer.
    /// </summary>
    /// <param name="numElements">Number of elements in the buffer.</param>
    /// <param name="sizeOfEachElement">Size of each element, in bytes.</param>
    [CLSCompliant(false)]
    public void Initialize(uint numElements, uint sizeOfEachElement)
    {
        // Multiply in 64-bit space.  The previous uint * uint product wrapped on
        // overflow, which made the 32-bit address-space check below dead code (a
        // uint can never exceed UInt32.MaxValue) and let an overflowed, too-small
        // byte length be recorded silently.
        ulong numBytes = (ulong)numElements * sizeOfEachElement;

        if (IntPtr.Size == 4 && numBytes > UInt32.MaxValue)
            throw new ArgumentOutOfRangeException(nameof(numElements), SR.ArgumentOutOfRange_AddressSpace);
        Contract.EndContractBlock();

        if (numBytes >= (ulong)Uninitialized)
            throw new ArgumentOutOfRangeException(nameof(numElements), SR.ArgumentOutOfRange_UIntPtrMax);

        _numBytes = checked((UIntPtr)numBytes);
    }

    /// <summary>
    /// Specifies the size of the region in memory, as the number of
    /// elements of type T in an array.  Must be called before using the SafeBuffer.
    /// </summary>
    /// <typeparam name="T">The element type stored in the buffer.</typeparam>
    /// <param name="numElements">Number of elements in the buffer.</param>
    [CLSCompliant(false)]
    public void Initialize<T>(uint numElements) where T : struct
    {
        Initialize(numElements, Marshal.AlignedSizeOf<T>());
    }

    // Callers should ensure that they check whether the pointer ref param
    // is null when AcquirePointer returns.  If it is not null, they must
    // call ReleasePointer in a CER.  This method calls DangerousAddRef
    // & exposes the pointer.  Unlike Read, it does not alter the "current
    // position" of the pointer.  Here's how to use it:
    //
    // byte* pointer = null;
    // RuntimeHelpers.PrepareConstrainedRegions();
    // try {
    //     safeBuffer.AcquirePointer(ref pointer);
    //     // Use pointer here, with your own bounds checking
    // }
    // finally {
    //     if (pointer != null)
    //         safeBuffer.ReleasePointer();
    // }
    //
    // Note: If you cast this byte* to a T*, you have to worry about
    // whether your pointer is aligned.  Additionally, you must take
    // responsibility for all bounds checking with this pointer.
    /// <summary>
    /// Obtain the pointer from a SafeBuffer for a block of code,
    /// with the express responsibility for bounds checking and calling
    /// ReleasePointer later within a CER to ensure the pointer can be
    /// freed later.  This method either completes successfully or
    /// throws an exception and returns with pointer set to null.
    /// </summary>
    /// <param name="pointer">A byte*, passed by reference, to receive
    /// the pointer from within the SafeBuffer.  You must set
    /// pointer to null before calling this method.</param>
    [CLSCompliant(false)]
    public void AcquirePointer(ref byte* pointer)
    {
        if (_numBytes == Uninitialized)
            throw NotInitialized();

        pointer = null;
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
        }
        finally
        {
            // The empty-try/finally CER pattern guarantees the AddRef and the
            // pointer publication cannot be separated by a thread abort.
            bool junk = false;
            DangerousAddRef(ref junk);
            pointer = (byte*)handle;
        }
    }

    /// <summary>
    /// Releases the reference taken by <see cref="AcquirePointer"/>, allowing
    /// the underlying handle to be freed.
    /// </summary>
    public void ReleasePointer()
    {
        if (_numBytes == Uninitialized)
            throw NotInitialized();

        DangerousRelease();
    }

    /// <summary>
    /// Read a value type from memory at the given offset.  This is
    /// equivalent to:  return *(T*)(bytePtr + byteOffset);
    /// </summary>
    /// <typeparam name="T">The value type to read</typeparam>
    /// <param name="byteOffset">Where to start reading from memory.  You
    /// may have to consider alignment.</param>
    /// <returns>An instance of T read from memory.</returns>
    [CLSCompliant(false)]
    public T Read<T>(ulong byteOffset) where T : struct
    {
        if (_numBytes == Uninitialized)
            throw NotInitialized();

        uint sizeofT = Marshal.SizeOfType(typeof(T));
        byte* ptr = (byte*)handle + byteOffset;
        SpaceCheck(ptr, sizeofT);

        // return *(T*) (_ptr + byteOffset);
        T value;
        bool mustCallRelease = false;
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            DangerousAddRef(ref mustCallRelease);

            GenericPtrToStructure<T>(ptr, out value, sizeofT);
        }
        finally
        {
            if (mustCallRelease)
                DangerousRelease();
        }
        return value;
    }

    /// <summary>
    /// Reads <paramref name="count"/> value types from memory starting at
    /// <paramref name="byteOffset"/> into <paramref name="array"/> at
    /// <paramref name="index"/>, using the aligned size of T as the stride.
    /// </summary>
    [CLSCompliant(false)]
    public void ReadArray<T>(ulong byteOffset, T[] array, int index, int count)
        where T : struct
    {
        if (array == null)
            throw new ArgumentNullException(nameof(array), SR.ArgumentNull_Buffer);
        if (index < 0)
            throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (count < 0)
            throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (array.Length - index < count)
            throw new ArgumentException(SR.Argument_InvalidOffLen);
        Contract.EndContractBlock();

        if (_numBytes == Uninitialized)
            throw NotInitialized();

        uint sizeofT = Marshal.SizeOfType(typeof(T));
        uint alignedSizeofT = Marshal.AlignedSizeOf<T>();
        byte* ptr = (byte*)handle + byteOffset;
        SpaceCheck(ptr, checked((ulong)(alignedSizeofT * count)));

        bool mustCallRelease = false;
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            DangerousAddRef(ref mustCallRelease);

            for (int i = 0; i < count; i++)
                unsafe { GenericPtrToStructure<T>(ptr + alignedSizeofT * i, out array[i + index], sizeofT); }
        }
        finally
        {
            if (mustCallRelease)
                DangerousRelease();
        }
    }

    /// <summary>
    /// Write a value type to memory at the given offset.  This is
    /// equivalent to:  *(T*)(bytePtr + byteOffset) = value;
    /// </summary>
    /// <typeparam name="T">The type of the value type to write to memory.</typeparam>
    /// <param name="byteOffset">The location in memory to write to.  You
    /// may have to consider alignment.</param>
    /// <param name="value">The value type to write to memory.</param>
    [CLSCompliant(false)]
    public void Write<T>(ulong byteOffset, T value) where T : struct
    {
        if (_numBytes == Uninitialized)
            throw NotInitialized();

        uint sizeofT = Marshal.SizeOfType(typeof(T));
        byte* ptr = (byte*)handle + byteOffset;
        SpaceCheck(ptr, sizeofT);

        // *((T*) (_ptr + byteOffset)) = value;
        bool mustCallRelease = false;
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            DangerousAddRef(ref mustCallRelease);

            GenericStructureToPtr(ref value, ptr, sizeofT);
        }
        finally
        {
            if (mustCallRelease)
                DangerousRelease();
        }
    }

    /// <summary>
    /// Writes <paramref name="count"/> value types from <paramref name="array"/>
    /// (starting at <paramref name="index"/>) into memory at
    /// <paramref name="byteOffset"/>, using the aligned size of T as the stride.
    /// </summary>
    [CLSCompliant(false)]
    public void WriteArray<T>(ulong byteOffset, T[] array, int index, int count)
        where T : struct
    {
        if (array == null)
            throw new ArgumentNullException(nameof(array), SR.ArgumentNull_Buffer);
        if (index < 0)
            throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (count < 0)
            throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (array.Length - index < count)
            throw new ArgumentException(SR.Argument_InvalidOffLen);
        Contract.EndContractBlock();

        if (_numBytes == Uninitialized)
            throw NotInitialized();

        uint sizeofT = Marshal.SizeOfType(typeof(T));
        uint alignedSizeofT = Marshal.AlignedSizeOf<T>();
        byte* ptr = (byte*)handle + byteOffset;
        SpaceCheck(ptr, checked((ulong)(alignedSizeofT * count)));

        bool mustCallRelease = false;
        RuntimeHelpers.PrepareConstrainedRegions();
        try
        {
            DangerousAddRef(ref mustCallRelease);

            for (int i = 0; i < count; i++)
                unsafe { GenericStructureToPtr(ref array[i + index], ptr + alignedSizeofT * i, sizeofT); }
        }
        finally
        {
            if (mustCallRelease)
                DangerousRelease();
        }
    }

    /// <summary>
    /// Returns the number of bytes in the memory region.
    /// </summary>
    [CLSCompliant(false)]
    public ulong ByteLength
    {
        get
        {
            if (_numBytes == Uninitialized)
                throw NotInitialized();

            return (ulong)_numBytes;
        }
    }

    /* No indexer.  The perf would be misleadingly bad.  People should use
     * AcquirePointer and ReleasePointer instead.  */

    // Throws when [ptr, ptr + sizeInBytes) is not fully inside the buffer.
    private void SpaceCheck(byte* ptr, ulong sizeInBytes)
    {
        if ((ulong)_numBytes < sizeInBytes)
            NotEnoughRoom();
        if ((ulong)(ptr - (byte*)handle) > ((ulong)_numBytes) - sizeInBytes)
            NotEnoughRoom();
    }

    private static void NotEnoughRoom()
    {
        throw new ArgumentException(SR.Arg_BufferTooSmall);
    }

    private static InvalidOperationException NotInitialized()
    {
        return new InvalidOperationException(SR.InvalidOperation_MustCallInitialize);
    }

    // FCALL limitations mean we can't have generic FCALL methods.  However, we can pass
    // TypedReferences to FCALL methods.
    internal static void GenericPtrToStructure<T>(byte* ptr, out T structure, uint sizeofT) where T : struct
    {
        structure = default(T); // Dummy assignment to silence the compiler
        PtrToStructureNative(ptr, __makeref(structure), sizeofT);
    }

    [MethodImpl(MethodImplOptions.InternalCall)]
    private static extern void PtrToStructureNative(byte* ptr, /*out T*/ TypedReference structure, uint sizeofT);

    internal static void GenericStructureToPtr<T>(ref T structure, byte* ptr, uint sizeofT) where T : struct
    {
        StructureToPtrNative(__makeref(structure), ptr, sizeofT);
    }

    [MethodImpl(MethodImplOptions.InternalCall)]
    private static extern void StructureToPtrNative(/*ref T*/ TypedReference structure, byte* ptr, uint sizeofT);
}
}
| |
using Microsoft.IdentityModel;
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.S2S.Tokens;
using Microsoft.SharePoint.Client;
using Microsoft.SharePoint.Client.EventReceivers;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.ServiceModel;
using System.Text;
using System.Web;
using System.Web.Configuration;
using System.Web.Script.Serialization;
using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction;
using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException;
using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration;
using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials;
namespace BusinessApps.O365ProjectsApp.Infrastructure
{
public static class TokenHelper
{
#region public fields
/// <summary>
/// SharePoint principal.
/// </summary>
public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000";
/// <summary>
/// Lifetime of HighTrust access token, 12 hours.
/// </summary>
public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0);
#endregion public fields
#region public methods
/// <summary>
/// Retrieves the context token string from the specified request by looking for well-known parameter names in the
/// POSTed form parameters and the querystring. Returns null if no context token is found.
/// </summary>
/// <param name="request">HttpRequest in which to look for a context token</param>
/// <returns>The context token string</returns>
public static string GetContextTokenFromRequest(HttpRequest request)
{
    // Wrap the concrete request so the HttpRequestBase overload does the work.
    HttpRequestBase wrappedRequest = new HttpRequestWrapper(request);
    return GetContextTokenFromRequest(wrappedRequest);
}
/// <summary>
/// Retrieves the context token string from the specified request by looking for well-known parameter names in the
/// POSTed form parameters and the querystring. Returns null if no context token is found.
/// </summary>
/// <param name="request">HttpRequest in which to look for a context token</param>
/// <returns>The context token string</returns>
public static string GetContextTokenFromRequest(HttpRequestBase request)
{
    string[] candidateNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" };

    foreach (string candidate in candidateNames)
    {
        // POSTed form values take precedence over query string values.
        string formValue = request.Form[candidate];
        if (!string.IsNullOrEmpty(formValue))
        {
            return formValue;
        }

        string queryValue = request.QueryString[candidate];
        if (!string.IsNullOrEmpty(queryValue))
        {
            return queryValue;
        }
    }

    return null;
}
/// <summary>
/// Validate that a specified context token string is intended for this application based on the parameters
/// specified in web.config. Parameters used from web.config used for validation include ClientId,
/// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present,
/// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not
/// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an
/// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents
/// and a JsonWebSecurityToken based on the context token is returned.
/// </summary>
/// <param name="contextTokenString">The context token to validate</param>
/// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation.
/// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used
/// for validation instead of <paramref name="appHostName"/> .</param>
/// <returns>A JsonWebSecurityToken based on the context token.</returns>
public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null)
{
    // Parse the raw token into a SharePoint context token.
    JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler();
    SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString);
    JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken;
    SharePointContextToken token = SharePointContextToken.Create(jsonToken);

    // Split the token's STS authority ("prefix.acshost") into the global endpoint
    // prefix and the ACS host, updating TokenHelper's static state BEFORE
    // validation.  NOTE(review): validation appears to depend on these statics
    // being set first — confirm before reordering.
    string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority;
    int firstDot = stsAuthority.IndexOf('.');

    GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot);
    AcsHostUrl = stsAuthority.Substring(firstDot + 1);

    // Signature/lifetime validation; throws if the token itself is invalid.
    tokenHandler.ValidateToken(jsonToken);

    // Build the list of acceptable audience host names, in priority order:
    // HostedAppHostNameOverride (semicolon-separated), then the explicit
    // appHostName argument, then the HostedAppHostName config setting.
    string[] acceptableAudiences;
    if (!String.IsNullOrEmpty(HostedAppHostNameOverride))
    {
        acceptableAudiences = HostedAppHostNameOverride.Split(';');
    }
    else if (appHostName == null)
    {
        acceptableAudiences = new[] { HostedAppHostName };
    }
    else
    {
        acceptableAudiences = new[] { appHostName };
    }

    // The token's audience must match "clientId/audience@realm" for at least
    // one acceptable audience (case-insensitive comparison).
    bool validationSuccessful = false;
    string realm = Realm ?? token.Realm;
    foreach (var audience in acceptableAudiences)
    {
        string principal = GetFormattedPrincipal(ClientId, audience, realm);
        if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal))
        {
            validationSuccessful = true;
            break;
        }
    }

    if (!validationSuccessful)
    {
        throw new AudienceUriValidationFailedException(
            String.Format(CultureInfo.CurrentCulture,
            "\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience));
    }

    return token;
}
/// <summary>
/// Retrieves an access token from ACS to call the source of the specified context token at the specified
/// targetHost. The targetHost must be registered for the principal that sent the context token.
/// </summary>
/// <param name="contextToken">Context token issued by the intended access token audience</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <returns>An access token with an audience matching the context token's source</returns>
public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost)
{
    // The refresh token carried inside the context token is what gets exchanged
    // with ACS for an access token; without it there is nothing to exchange.
    string refreshToken = contextToken.RefreshToken;
    if (String.IsNullOrEmpty(refreshToken))
    {
        return null;
    }

    string realm = Realm ?? contextToken.Realm;

    return GetAccessToken(
        refreshToken,
        contextToken.TargetPrincipalName,
        targetHost,
        realm);
}
/// <summary>
/// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="authorizationCode">Authorization code to exchange for access token</param>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAccessToken(
    string authorizationCode,
    string targetPrincipalName,
    string targetHost,
    string targetRealm,
    Uri redirectUri)
{
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }

    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, null, targetRealm);

    // Create request for token. The redirect URI must match the one registered
    // for this app, or ACS will reject the exchange.
    OAuth2AccessTokenRequest oauth2Request =
        OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode(
            clientId,
            ClientSecret,
            authorizationCode,
            redirectUri,
            resource);

    // Get token
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // A WebException raised before any response arrives (DNS failure,
        // timeout, connection refused) carries a null Response; rethrow the
        // original instead of crashing with a NullReferenceException below.
        if (wex.Response == null)
        {
            throw;
        }

        // Surface the error body ACS returned, which carries the real failure detail.
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }

    return oauth2Response;
}
/// <summary>
/// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="refreshToken">Refresh token to exchange for access token</param>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAccessToken(
    string refreshToken,
    string targetPrincipalName,
    string targetHost,
    string targetRealm)
{
    // Fall back to the realm configured in web.config when the caller did not supply one.
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }
    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, null, targetRealm);
    OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource);
    // Exchange the refresh token at the STS endpoint of the target realm.
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // wex.Response can be null (DNS failure, timeout, connection refused);
        // rethrow unchanged in that case instead of dereferencing null.
        if (wex.Response == null)
        {
            throw;
        }
        // Append the response body to the exception message to ease diagnosis of ACS errors.
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }
    return oauth2Response;
}
/// <summary>
/// Retrieves an app-only access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAppOnlyAccessToken(
    string targetPrincipalName,
    string targetHost,
    string targetRealm)
{
    // Fall back to the realm configured in web.config when the caller did not supply one.
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }
    string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm);
    OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource);
    oauth2Request.Resource = resource;
    // Exchange client credentials at the STS endpoint of the target realm.
    OAuth2S2SClient client = new OAuth2S2SClient();
    OAuth2AccessTokenResponse oauth2Response;
    try
    {
        oauth2Response =
            client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // wex.Response can be null (DNS failure, timeout, connection refused);
        // rethrow unchanged in that case instead of dereferencing null.
        if (wex.Response == null)
        {
            throw;
        }
        // Append the response body to the exception message to ease diagnosis of ACS errors.
        using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseText = sr.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseText, wex);
        }
    }
    return oauth2Response;
}
/// <summary>
/// Creates a client context based on the properties of a remote event receiver
/// </summary>
/// <param name="properties">Properties of a remote event receiver</param>
/// <returns>A ClientContext ready to call the web where the event originated</returns>
public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties)
{
    // Determine the originating web url, checking list, item and web events in turn.
    string webUrl = null;
    if (properties.ListEventProperties != null)
    {
        webUrl = properties.ListEventProperties.WebUrl;
    }
    else if (properties.ItemEventProperties != null)
    {
        webUrl = properties.ItemEventProperties.WebUrl;
    }
    else if (properties.WebEventProperties != null)
    {
        webUrl = properties.WebEventProperties.FullUrl;
    }

    // None of the known event property sets was present; nothing to connect to.
    if (webUrl == null)
    {
        return null;
    }

    Uri sharepointUrl = new Uri(webUrl);

    // High trust (S2S) apps authenticate with their own certificate; everything else goes through ACS.
    return IsHighTrustApp()
        ? GetS2SClientContextWithWindowsIdentity(sharepointUrl, null)
        : CreateAcsClientContextForUrl(properties, sharepointUrl);
}
/// <summary>
/// Creates a client context based on the properties of an app event
/// </summary>
/// <param name="properties">Properties of an app event</param>
/// <param name="useAppWeb">True to target the app web, false to target the host web</param>
/// <returns>A ClientContext ready to call the app web or the parent web</returns>
public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb)
{
    // App event payloads carry their own property bag; bail out when it is absent.
    if (properties.AppEventProperties == null)
    {
        return null;
    }

    Uri sharepointUrl;
    if (useAppWeb)
    {
        sharepointUrl = properties.AppEventProperties.AppWebFullUrl;
    }
    else
    {
        sharepointUrl = properties.AppEventProperties.HostWebFullUrl;
    }

    // High trust (S2S) apps authenticate with their own certificate; everything else goes through ACS.
    return IsHighTrustApp()
        ? GetS2SClientContextWithWindowsIdentity(sharepointUrl, null)
        : CreateAcsClientContextForUrl(properties, sharepointUrl);
}
/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string authorizationCode,
    Uri redirectUri)
{
    // Discover the realm from the target site, then delegate to the full overload
    // using the default SharePoint principal.
    string targetRealm = GetRealmFromTargetUrl(new Uri(targetUrl));
    return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, targetRealm, redirectUri);
}
/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="targetPrincipalName">Name of the target SharePoint principal</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string targetPrincipalName,
    string authorizationCode,
    string targetRealm,
    Uri redirectUri)
{
    Uri targetUri = new Uri(targetUrl);
    // Exchange the authorization code for an access token, then wrap it in a client context.
    OAuth2AccessTokenResponse tokenResponse =
        GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri);
    return GetClientContextWithAccessToken(targetUrl, tokenResponse.AccessToken);
}
/// <summary>
/// Uses the specified access token to create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="accessToken">Access token to be used when calling the specified targetUrl</param>
/// <returns>A ClientContext ready to call targetUrl with the specified access token</returns>
public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken)
{
    // Authentication is carried entirely by the bearer header, so CSOM's own
    // authentication and form-digest handling are switched off.
    ClientContext clientContext = new ClientContext(targetUrl)
    {
        AuthenticationMode = ClientAuthenticationMode.Anonymous,
        FormDigestHandlingEnabled = false
    };
    // Attach the OAuth bearer token to every outgoing request.
    clientContext.ExecutingWebRequest += (oSender, webRequestEventArgs) =>
    {
        webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] =
            "Bearer " + accessToken;
    };
    return clientContext;
}
/// <summary>
/// Retrieves an access token from ACS using the specified context token, and uses that access token to create
/// a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="contextTokenString">Context token received from the target SharePoint site</param>
/// <param name="appHostUrl">Url authority of the hosted app. If this is null, the value in the HostedAppHostName
/// of web.config will be used instead</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithContextToken(
    string targetUrl,
    string contextTokenString,
    string appHostUrl)
{
    // Validate the incoming context token, exchange it for an access token,
    // then wrap that token in a client context.
    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl);
    Uri targetUri = new Uri(targetUrl);
    OAuth2AccessTokenResponse tokenResponse = GetAccessToken(contextToken, targetUri.Authority);
    return GetClientContextWithAccessToken(targetUrl, tokenResponse.AccessToken);
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope)
{
    // Compose the OAuth authorize url: site root + authorize page + query string.
    return EnsureTrailingSlash(contextUrl)
        + AuthorizationPage
        + "?IsDlg=1&client_id=" + ClientId
        + "&scope=" + scope
        + "&response_type=code";
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is
/// granted</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri)
{
    // Same as the two-argument overload, with an explicit post-consent redirect target.
    return EnsureTrailingSlash(contextUrl)
        + AuthorizationPage
        + "?IsDlg=1&client_id=" + ClientId
        + "&scope=" + scope
        + "&response_type=code&redirect_uri=" + redirectUri;
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request a new context token.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param>
/// <returns>Url of the SharePoint site's context token redirect page</returns>
public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri)
{
    // Compose the redirect-page url: site root + AppRedirect page + query string.
    return EnsureTrailingSlash(contextUrl)
        + RedirectPage
        + "?client_id=" + ClientId
        + "&redirect_uri=" + redirectUri;
}
/// <summary>
/// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified
/// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in
/// web.config, an auth challenge will be issued to the targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>An access token with an audience of the target principal</returns>
public static string GetS2SAccessTokenWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Use the configured realm when present, otherwise discover it via an auth challenge.
    string realm;
    if (string.IsNullOrEmpty(Realm))
    {
        realm = GetRealmFromTargetUrl(targetApplicationUri);
    }
    else
    {
        realm = Realm;
    }

    // No identity means an app-only token (no user claims).
    JsonWebTokenClaim[] claims = null;
    if (identity != null)
    {
        claims = GetClaimsWithWindowsIdentity(identity);
    }

    return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
}
/// <summary>
/// Retrieves an S2S client context with an access token signed by the application's private certificate on
/// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the
/// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the
/// targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>A ClientContext using an access token with an audience of the target application</returns>
public static ClientContext GetS2SClientContextWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Reuse the token-creation overload instead of duplicating the realm/claims
    // computation here; both paths must stay in sync.
    string accessToken = GetS2SAccessTokenWithWindowsIdentity(targetApplicationUri, identity);
    return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken);
}
/// <summary>
/// Get authentication realm from SharePoint
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <returns>String representation of the realm GUID, or null if it could not be determined</returns>
public static string GetRealmFromTargetUrl(Uri targetApplicationUri)
{
// Send a deliberately unauthenticated request (empty bearer token) to the site's
// client.svc endpoint; the 401 challenge it provokes carries the realm GUID in
// the WWW-Authenticate header.
WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc");
request.Headers.Add("Authorization: Bearer ");
try
{
// The request is expected to fail; a success response carries no realm information.
using (request.GetResponse())
{
}
}
catch (WebException e)
{
// Non-protocol failures (DNS, timeout) have no response to inspect.
if (e.Response == null)
{
return null;
}
string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"];
if (string.IsNullOrEmpty(bearerResponseHeader))
{
return null;
}
// The realm GUID follows the literal prefix below inside the challenge header.
const string bearer = "Bearer realm=\"";
int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal);
if (bearerIndex < 0)
{
return null;
}
int realmIndex = bearerIndex + bearer.Length;
// 36 characters = the canonical string length of a GUID (8-4-4-4-12 with hyphens).
if (bearerResponseHeader.Length >= realmIndex + 36)
{
string targetRealm = bearerResponseHeader.Substring(realmIndex, 36);
Guid realmGuid;
// Only accept the extracted substring if it actually parses as a GUID.
if (Guid.TryParse(targetRealm, out realmGuid))
{
return targetRealm;
}
}
}
// Fall through: challenge missing or malformed.
return null;
}
/// <summary>
/// Determines if this is a high trust app.
/// </summary>
/// <returns>True if this is a high trust app.</returns>
public static bool IsHighTrustApp()
{
    // An app is high trust exactly when a client signing certificate was configured,
    // which is the only way SigningCredentials gets initialized.
    return null != SigningCredentials;
}
/// <summary>
/// Ensures that the specified URL ends with '/' if it is not null or empty.
/// </summary>
/// <param name="url">The url.</param>
/// <returns>The url ending with '/' if it is not null or empty.</returns>
public static string EnsureTrailingSlash(string url)
{
    // Null and empty inputs are returned untouched.
    if (string.IsNullOrEmpty(url))
    {
        return url;
    }
    // Append a slash only when one is not already present.
    return url.EndsWith("/", StringComparison.Ordinal) ? url : url + "/";
}
#endregion
#region private fields
//
// Configuration Constants
//
// Relative urls of the SharePoint pages used for OAuth consent and context-token redirects.
private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx";
private const string RedirectPage = "_layouts/15/AppRedirect.aspx";
// Well-known principal id of Azure ACS.
private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000";
private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1";
// Protocol names used to locate endpoints in the ACS metadata document.
private const string S2SProtocol = "OAuth2";
private const string DelegationIssuance = "DelegationIssuance1.0";
// Claim type names used when issuing high trust (S2S) tokens.
private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier;
private const string TrustedForImpersonationClaimType = "trustedfordelegation";
private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken;
//
// Environment Constants
//
// Host name pieces of the global ACS endpoint: https://{GlobalEndPointPrefix}.{AcsHostUrl}/
private static string GlobalEndPointPrefix = "accounts";
private static string AcsHostUrl = "accesscontrol.windows.net";
//
// Hosted app configuration
//
// "HostedAppName"/"HostedAppSigningKey" are legacy setting names kept as fallbacks
// for "ClientId"/"ClientSecret".
private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId");
// IssuerId defaults to the client id when not configured separately.
private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId");
private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride");
private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName");
private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret");
// Secondary secret supports secret rollover without downtime.
private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret");
private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm");
// The ACS service namespace is read from the same "Realm" setting
// (presumably the namespace equals the realm in this deployment model — verify if changed).
private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm");
// High trust (S2S) configuration: certificate used to sign self-issued tokens.
private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath");
private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword");
// Both the path and the password must be configured for the certificate to load.
private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword);
// Null when no certificate is configured; IsHighTrustApp() keys off this field.
private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ? null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest);
#endregion
#region private methods
/// <summary>
/// Builds a ClientContext for the given SharePoint url from the context token
/// carried in the remote event properties, going through ACS.
/// </summary>
/// <param name="properties">Remote event properties containing the context token</param>
/// <param name="sharepointUrl">Url of the SharePoint site to call</param>
/// <returns>A ClientContext for sharepointUrl, or null when no context token is present</returns>
private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl)
{
    // Without a context token there is nothing to exchange with ACS.
    if (String.IsNullOrEmpty(properties.ContextToken))
    {
        return null;
    }
    // The WCF "To" header identifies which of the app's host names the event targeted.
    string appHost = OperationContext.Current.IncomingMessageHeaders.To.Host;
    SharePointContextToken contextToken = ReadAndValidateContextToken(properties.ContextToken, appHost);
    OAuth2AccessTokenResponse tokenResponse = GetAccessToken(contextToken, sharepointUrl.Authority);
    return GetClientContextWithAccessToken(sharepointUrl.ToString(), tokenResponse.AccessToken);
}
/// <summary>
/// Returns the absolute url of the ACS metadata document endpoint.
/// </summary>
private static string GetAcsMetadataEndpointUrl()
{
    // Concatenate directly instead of using Path.Combine: Path.Combine is meant for
    // file system paths and can inject platform-specific separators into a url.
    // GetAcsGlobalEndpointUrl() always ends with '/', so the result is identical.
    return GetAcsGlobalEndpointUrl() + AcsMetadataEndPointRelativeUrl;
}
/// <summary>
/// Formats an OAuth principal identifier as "name/host@realm", or "name@realm"
/// when no host name applies.
/// </summary>
/// <param name="principalName">Principal id (e.g. client id or the SharePoint principal)</param>
/// <param name="hostName">Optional url authority of the principal</param>
/// <param name="realm">Realm the principal belongs to</param>
private static string GetFormattedPrincipal(string principalName, string hostName, string realm)
{
    if (String.IsNullOrEmpty(hostName))
    {
        return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm);
    }
    return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm);
}
/// <summary>
/// Returns the formatted principal name of ACS itself for the given realm.
/// </summary>
private static string GetAcsPrincipalName(string realm)
{
    // ACS is addressed by its well-known principal id at the global endpoint host.
    string acsHost = new Uri(GetAcsGlobalEndpointUrl()).Host;
    return GetFormattedPrincipal(AcsPrincipalName, acsHost, realm);
}
/// <summary>
/// Returns the global ACS endpoint url, always ending with '/',
/// e.g. "https://accounts.accesscontrol.windows.net/".
/// </summary>
private static string GetAcsGlobalEndpointUrl()
{
    return "https://" + GlobalEndPointPrefix + "." + AcsHostUrl + "/";
}
/// <summary>
/// Builds a JsonWebSecurityTokenHandler configured to validate context tokens
/// signed with the app's client secret (and the secondary secret, when set,
/// to support secret rollover).
/// </summary>
private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler()
{
JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler();
handler.Configuration = new SecurityTokenHandlerConfiguration();
// Audience and certificate checks are disabled; issuer/signature validation below
// is what authenticates the token.
handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never);
handler.Configuration.CertificateValidator = X509CertificateValidator.None;
// Collect the symmetric signing keys: the primary secret, plus the secondary one if configured.
List<byte[]> securityKeys = new List<byte[]>();
securityKeys.Add(Convert.FromBase64String(ClientSecret));
if (!string.IsNullOrEmpty(SecondaryClientSecret))
{
securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret));
}
// Wrap all keys in a single token so either secret can validate a signature.
List<SecurityToken> securityTokens = new List<SecurityToken>();
securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys));
handler.Configuration.IssuerTokenResolver =
SecurityTokenResolver.CreateDefaultSecurityTokenResolver(
new ReadOnlyCollection<SecurityToken>(securityTokens),
false);
// Every key is trusted as coming from the ACS principal of the configured service namespace.
SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry();
foreach (byte[] securitykey in securityKeys)
{
issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace));
}
handler.Configuration.IssuerNameRegistry = issuerNameRegistry;
return handler;
}
/// <summary>
/// Issues a self-signed S2S access token for the SharePoint principal at the
/// given host name and realm, carrying the supplied user claims.
/// </summary>
/// <param name="targetApplicationHostName">Url authority of the target application</param>
/// <param name="targetRealm">Realm for the token's audience</param>
/// <param name="claims">User claims, or null to issue an app-only token</param>
private static string GetS2SAccessTokenWithClaims(
    string targetApplicationHostName,
    string targetRealm,
    IEnumerable<JsonWebTokenClaim> claims)
{
    // No user claims means the token is issued app-only.
    bool appOnly = claims == null;
    // Named arguments make explicit that source and target realm are the same here.
    return IssueToken(
        sourceApplication: ClientId,
        issuerApplication: IssuerId,
        sourceRealm: targetRealm,
        targetApplication: SharePointPrincipal,
        targetRealm: targetRealm,
        targetApplicationHostName: targetApplicationHostName,
        trustedForDelegation: true,
        claims: claims,
        appOnly: appOnly);
}
/// <summary>
/// Builds the JWT claims identifying the given Windows user: the lower-cased SID
/// as the name identifier, plus the Active Directory identity-provider marker.
/// </summary>
/// <param name="identity">Windows identity to extract claims from</param>
private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity)
{
    // SID strings are ASCII; use the invariant culture so the result does not vary
    // with the thread culture (e.g. Turkish-I casing rules).
    JsonWebTokenClaim[] claims = new JsonWebTokenClaim[]
    {
        new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLowerInvariant()),
        new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory")
    };
    return claims;
}
/// <summary>
/// Issues a self-signed high trust (S2S) token. An inner "actor" token is signed with the
/// app's certificate; for delegated calls it is wrapped in an unsigned outer token that
/// carries the user claims plus the actor token as a claim. For app-only calls the actor
/// token itself is returned.
/// </summary>
/// <param name="sourceApplication">Client id of the calling application</param>
/// <param name="issuerApplication">Issuer id used for the actor token's issuer</param>
/// <param name="sourceRealm">Realm qualifying the issuer/nameid, may be null or empty</param>
/// <param name="targetApplication">Principal id of the target application</param>
/// <param name="targetRealm">Realm for the token audience</param>
/// <param name="targetApplicationHostName">Url authority of the target application</param>
/// <param name="trustedForDelegation">Whether to assert the trustedfordelegation claim</param>
/// <param name="claims">User claims for the outer token, may be null</param>
/// <param name="appOnly">True to return the bare actor token without user claims</param>
/// <returns>The serialized JWT access token</returns>
private static string IssueToken(
string sourceApplication,
string issuerApplication,
string sourceRealm,
string targetApplication,
string targetRealm,
string targetApplicationHostName,
bool trustedForDelegation,
IEnumerable<JsonWebTokenClaim> claims,
bool appOnly = false)
{
// Token signing requires the client certificate configured in web.config.
if (null == SigningCredentials)
{
throw new InvalidOperationException("SigningCredentials was not initialized");
}
#region Actor token
// Issuer and nameid are qualified with the source realm when one is supplied.
string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm);
string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm);
string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm);
List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>();
actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid));
// The delegation claim only makes sense when a user is being impersonated.
if (trustedForDelegation && !appOnly)
{
actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true"));
}
// Create token
JsonWebSecurityToken actorToken = new JsonWebSecurityToken(
issuer: issuer,
audience: audience,
validFrom: DateTime.UtcNow,
validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
signingCredentials: SigningCredentials,
claims: actorClaims);
string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken);
if (appOnly)
{
// App-only token is the same as actor token for delegated case
return actorTokenString;
}
#endregion Actor token
#region Outer token
// The outer token carries the caller-supplied user claims plus the signed actor token.
List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims);
outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString));
// Note: the outer token is not signed; its authenticity comes from the embedded actor token.
JsonWebSecurityToken jsonToken = new JsonWebSecurityToken(
nameid, // outer token issuer should match actor token nameid
audience,
DateTime.UtcNow,
DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
outerClaims);
string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken);
#endregion Outer token
return accessToken;
}
#endregion
#region AcsMetadataParser
// This class is used to get MetaData document from the global STS endpoint. It contains
// methods to parse the MetaData document and get endpoints and STS certificate.
public static class AcsMetadataParser
{
/// <summary>
/// Returns the ACS token-signing certificate for the given realm, read from the
/// first key entry of the metadata document.
/// </summary>
/// <exception cref="Exception">The metadata document contains no signing certificate.</exception>
public static X509Certificate2 GetAcsSigningCert(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
if (null != document.keys && document.keys.Count > 0)
{
JsonKey signingKey = document.keys[0];
if (null != signingKey && null != signingKey.keyValue)
{
// The key value holds the certificate bytes as text.
return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value));
}
}
throw new Exception("Metadata document does not contain ACS signing certificate.");
}
/// <summary>
/// Returns the delegation service url for the given realm from the metadata document.
/// </summary>
/// <exception cref="Exception">No delegation endpoint is listed in the document.</exception>
public static string GetDelegationServiceUrl(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
// Endpoints are keyed by protocol name; DelegationIssuance identifies the delegation service.
JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance);
if (null != delegationEndpoint)
{
return delegationEndpoint.location;
}
throw new Exception("Metadata document does not contain Delegation Service endpoint Url");
}
/// <summary>
/// Downloads and deserializes the ACS metadata document for the given realm.
/// </summary>
/// <exception cref="Exception">The endpoint returned no parseable document.</exception>
private static JsonMetadataDocument GetMetadataDocument(string realm)
{
// The realm is passed as a query parameter to the global metadata endpoint.
string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}",
GetAcsMetadataEndpointUrl(),
realm);
byte[] acsMetadata;
using (WebClient webClient = new WebClient())
{
acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm);
}
string jsonResponseString = Encoding.UTF8.GetString(acsMetadata);
JavaScriptSerializer serializer = new JavaScriptSerializer();
JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString);
if (null == document)
{
throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm);
}
return document;
}
/// <summary>
/// Returns the OAuth2 security token service url for the given realm from the metadata document.
/// </summary>
/// <exception cref="Exception">No OAuth2 endpoint is listed in the document.</exception>
public static string GetStsUrl(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol);
if (null != s2sEndpoint)
{
return s2sEndpoint.location;
}
throw new Exception("Metadata document does not contain STS endpoint url");
}
// DTOs mirroring the JSON metadata document schema; member names must stay
// lower-case to match the wire format used by the JavaScriptSerializer.
private class JsonMetadataDocument
{
public string serviceName { get; set; }
public List<JsonEndpoint> endpoints { get; set; }
public List<JsonKey> keys { get; set; }
}
private class JsonEndpoint
{
public string location { get; set; }
public string protocol { get; set; }
public string usage { get; set; }
}
private class JsonKeyValue
{
public string type { get; set; }
public string value { get; set; }
}
private class JsonKey
{
public string usage { get; set; }
public JsonKeyValue keyValue { get; set; }
}
}
#endregion
}
/// <summary>
/// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token
/// </summary>
public class SharePointContextToken : JsonWebSecurityToken
{
    /// <summary>
    /// Creates a SharePointContextToken copying the issuer, audience, validity window
    /// and claims of the given token.
    /// </summary>
    public static SharePointContextToken Create(JsonWebSecurityToken contextToken)
    {
        return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims);
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims)
        : base(issuer, audience, validFrom, validTo, claims)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken)
        : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken)
    {
    }

    public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SigningCredentials signingCredentials)
        : base(issuer, audience, validFrom, validTo, claims, signingCredentials)
    {
    }

    /// <summary>
    /// The context token's "nameid" claim
    /// </summary>
    public string NameId
    {
        get
        {
            return GetClaimValue(this, "nameid");
        }
    }

    /// <summary>
    /// The principal name portion of the context token's "appctxsender" claim
    /// </summary>
    public string TargetPrincipalName
    {
        get
        {
            string appctxsender = GetClaimValue(this, "appctxsender");
            if (appctxsender == null)
            {
                return null;
            }
            // The claim has the form "principal@realm"; keep only the principal part.
            return appctxsender.Split('@')[0];
        }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim
    /// </summary>
    public string RefreshToken
    {
        get
        {
            return GetClaimValue(this, "refreshtoken");
        }
    }

    /// <summary>
    /// The context token's "CacheKey" claim
    /// </summary>
    public string CacheKey
    {
        get
        {
            return GetAppContextProperty("CacheKey");
        }
    }

    /// <summary>
    /// The context token's "SecurityTokenServiceUri" claim
    /// </summary>
    public string SecurityTokenServiceUri
    {
        get
        {
            return GetAppContextProperty("SecurityTokenServiceUri");
        }
    }

    /// <summary>
    /// The realm portion of the context token's "audience" claim
    /// </summary>
    public string Realm
    {
        get
        {
            string aud = Audience;
            if (aud == null)
            {
                return null;
            }
            // The audience has the form "principal/host@realm"; the realm follows the '@'.
            string tokenRealm = aud.Substring(aud.IndexOf('@') + 1);
            return tokenRealm;
        }
    }

    /// <summary>
    /// Extracts a named property from the JSON object stored in the token's "appctx" claim.
    /// Returns null when the claim is absent.
    /// </summary>
    /// <param name="propertyName">Name of the property inside the appctx JSON object</param>
    private string GetAppContextProperty(string propertyName)
    {
        string appctx = GetClaimValue(this, "appctx");
        if (appctx == null)
        {
            return null;
        }
        // A throwaway ClientContext is used only for its JSON parser; dispose it
        // instead of leaking it (the original code never disposed these contexts).
        using (ClientContext ctx = new ClientContext("http://tempuri.org"))
        {
            Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
            return (string)dict[propertyName];
        }
    }

    /// <summary>
    /// Returns the value of the first claim of the given type, or null when no such claim exists.
    /// </summary>
    /// <exception cref="ArgumentNullException">token is null</exception>
    private static string GetClaimValue(JsonWebSecurityToken token, string claimType)
    {
        if (token == null)
        {
            throw new ArgumentNullException("token");
        }
        foreach (JsonWebTokenClaim claim in token.Claims)
        {
            if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType))
            {
                return claim.Value;
            }
        }
        return null;
    }
}
/// <summary>
/// Represents a security token which contains multiple security keys that are generated using symmetric algorithms.
/// </summary>
public class MultipleSymmetricKeySecurityToken : SecurityToken
{
/// <summary>
/// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
/// </summary>
/// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys)
: this(UniqueId.CreateUniqueId(), keys)
{
}
/// <summary>
/// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
/// </summary>
/// <param name="tokenId">The unique identifier of the security token.</param>
/// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys)
{
if (keys == null)
{
throw new ArgumentNullException("keys");
}
if (String.IsNullOrEmpty(tokenId))
{
throw new ArgumentException("Value cannot be a null or empty string.", "tokenId");
}
foreach (byte[] key in keys)
{
if (key.Length <= 0)
{
throw new ArgumentException("The key length must be greater then zero.", "keys");
}
}
id = tokenId;
effectiveTime = DateTime.UtcNow;
securityKeys = CreateSymmetricSecurityKeys(keys);
}
/// <summary>
/// Gets the unique identifier of the security token.
/// </summary>
public override string Id
{
get
{
return id;
}
}
/// <summary>
/// Gets the cryptographic keys associated with the security token.
/// </summary>
public override ReadOnlyCollection<SecurityKey> SecurityKeys
{
get
{
return securityKeys.AsReadOnly();
}
}
/// <summary>
/// Gets the first instant in time at which this security token is valid.
/// </summary>
public override DateTime ValidFrom
{
get
{
return effectiveTime;
}
}
/// <summary>
/// Gets the last instant in time at which this security token is valid.
/// </summary>
public override DateTime ValidTo
{
get
{
// Never expire
return DateTime.MaxValue;
}
}
/// <summary>
/// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier.
/// </summary>
/// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param>
/// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns>
public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
{
    if (keyIdentifierClause == null)
    {
        throw new ArgumentNullException("keyIdentifierClause");
    }

    // Since this is a symmetric token and we do not have IDs to distinguish
    // tokens, the mere presence of a SymmetricIssuerKeyIdentifierClause counts
    // as a match; the actual mapping to the issuer takes place later, when the
    // key is matched to the issuer. Anything else falls back to the base check.
    return keyIdentifierClause is SymmetricIssuerKeyIdentifierClause
        || base.MatchesKeyIdentifierClause(keyIdentifierClause);
}
#region private members
// Wraps each raw key as an in-memory symmetric security key, preserving the
// order of the source sequence.
private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys)
{
    var wrapped = new List<SecurityKey>();
    foreach (byte[] keyMaterial in keys)
    {
        wrapped.Add(new InMemorySymmetricSecurityKey(keyMaterial));
    }

    return wrapped;
}
// Unique identifier supplied at construction; surfaced via Id.
private string id;
// UTC instant the token was created; surfaced via ValidFrom.
private DateTime effectiveTime;
// Symmetric keys wrapped as security keys; surfaced read-only via SecurityKeys.
private List<SecurityKey> securityKeys;
#endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.Editor.Commands;
using Microsoft.CodeAnalysis.Editor.CSharp.RenameTracking;
using Microsoft.CodeAnalysis.Editor.Implementation.RenameTracking;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Editor.VisualBasic.RenameTracking;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.UnitTests.Diagnostics;
using Microsoft.VisualStudio.Composition;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Operations;
using Microsoft.VisualStudio.Text.Tagging;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.UnitTests.RenameTracking
{
/// <summary>
/// Test harness for rename tracking: hosts a workspace with a single document,
/// wires up the rename tracking tagger and code fix provider for the requested
/// language, and exposes helpers for driving the editor and asserting on the
/// resulting tags, diagnostics and code actions.
/// </summary>
internal sealed class RenameTrackingTestState : IDisposable
{
    private readonly ITagger<RenameTrackingTag> _tagger;
    public readonly TestWorkspace Workspace;
    private readonly IWpfTextView _view;
    private readonly ITextUndoHistoryRegistry _historyRegistry;

    // Captured by the mocked notification callback; stays null until a
    // notification is raised.
    private string _notificationMessage = null;

    private readonly TestHostDocument _hostDocument;
    public TestHostDocument HostDocument { get { return _hostDocument; } }

    private readonly IEditorOperations _editorOperations;
    public IEditorOperations EditorOperations { get { return _editorOperations; } }

    private readonly MockRefactorNotifyService _mockRefactorNotifyService;
    public MockRefactorNotifyService RefactorNotifyService { get { return _mockRefactorNotifyService; } }

    private readonly CodeFixProvider _codeFixProvider;
    private readonly RenameTrackingCancellationCommandHandler _commandHandler = new RenameTrackingCancellationCommandHandler();

    /// <summary>
    /// Builds the workspace from <paramref name="markup"/>, positions the caret
    /// at the markup's cursor position, and wires up the tagger and the
    /// language-specific code fix provider.
    /// </summary>
    /// <param name="markup">Document text, including a caret position marker.</param>
    /// <param name="languageName">LanguageNames.CSharp or LanguageNames.VisualBasic.</param>
    /// <param name="onBeforeGlobalSymbolRenamedReturnValue">Return value of the mocked pre-rename notification.</param>
    /// <param name="onAfterGlobalSymbolRenamedReturnValue">Return value of the mocked post-rename notification.</param>
    /// <exception cref="ArgumentException">Thrown for any other language name.</exception>
    public RenameTrackingTestState(
        string markup,
        string languageName,
        bool onBeforeGlobalSymbolRenamedReturnValue = true,
        bool onAfterGlobalSymbolRenamedReturnValue = true)
    {
        this.Workspace = CreateTestWorkspace(markup, languageName, TestExportProvider.CreateExportProviderWithCSharpAndVisualBasic());

        _hostDocument = Workspace.Documents.First();
        _view = _hostDocument.GetTextView();
        _view.Caret.MoveTo(new SnapshotPoint(_view.TextSnapshot, _hostDocument.CursorPosition.Value));
        _editorOperations = Workspace.GetService<IEditorOperationsFactoryService>().GetEditorOperations(_view);
        _historyRegistry = Workspace.ExportProvider.GetExport<ITextUndoHistoryRegistry>().Value;
        _mockRefactorNotifyService = new MockRefactorNotifyService
        {
            OnBeforeSymbolRenamedReturnValue = onBeforeGlobalSymbolRenamedReturnValue,
            OnAfterSymbolRenamedReturnValue = onAfterGlobalSymbolRenamedReturnValue
        };

        var optionService = this.Workspace.Services.GetService<IOptionService>();

        // Mock the action taken by the workspace INotificationService
        var notificationService = Workspace.Services.GetService<INotificationService>() as INotificationServiceCallback;
        var callback = new Action<string, string, NotificationSeverity>((message, title, severity) => _notificationMessage = message);
        notificationService.NotificationCallback = callback;

        var tracker = new RenameTrackingTaggerProvider(
            _historyRegistry,
            Workspace.ExportProvider.GetExport<Host.IWaitIndicator>().Value,
            Workspace.ExportProvider.GetExport<IInlineRenameService>().Value,
            SpecializedCollections.SingletonEnumerable(_mockRefactorNotifyService),
            Workspace.ExportProvider.GetExports<IAsynchronousOperationListener, FeatureMetadata>());

        _tagger = tracker.CreateTagger<RenameTrackingTag>(_hostDocument.GetTextBuffer());

        if (languageName == LanguageNames.CSharp)
        {
            _codeFixProvider = new CSharpRenameTrackingCodeFixProvider(
                Workspace.ExportProvider.GetExport<Host.IWaitIndicator>().Value,
                _historyRegistry,
                SpecializedCollections.SingletonEnumerable(_mockRefactorNotifyService));
        }
        else if (languageName == LanguageNames.VisualBasic)
        {
            _codeFixProvider = new VisualBasicRenameTrackingCodeFixProvider(
                Workspace.ExportProvider.GetExport<Host.IWaitIndicator>().Value,
                _historyRegistry,
                SpecializedCollections.SingletonEnumerable(_mockRefactorNotifyService));
        }
        else
        {
            // Fixed typo in the exception message ("langauge" -> "language").
            throw new ArgumentException("Invalid language name: " + languageName, "languageName");
        }
    }

    // Wraps the given code into a single-project, single-document workspace
    // definition for the requested language.
    private static TestWorkspace CreateTestWorkspace(string code, string languageName, ExportProvider exportProvider = null)
    {
        var xml = string.Format(@"
<Workspace>
    <Project Language=""{0}"" CommonReferences=""true"">
        <Document>{1}</Document>
    </Project>
</Workspace>", languageName, code);

        return TestWorkspaceFactory.CreateWorkspace(xml, exportProvider: exportProvider);
    }

    /// <summary>
    /// Simulates the user pressing Escape, which cancels rename tracking.
    /// </summary>
    public void SendEscape()
    {
        _commandHandler.ExecuteCommand(new EscapeKeyCommandArgs(_view, _view.TextBuffer), () => { });
    }

    /// <summary>
    /// Moves the caret by <paramref name="delta"/> characters relative to its
    /// current buffer position.
    /// </summary>
    public void MoveCaret(int delta)
    {
        var position = _view.Caret.Position.BufferPosition.Position;
        _view.Caret.MoveTo(new SnapshotPoint(_view.TextSnapshot, position + delta));
    }

    /// <summary>
    /// Undoes the last <paramref name="count"/> edits in the view's undo history.
    /// </summary>
    public void Undo(int count = 1)
    {
        var history = _historyRegistry.GetHistory(_view.TextBuffer);
        history.Undo(count);
    }

    /// <summary>
    /// Redoes the last <paramref name="count"/> undone edits.
    /// </summary>
    public void Redo(int count = 1)
    {
        var history = _historyRegistry.GetHistory(_view.TextBuffer);
        history.Redo(count);
    }

    /// <summary>
    /// Asserts that no rename tracking tag is present anywhere in the buffer.
    /// </summary>
    public void AssertNoTag()
    {
        WaitForAsyncOperations();

        var tags = _tagger.GetTags(new NormalizedSnapshotSpanCollection(new SnapshotSpan(_view.TextBuffer.CurrentSnapshot, new Span(0, _view.TextBuffer.CurrentSnapshot.Length))));

        Assert.Equal(0, tags.Count());
    }

    /// <summary>
    /// Asserts that exactly one rename tracking tag and diagnostic exist, that
    /// the two offered code actions carry the expected titles, and optionally
    /// invokes one of the actions.
    /// </summary>
    /// <param name="expectedFromName">Identifier's original name.</param>
    /// <param name="expectedToName">Identifier's new name.</param>
    /// <param name="invokeAction">When true, applies the action at <paramref name="actionIndex"/>.</param>
    /// <param name="actionIndex">0 = rename, 1 = rename with preview.</param>
    public void AssertTag(string expectedFromName, string expectedToName, bool invokeAction = false, int actionIndex = 0)
    {
        WaitForAsyncOperations();

        var tags = _tagger.GetTags(new NormalizedSnapshotSpanCollection(new SnapshotSpan(_view.TextBuffer.CurrentSnapshot, new Span(0, _view.TextBuffer.CurrentSnapshot.Length))));

        // There should only ever be one tag
        Assert.Equal(1, tags.Count());
        var tag = tags.Single();

        var document = this.Workspace.CurrentSolution.GetDocument(_hostDocument.Id);
        var analyzer = new RenameTrackingDiagnosticAnalyzer();
        var diagnostics = DiagnosticProviderTestUtilities.GetDocumentDiagnostics(analyzer, document, tag.Span.Span.ToTextSpan()).ToList();

        // There should be a single rename tracking diagnostic
        Assert.Equal(1, diagnostics.Count);
        Assert.Equal(RenameTrackingDiagnosticAnalyzer.DiagnosticId, diagnostics[0].Id);

        var actions = new List<CodeAction>();
        var context = new CodeFixContext(document, diagnostics[0], (a, d) => actions.Add(a), CancellationToken.None);
        _codeFixProvider.RegisterCodeFixesAsync(context).Wait();

        // There should be two actions
        Assert.Equal(2, actions.Count);
        Assert.Equal(string.Format("Rename '{0}' to '{1}'", expectedFromName, expectedToName), actions[0].Title);
        Assert.Equal(string.Format("Rename '{0}' to '{1}' with preview...", expectedFromName, expectedToName), actions[1].Title);

        if (invokeAction)
        {
            var operations = actions[actionIndex]
                .GetOperationsAsync(CancellationToken.None)
                .WaitAndGetResult(CancellationToken.None)
                .ToArray();

            Assert.Equal(1, operations.Length);
            operations[0].Apply(this.Workspace, CancellationToken.None);
        }
    }

    /// <summary>
    /// Asserts that the mocked notification service was never invoked.
    /// </summary>
    public void AssertNoNotificationMessage()
    {
        Assert.Null(_notificationMessage);
    }

    /// <summary>
    /// Asserts that the mocked notification service was invoked at least once.
    /// </summary>
    public void AssertNotificationMessage()
    {
        Assert.NotNull(_notificationMessage);
    }

    // Blocks until all registered asynchronous feature operations have drained,
    // so tag/diagnostic assertions observe a settled state.
    private void WaitForAsyncOperations()
    {
        var waiters = Workspace.ExportProvider.GetExportedValues<IAsynchronousOperationWaiter>();
        var tasks = waiters.Select(w => w.CreateWaitTask()).ToList();
        tasks.PumpingWaitAll();
    }

    public void Dispose()
    {
        Workspace.Dispose();
    }
}
}
| |
using System.Windows;
using Drawing = System.Drawing;
using DrawingImg = System.Drawing.Imaging;
using Forms = System.Windows.Forms;
using System.Windows.Media.Imaging;
using System.IO;
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace CustomPrintScreen
{
/// <summary>
/// This class is responisble for:
/// Creating and holding screens, determining if the app is run in advanced mode and holds last capture time
/// </summary>
/// <summary>
/// This class is responsible for:
/// creating and holding screens, determining if the app is run in advanced mode, and holding the last capture time.
/// </summary>
class Handler
{
    /// <summary>
    /// Holds all bitmaps made when print screen is clicked
    /// </summary>
    public static List<Drawing.Bitmap> Bitmaps = new List<Drawing.Bitmap>();

    /// <summary>
    /// Determines the application mode
    /// </summary>
    public static bool AdvancedMode;

    /// <summary>
    /// Time when last print screen was made. Used for file save.
    /// </summary>
    public static DateTime ShotTime;

    // Top-level windows of the application; assigned elsewhere at startup.
    public static MainWindow mainWindow;
    public static CropWindow cropWindow;
    public static InfoWindow infoWindow;
    public static SettingsWindow settingsWindow;
    public static ScreenNamePrompt namePrompt;

    /// <summary>
    /// Creates print screens, divides them into screens and saves in Handler.Bitmaps list
    /// </summary>
    public static void CreateScreens()
    {
        mainWindow.HideWindow();
        ShotTime = DateTime.Now;

        for (int i = 0; i < Forms.Screen.AllScreens.Length; i++)
        {
            Forms.Screen s = Forms.Screen.AllScreens[i];

            // CopyFromScreen doesn't work for fullscreen applications.
            // Games run on the primary screen when they are full screen,
            // so if the screen is primary, then use the low-level print screen.
            if (s.Primary)
            {
                Bitmaps.Add(ScreenCapture.CaptureScreen());
                continue;
            }

            // Create a new bitmap sized to this screen. Keep a local reference
            // instead of indexing Bitmaps[i]: list indices only match screen
            // indices when the list is empty on entry.
            var bitmap = new Drawing.Bitmap(s.Bounds.Width,
                                            s.Bounds.Height,
                                            DrawingImg.PixelFormat.Format32bppArgb);
            Bitmaps.Add(bitmap);

            // Copy the screen contents into the bitmap. Dispose the Graphics
            // object (it wraps a GDI+ device context) as soon as we're done;
            // previously it was leaked.
            using (var graphics = Drawing.Graphics.FromImage(bitmap))
            {
                graphics.CopyFromScreen(s.Bounds.X, s.Bounds.Y, 0, 0, s.Bounds.Size, Drawing.CopyPixelOperation.SourceCopy);
            }
        }
    }

    /// <summary>
    /// Saves the bitmap stored in Handler.Bitmaps list and pointed by id
    /// </summary>
    /// <param name="id">Pointer to index of an array</param>
    /// <param name="hideAppAfter">When true, hides the main window after a direct save.</param>
    public static void SaveScreen(int id, bool hideAppAfter = true)
    {
        if (Settings.AskForScreenName)
        {
            // Only one prompt at a time; the prompt itself performs the save
            // once the user confirms a name.
            if (namePrompt == null)
            {
                namePrompt = new ScreenNamePrompt();
                namePrompt.BitmapId = id;
                namePrompt.Show();
                namePrompt.Topmost = true;
            }
        }
        else
        {
            OutputScreen(id);

            if (hideAppAfter)
                Handler.mainWindow.HideWindow(true);
        }
    }

    /// <summary>
    /// Outputs the screen to desktop with name of capture date
    /// </summary>
    /// <param name="id">Index of bitmap in array</param>
    /// <param name="filename">(w/o extension)If given, saves the screen with this name</param>
    public static void OutputScreen(int id, string filename = "")
    {
        // If the parameter is default, generate a name based on the date.
        if (filename.Equals(""))
            filename = GenerateFilenameBasedOnDate();

        // If a file with the given name exists, add a number at its end.
        filename = GetAvailableName(filename);

        // Convert the name to a path in the save directory and add the extension.
        filename = Settings.SaveDirectory + filename + ".png";

        // Save; the bitmap may be null if it was already cleared.
        Bitmaps[id]?.Save(filename, DrawingImg.ImageFormat.Png);
    }

    /// <summary>
    /// Returns a filename (w/o extension) based on the capture date.
    /// </summary>
    /// <returns>ShotTime formatted with Settings.DateFormat.</returns>
    static string GenerateFilenameBasedOnDate()
    {
        // No need to call GetAvailableName here: the only caller
        // (OutputScreen) applies it to the returned name anyway, so the
        // previous double application was redundant.
        return ShotTime.ToString(Settings.DateFormat);
    }

    /// <summary>
    /// Returns name with additional number at the end if given name exists on desktop
    /// </summary>
    /// <param name="basename">name of file w/o extension</param>
    /// <returns>Parameter basename with additional number at the end</returns>
    static string GetAvailableName(string basename)
    {
        for (int i = 0; ; i++)
        {
            // Try the bare name first, then name1, name2, ...
            string add = i > 0 ? i.ToString() : "";
            string fullname = Settings.SaveDirectory + basename + add + ".png";

            if (!File.Exists(fullname))
            {
                return basename + add;
            }
        }
    }

    /// <summary>
    /// Returns string with path, available filename and extension
    /// </summary>
    /// <param name="filename">If not given, the capture date is used instead</param>
    /// <returns>Full path including save directory and ".png" extension.</returns>
    public static string GetFullPath(string filename = "")
    {
        if (filename.Equals(""))
            return Settings.SaveDirectory + GetAvailableName(ShotTime.ToString(Settings.DateFormat)) + ".png";
        else return Settings.SaveDirectory + GetAvailableName(filename) + ".png";
    }

    /// <summary>
    /// Removes all the bitmaps and images from the app
    /// </summary>
    public static void ClearData()
    {
        // Dispose every captured bitmap, then empty the list.
        for (int i = Bitmaps.Count - 1; i >= 0; i--)
        {
            Bitmaps[i].Dispose();
        }

        Bitmaps.Clear();
    }

    /// <summary>
    /// Loads an image from the Resources folder in solution
    /// </summary>
    /// <param name="name">Name of file with extension</param>
    /// <returns>BitmapImage of image</returns>
    public static BitmapImage LoadImage(string name)
    {
        BitmapImage logo = new BitmapImage();
        logo.BeginInit();
        logo.UriSource = new Uri("pack://application:,,,/CustomPrintScreen;component/Resources/" + name);
        logo.EndInit();
        return logo;
    }

    // The HBITMAP returned by Bitmap.GetHbitmap must be released manually via
    // GDI, otherwise the handle leaks.
    [DllImport("gdi32")]
    static extern int DeleteObject(IntPtr o);

    /// <summary>
    /// Converts a GDI+ bitmap to a WPF BitmapSource, releasing the interop
    /// HBITMAP handle even if the conversion throws.
    /// </summary>
    /// <param name="source">Bitmap to convert; not disposed by this method.</param>
    /// <returns>A BitmapSource with the same pixels as the input.</returns>
    public static BitmapSource BitmapToImageSource(System.Drawing.Bitmap source)
    {
        IntPtr ip = source.GetHbitmap();
        BitmapSource bs = null;
        try
        {
            bs = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(ip,
               IntPtr.Zero, Int32Rect.Empty,
               System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
        }
        finally
        {
            DeleteObject(ip);
        }
        return bs;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
namespace FP.Radius
{
/// <summary>
/// Represents a RADIUS packet (RFC 2865): builds new request/response packets
/// and parses received ones, maintaining both the typed attribute list and the
/// raw on-the-wire byte representation.
/// </summary>
public class RadiusPacket
{
    #region Constants
    // Byte offsets of the fixed RADIUS header fields (RFC 2865 section 3).
    private const byte RADIUS_CODE_INDEX = 0;
    private const byte RADIUS_IDENTIFIER_INDEX = 1;
    private const byte RADIUS_LENGTH_INDEX = 2;
    private const byte RADIUS_AUTHENTICATOR_INDEX = 4;
    private const byte RADIUS_AUTHENTICATOR_FIELD_LENGTH = 16;
    // Message-Authenticator attribute (RFC 2869 section 5.14): a 16-octet
    // HMAC-MD5 hash plus 2 octets of type/length.
    private const byte RADIUS_MESSAGE_AUTH_HASH_LENGTH = 16;
    private const byte RADIUS_MESSAGE_AUTHENTICATOR_LENGTH = 18;
    private const byte ATTRIBUTES_INDEX = 20;
    private const byte RADIUS_HEADER_LENGTH = ATTRIBUTES_INDEX;
    #endregion

    #region Private
    private readonly List<RadiusAttribute> _Attributes = new List<RadiusAttribute>();
    private byte[] _Authenticator = new byte[RADIUS_AUTHENTICATOR_FIELD_LENGTH];
    private ushort _Length;
    private NasPortType _NasPortType;
    #endregion

    #region Properties
    // Raw on-the-wire representation of the packet.
    public byte[] RawData { get; private set; }
    public RadiusCode PacketType { get; private set; }
    public byte Identifier { get; private set; }
    public byte[] Header { get; private set; }
    // False when a received packet failed structural validation.
    public bool Valid { get; private set; }

    public NasPortType NasPortType
    {
        get { return _NasPortType; }
        set
        {
            _NasPortType = value;
            // NOTE(review): this only appends to the attribute list; unlike
            // SetAttribute it does not update RawData/_Length — confirm
            // whether the attribute is expected to appear on the wire.
            _Attributes.Add(new RadiusAttribute(RadiusAttributeType.NAS_PORT_TYPE, BitConverter.GetBytes((int)value)));
        }
    }

    public List<RadiusAttribute> Attributes
    {
        get { return _Attributes; }
    }

    public byte[] Authenticator
    {
        get { return _Authenticator; }
    }
    #endregion

    #region Constructors
    /// <summary>
    /// Creates a new RADIUS packet with a pseudo-random identifier.
    /// </summary>
    public RadiusPacket(RadiusCode packetType)
        : this(packetType, (Guid.NewGuid().ToByteArray())[0])
    {
        // Chains to the (RadiusCode, byte) constructor so the header
        // initialization is not duplicated.
    }

    /// <summary>
    /// Creates a new RADIUS packet with an explicit identifier.
    /// </summary>
    public RadiusPacket(RadiusCode packetType, byte identifier)
    {
        PacketType = packetType;
        Identifier = identifier;
        _Length = RADIUS_HEADER_LENGTH;
        RawData = new byte[RADIUS_HEADER_LENGTH];
        RawData[RADIUS_CODE_INDEX] = (byte)PacketType;
        RawData[RADIUS_IDENTIFIER_INDEX] = Identifier;
        // Write the length field in network (big-endian) byte order.
        Array.Copy(BitConverter.GetBytes(_Length), 0, RawData, RADIUS_LENGTH_INDEX, sizeof(ushort));
        Array.Reverse(RawData, RADIUS_LENGTH_INDEX, sizeof(ushort));
    }

    /// <summary>
    /// Parses a received RADIUS packet; check <see cref="Valid"/> afterwards.
    /// </summary>
    public RadiusPacket(byte[] receivedData)
    {
        try
        {
            Valid = true;
            RawData = receivedData;

            // RFC 2865: a RADIUS packet is between 20 and 4096 octets.
            if (RawData.Length < 20 || RawData.Length > 4096)
            {
                Valid = false;
                return;
            }

            //Get the RADIUS Code
            PacketType = (RadiusCode)RawData[RADIUS_CODE_INDEX];

            //Get the RADIUS Identifier
            Identifier = RawData[RADIUS_IDENTIFIER_INDEX];

            //Get the RADIUS Length (big-endian on the wire)
            _Length = (ushort)((RawData[2] << 8) + RawData[3]);

            // The length field may not claim more octets than were actually
            // received; octets beyond the claimed length are padding.
            // (The previous comment stated this condition backwards.)
            if (_Length > RawData.Length)
            {
                Valid = false;
                return;
            }

            //Get the RADIUS Authenticator
            Array.Copy(RawData, RADIUS_AUTHENTICATOR_INDEX, _Authenticator, 0, RADIUS_AUTHENTICATOR_FIELD_LENGTH);

            //Get the RADIUS Attributes
            byte[] attributesArray = new byte[_Length - ATTRIBUTES_INDEX];
            Array.Copy(receivedData, ATTRIBUTES_INDEX, attributesArray, 0, attributesArray.Length);
            ParseAttributes(attributesArray);
        }
        catch
        {
            // Any structural error (truncated attribute, bad offsets, ...)
            // marks the packet invalid instead of propagating.
            Valid = false;
        }
    }
    #endregion

    /// <summary>
    /// Computes and stores the Authenticator field appropriate for this
    /// packet type, and writes it into the raw data.
    /// </summary>
    /// <param name="sharedsecret">RADIUS shared secret.</param>
    /// <param name="requestAuthenticator">Authenticator of the request being answered; required for response packets.</param>
    public void SetAuthenticator(string sharedsecret, byte[] requestAuthenticator = null)
    {
        switch (PacketType)
        {
            case RadiusCode.ACCESS_REQUEST:
                _Authenticator = Utils.AccessRequestAuthenticator(sharedsecret);
                break;
            case RadiusCode.ACCESS_ACCEPT:
                _Authenticator = Utils.ResponseAuthenticator(RawData, requestAuthenticator, sharedsecret);
                break;
            case RadiusCode.ACCESS_REJECT:
                break;
            case RadiusCode.ACCOUNTING_REQUEST:
                _Authenticator = Utils.AccountingRequestAuthenticator(RawData, sharedsecret);
                break;
            case RadiusCode.ACCOUNTING_RESPONSE:
                _Authenticator = Utils.ResponseAuthenticator(RawData, requestAuthenticator, sharedsecret);
                break;
            case RadiusCode.ACCOUNTING_STATUS:
                break;
            case RadiusCode.PASSWORD_REQUEST:
                break;
            case RadiusCode.PASSWORD_ACCEPT:
                break;
            case RadiusCode.PASSWORD_REJECT:
                break;
            case RadiusCode.ACCOUNTING_MESSAGE:
                break;
            case RadiusCode.ACCESS_CHALLENGE:
                break;
            case RadiusCode.SERVER_STATUS:
                _Authenticator = Utils.AccessRequestAuthenticator(sharedsecret);
                break;
            case RadiusCode.COA_REQUEST:
                _Authenticator = Utils.AccountingRequestAuthenticator(RawData, sharedsecret);
                break;
            case RadiusCode.DISCONNECT_REQUEST:
                _Authenticator = Utils.AccountingRequestAuthenticator(RawData, sharedsecret);
                break;
            default:
                throw new ArgumentOutOfRangeException();
        }
        Array.Copy(_Authenticator, 0, RawData, RADIUS_AUTHENTICATOR_INDEX, RADIUS_AUTHENTICATOR_FIELD_LENGTH);
    }

    /// <summary>
    /// Overrides the packet identifier in both the property and the raw data.
    /// </summary>
    public void SetIdentifier(byte id)
    {
        Identifier = id;
        RawData[RADIUS_IDENTIFIER_INDEX] = Identifier;
    }

    /// <summary>
    /// Appends an attribute to the packet, updating the raw data and the
    /// length field.
    /// </summary>
    public void SetAttribute(RadiusAttribute attribute)
    {
        _Attributes.Add(attribute);

        //Make an array with a size of the current RawData plus the new attribute
        byte[] newRawData = new byte[RawData.Length + attribute.Length];

        //Copy the current RawData into the temp array
        Array.Copy(RawData, 0, newRawData, 0, RawData.Length);

        //Copy the new attribute into the temp array
        Array.Copy(attribute.RawData, 0, newRawData, RawData.Length, attribute.Length);

        RawData = newRawData;

        //Update the length of the RadiusPacket (big-endian on the wire)
        _Length = (ushort)RawData.Length;
        Array.Copy(BitConverter.GetBytes(_Length), 0, RawData, RADIUS_LENGTH_INDEX, sizeof(ushort));
        Array.Reverse(RawData, RADIUS_LENGTH_INDEX, sizeof(ushort));
    }

    /// <summary>
    /// Sets the Message-Autheticator attribute on a RADIUS packet. This should be called as a last step after all attributes have been added
    /// </summary>
    /// <param name="sharedSecret">RADIUS shared secret used as the HMAC-MD5 key.</param>
    public void SetMessageAuthenticator(string sharedSecret)
    {
        // We need to add the Message-Authenticator attribute with 16 octects of zero
        byte[] newRawData = new byte[RawData.Length + RADIUS_MESSAGE_AUTHENTICATOR_LENGTH];

        // Copy the current packet into the new array
        Array.Copy(RawData, 0, newRawData, 0, RawData.Length);

        // Adjust the length field of the packet to account for the new attribute
        Array.Copy(BitConverter.GetBytes(newRawData.Length), 0, newRawData, RADIUS_LENGTH_INDEX, sizeof(ushort));
        Array.Reverse(newRawData, RADIUS_LENGTH_INDEX, sizeof(ushort));

        // Set the type and length of the Message-Authenticator attribute
        newRawData[RawData.Length] = (byte)RadiusAttributeType.MESSAGE_AUTHENTICATOR;
        newRawData[RawData.Length + 1] = RADIUS_MESSAGE_AUTHENTICATOR_LENGTH;

        // Calculate the HMAC-MD5 hash of the packet (with the attribute value
        // still zeroed) using the shared secret. HMACMD5 is IDisposable, so
        // release it deterministically; previously it was leaked.
        using (HMACMD5 hmacmd5 = new HMACMD5(Encoding.ASCII.GetBytes(sharedSecret)))
        {
            var hash = hmacmd5.ComputeHash(newRawData);

            // Copy the hash value into the 16 octects to replace the 0's with the actual hash
            Array.Copy(hash, 0, newRawData, newRawData.Length - RADIUS_MESSAGE_AUTH_HASH_LENGTH, hash.Length);
        }

        // Set the final result as the new RawData
        RawData = newRawData;

        // Update the Length to include the Message-Authenticator attribute
        _Length += RADIUS_MESSAGE_AUTHENTICATOR_LENGTH;
    }

    // Walks the attribute region and materializes RadiusAttribute instances;
    // marks the packet invalid on any malformed attribute.
    private void ParseAttributes(byte[] attributeByteArray)
    {
        int currentAttributeOffset = 0;
        while (currentAttributeOffset < attributeByteArray.Length)
        {
            // A well-formed attribute needs at least 2 octets (type + length);
            // bail out instead of reading past the end of the region.
            if (currentAttributeOffset + 1 >= attributeByteArray.Length)
            {
                Valid = false;
                return;
            }

            //Get the RADIUS attribute type
            RadiusAttributeType type = (RadiusAttributeType)attributeByteArray[currentAttributeOffset];

            //Get the RADIUS attribute length
            byte length = attributeByteArray[currentAttributeOffset + 1];

            // Check minimum length and make sure the attribute doesn't run off
            // the end of the attribute region. (Previously compared against
            // _Length, which also counts the 20-octet header and was therefore
            // too lenient by 20 bytes.)
            if (length < 2 || currentAttributeOffset + length > attributeByteArray.Length)
            {
                Valid = false;
                return;
            }

            //Get the RADIUS attribute data
            byte[] data = new byte[length - 2];
            Array.Copy(attributeByteArray, currentAttributeOffset + 2, data, 0, length - 2);

            _Attributes.Add(type == RadiusAttributeType.VENDOR_SPECIFIC
                ? new VendorSpecificAttribute(attributeByteArray, currentAttributeOffset)
                : new RadiusAttribute(type, data));

            currentAttributeOffset += length;
        }
    }
}
}
| |
using System;
using System.Threading.Tasks;
using System.Collections.Generic;
using System.Numerics;
using Nethereum.Hex.HexTypes;
using Nethereum.ABI.FunctionEncoding.Attributes;
using Nethereum.Web3;
using Nethereum.RPC.Eth.DTOs;
using Nethereum.Contracts.CQS;
using Nethereum.Contracts.ContractHandlers;
using Nethereum.Contracts;
using System.Threading;
using Nethereum.GnosisSafe.GnosisSafe.ContractDefinition;
namespace Nethereum.GnosisSafe.GnosisSafe
{
public partial class GnosisSafeService
{
/// <summary>
/// Deploys the contract and waits until a transaction receipt is available.
/// </summary>
public static Task<TransactionReceipt> DeployContractAndWaitForReceiptAsync(Nethereum.Web3.Web3 web3, GnosisSafeDeployment gnosisSafeDeployment, CancellationTokenSource cancellationTokenSource = null)
{
    var deploymentHandler = web3.Eth.GetContractDeploymentHandler<GnosisSafeDeployment>();
    return deploymentHandler.SendRequestAndWaitForReceiptAsync(gnosisSafeDeployment, cancellationTokenSource);
}

/// <summary>
/// Deploys the contract and returns the transaction hash without waiting for mining.
/// </summary>
public static Task<string> DeployContractAsync(Nethereum.Web3.Web3 web3, GnosisSafeDeployment gnosisSafeDeployment)
{
    var deploymentHandler = web3.Eth.GetContractDeploymentHandler<GnosisSafeDeployment>();
    return deploymentHandler.SendRequestAsync(gnosisSafeDeployment);
}

/// <summary>
/// Deploys the contract, waits for the receipt, and wraps the deployed address in a service instance.
/// </summary>
public static async Task<GnosisSafeService> DeployContractAndGetServiceAsync(Nethereum.Web3.Web3 web3, GnosisSafeDeployment gnosisSafeDeployment, CancellationTokenSource cancellationTokenSource = null)
{
    var deploymentReceipt = await DeployContractAndWaitForReceiptAsync(web3, gnosisSafeDeployment, cancellationTokenSource);
    return new GnosisSafeService(web3, deploymentReceipt.ContractAddress);
}
// Web3 client used for all RPC interaction by this service instance.
protected Nethereum.Web3.Web3 Web3{ get; }

// Typed handler bound to the deployed contract's address; every query and
// transaction helper in this class delegates to it.
public ContractHandler ContractHandler { get; }

/// <summary>
/// Wraps an already-deployed GnosisSafe contract.
/// </summary>
/// <param name="web3">Configured Web3 client.</param>
/// <param name="contractAddress">Address of the deployed contract.</param>
public GnosisSafeService(Nethereum.Web3.Web3 web3, string contractAddress)
{
    Web3 = web3;
    ContractHandler = web3.Eth.GetContractHandler(contractAddress);
}
public Task<string> VERSIONQueryAsync(VERSIONFunction vERSIONFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<VERSIONFunction, string>(vERSIONFunction, blockParameter);
}
public Task<string> VERSIONQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<VERSIONFunction, string>(null, blockParameter);
}
public Task<string> AddOwnerWithThresholdRequestAsync(AddOwnerWithThresholdFunction addOwnerWithThresholdFunction)
{
return ContractHandler.SendRequestAsync(addOwnerWithThresholdFunction);
}
public Task<TransactionReceipt> AddOwnerWithThresholdRequestAndWaitForReceiptAsync(AddOwnerWithThresholdFunction addOwnerWithThresholdFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(addOwnerWithThresholdFunction, cancellationToken);
}
public Task<string> AddOwnerWithThresholdRequestAsync(string owner, BigInteger threshold)
{
var addOwnerWithThresholdFunction = new AddOwnerWithThresholdFunction();
addOwnerWithThresholdFunction.Owner = owner;
addOwnerWithThresholdFunction.Threshold = threshold;
return ContractHandler.SendRequestAsync(addOwnerWithThresholdFunction);
}
public Task<TransactionReceipt> AddOwnerWithThresholdRequestAndWaitForReceiptAsync(string owner, BigInteger threshold, CancellationTokenSource cancellationToken = null)
{
var addOwnerWithThresholdFunction = new AddOwnerWithThresholdFunction();
addOwnerWithThresholdFunction.Owner = owner;
addOwnerWithThresholdFunction.Threshold = threshold;
return ContractHandler.SendRequestAndWaitForReceiptAsync(addOwnerWithThresholdFunction, cancellationToken);
}
public Task<string> ApproveHashRequestAsync(ApproveHashFunction approveHashFunction)
{
return ContractHandler.SendRequestAsync(approveHashFunction);
}
public Task<TransactionReceipt> ApproveHashRequestAndWaitForReceiptAsync(ApproveHashFunction approveHashFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(approveHashFunction, cancellationToken);
}
public Task<string> ApproveHashRequestAsync(byte[] hashToApprove)
{
var approveHashFunction = new ApproveHashFunction();
approveHashFunction.HashToApprove = hashToApprove;
return ContractHandler.SendRequestAsync(approveHashFunction);
}
public Task<TransactionReceipt> ApproveHashRequestAndWaitForReceiptAsync(byte[] hashToApprove, CancellationTokenSource cancellationToken = null)
{
var approveHashFunction = new ApproveHashFunction();
approveHashFunction.HashToApprove = hashToApprove;
return ContractHandler.SendRequestAndWaitForReceiptAsync(approveHashFunction, cancellationToken);
}
public Task<BigInteger> ApprovedHashesQueryAsync(ApprovedHashesFunction approvedHashesFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<ApprovedHashesFunction, BigInteger>(approvedHashesFunction, blockParameter);
}
public Task<BigInteger> ApprovedHashesQueryAsync(string returnValue1, byte[] returnValue2, BlockParameter blockParameter = null)
{
var approvedHashesFunction = new ApprovedHashesFunction();
approvedHashesFunction.ReturnValue1 = returnValue1;
approvedHashesFunction.ReturnValue2 = returnValue2;
return ContractHandler.QueryAsync<ApprovedHashesFunction, BigInteger>(approvedHashesFunction, blockParameter);
}
public Task<string> ChangeThresholdRequestAsync(ChangeThresholdFunction changeThresholdFunction)
{
return ContractHandler.SendRequestAsync(changeThresholdFunction);
}
public Task<TransactionReceipt> ChangeThresholdRequestAndWaitForReceiptAsync(ChangeThresholdFunction changeThresholdFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(changeThresholdFunction, cancellationToken);
}
public Task<string> ChangeThresholdRequestAsync(BigInteger threshold)
{
var changeThresholdFunction = new ChangeThresholdFunction();
changeThresholdFunction.Threshold = threshold;
return ContractHandler.SendRequestAsync(changeThresholdFunction);
}
public Task<TransactionReceipt> ChangeThresholdRequestAndWaitForReceiptAsync(BigInteger threshold, CancellationTokenSource cancellationToken = null)
{
var changeThresholdFunction = new ChangeThresholdFunction();
changeThresholdFunction.Threshold = threshold;
return ContractHandler.SendRequestAndWaitForReceiptAsync(changeThresholdFunction, cancellationToken);
}
public Task<string> DisableModuleRequestAsync(DisableModuleFunction disableModuleFunction)
{
return ContractHandler.SendRequestAsync(disableModuleFunction);
}
public Task<TransactionReceipt> DisableModuleRequestAndWaitForReceiptAsync(DisableModuleFunction disableModuleFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(disableModuleFunction, cancellationToken);
}
public Task<string> DisableModuleRequestAsync(string prevModule, string module)
{
var disableModuleFunction = new DisableModuleFunction();
disableModuleFunction.PrevModule = prevModule;
disableModuleFunction.Module = module;
return ContractHandler.SendRequestAsync(disableModuleFunction);
}
public Task<TransactionReceipt> DisableModuleRequestAndWaitForReceiptAsync(string prevModule, string module, CancellationTokenSource cancellationToken = null)
{
var disableModuleFunction = new DisableModuleFunction();
disableModuleFunction.PrevModule = prevModule;
disableModuleFunction.Module = module;
return ContractHandler.SendRequestAndWaitForReceiptAsync(disableModuleFunction, cancellationToken);
}
public Task<byte[]> DomainSeparatorQueryAsync(DomainSeparatorFunction domainSeparatorFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<DomainSeparatorFunction, byte[]>(domainSeparatorFunction, blockParameter);
}
public Task<byte[]> DomainSeparatorQueryAsync(BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<DomainSeparatorFunction, byte[]>(null, blockParameter);
}
public Task<string> EnableModuleRequestAsync(EnableModuleFunction enableModuleFunction)
{
return ContractHandler.SendRequestAsync(enableModuleFunction);
}
public Task<TransactionReceipt> EnableModuleRequestAndWaitForReceiptAsync(EnableModuleFunction enableModuleFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(enableModuleFunction, cancellationToken);
}
public Task<string> EnableModuleRequestAsync(string module)
{
var enableModuleFunction = new EnableModuleFunction();
enableModuleFunction.Module = module;
return ContractHandler.SendRequestAsync(enableModuleFunction);
}
public Task<TransactionReceipt> EnableModuleRequestAndWaitForReceiptAsync(string module, CancellationTokenSource cancellationToken = null)
{
var enableModuleFunction = new EnableModuleFunction();
enableModuleFunction.Module = module;
return ContractHandler.SendRequestAndWaitForReceiptAsync(enableModuleFunction, cancellationToken);
}
public Task<byte[]> EncodeTransactionDataQueryAsync(EncodeTransactionDataFunction encodeTransactionDataFunction, BlockParameter blockParameter = null)
{
return ContractHandler.QueryAsync<EncodeTransactionDataFunction, byte[]>(encodeTransactionDataFunction, blockParameter);
}
public Task<byte[]> EncodeTransactionDataQueryAsync(string to, BigInteger value, byte[] data, byte operation, BigInteger safeTxGas, BigInteger baseGas, BigInteger gasPrice, string gasToken, string refundReceiver, BigInteger nonce, BlockParameter blockParameter = null)
{
var encodeTransactionDataFunction = new EncodeTransactionDataFunction();
encodeTransactionDataFunction.To = to;
encodeTransactionDataFunction.Value = value;
encodeTransactionDataFunction.Data = data;
encodeTransactionDataFunction.Operation = operation;
encodeTransactionDataFunction.SafeTxGas = safeTxGas;
encodeTransactionDataFunction.BaseGas = baseGas;
encodeTransactionDataFunction.GasPrice = gasPrice;
encodeTransactionDataFunction.GasToken = gasToken;
encodeTransactionDataFunction.RefundReceiver = refundReceiver;
encodeTransactionDataFunction.Nonce = nonce;
return ContractHandler.QueryAsync<EncodeTransactionDataFunction, byte[]>(encodeTransactionDataFunction, blockParameter);
}
public Task<string> ExecTransactionRequestAsync(ExecTransactionFunction execTransactionFunction)
{
return ContractHandler.SendRequestAsync(execTransactionFunction);
}
public Task<TransactionReceipt> ExecTransactionRequestAndWaitForReceiptAsync(ExecTransactionFunction execTransactionFunction, CancellationTokenSource cancellationToken = null)
{
return ContractHandler.SendRequestAndWaitForReceiptAsync(execTransactionFunction, cancellationToken);
}
/// <summary>Sends an execTransaction request built from the individual Safe transaction fields and signatures.</summary>
public Task<string> ExecTransactionRequestAsync(string to, BigInteger value, byte[] data, byte operation, BigInteger safeTxGas, BigInteger baseGas, BigInteger gasPrice, string gasToken, string refundReceiver, byte[] signatures)
{
    var request = new ExecTransactionFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation,
        SafeTxGas = safeTxGas,
        BaseGas = baseGas,
        GasPrice = gasPrice,
        GasToken = gasToken,
        RefundReceiver = refundReceiver,
        Signatures = signatures
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends an execTransaction request built from the individual fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> ExecTransactionRequestAndWaitForReceiptAsync(string to, BigInteger value, byte[] data, byte operation, BigInteger safeTxGas, BigInteger baseGas, BigInteger gasPrice, string gasToken, string refundReceiver, byte[] signatures, CancellationTokenSource cancellationToken = null)
{
    var request = new ExecTransactionFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation,
        SafeTxGas = safeTxGas,
        BaseGas = baseGas,
        GasPrice = gasPrice,
        GasToken = gasToken,
        RefundReceiver = refundReceiver,
        Signatures = signatures
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends an execTransactionFromModule request and returns the transaction hash.</summary>
public Task<string> ExecTransactionFromModuleRequestAsync(ExecTransactionFromModuleFunction execTransactionFromModuleFunction)
    => ContractHandler.SendRequestAsync(execTransactionFromModuleFunction);
/// <summary>Sends an execTransactionFromModule request and waits for the mined receipt.</summary>
public Task<TransactionReceipt> ExecTransactionFromModuleRequestAndWaitForReceiptAsync(ExecTransactionFromModuleFunction execTransactionFromModuleFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(execTransactionFromModuleFunction, cancellationToken);
/// <summary>Sends an execTransactionFromModule request built from the individual call fields.</summary>
public Task<string> ExecTransactionFromModuleRequestAsync(string to, BigInteger value, byte[] data, byte operation)
{
    var request = new ExecTransactionFromModuleFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends an execTransactionFromModule request built from the individual call fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> ExecTransactionFromModuleRequestAndWaitForReceiptAsync(string to, BigInteger value, byte[] data, byte operation, CancellationTokenSource cancellationToken = null)
{
    var request = new ExecTransactionFromModuleFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends an execTransactionFromModuleReturnData request and returns the transaction hash.</summary>
public Task<string> ExecTransactionFromModuleReturnDataRequestAsync(ExecTransactionFromModuleReturnDataFunction execTransactionFromModuleReturnDataFunction)
    => ContractHandler.SendRequestAsync(execTransactionFromModuleReturnDataFunction);
/// <summary>Sends an execTransactionFromModuleReturnData request and waits for the mined receipt.</summary>
public Task<TransactionReceipt> ExecTransactionFromModuleReturnDataRequestAndWaitForReceiptAsync(ExecTransactionFromModuleReturnDataFunction execTransactionFromModuleReturnDataFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(execTransactionFromModuleReturnDataFunction, cancellationToken);
/// <summary>Sends an execTransactionFromModuleReturnData request built from the individual call fields.</summary>
public Task<string> ExecTransactionFromModuleReturnDataRequestAsync(string to, BigInteger value, byte[] data, byte operation)
{
    var request = new ExecTransactionFromModuleReturnDataFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends an execTransactionFromModuleReturnData request built from the individual call fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> ExecTransactionFromModuleReturnDataRequestAndWaitForReceiptAsync(string to, BigInteger value, byte[] data, byte operation, CancellationTokenSource cancellationToken = null)
{
    var request = new ExecTransactionFromModuleReturnDataFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Queries getChainId using a caller-built function message.</summary>
public Task<BigInteger> GetChainIdQueryAsync(GetChainIdFunction getChainIdFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetChainIdFunction, BigInteger>(getChainIdFunction, blockParameter);
/// <summary>Queries getChainId; the function takes no arguments, so no message is built.</summary>
public Task<BigInteger> GetChainIdQueryAsync(BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetChainIdFunction, BigInteger>(null, blockParameter);
/// <summary>Queries getModulesPaginated using a caller-built function message, deserializing the multi-value result.</summary>
public Task<GetModulesPaginatedOutputDTO> GetModulesPaginatedQueryAsync(GetModulesPaginatedFunction getModulesPaginatedFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryDeserializingToObjectAsync<GetModulesPaginatedFunction, GetModulesPaginatedOutputDTO>(getModulesPaginatedFunction, blockParameter);
/// <summary>Queries getModulesPaginated for one page of enabled modules starting at the given cursor.</summary>
public Task<GetModulesPaginatedOutputDTO> GetModulesPaginatedQueryAsync(string start, BigInteger pageSize, BlockParameter blockParameter = null)
{
    var query = new GetModulesPaginatedFunction { Start = start, PageSize = pageSize };
    return ContractHandler.QueryDeserializingToObjectAsync<GetModulesPaginatedFunction, GetModulesPaginatedOutputDTO>(query, blockParameter);
}
/// <summary>Queries getOwners using a caller-built function message.</summary>
public Task<List<string>> GetOwnersQueryAsync(GetOwnersFunction getOwnersFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetOwnersFunction, List<string>>(getOwnersFunction, blockParameter);
/// <summary>Queries getOwners; the function takes no arguments, so no message is built.</summary>
public Task<List<string>> GetOwnersQueryAsync(BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetOwnersFunction, List<string>>(null, blockParameter);
/// <summary>Queries getStorageAt using a caller-built function message.</summary>
public Task<byte[]> GetStorageAtQueryAsync(GetStorageAtFunction getStorageAtFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetStorageAtFunction, byte[]>(getStorageAtFunction, blockParameter);
/// <summary>Queries getStorageAt for the given slot offset and length.</summary>
public Task<byte[]> GetStorageAtQueryAsync(BigInteger offset, BigInteger length, BlockParameter blockParameter = null)
{
    var query = new GetStorageAtFunction { Offset = offset, Length = length };
    return ContractHandler.QueryAsync<GetStorageAtFunction, byte[]>(query, blockParameter);
}
/// <summary>Queries getThreshold using a caller-built function message.</summary>
public Task<BigInteger> GetThresholdQueryAsync(GetThresholdFunction getThresholdFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetThresholdFunction, BigInteger>(getThresholdFunction, blockParameter);
/// <summary>Queries getThreshold; the function takes no arguments, so no message is built.</summary>
public Task<BigInteger> GetThresholdQueryAsync(BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetThresholdFunction, BigInteger>(null, blockParameter);
/// <summary>Queries getTransactionHash using a caller-built function message.</summary>
public Task<byte[]> GetTransactionHashQueryAsync(GetTransactionHashFunction getTransactionHashFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<GetTransactionHashFunction, byte[]>(getTransactionHashFunction, blockParameter);
/// <summary>Queries getTransactionHash, building the function message from the individual Safe transaction fields.</summary>
public Task<byte[]> GetTransactionHashQueryAsync(string to, BigInteger value, byte[] data, byte operation, BigInteger safeTxGas, BigInteger baseGas, BigInteger gasPrice, string gasToken, string refundReceiver, BigInteger nonce, BlockParameter blockParameter = null)
{
    var query = new GetTransactionHashFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation,
        SafeTxGas = safeTxGas,
        BaseGas = baseGas,
        GasPrice = gasPrice,
        GasToken = gasToken,
        RefundReceiver = refundReceiver,
        Nonce = nonce
    };
    return ContractHandler.QueryAsync<GetTransactionHashFunction, byte[]>(query, blockParameter);
}
/// <summary>Queries isModuleEnabled using a caller-built function message.</summary>
public Task<bool> IsModuleEnabledQueryAsync(IsModuleEnabledFunction isModuleEnabledFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<IsModuleEnabledFunction, bool>(isModuleEnabledFunction, blockParameter);
/// <summary>Queries whether the given module address is enabled on this Safe.</summary>
public Task<bool> IsModuleEnabledQueryAsync(string module, BlockParameter blockParameter = null)
{
    var query = new IsModuleEnabledFunction { Module = module };
    return ContractHandler.QueryAsync<IsModuleEnabledFunction, bool>(query, blockParameter);
}
/// <summary>Queries isOwner using a caller-built function message.</summary>
public Task<bool> IsOwnerQueryAsync(IsOwnerFunction isOwnerFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<IsOwnerFunction, bool>(isOwnerFunction, blockParameter);
/// <summary>Queries whether the given address is an owner of this Safe.</summary>
public Task<bool> IsOwnerQueryAsync(string owner, BlockParameter blockParameter = null)
{
    var query = new IsOwnerFunction { Owner = owner };
    return ContractHandler.QueryAsync<IsOwnerFunction, bool>(query, blockParameter);
}
/// <summary>Queries nonce using a caller-built function message.</summary>
public Task<BigInteger> NonceQueryAsync(NonceFunction nonceFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<NonceFunction, BigInteger>(nonceFunction, blockParameter);
/// <summary>Queries nonce; the function takes no arguments, so no message is built.</summary>
public Task<BigInteger> NonceQueryAsync(BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<NonceFunction, BigInteger>(null, blockParameter);
/// <summary>Sends a removeOwner transaction and returns the transaction hash.</summary>
public Task<string> RemoveOwnerRequestAsync(RemoveOwnerFunction removeOwnerFunction)
    => ContractHandler.SendRequestAsync(removeOwnerFunction);
/// <summary>Sends a removeOwner transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> RemoveOwnerRequestAndWaitForReceiptAsync(RemoveOwnerFunction removeOwnerFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(removeOwnerFunction, cancellationToken);
/// <summary>Sends a removeOwner transaction built from the linked-list predecessor, the owner to remove and the new threshold.</summary>
public Task<string> RemoveOwnerRequestAsync(string prevOwner, string owner, BigInteger threshold)
{
    var request = new RemoveOwnerFunction
    {
        PrevOwner = prevOwner,
        Owner = owner,
        Threshold = threshold
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a removeOwner transaction built from the individual fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> RemoveOwnerRequestAndWaitForReceiptAsync(string prevOwner, string owner, BigInteger threshold, CancellationTokenSource cancellationToken = null)
{
    var request = new RemoveOwnerFunction
    {
        PrevOwner = prevOwner,
        Owner = owner,
        Threshold = threshold
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends a requiredTxGas transaction and returns the transaction hash.</summary>
public Task<string> RequiredTxGasRequestAsync(RequiredTxGasFunction requiredTxGasFunction)
    => ContractHandler.SendRequestAsync(requiredTxGasFunction);
/// <summary>Sends a requiredTxGas transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> RequiredTxGasRequestAndWaitForReceiptAsync(RequiredTxGasFunction requiredTxGasFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(requiredTxGasFunction, cancellationToken);
/// <summary>Sends a requiredTxGas transaction built from the individual call fields.</summary>
public Task<string> RequiredTxGasRequestAsync(string to, BigInteger value, byte[] data, byte operation)
{
    var request = new RequiredTxGasFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a requiredTxGas transaction built from the individual call fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> RequiredTxGasRequestAndWaitForReceiptAsync(string to, BigInteger value, byte[] data, byte operation, CancellationTokenSource cancellationToken = null)
{
    var request = new RequiredTxGasFunction
    {
        To = to,
        Value = value,
        Data = data,
        Operation = operation
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends a setFallbackHandler transaction and returns the transaction hash.</summary>
public Task<string> SetFallbackHandlerRequestAsync(SetFallbackHandlerFunction setFallbackHandlerFunction)
    => ContractHandler.SendRequestAsync(setFallbackHandlerFunction);
/// <summary>Sends a setFallbackHandler transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SetFallbackHandlerRequestAndWaitForReceiptAsync(SetFallbackHandlerFunction setFallbackHandlerFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(setFallbackHandlerFunction, cancellationToken);
/// <summary>Sends a setFallbackHandler transaction for the given handler address.</summary>
public Task<string> SetFallbackHandlerRequestAsync(string handler)
{
    var request = new SetFallbackHandlerFunction { Handler = handler };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a setFallbackHandler transaction for the given handler address and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SetFallbackHandlerRequestAndWaitForReceiptAsync(string handler, CancellationTokenSource cancellationToken = null)
{
    var request = new SetFallbackHandlerFunction { Handler = handler };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends a setGuard transaction and returns the transaction hash.</summary>
public Task<string> SetGuardRequestAsync(SetGuardFunction setGuardFunction)
    => ContractHandler.SendRequestAsync(setGuardFunction);
/// <summary>Sends a setGuard transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SetGuardRequestAndWaitForReceiptAsync(SetGuardFunction setGuardFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(setGuardFunction, cancellationToken);
/// <summary>Sends a setGuard transaction for the given guard address.</summary>
public Task<string> SetGuardRequestAsync(string guard)
{
    var request = new SetGuardFunction { Guard = guard };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a setGuard transaction for the given guard address and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SetGuardRequestAndWaitForReceiptAsync(string guard, CancellationTokenSource cancellationToken = null)
{
    var request = new SetGuardFunction { Guard = guard };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends a setup transaction and returns the transaction hash.</summary>
public Task<string> SetupRequestAsync(SetupFunction setupFunction)
    => ContractHandler.SendRequestAsync(setupFunction);
/// <summary>Sends a setup transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SetupRequestAndWaitForReceiptAsync(SetupFunction setupFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(setupFunction, cancellationToken);
/// <summary>Sends a setup transaction built from the initial owners, threshold, optional delegate call and payment fields.</summary>
public Task<string> SetupRequestAsync(List<string> owners, BigInteger threshold, string to, byte[] data, string fallbackHandler, string paymentToken, BigInteger payment, string paymentReceiver)
{
    var request = new SetupFunction
    {
        Owners = owners,
        Threshold = threshold,
        To = to,
        Data = data,
        FallbackHandler = fallbackHandler,
        PaymentToken = paymentToken,
        Payment = payment,
        PaymentReceiver = paymentReceiver
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a setup transaction built from the individual fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SetupRequestAndWaitForReceiptAsync(List<string> owners, BigInteger threshold, string to, byte[] data, string fallbackHandler, string paymentToken, BigInteger payment, string paymentReceiver, CancellationTokenSource cancellationToken = null)
{
    var request = new SetupFunction
    {
        Owners = owners,
        Threshold = threshold,
        To = to,
        Data = data,
        FallbackHandler = fallbackHandler,
        PaymentToken = paymentToken,
        Payment = payment,
        PaymentReceiver = paymentReceiver
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Queries signedMessages using a caller-built function message.</summary>
public Task<BigInteger> SignedMessagesQueryAsync(SignedMessagesFunction signedMessagesFunction, BlockParameter blockParameter = null)
    => ContractHandler.QueryAsync<SignedMessagesFunction, BigInteger>(signedMessagesFunction, blockParameter);
/// <summary>Queries the signedMessages mapping for the given message hash.</summary>
public Task<BigInteger> SignedMessagesQueryAsync(byte[] returnValue1, BlockParameter blockParameter = null)
{
    var query = new SignedMessagesFunction { ReturnValue1 = returnValue1 };
    return ContractHandler.QueryAsync<SignedMessagesFunction, BigInteger>(query, blockParameter);
}
/// <summary>Sends a simulateAndRevert transaction and returns the transaction hash.</summary>
public Task<string> SimulateAndRevertRequestAsync(SimulateAndRevertFunction simulateAndRevertFunction)
    => ContractHandler.SendRequestAsync(simulateAndRevertFunction);
/// <summary>Sends a simulateAndRevert transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SimulateAndRevertRequestAndWaitForReceiptAsync(SimulateAndRevertFunction simulateAndRevertFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(simulateAndRevertFunction, cancellationToken);
/// <summary>Sends a simulateAndRevert transaction for the given target contract and calldata payload.</summary>
public Task<string> SimulateAndRevertRequestAsync(string targetContract, byte[] calldataPayload)
{
    var request = new SimulateAndRevertFunction
    {
        TargetContract = targetContract,
        CalldataPayload = calldataPayload
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a simulateAndRevert transaction for the given target and payload and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SimulateAndRevertRequestAndWaitForReceiptAsync(string targetContract, byte[] calldataPayload, CancellationTokenSource cancellationToken = null)
{
    var request = new SimulateAndRevertFunction
    {
        TargetContract = targetContract,
        CalldataPayload = calldataPayload
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
/// <summary>Sends a swapOwner transaction and returns the transaction hash.</summary>
public Task<string> SwapOwnerRequestAsync(SwapOwnerFunction swapOwnerFunction)
    => ContractHandler.SendRequestAsync(swapOwnerFunction);
/// <summary>Sends a swapOwner transaction and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SwapOwnerRequestAndWaitForReceiptAsync(SwapOwnerFunction swapOwnerFunction, CancellationTokenSource cancellationToken = null)
    => ContractHandler.SendRequestAndWaitForReceiptAsync(swapOwnerFunction, cancellationToken);
/// <summary>Sends a swapOwner transaction replacing oldOwner with newOwner (prevOwner is the linked-list predecessor).</summary>
public Task<string> SwapOwnerRequestAsync(string prevOwner, string oldOwner, string newOwner)
{
    var request = new SwapOwnerFunction
    {
        PrevOwner = prevOwner,
        OldOwner = oldOwner,
        NewOwner = newOwner
    };
    return ContractHandler.SendRequestAsync(request);
}
/// <summary>Sends a swapOwner transaction built from the individual fields and waits for the mined receipt.</summary>
public Task<TransactionReceipt> SwapOwnerRequestAndWaitForReceiptAsync(string prevOwner, string oldOwner, string newOwner, CancellationTokenSource cancellationToken = null)
{
    var request = new SwapOwnerFunction
    {
        PrevOwner = prevOwner,
        OldOwner = oldOwner,
        NewOwner = newOwner
    };
    return ContractHandler.SendRequestAndWaitForReceiptAsync(request, cancellationToken);
}
}
}
| |
#pragma warning disable 162,108,618
using Casanova.Prelude;
using System.Linq;
using System;
using System.Collections.Generic;
using UnityEngine;
// Compiler-generated (Casanova) game world for a small asteroids game.
// Owns the ship and the live asteroid/projectile lists, and runs the
// per-frame rules: Rule0 prunes destroyed entities, Rule1 fires a
// projectile on Space, Rule2 spawns an asteroid roughly once per second.
public class World : MonoBehaviour{
// Global frame counter, incremented once per Unity Update.
public static int frame;
void Update () { Update(Time.deltaTime, this);
frame++; }
public bool JustEntered = true;
// Unity entry point: creates the ship and empty entity lists.
public void Start()
{
Ship = new Ship();
Projectiles = (
Enumerable.Empty<Projectile>()).ToList<Projectile>();
Asteroids = (
Enumerable.Empty<Asteroid>()).ToList<Asteroid>();
}
public List<Asteroid> __Asteroids;
// Setter clears the JustEntered flag on every asteroid in the new list.
public List<Asteroid> Asteroids{ get { return __Asteroids; }
set{ __Asteroids = value;
foreach(var e in value){if(e.JustEntered){ e.JustEntered = false;
}
} }
}
public List<Projectile> __Projectiles;
// Setter clears the JustEntered flag on every projectile in the new list.
public List<Projectile> Projectiles{ get { return __Projectiles; }
set{ __Projectiles = value;
foreach(var e in value){if(e.JustEntered){ e.JustEntered = false;
}
} }
}
public Ship Ship;
// Seconds remaining until the next asteroid spawn (driven by Rule2).
public System.Single count_down1;
System.DateTime init_time = System.DateTime.Now;
// Per-frame tick: prune destroyed entities, update every entity, then
// run the fire rule (Rule1) and the spawn rule (Rule2).
public void Update(float dt, World world) {
var t = System.DateTime.Now; this.Rule0(dt, world);
for(int x0 = 0; x0 < Asteroids.Count; x0++) {
Asteroids[x0].Update(dt, world);
}
for(int x0 = 0; x0 < Projectiles.Count; x0++) {
Projectiles[x0].Update(dt, world);
}
Ship.Update(dt, world);
this.Rule1(dt, world);
this.Rule2(dt, world);
}
// Rule0: rebuild both lists keeping only entities not marked Destroyed.
public void Rule0(float dt, World world)
{
Asteroids = (
(Asteroids).Select(__ContextSymbol2 => new { ___a00 = __ContextSymbol2 })
.Where(__ContextSymbol3 => !(__ContextSymbol3.___a00.Destroyed))
.Select(__ContextSymbol4 => __ContextSymbol4.___a00)
.ToList<Asteroid>()).ToList<Asteroid>();
Projectiles = (
(Projectiles).Select(__ContextSymbol5 => new { ___p00 = __ContextSymbol5 })
.Where(__ContextSymbol6 => !(__ContextSymbol6.___p00.Destroyed))
.Select(__ContextSymbol7 => __ContextSymbol7.___p00)
.ToList<Projectile>()).ToList<Projectile>();
}
// State of the Rule1 state machine (-1 = waiting for a Space key press).
int s1=-1;
// Rule1: when Space is pressed, prepend a new projectile at the ship's
// position (Cons is the Casanova prelude list constructor).
public void Rule1(float dt, World world){
switch (s1)
{
case -1:
if(!(UnityEngine.Input.GetKeyDown(KeyCode.Space)))
{
s1 = -1;
return; }else
{
goto case 0; }
case 0:
Projectiles = new Cons<Projectile>(new Projectile(Ship.Position), (Projectiles)).ToList<Projectile>();
s1 = -1;
return;
default: return;}}
// State of the Rule2 state machine (-1 = restart countdown, 2 = counting).
int s2=-1;
// Rule2: count down one second across frames, then prepend a new asteroid
// and restart the countdown.
public void Rule2(float dt, World world){
switch (s2)
{
case -1:
count_down1 = 1f;
goto case 2;
case 2:
if(((count_down1) > (0f)))
{
count_down1 = ((count_down1) - (dt));
s2 = 2;
return; }else
{
goto case 0; }
case 0:
Asteroids = new Cons<Asteroid>(new Asteroid(), (Asteroids)).ToList<Asteroid>();
s2 = -1;
return;
default: return;}}
}
// Compiler-generated (Casanova) asteroid entity. Spawns at a random x in
// [-7, 10) at y = 4 with a random downward velocity; Rule2 integrates the
// position, Rule0 destroys it below y = -4, and Rule1 destroys it one
// frame after a projectile comes within distance 1.
public class Asteroid{
public int frame;
public bool JustEntered = true;
public int ID;
public Asteroid()
{JustEntered = false;
frame = World.frame;
Velocity = new UnityEngine.Vector3(0f,(UnityEngine.Random.value) * (-1f),0f);
UnityAsteroid = UnityAsteroid.Instantiate(new UnityEngine.Vector3((-7f) + ((UnityEngine.Random.value) * (17f)),4f,0f));
}
// Destroyed and Position delegate to the underlying Unity-side object.
public System.Boolean Destroyed{ get { return UnityAsteroid.Destroyed; }
set{UnityAsteroid.Destroyed = value; }
}
public UnityEngine.Vector3 Position{ get { return UnityAsteroid.Position; }
set{UnityAsteroid.Position = value; }
}
public UnityAsteroid UnityAsteroid;
public UnityEngine.Vector3 Velocity;
// Generated pass-through properties forwarding to the UnityAsteroid component.
public UnityEngine.Animation animation{ get { return UnityAsteroid.animation; }
}
public UnityEngine.AudioSource audio{ get { return UnityAsteroid.audio; }
}
public UnityEngine.Camera camera{ get { return UnityAsteroid.camera; }
}
public UnityEngine.Collider collider{ get { return UnityAsteroid.collider; }
}
public UnityEngine.Collider2D collider2D{ get { return UnityAsteroid.collider2D; }
}
public UnityEngine.ConstantForce constantForce{ get { return UnityAsteroid.constantForce; }
}
public System.Boolean enabled{ get { return UnityAsteroid.enabled; }
set{UnityAsteroid.enabled = value; }
}
public UnityEngine.GameObject gameObject{ get { return UnityAsteroid.gameObject; }
}
public UnityEngine.GUIElement guiElement{ get { return UnityAsteroid.guiElement; }
}
public UnityEngine.GUIText guiText{ get { return UnityAsteroid.guiText; }
}
public UnityEngine.GUITexture guiTexture{ get { return UnityAsteroid.guiTexture; }
}
public UnityEngine.HideFlags hideFlags{ get { return UnityAsteroid.hideFlags; }
set{UnityAsteroid.hideFlags = value; }
}
public UnityEngine.HingeJoint hingeJoint{ get { return UnityAsteroid.hingeJoint; }
}
public UnityEngine.Light light{ get { return UnityAsteroid.light; }
}
public System.String name{ get { return UnityAsteroid.name; }
set{UnityAsteroid.name = value; }
}
public UnityEngine.ParticleEmitter particleEmitter{ get { return UnityAsteroid.particleEmitter; }
}
public UnityEngine.ParticleSystem particleSystem{ get { return UnityAsteroid.particleSystem; }
}
public UnityEngine.Renderer renderer{ get { return UnityAsteroid.renderer; }
}
public UnityEngine.Rigidbody rigidbody{ get { return UnityAsteroid.rigidbody; }
}
public UnityEngine.Rigidbody2D rigidbody2D{ get { return UnityAsteroid.rigidbody2D; }
}
public System.String tag{ get { return UnityAsteroid.tag; }
set{UnityAsteroid.tag = value; }
}
public UnityEngine.Transform transform{ get { return UnityAsteroid.transform; }
}
public System.Boolean useGUILayout{ get { return UnityAsteroid.useGUILayout; }
set{UnityAsteroid.useGUILayout = value; }
}
// Scratch list of projectiles within collision range (filled by Rule1).
public List<Projectile> ___colliding_projectiles10;
// Collision-destruction delay counter used by Rule1.
public System.Single count_down2;
public void Update(float dt, World world) {
frame = World.frame; this.Rule2(dt, world);
this.Rule0(dt, world);
this.Rule1(dt, world);
}
// Rule2: Euler-integrate position by velocity.
public void Rule2(float dt, World world)
{
Position = (Position) + ((Velocity) * (dt));
}
int s0=-1;
// Rule0: mark the asteroid destroyed once it falls below y = -4.
public void Rule0(float dt, World world){
switch (s0)
{
case -1:
if(!(((-4f) > (Position.y))))
{
s0 = -1;
return; }else
{
goto case 0; }
case 0:
Destroyed = true;
s0 = -1;
return;
default: return;}}
int s1=-1;
// Rule1: find projectiles within distance 1; if any, wait one dt-sized
// countdown (i.e. roughly one frame) and then mark this asteroid destroyed.
public void Rule1(float dt, World world){
switch (s1)
{
case -1:
___colliding_projectiles10 = (
(world.Projectiles).Select(__ContextSymbol8 => new { ___p11 = __ContextSymbol8 })
.Where(__ContextSymbol9 => ((1f) > (UnityEngine.Vector3.Distance(Position,__ContextSymbol9.___p11.Position))))
.Select(__ContextSymbol10 => __ContextSymbol10.___p11)
.ToList<Projectile>()).ToList<Projectile>();
if(((___colliding_projectiles10.Count) > (0)))
{
goto case 3; }else
{
s1 = -1;
return; }
case 3:
count_down2 = dt;
goto case 6;
case 6:
if(((count_down2) > (0f)))
{
count_down2 = ((count_down2) - (dt));
s1 = 6;
return; }else
{
goto case 4; }
case 4:
Destroyed = true;
s1 = -1;
return;
default: return;}}
}
// Compiler-generated (Casanova) projectile entity. Spawns at the ship's
// position moving upward; Rule2 integrates the position, Rule0 destroys it
// above y = 4, and Rule1 destroys it one frame after an asteroid comes
// within distance 1 (mirror of Asteroid.Rule1).
public class Projectile{
public int frame;
public bool JustEntered = true;
private UnityEngine.Vector3 p;
public int ID;
public Projectile(UnityEngine.Vector3 p)
{JustEntered = false;
frame = World.frame;
Velocity = new UnityEngine.Vector3(0f,1f,0f);
UnityProjectile = UnityProjectile.Instantiate(p);
}
// Destroyed and Position delegate to the underlying Unity-side object.
public System.Boolean Destroyed{ get { return UnityProjectile.Destroyed; }
set{UnityProjectile.Destroyed = value; }
}
public UnityEngine.Vector3 Position{ get { return UnityProjectile.Position; }
set{UnityProjectile.Position = value; }
}
public UnityProjectile UnityProjectile;
public UnityEngine.Vector3 Velocity;
// Generated pass-through properties forwarding to the UnityProjectile component.
public UnityEngine.Animation animation{ get { return UnityProjectile.animation; }
}
public UnityEngine.AudioSource audio{ get { return UnityProjectile.audio; }
}
public UnityEngine.Camera camera{ get { return UnityProjectile.camera; }
}
public UnityEngine.Collider collider{ get { return UnityProjectile.collider; }
}
public UnityEngine.Collider2D collider2D{ get { return UnityProjectile.collider2D; }
}
public UnityEngine.ConstantForce constantForce{ get { return UnityProjectile.constantForce; }
}
public System.Boolean enabled{ get { return UnityProjectile.enabled; }
set{UnityProjectile.enabled = value; }
}
public UnityEngine.GameObject gameObject{ get { return UnityProjectile.gameObject; }
}
public UnityEngine.GUIElement guiElement{ get { return UnityProjectile.guiElement; }
}
public UnityEngine.GUIText guiText{ get { return UnityProjectile.guiText; }
}
public UnityEngine.GUITexture guiTexture{ get { return UnityProjectile.guiTexture; }
}
public UnityEngine.HideFlags hideFlags{ get { return UnityProjectile.hideFlags; }
set{UnityProjectile.hideFlags = value; }
}
public UnityEngine.HingeJoint hingeJoint{ get { return UnityProjectile.hingeJoint; }
}
public UnityEngine.Light light{ get { return UnityProjectile.light; }
}
public System.String name{ get { return UnityProjectile.name; }
set{UnityProjectile.name = value; }
}
public UnityEngine.ParticleEmitter particleEmitter{ get { return UnityProjectile.particleEmitter; }
}
public UnityEngine.ParticleSystem particleSystem{ get { return UnityProjectile.particleSystem; }
}
public UnityEngine.Renderer renderer{ get { return UnityProjectile.renderer; }
}
public UnityEngine.Rigidbody rigidbody{ get { return UnityProjectile.rigidbody; }
}
public UnityEngine.Rigidbody2D rigidbody2D{ get { return UnityProjectile.rigidbody2D; }
}
public System.String tag{ get { return UnityProjectile.tag; }
set{UnityProjectile.tag = value; }
}
public UnityEngine.Transform transform{ get { return UnityProjectile.transform; }
}
public System.Boolean useGUILayout{ get { return UnityProjectile.useGUILayout; }
set{UnityProjectile.useGUILayout = value; }
}
// Scratch list of asteroids within collision range (filled by Rule1).
public List<Asteroid> ___colliding_asteroids10;
// Collision-destruction delay counter used by Rule1.
public System.Single count_down3;
public void Update(float dt, World world) {
frame = World.frame; this.Rule2(dt, world);
this.Rule0(dt, world);
this.Rule1(dt, world);
}
// Rule2: Euler-integrate position by velocity.
public void Rule2(float dt, World world)
{
Position = (Position) + ((Velocity) * (dt));
}
int s0=-1;
// Rule0: mark the projectile destroyed once it rises above y = 4.
public void Rule0(float dt, World world){
switch (s0)
{
case -1:
if(!(((Position.y) > (4f))))
{
s0 = -1;
return; }else
{
goto case 0; }
case 0:
Destroyed = true;
s0 = -1;
return;
default: return;}}
int s1=-1;
// Rule1: find asteroids within distance 1; if any, wait one dt-sized
// countdown (i.e. roughly one frame) and then mark this projectile destroyed.
public void Rule1(float dt, World world){
switch (s1)
{
case -1:
___colliding_asteroids10 = (
(world.Asteroids).Select(__ContextSymbol11 => new { ___a11 = __ContextSymbol11 })
.Where(__ContextSymbol12 => ((1f) > (UnityEngine.Vector3.Distance(Position,__ContextSymbol12.___a11.Position))))
.Select(__ContextSymbol13 => __ContextSymbol13.___a11)
.ToList<Asteroid>()).ToList<Asteroid>();
if(((___colliding_asteroids10.Count) > (0)))
{
goto case 3; }else
{
s1 = -1;
return; }
case 3:
count_down3 = dt;
goto case 6;
case 6:
if(((count_down3) > (0f)))
{
count_down3 = ((count_down3) - (dt));
s1 = 6;
return; }else
{
goto case 4; }
case 4:
Destroyed = true;
s1 = -1;
return;
default: return;}}
}
public class Ship{
public int frame;
public bool JustEntered = true;
public int ID;
public Ship()
{JustEntered = false;
frame = World.frame;
UnityShip = UnityShip.Find();
}
// World-space position, forwarded to the scene-side object.
public UnityEngine.Vector3 Position { get { return UnityShip.Position; } set { UnityShip.Position = value; } }

// The Unity scene object this logic-side ship delegates to.
public UnityShip UnityShip;

// Generated pass-through accessors: each property below simply forwards to
// the same-named member on UnityShip so game logic can use this object as if
// it were the Unity component itself. Read-only unless a setter is present.
public UnityEngine.Animation animation { get { return UnityShip.animation; } }
public UnityEngine.AudioSource audio { get { return UnityShip.audio; } }
public UnityEngine.Camera camera { get { return UnityShip.camera; } }
public UnityEngine.Collider collider { get { return UnityShip.collider; } }
public UnityEngine.Collider2D collider2D { get { return UnityShip.collider2D; } }
public UnityEngine.ConstantForce constantForce { get { return UnityShip.constantForce; } }
public System.Boolean enabled { get { return UnityShip.enabled; } set { UnityShip.enabled = value; } }
public UnityEngine.GameObject gameObject { get { return UnityShip.gameObject; } }
public UnityEngine.GUIElement guiElement { get { return UnityShip.guiElement; } }
public UnityEngine.GUIText guiText { get { return UnityShip.guiText; } }
public UnityEngine.GUITexture guiTexture { get { return UnityShip.guiTexture; } }
public UnityEngine.HideFlags hideFlags { get { return UnityShip.hideFlags; } set { UnityShip.hideFlags = value; } }
public UnityEngine.HingeJoint hingeJoint { get { return UnityShip.hingeJoint; } }
public UnityEngine.Light light { get { return UnityShip.light; } }
public System.String name { get { return UnityShip.name; } set { UnityShip.name = value; } }
public UnityEngine.ParticleEmitter particleEmitter { get { return UnityShip.particleEmitter; } }
public UnityEngine.ParticleSystem particleSystem { get { return UnityShip.particleSystem; } }
public UnityEngine.Renderer renderer { get { return UnityShip.renderer; } }
public UnityEngine.Rigidbody rigidbody { get { return UnityShip.rigidbody; } }
public UnityEngine.Rigidbody2D rigidbody2D { get { return UnityShip.rigidbody2D; } }
public System.String tag { get { return UnityShip.tag; } set { UnityShip.tag = value; } }
public UnityEngine.Transform transform { get { return UnityShip.transform; } }
public System.Boolean useGUILayout { get { return UnityShip.useGUILayout; } set { UnityShip.useGUILayout = value; } }
// Advances this ship by one tick: records the current world frame, then
// evaluates both behaviour rules with the elapsed time.
public void Update(float dt, World world)
{
    frame = World.frame;
    Rule0(dt, world);
    Rule1(dt, world);
}
// State of rule 0's generated state machine; -1 means "waiting for input".
int s0 = -1;

// Rule 0: while the D key is held, move right at 3 units/second.
public void Rule0(float dt, World world)
{
    switch (s0)
    {
        case -1:
            if (!UnityEngine.Input.GetKey(KeyCode.D))
            {
                s0 = -1;
                return;
            }
            goto case 0;
        case 0:
            Position = Position + new UnityEngine.Vector3(3f, 0f, 0f) * dt;
            s0 = -1;
            return;
        default:
            return;
    }
}
// State of rule 1's generated state machine; -1 means "waiting for input".
int s1 = -1;

// Rule 1: while the A key is held, move left at 3 units/second.
public void Rule1(float dt, World world)
{
    switch (s1)
    {
        case -1:
            if (!UnityEngine.Input.GetKey(KeyCode.A))
            {
                s1 = -1;
                return;
            }
            goto case 0;
        case 0:
            Position = Position + new UnityEngine.Vector3(-3f, 0f, 0f) * dt;
            s1 = -1;
            return;
        default:
            return;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
namespace Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition
{
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition;
    using Microsoft.Azure.Management.ContainerService.Fluent;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions;
    using Microsoft.Azure.Management.ContainerService.Fluent.ContainerServiceAgentPool.Definition;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core.GroupableResource.Definition;

    /// <summary>
    /// The stage of the container service definition allowing to specify the Linux root username.
    /// </summary>
    public interface IWithLinuxRootUsername
    {
        /// <summary>
        /// Begins the definition to specify Linux root username.
        /// </summary>
        /// <param name="rootUserName">The root username.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinuxSshKey WithRootUsername(string rootUserName);
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify the master VM size.
    /// </summary>
    public interface IWithMasterVMSize
    {
        /// <summary>
        /// Specifies the size of the master VMs, default set to "Standard_D2_v2".
        /// </summary>
        /// <param name="vmSize">The size of the VM.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate WithMasterVMSize(ContainerServiceVirtualMachineSizeTypes vmSize);
    }

    /// <summary>
    /// Container interface for all the definitions related to a container service.
    /// </summary>
    public interface IDefinition :
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IBlank,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithGroup,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithOrchestrator,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithMasterNodeCount,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinux,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinuxRootUsername,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinuxSshKey,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithAgentPool,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithServicePrincipalProfile,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate
    {
    }

    /// <summary>
    /// The first stage of a container service definition.
    /// </summary>
    public interface IBlank :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithRegion<Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithGroup>
    {
    }

    /// <summary>
    /// The stage of the definition which contains all the minimum required inputs for the resource to be created,
    /// but also allows for any other optional settings to be specified.
    /// </summary>
    public interface IWithCreate :
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithMasterDnsPrefix,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithDiagnostics,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithMasterVMSize,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithMasterStorageProfile,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithMasterOSDiskSize,
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithSubnet,
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.ResourceActions.ICreatable<Microsoft.Azure.Management.ContainerService.Fluent.IContainerService>,
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.Resource.Definition.IDefinitionWithTags<Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate>
    {
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify an agent pool profile.
    /// </summary>
    public interface IWithAgentPool
    {
        /// <summary>
        /// Begins the definition of an agent pool profile to be attached to the container service.
        /// </summary>
        /// <param name="name">The name for the agent pool profile.</param>
        /// <return>The stage representing configuration for the agent pool profile.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerServiceAgentPool.Definition.IBlank<Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate> DefineAgentPool(string name);
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify the master node count.
    /// </summary>
    public interface IWithMasterNodeCount
    {
        /// <summary>
        /// Specifies the master node count.
        /// </summary>
        /// <param name="count">Master profile count (1, 3, 5).</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithAgentPool WithMasterNodeCount(ContainerServiceMasterProfileCount count);
    }

    /// <summary>
    /// The stage of a container service definition allowing to specify the master pool OS disk size.
    /// </summary>
    public interface IWithMasterOSDiskSize
    {
        /// <summary>
        /// OS Disk Size in GB to be used for every machine in the master pool.
        /// If you specify 0, the default osDisk size will be used according to the vmSize specified.
        /// </summary>
        /// <param name="osDiskSizeInGB">OS Disk Size in GB to be used for every machine in the master pool.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate WithMasterOSDiskSizeInGB(int osDiskSizeInGB);
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify the Linux SSH key.
    /// </summary>
    public interface IWithLinuxSshKey
    {
        /// <summary>
        /// Begins the definition to specify Linux ssh key.
        /// </summary>
        /// <param name="sshKeyData">The SSH key data.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithMasterNodeCount WithSshKey(string sshKeyData);
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify the master DNS prefix label.
    /// </summary>
    public interface IWithMasterDnsPrefix
    {
        /// <summary>
        /// Specifies the DNS prefix to be used to create the FQDN for the master pool.
        /// </summary>
        /// <param name="dnsPrefix">The DNS prefix to be used to create the FQDN for the master pool.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate WithMasterDnsPrefix(string dnsPrefix);
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify the virtual network and subnet for the machines.
    /// </summary>
    public interface IWithSubnet
    {
        /// <summary>
        /// Specifies the virtual network and subnet for the virtual machines in the master and agent pools.
        /// </summary>
        /// <param name="networkId">The network ID to be used by the machines.</param>
        /// <param name="subnetName">The name of the subnet.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate WithSubnet(string networkId, string subnetName);
    }

    /// <summary>
    /// The stage of a container service definition allowing to specify the master's virtual machine storage kind.
    /// </summary>
    public interface IWithMasterStorageProfile
    {
        /// <summary>
        /// Specifies the storage kind to be used for every machine in master pool.
        /// </summary>
        /// <param name="storageProfile">The storage kind to be used for every machine in the master pool.</param>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate WithMasterStorageProfile(StorageProfileTypes storageProfile);
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify the resource group.
    /// </summary>
    public interface IWithGroup :
        Microsoft.Azure.Management.ResourceManager.Fluent.Core.GroupableResource.Definition.IWithGroup<Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithOrchestrator>
    {
    }

    /// <summary>
    /// The stage of the container service definition allowing to specify orchestration type.
    /// </summary>
    public interface IWithOrchestrator
    {
        /// <summary>
        /// Specifies the Kubernetes orchestration type for the container service.
        /// </summary>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithServicePrincipalProfile WithKubernetesOrchestration();

        /// <summary>
        /// Specifies the DCOS orchestration type for the container service.
        /// </summary>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinux WithDcosOrchestration();

        /// <summary>
        /// Specifies the Swarm orchestration type for the container service.
        /// </summary>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinux WithSwarmOrchestration();
    }

    /// <summary>
    /// The stage of the container service definition allowing the start of defining Linux specific settings.
    /// </summary>
    public interface IWithLinux
    {
        /// <summary>
        /// Begins the definition to specify Linux settings.
        /// </summary>
        /// <return>The stage representing configuration of Linux specific settings.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinuxRootUsername WithLinux();
    }

    /// <summary>
    /// The stage allowing properties for cluster service principals to be specified.
    /// </summary>
    public interface IWithServicePrincipalProfile
    {
        /// <summary>
        /// Properties for cluster service principals.
        /// </summary>
        /// <param name="clientId">The ID for the service principal.</param>
        /// <param name="secret">The secret password associated with the service principal.</param>
        /// <return>The next stage.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithLinux WithServicePrincipal(string clientId, string secret);
    }

    /// <summary>
    /// The stage of the container service definition allowing to enable diagnostics.
    /// </summary>
    public interface IWithDiagnostics
    {
        /// <summary>
        /// Enables diagnostics.
        /// </summary>
        /// <return>The next stage of the definition.</return>
        Microsoft.Azure.Management.ContainerService.Fluent.ContainerService.Definition.IWithCreate WithDiagnostics();
    }
}
| |
//
// MethodDefinition.cs
//
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2011 Jb Evain
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using Mono.Cecil.Cil;
using Mono.Collections.Generic;
using RVA = System.UInt32;
namespace Mono.Cecil {
// A method defined in the current module. Attribute bits, semantics, the IL
// body, P/Invoke info and overrides are all loaded lazily from the metadata
// image (via Module.Read) when the module has one.
public sealed class MethodDefinition : MethodReference, IMemberDefinition, ISecurityDeclarationProvider {

    // Raw MethodAttributes / MethodImplAttributes bits as stored in metadata.
    ushort attributes;
    ushort impl_attributes;

    // volatile: SemanticsAttributes may be read while another thread is
    // lazily populating sem_attrs via ReadSemantics.
    internal volatile bool sem_attrs_ready;
    internal MethodSemanticsAttributes sem_attrs;

    // Lazily-populated caches; null until first access.
    Collection<CustomAttribute> custom_attributes;
    Collection<SecurityDeclaration> security_declarations;

    // Relative virtual address of the method body in the image (0 = no body).
    internal RVA rva;
    internal PInvokeInfo pinvoke;
    Collection<MethodReference> overrides;
    internal MethodBody body;

    public MethodAttributes Attributes {
        get { return (MethodAttributes) attributes; }
        set { attributes = (ushort) value; }
    }

    public MethodImplAttributes ImplAttributes {
        get { return (MethodImplAttributes) impl_attributes; }
        set { impl_attributes = (ushort) value; }
    }

    // Property/event semantics (getter, setter, add, remove, fire, other),
    // read from the image on first access when one is available.
    public MethodSemanticsAttributes SemanticsAttributes {
        get {
            if (sem_attrs_ready)
                return sem_attrs;

            if (HasImage) {
                ReadSemantics ();
                return sem_attrs;
            }

            // No image to read from: default to None and mark resolved.
            sem_attrs = MethodSemanticsAttributes.None;
            sem_attrs_ready = true;
            return sem_attrs;
        }
        set { sem_attrs = value; }
    }

    // Loads all method semantics for this method from the module image.
    // No-op when already resolved or when there is no module/image.
    internal void ReadSemantics ()
    {
        if (sem_attrs_ready)
            return;

        var module = this.Module;
        if (module == null)
            return;

        if (!module.HasImage)
            return;

        module.Read (this, (method, reader) => reader.ReadAllSemantics (method));
    }

    public bool HasSecurityDeclarations {
        get {
            if (security_declarations != null)
                return security_declarations.Count > 0;

            return this.GetHasSecurityDeclarations (Module);
        }
    }

    public Collection<SecurityDeclaration> SecurityDeclarations {
        get { return security_declarations ?? (this.GetSecurityDeclarations (ref security_declarations, Module)); }
    }

    public bool HasCustomAttributes {
        get {
            if (custom_attributes != null)
                return custom_attributes.Count > 0;

            return this.GetHasCustomAttributes (Module);
        }
    }

    public Collection<CustomAttribute> CustomAttributes {
        get { return custom_attributes ?? (this.GetCustomAttributes (ref custom_attributes, Module)); }
    }

    public int RVA {
        get { return (int) rva; }
    }

    // A method can only carry IL when it is neither abstract, P/Invoke,
    // internal-call, native, unmanaged, nor runtime-implemented.
    public bool HasBody {
        get {
            return (attributes & (ushort) MethodAttributes.Abstract) == 0 &&
                (attributes & (ushort) MethodAttributes.PInvokeImpl) == 0 &&
                (impl_attributes & (ushort) MethodImplAttributes.InternalCall) == 0 &&
                (impl_attributes & (ushort) MethodImplAttributes.Native) == 0 &&
                (impl_attributes & (ushort) MethodImplAttributes.Unmanaged) == 0 &&
                (impl_attributes & (ushort) MethodImplAttributes.Runtime) == 0;
        }
    }

    public MethodBody Body {
        get {
            // Read the field once so a concurrent setter cannot change it
            // between the null check and the return.
            MethodBody localBody = this.body;
            if (localBody != null)
                return localBody;

            if (!HasBody)
                return null;

            if (HasImage && rva != 0)
                return Module.Read (ref body, this, (method, reader) => reader.ReadMethodBody (method));

            return body = new MethodBody (this);
        }
        set {
            var module = this.Module;
            if (module == null) {
                body = value;
                return;
            }

            // we reset Body to null in ILSpy to save memory; so we need that operation to be thread-safe
            lock (module.SyncRoot) {
                body = value;
            }
        }
    }

    public bool HasPInvokeInfo {
        get {
            if (pinvoke != null)
                return true;

            return IsPInvokeImpl;
        }
    }

    public PInvokeInfo PInvokeInfo {
        get {
            if (pinvoke != null)
                return pinvoke;

            if (HasImage && IsPInvokeImpl)
                return Module.Read (ref pinvoke, this, (method, reader) => reader.ReadPInvokeInfo (method));

            return null;
        }
        set {
            // Setting P/Invoke info implies the PInvokeImpl attribute bit.
            IsPInvokeImpl = true;
            pinvoke = value;
        }
    }

    public bool HasOverrides {
        get {
            if (overrides != null)
                return overrides.Count > 0;

            if (HasImage)
                return Module.Read (this, (method, reader) => reader.HasOverrides (method));

            return false;
        }
    }

    public Collection<MethodReference> Overrides {
        get {
            if (overrides != null)
                return overrides;

            if (HasImage)
                return Module.Read (ref overrides, this, (method, reader) => reader.ReadOverrides (method));

            return overrides = new Collection<MethodReference> ();
        }
    }

    public override bool HasGenericParameters {
        get {
            if (generic_parameters != null)
                return generic_parameters.Count > 0;

            return this.GetHasGenericParameters (Module);
        }
    }

    public override Collection<GenericParameter> GenericParameters {
        get { return generic_parameters ?? (this.GetGenericParameters (ref generic_parameters, Module)); }
    }

    // The properties below are thin views over the raw attribute bits:
    // masked groups (member access, vtable layout, code type, managed-ness)
    // use Get/SetMaskedAttributes; single flags use Get/SetAttributes.
    #region MethodAttributes

    public bool IsCompilerControlled {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.CompilerControlled); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.CompilerControlled, value); }
    }

    public bool IsPrivate {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Private); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Private, value); }
    }

    public bool IsFamilyAndAssembly {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.FamANDAssem); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.FamANDAssem, value); }
    }

    public bool IsAssembly {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Assembly); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Assembly, value); }
    }

    public bool IsFamily {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Family); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Family, value); }
    }

    public bool IsFamilyOrAssembly {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.FamORAssem); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.FamORAssem, value); }
    }

    public bool IsPublic {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Public); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.MemberAccessMask, (ushort) MethodAttributes.Public, value); }
    }

    public bool IsStatic {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.Static); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.Static, value); }
    }

    public bool IsFinal {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.Final); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.Final, value); }
    }

    public bool IsVirtual {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.Virtual); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.Virtual, value); }
    }

    public bool IsHideBySig {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.HideBySig); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.HideBySig, value); }
    }

    public bool IsReuseSlot {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.VtableLayoutMask, (ushort) MethodAttributes.ReuseSlot); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.VtableLayoutMask, (ushort) MethodAttributes.ReuseSlot, value); }
    }

    public bool IsNewSlot {
        get { return attributes.GetMaskedAttributes ((ushort) MethodAttributes.VtableLayoutMask, (ushort) MethodAttributes.NewSlot); }
        set { attributes = attributes.SetMaskedAttributes ((ushort) MethodAttributes.VtableLayoutMask, (ushort) MethodAttributes.NewSlot, value); }
    }

    public bool IsCheckAccessOnOverride {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.CheckAccessOnOverride); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.CheckAccessOnOverride, value); }
    }

    public bool IsAbstract {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.Abstract); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.Abstract, value); }
    }

    public bool IsSpecialName {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.SpecialName); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.SpecialName, value); }
    }

    public bool IsPInvokeImpl {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.PInvokeImpl); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.PInvokeImpl, value); }
    }

    public bool IsUnmanagedExport {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.UnmanagedExport); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.UnmanagedExport, value); }
    }

    public bool IsRuntimeSpecialName {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.RTSpecialName); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.RTSpecialName, value); }
    }

    public bool HasSecurity {
        get { return attributes.GetAttributes ((ushort) MethodAttributes.HasSecurity); }
        set { attributes = attributes.SetAttributes ((ushort) MethodAttributes.HasSecurity, value); }
    }

    #endregion

    #region MethodImplAttributes

    public bool IsIL {
        get { return impl_attributes.GetMaskedAttributes ((ushort) MethodImplAttributes.CodeTypeMask, (ushort) MethodImplAttributes.IL); }
        set { impl_attributes = impl_attributes.SetMaskedAttributes ((ushort) MethodImplAttributes.CodeTypeMask, (ushort) MethodImplAttributes.IL, value); }
    }

    public bool IsNative {
        get { return impl_attributes.GetMaskedAttributes ((ushort) MethodImplAttributes.CodeTypeMask, (ushort) MethodImplAttributes.Native); }
        set { impl_attributes = impl_attributes.SetMaskedAttributes ((ushort) MethodImplAttributes.CodeTypeMask, (ushort) MethodImplAttributes.Native, value); }
    }

    public bool IsRuntime {
        get { return impl_attributes.GetMaskedAttributes ((ushort) MethodImplAttributes.CodeTypeMask, (ushort) MethodImplAttributes.Runtime); }
        set { impl_attributes = impl_attributes.SetMaskedAttributes ((ushort) MethodImplAttributes.CodeTypeMask, (ushort) MethodImplAttributes.Runtime, value); }
    }

    public bool IsUnmanaged {
        get { return impl_attributes.GetMaskedAttributes ((ushort) MethodImplAttributes.ManagedMask, (ushort) MethodImplAttributes.Unmanaged); }
        set { impl_attributes = impl_attributes.SetMaskedAttributes ((ushort) MethodImplAttributes.ManagedMask, (ushort) MethodImplAttributes.Unmanaged, value); }
    }

    public bool IsManaged {
        get { return impl_attributes.GetMaskedAttributes ((ushort) MethodImplAttributes.ManagedMask, (ushort) MethodImplAttributes.Managed); }
        set { impl_attributes = impl_attributes.SetMaskedAttributes ((ushort) MethodImplAttributes.ManagedMask, (ushort) MethodImplAttributes.Managed, value); }
    }

    public bool IsForwardRef {
        get { return impl_attributes.GetAttributes ((ushort) MethodImplAttributes.ForwardRef); }
        set { impl_attributes = impl_attributes.SetAttributes ((ushort) MethodImplAttributes.ForwardRef, value); }
    }

    public bool IsPreserveSig {
        get { return impl_attributes.GetAttributes ((ushort) MethodImplAttributes.PreserveSig); }
        set { impl_attributes = impl_attributes.SetAttributes ((ushort) MethodImplAttributes.PreserveSig, value); }
    }

    public bool IsInternalCall {
        get { return impl_attributes.GetAttributes ((ushort) MethodImplAttributes.InternalCall); }
        set { impl_attributes = impl_attributes.SetAttributes ((ushort) MethodImplAttributes.InternalCall, value); }
    }

    public bool IsSynchronized {
        get { return impl_attributes.GetAttributes ((ushort) MethodImplAttributes.Synchronized); }
        set { impl_attributes = impl_attributes.SetAttributes ((ushort) MethodImplAttributes.Synchronized, value); }
    }

    public bool NoInlining {
        get { return impl_attributes.GetAttributes ((ushort) MethodImplAttributes.NoInlining); }
        set { impl_attributes = impl_attributes.SetAttributes ((ushort) MethodImplAttributes.NoInlining, value); }
    }

    public bool NoOptimization {
        get { return impl_attributes.GetAttributes ((ushort) MethodImplAttributes.NoOptimization); }
        set { impl_attributes = impl_attributes.SetAttributes ((ushort) MethodImplAttributes.NoOptimization, value); }
    }

    #endregion

    // Views over SemanticsAttributes — note these may trigger the lazy
    // semantics read (see SemanticsAttributes getter).
    #region MethodSemanticsAttributes

    public bool IsSetter {
        get { return this.GetSemantics (MethodSemanticsAttributes.Setter); }
        set { this.SetSemantics (MethodSemanticsAttributes.Setter, value); }
    }

    public bool IsGetter {
        get { return this.GetSemantics (MethodSemanticsAttributes.Getter); }
        set { this.SetSemantics (MethodSemanticsAttributes.Getter, value); }
    }

    public bool IsOther {
        get { return this.GetSemantics (MethodSemanticsAttributes.Other); }
        set { this.SetSemantics (MethodSemanticsAttributes.Other, value); }
    }

    public bool IsAddOn {
        get { return this.GetSemantics (MethodSemanticsAttributes.AddOn); }
        set { this.SetSemantics (MethodSemanticsAttributes.AddOn, value); }
    }

    public bool IsRemoveOn {
        get { return this.GetSemantics (MethodSemanticsAttributes.RemoveOn); }
        set { this.SetSemantics (MethodSemanticsAttributes.RemoveOn, value); }
    }

    public bool IsFire {
        get { return this.GetSemantics (MethodSemanticsAttributes.Fire); }
        set { this.SetSemantics (MethodSemanticsAttributes.Fire, value); }
    }

    #endregion

    public new TypeDefinition DeclaringType {
        get { return (TypeDefinition) base.DeclaringType; }
        set { base.DeclaringType = value; }
    }

    // Instance (.ctor) or type (.cctor) constructor, per ECMA-335 naming.
    public bool IsConstructor {
        get {
            return this.IsRuntimeSpecialName
                && this.IsSpecialName
                && (this.Name == ".cctor" || this.Name == ".ctor");
        }
    }

    public override bool IsDefinition {
        get { return true; }
    }

    internal MethodDefinition ()
    {
        this.token = new MetadataToken (TokenType.Method);
    }

    public MethodDefinition (string name, MethodAttributes attributes, TypeReference returnType)
        : base (name, returnType)
    {
        this.attributes = (ushort) attributes;
        // Instance methods carry an implicit `this`.
        this.HasThis = !this.IsStatic;
        this.token = new MetadataToken (TokenType.Method);
    }

    // A definition is already resolved.
    public override MethodDefinition Resolve ()
    {
        return this;
    }
}
static partial class Mixin {

    // Resolves an IL parameter slot to its ParameterDefinition. For instance
    // methods slot 0 is the implicit `this`; explicit parameters follow.
    // Returns null when the index falls outside the parameter list.
    public static ParameterDefinition GetParameter (this MethodBody self, int index)
    {
        var method = self.method;

        if (method.HasThis) {
            if (index == 0)
                return self.ThisParameter;

            index -= 1;
        }

        var parameters = method.Parameters;
        if (index >= 0 && index < parameters.size)
            return parameters [index];

        return null;
    }

    // Returns the local variable at the given slot, or null when out of range.
    public static VariableDefinition GetVariable (this MethodBody self, int index)
    {
        var variables = self.Variables;
        if (index >= 0 && index < variables.size)
            return variables [index];

        return null;
    }

    // True when any of the given semantics flags are set on the method.
    public static bool GetSemantics (this MethodDefinition self, MethodSemanticsAttributes semantics)
    {
        return (self.SemanticsAttributes & semantics) != MethodSemanticsAttributes.None;
    }

    // Sets or clears the given semantics flags on the method.
    public static void SetSemantics (this MethodDefinition self, MethodSemanticsAttributes semantics, bool value)
    {
        if (value)
            self.SemanticsAttributes |= semantics;
        else
            self.SemanticsAttributes &= ~semantics;
    }
}
}
| |
/// This code was generated by
/// \ / _ _ _| _ _
/// | (_)\/(_)(_|\/| |(/_ v1.0.0
/// / /
/// <summary>
/// PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you
/// currently do not have developer preview access, please contact help@twilio.com.
///
/// SyncMapPermissionResource
/// </summary>
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Clients;
using Twilio.Converters;
using Twilio.Exceptions;
using Twilio.Http;
namespace Twilio.Rest.Preview.Sync.Service.SyncMap
{
public class SyncMapPermissionResource : Resource
{
/// <summary>
/// Builds the GET request for a single Sync Map Permission.
/// </summary>
private static Request BuildFetchRequest(FetchSyncMapPermissionOptions options, ITwilioRestClient client)
{
    var path = "/Sync/Services/" + options.PathServiceSid +
               "/Maps/" + options.PathMapSid +
               "/Permissions/" + options.PathIdentity;

    return new Request(
        HttpMethod.Get,
        Rest.Domain.Preview,
        path,
        queryParams: options.GetParams(),
        headerParams: null
    );
}
/// <summary>
/// Fetch a specific Sync Map Permission.
/// </summary>
/// <param name="options"> Fetch SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio; the default REST client is used when null </param>
/// <returns> A single instance of SyncMapPermission </returns>
public static SyncMapPermissionResource Fetch(FetchSyncMapPermissionOptions options, ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = restClient.Request(BuildFetchRequest(options, restClient));
    return FromJson(response.Content);
}
#if !NET35
/// <summary>
/// Fetch a specific Sync Map Permission asynchronously.
/// </summary>
/// <param name="options"> Fetch SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio; the default REST client is used when null </param>
/// <returns> Task that resolves to A single instance of SyncMapPermission </returns>
public static async System.Threading.Tasks.Task<SyncMapPermissionResource> FetchAsync(FetchSyncMapPermissionOptions options,
                                                                                      ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = await restClient.RequestAsync(BuildFetchRequest(options, restClient));
    return FromJson(response.Content);
}
#endif
/// <summary>
/// Fetch a specific Sync Map Permission.
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pathIdentity"> Identity of the user to whom the Sync Map Permission applies. </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> A single instance of SyncMapPermission </returns>
public static SyncMapPermissionResource Fetch(string pathServiceSid,
                                              string pathMapSid,
                                              string pathIdentity,
                                              ITwilioRestClient client = null)
{
    // Delegate to the options-based overload.
    return Fetch(new FetchSyncMapPermissionOptions(pathServiceSid, pathMapSid, pathIdentity), client);
}
#if !NET35
/// <summary>
/// Fetch a specific Sync Map Permission asynchronously.
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pathIdentity"> Identity of the user to whom the Sync Map Permission applies. </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task that resolves to A single instance of SyncMapPermission </returns>
public static async System.Threading.Tasks.Task<SyncMapPermissionResource> FetchAsync(string pathServiceSid,
                                                                                      string pathMapSid,
                                                                                      string pathIdentity,
                                                                                      ITwilioRestClient client = null)
{
    // Delegate to the options-based overload.
    return await FetchAsync(new FetchSyncMapPermissionOptions(pathServiceSid, pathMapSid, pathIdentity), client);
}
#endif
/// <summary>
/// Builds the DELETE request for a single Sync Map Permission.
/// </summary>
private static Request BuildDeleteRequest(DeleteSyncMapPermissionOptions options, ITwilioRestClient client)
{
    var path = "/Sync/Services/" + options.PathServiceSid +
               "/Maps/" + options.PathMapSid +
               "/Permissions/" + options.PathIdentity;

    return new Request(
        HttpMethod.Delete,
        Rest.Domain.Preview,
        path,
        queryParams: options.GetParams(),
        headerParams: null
    );
}
/// <summary>
/// Delete a specific Sync Map Permission.
/// </summary>
/// <param name="options"> Delete SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio; the default REST client is used when null </param>
/// <returns> True when the API responded 204 No Content </returns>
public static bool Delete(DeleteSyncMapPermissionOptions options, ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = restClient.Request(BuildDeleteRequest(options, restClient));
    return response.StatusCode == System.Net.HttpStatusCode.NoContent;
}
#if !NET35
/// <summary>
/// Asynchronously delete a specific Sync Map Permission.
/// </summary>
/// <param name="options"> Delete SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task resolving to true when the API confirmed the delete with 204 No Content </returns>
public static async System.Threading.Tasks.Task<bool> DeleteAsync(DeleteSyncMapPermissionOptions options,
                                                                  ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = await restClient.RequestAsync(BuildDeleteRequest(options, restClient));
    return response.StatusCode == System.Net.HttpStatusCode.NoContent;
}
#endif
/// <summary>
/// Delete a specific Sync Map Permission identified by its path parameters.
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pathIdentity"> Identity of the user to whom the Sync Map Permission applies. </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> True when the API confirmed the delete </returns>
public static bool Delete(string pathServiceSid,
                          string pathMapSid,
                          string pathIdentity,
                          ITwilioRestClient client = null)
{
    // Build the options inline and delegate to the options-based overload.
    return Delete(new DeleteSyncMapPermissionOptions(pathServiceSid, pathMapSid, pathIdentity), client);
}
#if !NET35
/// <summary>
/// Asynchronously delete a specific Sync Map Permission identified by its path parameters.
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pathIdentity"> Identity of the user to whom the Sync Map Permission applies. </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task resolving to true when the API confirmed the delete </returns>
public static async System.Threading.Tasks.Task<bool> DeleteAsync(string pathServiceSid,
                                                                  string pathMapSid,
                                                                  string pathIdentity,
                                                                  ITwilioRestClient client = null)
{
    // Build the options inline and delegate to the options-based overload.
    return await DeleteAsync(new DeleteSyncMapPermissionOptions(pathServiceSid, pathMapSid, pathIdentity), client);
}
#endif
/// <summary>
/// Builds the GET request that lists all Permissions on a Sync Map.
/// </summary>
private static Request BuildReadRequest(ReadSyncMapPermissionOptions options, ITwilioRestClient client)
{
    var path = "/Sync/Services/" + options.PathServiceSid + "/Maps/" + options.PathMapSid + "/Permissions";
    return new Request(
        HttpMethod.Get,
        Rest.Domain.Preview,
        path,
        queryParams: options.GetParams(),
        headerParams: null
    );
}
/// <summary>
/// Retrieve a list of all Permissions applying to a Sync Map.
/// </summary>
/// <param name="options"> Read SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> A lazily-paging ResourceSet of SyncMapPermission records </returns>
public static ResourceSet<SyncMapPermissionResource> Read(ReadSyncMapPermissionOptions options,
                                                          ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = restClient.Request(BuildReadRequest(options, restClient));
    var firstPage = Page<SyncMapPermissionResource>.FromJson("permissions", response.Content);
    return new ResourceSet<SyncMapPermissionResource>(firstPage, options, restClient);
}
#if !NET35
/// <summary>
/// Asynchronously retrieve a list of all Permissions applying to a Sync Map.
/// </summary>
/// <param name="options"> Read SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task resolving to a lazily-paging ResourceSet of SyncMapPermission records </returns>
public static async System.Threading.Tasks.Task<ResourceSet<SyncMapPermissionResource>> ReadAsync(ReadSyncMapPermissionOptions options,
                                                                                                  ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = await restClient.RequestAsync(BuildReadRequest(options, restClient));
    var firstPage = Page<SyncMapPermissionResource>.FromJson("permissions", response.Content);
    return new ResourceSet<SyncMapPermissionResource>(firstPage, options, restClient);
}
#endif
/// <summary>
/// Retrieve a list of all Permissions applying to a Sync Map.
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pageSize"> Page size </param>
/// <param name="limit"> Record limit </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> A lazily-paging ResourceSet of SyncMapPermission records </returns>
public static ResourceSet<SyncMapPermissionResource> Read(string pathServiceSid,
                                                          string pathMapSid,
                                                          int? pageSize = null,
                                                          long? limit = null,
                                                          ITwilioRestClient client = null)
{
    // Build the options inline and delegate to the options-based overload.
    var options = new ReadSyncMapPermissionOptions(pathServiceSid, pathMapSid)
    {
        PageSize = pageSize,
        Limit = limit
    };
    return Read(options, client);
}
#if !NET35
/// <summary>
/// Asynchronously retrieve a list of all Permissions applying to a Sync Map.
/// </summary>
/// <param name="pathServiceSid"> The service_sid </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pageSize"> Page size </param>
/// <param name="limit"> Record limit </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task resolving to a lazily-paging ResourceSet of SyncMapPermission records </returns>
public static async System.Threading.Tasks.Task<ResourceSet<SyncMapPermissionResource>> ReadAsync(string pathServiceSid,
                                                                                                  string pathMapSid,
                                                                                                  int? pageSize = null,
                                                                                                  long? limit = null,
                                                                                                  ITwilioRestClient client = null)
{
    // Build the options inline and delegate to the options-based overload.
    var options = new ReadSyncMapPermissionOptions(pathServiceSid, pathMapSid)
    {
        PageSize = pageSize,
        Limit = limit
    };
    return await ReadAsync(options, client);
}
#endif
/// <summary>
/// Fetch the page of records at an API-generated page URL.
/// </summary>
/// <param name="targetUrl"> API-generated URL for the requested results page </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> The target page of records </returns>
public static Page<SyncMapPermissionResource> GetPage(string targetUrl, ITwilioRestClient client)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = restClient.Request(new Request(HttpMethod.Get, targetUrl));
    return Page<SyncMapPermissionResource>.FromJson("permissions", response.Content);
}
/// <summary>
/// Fetch the page of records following the given page.
/// </summary>
/// <param name="page"> current page of records </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> The next page of records </returns>
public static Page<SyncMapPermissionResource> NextPage(Page<SyncMapPermissionResource> page,
                                                       ITwilioRestClient client)
{
    var nextRequest = new Request(HttpMethod.Get, page.GetNextPageUrl(Rest.Domain.Preview));
    var response = client.Request(nextRequest);
    return Page<SyncMapPermissionResource>.FromJson("permissions", response.Content);
}
/// <summary>
/// Fetch the page of records preceding the given page.
/// </summary>
/// <param name="page"> current page of records </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> The previous page of records </returns>
public static Page<SyncMapPermissionResource> PreviousPage(Page<SyncMapPermissionResource> page,
                                                           ITwilioRestClient client)
{
    var prevRequest = new Request(HttpMethod.Get, page.GetPreviousPageUrl(Rest.Domain.Preview));
    var response = client.Request(prevRequest);
    return Page<SyncMapPermissionResource>.FromJson("permissions", response.Content);
}
/// <summary>
/// Builds the POST request that updates an identity's permission on a Sync Map.
/// </summary>
/// <param name="options"> Path and form parameters for the update </param>
/// <param name="client"> Client (unused here; kept for signature symmetry with the other builders) </param>
/// <returns> The Request to send </returns>
private static Request BuildUpdateRequest(UpdateSyncMapPermissionOptions options, ITwilioRestClient client)
{
    // The trailing `+ ""` in the generated code was a no-op and has been removed.
    return new Request(
        HttpMethod.Post,
        Rest.Domain.Preview,
        "/Sync/Services/" + options.PathServiceSid + "/Maps/" + options.PathMapSid + "/Permissions/" + options.PathIdentity,
        postParams: options.GetParams(),
        headerParams: null
    );
}
/// <summary>
/// Update an identity's access to a specific Sync Map.
/// </summary>
/// <param name="options"> Update SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> The updated SyncMapPermission </returns>
public static SyncMapPermissionResource Update(UpdateSyncMapPermissionOptions options,
                                               ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = restClient.Request(BuildUpdateRequest(options, restClient));
    return FromJson(response.Content);
}
#if !NET35
/// <summary>
/// Asynchronously update an identity's access to a specific Sync Map.
/// </summary>
/// <param name="options"> Update SyncMapPermission parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task resolving to the updated SyncMapPermission </returns>
public static async System.Threading.Tasks.Task<SyncMapPermissionResource> UpdateAsync(UpdateSyncMapPermissionOptions options,
                                                                                       ITwilioRestClient client = null)
{
    var restClient = client ?? TwilioClient.GetRestClient();
    var response = await restClient.RequestAsync(BuildUpdateRequest(options, restClient));
    return FromJson(response.Content);
}
#endif
/// <summary>
/// Update an identity's access to a specific Sync Map.
/// </summary>
/// <param name="pathServiceSid"> Sync Service Instance SID. </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pathIdentity"> Identity of the user to whom the Sync Map Permission applies. </param>
/// <param name="read"> Read access. </param>
/// <param name="write"> Write access. </param>
/// <param name="manage"> Manage access. </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> The updated SyncMapPermission </returns>
public static SyncMapPermissionResource Update(string pathServiceSid,
                                               string pathMapSid,
                                               string pathIdentity,
                                               bool? read,
                                               bool? write,
                                               bool? manage,
                                               ITwilioRestClient client = null)
{
    // Build the options inline and delegate to the options-based overload.
    return Update(new UpdateSyncMapPermissionOptions(pathServiceSid, pathMapSid, pathIdentity, read, write, manage), client);
}
#if !NET35
/// <summary>
/// Asynchronously update an identity's access to a specific Sync Map.
/// </summary>
/// <param name="pathServiceSid"> Sync Service Instance SID. </param>
/// <param name="pathMapSid"> Sync Map SID or unique name. </param>
/// <param name="pathIdentity"> Identity of the user to whom the Sync Map Permission applies. </param>
/// <param name="read"> Read access. </param>
/// <param name="write"> Write access. </param>
/// <param name="manage"> Manage access. </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task resolving to the updated SyncMapPermission </returns>
public static async System.Threading.Tasks.Task<SyncMapPermissionResource> UpdateAsync(string pathServiceSid,
                                                                                       string pathMapSid,
                                                                                       string pathIdentity,
                                                                                       bool? read,
                                                                                       bool? write,
                                                                                       bool? manage,
                                                                                       ITwilioRestClient client = null)
{
    // Build the options inline and delegate to the options-based overload.
    return await UpdateAsync(new UpdateSyncMapPermissionOptions(pathServiceSid, pathMapSid, pathIdentity, read, write, manage), client);
}
#endif
/// <summary>
/// Converts a JSON string into a SyncMapPermissionResource object
/// </summary>
/// <param name="json"> Raw JSON string </param>
/// <returns> SyncMapPermissionResource object represented by the provided JSON </returns>
/// <exception cref="ApiException"> Thrown when the payload cannot be deserialized. </exception>
public static SyncMapPermissionResource FromJson(string json)
{
// Convert all checked exceptions to Runtime
try
{
return JsonConvert.DeserializeObject<SyncMapPermissionResource>(json);
}
catch (JsonException e)
{
// Wrap Json.NET's JsonException so callers only need to handle
// Twilio's own exception types.
throw new ApiException(e.Message, e);
}
}
/// <summary>
/// Twilio Account SID.
/// </summary>
[JsonProperty("account_sid")]
public string AccountSid { get; private set; }
/// <summary>
/// Sync Service Instance SID.
/// </summary>
[JsonProperty("service_sid")]
public string ServiceSid { get; private set; }
/// <summary>
/// Sync Map SID.
/// </summary>
[JsonProperty("map_sid")]
public string MapSid { get; private set; }
/// <summary>
/// Identity of the user to whom the Sync Map Permission applies.
/// </summary>
[JsonProperty("identity")]
public string Identity { get; private set; }
/// <summary>
/// Read access.
/// </summary>
/// <remarks>
/// Leading underscore avoids a name clash with the static Read() methods on this class.
/// </remarks>
[JsonProperty("read")]
public bool? _Read { get; private set; }
/// <summary>
/// Write access.
/// </summary>
[JsonProperty("write")]
public bool? Write { get; private set; }
/// <summary>
/// Manage access.
/// </summary>
[JsonProperty("manage")]
public bool? Manage { get; private set; }
/// <summary>
/// URL of this Sync Map Permission.
/// </summary>
[JsonProperty("url")]
public Uri Url { get; private set; }
/// <summary>
/// Private constructor — instances are created only via JSON deserialization (see FromJson).
/// </summary>
private SyncMapPermissionResource()
{
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Xml.Linq;
using Microsoft.Extensions.CommandLineUtils;
namespace Tmds.DBus.Tool
{
/// <summary>
/// 'list' sub-command: prints D-Bus services, activatable services, objects,
/// or interfaces found on a bus or in introspection XML files.
/// </summary>
class ListCommand : Command
{
    CommandOption _serviceOption;
    CommandOption _busOption;
    CommandOption _pathOption;
    CommandOption _norecurseOption;
    CommandArgument _typeArgument;
    CommandArgument _files;

    public ListCommand(CommandLineApplication parent) :
        base("list", parent)
    {}

    /// <summary>
    /// Registers the command's options and arguments.
    /// </summary>
    public override void Configure()
    {
        _serviceOption = AddServiceOption();
        _busOption = AddBusOption();
        _pathOption = AddPathOption();
        _norecurseOption = AddNoRecurseOption();
        // Help-text fix: 'services' was missing its opening quote.
        _typeArgument = Configuration.Argument("type", "Type to list. 'objects'/'interfaces'/'services'/'activatable-services'");
        _files = AddFilesArgument();
    }

    /// <summary>
    /// Dispatches to the listing routine selected by the 'type' argument.
    /// Blocks on the async implementations (this is a CLI entry point).
    /// </summary>
    public override void Execute()
    {
        var address = ParseBusAddress(_busOption);
        if (_typeArgument.Value == null)
        {
            // BUG FIX: ArgumentNullException's constructor is (paramName, message);
            // the original call had the two arguments swapped.
            throw new ArgumentNullException("type", "Type argument is required.");
        }
        if (_typeArgument.Value == "services")
        {
            ListServicesAsync(address).Wait();
        }
        else if (_typeArgument.Value == "activatable-services")
        {
            ListActivatableServicesAsync(address).Wait();
        }
        else if (_typeArgument.Value == "objects")
        {
            if (!_serviceOption.HasValue())
            {
                throw new ArgumentException("Service option must be specified for listing objects.", "service");
            }
            string service = _serviceOption.Value();
            bool recurse = !_norecurseOption.HasValue();
            string path = _pathOption.HasValue() ? _pathOption.Value() : "/";
            ListObjectsAsync(address, service, path, recurse).Wait();
        }
        else if (_typeArgument.Value == "interfaces")
        {
            // ROBUSTNESS: also treat an empty file list as "no files given" —
            // CommandArgument.Values is an empty list (not null) when omitted,
            // so the original null check could never fire.
            if (!_serviceOption.HasValue() && (_files.Values == null || _files.Values.Count == 0))
            {
                throw new ArgumentException("Service option or files argument must be specified for listing interfaces.", "service");
            }
            string service = _serviceOption.Value();
            bool recurse = !_norecurseOption.HasValue();
            string path = _pathOption.HasValue() ? _pathOption.Value() : "/";
            ListInterfacesAsync(address, service, path, recurse, _files.Values).Wait();
        }
        else
        {
            throw new ArgumentException("Unknown type", "type");
        }
    }

    /// <summary>A D-Bus object path together with the interfaces it implements.</summary>
    class DBusObject
    {
        public string Path { get; set; }
        public List<string> Interfaces { get; set; }
    }

    /// <summary>
    /// Introspection visitor that collects objects exposing at least one
    /// non-standard interface.
    /// </summary>
    class ObjectsVisitor
    {
        // Standard freedesktop interfaces implemented by virtually every object; not interesting to list.
        private static readonly IEnumerable<string> s_skipInterfaces = new[] { "org.freedesktop.DBus.Introspectable", "org.freedesktop.DBus.Peer", "org.freedesktop.DBus.Properties" };

        public List<DBusObject> Objects { private set; get; }

        public ObjectsVisitor()
        {
            Objects = new List<DBusObject>();
        }

        /// <summary>Visits one node; always returns true to continue traversal.</summary>
        public bool Visit(string path, XElement nodeXml)
        {
            var interfaces = nodeXml.Elements("interface")
                .Select(i => i.Attribute("name").Value)
                .Where(i => !s_skipInterfaces.Contains(i));
            if (interfaces.Any())
            {
                var o = new DBusObject()
                {
                    Path = path,
                    Interfaces = interfaces.OrderBy(s => s).ToList()
                };
                Objects.Add(o);
            }
            return true;
        }
    }

    /// <summary>Prints every object under <paramref name="path"/> with its interfaces, sorted by path.</summary>
    private async Task ListObjectsAsync(string address, string service, string path, bool recurse)
    {
        var visitor = new ObjectsVisitor();
        using (var connection = new Connection(address))
        {
            await connection.ConnectAsync();
            await NodeVisitor.VisitAsync(connection, service, path, recurse, visitor.Visit);
        }
        foreach (var o in visitor.Objects.OrderBy(o => o.Path))
        {
            Console.WriteLine($"{o.Path} : {string.Join(" ", o.Interfaces)}");
        }
    }

    /// <summary>
    /// Introspection visitor that accumulates the set of distinct
    /// non-standard interface names.
    /// </summary>
    class InterfacesVisitor
    {
        // Standard freedesktop interfaces implemented by virtually every object; not interesting to list.
        private static readonly IEnumerable<string> s_skipInterfaces = new[] { "org.freedesktop.DBus.Introspectable", "org.freedesktop.DBus.Peer", "org.freedesktop.DBus.Properties" };

        public HashSet<string> Interfaces { private set; get; }

        public InterfacesVisitor()
        {
            Interfaces = new HashSet<string>();
        }

        /// <summary>Visits one node; always returns true to continue traversal.</summary>
        public bool Visit(string path, XElement nodeXml)
        {
            var interfaces = nodeXml.Elements("interface")
                .Select(i => i.Attribute("name").Value)
                .Where(i => !s_skipInterfaces.Contains(i));
            foreach (var interf in interfaces)
            {
                Interfaces.Add(interf);
            }
            return true;
        }
    }

    /// <summary>
    /// Prints the sorted union of interfaces found on the bus (when a service is
    /// given) and/or in the provided introspection XML files.
    /// </summary>
    private async Task ListInterfacesAsync(string address, string service, string path, bool recurse, List<string> files)
    {
        var visitor = new InterfacesVisitor();
        if (service != null)
        {
            using (var connection = new Connection(address))
            {
                await connection.ConnectAsync();
                await NodeVisitor.VisitAsync(connection, service, path, recurse, visitor.Visit);
            }
        }
        if (files != null)
        {
            foreach (var file in files)
            {
                await NodeVisitor.VisitAsync(file, visitor.Visit);
            }
        }
        foreach (var interf in visitor.Interfaces.OrderBy(i => i))
        {
            Console.WriteLine(interf);
        }
    }

    /// <summary>Prints the well-known (non-unique) service names on the bus, sorted.</summary>
    public async Task ListServicesAsync(string address)
    {
        using (var connection = new Connection(address))
        {
            await connection.ConnectAsync();
            var services = await connection.ListServicesAsync();
            Array.Sort(services);
            foreach (var service in services)
            {
                // Names starting with ':' are unique connection names, not services.
                if (!service.StartsWith(":", StringComparison.Ordinal))
                {
                    Console.WriteLine(service);
                }
            }
        }
    }

    /// <summary>Prints the activatable service names on the bus, sorted.</summary>
    public async Task ListActivatableServicesAsync(string address)
    {
        using (var connection = new Connection(address))
        {
            await connection.ConnectAsync();
            var services = await connection.ListActivatableServicesAsync();
            Array.Sort(services);
            foreach (var service in services)
            {
                Console.WriteLine(service);
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
namespace System.Reflection.Metadata.Ecma335
{
/// <summary>
/// Aggregates a base full-metadata reader with a sequence of Edit-and-Continue
/// delta readers, and maps handles in the aggregate metadata back to the
/// generation that defines them.
/// </summary>
public sealed class MetadataAggregator
{
// For each heap handle and each delta contains aggregate heap lengths.
// heapSizes[heap kind][reader index] == Sum { 0..index | reader[i].XxxHeap.Block.Length }
private readonly ImmutableArray<ImmutableArray<int>> _heapSizes;
// _rowCounts[table index][generation] == cumulative insert counts and per-generation update counts.
private readonly ImmutableArray<ImmutableArray<RowCounts>> _rowCounts;
// internal for testing
internal struct RowCounts : IComparable<RowCounts>
{
// Total number of rows in the table after this generation's inserts.
public int AggregateInserts;
// Number of existing rows this generation updated (as opposed to inserted).
public int Updates;
public int CompareTo(RowCounts other)
{
// Orders by AggregateInserts only; Updates does not participate.
// (Subtraction cannot overflow here: valid metadata row counts are far below int.MaxValue.)
return AggregateInserts - other.AggregateInserts;
}
public override string ToString()
{
return string.Format("+0x{0:x} ~0x{1:x}", AggregateInserts, Updates);
}
}
/// <summary>
/// Creates an aggregator from a full base reader plus EnC delta readers.
/// </summary>
public MetadataAggregator(MetadataReader baseReader, IReadOnlyList<MetadataReader> deltaReaders)
: this(baseReader, null, null, deltaReaders)
{
}
/// <summary>
/// Creates an aggregator from precomputed base table row counts and heap sizes
/// plus EnC delta readers.
/// </summary>
public MetadataAggregator(
IReadOnlyList<int> baseTableRowCounts,
IReadOnlyList<int> baseHeapSizes,
IReadOnlyList<MetadataReader> deltaReaders)
: this(null, baseTableRowCounts, baseHeapSizes, deltaReaders)
{
}
// Shared constructor: exactly one of baseReader / (baseTableRowCounts, baseHeapSizes) is provided.
private MetadataAggregator(
MetadataReader baseReader,
IReadOnlyList<int> baseTableRowCounts,
IReadOnlyList<int> baseHeapSizes,
IReadOnlyList<MetadataReader> deltaReaders)
{
if (baseTableRowCounts == null)
{
if (baseReader == null)
{
throw new ArgumentNullException(nameof(baseReader));
}
// A full metadata reader must not contain an EncMap table.
if (baseReader.GetTableRowCount(TableIndex.EncMap) != 0)
{
throw new ArgumentException(SR.BaseReaderMustBeFullMetadataReader, nameof(baseReader));
}
CalculateBaseCounts(baseReader, out baseTableRowCounts, out baseHeapSizes);
Debug.Assert(baseTableRowCounts != null);
}
else
{
if (baseTableRowCounts.Count != MetadataTokens.TableCount)
{
throw new ArgumentException(SR.Format(SR.ExpectedListOfSize, MetadataTokens.TableCount), nameof(baseTableRowCounts));
}
if (baseHeapSizes == null)
{
throw new ArgumentNullException(nameof(baseHeapSizes));
}
if (baseHeapSizes.Count != MetadataTokens.HeapCount)
{
throw new ArgumentException(SR.Format(SR.ExpectedListOfSize, MetadataTokens.HeapCount), nameof(baseTableRowCounts));
}
}
if (deltaReaders == null || deltaReaders.Count == 0)
{
throw new ArgumentException(SR.ExpectedNonEmptyList, nameof(deltaReaders));
}
// Every delta must contain an EncMap table and be a minimal delta.
for (int i = 0; i < deltaReaders.Count; i++)
{
if (deltaReaders[i].GetTableRowCount(TableIndex.EncMap) == 0 || !deltaReaders[i].IsMinimalDelta)
{
throw new ArgumentException(SR.ReadersMustBeDeltaReaders, nameof(deltaReaders));
}
}
_heapSizes = CalculateHeapSizes(baseHeapSizes, deltaReaders);
_rowCounts = CalculateRowCounts(baseTableRowCounts, deltaReaders);
}
// for testing only
internal MetadataAggregator(RowCounts[][] rowCounts, int[][] heapSizes)
{
_rowCounts = ToImmutable(rowCounts);
_heapSizes = ToImmutable(heapSizes);
}
// Reads per-table row counts and per-heap sizes out of a full metadata reader.
private static void CalculateBaseCounts(
MetadataReader baseReader,
out IReadOnlyList<int> baseTableRowCounts,
out IReadOnlyList<int> baseHeapSizes)
{
int[] rowCounts = new int[MetadataTokens.TableCount];
int[] heapSizes = new int[MetadataTokens.HeapCount];
for (int i = 0; i < rowCounts.Length; i++)
{
rowCounts[i] = baseReader.GetTableRowCount((TableIndex)i);
}
for (int i = 0; i < heapSizes.Length; i++)
{
heapSizes[i] = baseReader.GetHeapSize((HeapIndex)i);
}
baseTableRowCounts = rowCounts;
baseHeapSizes = heapSizes;
}
// Builds, per heap, the cumulative size after each generation.
// GUID sizes are stored as a count of GUIDs (size / 16), since GUID handles are 1-based indices.
private static ImmutableArray<ImmutableArray<int>> CalculateHeapSizes(
IReadOnlyList<int> baseSizes,
IReadOnlyList<MetadataReader> deltaReaders)
{
// GUID heap index is multiple of sizeof(Guid) == 16
const int guidSize = 16;
int generationCount = 1 + deltaReaders.Count;
var userStringSizes = new int[generationCount];
var stringSizes = new int[generationCount];
var blobSizes = new int[generationCount];
var guidSizes = new int[generationCount];
userStringSizes[0] = baseSizes[(int)HeapIndex.UserString];
stringSizes[0] = baseSizes[(int)HeapIndex.String];
blobSizes[0] = baseSizes[(int)HeapIndex.Blob];
guidSizes[0] = baseSizes[(int)HeapIndex.Guid] / guidSize;
for (int r = 0; r < deltaReaders.Count; r++)
{
userStringSizes[r + 1] = userStringSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.UserString);
stringSizes[r + 1] = stringSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.String);
blobSizes[r + 1] = blobSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.Blob);
guidSizes[r + 1] = guidSizes[r] + deltaReaders[r].GetHeapSize(HeapIndex.Guid) / guidSize;
}
return ImmutableArray.Create(
userStringSizes.ToImmutableArray(),
stringSizes.ToImmutableArray(),
blobSizes.ToImmutableArray(),
guidSizes.ToImmutableArray());
}
// Builds, per table, the cumulative insert count and per-generation update count
// by replaying every delta's EncMap table.
private static ImmutableArray<ImmutableArray<RowCounts>> CalculateRowCounts(
IReadOnlyList<int> baseRowCounts,
IReadOnlyList<MetadataReader> deltaReaders)
{
// TODO: optimize - we don't need to allocate all these arrays
var rowCounts = GetBaseRowCounts(baseRowCounts, generations: 1 + deltaReaders.Count);
for (int generation = 1; generation <= deltaReaders.Count; generation++)
{
CalculateDeltaRowCountsForGeneration(rowCounts, generation, ref deltaReaders[generation - 1].EncMapTable);
}
return ToImmutable(rowCounts);
}
// Converts a jagged array into nested ImmutableArrays.
private static ImmutableArray<ImmutableArray<T>> ToImmutable<T>(T[][] array)
{
var immutable = new ImmutableArray<T>[array.Length];
for (int i = 0; i < array.Length; i++)
{
immutable[i] = array[i].ToImmutableArray();
}
return immutable.ToImmutableArray();
}
// internal for testing
// Seeds generation 0 of each table with the base row counts; later generations start at zero.
internal static RowCounts[][] GetBaseRowCounts(IReadOnlyList<int> baseRowCounts, int generations)
{
var rowCounts = new RowCounts[MetadataTokens.TableCount][];
for (int t = 0; t < rowCounts.Length; t++)
{
rowCounts[t] = new RowCounts[generations];
rowCounts[t][0].AggregateInserts = baseRowCounts[t];
}
return rowCounts;
}
// internal for testing
// Replays one delta's EncMap: a token whose RID exceeds the running aggregate is an
// insert (RIDs of inserts must be consecutive); otherwise it is an update.
internal static void CalculateDeltaRowCountsForGeneration(RowCounts[][] rowCounts, int generation, ref EnCMapTableReader encMapTable)
{
// Start each table's aggregate at the previous generation's total.
foreach (var tableRowCounts in rowCounts)
{
tableRowCounts[generation].AggregateInserts = tableRowCounts[generation - 1].AggregateInserts;
}
int mapRowCount = encMapTable.NumberOfRows;
for (int mapRid = 1; mapRid <= mapRowCount; mapRid++)
{
uint token = encMapTable.GetToken(mapRid);
int rid = (int)(token & TokenTypeIds.RIDMask);
var tableRowCounts = rowCounts[token >> TokenTypeIds.RowIdBitCount];
if (rid > tableRowCounts[generation].AggregateInserts)
{
// Inserted RIDs must be contiguous; a gap means the EncMap is malformed.
if (rid != tableRowCounts[generation].AggregateInserts + 1)
{
throw new BadImageFormatException(SR.EnCMapNotSorted);
}
// insert:
tableRowCounts[generation].AggregateInserts = rid;
}
else
{
// update:
tableRowCounts[generation].Updates++;
}
}
}
/// <summary>
/// Given a handle of an entity in an aggregate metadata calculates
/// a handle of the entity within the metadata generation it is defined in.
/// </summary>
/// <param name="handle">Handle of an entity in an aggregate metadata.</param>
/// <param name="generation">The generation the entity is defined in.</param>
/// <returns>Handle of the entity within the metadata generation <paramref name="generation"/>.</returns>
public Handle GetGenerationHandle(Handle handle, out int generation)
{
if (handle.IsVirtual)
{
// TODO: if a virtual handle is connected to real handle then translate the rid,
// otherwise return vhandle and base.
throw new NotSupportedException();
}
if (handle.IsHeapHandle)
{
int heapOffset = handle.Offset;
HeapIndex heapIndex;
MetadataTokens.TryGetHeapIndex(handle.Kind, out heapIndex);
var sizes = _heapSizes[(int)heapIndex];
// #Guid heap offset is 1-based, other heaps have 0-based offset:
var size = (handle.Type == HandleType.Guid) ? heapOffset - 1 : heapOffset;
// Binary-search the cumulative sizes for the generation that added this offset.
generation = sizes.BinarySearch(size);
if (generation >= 0)
{
Debug.Assert(sizes[generation] == size);
// the index points to the start of the next generation that added data to the heap:
do
{
generation++;
}
while (generation < sizes.Length && sizes[generation] == size);
}
else
{
generation = ~generation;
}
if (generation >= sizes.Length)
{
throw new ArgumentException(SR.HandleBelongsToFutureGeneration, nameof(handle));
}
// GUID heap accumulates - previous heap is copied to the next generation
int relativeHeapOffset = (handle.Type == HandleType.Guid || generation == 0) ? heapOffset : heapOffset - sizes[generation - 1];
return new Handle((byte)handle.Type, relativeHeapOffset);
}
else
{
int rowId = handle.RowId;
var sizes = _rowCounts[(int)handle.Type];
// Binary-search the cumulative insert counts for the generation that inserted this row.
generation = sizes.BinarySearch(new RowCounts { AggregateInserts = rowId });
if (generation >= 0)
{
Debug.Assert(sizes[generation].AggregateInserts == rowId);
// the row is in a generation that inserted exactly one row -- the one that we are looking for;
// or it's in a preceding generation if the current one didn't insert any rows of the kind:
while (generation > 0 && sizes[generation - 1].AggregateInserts == rowId)
{
generation--;
}
}
else
{
// the row is in a generation that inserted multiple new rows:
generation = ~generation;
if (generation >= sizes.Length)
{
throw new ArgumentException(SR.HandleBelongsToFutureGeneration, nameof(handle));
}
}
// In each delta table updates always precede inserts.
int relativeRowId = (generation == 0) ? rowId :
rowId -
sizes[generation - 1].AggregateInserts +
sizes[generation].Updates;
return new Handle((byte)handle.Type, relativeRowId);
}
}
}
}
| |
using Foundation;
using System;
using System.CodeDom.Compiler;
using UIKit;
using AVFoundation;
using CoreGraphics;
using Xamarin;
using System.Collections.Generic;
namespace BeerDrinkin.iOS
{
partial class CameraViewController : UIViewController
{
AVCaptureSession captureSession;
AVCaptureDeviceInput captureDeviceInput;
AVCaptureStillImageOutput stillImageOutput;
UIView liveCameraStream;
bool flashOn;
public CameraViewController(IntPtr handle)
: base(handle)
{
}
public override void ViewDidLoad()
{
base.ViewDidLoad();
SetupUserInterface();
AuthorizeCameraUse();
SetupLiveCameraStream();
}
public async void AuthorizeCameraUse()
{
var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
if (authorizationStatus != AVAuthorizationStatus.Authorized)
{
await AVCaptureDevice.RequestAccessForMediaTypeAsync(AVMediaType.Video);
}
}
public void SetupLiveCameraStream()
{
captureSession = new AVCaptureSession();
var viewLayer = liveCameraStream.Layer;
Console.WriteLine(viewLayer.Frame.Width);
var videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
{
Frame = liveCameraStream.Bounds
};
liveCameraStream.Layer.AddSublayer(videoPreviewLayer);
Console.WriteLine(liveCameraStream.Layer.Frame.Width);
var captureDevice = AVCaptureDevice.DefaultDeviceWithMediaType(AVMediaType.Video);
ConfigureCameraForDevice(captureDevice);
captureDeviceInput = AVCaptureDeviceInput.FromDevice(captureDevice);
var dictionary = new NSMutableDictionary();
dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
stillImageOutput = new AVCaptureStillImageOutput()
{
OutputSettings = new NSDictionary()
};
captureSession.AddOutput(stillImageOutput);
captureSession.AddInput(captureDeviceInput);
captureSession.StartRunning();
ViewWillLayoutSubviews();
}
public async void CapturePhoto()
{
var videoConnection = stillImageOutput.ConnectionFromMediaType(AVMediaType.Video);
var sampleBuffer = await stillImageOutput.CaptureStillImageTaskAsync(videoConnection);
// var jpegImageAsBytes = AVCaptureStillImageOutput.JpegStillToNSData (sampleBuffer).ToArray ();
var jpegImageAsNsData = AVCaptureStillImageOutput.JpegStillToNSData(sampleBuffer);
// SendPhoto (data);
PhotoTaken(jpegImageAsNsData.ToArray());
var image = new UIImage (jpegImageAsNsData);
var imageView = new UIImageView(liveCameraStream.Frame);
imageView.Image = image;
Add(imageView);
View.SendSubviewToBack(imageView);
View.SendSubviewToBack(liveCameraStream);
}
public void ToggleFrontBackCamera()
{
var devicePosition = captureDeviceInput.Device.Position;
if (devicePosition == AVCaptureDevicePosition.Front)
{
devicePosition = AVCaptureDevicePosition.Back;
}
else
{
devicePosition = AVCaptureDevicePosition.Front;
}
var device = GetCameraForOrientation(devicePosition);
ConfigureCameraForDevice(device);
captureSession.BeginConfiguration();
captureSession.RemoveInput(captureDeviceInput);
captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
captureSession.AddInput(captureDeviceInput);
captureSession.CommitConfiguration();
}
public void ConfigureCameraForDevice(AVCaptureDevice device)
{
var error = new NSError();
if (device.IsFocusModeSupported(AVCaptureFocusMode.ContinuousAutoFocus))
{
device.LockForConfiguration(out error);
device.FocusMode = AVCaptureFocusMode.ContinuousAutoFocus;
device.UnlockForConfiguration();
}
else if (device.IsExposureModeSupported(AVCaptureExposureMode.ContinuousAutoExposure))
{
device.LockForConfiguration(out error);
device.ExposureMode = AVCaptureExposureMode.ContinuousAutoExposure;
device.UnlockForConfiguration();
}
else if (device.IsWhiteBalanceModeSupported(AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance))
{
device.LockForConfiguration(out error);
device.WhiteBalanceMode = AVCaptureWhiteBalanceMode.ContinuousAutoWhiteBalance;
device.UnlockForConfiguration();
}
}
public void ToggleFlash()
{
var device = captureDeviceInput.Device;
var error = new NSError();
if (device.HasFlash)
{
if (device.FlashMode == AVCaptureFlashMode.On)
{
device.LockForConfiguration(out error);
device.FlashMode = AVCaptureFlashMode.Off;
device.UnlockForConfiguration();
btnToggleFlash.SetBackgroundImage(UIImage.FromFile("flashOff.png"), UIControlState.Normal);
flashOn = false;
}
else
{
device.LockForConfiguration(out error);
device.FlashMode = AVCaptureFlashMode.On;
device.UnlockForConfiguration();
btnToggleFlash.SetBackgroundImage(UIImage.FromFile("flashOn.png"), UIControlState.Normal);
flashOn = true;
}
}
}
/// <summary>
/// Returns the first video capture device mounted at the requested position
/// (front or back), or <c>null</c> when no such device exists.
/// </summary>
/// <param name="orientation">The camera position to look for.</param>
public AVCaptureDevice GetCameraForOrientation(AVCaptureDevicePosition orientation)
{
    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    for (int i = 0; i < videoDevices.Length; i++)
    {
        var candidate = videoDevices[i];
        if (candidate.Position == orientation)
        {
            return candidate;
        }
    }
    return null;
}
/// <summary>
/// Creates the live camera preview view, sizes it to fill the controller's
/// view, and pushes it behind the top/bottom bar chrome.
/// </summary>
private void SetupUserInterface()
{
    liveCameraStream = new UIView();
    // The preview fills the whole view; the red background is only visible
    // until the camera preview layer starts rendering on top of it.
    // (Removed the unused 'totalHeightToReduce' local — it was computed from
    // the bar heights but never applied to the frame.)
    liveCameraStream.Frame = new CGRect(0f, 0f, View.Frame.Width, View.Frame.Height);
    liveCameraStream.BackgroundColor = UIColor.Red;
    View.Add(liveCameraStream);
    View.SendSubviewToBack(liveCameraStream);
    ViewWillLayoutSubviews();
}
public delegate void PhotoTakenHandler(byte[] image);
public event PhotoTakenHandler PhotoTaken;
// Shutter-button handler: captures a photo and records an analytics event
// that includes the current flash state.
partial void btnTakePhoto_TouchUpInside(UIButton sender)
{
CapturePhoto();
Insights.Track("Photo Taken", new Dictionary<string, string> {
{"Flash On", flashOn.ToString()}
});
}
// Flash-button handler: delegates to ToggleFlash, which updates the device,
// the button image and the flashOn flag.
partial void btnToggleFlash_TouchUpInside(UIButton sender)
{
ToggleFlash();
}
// Back-button handler: dismisses this camera view controller (animated).
partial void btnBack_TouchUpInside(UIButton sender)
{
this.DismissViewController(true, null);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Security;
using Xunit;
[assembly: System.Reflection.CustomAttributesTests.Data.Attr(77, name = "AttrSimple")]
[assembly: System.Reflection.CustomAttributesTests.Data.Int32Attr(77, name = "Int32AttrSimple"),
System.Reflection.CustomAttributesTests.Data.Int64Attr((Int64)77, name = "Int64AttrSimple"),
System.Reflection.CustomAttributesTests.Data.StringAttr("hello", name = "StringAttrSimple"),
System.Reflection.CustomAttributesTests.Data.EnumAttr(System.Reflection.CustomAttributesTests.Data.MyColorEnum.RED, name = "EnumAttrSimple"),
System.Reflection.CustomAttributesTests.Data.TypeAttr(typeof(Object), name = "TypeAttrSimple")]
[assembly: System.Runtime.CompilerServices.CompilationRelaxationsAttribute((Int32)8)]
[assembly: System.Diagnostics.Debuggable((System.Diagnostics.DebuggableAttribute.DebuggingModes)263)]
[assembly: System.CLSCompliant(false)]
namespace System.Reflection.Tests
{
public class AssemblyTests : IDisposable
{
string sourceTestAssemblyPath = Path.Combine(Environment.CurrentDirectory, "TestAssembly.dll");
string destTestAssemblyPath = Path.Combine(Environment.CurrentDirectory, "TestAssembly", "TestAssembly.dll");
public AssemblyTests()
{
// Move TestAssembly.dll to subfolder TestAssembly
if(!File.Exists(destTestAssemblyPath))
{
Directory.CreateDirectory(Path.GetDirectoryName(destTestAssemblyPath));
File.Move(sourceTestAssemblyPath, destTestAssemblyPath);
}
}
public void Dispose()
{
// Revert TestAssembly.dll back to its previous location
if(!File.Exists(sourceTestAssemblyPath))
File.Move(destTestAssemblyPath, sourceTestAssemblyPath);
}
public static IEnumerable<object[]> Equality_TestData()
{
yield return new object[] { Assembly.Load(new AssemblyName(typeof(int).GetTypeInfo().Assembly.FullName)), Assembly.Load(new AssemblyName(typeof(int).GetTypeInfo().Assembly.FullName)), true };
yield return new object[] { Assembly.Load(new AssemblyName(typeof(List<int>).GetTypeInfo().Assembly.FullName)), Assembly.Load(new AssemblyName(typeof(List<int>).GetTypeInfo().Assembly.FullName)), true };
yield return new object[] { Assembly.Load(new AssemblyName(typeof(List<int>).GetTypeInfo().Assembly.FullName)), typeof(AssemblyTests).Assembly, false };
}
[Theory]
[MemberData(nameof(Equality_TestData))]
public static void Equality(Assembly assembly1, Assembly assembly2, bool expected)
{
Assert.Equal(expected, assembly1 == assembly2);
Assert.NotEqual(expected, assembly1 != assembly2);
}
[Fact]
public static void GetAssembly_Nullery()
{
Assert.Throws<ArgumentNullException>("type", () => Assembly.GetAssembly(null));
}
public static IEnumerable<object[]> GetAssembly_TestData()
{
yield return new object[] { Assembly.Load(new AssemblyName(typeof(HashSet<int>).GetTypeInfo().Assembly.FullName)), Assembly.GetAssembly(typeof(HashSet<int>)), true };
yield return new object[] { Assembly.Load(new AssemblyName(typeof(int).GetTypeInfo().Assembly.FullName)), Assembly.GetAssembly(typeof(int)), true };
yield return new object[] { typeof(AssemblyTests).Assembly, Assembly.GetAssembly(typeof(AssemblyTests)), true };
}
[Theory]
[MemberData(nameof(GetAssembly_TestData))]
public static void GetAssembly(Assembly assembly1, Assembly assembly2, bool expected)
{
Assert.Equal(expected, assembly1.Equals(assembly2));
}
public static IEnumerable<object[]> GetCallingAssembly_TestData()
{
yield return new object[] { typeof(AssemblyTests).Assembly, GetGetCallingAssembly(), true };
yield return new object[] { Assembly.GetCallingAssembly(), GetGetCallingAssembly(), false };
}
[Theory]
[MemberData(nameof(GetCallingAssembly_TestData))]
public static void GetCallingAssembly(Assembly assembly1, Assembly assembly2, bool expected)
{
Assert.Equal(expected, assembly1.Equals(assembly2));
}
[Fact]
public static void GetExecutingAssembly()
{
Assert.True(typeof(AssemblyTests).Assembly.Equals(Assembly.GetExecutingAssembly()));
}
[Fact]
public static void GetSatelliteAssemblyNeg()
{
Assert.Throws<ArgumentNullException>(() => (typeof(AssemblyTests).Assembly.GetSatelliteAssembly(null)));
Assert.Throws<System.IO.FileNotFoundException>(() => (typeof(AssemblyTests).Assembly.GetSatelliteAssembly(CultureInfo.InvariantCulture)));
}
[Fact]
public static void AssemblyLoadFromString()
{
AssemblyName an = typeof(AssemblyTests).Assembly.GetName();
string fullName = an.FullName;
string simpleName = an.Name;
Assembly a1 = Assembly.Load(fullName);
Assert.NotNull(a1);
Assert.Equal(fullName, a1.GetName().FullName);
Assembly a2 = Assembly.Load(simpleName);
Assert.NotNull(a2);
Assert.Equal(fullName, a2.GetName().FullName);
}
[Fact]
public static void AssemblyLoadFromStringNeg()
{
Assert.Throws<ArgumentNullException>(() => Assembly.Load((string)null));
Assert.Throws<ArgumentException>(() => Assembly.Load(string.Empty));
string emptyCName = new string('\0', 1);
Assert.Throws<ArgumentException>(() => Assembly.Load(emptyCName));
}
[Fact]
public static void AssemblyLoadFromBytes()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
Assembly loadedAssembly = Assembly.Load(aBytes);
Assert.NotNull(loadedAssembly);
Assert.Equal(assembly.FullName, loadedAssembly.FullName);
}
[Fact]
public static void AssemblyLoadFromBytesNeg()
{
Assert.Throws<ArgumentNullException>(() => Assembly.Load((byte[])null));
Assert.Throws<BadImageFormatException>(() => Assembly.Load(new byte[0]));
}
[Fact]
public static void AssemblyLoadFromBytesWithSymbols()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
byte[] symbols = System.IO.File.ReadAllBytes((System.IO.Path.ChangeExtension(assembly.Location, ".pdb")));
Assembly loadedAssembly = Assembly.Load(aBytes, symbols);
Assert.NotNull(loadedAssembly);
Assert.Equal(assembly.FullName, loadedAssembly.FullName);
}
// Verifies that ReflectionOnlyLoad(string) is not supported on this platform.
// BUG FIX: the [Fact] attribute was missing, so xUnit never executed this
// test even though it is written as one.
[Fact]
public static void AssemblyReflectionOnlyLoadFromString()
{
    AssemblyName an = typeof(AssemblyTests).Assembly.GetName();
    Assert.Throws<NotSupportedException>(() => Assembly.ReflectionOnlyLoad(an.FullName));
}
// Verifies that ReflectionOnlyLoad(byte[]) is not supported on this platform.
// BUG FIX: the [Fact] attribute was missing, so xUnit never executed this
// test even though it is written as one.
[Fact]
public static void AssemblyReflectionOnlyLoadFromBytes()
{
    Assembly assembly = typeof(AssemblyTests).Assembly;
    byte[] aBytes = System.IO.File.ReadAllBytes(assembly.Location);
    Assert.Throws<NotSupportedException>(() => Assembly.ReflectionOnlyLoad(aBytes));
}
// Verifies the argument-validation behavior of ReflectionOnlyLoad overloads.
// BUG FIX: the [Fact] attribute was missing, so xUnit never executed this
// test even though it is written as one.
[Fact]
public static void AssemblyReflectionOnlyLoadFromNeg()
{
    Assert.Throws<ArgumentNullException>(() => Assembly.ReflectionOnlyLoad((string)null));
    Assert.Throws<ArgumentException>(() => Assembly.ReflectionOnlyLoad(string.Empty));
    Assert.Throws<ArgumentNullException>(() => Assembly.ReflectionOnlyLoad((byte[])null));
}
public static IEnumerable<object[]> GetModules_TestData()
{
yield return new object[] { LoadSystemCollectionsAssembly() };
yield return new object[] { LoadSystemReflectionAssembly() };
}
[Theory]
[MemberData(nameof(GetModules_TestData))]
public static void GetModules_GetModule(Assembly assembly)
{
Assert.NotEmpty(assembly.GetModules());
foreach (Module module in assembly.GetModules())
{
Assert.NotNull(module);
Assert.Equal(module, assembly.GetModule(module.Name));
}
}
[Fact]
public static void GetLoadedModules()
{
Assembly assembly = typeof(AssemblyTests).Assembly;
Assert.NotEmpty(assembly.GetLoadedModules());
foreach (Module module in assembly.GetLoadedModules())
{
Assert.NotNull(module);
Assert.Equal(module, assembly.GetModule(module.Name));
}
}
public static IEnumerable<object[]> CreateInstance_TestData()
{
yield return new object[] { typeof(AssemblyTests).Assembly, typeof(AssemblyPublicClass).FullName, BindingFlags.CreateInstance, typeof(AssemblyPublicClass) };
yield return new object[] { typeof(int).Assembly, typeof(int).FullName, BindingFlags.Default, typeof(int) };
yield return new object[] { typeof(int).Assembly, typeof(Dictionary<int, string>).FullName, BindingFlags.Default, typeof(Dictionary<int, string>) };
}
[Theory]
[MemberData(nameof(CreateInstance_TestData))]
public static void CreateInstance(Assembly assembly, string typeName, BindingFlags bindingFlags, Type expectedType)
{
Assert.IsType(expectedType, assembly.CreateInstance(typeName, true, bindingFlags, null, null, null, null));
Assert.IsType(expectedType, assembly.CreateInstance(typeName, false, bindingFlags, null, null, null, null));
}
public static IEnumerable<object[]> CreateInstance_Invalid_TestData()
{
yield return new object[] { "", typeof(ArgumentException) };
yield return new object[] { null, typeof(ArgumentNullException) };
yield return new object[] { typeof(AssemblyClassWithPrivateCtor).FullName, typeof(MissingMethodException) };
}
[Theory]
[MemberData(nameof(CreateInstance_Invalid_TestData))]
public static void CreateInstance_Invalid(string typeName, Type exceptionType)
{
Assembly assembly = typeof(AssemblyTests).Assembly;
Assert.Throws(exceptionType, () => assembly.CreateInstance(typeName, true, BindingFlags.Public, null, null, null, null));
Assert.Throws(exceptionType, () => assembly.CreateInstance(typeName, false, BindingFlags.Public, null, null, null, null));
}
[Fact]
public static void GetManifestResourceStream()
{
Assert.NotNull(typeof(AssemblyTests).Assembly.GetManifestResourceStream(typeof(AssemblyTests), "EmbeddedImage.png"));
Assert.NotNull(typeof(AssemblyTests).Assembly.GetManifestResourceStream(typeof(AssemblyTests), "EmbeddedTextFile.txt"));
Assert.Null(typeof(AssemblyTests).Assembly.GetManifestResourceStream(typeof(AssemblyTests), "IDontExist"));
}
[Fact]
public static void Test_GlobalAssemblyCache()
{
Assert.False(typeof(AssemblyTests).Assembly.GlobalAssemblyCache);
}
[Fact]
public static void Test_HostContext()
{
Assert.Equal(0, typeof(AssemblyTests).Assembly.HostContext);
}
[Fact]
public static void Test_IsFullyTrusted()
{
Assert.True(typeof(AssemblyTests).Assembly.IsFullyTrusted);
}
[Fact]
public static void Test_SecurityRuleSet()
{
Assert.Equal(SecurityRuleSet.None, typeof(AssemblyTests).Assembly.SecurityRuleSet);
}
[Fact]
public static void Test_LoadFile()
{
var assem = typeof(AssemblyTests).Assembly;
string path = "System.Runtime.Tests.dll";
string fullpath = Path.GetFullPath(path);
Assert.Throws<ArgumentNullException>("path", () => Assembly.LoadFile(null));
Assert.Throws<ArgumentException>(() => Assembly.LoadFile(path));
var loadfile1 = Assembly.LoadFile(fullpath);
Assert.NotEqual(assem, loadfile1);
string dir = Path.GetDirectoryName(fullpath);
fullpath = Path.Combine(dir, ".", path);
var loadfile2 = Assembly.LoadFile(fullpath);
Assert.Equal(loadfile1,loadfile2);
}
[Fact]
public static void Test_LoadFromUsingHashValue()
{
Assert.Throws<NotSupportedException>(() => Assembly.LoadFrom("abc", null, System.Configuration.Assemblies.AssemblyHashAlgorithm.SHA1));
}
[Fact]
public static void Test_LoadModule()
{
var assem = typeof(AssemblyTests).Assembly;
Assert.Throws<NotImplementedException>(() => assem.LoadModule("abc", null));
Assert.Throws<NotImplementedException>(() => assem.LoadModule("abc", null, null));
}
#pragma warning disable 618
[Fact]
public static void Test_LoadWithPartialName()
{
string simplename = typeof(AssemblyTests).Assembly.GetName().Name;
var assem = Assembly.LoadWithPartialName(simplename);
Assert.Equal(typeof(AssemblyTests).Assembly, assem);
}
#pragma warning restore 618
[Fact]
public void Test_LoadFrom()
{
var assem = Assembly.LoadFrom(destTestAssemblyPath);
Assert.Throws<ArgumentNullException>("assemblyFile", () => Assembly.LoadFrom(null));
var assem1 = Assembly.LoadFrom(destTestAssemblyPath);
Assert.Equal(assem, assem1);
}
[Fact]
public void Test_UnsafeLoadFrom()
{
var assem = Assembly.UnsafeLoadFrom(destTestAssemblyPath);
Assert.Throws<ArgumentNullException>("assemblyFile", () => Assembly.UnsafeLoadFrom(null));
}
// Exercises Assembly.GetFile: argument validation, a missing file returning
// null, and the test assembly's own module resolving to its on-disk location.
[Fact]
public void GetFile()
{
    Assert.Throws<ArgumentNullException>(() => typeof(AssemblyTests).Assembly.GetFile(null));
    Assert.Throws<ArgumentException>(() => typeof(AssemblyTests).Assembly.GetFile(""));
    Assert.Null(typeof(AssemblyTests).Assembly.GetFile("NonExistentfile.dll"));
    Assert.NotNull(typeof(AssemblyTests).Assembly.GetFile("System.Runtime.Tests.dll"));
    // Assert.Equal takes the expected value first; the original had the
    // arguments reversed, which yields misleading failure messages.
    Assert.Equal(typeof(AssemblyTests).Assembly.Location, typeof(AssemblyTests).Assembly.GetFile("System.Runtime.Tests.dll").Name);
}
// Exercises Assembly.GetFiles: the single-module test assembly should report
// exactly one file whose name is the assembly's on-disk location.
[Fact]
public void GetFiles()
{
    // Hoist the repeated GetFiles() calls into one result.
    var files = typeof(AssemblyTests).Assembly.GetFiles();
    Assert.NotNull(files);
    // Assert.Equal takes the expected value first; the original had the
    // arguments reversed, which yields misleading failure messages.
    Assert.Equal(1, files.Length);
    Assert.Equal(typeof(AssemblyTests).Assembly.Location, files[0].Name);
}
// Helpers
private static Assembly GetGetCallingAssembly()
{
return Assembly.GetCallingAssembly();
}
private static Assembly LoadSystemCollectionsAssembly()
{
// Force System.collections to be linked statically
List<int> li = new List<int>();
li.Add(1);
return Assembly.Load(new AssemblyName(typeof(List<int>).GetTypeInfo().Assembly.FullName));
}
private static Assembly LoadSystemReflectionAssembly()
{
// Force System.Reflection to be linked statically
return Assembly.Load(new AssemblyName(typeof(AssemblyName).GetTypeInfo().Assembly.FullName));
}
public class AssemblyPublicClass
{
public class PublicNestedClass { }
}
private static class AssemblyPrivateClass { }
public class AssemblyClassWithPrivateCtor
{
private AssemblyClassWithPrivateCtor() { }
}
}
public class AssemblyCustomAttributeTest
{
[Fact]
public void Test_Int32AttrSimple()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.Int32Attr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.Int32Attr((Int32)77, name = \"Int32AttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_Int64Attr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.Int64Attr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.Int64Attr((Int64)77, name = \"Int64AttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_StringAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.StringAttr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.StringAttr(\"hello\", name = \"StringAttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_EnumAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.EnumAttr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.EnumAttr((System.Reflection.CustomAttributesTests.Data.MyColorEnum)1, name = \"EnumAttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_TypeAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.TypeAttr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.TypeAttr(typeof(System.Object), name = \"TypeAttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_CompilationRelaxationsAttr()
{
bool result = false;
Type attrType = typeof(System.Runtime.CompilerServices.CompilationRelaxationsAttribute);
string attrstr = "[System.Runtime.CompilerServices.CompilationRelaxationsAttribute((Int32)8)]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_AssemblyIdentityAttr()
{
bool result = false;
Type attrType = typeof(System.Reflection.AssemblyTitleAttribute);
string attrstr = "[System.Reflection.AssemblyTitleAttribute(\"System.Reflection.Tests\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_AssemblyDescriptionAttribute()
{
bool result = false;
Type attrType = typeof(System.Reflection.AssemblyDescriptionAttribute);
string attrstr = "[System.Reflection.AssemblyDescriptionAttribute(\"System.Reflection.Tests\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_AssemblyCompanyAttribute()
{
bool result = false;
Type attrType = typeof(System.Reflection.AssemblyCompanyAttribute);
string attrstr = "[System.Reflection.AssemblyCompanyAttribute(\"Microsoft Corporation\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_CLSCompliantAttribute()
{
bool result = false;
Type attrType = typeof(System.CLSCompliantAttribute);
string attrstr = "[System.CLSCompliantAttribute((Boolean)True)]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_DebuggableAttribute()
{
bool result = false;
Type attrType = typeof(System.Diagnostics.DebuggableAttribute);
string attrstr = "[System.Diagnostics.DebuggableAttribute((System.Diagnostics.DebuggableAttribute+DebuggingModes)263)]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
[Fact]
public void Test_SimpleAttribute()
{
bool result = false;
Type attrType = typeof(System.Reflection.CustomAttributesTests.Data.Attr);
string attrstr = "[System.Reflection.CustomAttributesTests.Data.Attr((Int32)77, name = \"AttrSimple\")]";
result = VerifyCustomAttribute(attrType, attrstr);
Assert.True(result, string.Format("Did not find custom attribute of type {0} ", attrType));
}
// Returns true when the test assembly carries an assembly-level custom
// attribute whose type is exactly 'type'.
// NOTE(review): the 'attributeStr' parameter is accepted but never used —
// only the attribute TYPE is checked, the textual representation each caller
// builds is ignored. Confirm whether a string comparison was intended.
private static bool VerifyCustomAttribute(Type type, String attributeStr)
{
Assembly asm = typeof(AssemblyCustomAttributeTest).Assembly;
foreach (CustomAttributeData cad in asm.GetCustomAttributesData())
{
if (cad.AttributeType.Equals(type))
{
return true;
}
}
return false;
}
}
public static class AssemblyTests_GetTYpe
{
[Fact]
public static void AssemblyGetTypeNoQualifierAllowed()
{
Assembly a = typeof(G<int>).Assembly;
string s = typeof(G<int>).AssemblyQualifiedName;
Assert.Throws<ArgumentException>(() => a.GetType(s, throwOnError: true, ignoreCase: false));
}
[Fact]
public static void AssemblyGetTypeDoesntSearchMscorlib()
{
Assembly a = typeof(AssemblyTests_GetTYpe).Assembly;
Assert.Throws<TypeLoadException>(() => a.GetType("System.Object", throwOnError: true, ignoreCase: false));
Assert.Throws<TypeLoadException>(() => a.GetType("G`1[[System.Object]]", throwOnError: true, ignoreCase: false));
}
[Fact]
public static void AssemblyGetTypeDefaultsToItself()
{
Assembly a = typeof(AssemblyTests_GetTYpe).Assembly;
Type t = a.GetType("G`1[[G`1[[System.Int32, mscorlib]]]]", throwOnError: true, ignoreCase: false);
Assert.Equal(typeof(G<G<int>>), t);
}
}
}
internal class G<T> { }
| |
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
using System;
using Tests.Domain;
using Tests.Framework.DocumentationTests;
using Xunit;
using static Tests.Core.Serialization.SerializationTestHelper;
using static Elastic.Clients.Elasticsearch.Infer;
namespace Tests.ClientConcepts.HighLevel.Inference
{
/**[[index-name-inference]]
*=== Index name inference
*
* Many endpoints within the Elasticsearch API expect to receive one or more index names
* as part of the request, in order to know what index/indices a request should operate on.
*
* NEST has a number of ways in which the index name(s) can be specified
*/
public class IndexNameInference : DocumentationTestBase
{
//hide
private class ConnectionSettings : ElasticsearchClientSettings
{
public ConnectionSettings() : base(new InMemoryConnection())
{
}
}
/**
* ==== Default Index name on Connection Settings
* A default index name can be specified on `ConnectionSettings` using `.DefaultIndex()`.
* This is the default index name to use, when no other index name can be resolved for a request
*/
[U] public void DefaultIndexIsInferred()
{
var settings = new ConnectionSettings()
.DefaultIndex("defaultindex"); // <1> set the default index
var client = new ElasticsearchClient(settings);
var searchResponse = client.Search<Project>();
/**
* will send a search request to the API endpoint
*/
//json
var expected = "http://localhost:9200/defaultindex/_search";
//hide
{
searchResponse.ApiCall.Uri.GetLeftPart(UriPartial.Path).Should().Be(expected);
var resolver = new IndexNameResolver(settings);
var index = resolver.Resolve<Project>();
index.Should().Be("defaultindex");
}
}
/**
* [[index-name-type-mapping]]
* ==== Index name for a .NET type
* An index name can be mapped for a _Plain Old CLR Object_ (POCO) using `.DefaultMappingFor<T>()` on `ConnectionSettings`
*/
[U]
public void ExplicitMappingIsInferredUsingDefaultMappingFor()
{
var settings = new ConnectionSettings()
.DefaultMappingFor<Project>(m => m
.IndexName("projects")
);
var client = new ElasticsearchClient(settings);
var searchResponse = client.Search<Project>();
/**
* will send a search request to the API endpoint
*/
//json
var expected = "http://localhost:9200/projects/_search";
//hide
{
searchResponse.ApiCall.Uri.GetLeftPart(UriPartial.Path).Should().Be(expected);
var resolver = new IndexNameResolver(settings);
var index = resolver.Resolve<Project>();
index.Should().Be("projects");
}
}
/**
* `.DefaultMappingFor<T>()` can also be used to specify other defaults for a POCO, including
* property names, property to use for the document id, amongst others.
*
* An index name for a POCO provided using `.DefaultMappingFor<T>()` **will take precedence** over
* the default index name set on `ConnectionSettings`. This way, the client can be configured with a default index to use if no
* index is specified, and a specific index to use for different POCO types.
*/
[U] public void ExplicitMappingTakesPrecedence()
{
var settings = new ConnectionSettings()
.DefaultIndex("defaultindex") // <1> a default index to use, when no other index can be inferred
.DefaultMappingFor<Project>(m => m
.IndexName("projects") // <2> a index to use when `Project` is the target POCO type
);
var client = new ElasticsearchClient(settings);
var projectSearchResponse = client.Search<Project>();
/**
* will send a search request to the API endpoint
*/
//json
var expected = "http://localhost:9200/projects/_search";
//hide
{
projectSearchResponse.ApiCall.Uri.GetLeftPart(UriPartial.Path).Should().Be(expected);
var resolver = new IndexNameResolver(settings);
var index = resolver.Resolve<Project>();
index.Should().Be("projects");
}
/**
* but
*/
var objectSearchResponse = client.Search<object>();
/**
* will send a search request to the API endpoint
*/
//json
expected = "http://localhost:9200/defaultindex/_search";
//hide
objectSearchResponse.ApiCall.Uri.GetLeftPart(UriPartial.Path).Should().Be(expected);
}
/**
* ==== Explicitly specifying Index name on the request
* For API calls that expect an index name, an index name can be explicitly provided
* on the request
*/
[U] public void ExplicitIndexOnRequest()
{
var settings = new ConnectionSettings();
var client = new ElasticsearchClient(settings);
var response = client.Search<Project>(s => s
.Index("some-other-index") //<1> Provide the index name on the request
);
/**
* will send a search request to the API endpoint
*/
//json
var expected = "http://localhost:9200/some-other-index/_search";
//hide
response.ApiCall.Uri.GetLeftPart(UriPartial.Path).Should().Be(expected);
}
/** When an index name is provided on a request, it **will take precedence** over the default
* index name specified on `ConnectionSettings`, _and_ any index name specified for the POCO
* using `.DefaultMappingFor<T>()`. The following example will send a search request
* to the same API endpoint as the previous example
*/
[U] public void ExplicitIndexOnRequestTakesPrecedence()
{
var settings = new ConnectionSettings()
.DefaultIndex("defaultindex")
.DefaultMappingFor<Project>(m => m
.IndexName("projects")
);
var client = new ElasticsearchClient(settings);
var response = client.Search<Project>(s => s
.Index("some-other-index")
);
//hide
{
var expected = "http://localhost:9200/some-other-index/_search";
response.ApiCall.Uri.GetLeftPart(UriPartial.Path).Should().Be(expected);
}
}
/** In summary, the order of precedence for determining the index name for a request is
*
* . Index name specified on the request
* . Index name specified for the generic type parameter in the request using `.DefaultMappingFor<T>()`
* . Default index name specified on `ConnectionSettings`
*
* [IMPORTANT]
* --
* If no index can be determined for a request that requires an index, the client will throw
* an exception to indicate that this is the case.
* --
*/
//hide
[U] public void NoIndexThrowsArgumentException()
{
var settings = new ConnectionSettings();
var resolver = new IndexNameResolver(settings);
var e = Assert.Throws<ArgumentException>(() => resolver.Resolve<Project>());
e.Message.Should().Contain("Index name is null");
}
//hide
[U] public void ArgumentExceptionBubblesOut()
{
var client = new ElasticsearchClient(new ConnectionSettings());
var e = Assert.Throws<ArgumentException>(() => client.Search<Project>());
}
//hide
[U] public void RoundTripSerializationPreservesCluster()
{
Expect("cluster_one:project").WhenSerializing(Index<Project>("cluster_one"));
Expect("cluster_one:project").WhenSerializing((IndexName)"cluster_one:project");
Expect("cluster_one:project,x").WhenSerializing(Index<Project>("cluster_one").And("x"));
Expect("cluster_one:project,x:devs").WhenSerializing(Index<Project>("cluster_one").And<Developer>("x"));
}
//hide
// Documents the implicit string -> IndexName conversion: the value is split
// on the first ':' into cluster and index name. Note that surrounding
// whitespace is preserved in both parts, and an all-whitespace string
// converts to null.
[U] public void ImplicitConversionReadsCluster()
{
var i = (IndexName)"cluster_one : project ";
i.Cluster.Should().Be("cluster_one ");
i.Name.Should().Be(" project ");
i = (IndexName)"cluster_one:project";
i.Cluster.Should().Be("cluster_one");
i.Name.Should().Be("project");
i = (IndexName)" ";
i.Should().BeNull();
}
//hide
[U] public void EqualsValidation()
{
var clusterIndex = (IndexName)"cluster_one:p";
var index = (IndexName)"p";
Index<Project>("cluster_one").Should().NotBe(Index<Project>("cluster_two"));
clusterIndex.Should().NotBe(index);
clusterIndex.Should().Be("cluster_one:p");
clusterIndex.Should().Be((IndexName)"cluster_one:p");
Index<Project>().Should().Be(Index<Project>());
Index<Project>().Should().NotBe(Index<Project>("cluster_two"));
Index<Project>("cluster_one").Should().NotBe("cluster_one:project");
Index<Project>().Should().NotBe(Index<Developer>());
Index<Project>("cluster_one").Should().NotBe(Index<Developer>("cluster_one"));
Indices indices1 = "foo,bar";
Indices indices2 = "bar,foo";
indices1.Should().Be(indices2);
(indices1 == indices2).Should().BeTrue();
}
//hide
[U] public void GetHashCodeValidation()
{
var clusterIndex = (IndexName)"cluster_one:p";
var index = (IndexName)"p";
clusterIndex.GetHashCode().Should().NotBe(index.GetHashCode()).And.NotBe(0);
clusterIndex.GetHashCode().Should().Be(((IndexName)"cluster_one:p").GetHashCode()).And.NotBe(0);
clusterIndex.GetHashCode().Should().Be(((IndexName)"cluster_one:p").GetHashCode()).And.NotBe(0);
Index<Project>().GetHashCode().Should().Be(Index<Project>().GetHashCode()).And.NotBe(0);
Index<Project>().GetHashCode().Should().NotBe(Index<Project>("cluster_two").GetHashCode()).And.NotBe(0);
Index<Project>("cluster_one").GetHashCode().Should().NotBe(Index<Project>("cluster_two").GetHashCode()).And.NotBe(0);
Index<Project>("cluster_one").Should().NotBe("cluster_one:project").And.NotBe(0);
Index<Project>().GetHashCode().Should().NotBe(Index<Developer>().GetHashCode()).And.NotBe(0);
Index<Project>("cluster_one").GetHashCode().Should().NotBe(Index<Developer>("cluster_one").GetHashCode()).And.NotBe(0);
}
}
}
| |
using System;
using System.Linq;
using System.Drawing;
using System.Collections.Generic;
using Rimss.GraphicsProcessing.Palette.Helpers;
using Rimss.GraphicsProcessing.Palette.ColorCaches.Common;
namespace Rimss.GraphicsProcessing.Palette.ColorCaches.LocalitySensitiveHash
{
public class LshColorCache : BaseColorCache
{
#region | Constants |
private const Byte DefaultQuality = 16; // 16
private const Int64 MaximalDistance = 4096;
private const Single NormalizedDistanceRGB = 1.0f / 196608.0f; // 256*256*3 (RGB) = 196608 / 768.0f
private const Single NormalizedDistanceHSL = 1.0f / 260672.0f; // 360*360 (H) + 256*256*2 (SL) = 260672 / 872.0f
private const Single NormalizedDistanceLab = 1.0f / 507.0f; // 13*13*3 = 507 / 300.0f
#endregion
#region | Fields |
private Byte quality;
private Int64 bucketSize;
private Int64 minBucketIndex;
private Int64 maxBucketIndex;
private BucketInfo[] buckets;
#endregion
#region | Properties |
/// <summary>
/// Gets or sets the quality.
/// </summary>
/// <value>The quality.</value>
public Byte Quality
{
get { return quality; }
set
{
// Changing the quality re-derives the bucket layout: the bucket width
// in hash units and a fresh (empty) bucket array.
quality = value;
bucketSize = MaximalDistance / quality;
// Sentinel bounds (min high, max low) so the next OnCachePalette pass
// can narrow them with simple min/max comparisons.
minBucketIndex = quality;
maxBucketIndex = 0;
buckets = new BucketInfo[quality];
}
}
/// <summary>
/// Gets a value indicating whether this instance is color model supported.
/// </summary>
/// <value>
/// <c>true</c> if this instance is color model supported; otherwise, <c>false</c>.
/// </value>
public override Boolean IsColorModelSupported
{
get { return true; }
}
#endregion
#region | Constructors |
/// <summary>
/// Initializes a new instance of the <see cref="LshColorCache"/> class.
/// </summary>
public LshColorCache()
{
ColorModel = ColorModel.RedGreenBlue;
Quality = DefaultQuality;
}
/// <summary>
/// Initializes a new instance of the <see cref="LshColorCache"/> class.
/// </summary>
/// <param name="colorModel">The color model.</param>
/// <param name="quality">The quality.</param>
public LshColorCache(ColorModel colorModel, Byte quality)
{
ColorModel = colorModel;
Quality = quality;
}
#endregion
#region | Helper methods |
// Hashes a color to a bucket index: the squared component magnitude is
// normalized by a per-color-model constant, scaled into [0, MaximalDistance)
// and divided by the bucket width.
// NOTE(review): if ColorModel is none of the three handled cases,
// normalizedDistance stays 0 and every color hashes to bucket 0 — confirm
// that is intended.
private Int64 GetColorBucketIndex(Color color)
{
Single normalizedDistance = 0.0f;
Single componentA, componentB, componentC;
switch (ColorModel)
{
case ColorModel.RedGreenBlue: normalizedDistance = NormalizedDistanceRGB; break;
case ColorModel.HueSaturationLuminance: normalizedDistance = NormalizedDistanceHSL; break;
case ColorModel.LabColorSpace: normalizedDistance = NormalizedDistanceLab; break;
}
ColorModelHelper.GetColorComponents(ColorModel, color, out componentA, out componentB, out componentC);
// Squared Euclidean magnitude of the color's components.
Single distance = componentA*componentA + componentB*componentB + componentC*componentC;
Single normalized = distance * normalizedDistance * MaximalDistance;
// The cast binds to 'normalized' before the division (integer divide).
Int64 resultHash = (Int64) normalized / bucketSize;
return resultHash;
}
/// <summary>
/// Resolves the non-empty bucket for a color: hashes it, clamps the index into
/// the populated [minBucketIndex, maxBucketIndex] range, and if the exact
/// bucket is empty scans outwards for the nearest populated neighbor.
/// </summary>
private BucketInfo GetBucket(Color color)
{
    Int64 bucketIndex = GetColorBucketIndex(color);

    if (bucketIndex < minBucketIndex)
    {
        bucketIndex = minBucketIndex;
    }
    else if (bucketIndex > maxBucketIndex)
    {
        bucketIndex = maxBucketIndex;
    }
    else if (buckets[bucketIndex] == null)
    {
        // The exact bucket is empty; walk outwards in both directions for the
        // nearest populated bucket. Clamping above guarantees one exists
        // within the range, so this loop terminates.
        Boolean bottomFound = false;
        Boolean topFound = false;
        Int64 bottomBucketIndex = bucketIndex;
        Int64 topBucketIndex = bucketIndex;

        while (!bottomFound && !topFound)
        {
            bottomBucketIndex--;
            topBucketIndex++;

            // Fix: use >= 0 so a populated bucket at index 0 can be found
            // too; the previous '> 0' test could never reach it and the
            // search fell back to a (possibly farther) upper bucket.
            bottomFound = bottomBucketIndex >= 0 && buckets[bottomBucketIndex] != null;
            topFound = topBucketIndex < quality && buckets[topBucketIndex] != null;
        }

        bucketIndex = bottomFound ? bottomBucketIndex : topBucketIndex;
    }

    return buckets[bucketIndex];
}
#endregion
#region << BaseColorCache >>
/// <summary>
/// See <see cref="BaseColorCache.Prepare"/> for more details.
/// Re-allocates (clears) all buckets so a fresh palette can be cached.
/// </summary>
public override void Prepare()
{
    base.Prepare();
    buckets = new BucketInfo[quality];
}
/// <summary>
/// See <see cref="BaseColorCache.OnCachePalette"/> for more details.
/// Distributes every palette color into its hash bucket and records the
/// span of populated buckets.
/// </summary>
protected override void OnCachePalette(IList<Color> palette)
{
    // Start with an inverted (empty) range; it collapses onto real data below.
    minBucketIndex = quality;
    maxBucketIndex = 0;

    for (Int32 paletteIndex = 0; paletteIndex < palette.Count; paletteIndex++)
    {
        Color color = palette[paletteIndex];
        Int64 bucketIndex = GetColorBucketIndex(color);

        BucketInfo bucket = buckets[bucketIndex];
        if (bucket == null)
        {
            bucket = new BucketInfo();
        }

        bucket.AddColor(paletteIndex, color);
        buckets[bucketIndex] = bucket;

        // Track the populated bucket span for later clamping in GetBucket.
        minBucketIndex = Math.Min(minBucketIndex, bucketIndex);
        maxBucketIndex = Math.Max(maxBucketIndex, bucketIndex);
    }
}
/// <summary>
/// See <see cref="BaseColorCache.OnGetColorPaletteIndex"/> for more details.
/// Finds the bucket for the color; with a single candidate returns it
/// directly, otherwise picks the candidate nearest by Euclidean distance.
/// </summary>
protected override void OnGetColorPaletteIndex(Color color, out Int32 paletteIndex)
{
    paletteIndex = 0;
    BucketInfo bucket = GetBucket(color);

    if (bucket.Colors.Count() == 1)
    {
        // Only one candidate in the bucket - no distance comparison needed.
        paletteIndex = bucket.Colors.First().Key;
        return;
    }

    // Ordinal (position) of the closest candidate within the bucket.
    Int32 targetOrdinal = ColorModelHelper.GetEuclideanDistance(color, ColorModel, bucket.Colors.Values.ToList());
    Int32 ordinal = 0;

    // Walk the keys until the winning ordinal is reached.
    foreach (Int32 candidatePaletteIndex in bucket.Colors.Keys)
    {
        if (ordinal == targetOrdinal)
        {
            paletteIndex = candidatePaletteIndex;
            break;
        }

        ordinal++;
    }
}
#endregion
}
}
| |
namespace EZOper.TechTester.DataAccess.EZMssql.Works.Bigsail.Honor3Supervise
{
using System;
using System.Data.Entity.Migrations;
/// <summary>
/// Initial EF6 code-first migration: creates the equipment-standard schema
/// (catalogs, dimensions, items, requirements, unit prices, versions) plus the
/// registrant organization/user tables.
/// NOTE(review): generated migration code - do not hand-edit the schema calls;
/// creation order matters for the foreign-key constraints.
/// </summary>
public partial class AddInitDatabase : DbMigration
{
    /// <summary>
    /// Applies the migration: creates all tables with their primary keys,
    /// foreign keys (cascade delete) and supporting indexes.
    /// </summary>
    public override void Up()
    {
        // Equipment catalogs, one per standard version and school type.
        CreateTable(
            "dbo.EquipmentCatalog",
            c => new
                {
                    ID = c.Long(nullable: false, identity: true),
                    VersionID = c.Long(nullable: false),
                    SchoolType = c.String(nullable: false, maxLength: 16),
                    CategoryType = c.Int(nullable: false),
                    Name = c.String(nullable: false, maxLength: 4000),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => t.ID)
            .ForeignKey("dbo.StandardVersion", t => t.VersionID, cascadeDelete: true)
            .Index(t => t.VersionID);
        // Join table linking catalogs to dimensions, with a computation formula.
        CreateTable(
            "dbo.EquipmentCatalogDimension",
            c => new
                {
                    ID = c.Long(nullable: false, identity: true),
                    CatalogID = c.Long(nullable: false),
                    DimensionID = c.Long(nullable: false),
                    Formula = c.String(nullable: false),
                    IsBasic = c.Boolean(nullable: false),
                })
            .PrimaryKey(t => t.ID)
            .ForeignKey("dbo.EquipmentCatalog", t => t.CatalogID, cascadeDelete: true)
            .ForeignKey("dbo.EquipmentDimension", t => t.DimensionID, cascadeDelete: true)
            .Index(t => t.CatalogID)
            .Index(t => t.DimensionID);
        // Measurement dimensions (e.g. class-count ranges) per standard version.
        CreateTable(
            "dbo.EquipmentDimension",
            c => new
                {
                    ID = c.Long(nullable: false, identity: true),
                    VersionID = c.Long(nullable: false),
                    SchoolType = c.String(nullable: false, maxLength: 20),
                    Name = c.String(nullable: false, maxLength: 64),
                    MaxClass = c.Int(nullable: false),
                    MinClass = c.Int(nullable: false),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => t.ID)
            .ForeignKey("dbo.StandardVersion", t => t.VersionID, cascadeDelete: true)
            .Index(t => t.VersionID);
        // Per-equipment requirement values, keyed by (equipment, dimension).
        CreateTable(
            "dbo.EquipmentRequirement",
            c => new
                {
                    EquipmentID = c.Long(nullable: false),
                    DimensionID = c.Long(nullable: false),
                    OptionType = c.Int(nullable: false),
                    NumberType = c.Int(nullable: false),
                    Value1 = c.Double(nullable: false),
                    Value2 = c.Double(nullable: false),
                })
            .PrimaryKey(t => new { t.EquipmentID, t.DimensionID })
            .ForeignKey("dbo.EquipmentDimension", t => t.DimensionID, cascadeDelete: true)
            .ForeignKey("dbo.EquipmentItem", t => t.EquipmentID, cascadeDelete: true)
            .Index(t => t.EquipmentID)
            .Index(t => t.DimensionID);
        // Concrete equipment items.
        CreateTable(
            "dbo.EquipmentItem",
            c => new
                {
                    ID = c.Long(nullable: false, identity: true),
                    SerialID = c.Long(nullable: false),
                    Number = c.String(nullable: false, maxLength: 32),
                    Name = c.String(nullable: false, maxLength: 128),
                    InternalGroup = c.String(nullable: false, maxLength: 128),
                    ClassifyAndCode = c.String(nullable: false, maxLength: 128),
                    RefStandardCode = c.String(nullable: false, maxLength: 128),
                    SpecifyAndFunction = c.String(nullable: false, maxLength: 128),
                    Unit = c.String(nullable: false, maxLength: 16),
                    SequenceNumber = c.Int(nullable: false),
                    EquipmentType = c.Int(nullable: false),
                    UsageType = c.Int(nullable: false),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => t.ID);
        // Money-typed unit prices, keyed by (equipment, pricing project).
        CreateTable(
            "dbo.EquipmentUnitPrice",
            c => new
                {
                    EquipmentID = c.Long(nullable: false),
                    ProjectID = c.Long(nullable: false),
                    UnitPrice = c.Decimal(nullable: false, storeType: "money"),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => new { t.EquipmentID, t.ProjectID })
            .ForeignKey("dbo.EquipmentItem", t => t.EquipmentID, cascadeDelete: true)
            .ForeignKey("dbo.EquipmentUnitPriceProject", t => t.ProjectID, cascadeDelete: true)
            .Index(t => t.EquipmentID)
            .Index(t => t.ProjectID);
        // Pricing projects that group unit prices.
        CreateTable(
            "dbo.EquipmentUnitPriceProject",
            c => new
                {
                    ID = c.Long(nullable: false, identity: true),
                    Name = c.String(nullable: false, maxLength: 64),
                    Description = c.String(nullable: false, maxLength: 1024),
                    RevisionLog = c.String(nullable: false),
                    CreateTime = c.DateTime(nullable: false),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => t.ID);
        // Link between a standard version and a pricing project.
        CreateTable(
            "dbo.EquipmentVersionPriceProject",
            c => new
                {
                    VersionID = c.Long(nullable: false),
                    ProjectID = c.Long(nullable: false),
                    Name = c.String(nullable: false, maxLength: 64),
                    DefineType = c.Int(nullable: false),
                    DefinedRegionCode = c.String(nullable: false, maxLength: 32),
                    Rating = c.Int(nullable: false),
                    Description = c.String(nullable: false, maxLength: 1024),
                    UsageType = c.Int(nullable: false),
                })
            .PrimaryKey(t => new { t.VersionID, t.ProjectID })
            .ForeignKey("dbo.EquipmentUnitPriceProject", t => t.ProjectID, cascadeDelete: true)
            .ForeignKey("dbo.StandardVersion", t => t.VersionID, cascadeDelete: true)
            .Index(t => t.VersionID)
            .Index(t => t.ProjectID);
        // Versions of the equipment standard.
        CreateTable(
            "dbo.StandardVersion",
            c => new
                {
                    ID = c.Long(nullable: false, identity: true),
                    Name = c.String(nullable: false, maxLength: 64),
                    CreateTime = c.DateTime(nullable: false),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => t.ID);
        // Allocation coefficients of equipment items within a catalog.
        CreateTable(
            "dbo.EquipmentCatalogAllocation",
            c => new
                {
                    CatalogID = c.Long(nullable: false),
                    EquipmentID = c.Long(nullable: false),
                    LowGroupCoef = c.Double(nullable: false),
                    HighGroupCoef = c.Double(nullable: false),
                    SmallSchoolGroupCoef = c.Double(nullable: false),
                    ShareType = c.Int(nullable: false),
                })
            .PrimaryKey(t => new { t.CatalogID, t.EquipmentID })
            .ForeignKey("dbo.EquipmentCatalog", t => t.CatalogID, cascadeDelete: true)
            .ForeignKey("dbo.EquipmentItem", t => t.EquipmentID, cascadeDelete: true)
            .Index(t => t.CatalogID)
            .Index(t => t.EquipmentID);
        // Registrant organizations (hierarchical via ParentCode, no FK declared).
        CreateTable(
            "dbo.RegistrantOrganizations",
            c => new
                {
                    OrgCode = c.String(nullable: false, maxLength: 64),
                    OrgName = c.String(),
                    RegName = c.String(),
                    Level = c.Int(nullable: false),
                    SubLevel = c.String(),
                    CombiType = c.Int(nullable: false),
                    ParentCode = c.String(maxLength: 64),
                    Status = c.Int(nullable: false),
                    Time = c.DateTime(nullable: false),
                })
            .PrimaryKey(t => t.OrgCode);
        // User accounts belonging to registrant organizations (salted passwords).
        CreateTable(
            "dbo.RegistrantOrgUsers",
            c => new
                {
                    ID = c.Guid(nullable: false),
                    OrgCode = c.String(nullable: false, maxLength: 64),
                    Account = c.String(nullable: false, maxLength: 64),
                    Salt = c.String(nullable: false, maxLength: 256),
                    Password = c.String(nullable: false, maxLength: 256),
                    Permissions = c.String(nullable: false),
                    Status = c.Int(nullable: false),
                    Time = c.DateTime(nullable: false),
                    Memo = c.String(nullable: false, maxLength: 1024),
                })
            .PrimaryKey(t => t.ID)
            .ForeignKey("dbo.RegistrantOrganizations", t => t.OrgCode, cascadeDelete: true)
            .Index(t => t.OrgCode);
        // Reference table of standard equipment definitions.
        CreateTable(
            "dbo.StandardEquipment",
            c => new
                {
                    SerialID = c.Long(nullable: false, identity: true),
                    Number = c.String(nullable: false, maxLength: 32),
                    Name = c.String(nullable: false, maxLength: 128),
                    InternalGroup = c.String(nullable: false, maxLength: 128),
                    ClassifyAndCode = c.String(nullable: false, maxLength: 128),
                    RefStandardCode = c.String(nullable: false, maxLength: 128),
                    SpecifyAndFunction = c.String(nullable: false, maxLength: 128),
                    Unit = c.String(nullable: false, maxLength: 16),
                    SequenceNumber = c.Int(nullable: false),
                    EquipmentType = c.Int(nullable: false),
                })
            .PrimaryKey(t => t.SerialID);
    }
    /// <summary>
    /// Reverts the migration: drops foreign keys and indexes first, then the
    /// tables, in dependency-safe (reverse) order.
    /// </summary>
    public override void Down()
    {
        DropForeignKey("dbo.RegistrantOrgUsers", "OrgCode", "dbo.RegistrantOrganizations");
        DropForeignKey("dbo.EquipmentCatalog", "VersionID", "dbo.StandardVersion");
        DropForeignKey("dbo.EquipmentDimension", "VersionID", "dbo.StandardVersion");
        DropForeignKey("dbo.EquipmentCatalogAllocation", "EquipmentID", "dbo.EquipmentItem");
        DropForeignKey("dbo.EquipmentCatalogAllocation", "CatalogID", "dbo.EquipmentCatalog");
        DropForeignKey("dbo.EquipmentVersionPriceProject", "VersionID", "dbo.StandardVersion");
        DropForeignKey("dbo.EquipmentVersionPriceProject", "ProjectID", "dbo.EquipmentUnitPriceProject");
        DropForeignKey("dbo.EquipmentUnitPrice", "ProjectID", "dbo.EquipmentUnitPriceProject");
        DropForeignKey("dbo.EquipmentUnitPrice", "EquipmentID", "dbo.EquipmentItem");
        DropForeignKey("dbo.EquipmentRequirement", "EquipmentID", "dbo.EquipmentItem");
        DropForeignKey("dbo.EquipmentRequirement", "DimensionID", "dbo.EquipmentDimension");
        DropForeignKey("dbo.EquipmentCatalogDimension", "DimensionID", "dbo.EquipmentDimension");
        DropForeignKey("dbo.EquipmentCatalogDimension", "CatalogID", "dbo.EquipmentCatalog");
        DropIndex("dbo.RegistrantOrgUsers", new[] { "OrgCode" });
        DropIndex("dbo.EquipmentCatalogAllocation", new[] { "EquipmentID" });
        DropIndex("dbo.EquipmentCatalogAllocation", new[] { "CatalogID" });
        DropIndex("dbo.EquipmentVersionPriceProject", new[] { "ProjectID" });
        DropIndex("dbo.EquipmentVersionPriceProject", new[] { "VersionID" });
        DropIndex("dbo.EquipmentUnitPrice", new[] { "ProjectID" });
        DropIndex("dbo.EquipmentUnitPrice", new[] { "EquipmentID" });
        DropIndex("dbo.EquipmentRequirement", new[] { "DimensionID" });
        DropIndex("dbo.EquipmentRequirement", new[] { "EquipmentID" });
        DropIndex("dbo.EquipmentDimension", new[] { "VersionID" });
        DropIndex("dbo.EquipmentCatalogDimension", new[] { "DimensionID" });
        DropIndex("dbo.EquipmentCatalogDimension", new[] { "CatalogID" });
        DropIndex("dbo.EquipmentCatalog", new[] { "VersionID" });
        DropTable("dbo.StandardEquipment");
        DropTable("dbo.RegistrantOrgUsers");
        DropTable("dbo.RegistrantOrganizations");
        DropTable("dbo.EquipmentCatalogAllocation");
        DropTable("dbo.StandardVersion");
        DropTable("dbo.EquipmentVersionPriceProject");
        DropTable("dbo.EquipmentUnitPriceProject");
        DropTable("dbo.EquipmentUnitPrice");
        DropTable("dbo.EquipmentItem");
        DropTable("dbo.EquipmentRequirement");
        DropTable("dbo.EquipmentDimension");
        DropTable("dbo.EquipmentCatalogDimension");
        DropTable("dbo.EquipmentCatalog");
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Network
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// RouteFilterRulesOperations operations.
/// </summary>
internal partial class RouteFilterRulesOperations : IServiceOperations<NetworkManagementClient>, IRouteFilterRulesOperations
{
/// <summary>
/// Initializes a new instance of the RouteFilterRulesOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal RouteFilterRulesOperations(NetworkManagementClient client)
{
    // Same guard as before, expressed as a null-coalescing throw.
    Client = client ?? throw new System.ArgumentNullException("client");
}
/// <summary>
/// Gets a reference to the NetworkManagementClient through which every
/// operation in this class issues its HTTP requests.
/// </summary>
public NetworkManagementClient Client { get; private set; }
/// <summary>
/// Deletes the specified rule from a route filter. This is a long-running
/// operation: it starts the delete and then polls until completion.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the rule.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Kick off the long-running delete, then track it to completion.
    AzureOperationResponse beginResponse = await BeginDeleteWithHttpMessagesAsync(resourceGroupName, routeFilterName, ruleName, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPostOrDeleteOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified rule from a route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the rule.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<RouteFilterRule>> GetWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before doing any work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (routeFilterName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterName");
    }
    if (ruleName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "ruleName");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // NOTE(review): api-version is pinned by the code generator for this SDK release.
    string apiVersion = "2017-08-01";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("routeFilterName", routeFilterName);
        tracingParameters.Add("ruleName", ruleName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
    }
    // Construct URL: path parameters are escaped and substituted into the template.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{routeFilterName}", System.Uri.EscapeDataString(routeFilterName));
    _url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: per-request client id, language, then caller-supplied headers
    // (caller-supplied values replace any existing header of the same name).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request (GET has no body; kept for the error-wrapping below).
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 is wrapped in a CloudException, with the service
    // error body attached when it can be deserialized.
    if ((int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: fall back to the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<RouteFilterRule>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RouteFilterRule>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Creates or updates a route in the specified route filter. This is a
/// long-running operation: it starts the PUT and then polls until completion.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the route filter rule.
/// </param>
/// <param name='routeFilterRuleParameters'>
/// Parameters supplied to the create or update route filter rule operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<RouteFilterRule>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, RouteFilterRule routeFilterRuleParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Start the long-running create/update, then track it to completion.
    AzureOperationResponse<RouteFilterRule> beginResponse = await BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, routeFilterName, ruleName, routeFilterRuleParameters, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPutOrPatchOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Updates a route in the specified route filter. This is a long-running
/// operation: it starts the PATCH and then polls until completion.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the route filter rule.
/// </param>
/// <param name='routeFilterRuleParameters'>
/// Parameters supplied to the update route filter rule operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<RouteFilterRule>> UpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, PatchRouteFilterRule routeFilterRuleParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Start the long-running update, then track it to completion.
    AzureOperationResponse<RouteFilterRule> beginResponse = await BeginUpdateWithHttpMessagesAsync(resourceGroupName, routeFilterName, ruleName, routeFilterRuleParameters, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPutOrPatchOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets all RouteFilterRules in a route filter. Returns the first page; follow
/// the page's next link (via the corresponding Next method) for further pages.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<RouteFilterRule>>> ListByRouteFilterWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters before doing any work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (routeFilterName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterName");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // NOTE(review): api-version is pinned by the code generator for this SDK release.
    string apiVersion = "2017-08-01";
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("routeFilterName", routeFilterName);
        tracingParameters.Add("apiVersion", apiVersion);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListByRouteFilter", tracingParameters);
    }
    // Construct URL: path parameters are escaped and substituted into the template.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{routeFilterName}", System.Uri.EscapeDataString(routeFilterName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (apiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: per-request client id, language, then caller-supplied headers
    // (caller-supplied values replace any existing header of the same name).
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request (GET has no body; kept for the error-wrapping below).
    string _requestContent = null;
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 is wrapped in a CloudException, with the service
    // error body attached when it can be deserialized.
    if ((int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: fall back to the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<RouteFilterRule>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response (concrete Page<T> implements the IPage<T> body type).
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<RouteFilterRule>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Deletes the specified rule from a route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the rule.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Fail fast on required parameters; SubscriptionId comes from the shared client.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (routeFilterName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterName");
}
if (ruleName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "ruleName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the generator for this service release.
string apiVersion = "2017-08-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("routeFilterName", routeFilterName);
tracingParameters.Add("ruleName", ruleName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginDelete", tracingParameters);
}
// Construct URL by substituting the path parameters into the route template.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{routeFilterName}", System.Uri.EscapeDataString(routeFilterName));
_url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("DELETE");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any generated value with the same key.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request (DELETE sends no body; kept for the error-wrapper below)
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200/202/204 are the documented success codes for a long-running delete;
// anything else is surfaced as a CloudException carrying request/response.
if ((int)_statusCode != 202 && (int)_statusCode != 200 && (int)_statusCode != 204)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception: fall back to the generic status-code message.
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result (no body to deserialize for delete)
var _result = new AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Creates or updates a route in the specified route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the route filter rule.
/// </param>
/// <param name='routeFilterRuleParameters'>
/// Parameters supplied to the create or update route filter rule operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<RouteFilterRule>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, RouteFilterRule routeFilterRuleParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Fail fast on required parameters; SubscriptionId comes from the shared client.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (routeFilterName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterName");
}
if (ruleName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "ruleName");
}
if (routeFilterRuleParameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterRuleParameters");
}
// Non-null is guaranteed by the throw above, so validate unconditionally.
// (Removed the generator's redundant null guard and its unreachable
// "routeFilterRuleParameters = new RouteFilterRule()" branch.)
routeFilterRuleParameters.Validate();
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the generator for this service release.
string apiVersion = "2017-08-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("routeFilterName", routeFilterName);
tracingParameters.Add("ruleName", ruleName);
tracingParameters.Add("routeFilterRuleParameters", routeFilterRuleParameters);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters);
}
// Construct URL by substituting the path parameters into the route template.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{routeFilterName}", System.Uri.EscapeDataString(routeFilterName));
_url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any generated value with the same key.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request: parameters cannot be null here (validated above),
// so the body is always sent. (Removed the always-true null guard.)
string _requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(routeFilterRuleParameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200 (updated) and 201 (created) are the success codes for PUT;
// anything else is surfaced as a CloudException carrying request/response.
if ((int)_statusCode != 200 && (int)_statusCode != 201)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception: fall back to the generic status-code message.
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<RouteFilterRule>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response (200)
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RouteFilterRule>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
// Deserialize Response (201)
if ((int)_statusCode == 201)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RouteFilterRule>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Updates a route in the specified route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='ruleName'>
/// The name of the route filter rule.
/// </param>
/// <param name='routeFilterRuleParameters'>
/// Parameters supplied to the update route filter rule operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<RouteFilterRule>> BeginUpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string ruleName, PatchRouteFilterRule routeFilterRuleParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Fail fast on required parameters; SubscriptionId comes from the shared client.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (routeFilterName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterName");
}
if (ruleName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "ruleName");
}
if (routeFilterRuleParameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "routeFilterRuleParameters");
}
// (Removed the generator's unreachable "routeFilterRuleParameters =
// new PatchRouteFilterRule()" branch: the throw above guarantees non-null.)
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the generator for this service release.
string apiVersion = "2017-08-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("routeFilterName", routeFilterName);
tracingParameters.Add("ruleName", ruleName);
tracingParameters.Add("routeFilterRuleParameters", routeFilterRuleParameters);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginUpdate", tracingParameters);
}
// Construct URL by substituting the path parameters into the route template.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{routeFilterName}", System.Uri.EscapeDataString(routeFilterName));
_url = _url.Replace("{ruleName}", System.Uri.EscapeDataString(ruleName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PATCH");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any generated value with the same key.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request: parameters cannot be null here (validated above),
// so the body is always sent. (Removed the always-true null guard.)
string _requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(routeFilterRuleParameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200 is the only documented success code for PATCH; anything else is
// surfaced as a CloudException carrying the request and response.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception: fall back to the generic status-code message.
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<RouteFilterRule>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RouteFilterRule>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets all RouteFilterRules in a route filter.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<RouteFilterRule>>> ListByRouteFilterNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "ListByRouteFilterNext", tracingParameters);
}
// Construct URL: the continuation link is already a complete URL, so it is
// used as-is (not escaped, no api-version appended).
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any generated value with the same key.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request (GET sends no body; kept for the error-wrapper below)
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200 is the only success code; anything else becomes a CloudException.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception: fall back to the generic status-code message.
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<RouteFilterRule>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response into the concrete Page<T> behind IPage<T>.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<RouteFilterRule>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
using System;
using System.Runtime.InteropServices;
using System.Text;
namespace LuaInterface
{
#pragma warning disable 414
/// <summary>
/// Marks a static method as a reverse-P/Invoke callback target, mirroring
/// Xamarin/Unity's attribute of the same name. The delegate type is stored
/// but not otherwise read here (hence the surrounding pragma 414 suppression).
/// </summary>
public class MonoPInvokeCallbackAttribute : System.Attribute
{
    private Type type;

    /// <summary>Records the delegate type the annotated method implements.</summary>
    public MonoPInvokeCallbackAttribute(Type t) { type = t; }
}
#pragma warning restore 414
/// <summary>
/// Lua value-type tags as returned by the native lua_type function.
/// Values mirror the LUA_T* constants in lua.h; do not renumber.
/// </summary>
public enum LuaTypes : int
{
LUA_TNONE = -1,
LUA_TNIL = 0,
LUA_TBOOLEAN = 1,
LUA_TLIGHTUSERDATA = 2,
LUA_TNUMBER = 3,
LUA_TSTRING = 4,
LUA_TTABLE = 5,
LUA_TFUNCTION = 6,
LUA_TUSERDATA = 7,
LUA_TTHREAD = 8,
}
/// <summary>
/// Options for the native lua_gc call (see lua_gc in LuaDLL).
/// Values mirror the LUA_GC* constants in lua.h; do not renumber.
/// </summary>
public enum LuaGCOptions
{
LUA_GCSTOP = 0,
LUA_GCRESTART = 1,
LUA_GCCOLLECT = 2,
LUA_GCCOUNT = 3,
LUA_GCCOUNTB = 4,
LUA_GCSTEP = 5,
LUA_GCSETPAUSE = 6,
LUA_GCSETSTEPMUL = 7,
}
/// <summary>
/// Non-zero status codes returned by Lua calls/coroutines
/// (LUA_YIELD, LUA_ERR* constants from lua.h); 0 means success.
/// </summary>
public enum LuaThreadStatus : int
{
LUA_YIELD = 1,
LUA_ERRRUN = 2,
LUA_ERRSYNTAX = 3,
LUA_ERRMEM = 4,
LUA_ERRERR = 5,
}
/// <summary>
/// Pseudo-indices used to address the Lua registry/globals from the C API.
/// The numeric values differ between Lua 5.3 and Lua 5.1/LuaJIT, selected
/// at compile time via the LUA_5_3 symbol; they must match the linked DLL.
/// </summary>
public sealed class LuaIndexes
{
#if LUA_5_3
// for lua5.3
public static int LUA_REGISTRYINDEX = -1000000 - 1000;
#else
// for lua5.1 or luajit
public static int LUA_REGISTRYINDEX = -10000;
// 5.1 only: 5.3 removed LUA_GLOBALSINDEX (globals live in the registry).
public static int LUA_GLOBALSINDEX = -10002;
#endif
}
/// <summary>
/// State passed by reference to a <see cref="LuaChunkReader"/> while the
/// native side streams a chunk. Sequential layout: field order is part of
/// the interop contract — do not reorder.
/// </summary>
[StructLayout(LayoutKind.Sequential)]
public struct ReaderInfo
{
// Remaining chunk text still to be handed to the reader.
public String chunkData;
// Set once the whole chunk has been delivered.
public bool finished;
}
// A C#-implemented lua_CFunction: receives the lua_State, returns the number
// of results pushed. On Windows the Cdecl convention must be stated explicitly
// so the reverse P/Invoke thunk matches the native Lua ABI.
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
public delegate int LuaCSFunction(IntPtr luaState);
#else
public delegate int LuaCSFunction(IntPtr luaState);
#endif
// Managed lua_Reader-style callback used when loading a chunk in pieces.
public delegate string LuaChunkReader(IntPtr luaState, ref ReaderInfo data, ref uint size);
// Generic callback shape matching lua_CFunction (state in, result count out).
public delegate int LuaFunctionCallback(IntPtr luaState);
public class LuaDLL
{
// lua_pcall/lua_call nresults value meaning "return all results".
public static int LUA_MULTRET = -1;
// On iOS the Lua library is statically linked, so P/Invoke targets the
// process itself ("__Internal"); everywhere else it is the "slua" library.
#if UNITY_IPHONE && !UNITY_EDITOR
const string LUADLL = "__Internal";
#else
const string LUADLL = "slua";
#endif
//[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
//public static extern void luaS_openextlibs(IntPtr L);
// Thread Funcs
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_tothread(IntPtr L, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_xmove(IntPtr from, IntPtr to, int n);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr lua_newthread(IntPtr L);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_status(IntPtr L);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_pushthread(IntPtr L);
// Garbage collector control; 'what' selects the LUA_GC* operation.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_gc(IntPtr luaState, LuaGCOptions what, int data);
// Returns a pointer to a static C string naming the given type tag.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr lua_typename(IntPtr luaState, int type);
// Managed wrapper over lua_typename: marshals the native type-name
// (a static ANSI C string) into a managed string.
public static string lua_typenamestr(IntPtr luaState, LuaTypes type)
{
    return Marshal.PtrToStringAnsi(lua_typename(luaState, (int)type));
}
// Convenience: name of the type of the value at the given stack slot.
public static string luaL_typename(IntPtr luaState, int stackPos)
{
    LuaTypes tag = lua_type(luaState, stackPos);
    return lua_typenamestr(luaState, tag);
}
// True when the stack slot holds a Lua function.
public static bool lua_isfunction(IntPtr luaState, int stackPos)
{
    LuaTypes tag = lua_type(luaState, stackPos);
    return tag == LuaTypes.LUA_TFUNCTION;
}
// True when the stack slot holds light userdata (a bare C pointer).
public static bool lua_islightuserdata(IntPtr luaState, int stackPos)
{
    LuaTypes tag = lua_type(luaState, stackPos);
    return tag == LuaTypes.LUA_TLIGHTUSERDATA;
}
// True when the stack slot holds a table.
public static bool lua_istable(IntPtr luaState, int stackPos)
{
    LuaTypes tag = lua_type(luaState, stackPos);
    return tag == LuaTypes.LUA_TTABLE;
}
// True when the stack slot holds a coroutine (thread) value.
public static bool lua_isthread(IntPtr luaState, int stackPos)
{
    LuaTypes tag = lua_type(luaState, stackPos);
    return tag == LuaTypes.LUA_TTHREAD;
}
// Raises a Lua error whose message is prefixed with the caller's source
// location. Stack discipline (push location, push message, concat, error)
// must stay in exactly this order.
public static void luaL_error(IntPtr luaState, string message)
{
    luaL_where(luaState, 1);
    lua_pushstring(luaState, message);
    lua_concat(luaState, 2);
    lua_error(luaState);
}
// Formatting overload: composes the message with string.Format, then
// raises it through the single-string luaL_error above.
public static void luaL_error(IntPtr luaState, string fmt, params object[] args)
{
    luaL_error(luaState, string.Format(fmt, args));
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern string luaL_gsub(IntPtr luaState, string str, string pattern, string replacement);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_isuserdata(IntPtr luaState, int stackPos);
// Raw (metamethod-free) equality of two stack slots.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_rawequal(IntPtr luaState, int stackPos1, int stackPos2);
// Pops the value on top of the stack into table[name] at stackPos.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_setfield(IntPtr luaState, int stackPos, string name);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_callmeta(IntPtr luaState, int stackPos, string name);
// State lifecycle: create, destroy, open the standard libraries.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr luaL_newstate();
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_close(IntPtr luaState);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaL_openlibs(IntPtr luaState);
// Compiles a chunk and leaves it on the stack; returns 0 on success.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_loadstring(IntPtr luaState, string chunk);
// Compile the chunk, then run it (all results kept) only if compilation
// succeeded; either way the native status code is returned (0 = ok).
public static int luaL_dostring(IntPtr luaState, string chunk)
{
    int status = luaL_loadstring(luaState, chunk);
    return status != 0 ? status : lua_pcall(luaState, 0, -1, 0);
}
// Alias kept for API compatibility; defers entirely to luaL_dostring.
public static int lua_dostring(IntPtr luaState, string chunk)
{
    return luaL_dostring(luaState, chunk);
}
// Pushes a new table preallocated for narr array slots and nrec hash slots.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_createtable(IntPtr luaState, int narr, int nrec);
// Pushes a fresh empty table (no preallocation) onto the stack.
public static void lua_newtable(IntPtr luaState)
{
    lua_createtable(luaState, 0, 0);
}
#if LUA_5_3
// --- Lua 5.3 build: several 5.1 primitives are emulated over 5.3's API. ---
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_getglobal(IntPtr luaState, string name);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_setglobal(IntPtr luaState, string name);
// 5.3 has no exported lua_insert; rotating the slice [newTop..top] by +1 is equivalent.
public static void lua_insert(IntPtr luaState, int newTop)
{
lua_rotate(luaState, newTop, 1);
}
public static void lua_pushglobaltable(IntPtr l)
{
// Registry slot 2 is LUA_RIDX_GLOBALS in Lua 5.3.
lua_rawgeti(l, LuaIndexes.LUA_REGISTRYINDEX, 2);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_rotate(IntPtr luaState, int index, int n);
public static int lua_rawlen(IntPtr luaState, int stackPos)
{
return LuaDLLWrapper.luaS_rawlen(luaState, stackPos);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_loadbufferx(IntPtr luaState, byte[] buff, int size, string name, IntPtr x);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_callk(IntPtr luaState, int nArgs, int nResults,int ctx,IntPtr k);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_pcallk(IntPtr luaState, int nArgs, int nResults, int errfunc,int ctx,IntPtr k);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaS_pcall(IntPtr luaState, int nArgs, int nResults, int errfunc);
// 5.1-style call/pcall shims over the 5.3 continuation-aware entry points (no continuation).
public static int lua_call(IntPtr luaState, int nArgs, int nResults)
{
return lua_callk(luaState, nArgs, nResults, 0, IntPtr.Zero);
}
public static int lua_pcall(IntPtr luaState, int nArgs, int nResults, int errfunc)
{
return luaS_pcall(luaState, nArgs, nResults, errfunc);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern double lua_tonumberx(IntPtr luaState, int index, IntPtr x);
public static double lua_tonumber(IntPtr luaState, int index)
{
return lua_tonumberx(luaState, index, IntPtr.Zero);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern Int64 lua_tointegerx(IntPtr luaState, int index,IntPtr x);
// Narrows the native 64-bit lua_Integer to int; values outside int range are truncated.
public static int lua_tointeger(IntPtr luaState, int index)
{
return (int)lua_tointegerx(luaState, index, IntPtr.Zero);
}
public static int luaL_loadbuffer(IntPtr luaState, byte[] buff, int size, string name)
{
return luaL_loadbufferx(luaState, buff, size, name, IntPtr.Zero);
}
// Emulates 5.1 lua_remove: rotate the removed slot to the top, then pop it.
public static void lua_remove(IntPtr l, int idx)
{
lua_rotate(l, (idx), -1);
lua_pop(l, 1);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_rawgeti(IntPtr luaState, int tableIndex, Int64 index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_rawseti(IntPtr luaState, int tableIndex, Int64 index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushinteger(IntPtr luaState, Int64 i);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern Int64 luaL_checkinteger(IntPtr luaState, int stackPos);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaS_yield(IntPtr luaState,int nrets);
public static int lua_yield(IntPtr luaState,int nrets) {
return luaS_yield(luaState,nrets);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_resume(IntPtr L, IntPtr from, int narg);
// Emulates 5.1 lua_replace: copy the top value into the target slot, then pop it.
public static void lua_replace(IntPtr luaState, int index) {
lua_copy(luaState, -1, (index));
lua_pop(luaState, 1);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_copy(IntPtr luaState,int from,int toidx);
#else
// --- Lua 5.1 build: direct natives plus globals helpers via the LUA_GLOBALSINDEX pseudo-index. ---
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_resume(IntPtr L, int narg);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_lessthan(IntPtr luaState, int stackPos1, int stackPos2);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_getfenv(IntPtr luaState, int stackPos);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_yield(IntPtr L, int nresults);
// Reads _G[name]: push the key, then index the globals pseudo-index.
public static void lua_getglobal(IntPtr luaState, string name)
{
LuaDLL.lua_pushstring(luaState, name);
LuaDLL.lua_gettable(luaState, LuaIndexes.LUA_GLOBALSINDEX);
}
// Writes _G[name] = v, where v is the value currently on top of the stack;
// the insert swaps key under value so lua_settable sees key,value in order.
public static void lua_setglobal(IntPtr luaState, string name)
{
LuaDLL.lua_pushstring(luaState, name);
LuaDLL.lua_insert(luaState, -2);
LuaDLL.lua_settable(luaState, LuaIndexes.LUA_GLOBALSINDEX);
}
public static void lua_pushglobaltable(IntPtr l)
{
LuaDLL.lua_pushvalue(l, LuaIndexes.LUA_GLOBALSINDEX);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_insert(IntPtr luaState, int newTop);
public static int lua_rawlen(IntPtr luaState, int stackPos)
{
return LuaDLLWrapper.luaS_objlen(luaState, stackPos);
}
// 5.0-era name kept for compatibility; identical to lua_rawlen here.
public static int lua_strlen(IntPtr luaState, int stackPos)
{
return lua_rawlen(luaState, stackPos);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_call(IntPtr luaState, int nArgs, int nResults);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_pcall(IntPtr luaState, int nArgs, int nResults, int errfunc);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern double lua_tonumber(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_tointeger(IntPtr luaState, int index);
public static int luaL_loadbuffer(IntPtr luaState, byte[] buff, int size, string name)
{
return LuaDLLWrapper.luaLS_loadbuffer(luaState, buff, size, name);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_remove(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_rawgeti(IntPtr luaState, int tableIndex, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_rawseti(IntPtr luaState, int tableIndex, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushinteger(IntPtr luaState, int i);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_checkinteger(IntPtr luaState, int stackPos);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_replace(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_setfenv(IntPtr luaState, int stackPos);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_equal(IntPtr luaState, int index1, int index2);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_loadfile(IntPtr luaState, string filename);
#endif
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_settop(IntPtr luaState, int newTop);
// Pops 'amount' values by setting the top relative to its current position.
public static void lua_pop(IntPtr luaState, int amount)
{
LuaDLL.lua_settop(luaState, -(amount) - 1);
}
// Table access: gettable/settable honor metamethods; rawget/rawset bypass them.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_gettable(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_rawget(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_settable(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_rawset(IntPtr luaState, int index);
// Return type differs between Lua versions, hence the conditional declaration.
#if LUA_5_3
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_setmetatable(IntPtr luaState, int objIndex);
#else
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_setmetatable(IntPtr luaState, int objIndex);
#endif
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_getmetatable(IntPtr luaState, int objIndex);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushvalue(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_gettop(IntPtr luaState);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern LuaTypes lua_type(IntPtr luaState, int index);
// --- Type predicates; the wrapper variants convert native int results to bool. ---
public static bool lua_isnil(IntPtr luaState, int index)
{
return (LuaDLL.lua_type(luaState, index) == LuaTypes.LUA_TNIL);
}
public static bool lua_isnumber(IntPtr luaState, int index)
{
return LuaDLLWrapper.lua_isnumber(luaState, index) > 0;
}
public static bool lua_isboolean(IntPtr luaState, int index)
{
return LuaDLL.lua_type(luaState, index) == LuaTypes.LUA_TBOOLEAN;
}
// Registry references: luaL_ref pops the top value and stores it; lua_getref /
// lua_unref operate on the registry table.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_ref(IntPtr luaState, int registryIndex);
public static void lua_getref(IntPtr luaState, int reference)
{
LuaDLL.lua_rawgeti(luaState, LuaIndexes.LUA_REGISTRYINDEX, reference);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaL_unref(IntPtr luaState, int registryIndex, int reference);
public static void lua_unref(IntPtr luaState, int reference)
{
LuaDLL.luaL_unref(luaState, LuaIndexes.LUA_REGISTRYINDEX, reference);
}
public static bool lua_isstring(IntPtr luaState, int index)
{
return LuaDLLWrapper.lua_isstring(luaState, index) > 0;
}
public static bool lua_iscfunction(IntPtr luaState, int index)
{
return LuaDLLWrapper.lua_iscfunction(luaState, index) > 0;
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushnil(IntPtr luaState);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaL_checktype(IntPtr luaState, int p, LuaTypes t);
/// <summary>
/// Pushes a C# delegate onto the Lua stack as a C closure with no upvalues.
/// </summary>
// NOTE(review): the delegate is not stored anywhere here; if the caller does not
// keep a live reference, the GC may collect it while native code still holds the
// function pointer — confirm callers retain their delegates.
public static void lua_pushcfunction(IntPtr luaState, LuaCSFunction function)
{
IntPtr fn = Marshal.GetFunctionPointerForDelegate(function);
lua_pushcclosure(luaState, fn, 0);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr lua_tocfunction(IntPtr luaState, int index);
public static bool lua_toboolean(IntPtr luaState, int index)
{
return LuaDLLWrapper.lua_toboolean(luaState, index) > 0;
}
// Returns the string pointer plus an explicit 32-bit length out-parameter.
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr luaS_tolstring32(IntPtr luaState, int index, out int strLen);
/// <summary>
/// Converts the value at <paramref name="index"/> to a managed string, or null
/// if the native call yields no string pointer.
/// </summary>
// NOTE(review): PtrToStringAnsi decodes with the system ANSI code page; UTF-8
// Lua strings may be mangled on some platforms — confirm intended encoding.
public static string lua_tostring(IntPtr luaState, int index)
{
int strlen;
IntPtr str = luaS_tolstring32(luaState, index, out strlen); // fix il2cpp 64 bit
if (str != IntPtr.Zero)
{
return Marshal.PtrToStringAnsi(str, strlen);
}
return null;
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr lua_atpanic(IntPtr luaState, LuaCSFunction panicf);
// --- Push helpers for primitive values. ---
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushnumber(IntPtr luaState, double number);
public static void lua_pushboolean(IntPtr luaState, bool value)
{
LuaDLLWrapper.lua_pushboolean(luaState, value ? 1 : 0);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushstring(IntPtr luaState, string str);
// Length-delimited push: str may contain embedded zero bytes.
public static void lua_pushlstring(IntPtr luaState, byte[] str, int size)
{
LuaDLLWrapper.luaS_pushlstring(luaState, str, size);
}
// --- Metatable helpers (metatables are stored in the registry by name). ---
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaL_newmetatable(IntPtr luaState, string meta);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_getfield(IntPtr luaState, int stackPos, string meta);
public static void luaL_getmetatable(IntPtr luaState, string meta)
{
LuaDLL.lua_getfield(luaState, LuaIndexes.LUA_REGISTRYINDEX, meta);
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr luaL_checkudata(IntPtr luaState, int stackPos, string meta);
public static bool luaL_getmetafield(IntPtr luaState, int stackPos, string field)
{
return LuaDLLWrapper.luaL_getmetafield(luaState, stackPos, field) > 0;
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_load(IntPtr luaState, LuaChunkReader chunkReader, ref ReaderInfo data, string chunkName);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_error(IntPtr luaState);
public static bool lua_checkstack(IntPtr luaState, int extra)
{
return LuaDLLWrapper.lua_checkstack(luaState, extra) > 0;
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int lua_next(IntPtr luaState, int index);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushlightuserdata(IntPtr luaState, IntPtr udata);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaL_where(IntPtr luaState, int level);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern double luaL_checknumber(IntPtr luaState, int stackPos);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_concat(IntPtr luaState, int n);
// --- SLua helper natives for userdata <-> managed-object bookkeeping. ---
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_newuserdata(IntPtr luaState, int val);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaS_rawnetobj(IntPtr luaState, int obj);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr lua_touserdata(IntPtr luaState, int index);
/// <summary>
/// Converts a relative (non-positive) stack index into an absolute one;
/// positive indices are already absolute and are returned unchanged.
/// </summary>
public static int lua_absindex(IntPtr luaState, int index)
{
    if (index > 0)
    {
        return index;
    }
    return lua_gettop(luaState) + index + 1;
}
/// <summary>
/// Computes the pseudo-index for upvalue i of the running C closure; the base
/// pseudo-index differs between Lua versions, hence the conditional.
/// </summary>
public static int lua_upvalueindex(int i)
{
#if LUA_5_3
return LuaIndexes.LUA_REGISTRYINDEX - i;
#else
return LuaIndexes.LUA_GLOBALSINDEX - i;
#endif
}
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void lua_pushcclosure(IntPtr l, IntPtr f, int nup);
/// <summary>
/// Convenience overload: marshals the delegate to a function pointer and pushes
/// it as a closure with nup upvalues.
/// </summary>
// NOTE(review): as with lua_pushcfunction, the delegate must be kept alive by
// the caller for as long as Lua can invoke it — confirm.
public static void lua_pushcclosure(IntPtr l, LuaCSFunction f, int nup)
{
IntPtr fn = Marshal.GetFunctionPointerForDelegate(f);
lua_pushcclosure(l, fn, nup);
}
// --- SLua natives that read/write Unity-style value types (vectors, quaternions,
// colors) component-by-component, plus object caching/type-check helpers. ---
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_checkVector2(IntPtr l, int p, out float x, out float y);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_checkVector3(IntPtr l, int p, out float x, out float y, out float z);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_checkVector4(IntPtr l, int p, out float x, out float y, out float z, out float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_checkQuaternion(IntPtr l, int p, out float x, out float y, out float z, out float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_checkColor(IntPtr l, int p, out float x, out float y, out float z, out float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_pushVector2(IntPtr l, float x, float y);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_pushVector3(IntPtr l, float x, float y, float z);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_pushVector4(IntPtr l, float x, float y, float z, float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_pushQuaternion(IntPtr l, float x, float y, float z, float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_pushColor(IntPtr l, float x, float y, float z, float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_setData(IntPtr l, int p, float x, float y, float z, float w);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaS_checkluatype(IntPtr l, int p, string t);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern void luaS_pushobject(IntPtr l, int index, string t, bool gco, int cref);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaS_getcacheud(IntPtr l, int index, int cref);
[DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
public static extern int luaS_subclassof(IntPtr l, int index, string t);
}
}
| |
// Machine-generated MonoJavaBridge JNI binding for android.widget.TimePicker.
// Each member lazily resolves its jmethodID into a static _mN field and
// dispatches through JavaBridge; do not hand-edit the JNI signatures.
namespace android.widget
{
[global::MonoJavaBridge.JavaClass()]
public partial class TimePicker : android.widget.FrameLayout
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
protected TimePicker(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
// Managed mirror of android.widget.TimePicker.OnTimeChangedListener.
[global::MonoJavaBridge.JavaInterface(typeof(global::android.widget.TimePicker.OnTimeChangedListener_))]
public partial interface OnTimeChangedListener : global::MonoJavaBridge.IJavaObject
{
void onTimeChanged(android.widget.TimePicker arg0, int arg1, int arg2);
}
// Proxy that forwards interface calls to the underlying Java object.
[global::MonoJavaBridge.JavaProxy(typeof(global::android.widget.TimePicker.OnTimeChangedListener))]
internal sealed partial class OnTimeChangedListener_ : java.lang.Object, OnTimeChangedListener
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
internal OnTimeChangedListener_(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
private static global::MonoJavaBridge.MethodId _m0;
void android.widget.TimePicker.OnTimeChangedListener.onTimeChanged(android.widget.TimePicker arg0, int arg1, int arg2)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.OnTimeChangedListener_.staticClass, "onTimeChanged", "(Landroid/widget/TimePicker;II)V", ref global::android.widget.TimePicker.OnTimeChangedListener_._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2));
}
static OnTimeChangedListener_()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.widget.TimePicker.OnTimeChangedListener_.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/widget/TimePicker$OnTimeChangedListener"));
}
}
// Wrapper allowing a C# delegate to be used where the Java listener is expected.
public delegate void OnTimeChangedListenerDelegate(android.widget.TimePicker arg0, int arg1, int arg2);
internal partial class OnTimeChangedListenerDelegateWrapper : java.lang.Object, OnTimeChangedListener
{
internal new static global::MonoJavaBridge.JniGlobalHandle staticClass;
protected OnTimeChangedListenerDelegateWrapper(global::MonoJavaBridge.JNIEnv @__env) : base(@__env)
{
}
private static global::MonoJavaBridge.MethodId _m0;
public OnTimeChangedListenerDelegateWrapper() : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper._m0.native == global::System.IntPtr.Zero)
global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper._m0 = @__env.GetMethodIDNoThrow(global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper.staticClass, "<init>", "()V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper.staticClass, global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper._m0);
Init(@__env, handle);
}
static OnTimeChangedListenerDelegateWrapper()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/widget/TimePicker_OnTimeChangedListenerDelegateWrapper"));
}
}
internal partial class OnTimeChangedListenerDelegateWrapper
{
private OnTimeChangedListenerDelegate myDelegate;
public void onTimeChanged(android.widget.TimePicker arg0, int arg1, int arg2)
{
myDelegate(arg0, arg1, arg2);
}
public static implicit operator OnTimeChangedListenerDelegateWrapper(OnTimeChangedListenerDelegate d)
{
global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper ret = new global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper();
ret.myDelegate = d;
global::MonoJavaBridge.JavaBridge.SetGCHandle(global::MonoJavaBridge.JNIEnv.ThreadEnv, ret);
return ret;
}
}
public new bool Enabled
{
set
{
setEnabled(value);
}
}
private static global::MonoJavaBridge.MethodId _m0;
public override void setEnabled(bool arg0)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.staticClass, "setEnabled", "(Z)V", ref global::android.widget.TimePicker._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
private static global::MonoJavaBridge.MethodId _m1;
protected override void onRestoreInstanceState(android.os.Parcelable arg0)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.staticClass, "onRestoreInstanceState", "(Landroid/os/Parcelable;)V", ref global::android.widget.TimePicker._m1, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
private static global::MonoJavaBridge.MethodId _m2;
protected override global::android.os.Parcelable onSaveInstanceState()
{
return global::MonoJavaBridge.JavaBridge.CallIJavaObjectMethod<android.os.Parcelable>(this, global::android.widget.TimePicker.staticClass, "onSaveInstanceState", "()Landroid/os/Parcelable;", ref global::android.widget.TimePicker._m2) as android.os.Parcelable;
}
public new int Baseline
{
get
{
return getBaseline();
}
}
private static global::MonoJavaBridge.MethodId _m3;
public override int getBaseline()
{
return global::MonoJavaBridge.JavaBridge.CallIntMethod(this, global::android.widget.TimePicker.staticClass, "getBaseline", "()I", ref global::android.widget.TimePicker._m3);
}
private static global::MonoJavaBridge.MethodId _m4;
public virtual bool is24HourView()
{
return global::MonoJavaBridge.JavaBridge.CallBooleanMethod(this, global::android.widget.TimePicker.staticClass, "is24HourView", "()Z", ref global::android.widget.TimePicker._m4);
}
private static global::MonoJavaBridge.MethodId _m5;
public virtual void setOnTimeChangedListener(android.widget.TimePicker.OnTimeChangedListener arg0)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.staticClass, "setOnTimeChangedListener", "(Landroid/widget/TimePicker$OnTimeChangedListener;)V", ref global::android.widget.TimePicker._m5, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
public void setOnTimeChangedListener(global::android.widget.TimePicker.OnTimeChangedListenerDelegate arg0)
{
setOnTimeChangedListener((global::android.widget.TimePicker.OnTimeChangedListenerDelegateWrapper)arg0);
}
public new global::java.lang.Integer CurrentHour
{
get
{
return getCurrentHour();
}
set
{
setCurrentHour(value);
}
}
private static global::MonoJavaBridge.MethodId _m6;
public virtual global::java.lang.Integer getCurrentHour()
{
return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.Integer>(this, global::android.widget.TimePicker.staticClass, "getCurrentHour", "()Ljava/lang/Integer;", ref global::android.widget.TimePicker._m6) as java.lang.Integer;
}
private static global::MonoJavaBridge.MethodId _m7;
public virtual void setCurrentHour(java.lang.Integer arg0)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.staticClass, "setCurrentHour", "(Ljava/lang/Integer;)V", ref global::android.widget.TimePicker._m7, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
private static global::MonoJavaBridge.MethodId _m8;
public virtual void setIs24HourView(java.lang.Boolean arg0)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.staticClass, "setIs24HourView", "(Ljava/lang/Boolean;)V", ref global::android.widget.TimePicker._m8, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
public new global::java.lang.Integer CurrentMinute
{
get
{
return getCurrentMinute();
}
set
{
setCurrentMinute(value);
}
}
private static global::MonoJavaBridge.MethodId _m9;
public virtual global::java.lang.Integer getCurrentMinute()
{
return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.Integer>(this, global::android.widget.TimePicker.staticClass, "getCurrentMinute", "()Ljava/lang/Integer;", ref global::android.widget.TimePicker._m9) as java.lang.Integer;
}
private static global::MonoJavaBridge.MethodId _m10;
public virtual void setCurrentMinute(java.lang.Integer arg0)
{
global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.widget.TimePicker.staticClass, "setCurrentMinute", "(Ljava/lang/Integer;)V", ref global::android.widget.TimePicker._m10, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
}
// Constructors mirror the three Java TimePicker constructors.
private static global::MonoJavaBridge.MethodId _m11;
public TimePicker(android.content.Context arg0) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.widget.TimePicker._m11.native == global::System.IntPtr.Zero)
global::android.widget.TimePicker._m11 = @__env.GetMethodIDNoThrow(global::android.widget.TimePicker.staticClass, "<init>", "(Landroid/content/Context;)V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.widget.TimePicker.staticClass, global::android.widget.TimePicker._m11, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0));
Init(@__env, handle);
}
private static global::MonoJavaBridge.MethodId _m12;
public TimePicker(android.content.Context arg0, android.util.AttributeSet arg1) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.widget.TimePicker._m12.native == global::System.IntPtr.Zero)
global::android.widget.TimePicker._m12 = @__env.GetMethodIDNoThrow(global::android.widget.TimePicker.staticClass, "<init>", "(Landroid/content/Context;Landroid/util/AttributeSet;)V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.widget.TimePicker.staticClass, global::android.widget.TimePicker._m12, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1));
Init(@__env, handle);
}
private static global::MonoJavaBridge.MethodId _m13;
public TimePicker(android.content.Context arg0, android.util.AttributeSet arg1, int arg2) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv)
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
if (global::android.widget.TimePicker._m13.native == global::System.IntPtr.Zero)
global::android.widget.TimePicker._m13 = @__env.GetMethodIDNoThrow(global::android.widget.TimePicker.staticClass, "<init>", "(Landroid/content/Context;Landroid/util/AttributeSet;I)V");
global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.widget.TimePicker.staticClass, global::android.widget.TimePicker._m13, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2));
Init(@__env, handle);
}
static TimePicker()
{
global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv;
global::android.widget.TimePicker.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/widget/TimePicker"));
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Description;
using Reflow.WebDemo.Areas.HelpPage.Models;
namespace Reflow.WebDemo.Areas.HelpPage
{
public static class HelpPageConfigurationExtensions
{
private const string ApiModelPrefix = "MS_HelpPageApiModel_";
/// <summary>
/// Registers <paramref name="documentationProvider"/> as the help page's
/// <see cref="IDocumentationProvider"/>, replacing any previously configured one.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="documentationProvider">The documentation provider.</param>
public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
{
    var services = config.Services;
    services.Replace(typeof(IDocumentationProvider), documentationProvider);
}
/// <summary>
/// Supplies the objects the formatters use to produce sample requests/responses.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleObjects">The sample objects, keyed by type.</param>
public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
{
    var generator = config.GetHelpPageSampleGenerator();
    generator.SampleObjects = sampleObjects;
}
/// <summary>
/// Sets the sample request directly for the specified media type and action
/// (wildcard parameter set).
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample request directly for the specified media type and action,
/// scoped to the given parameter names.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample request.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
{
config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
}
/// <summary>
/// Sets the sample response directly for the specified media type of the action,
/// scoped to the given parameter names.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample response.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
{
    var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
}
/// <summary>
/// Sets the sample directly for all actions with the specified type and media type.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sample">The sample.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="type">The parameter type or return type of an action.</param>
public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
{
    // A type-keyed sample (no controller/action) applies wherever the type appears.
    var generator = config.GetHelpPageSampleGenerator();
    generator.ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: the mapping applies to any parameter combination.
    var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate request samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    // Map the request direction of this specific action/parameter combination to the given type.
    var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
{
    // "*" is the wildcard parameter list: the mapping applies to any parameter combination.
    var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// The help page will use this information to produce more accurate response samples.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="type">The type.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
{
    // Map the response direction of this specific action/parameter combination to the given type.
    var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
    config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
}
/// <summary>
/// Gets the help page sample generator, creating one on first use and caching it
/// in the configuration's property bag for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <returns>The help page sample generator.</returns>
public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
{
    // GetOrAdd keyed by the generator type: at most one generator per configuration.
    object generator = config.Properties.GetOrAdd(typeof(HelpPageSampleGenerator), k => new HelpPageSampleGenerator());
    return (HelpPageSampleGenerator)generator;
}
/// <summary>
/// Sets the help page sample generator, replacing any generator that was
/// previously cached in the configuration's property bag.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="sampleGenerator">The help page sample generator.</param>
public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
{
    // Both the add and the update branch yield the supplied instance.
    config.Properties.AddOrUpdate(typeof(HelpPageSampleGenerator), k => sampleGenerator, (k, existing) => sampleGenerator);
}
/// <summary>
/// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary>
/// <param name="config">The <see cref="HttpConfiguration"/>.</param>
/// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
/// <returns>
/// An <see cref="HelpPageApiModel"/>, or null when no API description matches
/// <paramref name="apiDescriptionId"/> (no model is cached in that case).
/// </returns>
public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
{
object model;
string modelId = ApiModelPrefix + apiDescriptionId;
if (!config.Properties.TryGetValue(modelId, out model))
{
// Cache miss: find the matching API description (case-insensitive friendly id) and build the model once.
Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
if (apiDescription != null)
{
HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
model = GenerateApiModel(apiDescription, sampleGenerator);
// TryAdd: a concurrent caller may have cached a model first; that is harmless.
config.Properties.TryAdd(modelId, model);
}
}
// model remains null for an unknown id, so the cast can yield null to the caller.
return (HelpPageApiModel)model;
}
// Builds the help-page model for one API description, collecting sample requests and
// responses from the generator. Any exception raised while generating samples is
// captured into the model's ErrorMessages instead of propagating.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator)
{
    var model = new HelpPageApiModel { ApiDescription = apiDescription };
    try
    {
        foreach (var sample in sampleGenerator.GetSampleRequests(apiDescription))
        {
            model.SampleRequests.Add(sample.Key, sample.Value);
            LogInvalidSampleAsError(model, sample.Value);
        }
        foreach (var sample in sampleGenerator.GetSampleResponses(apiDescription))
        {
            model.SampleResponses.Add(sample.Key, sample.Value);
            LogInvalidSampleAsError(model, sample.Value);
        }
    }
    catch (Exception e)
    {
        model.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception Message: {0}", e.Message));
    }
    return model;
}
// Records the sample's error message on the model when the sample is an InvalidSample;
// valid samples are ignored.
private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
{
    var invalidSample = sample as InvalidSample;
    if (invalidSample == null)
    {
        return;
    }
    apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
}
}
}
| |
namespace AngleSharp
{
using AngleSharp.Browser;
using AngleSharp.Common;
using AngleSharp.Dom;
using AngleSharp.Io;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
/// <summary>
/// A set of useful extensions for Configuration (or derived) objects.
/// </summary>
public static class ConfigurationExtensions
{
#region General
/// <summary>
/// Returns a new configuration that includes the given service.
/// The service is appended after the services already registered.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="service">The service to register.</param>
/// <returns>The new instance with the service.</returns>
public static IConfiguration With(this IConfiguration configuration, Object service)
{
configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
service = service ?? throw new ArgumentNullException(nameof(service));
return new Configuration(configuration.Services.Concat(service));
}
/// <summary>
/// Returns a new configuration that includes only the given service,
/// excluding other instances or instance creators for the same service.
/// </summary>
/// <typeparam name="TService">The service to include exclusively.</typeparam>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="service">The service to include.</param>
/// <returns>The new instance with only the given service.</returns>
public static IConfiguration WithOnly<TService>(this IConfiguration configuration, TService service)
{
if (service is null)
{
throw new ArgumentNullException(nameof(service));
}
// Drop every existing TService instance and creator before adding the new one.
return configuration.Without<TService>().With(service);
}
/// <summary>
/// Returns a new configuration that includes only the given service
/// creator, excluding other instances or instance creators for the same
/// service.
/// </summary>
/// <typeparam name="TService">The service to include exclusively.</typeparam>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="creator">The service creator to include.</param>
/// <returns>The new instance with only the given service.</returns>
public static IConfiguration WithOnly<TService>(this IConfiguration configuration, Func<IBrowsingContext, TService> creator)
{
creator = creator ?? throw new ArgumentNullException(nameof(creator));
return configuration.Without<TService>().With(creator);
}
/// <summary>
/// Returns a new configuration that excludes the given service.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="service">The service to unregister.</param>
/// <returns>The new instance without the service.</returns>
public static IConfiguration Without(this IConfiguration configuration, Object service)
{
configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
service = service ?? throw new ArgumentNullException(nameof(service));
return new Configuration(configuration.Services.Except(service));
}
/// <summary>
/// Returns a new configuration that includes the given services.
/// NOTE(review): unlike the single-service overload, the new services are
/// placed BEFORE the existing ones here — confirm the intended precedence.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="services">The services to register.</param>
/// <returns>The new instance with the services.</returns>
public static IConfiguration With(this IConfiguration configuration, IEnumerable<Object> services)
{
configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
services = services ?? throw new ArgumentNullException(nameof(services));
return new Configuration(services.Concat(configuration.Services));
}
/// <summary>
/// Returns a new configuration that excludes the given services.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="services">The services to unregister.</param>
/// <returns>The new instance without the services.</returns>
public static IConfiguration Without(this IConfiguration configuration, IEnumerable<Object> services)
{
configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
services = services ?? throw new ArgumentNullException(nameof(services));
return new Configuration(configuration.Services.Except(services));
}
/// <summary>
/// Returns a new configuration that includes the given service creator.
/// </summary>
/// <typeparam name="TService">The type of service to create.</typeparam>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="creator">The creator to register.</param>
/// <returns>The new instance with the services.</returns>
public static IConfiguration With<TService>(this IConfiguration configuration, Func<IBrowsingContext, TService> creator)
{
creator = creator ?? throw new ArgumentNullException(nameof(creator));
// The creator is stored as a plain service object; consumers resolve it by its delegate type.
return configuration.With((Object)creator);
}
/// <summary>
/// Returns a new configuration that excludes the given service creator.
/// </summary>
/// <typeparam name="TService">The type of service to remove.</typeparam>
/// <param name="configuration">The configuration to extend.</param>
/// <returns>The new instance without the services.</returns>
public static IConfiguration Without<TService>(this IConfiguration configuration)
{
configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
// Remove both direct instances of TService and registered creator delegates for it.
var items = configuration.Services.OfType<TService>().Cast<Object>();
var creators = configuration.Services.OfType<Func<IBrowsingContext, TService>>();
return configuration.Without(items).Without(creators);
}
/// <summary>
/// Checks if the configuration holds any references to the given service,
/// either as a direct instance or as a creator delegate.
/// </summary>
/// <typeparam name="TService">The type of service to check for.</typeparam>
/// <param name="configuration">The configuration to examine.</param>
/// <returns>True if any service / creators are found, otherwise false.</returns>
public static Boolean Has<TService>(this IConfiguration configuration)
{
configuration = configuration ?? throw new ArgumentNullException(nameof(configuration));
return configuration.Services.OfType<TService>().Any() || configuration.Services.OfType<Func<IBrowsingContext, TService>>().Any();
}
#endregion
#region Loading Resources
/// <summary>
/// Registers the default loader service, if no other loader has been registered yet.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="setup">Configuration for the loader service.</param>
/// <returns>The new configuration with the service.</returns>
public static IConfiguration WithDefaultLoader(this IConfiguration configuration, LoaderOptions? setup = null)
{
var config = setup ?? new LoaderOptions();
// Only add the default HTTP requester when no requester is registered yet.
if (!configuration.Has<IRequester>())
{
configuration = configuration
.With(new DefaultHttpRequester());
}
if (!config.IsNavigationDisabled)
{
configuration = configuration
.With<IDocumentLoader>(ctx => new DefaultDocumentLoader(ctx, config.Filter));
}
if (config.IsResourceLoadingEnabled)
{
configuration = configuration
.With<IResourceLoader>(ctx => new DefaultResourceLoader(ctx, config.Filter));
}
return configuration;
}
#endregion
#region Culture
/// <summary>
/// Returns a new configuration that uses the culture with the provided
/// name.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="name">The name of the culture to set.</param>
/// <returns>The new instance with the culture being set.</returns>
/// <exception cref="CultureNotFoundException">If the name is not a valid culture name.</exception>
public static IConfiguration WithCulture(this IConfiguration configuration, String name)
{
var culture = new CultureInfo(name);
return configuration.WithCulture(culture);
}
/// <summary>
/// Returns a new configuration that uses the given culture. Providing
/// null will reset the culture to the default one.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="culture">The culture to set.</param>
/// <returns>The new instance with the culture being set.</returns>
public static IConfiguration WithCulture(this IConfiguration configuration, CultureInfo culture) => configuration.With(culture);
#endregion
#region Including Refresh
/// <summary>
/// Registers a handler to include the meta data refresh.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <param name="shouldRefresh">The optional callback.</param>
/// <returns>The new instance with the service.</returns>
public static IConfiguration WithMetaRefresh(this IConfiguration configuration, Predicate<Url>? shouldRefresh = null)
{
var service = new RefreshMetaHandler(shouldRefresh);
return configuration.With(service);
}
#endregion
#region Setting Encoding
/// <summary>
/// Registers the default encoding determination algorithm, as
/// specified by the W3C.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <returns>The new instance with the service.</returns>
public static IConfiguration WithLocaleBasedEncoding(this IConfiguration configuration)
{
var service = new LocaleEncodingProvider();
return configuration.With(service);
}
#endregion
#region Cookies
/// <summary>
/// Registers the default cookie service if no other cookie service has
/// been registered yet.
/// </summary>
/// <param name="configuration">The configuration to extend.</param>
/// <returns>The new instance with the service.</returns>
public static IConfiguration WithDefaultCookies(this IConfiguration configuration)
{
var service = new MemoryCookieProvider();
return configuration.With(service);
}
#endregion
}
}
| |
/*============================================================================
* Copyright (C) Microsoft Corporation, All rights reserved.
*============================================================================
*/
#region Using directives
using System;
using System.Diagnostics;
using System.Globalization;
using System.Management.Automation;
using Microsoft.Management.Infrastructure.Options;
#endregion
namespace Microsoft.Management.Infrastructure.CimCmdlets
{
#region class ErrorToErrorRecord
/// <summary>
/// <para>
/// Converts an error CIM instance or an exception to a
/// <see cref="System.Management.Automation.ErrorRecord"/>.
/// </para>
/// </summary>
internal sealed class ErrorToErrorRecord
{
/// <summary>
/// <para>
/// Creates an ErrorRecord from an exception object; <see cref="Exception"/>
/// can be either a <see cref="CimException"/>, an <see cref="IContainsErrorRecord"/>
/// implementation, or any other exception.
/// </para>
/// </summary>
/// <param name="inner">The exception to convert; must not be null.</param>
/// <param name="context">the context starting the operation, which generated the error</param>
/// <param name="cimResultContext">the CimResultContext used to provide ErrorSource, etc. info.</param>
/// <returns>The <see cref="ErrorRecord"/> describing <paramref name="inner"/>.</returns>
internal static ErrorRecord ErrorRecordFromAnyException(
InvocationContext context,
Exception inner,
CimResultContext cimResultContext)
{
Debug.Assert(inner != null, "Caller should verify inner != null");
// CimException carries native error data; convert it with the richer CIM-specific path.
CimException cimException = inner as CimException;
if (cimException != null)
{
return CreateFromCimException(context, cimException, cimResultContext);
}
// Exceptions carrying an ErrorRecord keep their original error id and category,
// prefixed with "CimCmdlet_" to identify the source module.
var containsErrorRecord = inner as IContainsErrorRecord;
if (containsErrorRecord != null)
{
return InitializeErrorRecord(context,
exception: inner,
errorId: "CimCmdlet_" + containsErrorRecord.ErrorRecord.FullyQualifiedErrorId,
errorCategory: containsErrorRecord.ErrorRecord.CategoryInfo.Category,
cimResultContext: cimResultContext);
}
else
{
// Fallback: derive the error id from the exception type name.
return InitializeErrorRecord(context,
exception: inner,
errorId: "CimCmdlet_" + inner.GetType().Name,
errorCategory: ErrorCategory.NotSpecified,
cimResultContext: cimResultContext);
}
}
#region Helper functions
/// <summary>
/// Create <see cref="ErrorRecord"/> from <see cref="CimException"/> object.
/// </summary>
/// <param name="context"></param>
/// <param name="cimException">The CIM exception to convert; must not be null.</param>
/// <param name="cimResultContext">the CimResultContext used to provide ErrorSource, etc. info.</param>
/// <returns>The resulting <see cref="ErrorRecord"/>.</returns>
internal static ErrorRecord CreateFromCimException(
InvocationContext context,
CimException cimException,
CimResultContext cimResultContext)
{
Debug.Assert(cimException != null, "Caller should verify cimException != null");
return InitializeErrorRecord(context, cimException, cimResultContext);
}
/// <summary>
/// Create <see cref="ErrorRecord"/> from <see cref="Exception"/> object.
/// </summary>
/// <param name="context"></param>
/// <param name="exception"></param>
/// <param name="errorId"></param>
/// <param name="errorCategory"></param>
/// <param name="cimResultContext">the CimResultContext used to provide ErrorSource, etc. info.</param>
/// <returns>The resulting <see cref="ErrorRecord"/>.</returns>
internal static ErrorRecord InitializeErrorRecord(
InvocationContext context,
Exception exception,
string errorId,
ErrorCategory errorCategory,
CimResultContext cimResultContext)
{
return InitializeErrorRecordCore(
context,
exception: exception,
errorId: errorId,
errorCategory: errorCategory,
cimResultContext: cimResultContext);
}
/// <summary>
/// Create <see cref="ErrorRecord"/> from <see cref="CimException"/> object,
/// deriving the error id from the exception's message id or native error code
/// and the category from the CIM error data.
/// </summary>
/// <param name="context"></param>
/// <param name="cimException"></param>
/// <param name="cimResultContext">the CimResultContext used to provide ErrorSource, etc. info.</param>
/// <returns>The resulting <see cref="ErrorRecord"/>.</returns>
internal static ErrorRecord InitializeErrorRecord(
InvocationContext context,
CimException cimException,
CimResultContext cimResultContext)
{
ErrorRecord errorRecord = InitializeErrorRecordCore(
context,
exception: cimException,
// Prefer the server-supplied message id; otherwise synthesize one from the native code.
errorId: cimException.MessageId ?? "MiClientApiError_" + cimException.NativeErrorCode,
errorCategory: ConvertCimExceptionToErrorCategory(cimException),
cimResultContext: cimResultContext);
if (cimException.ErrorData != null)
{
errorRecord.CategoryInfo.TargetName = cimException.ErrorSource;
}
return errorRecord;
}
/// <summary>
/// Create <see cref="ErrorRecord"/> from <see cref="Exception"/> object.
/// When an <see cref="InvocationContext"/> is available, the record is wrapped
/// into a <see cref="System.Management.Automation.Runspaces.RemotingErrorRecord"/>
/// that carries the originating computer name.
/// </summary>
/// <param name="context"></param>
/// <param name="exception"></param>
/// <param name="errorId"></param>
/// <param name="errorCategory"></param>
/// <param name="cimResultContext">the CimResultContext used to provide ErrorSource, etc. info.</param>
/// <returns>The resulting <see cref="ErrorRecord"/>.</returns>
internal static ErrorRecord InitializeErrorRecordCore(
InvocationContext context,
Exception exception,
string errorId,
ErrorCategory errorCategory,
CimResultContext cimResultContext)
{
// Prefer the explicit error source; fall back to the CIM instance being operated on.
object theTargetObject = null;
if (cimResultContext != null)
{
theTargetObject = cimResultContext.ErrorSource;
}
if (theTargetObject == null)
{
if (context != null)
{
if (context.TargetCimInstance != null)
{
theTargetObject = context.TargetCimInstance;
}
}
}
ErrorRecord coreErrorRecord = new ErrorRecord(
exception: exception,
errorId: errorId,
errorCategory: errorCategory,
targetObject: theTargetObject);
if (context == null)
{
return coreErrorRecord;
}
System.Management.Automation.Remoting.OriginInfo originInfo = new System.Management.Automation.Remoting.OriginInfo(
context.ComputerName,
Guid.Empty);
ErrorRecord errorRecord = new System.Management.Automation.Runspaces.RemotingErrorRecord(
coreErrorRecord,
originInfo);
DebugHelper.WriteLogEx("Created RemotingErrorRecord.", 0);
// errorRecord.SetInvocationInfo(jobContext.CmdletInvocationInfo);
// errorRecord.PreserveInvocationInfoOnce = true;
return errorRecord;
}
/// <summary>
/// Convert <see cref="CimException"/> to <see cref="ErrorCategory"/>.
/// The CIM error data takes precedence; the native error code is used
/// only when the error data yields no specific category.
/// </summary>
/// <param name="cimException"></param>
/// <returns></returns>
internal static ErrorCategory ConvertCimExceptionToErrorCategory(CimException cimException)
{
ErrorCategory result = ErrorCategory.NotSpecified;
if (cimException.ErrorData != null)
{
result = ConvertCimErrorToErrorCategory(cimException.ErrorData);
}
if (result == ErrorCategory.NotSpecified)
{
result = ConvertCimNativeErrorCodeToErrorCategory(cimException.NativeErrorCode);
}
return result;
}
/// <summary>
/// Convert <see cref="NativeErrorCode"/> to <see cref="ErrorCategory"/>.
/// Unrecognized codes map to <see cref="ErrorCategory.NotSpecified"/>.
/// </summary>
/// <param name="nativeErrorCode"></param>
/// <returns></returns>
internal static ErrorCategory ConvertCimNativeErrorCodeToErrorCategory(NativeErrorCode nativeErrorCode)
{
switch (nativeErrorCode)
{
case NativeErrorCode.Failed:
return ErrorCategory.NotSpecified;
case NativeErrorCode.AccessDenied:
return ErrorCategory.PermissionDenied;
case NativeErrorCode.InvalidNamespace:
return ErrorCategory.MetadataError;
case NativeErrorCode.InvalidParameter:
return ErrorCategory.InvalidArgument;
case NativeErrorCode.InvalidClass:
return ErrorCategory.MetadataError;
case NativeErrorCode.NotFound:
return ErrorCategory.ObjectNotFound;
case NativeErrorCode.NotSupported:
return ErrorCategory.NotImplemented;
case NativeErrorCode.ClassHasChildren:
return ErrorCategory.MetadataError;
case NativeErrorCode.ClassHasInstances:
return ErrorCategory.MetadataError;
case NativeErrorCode.InvalidSuperClass:
return ErrorCategory.MetadataError;
case NativeErrorCode.AlreadyExists:
return ErrorCategory.ResourceExists;
case NativeErrorCode.NoSuchProperty:
return ErrorCategory.MetadataError;
case NativeErrorCode.TypeMismatch:
return ErrorCategory.InvalidType;
case NativeErrorCode.QueryLanguageNotSupported:
return ErrorCategory.NotImplemented;
case NativeErrorCode.InvalidQuery:
return ErrorCategory.InvalidArgument;
case NativeErrorCode.MethodNotAvailable:
return ErrorCategory.MetadataError;
case NativeErrorCode.MethodNotFound:
return ErrorCategory.MetadataError;
case NativeErrorCode.NamespaceNotEmpty:
return ErrorCategory.MetadataError;
case NativeErrorCode.InvalidEnumerationContext:
return ErrorCategory.MetadataError;
case NativeErrorCode.InvalidOperationTimeout:
return ErrorCategory.InvalidArgument;
case NativeErrorCode.PullHasBeenAbandoned:
return ErrorCategory.OperationStopped;
case NativeErrorCode.PullCannotBeAbandoned:
return ErrorCategory.CloseError;
case NativeErrorCode.FilteredEnumerationNotSupported:
return ErrorCategory.NotImplemented;
case NativeErrorCode.ContinuationOnErrorNotSupported:
return ErrorCategory.NotImplemented;
case NativeErrorCode.ServerLimitsExceeded:
return ErrorCategory.ResourceBusy;
case NativeErrorCode.ServerIsShuttingDown:
return ErrorCategory.ResourceUnavailable;
default:
return ErrorCategory.NotSpecified;
}
}
/// <summary>
/// Convert the Error_Category property of a CIM error instance
/// (<paramref name="cimError"/>) to <see cref="ErrorCategory"/>.
/// Returns <see cref="ErrorCategory.NotSpecified"/> when the instance,
/// the property, or the conversion is unavailable.
/// </summary>
/// <param name="cimError"></param>
/// <returns></returns>
internal static ErrorCategory ConvertCimErrorToErrorCategory(CimInstance cimError)
{
if (cimError == null)
{
return ErrorCategory.NotSpecified;
}
CimProperty errorCategoryProperty = cimError.CimInstanceProperties[@"Error_Category"];
if (errorCategoryProperty == null)
{
return ErrorCategory.NotSpecified;
}
ErrorCategory errorCategoryValue;
if (!LanguagePrimitives.TryConvertTo<ErrorCategory>(errorCategoryProperty.Value, CultureInfo.InvariantCulture, out errorCategoryValue))
{
return ErrorCategory.NotSpecified;
}
return errorCategoryValue;
}
#endregion
}
#endregion
/// <summary>
/// <para>
/// Action that writes an error to the cmdlet pipeline.
/// </para>
/// </summary>
internal sealed class CimWriteError : CimSyncAction
{
/// <summary>
/// Constructor with an <see cref="CimInstance"/> error
/// </summary>
/// <param name="error">The CIM error instance to report.</param>
/// <param name="context">Invocation context of the operation that produced the error.</param>
public CimWriteError(CimInstance error, InvocationContext context)
{
this.error = error;
this.invocationContext = context;
}
/// <summary>
/// Construct with an exception object
/// </summary>
/// <param name="exception">The exception to report.</param>
/// <param name="context">Invocation context of the operation that produced the error.</param>
/// <param name="cimResultContext">Provides ErrorSource, etc. info for the error record.</param>
public CimWriteError(Exception exception, InvocationContext context, CimResultContext cimResultContext)
{
this.exception = exception;
this.invocationContext = context;
this.cimResultContext = cimResultContext;
}
/// <summary>
/// <para>
/// Write error to pipeline. Sets <c>responseType</c> to Yes when the
/// user chose to continue, or NoToAll when WriteError threw (e.g. the
/// user stopped the pipeline), and always unblocks the waiting thread.
/// </para>
/// </summary>
/// <param name="cmdlet"></param>
public override void Execute(CmdletOperationBase cmdlet)
{
Debug.Assert(cmdlet != null, "Caller should verify that cmdlet != null");
try
{
// Wrap a CIM error instance into a CimException; otherwise use the stored exception.
Exception errorException = (error != null) ? new CimException(error) : this.Exception;
// PS engine takes care of handling error action
cmdlet.WriteError(ErrorToErrorRecord.ErrorRecordFromAnyException(this.invocationContext, errorException, this.cimResultContext));
// if user wants to continue, we will get here
this.responseType = CimResponseType.Yes;
}
catch
{
this.responseType = CimResponseType.NoToAll;
throw;
}
finally
{
// unblocking the waiting thread
this.OnComplete();
}
}
#region members
/// <summary>
/// <para>
/// Error instance
/// </para>
/// </summary>
private CimInstance error;
internal CimInstance Error
{
get
{
return error;
}
}
/// <summary>
/// <para>
/// Exception object
/// </para>
/// </summary>
internal Exception Exception
{
get
{
return exception;
}
}
private Exception exception;
/// <summary>
/// <para>
/// <see cref="InvocationContext"/> object that contains
/// the information while issuing the current operation
/// </para>
/// </summary>
private InvocationContext invocationContext;
internal InvocationContext CimInvocationContext
{
get
{
return invocationContext;
}
}
/// <summary>
/// <see cref="CimResultContext"/> supplying ErrorSource info for the error record.
/// </summary>
private CimResultContext cimResultContext;
internal CimResultContext ResultContext
{
get
{
return cimResultContext;
}
}
#endregion
}//End Class
}//End namespace
| |
/*
* Copyright 2008-2014, 2017 the GAP developers. See the NOTICE file at the
* top-level directory of this distribution, and at
* https://gapwiki.chiro.be/copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.ServiceModel;
using System.Web.Mvc;
using Chiro.Cdf.Authentication;
using Chiro.Cdf.ServiceHelper;
using Chiro.Gap.Domain;
using Chiro.Gap.ServiceContracts;
using Chiro.Gap.ServiceContracts.DataContracts;
using Chiro.Gap.ServiceContracts.FaultContracts;
using Chiro.Gap.WebApp.Models;
namespace Chiro.Gap.WebApp.Controllers
{
/// <summary>
/// Deze controller voorziet de acties om afdelingen en afdelingsjaren te
/// maken. te wijzigen, te verwijderen.
/// </summary>
/// <remarks>Het koppelen van leden aan afdelingen gebeurt hier niet, dat zit in de
/// <see cref="LedenController" />.</remarks>
[HandleError]
public class AfdelingenController : BaseController
{
/// <summary>
/// Default constructor. <paramref name="veelGebruikt"/> is best assigned
/// via inversion of control.
/// </summary>
/// <param name="veelGebruikt">Retrieves frequently used data from the cache or, when not
/// available there, via the service</param>
/// <param name="serviceHelper">Helper used to call the backend services</param>
/// <param name="authenticator">Component that handles authentication</param>
public AfdelingenController(IVeelGebruikt veelGebruikt, ServiceHelper serviceHelper, IAuthenticator authenticator)
: base(veelGebruikt, serviceHelper, authenticator)
{
}
/// <summary>
/// Shows the department overview for the current group work year: the active
/// departments, with links to view/edit them. The inactive departments are shown
/// as well, with the option to activate them.
/// </summary>
/// <param name="groepID">ID of the group requesting the page, whose data must be shown</param>
/// <returns>The department overview for the current work year</returns>
[HandleError]
public override ActionResult Index(int groepID)
{
    // The actual overview lives on the Groep controller; redirect there.
    return RedirectToAction("Afdelingen", new { Controller = "Groep", groepID });
}
/// <summary>
/// Returns a JSON representation of an AfdelingsOverzichtModel for the group work year
/// with the given <paramref name="groepsWerkJaarID"/>. An AfdelingsOverzichtModel
/// contains a list of active departments and a list of inactive departments.
/// </summary>
/// <param name="groepsWerkJaarID">ID of the group work year for which the overview is fetched</param>
/// <param name="groepID">Group we are currently working in</param>
/// <returns>JSON representation of an AfdelingsOverzichtModel for the group work year
/// with the given <paramref name="groepsWerkJaarID"/>, containing active and
/// inactive departments.</returns>
[AcceptVerbs(HttpVerbs.Post)]
[HandleError]
public JsonResult AfdelingsInfo(int groepsWerkJaarID, int groepID)
{
    var model = new AfdelingsOverzichtModel();
    BaseModelInit(model, groepID);
    // Department details for departments that occur in the given work year as a department year.
    model.Actief = ServiceHelper.CallService<IGroepenService, IList<AfdelingDetail>>(
        svc => svc.ActieveAfdelingenOphalen(groepsWerkJaarID));
    // Departments of the group that are not in use in the given work year.
    model.NietActief = ServiceHelper.CallService<IGroepenService, IList<AfdelingInfo>>(
        svc => svc.OngebruikteAfdelingenOphalen(groepsWerkJaarID));
    model.Titel = "Afdelingen";
    return Json(model, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Toont de view die toelaat een nieuwe afdeling te maken.
/// </summary>
/// <param name="groepID">Groep waarvoor de afdeling gemaakt moet worden</param>
/// <returns>De view die toelaat een nieuwe afdeling te maken.</returns>
/// <!-- GET: /Afdeling/Nieuw/ -->
[HandleError]
public ActionResult Nieuw(int groepID)
{
    var nieuwModel = new AfdelingInfoModel();
    BaseModelInit(nieuwModel, groepID);

    // Start from a blank AfdelingInfo; the user fills in name and code.
    nieuwModel.Info = new AfdelingInfo();
    nieuwModel.Titel = Properties.Resources.NieuweAfdelingTitel;

    return View("Nieuw", nieuwModel);
}
/// <summary>
/// Maakt een nieuwe afdeling, op basis van <paramref name="model"/>
/// </summary>
/// <param name="model">Bevat naam en code voor de nieuwe afdeling</param>
/// <param name="groepID">ID van de groep waarvoor de afdeling gemaakt moet worden</param>
/// <returns>Het overzicht van de afdelingen, indien de nieuwe afdeling goed gemaakt is.
/// In het andere geval opnieuw de view om een afdeling bij te maken.</returns>
[AcceptVerbs(HttpVerbs.Post)]
[HandleError]
public ActionResult Nieuw(AfdelingInfoModel model, int groepID)
{
    model.Titel = Properties.Resources.NieuweAfdelingTitel;
    BaseModelInit(model, groepID);

    if (!ModelState.IsValid)
    {
        // Invalid input: redisplay the form.
        return View(model); // FIXME only reloads part of the previous page
    }

    try
    {
        ServiceHelper.CallService<IGroepenService>(svc => svc.AfdelingAanmaken(groepID, model.Info.Naam, model.Info.Afkorting));
        TempData["succes"] = Properties.Resources.WijzigingenOpgeslagenFeedback;

        // Redirect instead of rendering a view, so a browser refresh does not
        // prompt the user to re-post the form data.
        return RedirectToAction("Afdelingen", new { Controller = "Groep", groepID = model.GroepID });
    }
    catch (FaultException<BestaatAlFault<AfdelingInfo>> ex)
    {
        // When both the name AND the code already exist we want two error
        // messages; hence the 'matched' bookkeeping. This does not cover every
        // case: if the new afdeling has the name of existing afdeling A and the
        // code of existing afdeling B, only the code error is reported, because
        // the fault carries just one existing afdeling.
        var matched = false;

        if (String.Compare(ex.Detail.Bestaande.Afkorting, model.Info.Afkorting, StringComparison.OrdinalIgnoreCase) == 0)
        {
            matched = true;
            ModelState.AddModelError(
                "Info.Afkorting",
                string.Format(
                    Properties.Resources.AfdelingsCodeBestaatAl,
                    ex.Detail.Bestaande.Afkorting,
                    ex.Detail.Bestaande.Naam));
        }

        if (String.Compare(ex.Detail.Bestaande.Naam, model.Info.Naam, StringComparison.OrdinalIgnoreCase) == 0)
        {
            matched = true;
            ModelState.AddModelError(
                "Info.Naam",
                string.Format(
                    Properties.Resources.AfdelingsNaamBestaatAl,
                    ex.Detail.Bestaande.Afkorting,
                    ex.Detail.Bestaande.Naam));
        }

        // The service only throws this fault when name or code clashes.
        Debug.Assert(matched);

        return View(model); // FIXME only reloads part of the previous page
    }
}
/// <summary>
/// Verwijdert een afdeling uit het lijstje van actieve afdelingen in een bepaald werkJaar,
/// en levert een JSON-lijst op die 'gelukt' bevat als het gelukt is, en anders 'fail'.
/// </summary>
/// <param name="groepID">ID van de groep die we aan het bewerken zijn</param>
/// <param name="id">ID van het afdelingsjaar dat we willen verwijderen</param>
/// <returns>'gelukt' als het gelukt is, anders 'fail'.</returns>
[HandleError]
public JsonResult VerwijderenVanWerkjaar(int groepID, int id)
{
    // TODO (#1812): gestandaardiseerde manier van feedback.
    string resultaat;
    try
    {
        ServiceHelper.CallService<IGroepenService>(svc => svc.AfdelingsJaarVerwijderen(id));
        resultaat = "gelukt";
    }
    catch (FaultException)
    {
        resultaat = "fail";
    }
    return Json(new List<String> { resultaat }, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Verwijdert een afdeling volledig uit de database. Levert een JSON-lijst op
/// die 'gelukt' bevat als het gelukt is, en anders 'fail'.
/// </summary>
/// <param name="groepID">ID van de groep die we aan het bewerken zijn</param>
/// <param name="id">ID van de afdeling die we willen verwijderen</param>
/// <returns>'gelukt' als het gelukt is, anders 'fail'.</returns>
[HandleError]
public JsonResult Verwijderen(int groepID, int id)
{
    // TODO (#1812): gestandaardiseerde manier van feedback.
    string resultaat;
    try
    {
        ServiceHelper.CallService<IGroepenService>(svc => svc.AfdelingVerwijderen(id));
        resultaat = "gelukt";
    }
    catch (FaultException)
    {
        resultaat = "fail";
    }
    return Json(new List<String> { resultaat }, JsonRequestBehavior.AllowGet);
}
/// <summary>
/// Laat de gebruiker een nieuw afdelingsjaar maken voor een niet-actieve afdeling
/// (met AfdelingID <paramref name="id"/>)
/// </summary>
/// <param name="groepID">ID van de geselecteerde groep</param>
/// <param name="id">AfdelingID van de te activeren afdeling</param>
/// <returns>De view 'afdelingsjaar'</returns>
[HandleError]
public ActionResult Activeren(int groepID, int id)
{
    var model = new AfdelingsJaarModel();
    BaseModelInit(model, groepID);

    // The view needs the list of official afdelingen plus the afdeling being activated.
    model.OfficieleAfdelingen = ServiceHelper.CallService<IGroepenService, IEnumerable<OfficieleAfdelingDetail>>(
        svc => svc.OfficieleAfdelingenOphalen());
    model.Afdeling = ServiceHelper.CallService<IGroepenService, AfdelingInfo>(
        svc => svc.AfdelingOphalen(id));

    // Fresh afdelingsjaar, pre-linked to the afdeling in question.
    model.AfdelingsJaar = new AfdelingsJaarDetail();
    model.AfdelingsJaar.AfdelingID = model.Afdeling.ID;
    model.Titel = "Afdeling activeren";

    return View("AfdelingsJaar", model);
}
/// <summary>
/// Laat de gebruiker het bestaande afdelingsjaar met afdelingsjaarID <paramref name="id"/>
/// bewerken.
/// </summary>
/// <param name="groepID">ID van de geselecteerde groep</param>
/// <param name="id">ID van het te bewerken afdelingsjaar</param>
/// <returns>De view 'afdelingsjaar'</returns>
[HandleError]
public ActionResult AfdJaarBewerken(int groepID, int id)
{
    var model = new AfdelingsJaarModel();
    BaseModelInit(model, groepID);

    var detail = ServiceHelper.CallService<IGroepenService, AfdelingDetail>(svc => svc.AfdelingDetailOphalen(id));

    // Copy the afdeling part of the detail into its own info object.
    model.Afdeling = new AfdelingInfo
    {
        ID = detail.AfdelingID,
        Naam = detail.AfdelingNaam,
        Afkorting = detail.AfdelingAfkorting
    };
    model.AfdelingsJaar = detail; // AfdelingDetail derives from the afdelingsjaar detail type
    model.OfficieleAfdelingen = ServiceHelper.CallService<IGroepenService, IEnumerable<OfficieleAfdelingDetail>>(
        svc => svc.OfficieleAfdelingenOphalen());
    model.Titel = "Afdeling bewerken";

    return View("AfdelingsJaar", model);
}
/// <summary>
/// Postback voor activeren/bewerken afdelingsjaar
/// </summary>
/// <param name="model">De property <c>model.AfdelingsJaar</c> bevat de relevante details over het afdelingsjaar</param>
/// <param name="groepID">Groep waarin de gebruiker momenteel aan het werken is</param>
/// <returns>Het afdelingsoverzicht als de wijzigingen bewaard zijn, en anders opnieuw de
/// 'AfdelingsJaarView'.</returns>
[AcceptVerbs(HttpVerbs.Post)]
[HandleError]
public ActionResult AfdJaarBewerken(AfdelingsJaarModel model, int groepID)
{
    BaseModelInit(model, groepID);

    // Als de gebruiker een kleiner geboortejaar 'tot' dan 'van' ingeeft, wisselen we die
    // stiekem om. (Ticket #289)
    // BUGFIX: the original assigned Van = Tot and then Tot = Van, which left BOTH
    // properties equal to the original 'Tot' instead of swapping them. Use a temporary.
    if (model.AfdelingsJaar.GeboorteJaarTot < model.AfdelingsJaar.GeboorteJaarVan)
    {
        int oorspronkelijkVan = model.AfdelingsJaar.GeboorteJaarVan;
        model.AfdelingsJaar.GeboorteJaarVan = model.AfdelingsJaar.GeboorteJaarTot;
        model.AfdelingsJaar.GeboorteJaarTot = oorspronkelijkVan;
    }

    try
    {
        // The view allows editing both the afdeling itself (name, code) and the
        // afdelingsjaar (birth years, gender, official afdeling), so both must be saved.
        ServiceHelper.CallService<IGroepenService>(e => e.AfdelingsJaarBewaren(model.AfdelingsJaar));
        ServiceHelper.CallService<IGroepenService>(e => e.AfdelingBewaren(model.Afdeling));

        TempData["succes"] = Properties.Resources.WijzigingenOpgeslagenFeedback;
        return RedirectToAction("Afdelingen", new { Controller = "Groep", groepID = model.GroepID });
    }
    catch (FaultException<FoutNummerFault> ex)
    {
        switch (ex.Detail.FoutNummer)
        {
            case FoutNummer.OngeldigeGeboorteJarenVoorAfdeling:
                ModelState.AddModelError("AfdelingsJaar.GeboorteJaarTot", Properties.Resources.MinimumLeeftijd);
                break;
            default:
                ModelState.AddModelError("Afdeling.Naam", ex.Detail.Bericht);
                break;
        }

        // Re-populate the parts of the model the view needs, then show it again.
        model.Afdeling =
            ServiceHelper.CallService<IGroepenService, AfdelingInfo>(
                svc => svc.AfdelingOphalen(model.AfdelingsJaar.AfdelingID));
        model.OfficieleAfdelingen =
            ServiceHelper.CallService<IGroepenService, IEnumerable<OfficieleAfdelingDetail>>(
                svc => svc.OfficieleAfdelingenOphalen());
        model.Titel = "Afdeling bewerken";

        return View("AfdelingsJaar", model); // FIXME only reloads part of the previous page
    }
    catch (FaultException<BestaatAlFault<AfdelingInfo>> ex)
    {
        if (String.Compare(ex.Detail.Bestaande.Afkorting, model.Afdeling.Afkorting,
                StringComparison.OrdinalIgnoreCase) == 0)
        {
            // The key "Afdeling.Afkorting" must match the model property path,
            // otherwise MVC will not display the error message.
            ModelState.AddModelError(
                "Afdeling.Afkorting",
                string.Format(
                    Properties.Resources.AfdelingsCodeBestaatAl,
                    ex.Detail.Bestaande.Afkorting,
                    ex.Detail.Bestaande.Naam));
        }
        else if (String.Compare(ex.Detail.Bestaande.Naam, model.Afdeling.Naam,
                StringComparison.OrdinalIgnoreCase) == 0)
        {
            ModelState.AddModelError(
                "Afdeling.Naam",
                string.Format(
                    Properties.Resources.AfdelingsNaamBestaatAl,
                    ex.Detail.Bestaande.Afkorting,
                    ex.Detail.Bestaande.Naam));
        }
        else
        {
            // The service only raises this fault for a name or code clash.
            Debug.Assert(false);
        }

        model.OfficieleAfdelingen = ServiceHelper.CallService<IGroepenService, IEnumerable<OfficieleAfdelingDetail>>(
            svc => svc.OfficieleAfdelingenOphalen());
        return View("AfdelingsJaar", model); // FIXME only reloads part of the previous page
    }
}
}
}
| |
/*
* Copyright (c) InWorldz Halcyon Developers
* Copyright (c) Contributors, http://opensimulator.org/
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using OpenSim.Framework;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using Caps=OpenSim.Framework.Communications.Capabilities.Caps;
namespace OpenSim.Region.CoreModules.Capabilities
{
/// <summary>
/// Region module that exposes the "ObjectAdd" capability, letting a viewer rez
/// a new prim into the scene via a CAPS HTTP POST.
/// </summary>
public class ObjectAddModule : IRegionModule
{
    private static readonly ILog m_log =
        LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    private Scene m_scene;

    #region IRegionModule Members

    /// <summary>
    /// Stores the scene and subscribes to capability registration events.
    /// </summary>
    public void Initialize(Scene pScene, IConfigSource pSource)
    {
        m_scene = pScene;
        m_scene.EventManager.OnRegisterCaps += RegisterCaps;
    }

    public void PostInitialize()
    {
    }

    /// <summary>
    /// Registers the ObjectAdd capability for the given agent on a randomly
    /// generated CAPS path.
    /// </summary>
    public void RegisterCaps(UUID agentID, Caps caps)
    {
        UUID capuuid = UUID.Random();

        m_log.InfoFormat("[OBJECTADD]: {0}", "/CAPS/OA/" + capuuid + "/");

        IRequestHandler handler =
            new RestHTTPHandler("POST", "/CAPS/OA/" + capuuid + "/",
                delegate(Hashtable m_dhttpMethod)
                {
                    return ProcessAdd(m_dhttpMethod, agentID, caps);
                });
        caps.RegisterHandler("ObjectAdd", handler);
    }

    public void Close()
    {
    }

    public string Name
    {
        get { return "ObjectAddModule"; }
    }

    public bool IsSharedModule
    {
        get { return false; }
    }

    #endregion

    /// <summary>
    /// Handles an ObjectAdd request: deserializes the LLSD body (v2 format with a
    /// nested "ObjectData" map, or the older flat v1 format), builds a prim shape
    /// from it, and rezzes the prim if the avatar has permission. Returns an LLSD
    /// response containing the new prim's local id, or a 400 response when the
    /// payload is malformed or rezzing fails.
    /// </summary>
    public Hashtable ProcessAdd(Hashtable request, UUID AgentId, Caps cap)
    {
        // Start from a "bad request" response; it is returned as-is whenever
        // parsing or rezzing fails below.
        Hashtable responsedata = new Hashtable();
        responsedata["int_response_code"] = 400;
        responsedata["content_type"] = "text/plain";
        responsedata["str_response_string"] = "Request wasn't what was expected";

        ScenePresence avatar;

        if (!m_scene.TryGetAvatar(AgentId, out avatar))
            return responsedata;

        OSD r = OSDParser.DeserializeLLSDXml((string)request["requestbody"]);

        // Defaults used when the request omits individual fields.
        bool bypass_raycast = false;
        uint everyone_mask = 0;
        uint group_mask = 0;
        uint next_owner_mask = 0;
        uint flags = 0;
        UUID group_id = UUID.Zero;
        int hollow = 0;
        int material = 0;
        int p_code = 0;
        int path_begin = 0;
        int path_curve = 0;
        int path_end = 0;
        int path_radius_offset = 0;
        int path_revolutions = 0;
        int path_scale_x = 0;
        int path_scale_y = 0;
        int path_shear_x = 0;
        int path_shear_y = 0;
        int path_skew = 0;
        int path_taper_x = 0;
        int path_taper_y = 0;
        int path_twist = 0;
        int path_twist_begin = 0;
        int profile_begin = 0;
        int profile_curve = 0;
        int profile_end = 0;
        Vector3 ray_end = Vector3.Zero;
        bool ray_end_is_intersection = false;
        Vector3 ray_start = Vector3.Zero;
        UUID ray_target_id = UUID.Zero;
        Quaternion rotation = Quaternion.Identity;
        Vector3 scale = Vector3.Zero;
        int state = 0;

        if (r.Type != OSDType.Map) // not a proper request
            return responsedata;

        OSDMap rm = (OSDMap)r;

        if (rm.ContainsKey("ObjectData")) // v2 format
        {
            if (rm["ObjectData"].Type != OSDType.Map)
            {
                responsedata["str_response_string"] = "Has ObjectData key, but data not in expected format";
                return responsedata;
            }

            OSDMap ObjMap = (OSDMap)rm["ObjectData"];

            bypass_raycast = ObjMap["BypassRaycast"].AsBoolean();
            everyone_mask = readuintval(ObjMap["EveryoneMask"]);
            flags = readuintval(ObjMap["Flags"]);
            group_mask = readuintval(ObjMap["GroupMask"]);
            material = ObjMap["Material"].AsInteger();
            next_owner_mask = readuintval(ObjMap["NextOwnerMask"]);
            p_code = ObjMap["PCode"].AsInteger();

            if (ObjMap.ContainsKey("Path"))
            {
                if (ObjMap["Path"].Type != OSDType.Map)
                {
                    responsedata["str_response_string"] = "Has Path key, but data not in expected format";
                    return responsedata;
                }

                OSDMap PathMap = (OSDMap)ObjMap["Path"];
                path_begin = PathMap["Begin"].AsInteger();
                path_curve = PathMap["Curve"].AsInteger();
                path_end = PathMap["End"].AsInteger();
                path_radius_offset = PathMap["RadiusOffset"].AsInteger();
                path_revolutions = PathMap["Revolutions"].AsInteger();
                path_scale_x = PathMap["ScaleX"].AsInteger();
                path_scale_y = PathMap["ScaleY"].AsInteger();
                path_shear_x = PathMap["ShearX"].AsInteger();
                path_shear_y = PathMap["ShearY"].AsInteger();
                path_skew = PathMap["Skew"].AsInteger();
                path_taper_x = PathMap["TaperX"].AsInteger();
                path_taper_y = PathMap["TaperY"].AsInteger();
                path_twist = PathMap["Twist"].AsInteger();
                path_twist_begin = PathMap["TwistBegin"].AsInteger();
            }

            if (ObjMap.ContainsKey("Profile"))
            {
                if (ObjMap["Profile"].Type != OSDType.Map)
                {
                    responsedata["str_response_string"] = "Has Profile key, but data not in expected format";
                    return responsedata;
                }

                OSDMap ProfileMap = (OSDMap)ObjMap["Profile"];
                profile_begin = ProfileMap["Begin"].AsInteger();
                profile_curve = ProfileMap["Curve"].AsInteger();
                profile_end = ProfileMap["End"].AsInteger();
                hollow = ProfileMap["Hollow"].AsInteger();
            }

            ray_end_is_intersection = ObjMap["RayEndIsIntersection"].AsBoolean();
            ray_target_id = ObjMap["RayTargetId"].AsUUID();
            state = ObjMap["State"].AsInteger();

            try
            {
                ray_end = ((OSDArray)ObjMap["RayEnd"]).AsVector3();
                ray_start = ((OSDArray)ObjMap["RayStart"]).AsVector3();
                scale = ((OSDArray)ObjMap["Scale"]).AsVector3();
                rotation = ((OSDArray)ObjMap["Rotation"]).AsQuaternion();
            }
            catch (Exception)
            {
                responsedata["str_response_string"] = "RayEnd, RayStart, Scale or Rotation wasn't in the expected format";
                return responsedata;
            }

            if (rm.ContainsKey("AgentData"))
            {
                if (rm["AgentData"].Type != OSDType.Map)
                {
                    responsedata["str_response_string"] = "Has AgentData key, but data not in expected format";
                    return responsedata;
                }

                OSDMap AgentDataMap = (OSDMap)rm["AgentData"];
                group_id = AgentDataMap["GroupId"].AsUUID();
            }
        }
        else
        { // v1 format: all fields at the top level of the map
            bypass_raycast = rm["bypass_raycast"].AsBoolean();
            everyone_mask = readuintval(rm["everyone_mask"]);
            flags = readuintval(rm["flags"]);
            group_id = rm["group_id"].AsUUID();
            group_mask = readuintval(rm["group_mask"]);
            hollow = rm["hollow"].AsInteger();
            material = rm["material"].AsInteger();
            next_owner_mask = readuintval(rm["next_owner_mask"]);
            // (A duplicate re-read of "hollow" in the original was removed; it was redundant.)
            p_code = rm["p_code"].AsInteger();
            path_begin = rm["path_begin"].AsInteger();
            path_curve = rm["path_curve"].AsInteger();
            path_end = rm["path_end"].AsInteger();
            path_radius_offset = rm["path_radius_offset"].AsInteger();
            path_revolutions = rm["path_revolutions"].AsInteger();
            path_scale_x = rm["path_scale_x"].AsInteger();
            path_scale_y = rm["path_scale_y"].AsInteger();
            path_shear_x = rm["path_shear_x"].AsInteger();
            path_shear_y = rm["path_shear_y"].AsInteger();
            path_skew = rm["path_skew"].AsInteger();
            path_taper_x = rm["path_taper_x"].AsInteger();
            path_taper_y = rm["path_taper_y"].AsInteger();
            path_twist = rm["path_twist"].AsInteger();
            path_twist_begin = rm["path_twist_begin"].AsInteger();
            profile_begin = rm["profile_begin"].AsInteger();
            profile_curve = rm["profile_curve"].AsInteger();
            profile_end = rm["profile_end"].AsInteger();
            ray_end_is_intersection = rm["ray_end_is_intersection"].AsBoolean();
            ray_target_id = rm["ray_target_id"].AsUUID();
            state = rm["state"].AsInteger();

            try
            {
                ray_end = ((OSDArray)rm["ray_end"]).AsVector3();
                ray_start = ((OSDArray)rm["ray_start"]).AsVector3();
                rotation = ((OSDArray)rm["rotation"]).AsQuaternion();
                scale = ((OSDArray)rm["scale"]).AsVector3();
            }
            catch (Exception)
            {
                responsedata["str_response_string"] = "RayEnd, RayStart, Scale or Rotation wasn't in the expected format";
                return responsedata;
            }
        }

        Vector3 pos = m_scene.GetNewRezLocation(ray_start, ray_end, ray_target_id, rotation, (bypass_raycast) ? (byte)1 : (byte)0, (ray_end_is_intersection) ? (byte)1 : (byte)0, true, scale, false, UUID.Zero);

        // Build the prim shape from the parsed fields.
        PrimitiveBaseShape pbs = PrimitiveBaseShape.CreateBox();
        pbs.PathBegin = (ushort)path_begin;
        pbs.PathCurve = (byte)path_curve;
        pbs.PathEnd = (ushort)path_end;
        pbs.PathRadiusOffset = (sbyte)path_radius_offset;
        pbs.PathRevolutions = (byte)path_revolutions;
        pbs.PathScaleX = (byte)path_scale_x;
        pbs.PathScaleY = (byte)path_scale_y;
        pbs.PathShearX = (byte)path_shear_x;
        pbs.PathShearY = (byte)path_shear_y;
        pbs.PathSkew = (sbyte)path_skew;
        pbs.PathTaperX = (sbyte)path_taper_x;
        pbs.PathTaperY = (sbyte)path_taper_y;
        pbs.PathTwist = (sbyte)path_twist;
        pbs.PathTwistBegin = (sbyte)path_twist_begin;
        pbs.HollowShape = (HollowShape)hollow;
        pbs.PCode = (byte)p_code;
        pbs.ProfileBegin = (ushort)profile_begin;
        pbs.ProfileCurve = (byte)profile_curve;
        pbs.ProfileEnd = (ushort)profile_end;
        pbs.Scale = scale;
        pbs.State = (byte)state;

        SceneObjectGroup obj = null;

        if (m_scene.Permissions.CanRezObject(1, avatar.UUID, UUID.Zero, pos, false))
        {
            // rez ON the ground, not IN the ground
            pos.Z += 0.25F;

            obj = m_scene.AddNewPrim(avatar.UUID, group_id, pos, rotation, pbs, false, true);
        }

        if (obj == null)
            return responsedata;

        SceneObjectPart rootpart = obj.RootPart;
        rootpart.Shape = pbs;
        rootpart.Flags |= (PrimFlags)flags;
        rootpart.EveryoneMask = everyone_mask;
        rootpart.GroupID = group_id;
        rootpart.GroupMask = group_mask;
        rootpart.NextOwnerMask = next_owner_mask;
        rootpart.Material = (byte)material;

        m_scene.PhysicsScene.AddPhysicsActorTaint(rootpart.PhysActor);

        responsedata["int_response_code"] = 200;
        responsedata["content_type"] = "text/plain";
        responsedata["str_response_string"] = String.Format("<llsd><map><key>local_id</key>{0}</map></llsd>", ConvertUintToBytes(obj.LocalId));

        return responsedata;
    }

    /// <summary>
    /// Reads an OSD value as a big-endian unsigned integer (the LLSD binary
    /// encoding is big-endian, so the bytes are reversed on little-endian hosts).
    /// </summary>
    private uint readuintval(OSD obj)
    {
        byte[] tmp = obj.AsBinary();
        if (BitConverter.IsLittleEndian)
            Array.Reverse(tmp);
        return Utils.BytesToUInt(tmp);
    }

    /// <summary>
    /// Serializes a uint as an LLSD base64 binary element in network (big-endian) order.
    /// </summary>
    private string ConvertUintToBytes(uint val)
    {
        byte[] resultbytes = Utils.UIntToBytes(val);
        if (BitConverter.IsLittleEndian)
            Array.Reverse(resultbytes);
        return String.Format("<binary encoding=\"base64\">{0}</binary>", Convert.ToBase64String(resultbytes));
    }
}
}
| |
// -----------------------------------------------------------------------------
// qf4net Library
//
// Port of Samek's Quantum Framework to C#. The implementation takes the liberty
// to depart from Miro Samek's code where the specifics of desktop systems
// (compared to embedded systems) seem to warrant a different approach.
// Please see accompanying documentation for details.
//
// Reference:
// Practical Statecharts in C/C++; Quantum Programming for Embedded Systems
// Author: Miro Samek, Ph.D.
// http://www.quantum-leaps.com/book.htm
//
// -----------------------------------------------------------------------------
//
// Copyright (C) 2003-2004, The qf4net Team
// All rights reserved
// Lead: Rainer Hessmer, Ph.D. (rainer@hessmer.org)
//
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Neither the name of the qf4net-Team, nor the names of its contributors
// may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// -----------------------------------------------------------------------------
using System;
namespace qf4net
{
/// <summary>
/// Base event type of the quantum framework: a signal (string), optional payload
/// data, a timestamp, and the activity id / transaction captured from the thread
/// that raised the event.
/// </summary>
[Serializable]
public class QEvent : IQEvent
{
    // Names of the thread-local data slots used to flow the current activity id
    // and pending transaction into newly created events.
    private const string QActivityIdSlotName = "QF4Net.QEvents.QActivityId";
    private const string QTransactionSlotName = "QF4Net.QEvents.QTransaction";

    private string m_QSignal;
    private object m_QData;
    private DateTime m_QSent;
    private string m_QKey;
    private string m_QActivityId;
    private IQFTransaction m_QTransaction;

    /// <summary>
    /// Constructs an event for the given signal.
    /// (The original comment called this a "default constructor", which it is not.)
    /// </summary>
    public QEvent(string qSignal)
    {
        Init(null, qSignal);
    }

    /// <summary>
    /// Constructs an event for the given signal, carrying <paramref name="qData"/> as payload.
    /// </summary>
    public QEvent(string qSignal, object qData)
    {
        Init(null, qSignal);
        m_QData = qData;
    }

    /// <summary>
    /// Constructs an event whose signal is qualified by its source ("source.signal").
    /// </summary>
    public QEvent(string qSource, string qSignal, object qData)
    {
        Init(qSource, qSignal);
        m_QData = qData;
    }

    /// <summary>
    /// As above, but with an explicit sent timestamp overriding the default DateTime.Now.
    /// </summary>
    public QEvent(string qSource, string qSignal, object qData, DateTime qSent)
    {
        Init(qSource, qSignal);
        m_QData = qData;
        m_QSent = qSent;
    }

    /// <summary>
    /// Constructs a keyed, source-qualified event.
    /// </summary>
    public QEvent(string qSource, string qKey, string qSignal, object qData)
    {
        Init(qSource, qKey, qSignal);
        m_QData = qData;
    }

    /// <summary>
    /// Constructs a keyed, source-qualified event with an explicit sent timestamp.
    /// </summary>
    public QEvent(string qSource, string qKey, string qSignal, object qData, DateTime qSent)
    {
        Init(qSource, qKey, qSignal);
        m_QData = qData;
        m_QSent = qSent;
    }

    /// <summary>
    /// Constructs an event from a remoting method call; the method name becomes the signal.
    /// </summary>
    public QEvent(System.Runtime.Remoting.Messaging.IMethodCallMessage msg)
    {
        Init(null, msg.MethodName);
        m_QData = msg;
    }

    /// <summary>
    /// Constructs an event from a remoting method return; the method name becomes the signal.
    /// </summary>
    public QEvent(System.Runtime.Remoting.Messaging.IMethodReturnMessage ret)
    {
        Init(null, ret.MethodName);
        m_QData = ret;
    }

    protected void Init(string qSource, string qSignal)
    {
        Init(qSource, null, qSignal);
    }

    /// <summary>
    /// Shared initialization: builds the (optionally source-qualified) signal,
    /// stamps the event with the current time, and captures the raising thread's
    /// activity id and pending transaction.
    /// </summary>
    protected void Init(string qSource, string qKey, string qSignal)
    {
        if (qSource != null)
        {
            m_QSignal = qSource + "." + qSignal;
        }
        else
        {
            m_QSignal = qSignal;
        }
        m_QKey = qKey;
        m_QSent = DateTime.Now;
        LoadActivityId();
        LoadPendingTransaction();
    }

    /// <summary>
    /// Returns the activity id stored on the current thread, or null if none is set.
    /// </summary>
    public static string GetActivityId()
    {
        LocalDataStoreSlot slot = System.Threading.Thread.GetNamedDataSlot(QActivityIdSlotName);
        object value = System.Threading.Thread.GetData(slot);
        if (value == null)
        {
            return null;
        }
        return value.ToString();
    }

    /// <summary>
    /// Returns the transaction stored on the current thread, or null if none is set.
    /// </summary>
    public static IQFTransaction GetThreadTransaction()
    {
        LocalDataStoreSlot slot = System.Threading.Thread.GetNamedDataSlot(QTransactionSlotName);
        object value = System.Threading.Thread.GetData(slot);
        if (value == null)
        {
            return null;
        }
        return value as IQFTransaction;
    }

    /// <summary>
    /// Stores the given transaction on the current thread so subsequently created
    /// events capture it.
    /// </summary>
    public static void SetThreadTransaction(IQFTransaction transaction)
    {
        LocalDataStoreSlot slot = System.Threading.Thread.GetNamedDataSlot(QTransactionSlotName);
        System.Threading.Thread.SetData(slot, transaction);
    }

    private void LoadPendingTransaction()
    {
        m_QTransaction = GetThreadTransaction();
    }

    private void LoadActivityId()
    {
        // Reuse the thread's activity id when present; otherwise start a new one.
        // (The original called ToString() on an already-string value; removed.)
        string current = GetActivityId();
        if (current == null)
        {
            m_QActivityId = Guid.NewGuid().ToString();
        }
        else
        {
            m_QActivityId = current;
        }
    }

    /// <summary>
    /// Stores this event's activity id on the current thread.
    /// </summary>
    public void ApplyActivityId()
    {
        LocalDataStoreSlot slot = System.Threading.Thread.GetNamedDataSlot(QActivityIdSlotName);
        System.Threading.Thread.SetData(slot, m_QActivityId);
    }

    /// <summary>
    /// Clears the activity id from the current thread.
    /// </summary>
    public void ClearActivityId()
    {
        LocalDataStoreSlot slot = System.Threading.Thread.GetNamedDataSlot(QActivityIdSlotName);
        System.Threading.Thread.SetData(slot, null);
    }

    /// <summary>
    /// Commits the transaction captured at construction time, if any.
    /// </summary>
    public void Commit()
    {
        if (null != m_QTransaction)
        {
            m_QTransaction.Commit();
        }
    }

    /// <summary>
    /// Aborts the transaction captured at construction time, if any.
    /// </summary>
    public void Abort()
    {
        if (null != m_QTransaction)
        {
            m_QTransaction.Abort();
        }
    }

    /// <summary>
    /// The identifier of the <see cref="QEvent"/> type.
    /// </summary>
    public string QSignal
    {
        get { return m_QSignal; }
    }

    /// <summary>
    /// The optional payload carried by this event.
    /// </summary>
    public object QData
    {
        get { return m_QData; }
    }

    /// <summary>
    /// The time the event was created (or the explicit value passed to the constructor).
    /// </summary>
    public DateTime QSent
    {
        get { return m_QSent; }
    }

    public string QKey
    {
        get { return m_QKey; }
    }

    public string QActivityId
    {
        get { return m_QActivityId; }
    }

    /// <summary>
    /// The QSignal in string form. It allows for simpler debugging and logging.
    /// </summary>
    /// <returns>The signal as string.</returns>
    public override string ToString()
    {
        switch (QSignal)
        {
            case QSignals.Init: return "Init";
            case QSignals.Entry: return "Entry";
            case QSignals.Exit: return "Exit";
            // QSignal is already a string; the redundant ToString() call (which
            // would have thrown on a null signal) has been removed.
            default: return QSignal;
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.InteropServices;
using System.Security.Claims;
using System.Security.Principal;
using System.Text.Encodings.Web;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections.Features;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Primitives;
using Microsoft.Net.Http.Headers;
namespace Microsoft.AspNetCore.Authentication.Negotiate
{
/// <summary>
/// Authenticates requests using Negotiate, Kerberos, or NTLM.
/// </summary>
public class NegotiateHandler : AuthenticationHandler<NegotiateOptions>, IAuthenticationRequestHandler
{
// Key under which the per-connection AuthPersistence instance is stored in the connection items.
private const string AuthPersistenceKey = nameof(AuthPersistence);
private const string NegotiateVerb = "Negotiate";
// "Negotiate " — prefix of the Authorization / WWW-Authenticate header values handled here.
private const string AuthHeaderPrefix = NegotiateVerb + " ";
// Set once HandleRequestAsync has run, so a re-executed request (e.g. via an
// exception handler) is not processed a second time.
private bool _requestProcessed;
private INegotiateState? _negotiateState;
/// <summary>
/// Creates a new <see cref="NegotiateHandler"/>.
/// </summary>
/// <param name="options">Monitor for the options instance used by this handler.</param>
/// <param name="logger">The logger factory.</param>
/// <param name="encoder">The URL encoder.</param>
/// <param name="clock">The system clock.</param>
public NegotiateHandler(IOptionsMonitor<NegotiateOptions> options, ILoggerFactory logger, UrlEncoder encoder, ISystemClock clock)
    : base(options, logger, encoder, clock)
{ }
/// <summary>
/// The handler calls methods on the events which give the application control at certain
/// points where processing is occurring. If it is not provided a default instance is
/// supplied which does nothing when the methods are called.
/// </summary>
protected new NegotiateEvents Events
{
    get { return (NegotiateEvents)base.Events!; }
    set { base.Events = value; }
}
/// <summary>
/// Creates the default events type.
/// </summary>
/// <returns>A completed task producing a fresh <see cref="NegotiateEvents"/> instance.</returns>
protected override Task<object> CreateEventsAsync()
{
    return Task.FromResult<object>(new NegotiateEvents());
}
private bool IsSupportedProtocol => HttpProtocol.IsHttp11(Request.Protocol) || HttpProtocol.IsHttp10(Request.Protocol);
/// <summary>
/// Intercepts incomplete Negotiate authentication handshakes and continues or completes them.
/// </summary>
/// <returns><see langword="true" /> if a response was generated, otherwise <see langword="false"/>.</returns>
public async Task<bool> HandleRequestAsync()
{
    AuthPersistence? persistence = null;
    bool authFailedEventCalled = false;
    try
    {
        if (_requestProcessed || Options.DeferToServer)
        {
            // This request was already processed but something is re-executing it like an exception handler.
            // Don't re-run because we could corrupt the connection state, e.g. if this was a stage2 NTLM request
            // that we've already completed the handshake for.
            // Or we're in deferral mode where we let the server handle the authentication.
            return false;
        }

        _requestProcessed = true;

        if (!IsSupportedProtocol)
        {
            // HTTP/1.0 and HTTP/1.1 are supported. Do not throw because this may be running on a server that supports
            // additional protocols.
            return false;
        }

        // Handshake state is persisted per connection; pick up where a prior request on this connection left off.
        // NOTE(review): the indexer is expected to return null (rather than throw) for a missing key —
        // the `?.` on the following line relies on that; confirm against the server's IConnectionItemsFeature.
        var connectionItems = GetConnectionItems();
        persistence = (AuthPersistence)connectionItems[AuthPersistenceKey]!;
        _negotiateState = persistence?.State;

        var authorizationHeader = Request.Headers.Authorization;
        if (StringValues.IsNullOrEmpty(authorizationHeader))
        {
            if (_negotiateState?.IsCompleted == false)
            {
                throw new InvalidOperationException("An anonymous request was received in between authentication handshake requests.");
            }
            // No credentials offered; let the request proceed anonymously.
            return false;
        }

        var authorization = authorizationHeader.ToString();
        string? token = null;
        if (authorization.StartsWith(AuthHeaderPrefix, StringComparison.OrdinalIgnoreCase))
        {
            token = authorization.Substring(AuthHeaderPrefix.Length).Trim();
        }
        else
        {
            if (_negotiateState?.IsCompleted == false)
            {
                throw new InvalidOperationException("Non-negotiate request was received in between authentication handshake requests.");
            }
            // Credentials belong to a different scheme (e.g. Bearer); not ours to handle.
            return false;
        }

        // WinHttpHandler re-authenticates an existing connection if it gets another challenge on subsequent requests.
        if (_negotiateState?.IsCompleted == true)
        {
            Logger.Reauthenticating();
            _negotiateState.Dispose();
            _negotiateState = null;
            if (persistence != null)
            {
                persistence.State = null;
            }
        }

        _negotiateState ??= Options.StateFactory.CreateInstance();

        var outgoing = _negotiateState.GetOutgoingBlob(token, out var errorType, out var exception);
        if (errorType != BlobErrorType.None)
        {
            // The incoming blob was rejected; tear down the handshake state so a retry can start fresh.
            Debug.Assert(exception != null);

            Logger.NegotiateError(errorType.ToString());
            _negotiateState.Dispose();
            _negotiateState = null;
            if (persistence?.State != null)
            {
                persistence.State.Dispose();
                persistence.State = null;
            }

            if (errorType == BlobErrorType.CredentialError)
            {
                Logger.CredentialError(exception);
                authFailedEventCalled = true; // Could throw, and we don't want to double trigger the event.
                var result = await InvokeAuthenticateFailedEvent(exception);
                return result ?? false; // Default to skipping the handler, let AuthZ generate a new 401
            }
            else if (errorType == BlobErrorType.ClientError)
            {
                Logger.ClientError(exception);
                authFailedEventCalled = true; // Could throw, and we don't want to double trigger the event.
                var result = await InvokeAuthenticateFailedEvent(exception);
                if (result.HasValue)
                {
                    return result.Value;
                }
                Context.Response.StatusCode = StatusCodes.Status400BadRequest;
                return true; // Default to terminating request
            }

            throw exception;
        }

        if (!_negotiateState.IsCompleted)
        {
            persistence ??= EstablishConnectionPersistence(connectionItems);
            // Save the state long enough to complete the multi-stage handshake.
            // We'll remove it once complete if !PersistNtlm/KerberosCredentials.
            persistence.State = _negotiateState;
            Logger.IncompleteNegotiateChallenge();
            // Challenge the client to continue the handshake on this same connection.
            Response.StatusCode = StatusCodes.Status401Unauthorized;
            Response.Headers.Append(HeaderNames.WWWAuthenticate, AuthHeaderPrefix + outgoing);
            return true;
        }

        Logger.NegotiateComplete();

        // There can be a final blob of data we need to send to the client, but let the request execute as normal.
        if (!string.IsNullOrEmpty(outgoing))
        {
            Response.OnStarting(() =>
            {
                // Only include it if the response ultimately succeeds. This avoids adding it twice if Challenge is called again.
                if (Response.StatusCode < StatusCodes.Status400BadRequest)
                {
                    Response.Headers.Append(HeaderNames.WWWAuthenticate, AuthHeaderPrefix + outgoing);
                }
                return Task.CompletedTask;
            });
        }

        // Deal with connection credential persistence.
        if (_negotiateState.Protocol == "NTLM" && !Options.PersistNtlmCredentials)
        {
            // NTLM was already put in the persistence cache on the prior request so we could complete the handshake.
            // Take it out if we don't want it to persist.
            Debug.Assert(object.ReferenceEquals(persistence?.State, _negotiateState),
                "NTLM is a two stage process, it must have already been in the cache for the handshake to succeed.");
            Logger.DisablingCredentialPersistence(_negotiateState.Protocol);
            persistence.State = null;
            // The state is no longer cached on the connection, so dispose it with the response.
            Response.RegisterForDispose(_negotiateState);
        }
        else if (_negotiateState.Protocol == "Kerberos")
        {
            // Kerberos can require one or two stage handshakes
            if (Options.PersistKerberosCredentials)
            {
                Logger.EnablingCredentialPersistence();
                persistence ??= EstablishConnectionPersistence(connectionItems);
                persistence.State = _negotiateState;
            }
            else
            {
                if (persistence?.State != null)
                {
                    Logger.DisablingCredentialPersistence(_negotiateState.Protocol);
                    persistence.State = null;
                }
                Response.RegisterForDispose(_negotiateState);
            }
        }

        // Note we run the Authenticated event in HandleAuthenticateAsync so it is per-request rather than per connection.
    }
    catch (Exception ex)
    {
        if (authFailedEventCalled)
        {
            // The failure event already ran (and threw); don't invoke it a second time.
            throw;
        }

        Logger.ExceptionProcessingAuth(ex);

        // Clear state so it's possible to retry on the same connection.
        _negotiateState?.Dispose();
        _negotiateState = null;
        if (persistence?.State != null)
        {
            persistence.State.Dispose();
            persistence.State = null;
        }

        var result = await InvokeAuthenticateFailedEvent(ex);
        if (result.HasValue)
        {
            return result.Value;
        }

        throw;
    }

    // Handshake completed; let the request continue through the pipeline.
    return false;
}
/// <summary>
/// Raises the AuthenticationFailed event and translates the event's result into a
/// request-handling decision.
/// </summary>
/// <param name="ex">The exception that caused authentication to fail.</param>
/// <returns>
/// <see langword="true"/> when the event handled the request, <see langword="false"/>
/// when it chose to skip the handler, or <see langword="null"/> when the event set no result.
/// </returns>
private async Task<bool?> InvokeAuthenticateFailedEvent(Exception ex)
{
    var failureContext = new AuthenticationFailedContext(Context, Scheme, Options)
    {
        Exception = ex
    };
    await Events.AuthenticationFailed(failureContext);

    var result = failureContext.Result;
    if (result == null)
    {
        // The event did not decide; leave the decision to the caller.
        return null;
    }
    if (result.Handled)
    {
        return true;
    }
    if (result.Skipped)
    {
        return false;
    }
    if (result.Failure != null)
    {
        throw new Exception("An error was returned from the AuthenticationFailed event.", result.Failure);
    }
    return null;
}
/// <summary>
/// Checks if the current request is authenticated and returns the user.
/// A principal is only produced once the Negotiate handshake on this connection has completed.
/// </summary>
/// <returns>
/// The authentication result: NoResult when no completed handshake exists, otherwise a
/// success ticket (or whatever the Authenticated/RetrieveLdapClaims events substitute).
/// </returns>
protected override async Task<AuthenticateResult> HandleAuthenticateAsync()
{
    if (!_requestProcessed)
    {
        throw new InvalidOperationException("AuthenticateAsync must not be called before the UseAuthentication middleware runs.");
    }

    if (!IsSupportedProtocol)
    {
        // Not supported. We don't throw because Negotiate may be set as the default auth
        // handler on a server that's running HTTP/1 and HTTP/2. We'll challenge HTTP/2 requests
        // that require auth and they'll downgrade to HTTP/1.1.
        Logger.ProtocolNotSupported(Request.Protocol);
        return AuthenticateResult.NoResult();
    }

    if (_negotiateState == null)
    {
        // No handshake was started on this connection (e.g. an anonymous request).
        return AuthenticateResult.NoResult();
    }

    if (!_negotiateState.IsCompleted)
    {
        // This case should have been rejected by HandleRequestAsync
        throw new InvalidOperationException("Attempting to use an incomplete authentication context.");
    }

    // Make a new copy of the user for each request, they are mutable objects and
    // things like ClaimsTransformation run per request.
    var identity = _negotiateState.GetIdentity();
    ClaimsPrincipal user;
    if (OperatingSystem.IsWindows() && identity is WindowsIdentity winIdentity)
    {
        // WindowsIdentity wraps a native token; tie its lifetime to the response.
        user = new WindowsPrincipal(winIdentity);
        Response.RegisterForDispose(winIdentity);
    }
    else
    {
        user = new ClaimsPrincipal(new ClaimsIdentity(identity));
    }

    AuthenticatedContext authenticatedContext;

    if (Options.LdapSettings.EnableLdapClaimResolution)
    {
        // Optionally enrich the principal with claims resolved from LDAP.
        var ldapContext = new LdapContext(Context, Scheme, Options, Options.LdapSettings)
        {
            Principal = user
        };

        await Events.RetrieveLdapClaims(ldapContext);

        if (ldapContext.Result != null)
        {
            // The event short-circuited claim resolution with its own result.
            return ldapContext.Result;
        }

        await LdapAdapter.RetrieveClaimsAsync(ldapContext.LdapSettings, (ldapContext.Principal.Identity as ClaimsIdentity)!, Logger);

        authenticatedContext = new AuthenticatedContext(Context, Scheme, Options)
        {
            Principal = ldapContext.Principal
        };
    }
    else
    {
        authenticatedContext = new AuthenticatedContext(Context, Scheme, Options)
        {
            Principal = user
        };
    }

    // The Authenticated event may replace the result entirely.
    await Events.Authenticated(authenticatedContext);

    if (authenticatedContext.Result != null)
    {
        return authenticatedContext.Result;
    }

    var ticket = new AuthenticationTicket(authenticatedContext.Principal, authenticatedContext.Properties, Scheme.Name);
    return AuthenticateResult.Success(ticket);
}
/// <summary>
/// Issues a 401 response carrying a WWW-Authenticate: Negotiate challenge header,
/// unless the Challenge event handles the challenge itself.
/// </summary>
/// <param name="properties">The authentication properties for the challenge.</param>
protected override async Task HandleChallengeAsync(AuthenticationProperties properties)
{
    // We allow issuing a challenge from an HTTP/2 request. Browser clients will gracefully downgrade to HTTP/1.1.
    // SocketHttpHandler will not downgrade (https://github.com/dotnet/corefx/issues/35195), but WinHttpHandler will.
    var challengeContext = new ChallengeContext(Context, Scheme, Options, properties);
    await Events.Challenge(challengeContext);
    if (challengeContext.Handled)
    {
        // The event produced the challenge response itself; nothing left to do.
        return;
    }

    Response.StatusCode = StatusCodes.Status401Unauthorized;
    Response.Headers.Append(HeaderNames.WWWAuthenticate, NegotiateVerb);
    Logger.ChallengeNegotiate();
}
/// <summary>
/// Creates the per-connection persistence entry used to carry negotiate state across
/// requests, stores it in the connection item dictionary, and registers it for
/// disposal when the connection ends.
/// </summary>
/// <param name="items">The connection-scoped item dictionary.</param>
/// <returns>The newly created persistence entry.</returns>
private AuthPersistence EstablishConnectionPersistence(IDictionary<object, object?> items)
{
    Debug.Assert(!items.ContainsKey(AuthPersistenceKey), "This should only be registered once per connection");
    var connectionState = new AuthPersistence();
    RegisterForConnectionDispose(connectionState);
    items[AuthPersistenceKey] = connectionState;
    return connectionState;
}
/// <summary>
/// Retrieves the connection-scoped item dictionary used to persist auth state.
/// </summary>
/// <exception cref="NotSupportedException">
/// Thrown when the server does not expose <see cref="IConnectionItemsFeature"/>.
/// </exception>
private IDictionary<object, object?> GetConnectionItems()
{
    var items = Context.Features.Get<IConnectionItemsFeature>()?.Items;
    if (items == null)
    {
        throw new NotSupportedException($"Negotiate authentication requires a server that supports {nameof(IConnectionItemsFeature)} like Kestrel.");
    }
    return items;
}
/// <summary>
/// Schedules the given auth state for disposal when the underlying connection completes.
/// </summary>
/// <param name="authState">The state object to dispose with the connection.</param>
/// <exception cref="NotSupportedException">
/// Thrown when the server does not expose <see cref="IConnectionCompleteFeature"/>.
/// </exception>
private void RegisterForConnectionDispose(IDisposable authState)
{
    var completeFeature = Context.Features.Get<IConnectionCompleteFeature>();
    if (completeFeature == null)
    {
        throw new NotSupportedException($"Negotiate authentication requires a server that supports {nameof(IConnectionCompleteFeature)} like Kestrel.");
    }
    completeFeature.OnCompleted(DisposeState, authState);
}
/// <summary>
/// Connection-completion callback: disposes the persisted auth state.
/// </summary>
/// <param name="state">The <see cref="IDisposable"/> registered with the connection.</param>
private static Task DisposeState(object state)
{
    var disposable = (IDisposable)state;
    disposable.Dispose();
    return Task.CompletedTask;
}
// This allows us to have one disposal registration per connection and limits churn on the Items collection.
private class AuthPersistence : IDisposable
{
    // The in-progress or completed negotiate handshake state for this connection, if any.
    internal INegotiateState? State { get; set; }

    public void Dispose()
    {
        var state = State;
        if (state != null)
        {
            state.Dispose();
        }
    }
}
}
}
| |
using Microsoft.Xna.Framework;
namespace MonoVarmint.Widgets
{
//--------------------------------------------------------------------------------------
/// <summary>
/// VarmintWidgetScrollView
///
/// You place the objects where you want using their offsets. The view will
/// automatically set the bounds based on the objects you add and their margins
///
/// </summary>
//--------------------------------------------------------------------------------------
[VarmintWidgetShortName("ScrollView")]
public class VarmintWidgetScrollView : VarmintWidget
{
/// <summary>
/// Upper left corner of the scroll view in virtual space
/// </summary>
public Vector2 ScrollOffset { get { return _innerContent.Offset; } set { _innerContent.Offset = value; } }
/// <summary>
/// AbsoluteOffset
/// </summary>
public override Vector2 AbsoluteOffset
{
get
{
if (Parent == null) return Offset;
else return Parent.AbsoluteOffset + Offset;
}
}
PlainFormatter _innerContent;
//--------------------------------------------------------------------------------------
/// <summary>
/// ctor
/// </summary>
//--------------------------------------------------------------------------------------
public VarmintWidgetScrollView()
{
ClipToBounds = true;
this.SetCustomRender((gt, w) => {
ScrollBy(_momentum);
_momentum *= .9f;
Renderer.DrawBox(AbsoluteOffset, Size, RenderBackgroundColor);
});
this.OnDrag += VarmintWidgetScrollView_OnDrag;
this.OnFlick += VarmintWidgetScrollView_OnFlick;
_innerContent = new PlainFormatter();
_innerContent.Renderer = new NullRenderer();
_innerContent.Name = "InnerFrame_" + this.Name;
base.AddChild(_innerContent);
}
Vector2 _momentum;
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
private EventHandledState VarmintWidgetScrollView_OnFlick(VarmintFlickData flick)
{
_momentum = flick.Delta * .2f;
return EventHandledState.Handled;
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
protected override void UpdateFormatting_Internal(Vector2 updatedSize, bool updateChildren = true)
{
// child formatting is static - the scrollview does not update it
_innerContent.RecalculateExtremes();
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
public override void AddChild(VarmintWidget widget)
{
_innerContent.AddChild(widget);
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
public override void RemoveChild(VarmintWidget childToRemove)
{
_innerContent.RemoveChild(childToRemove);
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
public override void ClearChildren()
{
_innerContent.ClearChildren();
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
private void KeepInBounds(Vector2 previousDelta)
{
var correction = Vector2.Zero;
if(previousDelta.X != 0)
{
if (_innerContent.ExtremeRight + ScrollOffset.X < Size.X)
{
correction.X = Size.X - (_innerContent.ExtremeRight + ScrollOffset.X);
}
else if (_innerContent.ExtremeLeft + ScrollOffset.X > 0)
{
correction.X = -(_innerContent.ExtremeLeft + ScrollOffset.X);
}
}
if (previousDelta.Y != 0)
{
if (_innerContent.ExtremeTop + ScrollOffset.Y > 0)
{
correction.Y = -(_innerContent.ExtremeTop + ScrollOffset.Y);
}
else if (_innerContent.ExtremeBottom + ScrollOffset.Y < Size.Y)
{
correction.Y = Size.Y - (_innerContent.ExtremeBottom + ScrollOffset.Y);
}
}
ScrollOffset += correction;
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
private EventHandledState VarmintWidgetScrollView_OnDrag(VarmintWidget source, Vector2 location, Vector2 delta)
{
ScrollBy(delta);
return EventHandledState.Handled;
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
void ScrollBy(Vector2 delta)
{
if (delta.Length() == 0) return;
if (_innerContent.ExtremeLeft + ScrollOffset.X >= 0
&& _innerContent.ExtremeRight + ScrollOffset.X <= Size.X)
{
// Don't try to scroll if we are inside the display area
delta.X = 0;
}
if (_innerContent.ExtremeTop + ScrollOffset.Y >= 0
&& _innerContent.ExtremeBottom + ScrollOffset.Y <= Size.Y)
{
// Don't try to scroll if we are inside the display area
delta.Y = 0;
}
ScrollOffset += delta;
KeepInBounds(delta);
}
//--------------------------------------------------------------------------------------
//
//--------------------------------------------------------------------------------------
class PlainFormatter : VarmintWidget
{
public float ExtremeLeft { get; set; }
public float ExtremeTop { get; set; }
public float ExtremeRight { get; set; }
public float ExtremeBottom { get; set; }
public PlainFormatter()
{
this.SetCustomRender((gt, w) => { });
}
internal void RecalculateExtremes()
{
ExtremeLeft = float.MaxValue;
ExtremeTop = float.MaxValue;
ExtremeRight = float.MinValue;
ExtremeBottom = float.MinValue;
foreach(var child in Children)
{
var left = child.Offset.X - (child.Margin.Left ?? 0);
var top = child.Offset.Y - (child.Margin.Top ?? 0);
var right = child.Offset.X + child.Size.X + (child.Margin.Right ?? 0);
var bottom = child.Offset.Y + child.Size.Y +( child.Margin.Bottom ?? 0);
if (left < ExtremeLeft) ExtremeLeft = left;
if (top < ExtremeTop) ExtremeTop = top;
if (right > ExtremeRight) ExtremeRight = right;
if (bottom > ExtremeBottom) ExtremeBottom = bottom;
}
Size = new Vector2(ExtremeRight - ExtremeLeft, ExtremeBottom - ExtremeTop);
}
public override void AddChild(VarmintWidget widget)
{
base.AddChild(widget);
RecalculateExtremes();
}
public override void RemoveChild(VarmintWidget childToRemove)
{
base.RemoveChild(childToRemove);
RecalculateExtremes();
}
protected override void UpdateFormatting_Internal(Vector2 updatedSize, bool updateChildren = true) { }
}
}
}
| |
using System.Reflection;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Input;
using System.Windows.Media;
using MyWmp.Converters;
using MyWmp.ViewModel;
namespace MyWmp.View
{
/// <summary>
/// Interaction logic for LibraryLayout.xaml. Code-behind that forwards UI events
/// (filtering, selection, playback and playlist editing) to the <see cref="LibraryViewModel"/>.
/// </summary>
public partial class LibraryLayout
{
    private readonly LibraryViewModel libraryViewModel_;

    /// <summary>
    /// Initializes the view and binds it to a fresh <see cref="LibraryViewModel"/>.
    /// </summary>
    public LibraryLayout()
    {
        InitializeComponent();
        this.libraryViewModel_ = new LibraryViewModel();
        this.DataContext = this.libraryViewModel_;
    }

    /// <summary>
    /// Resolves a XAML-generated control field on this partial class by name via reflection.
    /// Centralizes the repeated GetField/GetValue pattern used by OnTextChanged.
    /// </summary>
    /// <typeparam name="T">The expected control type.</typeparam>
    /// <param name="fieldName">The name of the generated field.</param>
    private T GetControl<T>(string fieldName) where T : class
    {
        var field = GetType().GetField(fieldName,
            BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Public);
        return (T)field.GetValue(this);
    }

    /// <summary>
    /// Re-applies the filter for the criterion whose value TextBox changed. The TextBox Tag
    /// encodes the library ('M'usic, 'V'ideo, 'P'icture, 'L' = playlist) followed by the
    /// criterion name used to locate the companion Filter/Group/Value/GroupBox controls.
    /// </summary>
    private void OnTextChanged(object sender, TextChangedEventArgs e)
    {
        var senderTag = (string)((TextBox)sender).Tag;
        string library;
        switch (senderTag[0])
        {
            case 'M':
                library = "Music";
                break;
            case 'V':
                library = "Video";
                break;
            case 'P':
                library = "Picture";
                break;
            case 'L':
                library = "Playlist";
                break;
            default:
                library = "";
                break;
        }
        var tag = senderTag.Substring(1);

        // Resolve each companion control once instead of repeating the reflection lookups
        // (the original performed up to seven identical GetField calls per keystroke).
        var filterBox = GetControl<CheckBox>(library + tag + "Filter");
        var groupCheckBox = GetControl<CheckBox>(library + tag + "Group");

        // Only apply when both checkboxes have a definite state and filtering is enabled.
        if (filterBox.IsChecked.HasValue && groupCheckBox.IsChecked.HasValue && filterBox.IsChecked.Value)
        {
            this.libraryViewModel_.FilterCommand.Execute(new LibraryConverterParam
            {
                Sender = tag,
                Filter = filterBox.IsChecked.Value,
                Group = groupCheckBox.IsChecked.Value,
                Value = GetControl<TextBox>(library + tag + "Value").Text,
                Library = (string)GetControl<GroupBox>(library + tag + "GroupBox").Tag
            });
        }
    }

    /// <summary>Notifies the view model that another playlist was selected.</summary>
    private void ListPlaylist_OnSelectionChanged(object sender, SelectionChangedEventArgs e)
    {
        libraryViewModel_.OnPlaylistChanged(ListPlaylist.SelectedIndex);
    }

    /// <summary>Plays the double-clicked item of the selected playlist.</summary>
    private void Playlists_OnMouseDoubleClick(object sender, MouseButtonEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryPlaylist(ListPlaylist.SelectedIndex, PlaylistDatagrid.SelectedIndex);
    }

    /// <summary>Plays the double-clicked video.</summary>
    private void Videos_OnMouseDoubleClick(object sender, MouseButtonEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryVideos(VideoDatagrid.SelectedIndex);
    }

    /// <summary>Plays the double-clicked music track.</summary>
    private void Musics_OnMouseDoubleClick(object sender, MouseButtonEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryMusics(MusicDatagrid.SelectedIndex);
    }

    /// <summary>Opens the double-clicked picture.</summary>
    private void Pictures_OnMouseDoubleClick(object sender, MouseButtonEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryPictures(PictureDatagrid.SelectedIndex);
    }

    /// <summary>Deletes the selected playlist.</summary>
    private void Playlist_Delete_OnClick(object sender, RoutedEventArgs e)
    {
        ((LibraryViewModel)DataContext).DeletePlaylist(ListPlaylist.SelectedIndex);
    }

    /// <summary>Starts playing the selected playlist from its first item.</summary>
    private void Playlist_Play_OnClick(object sender, RoutedEventArgs e)
    {
        ((LibraryViewModel)DataContext).OnPlayLibraryPlaylist(ListPlaylist.SelectedIndex, 0);
    }

    /// <summary>
    /// Reveals and focuses the inline rename TextBox that lives next to the clicked
    /// playlist label (located by walking the context menu's placement target).
    /// </summary>
    private void Playlist_Rename_OnClick(object sender, RoutedEventArgs e)
    {
        var textbox =
            (TextBox)
            ((Grid)((Label)((ContextMenu)((MenuItem)sender).Parent).PlacementTarget).Parent).Children[1];
        textbox.Visibility = Visibility.Visible;
        textbox.SelectAll();
        textbox.Focus();
    }

    /// <summary>Commits the playlist rename when Enter is pressed.</summary>
    private void UIElement_OnKeyDown(object sender, KeyEventArgs e)
    {
        if (e.Key == Key.Enter)
        {
            ((TextBox)sender).Visibility = Visibility.Collapsed;
            libraryViewModel_.OnSetName(ListPlaylist.SelectedIndex, ((TextBox)sender).Text);
        }
    }

    /// <summary>Hides the rename TextBox when it loses focus (rename is abandoned).</summary>
    private void UIElement_OnLostFocus(object sender, RoutedEventArgs e)
    {
        ((TextBox)sender).Visibility = Visibility.Collapsed;
    }

    /// <summary>Plays the selected item of the selected playlist.</summary>
    private void LibraryPlaylist_Play_OnClick(object sender, RoutedEventArgs e)
    {
        ((LibraryViewModel)DataContext).OnPlayLibraryPlaylist(ListPlaylist.SelectedIndex, PlaylistDatagrid.SelectedIndex);
    }

    /// <summary>Removes the selected item from the selected playlist.</summary>
    private void LibraryPlaylist_Remove_OnClick(object sender, RoutedEventArgs e)
    {
        ((LibraryViewModel)DataContext).DeleteItemFromPlaylist(ListPlaylist.SelectedIndex, PlaylistDatagrid.SelectedIndex);
    }

    /// <summary>
    /// Moves a DataGrid selection for the Up/Down keys, wrapping around at either end.
    /// Other keys are ignored.
    /// </summary>
    private static void MoveSelection(DataGrid datagrid, Key key)
    {
        switch (key)
        {
            case Key.Down:
                datagrid.SelectedIndex = (datagrid.SelectedIndex + 1) % datagrid.Items.Count;
                break;
            case Key.Up:
                --datagrid.SelectedIndex;
                if (datagrid.SelectedIndex < 0)
                    datagrid.SelectedIndex = datagrid.Items.Count - 1;
                break;
        }
    }

    /// <summary>Keyboard handling for the playlist grid: Delete removes, Enter plays, Up/Down navigate.</summary>
    private void DataGridPlaylist_OnKeyDown(object sender, KeyEventArgs e)
    {
        e.Handled = true;
        var viewModel = (LibraryViewModel)DataContext;
        switch (e.Key)
        {
            case Key.Delete:
                viewModel.DeleteItemFromPlaylist(ListPlaylist.SelectedIndex, PlaylistDatagrid.SelectedIndex);
                break;
            case Key.Enter:
                viewModel.OnPlayLibraryPlaylist(ListPlaylist.SelectedIndex, PlaylistDatagrid.SelectedIndex);
                break;
            default:
                MoveSelection(PlaylistDatagrid, e.Key);
                break;
        }
    }

    /// <summary>Keyboard handling for the video grid: Enter plays, Up/Down navigate.</summary>
    private void DataGridVideos_OnPreviewKeyDown(object sender, KeyEventArgs e)
    {
        e.Handled = true;
        var viewModel = (LibraryViewModel)DataContext;
        var datagrid = (DataGrid)sender;
        if (e.Key == Key.Enter)
            viewModel.OnPlayLibraryVideos(datagrid.SelectedIndex);
        else
            MoveSelection(datagrid, e.Key);
    }

    /// <summary>Keyboard handling for the music grid: Enter plays, Up/Down navigate.</summary>
    private void MusicDatagrid_OnPreviewKeyDown(object sender, KeyEventArgs e)
    {
        e.Handled = true;
        var viewModel = (LibraryViewModel)DataContext;
        var datagrid = (DataGrid)sender;
        if (e.Key == Key.Enter)
            viewModel.OnPlayLibraryMusics(datagrid.SelectedIndex);
        else
            MoveSelection(datagrid, e.Key);
    }

    /// <summary>Opens the selected picture in the player.</summary>
    private void LibraryPicture_OpenInPlayer_OnClick(object sender, RoutedEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryPictures(PictureDatagrid.SelectedIndex);
    }

    /// <summary>Keyboard handling for the picture grid: Enter opens, Up/Down navigate.</summary>
    private void PictureDatagrid_OnPreviewKeyDown(object sender, KeyEventArgs e)
    {
        e.Handled = true;
        var viewModel = (LibraryViewModel)DataContext;
        var datagrid = (DataGrid)sender;
        if (e.Key == Key.Enter)
            viewModel.OnPlayLibraryPictures(datagrid.SelectedIndex);
        else
            MoveSelection(datagrid, e.Key);
    }

    /// <summary>Plays the selected video.</summary>
    private void LibraryVideo_Play_OnClick(object sender, RoutedEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryVideos(VideoDatagrid.SelectedIndex);
    }

    /// <summary>Plays the selected music track.</summary>
    private void LibraryMusic_Play_OnClick(object sender, RoutedEventArgs e)
    {
        libraryViewModel_.OnPlayLibraryMusics(MusicDatagrid.SelectedIndex);
    }

    /// <summary>
    /// Walks up the visual tree from the row a context menu was opened on
    /// to find the DataGrid that owns it (null if none is found).
    /// </summary>
    private static DataGrid FindOwningDataGrid(DependencyObject element)
    {
        var current = VisualTreeHelper.GetParent(element);
        while (current != null && current.GetType() != typeof(DataGrid))
            current = VisualTreeHelper.GetParent(current);
        return (DataGrid)current;
    }

    /// <summary>
    /// Rebuilds the "add to playlist" submenu with one entry per existing playlist.
    /// Clicking an entry adds the grid's selected item to that playlist.
    /// </summary>
    private void MenuItem_OnSubmenuOpened(object sender, RoutedEventArgs e)
    {
        var menu = (MenuItem)sender;
        var selectedRow = (DataGridRow)((ContextMenu)menu.Parent).PlacementTarget;
        var datagrid = FindOwningDataGrid(selectedRow);

        // The first two items are fixed; everything after is a generated playlist entry.
        while (menu.Items.Count > 2)
            menu.Items.RemoveAt(menu.Items.Count - 1);

        foreach (var playlist in libraryViewModel_.Playlists)
        {
            var item = new MenuItem { Header = playlist.Name };
            item.Click += (o, args) =>
            {
                libraryViewModel_.AddItemIntoPlaylist(libraryViewModel_.Playlists.IndexOf(playlist), datagrid.Tag.ToString(), datagrid.SelectedIndex);
                Music.SelectedIndex = Music.Items.Count - 1;
                ListPlaylist.SelectedIndex = libraryViewModel_.Playlists.IndexOf(playlist);
            };
            item.SetResourceReference(StyleProperty, "MenuItem");
            menu.Items.Add(item);
        }
    }

    /// <summary>
    /// Creates a new playlist and immediately adds the grid's selected item to it.
    /// </summary>
    private void MenuItem_AddPlaylist_OnClick(object sender, RoutedEventArgs e)
    {
        libraryViewModel_.AddPlaylist();
        var menu = (MenuItem)sender;
        var selectedRow = (DataGridRow)((ContextMenu)((MenuItem)menu.Parent).Parent).PlacementTarget;
        var datagrid = FindOwningDataGrid(selectedRow);
        libraryViewModel_.AddItemIntoPlaylist(libraryViewModel_.Playlists.Count - 1, datagrid.Tag.ToString(), datagrid.SelectedIndex);
        Music.SelectedIndex = Music.Items.Count - 1;
        ListPlaylist.SelectedIndex = ListPlaylist.Items.Count - 1;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Runtime.InteropServices;
using Internal.Runtime;
namespace System.Runtime
{
internal static unsafe class DispatchResolve
{
    /// <summary>
    /// One dispatch map entry: maps (interface index, interface method slot) to the
    /// slot of the implementing method on the declaring type.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct DispatchMapEntry
    {
        public ushort _usInterfaceIndex;      // Index into the type's interface map
        public ushort _usInterfaceMethodSlot; // Slot of the method on the interface
        public ushort _usImplMethodSlot;      // Slot of the implementation on the type
    }

    /// <summary>
    /// Native layout of a type's dispatch map: an entry count followed by the entries inline.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct DispatchMap
    {
        public uint _entryCount;
        public DispatchMapEntry _dispatchMap; // Actually a variable length array
    }

    /// <summary>
    /// Resolves the code address that implements interface method
    /// (<paramref name="pItfType"/>, slot <paramref name="itfSlotNumber"/>) on
    /// <paramref name="pTgtType"/>, searching the target type and then its base types.
    /// Returns IntPtr.Zero when no implementation is found.
    /// </summary>
    public static IntPtr FindInterfaceMethodImplementationTarget(EEType* pTgtType,
                                                                 EEType* pItfType,
                                                                 ushort itfSlotNumber)
    {
        DynamicModule* dynamicModule = pTgtType->DynamicModule;

        // Use the dynamic module resolver if it's present
        if ((dynamicModule != null) && (dynamicModule->DynamicTypeSlotDispatchResolve != IntPtr.Zero))
        {
            return CalliIntrinsics.Call<IntPtr>(dynamicModule->DynamicTypeSlotDispatchResolve,
                                                (IntPtr)pTgtType, (IntPtr)pItfType, itfSlotNumber);
        }

        // Start at the current type and work up the inheritance chain
        EEType* pCur = pTgtType;

        // Always search against the canonical form of a cloned interface type.
        if (pItfType->IsCloned)
            pItfType = pItfType->CanonicalEEType;

        while (pCur != null)
        {
            ushort implSlotNumber;
            if (FindImplSlotForCurrentType(
                    pCur, pItfType, itfSlotNumber, &implSlotNumber))
            {
                IntPtr targetMethod;
                if (implSlotNumber < pCur->NumVtableSlots)
                {
                    // true virtual - need to get the slot from the target type in case it got overridden
                    targetMethod = pTgtType->GetVTableStartAddress()[implSlotNumber];
                }
                else
                {
                    // sealed virtual - need to get the slot from the implementing type, because
                    // it's not present on the target type
                    targetMethod = pCur->GetSealedVirtualSlot((ushort)(implSlotNumber - pCur->NumVtableSlots));
                }
                return targetMethod;
            }
            // Walk to the base type; arrays dispatch through their special array EEType.
            if (pCur->IsArray)
                pCur = pCur->GetArrayEEType();
            else
                pCur = pCur->NonArrayBaseType;
        }
        return IntPtr.Zero;
    }

    /// <summary>
    /// Searches a single type (one level of the inheritance chain) for an implementation
    /// of the given interface slot, writing the result into <paramref name="pImplSlotNumber"/>.
    /// Scans the dispatch map exactly first, then again allowing variant matches.
    /// </summary>
    private static bool FindImplSlotForCurrentType(EEType* pTgtType,
                                                   EEType* pItfType,
                                                   ushort itfSlotNumber,
                                                   ushort* pImplSlotNumber)
    {
        bool fRes = false;

        // If making a call and doing virtual resolution don't look into the dispatch map,
        // take the slot number directly.
        if (!pItfType->IsInterface)
        {
            *pImplSlotNumber = itfSlotNumber;

            // Only notice matches if the target type and search types are the same
            // This will make dispatch to sealed slots work correctly
            return pTgtType == pItfType;
        }

        if (pTgtType->HasDispatchMap)
        {
            // For variant interface dispatch, the algorithm is to walk the parent hierarchy, and at each level
            // attempt to dispatch exactly first, and then if that fails attempt to dispatch variantly. This can
            // result in interesting behavior such as a derived type only overriding one particular instantiation
            // and funneling all the dispatches to it, but its the algorithm.

            bool fDoVariantLookup = false; // do not check variance for first scan of dispatch map

            fRes = FindImplSlotInSimpleMap(
                pTgtType, pItfType, itfSlotNumber, pImplSlotNumber, fDoVariantLookup);

            if (!fRes)
            {
                fDoVariantLookup = true; // check variance for second scan of dispatch map
                fRes = FindImplSlotInSimpleMap(
                    pTgtType, pItfType, itfSlotNumber, pImplSlotNumber, fDoVariantLookup);
            }
        }

        return fRes;
    }

    /// <summary>
    /// Scans the target type's dispatch map for an entry matching the interface slot.
    /// When <paramref name="actuallyCheckVariance"/> is set, entries that differ only in
    /// compatible variant generic arguments (or array covariance / ICastable) also match.
    /// </summary>
    private static bool FindImplSlotInSimpleMap(EEType* pTgtType,
                                                EEType* pItfType,
                                                uint itfSlotNumber,
                                                ushort* pImplSlotNumber,
                                                bool actuallyCheckVariance)
    {
        Debug.Assert(pTgtType->HasDispatchMap, "Missing dispatch map");

        // Lazily-resolved instantiation details for the callsite interface (see below).
        EEType* pItfOpenGenericType = null;
        EETypeRef* pItfInstantiation = null;
        int itfArity = 0;
        GenericVariance* pItfVarianceInfo = null;

        bool fCheckVariance = false;
        bool fArrayCovariance = false;

        if (actuallyCheckVariance)
        {
            fCheckVariance = pItfType->HasGenericVariance;
            fArrayCovariance = pTgtType->IsArray;

            // Non-arrays can follow array variance rules iff
            // 1. They have one generic parameter
            // 2. That generic parameter is array covariant.
            //
            // This special case is to allow array enumerators to work
            if (!fArrayCovariance && pTgtType->HasGenericVariance)
            {
                int tgtEntryArity = (int)pTgtType->GenericArity;
                GenericVariance* pTgtVarianceInfo = pTgtType->GenericVariance;

                if ((tgtEntryArity == 1) && pTgtVarianceInfo[0] == GenericVariance.ArrayCovariant)
                {
                    fArrayCovariance = true;
                }
            }

            // Arrays are covariant even though you can both get and set elements (type safety is maintained by
            // runtime type checks during set operations). This extends to generic interfaces implemented on those
            // arrays. We handle this by forcing all generic interfaces on arrays to behave as though they were
            // covariant (over their one type parameter corresponding to the array element type).
            if (fArrayCovariance && pItfType->IsGeneric)
                fCheckVariance = true;

            // TypeEquivalent interface dispatch is handled at covariance time. At this time we don't have general
            // type equivalent interface dispatch, but we do use the model for the interface underlying CastableObject
            // which is done by checking the interface types involved for ICastable.
            if (pItfType->IsICastable)
                fCheckVariance = true;

            // If there is no variance checking, there is no operation to perform. (The non-variance check loop
            // has already completed)
            if (!fCheckVariance)
            {
                return false;
            }
        }

        DispatchMap* pMap = pTgtType->DispatchMap;
        DispatchMapEntry* i = &pMap->_dispatchMap;
        DispatchMapEntry* iEnd = (&pMap->_dispatchMap) + pMap->_entryCount;
        for (; i != iEnd; ++i)
        {
            if (i->_usInterfaceMethodSlot == itfSlotNumber)
            {
                EEType* pCurEntryType =
                    pTgtType->InterfaceMap[i->_usInterfaceIndex].InterfaceType;

                if (pCurEntryType->IsCloned)
                    pCurEntryType = pCurEntryType->CanonicalEEType;

                if (pCurEntryType == pItfType)
                {
                    // Exact interface match.
                    *pImplSlotNumber = i->_usImplMethodSlot;
                    return true;
                }
                else if (fCheckVariance && pCurEntryType->IsICastable && pItfType->IsICastable)
                {
                    // CastableObject-style dispatch: both sides are ICastable.
                    *pImplSlotNumber = i->_usImplMethodSlot;
                    return true;
                }
                else if (fCheckVariance && ((fArrayCovariance && pCurEntryType->IsGeneric) || pCurEntryType->HasGenericVariance))
                {
                    // Interface types don't match exactly but both the target interface and the current interface
                    // in the map are marked as being generic with at least one co- or contra- variant type
                    // parameter. So we might still have a compatible match.

                    // Retrieve the unified generic instance for the callsite interface if we haven't already (we
                    // lazily get this then cache the result since the lookup isn't necessarily cheap).
                    if (pItfOpenGenericType == null)
                    {
                        pItfOpenGenericType = pItfType->GenericDefinition;
                        itfArity = (int)pItfType->GenericArity;
                        pItfInstantiation = pItfType->GenericArguments;
                        pItfVarianceInfo = pItfType->GenericVariance;
                    }

                    // Retrieve the unified generic instance for the interface we're looking at in the map.
                    EEType* pCurEntryGenericType = pCurEntryType->GenericDefinition;

                    // If the generic types aren't the same then the types aren't compatible.
                    if (pItfOpenGenericType != pCurEntryGenericType)
                        continue;

                    // Grab instantiation details for the candidate interface.
                    EETypeRef* pCurEntryInstantiation = pCurEntryType->GenericArguments;

                    // The types represent different instantiations of the same generic type. The
                    // arity of both had better be the same.
                    Debug.Assert(itfArity == (int)pCurEntryType->GenericArity, "arity mismatch betweeen generic instantiations");

                    if (TypeCast.TypeParametersAreCompatible(itfArity, pCurEntryInstantiation, pItfInstantiation, pItfVarianceInfo, fArrayCovariance, null))
                    {
                        *pImplSlotNumber = i->_usImplMethodSlot;
                        return true;
                    }
                }
            }
        }

        return false;
    }
}
}
| |
//
// https://github.com/ServiceStack/ServiceStack.Text
// ServiceStack.Text: .NET C# POCO JSON, JSV and CSV Text Serializers.
//
// Authors:
// Demis Bellot (demis.bellot@gmail.com)
//
// Copyright 2012 ServiceStack Ltd.
//
// Licensed under the same terms of ServiceStack: new BSD license.
//
using System;
using System.Globalization;
using System.Xml;
using ServiceStack.Text.Json;
namespace ServiceStack.Text.Common
{
public static class DateTimeSerializer
{
public const string ShortDateTimeFormat = "yyyy-MM-dd"; //11
public const string DefaultDateTimeFormat = "dd/MM/yyyy HH:mm:ss"; //20
public const string DefaultDateTimeFormatWithFraction = "dd/MM/yyyy HH:mm:ss.fff"; //24
public const string XsdDateTimeFormat = "yyyy-MM-ddTHH:mm:ss.fffffffZ"; //29
public const string XsdDateTimeFormat3F = "yyyy-MM-ddTHH:mm:ss.fffZ"; //25
public const string XsdDateTimeFormatSeconds = "yyyy-MM-ddTHH:mm:ssZ"; //21
public const string EscapedWcfJsonPrefix = "\\/Date(";
public const string EscapedWcfJsonSuffix = ")\\/";
public const string WcfJsonPrefix = "/Date(";
public const char WcfJsonSuffix = ')';
/// <summary>
/// If AlwaysUseUtc is set to true then convert all DateTime to UTC.
/// </summary>
/// <param name="dateTime"></param>
/// <returns></returns>
private static DateTime Prepare(this DateTime dateTime)
{
if (JsConfig.AlwaysUseUtc && dateTime.Kind != DateTimeKind.Utc)
return dateTime.ToStableUniversalTime();
else
return dateTime;
}
public static DateTime? ParseShortestNullableXsdDateTime(string dateTimeStr)
{
if (dateTimeStr == null)
return null;
return ParseShortestXsdDateTime(dateTimeStr);
}
public static DateTime ParseShortestXsdDateTime(string dateTimeStr)
{
if (string.IsNullOrEmpty(dateTimeStr))
return DateTime.MinValue;
if (dateTimeStr.StartsWith(EscapedWcfJsonPrefix) || dateTimeStr.StartsWith(WcfJsonPrefix))
return ParseWcfJsonDate(dateTimeStr).Prepare();
if (dateTimeStr.Length == DefaultDateTimeFormat.Length
|| dateTimeStr.Length == DefaultDateTimeFormatWithFraction.Length)
{
return DateTime.Parse(dateTimeStr, CultureInfo.InvariantCulture).Prepare();
}
if (dateTimeStr.Length == XsdDateTimeFormatSeconds.Length)
return DateTime.ParseExact(dateTimeStr, XsdDateTimeFormatSeconds, null,
DateTimeStyles.AdjustToUniversal);
if (dateTimeStr.Length >= XsdDateTimeFormat3F.Length
&& dateTimeStr.Length <= XsdDateTimeFormat.Length)
{
#if NETFX_CORE
var dateTimeType = JsConfig.DateHandler != JsonDateHandler.ISO8601
? "yyyy-MM-ddTHH:mm:sszzzzzzz"
: "yyyy-MM-ddTHH:mm:sszzzzzzz";
return XmlConvert.ToDateTimeOffset(dateTimeStr, dateTimeType).DateTime.Prepare();
#else
var dateTimeType = JsConfig.DateHandler != JsonDateHandler.ISO8601
? XmlDateTimeSerializationMode.Local
: XmlDateTimeSerializationMode.RoundtripKind;
return XmlConvert.ToDateTime(dateTimeStr, dateTimeType).Prepare();
#endif
}
return DateTime.Parse(dateTimeStr, null, DateTimeStyles.AssumeLocal).Prepare();
}
public static string ToDateTimeString(DateTime dateTime)
{
return dateTime.ToStableUniversalTime().ToString(XsdDateTimeFormat);
}
public static DateTime ParseDateTime(string dateTimeStr)
{
return DateTime.ParseExact(dateTimeStr, XsdDateTimeFormat, null);
}
public static DateTimeOffset ParseDateTimeOffset(string dateTimeOffsetStr)
{
if (string.IsNullOrEmpty(dateTimeOffsetStr)) return default(DateTimeOffset);
// for interop, do not assume format based on config
// format: prefer TimestampOffset, DCJSCompatible
if (dateTimeOffsetStr.StartsWith(EscapedWcfJsonPrefix) ||
dateTimeOffsetStr.StartsWith(WcfJsonPrefix))
{
return ParseWcfJsonDateOffset(dateTimeOffsetStr);
}
// format: next preference ISO8601
// assume utc when no offset specified
if (dateTimeOffsetStr.LastIndexOfAny(TimeZoneChars) < 10)
{
if (!dateTimeOffsetStr.EndsWith("Z")) dateTimeOffsetStr += "Z";
#if __MonoCS__
// Without that Mono uses a Local timezone))
dateTimeOffsetStr = dateTimeOffsetStr.Substring(0, dateTimeOffsetStr.Length - 1) + "+00:00";
#endif
}
return DateTimeOffset.Parse(dateTimeOffsetStr, CultureInfo.InvariantCulture);
}
public static string ToXsdDateTimeString(DateTime dateTime)
{
#if NETFX_CORE
return XmlConvert.ToString(dateTime.ToStableUniversalTime(), XsdDateTimeFormat);
#else
return XmlConvert.ToString(dateTime.ToStableUniversalTime(), XmlDateTimeSerializationMode.Utc);
#endif
}
public static string ToXsdTimeSpanString(TimeSpan timeSpan)
{
var r = XmlConvert.ToString(timeSpan);
#if __MonoCS__
// Mono returns DT even if time is 00:00:00
if (r.EndsWith("DT")) return r.Substring(0, r.Length - 1);
#endif
return r;
}
public static string ToXsdTimeSpanString(TimeSpan? timeSpan)
{
return (timeSpan != null) ? ToXsdTimeSpanString(timeSpan.Value) : null;
}
public static DateTime ParseXsdDateTime(string dateTimeStr)
{
#if NETFX_CORE
return XmlConvert.ToDateTimeOffset(dateTimeStr).DateTime;
#else
return XmlConvert.ToDateTime(dateTimeStr, XmlDateTimeSerializationMode.Utc);
#endif
}
public static TimeSpan ParseTimeSpan(string dateTimeStr)
{
return dateTimeStr.StartsWith("P") || dateTimeStr.StartsWith("-P")
? ParseXsdTimeSpan(dateTimeStr)
: TimeSpan.Parse(dateTimeStr);
}
public static TimeSpan ParseXsdTimeSpan(string dateTimeStr)
{
return XmlConvert.ToTimeSpan(dateTimeStr);
}
public static TimeSpan? ParseNullableTimeSpan(string dateTimeStr)
{
return string.IsNullOrEmpty(dateTimeStr)
? (TimeSpan?)null
: ParseTimeSpan(dateTimeStr);
}
public static TimeSpan? ParseXsdNullableTimeSpan(string dateTimeStr)
{
return String.IsNullOrEmpty(dateTimeStr) ?
null :
new TimeSpan?(XmlConvert.ToTimeSpan(dateTimeStr));
}
public static string ToShortestXsdDateTimeString(DateTime dateTime)
{
var timeOfDay = dateTime.TimeOfDay;
if (timeOfDay.Ticks == 0)
return dateTime.ToString(ShortDateTimeFormat);
if (timeOfDay.Milliseconds == 0)
return dateTime.ToStableUniversalTime().ToString(XsdDateTimeFormatSeconds);
return ToXsdDateTimeString(dateTime);
}
static readonly char[] TimeZoneChars = new[] { '+', '-' };
/// <summary>
/// WCF Json format: /Date(unixts+0000)/
/// </summary>
/// <param name="wcfJsonDate"></param>
/// <returns></returns>
public static DateTimeOffset ParseWcfJsonDateOffset(string wcfJsonDate)
{
if (wcfJsonDate[0] == '\\')
{
wcfJsonDate = wcfJsonDate.Substring(1);
}
var suffixPos = wcfJsonDate.IndexOf(WcfJsonSuffix);
var timeString = (suffixPos < 0) ? wcfJsonDate : wcfJsonDate.Substring(WcfJsonPrefix.Length, suffixPos - WcfJsonPrefix.Length);
// for interop, do not assume format based on config
if (!wcfJsonDate.StartsWith(WcfJsonPrefix))
{
return DateTimeOffset.Parse(timeString, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
}
var timeZonePos = timeString.LastIndexOfAny(TimeZoneChars);
var timeZone = timeZonePos <= 0 ? string.Empty : timeString.Substring(timeZonePos);
var unixTimeString = timeString.Substring(0, timeString.Length - timeZone.Length);
var unixTime = long.Parse(unixTimeString);
if (timeZone == string.Empty)
{
// when no timezone offset is supplied, then treat the time as UTC
return unixTime.FromUnixTimeMs();
}
if (JsConfig.DateHandler == JsonDateHandler.DCJSCompatible)
{
// DCJS ignores the offset and considers it local time if any offset exists
// REVIEW: DCJS shoves offset in a separate field 'offsetMinutes', we have the offset in the format, so shouldn't we use it?
return unixTime.FromUnixTimeMs().ToLocalTime();
}
var offset = timeZone.FromTimeOffsetString();
var date = unixTime.FromUnixTimeMs();
return new DateTimeOffset(date.Ticks, offset);
}
/// <summary>
/// WCF Json format: /Date(unixts+0000)/
/// </summary>
/// <param name="wcfJsonDate"></param>
/// <returns></returns>
public static DateTime ParseWcfJsonDate(string wcfJsonDate)
{
if (wcfJsonDate[0] == JsonUtils.EscapeChar)
{
wcfJsonDate = wcfJsonDate.Substring(1);
}
var suffixPos = wcfJsonDate.IndexOf(WcfJsonSuffix);
var timeString = wcfJsonDate.Substring(WcfJsonPrefix.Length, suffixPos - WcfJsonPrefix.Length);
// for interop, do not assume format based on config
if (!wcfJsonDate.StartsWith(WcfJsonPrefix))
{
return DateTime.Parse(timeString, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);
}
var timeZonePos = timeString.LastIndexOfAny(TimeZoneChars);
var timeZone = timeZonePos <= 0 ? string.Empty : timeString.Substring(timeZonePos);
var unixTimeString = timeString.Substring(0, timeString.Length - timeZone.Length);
var unixTime = long.Parse(unixTimeString);
if (timeZone == string.Empty)
{
// when no timezone offset is supplied, then treat the time as UTC
return unixTime.FromUnixTimeMs();
}
if (JsConfig.DateHandler == JsonDateHandler.DCJSCompatible)
{
// DCJS ignores the offset and considers it local time if any offset exists
return unixTime.FromUnixTimeMs().ToLocalTime();
}
var offset = timeZone.FromTimeOffsetString();
var date = unixTime.FromUnixTimeMs(offset);
return new DateTimeOffset(date, offset).DateTime;
}
public static string ToWcfJsonDate(DateTime dateTime)
{
if (JsConfig.DateHandler == JsonDateHandler.ISO8601)
{
return dateTime.ToString("o", CultureInfo.InvariantCulture);
}
var timestamp = dateTime.ToUnixTimeMs();
var offset = dateTime.Kind == DateTimeKind.Utc
? string.Empty
: TimeZoneInfo.Local.GetUtcOffset(dateTime).ToTimeOffsetString();
return EscapedWcfJsonPrefix + timestamp + offset + EscapedWcfJsonSuffix;
}
public static string ToWcfJsonDateTimeOffset(DateTimeOffset dateTimeOffset)
{
if (JsConfig.DateHandler == JsonDateHandler.ISO8601)
{
return dateTimeOffset.ToString("o", CultureInfo.InvariantCulture);
}
var timestamp = dateTimeOffset.Ticks.ToUnixTimeMs();
var offset = dateTimeOffset.Offset == TimeSpan.Zero
? string.Empty
: dateTimeOffset.Offset.ToTimeOffsetString();
return EscapedWcfJsonPrefix + timestamp + offset + EscapedWcfJsonSuffix;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.ComponentModel.Composition.Primitives;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Threading;
using Microsoft.Internal;
namespace System.ComponentModel.Composition.Hosting
{
/// <summary>
/// An immutable ComposablePartCatalog created from a managed code assembly.
/// </summary>
/// <remarks>
/// This type is thread safe.
/// </remarks>
    [DebuggerTypeProxy(typeof(AssemblyCatalogDebuggerProxy))]
    public class AssemblyCatalog : ComposablePartCatalog, ICompositionElement
    {
        // Guards the lazy construction of _innerCatalog (see InnerCatalog).
        private readonly object _thisLock = new object();
        private readonly ICompositionElement _definitionOrigin;
        // volatile: both fields are read outside the lock in InnerCatalog's
        // double-checked initialization.
        private volatile Assembly _assembly = null;
        private volatile ComposablePartCatalog _innerCatalog = null;
        // 0 = live, 1 = disposed; flipped exactly once via Interlocked in Dispose.
        private int _isDisposed = 0;
        private readonly ReflectionContext _reflectionContext = default(ReflectionContext);
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified code base.
        /// </summary>
        /// <param name="codeBase">
        ///     A <see cref="string"/> containing the code base of the assembly containing the
        ///     attributed <see cref="Type"/> objects to add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <exception cref="ArgumentNullException">
        ///     <paramref name="codeBase"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentException">
        ///     <paramref name="codeBase"/> is a zero-length string, contains only white space,
        ///     or contains one or more invalid characters.
        /// </exception>
        /// <exception cref="PathTooLongException">
        ///     The specified path, file name, or both exceed the system-defined maximum length.
        /// </exception>
        /// <exception cref="System.Security.SecurityException">
        ///     The caller does not have path discovery permission.
        /// </exception>
        /// <exception cref="FileNotFoundException">
        ///     <paramref name="codeBase"/> is not found.
        /// </exception>
        /// <exception cref="FileLoadException">
        ///     <paramref name="codeBase"/> could not be loaded.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="codeBase"/> specified a directory.
        /// </exception>
        /// <exception cref="BadImageFormatException">
        ///     <paramref name="codeBase"/> is not a valid assembly
        ///     -or-
        ///     Version 2.0 or later of the common language runtime is currently loaded
        ///     and <paramref name="codeBase"/> was compiled with a later version.
        /// </exception>
        /// <remarks>
        ///     The assembly referenced by <paramref langword="codeBase"/> is loaded into the Load context.
        /// </remarks>
        public AssemblyCatalog(string codeBase)
        {
            Requires.NotNullOrEmpty(codeBase, nameof(codeBase));
            InitializeAssemblyCatalog(LoadAssembly(codeBase));
            // This catalog is its own definition origin for diagnostics.
            _definitionOrigin = this;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified code base and reflection context.
        /// </summary>
        /// <param name="codeBase">
        ///     A <see cref="string"/> containing the code base of the assembly containing the
        ///     attributed <see cref="Type"/> objects to add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <param name="reflectionContext">
        ///     The <see cref="ReflectionContext"/> a context used by the catalog when
        ///     interpreting the types to inject attributes into the type definition.
        /// </param>
        /// <exception cref="ArgumentNullException">
        ///     <paramref name="codeBase"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="reflectionContext"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentException">
        ///     <paramref name="codeBase"/> is a zero-length string, contains only white space,
        ///     or contains one or more invalid characters.
        /// </exception>
        /// <exception cref="PathTooLongException">
        ///     The specified path, file name, or both exceed the system-defined maximum length.
        /// </exception>
        /// <exception cref="System.Security.SecurityException">
        ///     The caller does not have path discovery permission.
        /// </exception>
        /// <exception cref="FileNotFoundException">
        ///     <paramref name="codeBase"/> is not found.
        /// </exception>
        /// <exception cref="FileLoadException">
        ///     <paramref name="codeBase"/> could not be loaded.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="codeBase"/> specified a directory.
        /// </exception>
        /// <exception cref="BadImageFormatException">
        ///     <paramref name="codeBase"/> is not a valid assembly
        ///     -or-
        ///     Version 2.0 or later of the common language runtime is currently loaded
        ///     and <paramref name="codeBase"/> was compiled with a later version.
        /// </exception>
        /// <remarks>
        ///     The assembly referenced by <paramref langword="codeBase"/> is loaded into the Load context.
        /// </remarks>
        public AssemblyCatalog(string codeBase, ReflectionContext reflectionContext)
        {
            Requires.NotNullOrEmpty(codeBase, nameof(codeBase));
            Requires.NotNull(reflectionContext, nameof(reflectionContext));
            InitializeAssemblyCatalog(LoadAssembly(codeBase));
            _reflectionContext = reflectionContext;
            _definitionOrigin = this;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified code base and definition origin.
        /// </summary>
        /// <param name="codeBase">
        ///     A <see cref="string"/> containing the code base of the assembly containing the
        ///     attributed <see cref="Type"/> objects to add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <param name="definitionOrigin">
        ///     The <see cref="ICompositionElement"/> CompositionElement used by Diagnostics to identify the source for parts.
        /// </param>
        /// <exception cref="ArgumentNullException">
        ///     <paramref name="codeBase"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="definitionOrigin"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentException">
        ///     <paramref name="codeBase"/> is a zero-length string, contains only white space,
        ///     or contains one or more invalid characters.
        /// </exception>
        /// <exception cref="PathTooLongException">
        ///     The specified path, file name, or both exceed the system-defined maximum length.
        /// </exception>
        /// <exception cref="System.Security.SecurityException">
        ///     The caller does not have path discovery permission.
        /// </exception>
        /// <exception cref="FileNotFoundException">
        ///     <paramref name="codeBase"/> is not found.
        /// </exception>
        /// <exception cref="FileLoadException">
        ///     <paramref name="codeBase"/> could not be loaded.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="codeBase"/> specified a directory.
        /// </exception>
        /// <exception cref="BadImageFormatException">
        ///     <paramref name="codeBase"/> is not a valid assembly
        ///     -or-
        ///     Version 2.0 or later of the common language runtime is currently loaded
        ///     and <paramref name="codeBase"/> was compiled with a later version.
        /// </exception>
        /// <remarks>
        ///     The assembly referenced by <paramref langword="codeBase"/> is loaded into the Load context.
        /// </remarks>
        public AssemblyCatalog(string codeBase, ICompositionElement definitionOrigin)
        {
            Requires.NotNullOrEmpty(codeBase, nameof(codeBase));
            Requires.NotNull(definitionOrigin, nameof(definitionOrigin));
            InitializeAssemblyCatalog(LoadAssembly(codeBase));
            _definitionOrigin = definitionOrigin;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified code base, reflection context and definition origin.
        /// </summary>
        /// <param name="codeBase">
        ///     A <see cref="string"/> containing the code base of the assembly containing the
        ///     attributed <see cref="Type"/> objects to add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <param name="reflectionContext">
        ///     The <see cref="ReflectionContext"/> a context used by the catalog when
        ///     interpreting the types to inject attributes into the type definition.
        /// </param>
        /// <param name="definitionOrigin">
        ///     The <see cref="ICompositionElement"/> CompositionElement used by Diagnostics to identify the source for parts.
        /// </param>
        /// <exception cref="ArgumentNullException">
        ///     <paramref name="codeBase"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="reflectionContext"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="definitionOrigin"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ArgumentException">
        ///     <paramref name="codeBase"/> is a zero-length string, contains only white space,
        ///     or contains one or more invalid characters.
        /// </exception>
        /// <exception cref="PathTooLongException">
        ///     The specified path, file name, or both exceed the system-defined maximum length.
        /// </exception>
        /// <exception cref="System.Security.SecurityException">
        ///     The caller does not have path discovery permission.
        /// </exception>
        /// <exception cref="FileNotFoundException">
        ///     <paramref name="codeBase"/> is not found.
        /// </exception>
        /// <exception cref="FileLoadException">
        ///     <paramref name="codeBase"/> could not be loaded.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="codeBase"/> specified a directory.
        /// </exception>
        /// <exception cref="BadImageFormatException">
        ///     <paramref name="codeBase"/> is not a valid assembly
        ///     -or-
        ///     Version 2.0 or later of the common language runtime is currently loaded
        ///     and <paramref name="codeBase"/> was compiled with a later version.
        /// </exception>
        /// <remarks>
        ///     The assembly referenced by <paramref langword="codeBase"/> is loaded into the Load context.
        /// </remarks>
        public AssemblyCatalog(string codeBase, ReflectionContext reflectionContext, ICompositionElement definitionOrigin)
        {
            Requires.NotNullOrEmpty(codeBase, nameof(codeBase));
            Requires.NotNull(reflectionContext, nameof(reflectionContext));
            Requires.NotNull(definitionOrigin, nameof(definitionOrigin));
            InitializeAssemblyCatalog(LoadAssembly(codeBase));
            _reflectionContext = reflectionContext;
            _definitionOrigin = definitionOrigin;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified assembly and reflection context.
        /// </summary>
        /// <param name="assembly">
        ///     The <see cref="Assembly"/> containing the attributed <see cref="Type"/> objects to
        ///     add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <param name="reflectionContext">
        ///     The <see cref="ReflectionContext"/> a context used by the catalog when
        ///     interpreting the types to inject attributes into the type definition.
        /// </param>
        /// <exception cref="ArgumentException">
        ///     <paramref name="assembly"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="assembly"/> was loaded in the reflection-only context.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="reflectionContext"/> is <see langword="null"/>.
        /// </exception>
        public AssemblyCatalog(Assembly assembly, ReflectionContext reflectionContext)
        {
            Requires.NotNull(assembly, nameof(assembly));
            Requires.NotNull(reflectionContext, nameof(reflectionContext));
            InitializeAssemblyCatalog(assembly);
            _reflectionContext = reflectionContext;
            _definitionOrigin = this;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified assembly, reflectionContext and definitionOrigin.
        /// </summary>
        /// <param name="assembly">
        ///     The <see cref="Assembly"/> containing the attributed <see cref="Type"/> objects to
        ///     add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <param name="reflectionContext">
        ///     The <see cref="ReflectionContext"/> a context used by the catalog when
        ///     interpreting the types to inject attributes into the type definition.
        /// </param>
        /// <param name="definitionOrigin">
        ///     The <see cref="ICompositionElement"/> CompositionElement used by Diagnostics to identify the source for parts.
        /// </param>
        /// <exception cref="ArgumentException">
        ///     <paramref name="assembly"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="assembly"/> was loaded in the reflection-only context.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="reflectionContext"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="definitionOrigin"/> is <see langword="null"/>.
        /// </exception>
        public AssemblyCatalog(Assembly assembly, ReflectionContext reflectionContext, ICompositionElement definitionOrigin)
        {
            Requires.NotNull(assembly, nameof(assembly));
            Requires.NotNull(reflectionContext, nameof(reflectionContext));
            Requires.NotNull(definitionOrigin, nameof(definitionOrigin));
            InitializeAssemblyCatalog(assembly);
            _reflectionContext = reflectionContext;
            _definitionOrigin = definitionOrigin;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified assembly.
        /// </summary>
        /// <param name="assembly">
        ///     The <see cref="Assembly"/> containing the attributed <see cref="Type"/> objects to
        ///     add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <exception cref="ArgumentException">
        ///     <paramref name="assembly"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="assembly"/> was loaded in the reflection-only context.
        /// </exception>
        public AssemblyCatalog(Assembly assembly)
        {
            Requires.NotNull(assembly, nameof(assembly));
            InitializeAssemblyCatalog(assembly);
            _definitionOrigin = this;
        }
        /// <summary>
        ///     Initializes a new instance of the <see cref="AssemblyCatalog"/> class
        ///     with the specified assembly and definition origin.
        /// </summary>
        /// <param name="assembly">
        ///     The <see cref="Assembly"/> containing the attributed <see cref="Type"/> objects to
        ///     add to the <see cref="AssemblyCatalog"/>.
        /// </param>
        /// <param name="definitionOrigin">
        ///     The <see cref="ICompositionElement"/> CompositionElement used by Diagnostics to identify the source for parts.
        /// </param>
        /// <exception cref="ArgumentException">
        ///     <paramref name="assembly"/> is <see langword="null"/>.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="assembly"/> was loaded in the reflection-only context.
        ///     <para>
        ///         -or-
        ///     </para>
        ///     <paramref name="definitionOrigin"/> is <see langword="null"/>.
        /// </exception>
        public AssemblyCatalog(Assembly assembly, ICompositionElement definitionOrigin)
        {
            Requires.NotNull(assembly, nameof(assembly));
            Requires.NotNull(definitionOrigin, nameof(definitionOrigin));
            InitializeAssemblyCatalog(assembly);
            _definitionOrigin = definitionOrigin;
        }
        // Shared constructor tail: rejects reflection-only assemblies (their types
        // cannot be instantiated for composition) and records the assembly.
        private void InitializeAssemblyCatalog(Assembly assembly)
        {
            if (assembly.ReflectionOnly)
            {
                throw new ArgumentException(SR.Format(SR.Argument_AssemblyReflectionOnly, nameof(assembly)), nameof(assembly));
            }
            _assembly = assembly;
        }
        /// <summary>
        ///     Returns the export definitions that match the constraint defined by the specified definition.
        /// </summary>
        /// <param name="definition">
        ///     The <see cref="ImportDefinition"/> that defines the conditions of the
        ///     <see cref="ExportDefinition"/> objects to return.
        /// </param>
        /// <returns>
        ///     An <see cref="IEnumerable{T}"/> of <see cref="Tuple{T1, T2}"/> containing the
        ///     <see cref="ExportDefinition"/> objects and their associated
        ///     <see cref="ComposablePartDefinition"/> for objects that match the constraint defined
        ///     by <paramref name="definition"/>.
        /// </returns>
        /// <exception cref="ArgumentNullException">
        ///     <paramref name="definition"/> is <see langword="null"/>.
        /// </exception>
        /// <exception cref="ObjectDisposedException">
        ///     The <see cref="ComposablePartCatalog"/> has been disposed of.
        /// </exception>
        /// <remarks>
        ///     <note type="inheritinfo">
        ///         Overriders of this property should never return <see langword="null"/>, if no
        ///         <see cref="ExportDefinition"/> match the conditions defined by
        ///         <paramref name="definition"/>, return an empty <see cref="IEnumerable{T}"/>.
        ///     </note>
        /// </remarks>
        public override IEnumerable<Tuple<ComposablePartDefinition, ExportDefinition>> GetExports(ImportDefinition definition)
        {
            // Delegates to the lazily-built TypeCatalog over the assembly's types.
            return InnerCatalog.GetExports(definition);
        }
        // Lazily builds the underlying TypeCatalog on first use, via double-checked
        // locking: the cheap volatile read avoids taking the lock once initialized.
        private ComposablePartCatalog InnerCatalog
        {
            get
            {
                ThrowIfDisposed();
                if (_innerCatalog == null)
                {
                    // An assembly-level CatalogReflectionContextAttribute can remap the
                    // assembly before its types are inspected.
                    var catalogReflectionContextAttribute = _assembly.GetFirstAttribute<CatalogReflectionContextAttribute>();
                    var assembly = (catalogReflectionContextAttribute != null)
                        ? catalogReflectionContextAttribute.CreateReflectionContext().MapAssembly(_assembly)
                        : _assembly;
                    lock (_thisLock)
                    {
                        if (_innerCatalog == null)
                        {
                            var catalog = (_reflectionContext != null)
                                ? new TypeCatalog(assembly.GetTypes(), _reflectionContext, _definitionOrigin)
                                : new TypeCatalog(assembly.GetTypes(), _definitionOrigin);
                            // Publish only a fully-constructed catalog to other threads.
                            Thread.MemoryBarrier();
                            _innerCatalog = catalog;
                        }
                    }
                }
                return _innerCatalog;
            }
        }
        /// <summary>
        ///     Gets the assembly containing the attributed types contained within the assembly
        ///     catalog.
        /// </summary>
        /// <value>
        ///     The <see cref="Assembly"/> containing the attributed <see cref="Type"/> objects
        ///     contained within the <see cref="AssemblyCatalog"/>.
        /// </value>
        public Assembly Assembly
        {
            get
            {
                Debug.Assert(_assembly != null);
                return _assembly;
            }
        }
        /// <summary>
        ///     Gets the display name of the assembly catalog.
        /// </summary>
        /// <value>
        ///     A <see cref="string"/> containing a human-readable display name of the <see cref="AssemblyCatalog"/>.
        /// </value>
        [SuppressMessage("Microsoft.Design", "CA1033:InterfaceMethodsShouldBeCallableByChildTypes")]
        string ICompositionElement.DisplayName
        {
            get { return GetDisplayName(); }
        }
        /// <summary>
        ///     Gets the composition element from which the assembly catalog originated.
        /// </summary>
        /// <value>
        ///     This property always returns <see langword="null"/>.
        /// </value>
        [SuppressMessage("Microsoft.Design", "CA1033:InterfaceMethodsShouldBeCallableByChildTypes")]
        ICompositionElement ICompositionElement.Origin
        {
            get { return null; }
        }
        /// <summary>
        ///     Returns a string representation of the assembly catalog.
        /// </summary>
        /// <returns>
        ///     A <see cref="string"/> containing the string representation of the <see cref="AssemblyCatalog"/>.
        /// </returns>
        public override string ToString()
        {
            return GetDisplayName();
        }
        protected override void Dispose(bool disposing)
        {
            try
            {
                // CompareExchange makes the 0 -> 1 transition exactly once, so the
                // inner catalog is disposed at most once even under concurrent calls.
                if (Interlocked.CompareExchange(ref _isDisposed, 1, 0) == 0)
                {
                    if (disposing)
                    {
                        if (_innerCatalog != null)
                        {
                            _innerCatalog.Dispose();
                        }
                    }
                }
            }
            finally
            {
                base.Dispose(disposing);
            }
        }
        public override IEnumerator<ComposablePartDefinition> GetEnumerator()
        {
            return InnerCatalog.GetEnumerator();
        }
        // Throws ObjectDisposedException once Dispose has flipped _isDisposed to 1.
        private void ThrowIfDisposed()
        {
            if (_isDisposed == 1)
            {
                throw ExceptionBuilder.CreateObjectDisposed(this);
            }
        }
        // e.g. "AssemblyCatalog (Assembly=\"mscorlib, Version=...\")".
        private string GetDisplayName()
        {
            return string.Format(CultureInfo.CurrentCulture,
                                "{0} (Assembly=\"{1}\")", // NOLOC
                                GetType().Name,
                                Assembly.FullName);
        }
        // Loads by assembly identity when the file's name can be read, otherwise by
        // code base; falls back to LoadFrom when identity resolution fails.
        private static Assembly LoadAssembly(string codeBase)
        {
            Requires.NotNullOrEmpty(codeBase, nameof(codeBase));
            AssemblyName assemblyName;
            try
            {
                assemblyName = AssemblyName.GetAssemblyName(codeBase);
            }
            catch (ArgumentException)
            {
                assemblyName = new AssemblyName();
                assemblyName.CodeBase = codeBase;
            }
            try
            {
                return Assembly.Load(assemblyName);
            }
            //fallback attempt issue https://github.com/dotnet/corefx/issues/27433
            catch (FileNotFoundException)
            {
                return Assembly.LoadFrom(codeBase);
            }
        }
    }
}
| |
using System;
using System.Collections.Generic;
using Hexpoint.Blox.Hosts.World;
using Hexpoint.Blox.GameObjects.Units;
using Hexpoint.Blox.GameActions;
using Hexpoint.Blox;
using Hexpoint.Blox.Hosts;
using OpenTK;
using Hexpoint.Blox.GameObjects.GameItems;
namespace AiKnowledgeEngine
{
internal class Tasks
{
public Tasks ()
{
tasks = new List<BaseTaskItem> ();
}
internal void Add (BaseTaskItem item)
{
tasks.Add (item);
}
internal void DoTask (Character chr, FrameEventArgs e)
{
if (tasks.Count > 0)
{
tasks [tasks.Count - 1].DoTask (chr, e);
}
}
public void Remove (BaseTaskItem item)
{
tasks.Remove (item);
}
private List<BaseTaskItem> tasks;
}
    /// <summary>
    /// Base class for all AI task items executed per frame from a character's task stack.
    /// </summary>
    internal abstract class BaseTaskItem
    {
        /// <summary>Performs one frame's worth of work for this task.</summary>
        internal abstract void DoTask (Character chr, FrameEventArgs e);
    }
internal class GotoTask : BaseTaskItem
{
internal GotoTask(Position destination)
{
this.destination = destination;
}
internal override void DoTask (Character chr, FrameEventArgs e)
{
Console.WriteLine ("{0}:Finding path to {1}", chr.Id, destination);
List<Position> route = null;
route = chr.path.FindPath(chr.Position, destination, chr.knowledge, 10000);
if (route == null)
{
Console.WriteLine ("{0}:Can't find route to target", chr.Id);
// TODO - what?
return;
}
chr.RemoveTask (this);
chr.AddTask (new MoveToTask (route));
}
private Position destination;
}
    /// <summary>
    /// Walks a character along a precomputed route, one frame at a time. The route
    /// is consumed from the end of the list (index Count-1) down to index 1;
    /// route[0] is never entered — presumably it is the target block itself
    /// (e.g. the block queued for chopping) — TODO confirm against FindPath/FindPaths.
    /// </summary>
    internal class MoveToTask : BaseTaskItem
    {
        internal MoveToTask (List<Position> route)
        {
            this.route = route;
            // Start from the last waypoint; movement proceeds toward index 0.
            index = route.Count - 1;
        }
        // Waypoints, ordered so that the next position to visit has the highest index.
        private List<Position> route;
        // Index of the waypoint currently being walked toward.
        private int index;
        internal override void DoTask (Character chr, FrameEventArgs e)
        {
            Position moveTo = route [index];
            if (moveTo.GetBlock().IsSolid)
            {
                // A solid block appeared on the path since it was planned; abort.
                Console.WriteLine ("{0}:Route blocked", chr.Id);
                chr.RemoveTask (this);
            }
            else
            {
                //Console.WriteLine ("Moving to {0}", moveTo);
                chr.MoveTo (moveTo, e);
                if (chr.Position == moveTo)
                {
                    //Console.WriteLine ("Moved to {0}", moveTo);
                    index--;
                }
                // NOTE(review): the task ends once the waypoint at index 1 is reached;
                // an empty route would make the constructor set index to -1 and the
                // indexer above throw — confirm callers never pass an empty route.
                if (index == 0)
                {
                    Console.WriteLine ("{0}:At destination", chr.Id);
                    chr.RemoveTask (this);
                }
            }
        }
    }
// internal class SearchTask : ITaskItem
// {
// internal void DoTask (Character chr)
// {
// Console.WriteLine ("Searching");
//
// List<Position> route = chr.path.FindPathToNearest (chr.Position, loc => loc.Type == Block.BlockType.);
// if (route.Count <= 1)
// {
// Console.WriteLine ("Don't know where to go?");
// return;
// }
//
// chr.AddTask (new MoveToTask (route));
// }
// }
internal class WaitTask : BaseTaskItem
{
/// <summary>
/// Initializes a new instance of the <see cref="AiKnowledgeEngine.WaitTask"/> class.
/// </summary>
/// <param name='waitTime'>
/// Wait time in seconds.
/// </param>
internal WaitTask (double waitTime)
{
this.waitTime = waitTime * 1000;
}
internal override void DoTask (Character chr, FrameEventArgs e)
{
waitTime -= e.Time;
if (waitTime < 0) {
chr.RemoveTask (this);
}
}
private double waitTime; // Waittime in ms
}
    /// <summary>
    /// One-shot task that refreshes the character's pathing knowledge of its surroundings.
    /// </summary>
    internal class LookAroundTask : BaseTaskItem
    {
        internal override void DoTask (Character chr, FrameEventArgs e)
        {
            Console.WriteLine ("{0}:Looking around", chr.Id);
            // Explore reachable positions from here and record them in the
            // character's knowledge base.
            chr.path.FindPaths(chr.Position, chr.knowledge);
            chr.RemoveTask (this);
        }
    }
// Finds a route to the nearest reachable block of the requested type and
// queues the movement and chopping work needed to collect it. When no
// matching block is reachable, it schedules a wait plus a look-around and
// stays queued so the gather attempt is retried later.
internal class GatherItemTask : BaseTaskItem
{
    private Block.BlockType blockType;

    internal GatherItemTask (Block.BlockType blockType)
    {
        this.blockType = blockType;
    }

    internal override void DoTask (Character chr, FrameEventArgs e)
    {
        Console.WriteLine ("{0}:Collect item", chr.Id);

        // Search known terrain for a route to the closest matching block.
        List<Position> route = chr.path.FindPaths (chr.Position, chr.knowledge, blockType, 10000);
        if (route == null)
        {
            // Nothing visible right now: pause, then rescan the surroundings.
            // Note this task is NOT removed, so it will run again afterwards.
            Console.WriteLine ("{0}:Can't see any target item, Gather than waiting 10 seconds", chr.Id);
            chr.AddTask (new WaitTask (10));
            chr.AddTask (new LookAroundTask ());
            return;
        }

        // Queue the chop for the route's target, then the move along the route
        // (presumably the chop runs once movement finishes — depends on the
        // character's task-queue ordering; confirm against AddTask semantics).
        chr.AddTask (new ChopBlocksAtPosTask (blockType, route [0]));
        chr.AddTask (new MoveToTask (route));
    }
}
// Expands into one ChopBlockTask per neighbouring block that matches the
// requested type, then retires. Intended to run once the character has
// reached the given position.
internal class ChopBlocksAtPosTask : BaseTaskItem
{
    private Block.BlockType blockType;
    private Position position;

    internal ChopBlocksAtPosTask (Block.BlockType blockType, Position position)
    {
        this.blockType = blockType;
        this.position = position;
    }

    internal override void DoTask (Character chr, FrameEventArgs e)
    {
        Console.WriteLine ("{0}:ChopBlocksAtPosTask {1}", chr.Id, position);

        // Fan out: schedule an individual chop for every adjacent block of
        // the wanted type.
        foreach (Position neighbour in chr.path.NeighbourBlocks (position))
        {
            if (neighbour.GetBlock ().Type != blockType)
                continue;
            chr.AddTask (new ChopBlockTask (blockType, neighbour));
        }

        // One-shot: retire after expanding into the per-block tasks.
        chr.RemoveTask (this);
    }
}
// Chops a single block at a fixed position — i.e. asks the server to
// replace it with air — provided it still holds the expected block type
// when the task actually runs.
internal class ChopBlockTask : BaseTaskItem
{
    private Block.BlockType blockType;
    private Position position;

    internal ChopBlockTask (Block.BlockType blockType, Position position)
    {
        this.blockType = blockType;
        this.position = position;
    }

    internal override void DoTask (Character chr, FrameEventArgs e)
    {
        // The world may have changed since this task was queued; only chop
        // when the target block still matches the recorded type.
        bool stillMatches = position.GetBlock ().Type == blockType;
        if (stillMatches)
        {
            Console.WriteLine ("{0}:Chopping {1}", chr.Id, blockType);
            AddOrRemoveBlock (chr, position, Block.BlockType.Air); // replace with air
        }

        // Chopping is attempted at most once; always retire.
        chr.RemoveTask (this);
    }

    // Validates the location, then sends the block change to the server.
    private void AddOrRemoveBlock (Character chr, Position target, Block.BlockType newType)
    {
        if (!target.IsValidBlockLocation)
        {
            Console.WriteLine ("{0}:Invalid block location", chr.Id);
            return;
        }
        NetworkClient.SendAddOrRemoveBlock (target, newType);
    }
}
// Builds a construction from a named blueprint. The actual block placement
// is not implemented yet (see TODO); currently this only resolves the
// blueprint and retires.
internal class BuildConstructionTask : BaseTaskItem
{
    private string blueprint;
    private Dictionary<Position, Block.BlockType> blocks;

    internal BuildConstructionTask (string blueprint)
    {
        this.blueprint = blueprint;
    }

    internal override void DoTask (Character chr, FrameEventArgs e)
    {
        Console.WriteLine ("{0}:Building {1}", chr.Id, blueprint);

        blocks = Construction.Instance.GetConstruction (blueprint);
        if (blocks == null)
        {
            // BUG FIX: previously execution fell through to RemoveTask(this)
            // after scheduling the wait, so the build was abandoned forever.
            // Returning keeps this task queued so it retries after the wait,
            // matching the retry pattern used by GatherItemTask.
            chr.AddTask (new WaitTask (10));
            return;
        }

        // TODO: actually place the blueprint's blocks.
        chr.RemoveTask (this);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Text;
using System.Xml.XPath;
namespace System.Xml
{
// Provides text-manipulation methods that are used by several classes.
public abstract class XmlCharacterData : XmlLinkedNode
{
    // Backing store for the node's character data. May be null, in which
    // case the public surface reports String.Empty / zero length.
    private string _data;

    //base(doc) will throw exception if doc is null.
    protected internal XmlCharacterData(string data, XmlDocument doc) : base(doc)
    {
        _data = data;
    }

    // Gets or sets the value of the node.
    public override String Value
    {
        get { return Data; }
        set { Data = value; }
    }

    // Gets or sets the concatenated values of the node and
    // all its children.
    public override string InnerText
    {
        get { return Value; }
        set { Value = value; }
    }

    // Contains this node's data. The getter never returns null; the setter
    // raises the node-changed events around the mutation.
    public virtual string Data
    {
        get
        {
            if (_data != null)
            {
                return _data;
            }
            else
            {
                return String.Empty;
            }
        }

        set
        {
            XmlNode parent = ParentNode;

            // Event ordering is part of the contract: BeforeEvent fires before
            // _data is replaced, AfterEvent after. GetEventArgs returns null
            // when no listeners are registered, in which case no events fire.
            XmlNodeChangedEventArgs args = GetEventArgs(this, parent, parent, _data, value, XmlNodeChangedAction.Change);

            if (args != null)
                BeforeEvent(args);

            _data = value;

            if (args != null)
                AfterEvent(args);
        }
    }

    // Gets the length of the data, in characters (0 when _data is null).
    public virtual int Length
    {
        get
        {
            if (_data != null)
            {
                return _data.Length;
            }
            return 0;
        }
    }

    // Retrieves a substring of the full string from the specified range.
    // count is clamped so the range never runs past the end of the data;
    // an offset beyond the data still throws from string.Substring.
    public virtual String Substring(int offset, int count)
    {
        int len = _data != null ? _data.Length : 0;
        if (len > 0)
        {
            if (len < (offset + count))
            {
                count = len - offset;
            }

            return _data.Substring(offset, count);
        }
        return String.Empty;
    }

    // Appends the specified string to the end of the character
    // data of the node. Raises the node-changed events around the update.
    public virtual void AppendData(String strData)
    {
        XmlNode parent = ParentNode;

        // Pre-size the builder for the concatenated result; StringBuilder.Append
        // treats null arguments as no-ops, so null _data/strData are safe here.
        int capacity = _data != null ? _data.Length : 0;
        if (strData != null) capacity += strData.Length;

        string newValue = new StringBuilder(capacity).Append(_data).Append(strData).ToString();

        XmlNodeChangedEventArgs args = GetEventArgs(this, parent, parent, _data, newValue, XmlNodeChangedAction.Change);

        if (args != null)
            BeforeEvent(args);

        _data = newValue;

        if (args != null)
            AfterEvent(args);
    }

    // Insert the specified string at the specified character offset.
    // Raises the node-changed events around the update.
    public virtual void InsertData(int offset, string strData)
    {
        XmlNode parent = ParentNode;

        int capacity = _data != null ? _data.Length : 0;
        if (strData != null) capacity += strData.Length;

        // StringBuilder.Insert validates offset against the current length.
        string newValue = new StringBuilder(capacity).Append(_data).Insert(offset, strData).ToString();

        XmlNodeChangedEventArgs args = GetEventArgs(this, parent, parent, _data, newValue, XmlNodeChangedAction.Change);

        if (args != null)
            BeforeEvent(args);

        _data = newValue;

        if (args != null)
            AfterEvent(args);
    }

    // Remove a range of characters from the node. count is clamped (never
    // negative) so removal cannot run past the end of the data.
    public virtual void DeleteData(int offset, int count)
    {
        //Debug.Assert(offset >= 0 && offset <= Length);

        int len = _data != null ? _data.Length : 0;
        if (len > 0)
        {
            if (len < (offset + count))
            {
                count = Math.Max(len - offset, 0);
            }
        }

        string newValue = new StringBuilder(_data).Remove(offset, count).ToString();

        XmlNode parent = ParentNode;
        XmlNodeChangedEventArgs args = GetEventArgs(this, parent, parent, _data, newValue, XmlNodeChangedAction.Change);

        if (args != null)
            BeforeEvent(args);

        _data = newValue;

        if (args != null)
            AfterEvent(args);
    }

    // Replace the specified number of characters starting at the specified offset with the
    // specified string. Implemented as a clamped Remove followed by an Insert
    // at the same offset; raises the node-changed events around the update.
    public virtual void ReplaceData(int offset, int count, String strData)
    {
        int len = _data != null ? _data.Length : 0;
        if (len > 0)
        {
            if (len < (offset + count))
            {
                count = Math.Max(len - offset, 0);
            }
        }

        StringBuilder temp = new StringBuilder(_data).Remove(offset, count);
        string newValue = temp.Insert(offset, strData).ToString();

        XmlNode parent = ParentNode;
        XmlNodeChangedEventArgs args = GetEventArgs(this, parent, parent, _data, newValue, XmlNodeChangedAction.Change);

        if (args != null)
            BeforeEvent(args);

        _data = newValue;

        if (args != null)
            AfterEvent(args);
    }

    // Returns true when the given string consists solely of XML whitespace
    // characters (as defined by XmlCharType).
    internal bool CheckOnData(string data)
    {
        return XmlCharType.Instance.IsOnlyWhitespace(data);
    }

    internal bool DecideXPNodeTypeForTextNodes(XmlNode node, ref XPathNodeType xnt)
    {
        //returns true - if all siblings of the node are processed else returns false.
        //The reference XPathNodeType argument being passed in is the watermark that
        //changes according to the siblings nodetype and will contain the correct
        //nodetype when it returns.
        Debug.Assert(XmlDocument.IsTextNode(node.NodeType) || (node.ParentNode != null && node.ParentNode.NodeType == XmlNodeType.EntityReference));

        while (node != null)
        {
            switch (node.NodeType)
            {
                case XmlNodeType.Whitespace:
                    // Plain whitespace never raises the watermark.
                    break;

                case XmlNodeType.SignificantWhitespace:
                    xnt = XPathNodeType.SignificantWhitespace;
                    break;

                case XmlNodeType.Text:
                case XmlNodeType.CDATA:
                    // Text/CDATA is the highest watermark; stop immediately.
                    xnt = XPathNodeType.Text;
                    return false;

                case XmlNodeType.EntityReference:
                    // Recurse into the entity reference's children.
                    if (!DecideXPNodeTypeForTextNodes(node.FirstChild, ref xnt))
                    {
                        return false;
                    }
                    break;

                default:
                    return false;
            }
            node = node.NextSibling;
        }

        return true;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Threading;
using Internal;
namespace System.Collections.Concurrent
{
/// <summary>
/// Provides a producer/consumer queue safe to be used by only one producer and one consumer concurrently.
/// </summary>
/// <typeparam name="T">Specifies the type of data contained in the queue.</typeparam>
[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(SingleProducerSingleConsumerQueue<>.SingleProducerSingleConsumerQueue_DebugView))]
internal sealed class SingleProducerSingleConsumerQueue<T> : IEnumerable<T>
{
    // Design:
    //
    // SingleProducerSingleConsumerQueue (SPSCQueue) is a concurrent queue designed to be used
    // by one producer thread and one consumer thread. SPSCQueue does not work correctly when used by
    // multiple producer threads concurrently or multiple consumer threads concurrently.
    //
    // SPSCQueue is based on segments that behave like circular buffers. Each circular buffer is represented
    // as an array with two indexes: _first and _last. _first is the index of the array slot for the consumer
    // to read next, and _last is the slot for the producer to write next. The circular buffer is empty when
    // (_first == _last), and full when ((_last+1) % _array.Length == _first).
    //
    // Since _first is only ever modified by the consumer thread and _last by the producer, the two indices can
    // be updated without interlocked operations. As long as the queue size fits inside a single circular buffer,
    // enqueues and dequeues simply advance the corresponding indices around the circular buffer. If an enqueue finds
    // that there is no room in the existing buffer, however, a new circular buffer is allocated that is twice as big
    // as the old buffer. From then on, the producer will insert values into the new buffer. The consumer will first
    // empty out the old buffer and only then follow the producer into the new (larger) buffer.
    //
    // As described above, the enqueue operation on the fast path only modifies the _last field of the current segment.
    // However, it also needs to read _first in order to verify that there is room in the current segment. Similarly, the
    // dequeue operation on the fast path only needs to modify _first, but also needs to read _last to verify that the
    // queue is non-empty. This results in true cache line sharing between the producer and the consumer.
    //
    // The cache line sharing issue can be mitigated by having a possibly stale copy of _first that is owned by the producer,
    // and a possibly stale copy of _last that is owned by the consumer. So, the consumer state is described using
    // (_first, _lastCopy) and the producer state using (_firstCopy, _last). The consumer state is separated from
    // the producer state by padding, which keeps fast-path enqueues and dequeues from hitting shared cache lines.
    // _lastCopy is the consumer's copy of _last. Whenever the consumer can tell that there is data in the buffer
    // simply by observing _lastCopy, the consumer thread does not need to read _last and thus encounter a cache miss. Only
    // when the buffer appears to be empty will the consumer refresh _lastCopy from _last. _firstCopy is used by the producer
    // in the same way to avoid reading _first on the hot path.

    /// <summary>The initial size to use for segments (in number of elements).</summary>
    private const int InitialSegmentSize = 32; // must be a power of 2

    /// <summary>The maximum size to use for segments (in number of elements).</summary>
    private const int MaxSegmentSize = 0x1000000; // this could be made as large as Int32.MaxValue / 2

    /// <summary>The head of the linked list of segments; the consumer dequeues from here.</summary>
    private volatile Segment _head;

    /// <summary>The tail of the linked list of segments; the producer enqueues here. Equals _head while a single segment suffices.</summary>
    private volatile Segment _tail;

    /// <summary>Initializes the queue.</summary>
    public SingleProducerSingleConsumerQueue()
    {
        // Validate constants in ctor rather than in an explicit cctor that would cause perf degradation
        Debug.Assert(InitialSegmentSize > 0, "Initial segment size must be > 0.");
        Debug.Assert((InitialSegmentSize & (InitialSegmentSize - 1)) == 0, "Initial segment size must be a power of 2");
        Debug.Assert(InitialSegmentSize <= MaxSegmentSize, "Initial segment size should be <= maximum.");
        Debug.Assert(MaxSegmentSize < int.MaxValue / 2, "Max segment size * 2 must be < Int32.MaxValue, or else overflow could occur.");

        // Initialize the queue
        _head = _tail = new Segment(InitialSegmentSize);
    }

    /// <summary>Enqueues an item into the queue.</summary>
    /// <param name="item">The item to enqueue.</param>
    public void Enqueue(T item)
    {
        Segment segment = _tail;
        T[] array = segment._array;
        int last = segment._state._last; // local copy to avoid multiple volatile reads

        // Fast path: there's obviously room in the current segment
        // (next slot index is computed with a mask because array lengths are powers of 2).
        int tail2 = (last + 1) & (array.Length - 1);
        if (tail2 != segment._state._firstCopy)
        {
            // Store the item before publishing the new _last; _last is volatile,
            // so the consumer cannot observe the slot before it is written.
            array[last] = item;
            segment._state._last = tail2;
        }

        // Slow path: there may not be room in the current segment.
        else EnqueueSlow(item, ref segment);
    }

    /// <summary>Enqueues an item into the queue.</summary>
    /// <param name="item">The item to enqueue.</param>
    /// <param name="segment">The segment in which to first attempt to store the item.</param>
    private void EnqueueSlow(T item, ref Segment segment)
    {
        Debug.Assert(segment != null, "Expected a non-null segment.");

        // _firstCopy may be stale; refresh it and retry the fast path once.
        if (segment._state._firstCopy != segment._state._first)
        {
            segment._state._firstCopy = segment._state._first;
            Enqueue(item); // will only recur once for this enqueue operation
            return;
        }

        // Segment is genuinely full: grow by allocating a new, larger segment.
        int newSegmentSize = _tail._array.Length << 1; // double size
        Debug.Assert(newSegmentSize > 0, "The max size should always be small enough that we don't overflow.");
        if (newSegmentSize > MaxSegmentSize) newSegmentSize = MaxSegmentSize;

        var newSegment = new Segment(newSegmentSize);
        newSegment._array[0] = item;
        newSegment._state._last = 1;
        newSegment._state._lastCopy = 1;

        try { }
        finally
        {
            // Finally block to protect against corruption due to a thread abort
            // between setting _next and setting _tail.
            Volatile.Write(ref _tail._next, newSegment); // ensure segment not published until item is fully stored
            _tail = newSegment;
        }
    }

    /// <summary>Attempts to dequeue an item from the queue.</summary>
    /// <param name="result">The dequeued item.</param>
    /// <returns>true if an item could be dequeued; otherwise, false.</returns>
    public bool TryDequeue([MaybeNullWhen(false)] out T result)
    {
        Segment segment = _head;
        T[] array = segment._array;
        int first = segment._state._first; // local copy to avoid multiple volatile reads

        // Fast path: there's obviously data available in the current segment
        if (first != segment._state._lastCopy)
        {
            result = array[first];
            array[first] = default!; // Clear the slot to release the element
            segment._state._first = (first + 1) & (array.Length - 1);
            return true;
        }

        // Slow path: there may not be data available in the current segment
        else return TryDequeueSlow(ref segment, ref array, out result);
    }

    /// <summary>Attempts to dequeue an item from the queue.</summary>
    /// <param name="segment">The segment from which the item was dequeued.</param>
    /// <param name="array">The array from which the item was dequeued.</param>
    /// <param name="result">The dequeued item.</param>
    /// <returns>true if an item could be dequeued; otherwise, false.</returns>
    private bool TryDequeueSlow(ref Segment segment, ref T[] array, [MaybeNullWhen(false)] out T result)
    {
        Debug.Assert(segment != null, "Expected a non-null segment.");
        Debug.Assert(array != null, "Expected a non-null item array.");

        // _lastCopy may be stale; refresh it and retry the fast path once.
        if (segment._state._last != segment._state._lastCopy)
        {
            segment._state._lastCopy = segment._state._last;
            return TryDequeue(out result); // will only recur once for this dequeue operation
        }

        // Current segment is drained; follow the producer into the next segment, if any.
        if (segment._next != null && segment._state._first == segment._state._last)
        {
            segment = segment._next;
            array = segment._array;
            _head = segment;
        }

        int first = segment._state._first; // local copy to avoid extraneous volatile reads

        if (first == segment._state._last)
        {
            result = default!;
            return false;
        }

        result = array[first];
        array[first] = default!; // Clear the slot to release the element
        segment._state._first = (first + 1) & (segment._array.Length - 1);
        segment._state._lastCopy = segment._state._last; // Refresh _lastCopy to ensure that _first has not passed _lastCopy

        return true;
    }

    /// <summary>Gets whether the collection is currently empty.</summary>
    public bool IsEmpty
    {
        // This implementation is optimized for calls from the consumer.
        get
        {
            Segment head = _head;
            if (head._state._first != head._state._lastCopy) return false; // _first is volatile, so the read of _lastCopy cannot get reordered
            if (head._state._first != head._state._last) return false;
            return head._next == null;
        }
    }

    /// <summary>Gets an enumerable for the collection.</summary>
    /// <remarks>This method is not safe to use concurrently with any other members that may mutate the collection.</remarks>
    public IEnumerator<T> GetEnumerator()
    {
        // Walk every segment from head to tail, yielding the live range of each.
        for (Segment? segment = _head; segment != null; segment = segment._next)
        {
            for (int pt = segment._state._first;
                pt != segment._state._last;
                pt = (pt + 1) & (segment._array.Length - 1))
            {
                yield return segment._array[pt];
            }
        }
    }

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();

    /// <summary>Gets the number of items in the collection.</summary>
    /// <remarks>This method is not safe to use concurrently with any other members that may mutate the collection.</remarks>
    internal int Count
    {
        get
        {
            int count = 0;
            for (Segment? segment = _head; segment != null; segment = segment._next)
            {
                int arraySize = segment._array.Length;
                int first, last;
                while (true) // Count is not meant to be used concurrently, but this helps to avoid issues if it is
                {
                    // Re-read _first after reading _last; retry until the pair forms
                    // a consistent snapshot (i.e. no dequeue moved _first in between).
                    first = segment._state._first;
                    last = segment._state._last;
                    if (first == segment._state._first) break;
                }
                count += (last - first) & (arraySize - 1);
            }
            return count;
        }
    }

    /// <summary>A segment in the queue containing one or more items.</summary>
    [StructLayout(LayoutKind.Sequential)]
    private sealed class Segment
    {
        /// <summary>The next segment in the linked list of segments.</summary>
        internal Segment? _next;

        /// <summary>The data stored in this segment. Length is always a power of 2, enabling mask-based index wrapping.</summary>
        internal readonly T[] _array;

        /// <summary>Details about the segment.</summary>
        internal SegmentState _state; // separated out to enable StructLayout attribute to take effect

        /// <summary>Initializes the segment.</summary>
        /// <param name="size">The size to use for this segment.</param>
        internal Segment(int size)
        {
            Debug.Assert((size & (size - 1)) == 0, "Size must be a power of 2");
            _array = new T[size];
        }
    }

    /// <summary>Stores information about a segment.</summary>
    [StructLayout(LayoutKind.Sequential)] // enforce layout so that padding reduces false sharing
    private struct SegmentState
    {
        /// <summary>Padding to reduce false sharing between the segment's array and _first.</summary>
        internal PaddingFor32 _pad0;

        /// <summary>The index of the current head in the segment.</summary>
        internal volatile int _first;

        /// <summary>A copy of the current tail index.</summary>
        internal int _lastCopy; // not volatile as read and written by the producer, except for IsEmpty, and there _lastCopy is only read after reading the volatile _first

        /// <summary>Padding to reduce false sharing between the first and last.</summary>
        internal PaddingFor32 _pad1;

        /// <summary>A copy of the current head index.</summary>
        internal int _firstCopy; // not volatile as only read and written by the consumer thread

        /// <summary>The index of the current tail in the segment.</summary>
        internal volatile int _last;

        /// <summary>Padding to reduce false sharing with the last and what's after the segment.</summary>
        internal PaddingFor32 _pad2;
    }

    /// <summary>Debugger type proxy for a SingleProducerSingleConsumerQueue of T.</summary>
    private sealed class SingleProducerSingleConsumerQueue_DebugView
    {
        /// <summary>The queue being visualized.</summary>
        private readonly SingleProducerSingleConsumerQueue<T> _queue;

        /// <summary>Initializes the debug view.</summary>
        /// <param name="queue">The queue being debugged.</param>
        public SingleProducerSingleConsumerQueue_DebugView(SingleProducerSingleConsumerQueue<T> queue)
        {
            Debug.Assert(queue != null, "Expected a non-null queue.");
            _queue = queue;
        }

        /// <summary>Gets the contents of the list.</summary>
        [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)]
        public T[] Items => new List<T>(_queue).ToArray();
    }
}
}
| |
//
// - PropertyProviderExtensions.cs -
//
// Copyright 2005, 2006, 2010 Carbonfrost Systems, Inc. (http://carbonfrost.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace Carbonfrost.Commons.Shared.Runtime {
public static class PropertyProviderExtensions {

    /// <summary>Determines whether the provider exposes a value for the named property.</summary>
    public static bool HasProperty(this IPropertyProvider source,
                                   string property) {
        if (source == null)
            throw new ArgumentNullException("source"); // $NON-NLS-1

        object dummy;
        return source.TryGetProperty(property, out dummy);
    }

    /// <summary>Gets the named property converted to <see cref="Boolean"/>.</summary>
    public static bool GetBoolean(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToBoolean(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Byte"/>.</summary>
    public static byte GetByte(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToByte(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Char"/>.</summary>
    public static char GetChar(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToChar(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="DateTime"/>.</summary>
    public static DateTime GetDateTime(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToDateTime(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Decimal"/>.</summary>
    public static decimal GetDecimal(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToDecimal(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Double"/>.</summary>
    public static double GetDouble(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToDouble(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Int16"/>.</summary>
    public static short GetInt16(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToInt16(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Int32"/>.</summary>
    public static int GetInt32(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToInt32(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Int64"/>.</summary>
    public static long GetInt64(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToInt64(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="SByte"/>.</summary>
    [CLSCompliant(false)]
    public static sbyte GetSByte(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToSByte(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="Single"/>.</summary>
    public static float GetSingle(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToSingle(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="String"/>.</summary>
    public static string GetString(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToString(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="UInt16"/>.</summary>
    [CLSCompliant(false)]
    public static ushort GetUInt16(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToUInt16(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="UInt32"/>.</summary>
    [CLSCompliant(false)]
    public static uint GetUInt32(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToUInt32(source.GetProperty(property));
    }

    /// <summary>Gets the named property converted to <see cref="UInt64"/>.</summary>
    [CLSCompliant(false)]
    public static ulong GetUInt64(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source");

        return Convert.ToUInt64(source.GetProperty(property));
    }

    // If no value is present, the value is simply set.
    // If a value is present, and it is a value, the value is converted to
    // an array containing that existing element plus <c>value</c>.
    // If the value located there is a collection of some sort, then
    // the value is added to it. If the collection is read-only, its
    // contents are copied into a new array.
    public static void Push(this IProperties source, string property, object value) {
        if (source == null)
            throw new ArgumentNullException("source");

        object current;
        if (source.TryGetProperty(property, out current)) {
            // NOTE(review): a string value implements IEnumerable and will be
            // expanded char-by-char by the list branch below — confirm intended.
            IEnumerable e = current as IEnumerable;
            if (e == null)
                source.SetProperty(property, new object[] { current, value });

            else {
                object[] objArray = current as object[];
                if (objArray != null) {
                    Array.Resize(ref objArray, objArray.Length + 1);
                    objArray[objArray.Length - 1] = value;

                    // BUG FIX: Array.Resize allocates a new array and rebinds only the
                    // local reference; the resized array must be stored back, otherwise
                    // the pushed value is silently lost.
                    source.SetProperty(property, objArray);

                } else {
                    List<object> items = new List<object>(e.Cast<object>());
                    items.Add(value);
                    source.SetProperty(property, items.ToArray());
                }
            }

        } else {
            source.SetProperty(property, value);
        }
    }

    /// <summary>Gets the named property as an <see cref="Object"/>, or null when absent.</summary>
    public static object GetProperty(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source"); // $NON-NLS-1

        return source.GetProperty<object>(property, null);
    }

    /// <summary>Gets the named property as <typeparamref name="T"/>, or default(T) when absent.</summary>
    public static T GetProperty<T>(this IPropertyProvider source, string property) {
        if (source == null)
            throw new ArgumentNullException("source"); // $NON-NLS-1

        return source.GetProperty<T>(property, default(T));
    }

    /// <summary>Gets the named property as <typeparamref name="T"/>, or the given default when absent.
    /// Note the stored value is cast directly, so an incompatible type throws <see cref="InvalidCastException"/>.</summary>
    public static T GetProperty<T>(this IPropertyProvider source, string property, T defaultValue) {
        if (source == null)
            throw new ArgumentNullException("source"); // $NON-NLS-1

        object result;
        if (source.TryGetProperty(property, out result))
            return (T) result;
        else
            return defaultValue;
    }

    /// <summary>Attempts to get the named property as <typeparamref name="T"/>; returns false
    /// (with default(T)) when the property is absent or not an instance of T.</summary>
    public static bool TryGetProperty<T>(this IPropertyProvider source, string property, out T value) {
        if (source == null) { throw new ArgumentNullException("source"); } // $NON-NLS-1

        object objValue;
        bool result = source.TryGetProperty(property, typeof(T), out objValue);
        if (result && objValue is T) {
            value = (T) objValue;
            return true;

        } else {
            value = default(T);
            return false;
        }
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using Azure.Core;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Azure.Core.Pipeline;
namespace Azure.Identity
{
/// <summary>
/// Provides a default <see cref="TokenCredential"/> authentication flow for applications that will be deployed to Azure. The following credential
/// types if enabled will be tried, in order:
/// <list type="bullet">
/// <item><description><see cref="EnvironmentCredential"/></description></item>
/// <item><description><see cref="ManagedIdentityCredential"/></description></item>
/// <item><description><see cref="SharedTokenCacheCredential"/></description></item>
/// <item><description><see cref="VisualStudioCredential"/></description></item>
/// <item><description><see cref="VisualStudioCodeCredential"/></description></item>
/// <item><description><see cref="AzureCliCredential"/></description></item>
/// <item><description><see cref="AzurePowerShellCredential"/></description></item>
/// <item><description><see cref="InteractiveBrowserCredential"/></description></item>
/// </list>
/// Consult the documentation of these credential types for more information on how they attempt authentication.
/// </summary>
/// <remarks>
/// Note that credentials requiring user interaction, such as the <see cref="InteractiveBrowserCredential"/>, are not included by default. Callers must explicitly enable this when
/// constructing the <see cref="DefaultAzureCredential"/> either by setting the includeInteractiveCredentials parameter to true, or the setting the
/// <see cref="DefaultAzureCredentialOptions.ExcludeInteractiveBrowserCredential"/> property to false when passing <see cref="DefaultAzureCredentialOptions"/>.
/// </remarks>
/// <example>
/// <para>
/// This example demonstrates authenticating the BlobClient from the Azure.Storage.Blobs client library using the DefaultAzureCredential,
/// deployed to an Azure resource with a user assigned managed identity configured.
/// </para>
/// <code snippet="Snippet:UserAssignedManagedIdentity" language="csharp">
/// // When deployed to an azure host, the default azure credential will authenticate the specified user assigned managed identity.
///
/// string userAssignedClientId = "<your managed identity client Id>";
/// var credential = new DefaultAzureCredential(new DefaultAzureCredentialOptions { ManagedIdentityClientId = userAssignedClientId });
///
/// var blobClient = new BlobClient(new Uri("https://myaccount.blob.core.windows.net/mycontainer/myblob"), credential);
/// </code>
/// </example>
public class DefaultAzureCredential : TokenCredential
{
// Link appended to error messages to help users diagnose credential failures.
private const string Troubleshooting = "See the troubleshooting guide for more information. https://aka.ms/azsdk/net/identity/defaultazurecredential/troubleshoot";
// Message used when no credential in the chain produced a token.
private const string DefaultExceptionMessage = "DefaultAzureCredential failed to retrieve a token from the included credentials. " + Troubleshooting;
// Prefix for errors that did not originate from a credential's normal failure path.
private const string UnhandledExceptionMessage = "DefaultAzureCredential authentication failed due to an unhandled exception: ";
// Credential chain built eagerly with default options.
// NOTE(review): no visible reader in this excerpt — confirm where s_defaultCredentialChain is consumed.
private static readonly TokenCredential[] s_defaultCredentialChain = GetDefaultAzureCredentialChain(new DefaultAzureCredentialFactory(null), new DefaultAzureCredentialOptions());

// Pipeline used to open diagnostic scopes around token requests.
private readonly CredentialPipeline _pipeline;
// Lock/cache pair for the first credential that succeeds.
// NOTE(review): inferred from the type name — confirm against GetTokenImplAsync (not fully visible here).
private readonly AsyncLockWithValue<TokenCredential> _credentialLock;
// The ordered credential sources this instance will try.
private TokenCredential[] _sources;
// Parameterless constructor: chains to the (bool) overload with interactive
// credentials excluded.
internal DefaultAzureCredential() : this(false) { }
/// <summary>
/// Creates an instance of the DefaultAzureCredential class.
/// </summary>
/// <param name="includeInteractiveCredentials">Specifies whether credentials requiring user interaction will be included in the default authentication flow.</param>
public DefaultAzureCredential(bool includeInteractiveCredentials = false)
    // Options are only constructed when interactive credentials are requested;
    // passing null lets the options-based constructor use its defaults.
    : this(includeInteractiveCredentials ? new DefaultAzureCredentialOptions { ExcludeInteractiveBrowserCredential = false } : null)
{
}
/// <summary>
/// Creates an instance of the <see cref="DefaultAzureCredential"/> class.
/// </summary>
/// <param name="options">Options that configure the management of the requests sent to Azure Active Directory services, and determine which credentials are included in the <see cref="DefaultAzureCredential"/> authentication flow.</param>
public DefaultAzureCredential(DefaultAzureCredentialOptions options)
    // we call ValidateAuthorityHostOption to validate that we have a valid authority host before constructing the DAC chain
    // if we don't validate this up front it will end up throwing an exception out of a static initializer which obscures the error.
    : this(new DefaultAzureCredentialFactory(ValidateAuthorityHostOption(options)), options)
{
}
/// <summary>
/// Core constructor: captures the factory's pipeline and builds the ordered
/// credential chain that GetToken will try.
/// </summary>
internal DefaultAzureCredential(DefaultAzureCredentialFactory factory, DefaultAzureCredentialOptions options)
{
    // The three assignments are independent of one another.
    _credentialLock = new AsyncLockWithValue<TokenCredential>();
    _sources = GetDefaultAzureCredentialChain(factory, options);
    _pipeline = factory.Pipeline;
}
/// <summary>
/// Sequentially calls <see cref="TokenCredential.GetToken"/> on all the included credentials in the order <see cref="EnvironmentCredential"/>, <see cref="ManagedIdentityCredential"/>, <see cref="SharedTokenCacheCredential"/>,
/// and <see cref="InteractiveBrowserCredential"/> returning the first successfully obtained <see cref="AccessToken"/>. This method is called automatically by Azure SDK client libraries. You may call this method directly, but you must also handle token caching and token refreshing.
/// </summary>
/// <remarks>
/// Note that credentials requiring user interaction, such as the <see cref="InteractiveBrowserCredential"/>, are not included by default.
/// </remarks>
/// <param name="requestContext">The details of the authentication request.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The first <see cref="AccessToken"/> returned by the specified sources. Any credential which raises a <see cref="CredentialUnavailableException"/> will be skipped.</returns>
public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken = default)
{
    // Synchronous entry point: run the shared implementation with async=false
    // and block until it completes.
    return GetTokenImplAsync(false, requestContext, cancellationToken).EnsureCompleted();
}
/// <summary>
/// Attempts each included credential in order — <see cref="EnvironmentCredential"/>, <see cref="ManagedIdentityCredential"/>,
/// <see cref="SharedTokenCacheCredential"/>, then <see cref="InteractiveBrowserCredential"/> — and returns the first
/// <see cref="AccessToken"/> that is successfully acquired. Azure SDK client libraries invoke this automatically; if you
/// call it directly you are responsible for caching and refreshing the token yourself.
/// </summary>
/// <remarks>
/// Credentials that require user interaction, such as the <see cref="InteractiveBrowserCredential"/>, are not included by default.
/// </remarks>
/// <param name="requestContext">The details of the authentication request.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> controlling the request lifetime.</param>
/// <returns>The first <see cref="AccessToken"/> returned by the configured sources. Any credential raising a <see cref="CredentialUnavailableException"/> is skipped.</returns>
public override async ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken = default) =>
    await GetTokenImplAsync(true, requestContext, cancellationToken).ConfigureAwait(false);
// Shared implementation behind GetToken and GetTokenAsync; 'async' selects whether the
// underlying credential calls run synchronously or asynchronously.
private async ValueTask<AccessToken> GetTokenImplAsync(bool async, TokenRequestContext requestContext, CancellationToken cancellationToken)
{
    using CredentialDiagnosticScope scope = _pipeline.StartGetTokenScopeGroup("DefaultAzureCredential.GetToken", requestContext);
    try
    {
        using var asyncLock = await _credentialLock.GetLockOrValueAsync(async, cancellationToken).ConfigureAwait(false);
        AccessToken token;
        if (asyncLock.HasValue)
        {
            // A credential already succeeded on an earlier call; reuse it directly
            // instead of walking the chain again.
            token = await GetTokenFromCredentialAsync(asyncLock.Value, requestContext, async, cancellationToken).ConfigureAwait(false);
        }
        else
        {
            // First successful call: probe the chain, then cache the winning credential
            // so the chain is only walked once per instance.
            TokenCredential credential;
            (token, credential) = await GetTokenFromSourcesAsync(_sources, requestContext, async, cancellationToken).ConfigureAwait(false);
            // The chain array is no longer needed once a credential is selected.
            _sources = default;
            asyncLock.SetValue(credential);
            AzureIdentityEventSource.Singleton.DefaultAzureCredentialCredentialSelected(credential.GetType().FullName);
        }
        return scope.Succeeded(token);
    }
    catch (Exception e)
    {
        // FailWrapAndThrow records the failure in the diagnostic scope and rethrows.
        throw scope.FailWrapAndThrow(e);
    }
}
// Requests a token from the already-selected credential. Any failure other than a
// CredentialUnavailableException is wrapped in an AuthenticationFailedException.
private static async ValueTask<AccessToken> GetTokenFromCredentialAsync(TokenCredential credential, TokenRequestContext requestContext, bool async, CancellationToken cancellationToken)
{
    try
    {
        if (async)
        {
            return await credential.GetTokenAsync(requestContext, cancellationToken).ConfigureAwait(false);
        }
        return credential.GetToken(requestContext, cancellationToken);
    }
    catch (Exception ex) when (!(ex is CredentialUnavailableException))
    {
        throw new AuthenticationFailedException(UnhandledExceptionMessage, ex);
    }
}
// Walks the credential chain in order, stopping at the first null entry (the chain array
// may contain trailing null slots). The first credential to produce a token wins; credentials
// that report themselves unavailable are collected and surfaced as a single aggregate failure.
private static async ValueTask<(AccessToken Token, TokenCredential Credential)> GetTokenFromSourcesAsync(TokenCredential[] sources, TokenRequestContext requestContext, bool async, CancellationToken cancellationToken)
{
    var unavailable = new List<CredentialUnavailableException>();

    foreach (TokenCredential source in sources)
    {
        if (source is null)
        {
            break;
        }

        try
        {
            AccessToken token;
            if (async)
            {
                token = await source.GetTokenAsync(requestContext, cancellationToken).ConfigureAwait(false);
            }
            else
            {
                token = source.GetToken(requestContext, cancellationToken);
            }
            return (token, source);
        }
        catch (CredentialUnavailableException ex)
        {
            unavailable.Add(ex);
        }
    }

    throw CredentialUnavailableException.CreateAggregateException(DefaultExceptionMessage, unavailable);
}
// Builds the ordered credential chain described by the options. With no options the
// pre-built default chain is returned.
//
// Fix: the previous implementation allocated a hard-coded TokenCredential[8] whose
// capacity had to be kept in sync with the number of credential types (adding a ninth
// credential would have thrown IndexOutOfRangeException) and returned trailing null
// sentinels. Building through a List removes the magic capacity and returns an exactly
// sized array; the consumer (GetTokenFromSourcesAsync) stops at the array end or the
// first null entry, so both shapes behave identically.
private static TokenCredential[] GetDefaultAzureCredentialChain(DefaultAzureCredentialFactory factory, DefaultAzureCredentialOptions options)
{
    if (options is null)
    {
        return s_defaultCredentialChain;
    }

    var chain = new List<TokenCredential>(8);

    if (!options.ExcludeEnvironmentCredential)
    {
        chain.Add(factory.CreateEnvironmentCredential());
    }
    if (!options.ExcludeManagedIdentityCredential)
    {
        chain.Add(factory.CreateManagedIdentityCredential(options));
    }
    if (!options.ExcludeSharedTokenCacheCredential)
    {
        chain.Add(factory.CreateSharedTokenCacheCredential(options.SharedTokenCacheTenantId, options.SharedTokenCacheUsername));
    }
    if (!options.ExcludeVisualStudioCredential)
    {
        chain.Add(factory.CreateVisualStudioCredential(options.VisualStudioTenantId));
    }
    if (!options.ExcludeVisualStudioCodeCredential)
    {
        chain.Add(factory.CreateVisualStudioCodeCredential(options.VisualStudioCodeTenantId));
    }
    if (!options.ExcludeAzureCliCredential)
    {
        chain.Add(factory.CreateAzureCliCredential());
    }
    if (!options.ExcludeAzurePowerShellCredential)
    {
        chain.Add(factory.CreateAzurePowerShellCredential());
    }
    if (!options.ExcludeInteractiveBrowserCredential)
    {
        chain.Add(factory.CreateInteractiveBrowserCredential(options.InteractiveBrowserTenantId, options.InteractiveBrowserCredentialClientId));
    }

    if (chain.Count == 0)
    {
        throw new ArgumentException("At least one credential type must be included in the authentication flow.", nameof(options));
    }
    return chain.ToArray();
}
// Resolves the effective authority host (the explicit option, or the environment default)
// and fails fast if it is invalid, then passes the options through unchanged.
private static DefaultAzureCredentialOptions ValidateAuthorityHostOption(DefaultAzureCredentialOptions options)
{
    var authorityHost = options?.AuthorityHost ?? AzureAuthorityHosts.GetDefault();
    Validations.ValidateAuthorityHost(authorityHost);
    return options;
}
}
}
| |
//
// ServerTests.cs
//
// Author:
// Scott Peterson <lunchtimemama@gmail.com>
//
// Copyright (c) 2009 Scott Peterson
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Xml;
using NUnit.Framework;
using Mono.Ssdp;
using Mono.Upnp.Control;
namespace Mono.Upnp.Tests
{
[TestFixture]
// Integration-style tests for Mono.Upnp.Server: each test starts a real Server (and,
// where needed, an HttpListener acting as the event subscriber, or an SSDP client) on
// the local network stack. Callback threads are synchronized with 'mutex' via
// Monitor.Wait/Pulse; exceptions thrown on callback threads are captured into a local
// and rethrown on the test thread.
public class ServerTests
{
static readonly Random random = new Random();
// Guards cross-thread signaling between listener callbacks and the test thread.
readonly object mutex = new object ();
readonly DummyDeserializer deserializer = new DummyDeserializer();
// Verifies that subscribing to a service triggers an immediate initial event report
// containing the current value of the evented state variable.
[Test]
public void InitialUnicastEventTest ()
{
var eventer = new DummyStateVariableEventer ();
var root = CreateRoot (CreateServiceController (new StateVariable ("Foo", "string", new StateVariableOptions { Eventer = eventer })));
eventer.SetValue ("foo");
using (var server = new Server (root)) {
server.Start ();
var prefix = GeneratePrefix ();
using (var listener = new HttpListener ()) {
listener.Prefixes.Add (prefix);
listener.Start ();
Exception exception = null;
listener.BeginGetContext (result => {
try {
var context = listener.EndGetContext (result);
using (var reader = new StreamReader (context.Request.InputStream)) {
Assert.AreEqual (Xml.SingleEventReport, reader.ReadToEnd ());
}
context.Response.Close ();
} catch (Exception e) {
// Captured for rethrow on the test thread; asserts here run on a pool thread.
exception = e;
}
lock (mutex) {
Monitor.Pulse (mutex);
}
}, null);
// Subscribe blocks on 'mutex' until the callback above pulses (or times out),
// so 'exception' is safely published before the check below.
Subscribe (root, prefix);
if (exception != null) {
throw exception;
}
}
}
}
// Verifies that after the initial (full) event report, updating one variable produces
// a follow-up report containing only that variable.
[Test]
public void SingleUpdateUnicastEventTest ()
{
var eventer1 = new DummyStateVariableEventer ();
var eventer2 = new DummyStateVariableEventer ();
var root = CreateRoot (CreateServiceController (
new StateVariable ("Foo", "string", new StateVariableOptions { Eventer = eventer1 }),
new StateVariable ("Bar", "string", new StateVariableOptions { Eventer = eventer2 })
));
eventer1.SetValue ("foo");
eventer2.SetValue ("bar");
using (var server = new Server (root)) {
server.Start ();
var prefix = GeneratePrefix ();
using (var listener = new HttpListener ()) {
listener.Prefixes.Add (prefix);
listener.Start ();
Exception exception = null;
// First callback: expect the initial two-variable report, then re-arm the
// listener and trigger a single-variable update.
listener.BeginGetContext (result => {
try {
var context = listener.EndGetContext (result);
using (var reader = new StreamReader (context.Request.InputStream)) {
Assert.AreEqual (Xml.DoubleEventReport, reader.ReadToEnd ());
}
context.Response.Close ();
listener.BeginGetContext (r => {
try {
var c = listener.EndGetContext (r);
using (var reader = new StreamReader (c.Request.InputStream)) {
Assert.AreEqual (Xml.SingleEventReport, reader.ReadToEnd ());
}
c.Response.Close ();
} catch (Exception e) {
exception = e;
}
lock (mutex) {
Monitor.Pulse (mutex);
}
}, null);
} catch (Exception e) {
exception = e;
lock (mutex) {
Monitor.Pulse (mutex);
}
}
eventer1.SetValue ("foo");
}, null);
Subscribe (root, prefix);
if (exception != null) {
throw exception;
}
}
}
}
// Verifies a chain of three event reports: the initial full report, a single-variable
// update, then a two-variable update.
[Test]
public void MultipleUpdateUnicastEventTest ()
{
var eventer1 = new DummyStateVariableEventer ();
var eventer2 = new DummyStateVariableEventer ();
var root = CreateRoot (CreateServiceController (
new StateVariable ("Foo", "string", new StateVariableOptions { Eventer = eventer1 }),
new StateVariable ("Bar", "string", new StateVariableOptions { Eventer = eventer2 })
));
eventer1.SetValue ("foo");
eventer2.SetValue ("bar");
using (var server = new Server (root)) {
server.Start ();
var prefix = GeneratePrefix ();
using (var listener = new HttpListener ()) {
listener.Prefixes.Add (prefix);
listener.Start ();
Exception exception = null;
listener.BeginGetContext (result => {
try {
var context = listener.EndGetContext (result);
using (var reader = new StreamReader (context.Request.InputStream)) {
Assert.AreEqual (Xml.DoubleEventReport, reader.ReadToEnd ());
}
context.Response.Close ();
listener.BeginGetContext (resp => {
try {
var con = listener.EndGetContext (resp);
using (var reader = new StreamReader (con.Request.InputStream)) {
Assert.AreEqual (Xml.SingleEventReport, reader.ReadToEnd ());
}
con.Response.Close ();
listener.BeginGetContext (r => {
try {
var c = listener.EndGetContext (r);
using (var reader = new StreamReader (c.Request.InputStream)) {
Assert.AreEqual (Xml.DoubleEventReport, reader.ReadToEnd ());
}
c.Response.Close ();
} catch (Exception e) {
exception = e;
}
lock (mutex) {
Monitor.Pulse (mutex);
}
}, null);
// Update both variables to provoke the final double report.
eventer1.SetValue ("foo");
eventer2.SetValue ("bar");
} catch (Exception e) {
exception = e;
lock (mutex) {
Monitor.Pulse (mutex);
}
}
}, null);
eventer1.SetValue ("foo");
} catch (Exception e) {
exception = e;
lock (mutex) {
Monitor.Pulse (mutex);
}
}
}, null);
Subscribe (root, prefix);
if (exception != null) {
throw exception;
}
}
}
}
// Issues a SUBSCRIBE request with the given callback prefix, asserts the server grants
// the subscription (SID present, 30-minute timeout), then blocks on 'mutex' until the
// event-delivery callback pulses or 30 seconds elapse.
void Subscribe (Root root, string prefix)
{
var request = WebRequest.Create (new Uri (root.UrlBase, "/service/0/event/"));
request.Method = "SUBSCRIBE";
request.Headers.Add ("CALLBACK", string.Format ("<{0}>", prefix));
request.Headers.Add ("NT", "upnp:event");
lock (mutex) {
using (var response = (HttpWebResponse)request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
Assert.IsNotNull (response.Headers["SID"]);
Assert.AreEqual ("Second-1800", response.Headers["TIMEOUT"]);
}
if (!Monitor.Wait (mutex, TimeSpan.FromSeconds (30))) {
Assert.Fail ("Event publishing timed out.");
}
}
}
// Verifies that after an UNSUBSCRIBE the server stops delivering event reports:
// the test waits 10 seconds and fails if any further delivery pulses the mutex.
[Test]
public void UnsubscribeUnicastEventTest ()
{
string sid = null;
var eventer = new DummyStateVariableEventer ();
var root = CreateRoot (CreateServiceController (new StateVariable ("Foo", "string", new StateVariableOptions { Eventer = eventer })));
eventer.SetValue ("foo");
using (var server = new Server (root)) {
server.Start ();
var prefix = GeneratePrefix ();
var url = new Uri (root.UrlBase, "/service/0/event/");
using (var listener = new HttpListener ()) {
listener.Prefixes.Add (prefix);
listener.Start ();
Exception exception = null;
listener.BeginGetContext (result => {
lock (mutex) {
try {
var context = listener.EndGetContext (result);
using (var reader = new StreamReader (context.Request.InputStream)) {
Assert.AreEqual (Xml.SingleEventReport, reader.ReadToEnd ());
}
context.Response.Close ();
// Unsubscribe using the SID captured from the SUBSCRIBE response below.
var unsub_request = WebRequest.Create (url);
unsub_request.Method = "UNSUBSCRIBE";
unsub_request.Headers.Add ("SID", sid);
using (var response = (HttpWebResponse)unsub_request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
}
// Any request arriving after the unsubscribe pulses the mutex, which
// the waiting test thread treats as a failure.
listener.BeginGetContext (r => {
lock (mutex) {
Monitor.Pulse (mutex);
}
}, null);
eventer.SetValue ("foo");
} catch (Exception e) {
exception = e;
Monitor.Pulse (mutex);
}
}
}, null);
var request = WebRequest.Create (url);
request.Method = "SUBSCRIBE";
request.Headers.Add ("CALLBACK", string.Format ("<{0}>", prefix));
request.Headers.Add ("NT", "upnp:event");
lock (mutex) {
using (var response = (HttpWebResponse)request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
Assert.IsNotNull (response.Headers["SID"]);
sid = response.Headers["SID"];
}
// Inverted wait: a pulse within 10 seconds means the server evented an
// unsubscribed client.
if (Monitor.Wait (mutex, TimeSpan.FromSeconds (10))) {
Assert.Fail ("The event server sent updates to an unsubscribed client.");
}
}
if (exception != null) {
throw exception;
}
}
}
}
static ServiceController CreateServiceController ()
{
return CreateServiceController (null);
}
static ServiceController CreateServiceController (StateVariable stateVariable)
{
return CreateServiceController (stateVariable, null);
}
// Builds a controller with one "Foo" action that echoes its "bar" argument, plus the
// two related argument state variables and the (possibly null) evented variables.
static ServiceController CreateServiceController (StateVariable stateVariable1, StateVariable stateVariable2)
{
return new ServiceController (
new[] {
new ServiceAction (
"Foo",
new[] {
new Argument ("bar", "X_ARG_bar", ArgumentDirection.In),
new Argument ("result", "X_ARG_result", ArgumentDirection.Out)
},
arguments => {
var out_arguments = new Dictionary<string, string> (1);
out_arguments["result"] = string.Format ("You said {0}", arguments["bar"]);
return out_arguments;
}
)
},
new[] {
new StateVariable ("X_ARG_bar", "string"),
new StateVariable ("X_ARG_result", "string"),
stateVariable1,
stateVariable2
}
);
}
// Verifies the SOAP control endpoint: POSTs a SOAP request invoking "Foo" and checks
// the echoed result in the response body.
[Test]
public void ControlTest ()
{
var root = CreateRoot (CreateServiceController ());
using (var server = new Server (root)) {
server.Start ();
var request = (HttpWebRequest)WebRequest.Create (new Uri (root.UrlBase, "/service/0/control/"));
request.Method = "POST";
request.Headers.Add ("SOAPACTION", "urn:schemas-upnp-org:service:mono-upnp-test-service:1#Foo");
request.ContentType = @"text/xml; charset=""utf-8""";
var bytes = System.Text.Encoding.UTF8.GetBytes (Xml.SimpleSoapRequest);
using (var stream = request.GetRequestStream ()) {
stream.Write (bytes, 0, bytes.Length);
}
using (var response = (HttpWebResponse)request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
using (var stream = response.GetResponseStream ()) {
using (var reader = XmlReader.Create (stream)) {
reader.ReadToFollowing ("result");
Assert.AreEqual ("You said hello world!", reader.ReadElementContentAsString ());
}
}
}
}
}
// Root with a single service wrapping the given controller.
static Root CreateRoot (ServiceController controller)
{
return new DummyRoot (
new DeviceType ("schemas-upnp-org", "mono-upnp-tests-device", new Version (1, 0)),
"uuid:d1",
"Mono.Upnp.Tests Device",
"Mono Project",
"Device",
new DeviceOptions {
Services = new[] {
new Service (
new ServiceType ("schemas-upnp-org", "mono-upnp-test-service", new Version (1, 0)),
"urn:upnp-org:serviceId:testService1",
controller
)
}
}
);
}
static Root CreateRoot ()
{
return CreateRoot (null, null);
}
// Root with two services, one embedded device (also with two services), and optional
// icon sets for the root and embedded devices.
static Root CreateRoot (IEnumerable<Icon> icons1, IEnumerable<Icon> icons2)
{
return new DummyRoot (
new DeviceType ("schemas-upnp-org", "mono-upnp-tests-device", new Version (1, 0)),
"uuid:d1",
"Mono.Upnp.Tests Device",
"Mono Project",
"Device",
new DeviceOptions {
Icons = icons1,
Services = new[] {
new DummyService (new ServiceType ("schemas-upnp-org", "mono-upnp-test-service", new Version (1, 0)), "urn:upnp-org:serviceId:testService1"),
new DummyService (new ServiceType ("schemas-upnp-org", "mono-upnp-test-service", new Version (2, 0)), "urn:upnp-org:serviceId:testService2"),
},
EmbeddedDevices = new[] {
new Device (
new DeviceType ("schemas-upnp-org", "mono-upnp-tests-embedded-device", new Version (1, 0)),
"uuid:ed1",
"Mono.Upnp.Tests Embedded Device",
"Mono Project",
"Embedded Device",
new DeviceOptions {
Icons = icons2,
Services = new[] {
new DummyService (new ServiceType ("schemas-upnp-org", "mono-upnp-test-service", new Version (1, 0)), "urn:upnp-org:serviceId:testService1"),
new DummyService (new ServiceType ("schemas-upnp-org", "mono-upnp-test-service", new Version (2, 0)), "urn:upnp-org:serviceId:testService2"),
}
}
)
}
}
);
}
// Verifies the device description document served at the root URL round-trips through
// the deserializer and matches the in-memory root.
[Test]
public void DescriptionTest ()
{
var root = CreateRoot ();
using (var server = new Server (root)) {
server.Start ();
var request = WebRequest.Create (root.UrlBase);
using (var response = (HttpWebResponse)request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
using (var reader = XmlReader.Create (response.GetResponseStream ())) {
var target_root = deserializer.DeserializeRoot (reader, root.UrlBase);
DeviceDescriptionTests.AssertEquality (root, target_root);
}
}
}
}
// Verifies the service description (SCPD) endpoint round-trips the controller.
[Test]
public void ScpdTest ()
{
var controller = CreateServiceController ();
var root = new DummyRoot (
new DeviceType ("schemas-upnp-org", "mono-upnp-tests-device", new Version (1, 0)),
"uuid:d1",
"Mono.Upnp.Tests Device",
"Mono Project",
"Device",
new DeviceOptions {
Services = new[] {
new Service (
new ServiceType ("uschemas-upnp-org", "mono-upnp-test-service", new Version (1, 0)),
"urn:upnp-org:serviceId:testService1",
controller
)
}
}
);
using (var server = new Server (root)) {
server.Start ();
var request = WebRequest.Create (new Uri (root.UrlBase, "/service/0/scpd/"));
using (var response = (HttpWebResponse)request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
using (var reader = XmlReader.Create (response.GetResponseStream ())) {
var target_controller = deserializer.DeserializeServiceController (reader);
ServiceDescriptionTests.AssertEquality (controller, target_controller);
}
}
}
}
// Verifies each icon endpoint (root device and embedded device) serves the exact
// one-byte payload it was constructed with.
[Test]
public void IconTest ()
{
var root = CreateRoot (
new[] {
new Icon (100, 100, 32, "image/jpeg", new byte[] { 0 }),
new Icon (100, 100, 32, "image/png", new byte[] { 1 })
},
new[] {
new Icon (100, 100, 32, "image/jpeg", new byte[] { 2 }),
new Icon (100, 100, 32, "image/png", new byte[] { 3 })
}
);
using (var server = new Server (root)) {
server.Start ();
var url = new Uri (root.UrlBase, "/icon/");
AssertEquality (url, 0, 0);
AssertEquality (url, 1, 1);
url = new Uri (root.UrlBase, "/device/0/icon/");
AssertEquality (url, 0, 2);
AssertEquality (url, 1, 3);
}
}
// Fetches icon <iconIndex> under <url> and asserts its body is exactly one byte equal
// to <iconValue>.
static void AssertEquality (Uri url, int iconIndex, int iconValue)
{
var request = WebRequest.Create (new Uri (url, iconIndex.ToString ()));
using (var response = (HttpWebResponse)request.GetResponse ()) {
Assert.AreEqual (HttpStatusCode.OK, response.StatusCode);
using (var stream = response.GetResponseStream ()) {
Assert.AreEqual (iconValue, stream.ReadByte ());
Assert.AreEqual (-1, stream.ReadByte ());
}
}
}
// Verifies that starting the server broadcasts the full expected set of SSDP
// announcements (root device, devices by UUID and type, and every service type).
[Test]
public void AnnouncementTest ()
{
using (var server = new Server (CreateRoot ())) {
using (var client = new Mono.Ssdp.Client ()) {
var announcements = new Dictionary<string,string> ();
announcements.Add ("upnp:rootdevice/uuid:d1::upnp:rootdevice", null);
announcements.Add ("uuid:d1/uuid:d1", null);
announcements.Add ("urn:schemas-upnp-org:device:mono-upnp-tests-device:1/uuid:d1::urn:schemas-upnp-org:device:mono-upnp-tests-device:1", null);
announcements.Add ("uuid:ed1/uuid:ed1", null);
announcements.Add ("urn:schemas-upnp-org:device:mono-upnp-tests-embedded-device:1/uuid:ed1::urn:schemas-upnp-org:device:mono-upnp-tests-embedded-device:1", null);
announcements.Add ("urn:schemas-upnp-org:service:mono-upnp-test-service:1/uuid:d1::urn:schemas-upnp-org:service:mono-upnp-test-service:1", null);
announcements.Add ("urn:schemas-upnp-org:service:mono-upnp-test-service:2/uuid:d1::urn:schemas-upnp-org:service:mono-upnp-test-service:2", null);
announcements.Add ("urn:schemas-upnp-org:service:mono-upnp-test-service:1/uuid:ed1::urn:schemas-upnp-org:service:mono-upnp-test-service:1", null);
announcements.Add ("urn:schemas-upnp-org:service:mono-upnp-test-service:2/uuid:ed1::urn:schemas-upnp-org:service:mono-upnp-test-service:2", null);
client.ServiceAdded += (obj, args) => {
lock (mutex) {
Assert.AreEqual (ServiceOperation.Added, args.Operation);
var announcement = string.Format ("{0}/{1}", args.Service.ServiceType, args.Service.Usn);
// Tick off each expected announcement; pulse once all have been seen.
if (announcements.ContainsKey (announcement)) {
announcements.Remove (announcement);
}
if (announcements.Count == 0) {
Monitor.Pulse (mutex);
}
}
};
client.BrowseAll ();
lock (mutex) {
server.Start ();
if (!Monitor.Wait (mutex, TimeSpan.FromSeconds (30))) {
Assert.Fail ("The UPnP server announcement timed out.");
}
}
}
}
}
// Builds a callback URL on the first local IPv4 address using a random port from the
// configured range; returns null if no IPv4 address is found.
static string GeneratePrefix ()
{
var conf = UpnpConfigurationSection.Current.Http;
foreach (var address in Dns.GetHostAddresses (Dns.GetHostName ())) {
if (address.AddressFamily == AddressFamily.InterNetwork) {
return string.Format (
"http://{0}:{1}/mono-upnp-tests/event-subscriber/", address, random.Next (conf.PortMin, conf.PortMax));
}
}
return null;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------
namespace System.Diagnostics
{
// Reference-assembly stub: declares the public API surface of CounterCreationData only;
// member bodies intentionally throw/no-op. Changes must follow the API review process.
public partial class CounterCreationData
{
public CounterCreationData() { }
public CounterCreationData(string counterName, string counterHelp, System.Diagnostics.PerformanceCounterType counterType) { }
public string CounterHelp { get { throw null; } set { } }
public string CounterName { get { throw null; } set { } }
public System.Diagnostics.PerformanceCounterType CounterType { get { throw null; } set { } }
}
// Reference-assembly stub: public API surface of CounterCreationDataCollection, a
// strongly-typed CollectionBase of CounterCreationData entries. Bodies intentionally empty.
public partial class CounterCreationDataCollection : System.Collections.CollectionBase
{
public CounterCreationDataCollection() { }
public CounterCreationDataCollection(System.Diagnostics.CounterCreationDataCollection value) { }
public CounterCreationDataCollection(System.Diagnostics.CounterCreationData[] value) { }
public System.Diagnostics.CounterCreationData this[int index] { get { throw null; } set { } }
public int Add(System.Diagnostics.CounterCreationData value) { throw null; }
public void AddRange(System.Diagnostics.CounterCreationDataCollection value) { }
public void AddRange(System.Diagnostics.CounterCreationData[] value) { }
public bool Contains(System.Diagnostics.CounterCreationData value) { throw null; }
public void CopyTo(System.Diagnostics.CounterCreationData[] array, int index) { }
public int IndexOf(System.Diagnostics.CounterCreationData value) { throw null; }
public void Insert(int index, System.Diagnostics.CounterCreationData value) { }
protected override void OnValidate(object value) { }
public virtual void Remove(System.Diagnostics.CounterCreationData value) { }
}
// Reference-assembly stub: public API surface of the CounterSample value type,
// including equality members and operators. Bodies intentionally throw.
public readonly partial struct CounterSample
{
private readonly int _dummyPrimitive;
public static System.Diagnostics.CounterSample Empty;
public CounterSample(long rawValue, long baseValue, long counterFrequency, long systemFrequency, long timeStamp, long timeStamp100nSec, System.Diagnostics.PerformanceCounterType counterType) { throw null; }
public CounterSample(long rawValue, long baseValue, long counterFrequency, long systemFrequency, long timeStamp, long timeStamp100nSec, System.Diagnostics.PerformanceCounterType counterType, long counterTimeStamp) { throw null; }
public long BaseValue { get { throw null; } }
public long CounterFrequency { get { throw null; } }
public long CounterTimeStamp { get { throw null; } }
public System.Diagnostics.PerformanceCounterType CounterType { get { throw null; } }
public long RawValue { get { throw null; } }
public long SystemFrequency { get { throw null; } }
public long TimeStamp { get { throw null; } }
public long TimeStamp100nSec { get { throw null; } }
public static float Calculate(System.Diagnostics.CounterSample counterSample) { throw null; }
public static float Calculate(System.Diagnostics.CounterSample counterSample, System.Diagnostics.CounterSample nextCounterSample) { throw null; }
public bool Equals(System.Diagnostics.CounterSample sample) { throw null; }
public override bool Equals(object o) { throw null; }
public override int GetHashCode() { throw null; }
public static bool operator ==(System.Diagnostics.CounterSample a, System.Diagnostics.CounterSample b) { throw null; }
public static bool operator !=(System.Diagnostics.CounterSample a, System.Diagnostics.CounterSample b) { throw null; }
}
// Reference-assembly stub: public API surface of CounterSampleCalculator.
public static partial class CounterSampleCalculator
{
public static float ComputeCounterValue(System.Diagnostics.CounterSample newSample) { throw null; }
public static float ComputeCounterValue(System.Diagnostics.CounterSample oldSample, System.Diagnostics.CounterSample newSample) { throw null; }
}
// Reference-assembly stub: COM (IUnknown-based) interface surface for custom
// performance-data collection.
[System.Runtime.InteropServices.InterfaceTypeAttribute(System.Runtime.InteropServices.ComInterfaceType.InterfaceIsIUnknown)]
public partial interface ICollectData
{
void CloseData();
void CollectData(int id, System.IntPtr valueName, System.IntPtr data, int totalBytes, out System.IntPtr res);
}
// Reference-assembly stub: public API surface of InstanceData.
public partial class InstanceData
{
public InstanceData(string instanceName, System.Diagnostics.CounterSample sample) { }
public string InstanceName { get { throw null; } }
public long RawValue { get { throw null; } }
public System.Diagnostics.CounterSample Sample { get { throw null; } }
}
// Reference-assembly stub: public API surface of InstanceDataCollection (a
// DictionaryBase keyed by instance name); obsolete attribute preserved verbatim.
public partial class InstanceDataCollection : System.Collections.DictionaryBase
{
[System.ObsoleteAttribute("This constructor has been deprecated. Please use System.Diagnostics.InstanceDataCollectionCollection.get_Item to get an instance of this collection instead. https://go.microsoft.com/fwlink/?linkid=14202")]
public InstanceDataCollection(string counterName) { }
public string CounterName { get { throw null; } }
public System.Diagnostics.InstanceData this[string instanceName] { get { throw null; } }
public System.Collections.ICollection Keys { get { throw null; } }
public System.Collections.ICollection Values { get { throw null; } }
public bool Contains(string instanceName) { throw null; }
public void CopyTo(System.Diagnostics.InstanceData[] instances, int index) { }
}
// Reference-assembly stub: public API surface of InstanceDataCollectionCollection (a
// DictionaryBase keyed by counter name); obsolete attribute preserved verbatim.
public partial class InstanceDataCollectionCollection : System.Collections.DictionaryBase
{
[System.ObsoleteAttribute("This constructor has been deprecated. Please use System.Diagnostics.PerformanceCounterCategory.ReadCategory() to get an instance of this collection instead. https://go.microsoft.com/fwlink/?linkid=14202")]
public InstanceDataCollectionCollection() { }
public System.Diagnostics.InstanceDataCollection this[string counterName] { get { throw null; } }
public System.Collections.ICollection Keys { get { throw null; } }
public System.Collections.ICollection Values { get { throw null; } }
public bool Contains(string counterName) { throw null; }
public void CopyTo(System.Diagnostics.InstanceDataCollection[] counters, int index) { }
}
// Reference-assembly stub: public API surface of the PerformanceCounter component.
// Bodies intentionally throw/no-op; obsolete field preserved verbatim.
public sealed partial class PerformanceCounter : System.ComponentModel.Component, System.ComponentModel.ISupportInitialize
{
[System.ObsoleteAttribute("This field has been deprecated and is not used. Use machine.config or an application configuration file to set the size of the PerformanceCounter file mapping.")]
public static int DefaultFileMappingSize;
public PerformanceCounter() { }
public PerformanceCounter(string categoryName, string counterName) { }
public PerformanceCounter(string categoryName, string counterName, bool readOnly) { }
public PerformanceCounter(string categoryName, string counterName, string instanceName) { }
public PerformanceCounter(string categoryName, string counterName, string instanceName, bool readOnly) { }
public PerformanceCounter(string categoryName, string counterName, string instanceName, string machineName) { }
public string CategoryName { get { throw null; } set { } }
public string CounterHelp { get { throw null; } }
public string CounterName { get { throw null; } set { } }
public System.Diagnostics.PerformanceCounterType CounterType { get { throw null; } }
public System.Diagnostics.PerformanceCounterInstanceLifetime InstanceLifetime { get { throw null; } set { } }
public string InstanceName { get { throw null; } set { } }
public string MachineName { get { throw null; } set { } }
public long RawValue { get { throw null; } set { } }
public bool ReadOnly { get { throw null; } set { } }
public void BeginInit() { }
public void Close() { }
public static void CloseSharedResources() { }
public long Decrement() { throw null; }
protected override void Dispose(bool disposing) { }
public void EndInit() { }
public long Increment() { throw null; }
public long IncrementBy(long value) { throw null; }
public System.Diagnostics.CounterSample NextSample() { throw null; }
public float NextValue() { throw null; }
public void RemoveInstance() { }
}
// Reference-assembly stub: public API surface of PerformanceCounterCategory.
// Bodies intentionally throw/no-op; obsolete overloads preserved verbatim.
public sealed partial class PerformanceCounterCategory
{
public PerformanceCounterCategory() { }
public PerformanceCounterCategory(string categoryName) { }
public PerformanceCounterCategory(string categoryName, string machineName) { }
public string CategoryHelp { get { throw null; } }
public string CategoryName { get { throw null; } set { } }
public System.Diagnostics.PerformanceCounterCategoryType CategoryType { get { throw null; } }
public string MachineName { get { throw null; } set { } }
public bool CounterExists(string counterName) { throw null; }
public static bool CounterExists(string counterName, string categoryName) { throw null; }
public static bool CounterExists(string counterName, string categoryName, string machineName) { throw null; }
[System.ObsoleteAttribute("This method has been deprecated. Please use System.Diagnostics.PerformanceCounterCategory.Create(string categoryName, string categoryHelp, PerformanceCounterCategoryType categoryType, CounterCreationDataCollection counterData) instead. https://go.microsoft.com/fwlink/?linkid=14202")]
public static System.Diagnostics.PerformanceCounterCategory Create(string categoryName, string categoryHelp, System.Diagnostics.CounterCreationDataCollection counterData) { throw null; }
public static System.Diagnostics.PerformanceCounterCategory Create(string categoryName, string categoryHelp, System.Diagnostics.PerformanceCounterCategoryType categoryType, System.Diagnostics.CounterCreationDataCollection counterData) { throw null; }
public static System.Diagnostics.PerformanceCounterCategory Create(string categoryName, string categoryHelp, System.Diagnostics.PerformanceCounterCategoryType categoryType, string counterName, string counterHelp) { throw null; }
[System.ObsoleteAttribute("This method has been deprecated. Please use System.Diagnostics.PerformanceCounterCategory.Create(string categoryName, string categoryHelp, PerformanceCounterCategoryType categoryType, string counterName, string counterHelp) instead. https://go.microsoft.com/fwlink/?linkid=14202")]
public static System.Diagnostics.PerformanceCounterCategory Create(string categoryName, string categoryHelp, string counterName, string counterHelp) { throw null; }
public static void Delete(string categoryName) { }
public static bool Exists(string categoryName) { throw null; }
public static bool Exists(string categoryName, string machineName) { throw null; }
public static System.Diagnostics.PerformanceCounterCategory[] GetCategories() { throw null; }
public static System.Diagnostics.PerformanceCounterCategory[] GetCategories(string machineName) { throw null; }
public System.Diagnostics.PerformanceCounter[] GetCounters() { throw null; }
public System.Diagnostics.PerformanceCounter[] GetCounters(string instanceName) { throw null; }
public string[] GetInstanceNames() { throw null; }
public bool InstanceExists(string instanceName) { throw null; }
public static bool InstanceExists(string instanceName, string categoryName) { throw null; }
public static bool InstanceExists(string instanceName, string categoryName, string machineName) { throw null; }
public System.Diagnostics.InstanceDataCollectionCollection ReadCategory() { throw null; }
}
/// <summary>Indicates whether a performance counter category supports one or many instances.
/// Reference-assembly stub: values only, no behavior.</summary>
public enum PerformanceCounterCategoryType
{
Unknown = -1,
SingleInstance = 0,
MultiInstance = 1,
}
/// <summary>Lifetime of a performance counter instance (global vs. tied to a process).
/// Reference-assembly stub: values only, no behavior.</summary>
public enum PerformanceCounterInstanceLifetime
{
Global = 0,
Process = 1,
}
/// <summary>Deprecated counter-collection hook (explicit ICollectData implementation).
/// Reference-assembly stub: every member either no-ops or does `throw null` at runtime.</summary>
public sealed partial class PerformanceCounterManager : System.Diagnostics.ICollectData
{
[System.ObsoleteAttribute("This class has been deprecated. Use the PerformanceCounters through the System.Diagnostics.PerformanceCounter class instead. https://go.microsoft.com/fwlink/?linkid=14202")]
public PerformanceCounterManager() { }
[System.ObsoleteAttribute("This class has been deprecated. Use the PerformanceCounters through the System.Diagnostics.PerformanceCounter class instead. https://go.microsoft.com/fwlink/?linkid=14202")]
void System.Diagnostics.ICollectData.CloseData() { }
[System.ObsoleteAttribute("This class has been deprecated. Use the PerformanceCounters through the System.Diagnostics.PerformanceCounter class instead. https://go.microsoft.com/fwlink/?linkid=14202")]
void System.Diagnostics.ICollectData.CollectData(int callIdx, System.IntPtr valueNamePtr, System.IntPtr dataPtr, int totalBytes, out System.IntPtr res) { throw null; }
}
/// <summary>Windows performance-counter type codes (PERF_* constants packed into one value).
/// Reference-assembly stub: values only, no behavior.</summary>
public enum PerformanceCounterType
{
NumberOfItemsHEX32 = 0,
NumberOfItemsHEX64 = 256,
NumberOfItems32 = 65536,
NumberOfItems64 = 65792,
CounterDelta32 = 4195328,
CounterDelta64 = 4195584,
SampleCounter = 4260864,
CountPerTimeInterval32 = 4523008,
CountPerTimeInterval64 = 4523264,
RateOfCountsPerSecond32 = 272696320,
RateOfCountsPerSecond64 = 272696576,
RawFraction = 537003008,
CounterTimer = 541132032,
Timer100Ns = 542180608,
SampleFraction = 549585920,
CounterTimerInverse = 557909248,
Timer100NsInverse = 558957824,
CounterMultiTimer = 574686464,
CounterMultiTimer100Ns = 575735040,
CounterMultiTimerInverse = 591463680,
CounterMultiTimer100NsInverse = 592512256,
AverageTimer32 = 805438464,
ElapsedTime = 807666944,
AverageCount64 = 1073874176,
SampleBase = 1073939457,
AverageBase = 1073939458,
RawBase = 1073939459,
CounterMultiBase = 1107494144,
}
}
namespace System.Diagnostics.PerformanceData
{
/// <summary>A single counter value inside a counter-set instance.
/// Reference-assembly stub: getters `throw null` at runtime; setters/mutators no-op.</summary>
public sealed partial class CounterData
{
internal CounterData() { }
public long RawValue { get { throw null; } set { } }
public long Value { get { throw null; } set { } }
public void Decrement() { }
public void Increment() { }
public void IncrementBy(long value) { }
}
/// <summary>A provider-defined set of counters; instances are created per-name.
/// Reference-assembly stub: members no-op or `throw null` at runtime.</summary>
public partial class CounterSet : System.IDisposable
{
public CounterSet(System.Guid providerGuid, System.Guid counterSetGuid, System.Diagnostics.PerformanceData.CounterSetInstanceType instanceType) { }
public void AddCounter(int counterId, System.Diagnostics.PerformanceData.CounterType counterType) { }
public void AddCounter(int counterId, System.Diagnostics.PerformanceData.CounterType counterType, string counterName) { }
public System.Diagnostics.PerformanceData.CounterSetInstance CreateCounterSetInstance(string instanceName) { throw null; }
public void Dispose() { }
protected virtual void Dispose(bool disposing) { }
~CounterSet() { }
}
/// <summary>One named instance of a CounterSet, exposing its counter data.
/// Reference-assembly stub: members no-op or `throw null` at runtime.</summary>
public sealed partial class CounterSetInstance : System.IDisposable
{
internal CounterSetInstance() { }
public System.Diagnostics.PerformanceData.CounterSetInstanceCounterDataSet Counters { get { throw null; } }
public void Dispose() { }
~CounterSetInstance() { }
}
/// <summary>Indexer access (by counter id or name) to the CounterData of an instance.
/// Reference-assembly stub: members no-op or `throw null` at runtime.</summary>
public sealed partial class CounterSetInstanceCounterDataSet : System.IDisposable
{
internal CounterSetInstanceCounterDataSet() { }
public System.Diagnostics.PerformanceData.CounterData this[int counterId] { get { throw null; } }
public System.Diagnostics.PerformanceData.CounterData this[string counterName] { get { throw null; } }
public void Dispose() { }
~CounterSetInstanceCounterDataSet() { }
}
/// <summary>Instance model of a counter set (single, multiple, aggregated variants).
/// Reference-assembly stub: values only, no behavior.</summary>
public enum CounterSetInstanceType
{
Single = 0,
Multiple = 2,
GlobalAggregate = 4,
MultipleAggregate = 6,
GlobalAggregateWithHistory = 11,
InstanceAggregate = 22,
}
/// <summary>Counter type codes for the PerformanceData (V2) provider API.
/// Reference-assembly stub: values only, no behavior.</summary>
public enum CounterType
{
RawDataHex32 = 0,
RawDataHex64 = 256,
RawData32 = 65536,
RawData64 = 65792,
Delta32 = 4195328,
Delta64 = 4195584,
SampleCounter = 4260864,
QueueLength = 4523008,
LargeQueueLength = 4523264,
QueueLength100Ns = 5571840,
QueueLengthObjectTime = 6620416,
RateOfCountPerSecond32 = 272696320,
RateOfCountPerSecond64 = 272696576,
RawFraction32 = 537003008,
RawFraction64 = 537003264,
PercentageActive = 541132032,
PrecisionSystemTimer = 541525248,
PercentageActive100Ns = 542180608,
PrecisionTimer100Ns = 542573824,
ObjectSpecificTimer = 543229184,
PrecisionObjectSpecificTimer = 543622400,
SampleFraction = 549585920,
PercentageNotActive = 557909248,
PercentageNotActive100Ns = 558957824,
MultiTimerPercentageActive = 574686464,
MultiTimerPercentageActive100Ns = 575735040,
MultiTimerPercentageNotActive = 591463680,
MultiTimerPercentageNotActive100Ns = 592512256,
AverageTimer32 = 805438464,
ElapsedTime = 807666944,
AverageCount64 = 1073874176,
SampleBase = 1073939457,
AverageBase = 1073939458,
RawBase32 = 1073939459,
RawBase64 = 1073939712,
MultiTimerBase = 1107494144,
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Xml;
using System.IO;
using System.Collections;
using System.Diagnostics;
using System.Text;
using OLEDB.Test.ModuleCore;
namespace System.Xml.XmlDiff
{
/// <summary>Relative document-order position of one diff node with respect to another,
/// as returned by the ComparePosition helpers.</summary>
public enum NodePosition
{
Before = 0,
After = 1,
Unknown = 2,
Same = 3
}
/// <summary>Node kinds in the XmlDiff tree. The numeric order matters:
/// XmlDiffDocument.ComparePosition sorts nodes of different kinds by this value.</summary>
public enum XmlDiffNodeType
{
Element = 0,
Attribute = 1,
ER = 2,      // entity reference
Text = 3,
CData = 4,
Comment = 5,
PI = 6,      // processing instruction
WS = 7,      // significant whitespace
Document = 8
}
/// <summary>
/// Null-object implementation of <see cref="IXmlLineInfo"/>: reports no line
/// information. Use <see cref="GetPositionInfo"/> to obtain a real wrapper when
/// the underlying object can supply line data.
/// </summary>
internal class PositionInfo : IXmlLineInfo
{
    public virtual bool HasLineInfo() { return false; }
    public virtual int LineNumber { get { return 0; } }
    public virtual int LinePosition { get { return 0; } }

    /// <summary>
    /// Returns a ReaderPositionInfo wrapping <paramref name="o"/> when it exposes
    /// usable line information; otherwise a no-op PositionInfo.
    /// </summary>
    public static PositionInfo GetPositionInfo(object o)
    {
        IXmlLineInfo source = o as IXmlLineInfo;
        bool usable = source != null && source.HasLineInfo();
        return usable ? (PositionInfo)new ReaderPositionInfo(source) : new PositionInfo();
    }
}
/// <summary>
/// PositionInfo backed by a live <see cref="IXmlLineInfo"/> (typically an XmlReader),
/// forwarding line and column queries to it.
/// </summary>
internal class ReaderPositionInfo : PositionInfo
{
    // Underlying provider of line data; assigned once at construction.
    private IXmlLineInfo _mlineInfo;

    public ReaderPositionInfo(IXmlLineInfo lineInfo) { _mlineInfo = lineInfo; }

    public override bool HasLineInfo() { return true; }

    public override int LineNumber { get { return _mlineInfo.LineNumber; } }

    public override int LinePosition { get { return _mlineInfo.LinePosition; } }
}
/// <summary>
/// Root of the diff tree. Loads an XmlReader into XmlDiffNode children while applying
/// the configured normalizations (ignore child/attribute order, comments, whitespace,
/// DTD, namespaces, prefixes; treat CDATA as text; normalize newlines). When an
/// "ignore order" flag is set, children/attributes are inserted in a canonical sort
/// order defined by the Compare* helpers below.
/// </summary>
public class XmlDiffDocument : XmlDiffNode
{
private bool _bLoaded;
private bool _bIgnoreAttributeOrder;
private bool _bIgnoreChildOrder;
private bool _bIgnoreComments;
private bool _bIgnoreWhitespace;
private bool _bIgnoreDTD;
private bool _bIgnoreNS;
private bool _bIgnorePrefix;
private bool _bCDataAsText;
private bool _bNormalizeNewline;
// Name table captured from the reader after Load; used by XmlDiffNavigator.Name.
public XmlNameTable nameTable;
public XmlDiffDocument()
: base()
{
// _bIgnoreNS, _bIgnorePrefix and _bNormalizeNewline are left at the CLR default (false).
_bLoaded = false;
_bIgnoreAttributeOrder = false;
_bIgnoreChildOrder = false;
_bIgnoreComments = false;
_bIgnoreWhitespace = false;
_bIgnoreDTD = false;
_bCDataAsText = false;
}
/// <summary>Write-only convenience: decomposes an XmlDiffOption bitmask into the individual flags.</summary>
public XmlDiffOption Option
{
set
{
this.IgnoreAttributeOrder = (((int)value & (int)(XmlDiffOption.IgnoreAttributeOrder)) > 0);
this.IgnoreChildOrder = (((int)value & (int)(XmlDiffOption.IgnoreChildOrder)) > 0);
this.IgnoreComments = (((int)value & (int)(XmlDiffOption.IgnoreComments)) > 0);
this.IgnoreWhitespace = (((int)value & (int)(XmlDiffOption.IgnoreWhitespace)) > 0);
this.IgnoreDTD = (((int)value & (int)(XmlDiffOption.IgnoreDTD)) > 0);
this.IgnoreNS = (((int)value & (int)(XmlDiffOption.IgnoreNS)) > 0);
this.IgnorePrefix = (((int)value & (int)(XmlDiffOption.IgnorePrefix)) > 0);
this.CDataAsText = (((int)value & (int)(XmlDiffOption.CDataAsText)) > 0);
this.NormalizeNewline = (((int)value & (int)(XmlDiffOption.NormalizeNewline)) > 0);
}
}
public override XmlDiffNodeType NodeType { get { return XmlDiffNodeType.Document; } }
public bool IgnoreAttributeOrder
{
get { return this._bIgnoreAttributeOrder; }
set { this._bIgnoreAttributeOrder = value; }
}
public bool IgnoreChildOrder
{
get { return this._bIgnoreChildOrder; }
set { this._bIgnoreChildOrder = value; }
}
public bool IgnoreComments
{
get { return this._bIgnoreComments; }
set { this._bIgnoreComments = value; }
}
public bool IgnoreWhitespace
{
get { return this._bIgnoreWhitespace; }
set { this._bIgnoreWhitespace = value; }
}
public bool IgnoreDTD
{
get { return this._bIgnoreDTD; }
set { this._bIgnoreDTD = value; }
}
public bool IgnoreNS
{
get { return this._bIgnoreNS; }
set { this._bIgnoreNS = value; }
}
public bool IgnorePrefix
{
get { return this._bIgnorePrefix; }
set { this._bIgnorePrefix = value; }
}
public bool CDataAsText
{
get { return this._bCDataAsText; }
set { this._bCDataAsText = value; }
}
public bool NormalizeNewline
{
get { return this._bNormalizeNewline; }
set { this._bNormalizeNewline = value; }
}
//NodePosition.Before is returned if node2 should be before node1;
//NodePosition.After is returned if node2 should be after node1;
//In any case, NodePosition.Unknown should never be returned.
// Nodes of different kinds sort by their XmlDiffNodeType numeric value;
// same-kind nodes are delegated to the type-specific comparers below.
internal NodePosition ComparePosition(XmlDiffNode node1, XmlDiffNode node2)
{
int nt1 = (int)(node1.NodeType);
int nt2 = (int)(node2.NodeType);
if (nt2 > nt1)
return NodePosition.After;
if (nt2 < nt1)
return NodePosition.Before;
//now nt1 == nt2
if (nt1 == (int)XmlDiffNodeType.Element)
{
return CompareElements(node1 as XmlDiffElement, node2 as XmlDiffElement);
}
else if (nt1 == (int)XmlDiffNodeType.Attribute)
{
return CompareAttributes(node1 as XmlDiffAttribute, node2 as XmlDiffAttribute);
}
else if (nt1 == (int)XmlDiffNodeType.ER)
{
return CompareERs(node1 as XmlDiffEntityReference, node2 as XmlDiffEntityReference);
}
else if (nt1 == (int)XmlDiffNodeType.PI)
{
return ComparePIs(node1 as XmlDiffProcessingInstruction, node2 as XmlDiffProcessingInstruction);
}
else if (node1 is XmlDiffCharacterData)
{
return CompareTextLikeNodes(node1 as XmlDiffCharacterData, node2 as XmlDiffCharacterData);
}
else
{
//something really wrong here, what should we do???
Debug.Assert(false, "ComparePosition meets an indecision situation.");
return NodePosition.Unknown;
}
}
// Orders elements by local name, then namespace (unless IgnoreNS), then prefix
// (unless IgnorePrefix), then value, then attribute lists. Ties resolve to After.
private NodePosition CompareElements(XmlDiffElement elem1, XmlDiffElement elem2)
{
Debug.Assert(elem1 != null);
Debug.Assert(elem2 != null);
int nCompare = 0;
if ((nCompare = CompareText(elem2.LocalName, elem1.LocalName)) == 0)
{
if (IgnoreNS || (nCompare = CompareText(elem2.NamespaceURI, elem1.NamespaceURI)) == 0)
{
if (IgnorePrefix || (nCompare = CompareText(elem2.Prefix, elem1.Prefix)) == 0)
{
if ((nCompare = CompareText(elem2.Value, elem1.Value)) == 0)
{
if ((nCompare = CompareAttributes(elem2, elem1)) == 0)
{
return NodePosition.After;
}
}
}
}
}
if (nCompare > 0)
//elem2 > elem1
return NodePosition.After;
else
//elem2 < elem1
return NodePosition.Before;
}
// Compares the attribute lists of two elements: by count first, then pairwise.
// NOTE(review): when the counts are equal this method returns only 1 or -1, never 0,
// even when every attribute pair compares equal (the final if/else maps nCompare == 0
// to -1). As a result CompareElements can never reach its "fully equal" branch via
// attributes — confirm this is the intended tie-breaking behavior.
private int CompareAttributes(XmlDiffElement elem1, XmlDiffElement elem2)
{
int count1 = elem1.AttributeCount;
int count2 = elem2.AttributeCount;
if (count1 > count2)
return 1;
else if (count1 < count2)
return -1;
else
{
XmlDiffAttribute current1 = elem1.FirstAttribute;
XmlDiffAttribute current2 = elem2.FirstAttribute;
// NodePosition result = 0;
int nCompare = 0;
while (current1 != null && current2 != null && nCompare == 0)
{
if ((nCompare = CompareText(current2.LocalName, current1.LocalName)) == 0)
{
if (IgnoreNS || (nCompare = CompareText(current2.NamespaceURI, current1.NamespaceURI)) == 0)
{
if (IgnorePrefix || (nCompare = CompareText(current2.Prefix, current1.Prefix)) == 0)
{
if ((nCompare = CompareText(current2.Value, current1.Value)) == 0)
{
//do nothing!
}
}
}
}
current1 = (XmlDiffAttribute)current1._next;
current2 = (XmlDiffAttribute)current2._next;
}
if (nCompare > 0)
//elem1 > attr2
return 1;
else
//elem1 < elem2
return -1;
}
}
// Orders two attributes by local name, namespace (unless IgnoreNS), prefix
// (unless IgnorePrefix) and value. Equal attributes resolve to After.
private NodePosition CompareAttributes(XmlDiffAttribute attr1, XmlDiffAttribute attr2)
{
Debug.Assert(attr1 != null);
Debug.Assert(attr2 != null);
int nCompare = 0;
if ((nCompare = CompareText(attr2.LocalName, attr1.LocalName)) == 0)
{
if (IgnoreNS || (nCompare = CompareText(attr2.NamespaceURI, attr1.NamespaceURI)) == 0)
{
if (IgnorePrefix || (nCompare = CompareText(attr2.Prefix, attr1.Prefix)) == 0)
{
if ((nCompare = CompareText(attr2.Value, attr1.Value)) == 0)
{
return NodePosition.After;
}
}
}
}
if (nCompare > 0)
//attr2 > attr1
return NodePosition.After;
else
//attr2 < attr1
return NodePosition.Before;
}
// Orders entity references by name; ties resolve to After.
private NodePosition CompareERs(XmlDiffEntityReference er1, XmlDiffEntityReference er2)
{
Debug.Assert(er1 != null);
Debug.Assert(er2 != null);
int nCompare = CompareText(er2.Name, er1.Name);
if (nCompare >= 0)
return NodePosition.After;
else
return NodePosition.Before;
}
// Orders processing instructions by name, then value; ties resolve to After.
private NodePosition ComparePIs(XmlDiffProcessingInstruction pi1, XmlDiffProcessingInstruction pi2)
{
Debug.Assert(pi1 != null);
Debug.Assert(pi2 != null);
int nCompare = 0;
if ((nCompare = CompareText(pi2.Name, pi1.Name)) == 0)
{
if ((nCompare = CompareText(pi2.Value, pi1.Value)) == 0)
{
return NodePosition.After;
}
}
if (nCompare > 0)
//pi2 > pi1
return NodePosition.After;
else
//pi2 < pi1
return NodePosition.Before;
}
// Orders text-like nodes (Text/CData/Comment/WS) by value; ties resolve to After.
private NodePosition CompareTextLikeNodes(XmlDiffCharacterData t1, XmlDiffCharacterData t2)
{
Debug.Assert(t1 != null);
Debug.Assert(t2 != null);
int nCompare = CompareText(t2.Value, t1.Value);
if (nCompare >= 0)
return NodePosition.After;
else
return NodePosition.Before;
}
//returns 0 if the same string; 1 if s1 > s2 and -1 if s1 < s2
// Plain ordinal (char-code) comparison; shorter string sorts first on a shared prefix.
private int CompareText(string s1, string s2)
{
int len = s1.Length;
//len becomes the smaller of the two
if (len > s2.Length)
len = s2.Length;
int nInd = 0;
char c1 = (char)0x0;
char c2 = (char)0x0;
while (nInd < len)
{
c1 = s1[nInd];
c2 = s2[nInd];
if (c1 < c2)
return -1; //s1 < s2
else if (c1 > c2)
return 1; //s1 > s2
nInd++;
}
if (s2.Length > s1.Length)
return -1; //s1 < s2
else if (s2.Length < s1.Length)
return 1; //s1 > s2
else return 0;
}
/// <summary>
/// Populates the tree from <paramref name="reader"/>. May be called only once per
/// document; throws InvalidOperationException on reuse. Advances the reader to its
/// first node if it has not been read yet, and captures the reader's name table.
/// </summary>
public virtual void Load(XmlReader reader)
{
if (_bLoaded)
throw new InvalidOperationException("The document already contains data and should not be used again.");
if (reader.ReadState == ReadState.Initial)
{
if (!reader.Read())
return;
}
PositionInfo pInfo = PositionInfo.GetPositionInfo(reader);
ReadChildNodes(this, reader, pInfo);
this._bLoaded = true;
this.nameTable = reader.NameTable;
}
// Recursive reader walk: converts each node under `parent` applying the configured
// normalizations. Returns when the matching EndElement is seen. `lookAhead` is set
// when text/CDATA merging already advanced the reader past the current node.
internal void ReadChildNodes(XmlDiffNode parent, XmlReader reader, PositionInfo pInfo)
{
bool lookAhead = false;
do
{
lookAhead = false;
switch (reader.NodeType)
{
case XmlNodeType.Element:
LoadElement(parent, reader, pInfo);
break;
case XmlNodeType.Comment:
if (!IgnoreComments)
LoadTextNode(parent, reader, pInfo, XmlDiffNodeType.Comment);
break;
case XmlNodeType.ProcessingInstruction:
LoadPI(parent, reader, pInfo);
break;
case XmlNodeType.SignificantWhitespace:
if (reader.XmlSpace == XmlSpace.Preserve)
{
LoadTextNode(parent, reader, pInfo, XmlDiffNodeType.WS);
}
break;
case XmlNodeType.CDATA:
if (!CDataAsText)
{
LoadTextNode(parent, reader, pInfo, XmlDiffNodeType.CData);
}
else //merge with adjacent text/CDATA nodes
{
StringBuilder text = new StringBuilder();
text.Append(reader.Value);
while ((lookAhead = reader.Read()) && (reader.NodeType == XmlNodeType.Text || reader.NodeType == XmlNodeType.CDATA))
{
text.Append(reader.Value);
}
LoadTextNode(parent, text.ToString(), pInfo, XmlDiffNodeType.Text);
}
break;
case XmlNodeType.Text:
if (!CDataAsText)
{
LoadTextNode(parent, reader, pInfo, XmlDiffNodeType.Text);
}
else //merge with adjacent text/CDATA nodes
{
StringBuilder text = new StringBuilder();
text.Append(reader.Value);
while ((lookAhead = reader.Read()) && (reader.NodeType == XmlNodeType.Text || reader.NodeType == XmlNodeType.CDATA))
{
text.Append(reader.Value);
}
LoadTextNode(parent, text.ToString(), pInfo, XmlDiffNodeType.Text);
}
break;
case XmlNodeType.EntityReference:
LoadEntityReference(parent, reader, pInfo);
break;
case XmlNodeType.EndElement:
SetElementEndPosition(parent as XmlDiffElement, pInfo);
return;
case XmlNodeType.Attribute: //attribute at top level
string attrVal = reader.Name + "=\"" + reader.Value + "\"";
LoadTopLevelAttribute(parent, attrVal, pInfo, XmlDiffNodeType.Text);
break;
default:
break;
}
} while (lookAhead || reader.Read());
}
// Builds an XmlDiffElement (or XmlDiffEmptyElement for <e/>), reads its attributes,
// recurses into its children for non-empty elements, then inserts it under `parent`.
private void LoadElement(XmlDiffNode parent, XmlReader reader, PositionInfo pInfo)
{
XmlDiffElement elem = null;
bool bEmptyElement = reader.IsEmptyElement;
if (bEmptyElement)
elem = new XmlDiffEmptyElement(reader.LocalName, reader.Prefix, reader.NamespaceURI);
else
elem = new XmlDiffElement(reader.LocalName, reader.Prefix, reader.NamespaceURI);
elem.LineNumber = pInfo.LineNumber;
elem.LinePosition = pInfo.LinePosition;
ReadAttributes(elem, reader, pInfo);
if (!bEmptyElement)
{
reader.Read(); //move to child
ReadChildNodes(elem, reader, pInfo);
}
InsertChild(parent, elem);
}
// Copies every attribute on the reader's current element into `parent`.
// Note: the reader is left positioned on the last attribute; LoadElement's
// subsequent Read() moves past the element start tag.
private void ReadAttributes(XmlDiffElement parent, XmlReader reader, PositionInfo pInfo)
{
if (reader.MoveToFirstAttribute())
{
XmlDiffAttribute attr = new XmlDiffAttribute(reader.LocalName, reader.Prefix, reader.NamespaceURI, reader.Value);
attr.LineNumber = pInfo.LineNumber;
attr.LinePosition = pInfo.LinePosition;
InsertAttribute(parent, attr);
while (reader.MoveToNextAttribute())
{
attr = new XmlDiffAttribute(reader.LocalName, reader.Prefix, reader.NamespaceURI, reader.Value);
attr.LineNumber = pInfo.LineNumber;
attr.LinePosition = pInfo.LinePosition;
InsertAttribute(parent, attr);
}
}
}
// Creates a character-data node of kind `nt` from the reader's current value.
private void LoadTextNode(XmlDiffNode parent, XmlReader reader, PositionInfo pInfo, XmlDiffNodeType nt)
{
XmlDiffCharacterData textNode = new XmlDiffCharacterData(reader.Value, nt, this.NormalizeNewline);
textNode.LineNumber = pInfo.LineNumber;
textNode.LinePosition = pInfo.LinePosition;
InsertChild(parent, textNode);
}
// Creates a character-data node of kind `nt` from pre-merged text.
private void LoadTextNode(XmlDiffNode parent, string text, PositionInfo pInfo, XmlDiffNodeType nt)
{
XmlDiffCharacterData textNode = new XmlDiffCharacterData(text, nt, this.NormalizeNewline);
textNode.LineNumber = pInfo.LineNumber;
textNode.LinePosition = pInfo.LinePosition;
InsertChild(parent, textNode);
}
// A free-standing attribute (fragment input) is stored as a text node "name=\"value\""
// and merged into an adjacent text node by InsertTopLevelAttributeAsText.
private void LoadTopLevelAttribute(XmlDiffNode parent, string text, PositionInfo pInfo, XmlDiffNodeType nt)
{
XmlDiffCharacterData textNode = new XmlDiffCharacterData(text, nt, this.NormalizeNewline);
textNode.LineNumber = pInfo.LineNumber;
textNode.LinePosition = pInfo.LinePosition;
InsertTopLevelAttributeAsText(parent, textNode);
}
private void LoadPI(XmlDiffNode parent, XmlReader reader, PositionInfo pInfo)
{
XmlDiffProcessingInstruction pi = new XmlDiffProcessingInstruction(reader.Name, reader.Value);
pi.LineNumber = pInfo.LineNumber;
pi.LinePosition = pInfo.LinePosition;
InsertChild(parent, pi);
}
private void LoadEntityReference(XmlDiffNode parent, XmlReader reader, PositionInfo pInfo)
{
XmlDiffEntityReference er = new XmlDiffEntityReference(reader.Name);
er.LineNumber = pInfo.LineNumber;
er.LinePosition = pInfo.LinePosition;
InsertChild(parent, er);
}
// Records where the element's end tag was seen (for diff reporting).
private void SetElementEndPosition(XmlDiffElement elem, PositionInfo pInfo)
{
Debug.Assert(elem != null);
elem.EndLineNumber = pInfo.LineNumber;
elem.EndLinePosition = pInfo.LinePosition;
}
// Appends the child, or — when IgnoreChildOrder is set — inserts it at its
// canonical sorted position as defined by ComparePosition.
private void InsertChild(XmlDiffNode parent, XmlDiffNode newChild)
{
if (IgnoreChildOrder)
{
XmlDiffNode child = parent.FirstChild;
XmlDiffNode prevChild = null;
while (child != null && (ComparePosition(child, newChild) == NodePosition.After))
{
prevChild = child;
child = child.NextSibling;
}
parent.InsertChildAfter(prevChild, newChild);
}
else
parent.InsertChildAfter(parent.LastChild, newChild);
}
// Top-level attributes are folded into the preceding text/WS node (space-separated)
// so fragments with stray attributes still compare as text.
private void InsertTopLevelAttributeAsText(XmlDiffNode parent, XmlDiffCharacterData newChild)
{
if (parent.LastChild != null && (parent.LastChild.NodeType == XmlDiffNodeType.Text || parent.LastChild.NodeType == XmlDiffNodeType.WS))
{
((XmlDiffCharacterData)parent.LastChild).Value = ((XmlDiffCharacterData)parent.LastChild).Value + " " + newChild.Value;
}
else
{
parent.InsertChildAfter(parent.LastChild, newChild);
}
}
// Appends the attribute, or — when IgnoreAttributeOrder is set — inserts it at its
// canonical sorted position as defined by CompareAttributes.
private void InsertAttribute(XmlDiffElement parent, XmlDiffAttribute newAttr)
{
Debug.Assert(parent != null);
Debug.Assert(newAttr != null);
newAttr._parent = parent;
if (IgnoreAttributeOrder)
{
XmlDiffAttribute attr = parent.FirstAttribute;
XmlDiffAttribute prevAttr = null;
while (attr != null && (CompareAttributes(attr, newAttr) == NodePosition.After))
{
prevAttr = attr;
attr = (XmlDiffAttribute)(attr.NextSibling);
}
parent.InsertAttributeAfter(prevAttr, newAttr);
}
else
parent.InsertAttributeAfter(parent.LastAttribute, newAttr);
}
// The document node itself has no markup; only its content is written.
public override void WriteTo(XmlWriter w)
{
WriteContentTo(w);
}
public override void WriteContentTo(XmlWriter w)
{
XmlDiffNode child = FirstChild;
while (child != null)
{
child.WriteTo(w);
child = child.NextSibling;
}
}
public XmlDiffNavigator CreateNavigator()
{
return new XmlDiffNavigator(this);
}
// Re-inserts every direct child with IgnoreChildOrder temporarily forced on,
// which sorts them into canonical order.
// NOTE(review): `_next` is refreshed only when `_current is XmlDiffElement`; a
// non-element child would reuse the previous `_next` value — confirm callers only
// invoke this on element-only child lists.
public void SortChildren()
{
if (this.FirstChild != null)
{
XmlDiffNode _first = this.FirstChild;
XmlDiffNode _current = this.FirstChild;
XmlDiffNode _last = this.LastChild;
this._firstChild = null;
this._lastChild = null;
//set flag to ignore child order
bool temp = IgnoreChildOrder;
IgnoreChildOrder = true;
XmlDiffNode _next = null;
do
{
if (_current is XmlDiffElement)
_next = _current._next;
_current._next = null;
InsertChild(this, _current);
if (_current == _last)
break;
_current = _next;
}
while (true);
//restore flag for ignoring child order
IgnoreChildOrder = temp;
}
}
// Same as SortChildren() but for an arbitrary element's children.
// NOTE(review): shares the `_next`-refresh caveat described on SortChildren().
void SortChildren(XmlDiffElement elem)
{
if (elem.FirstChild != null)
{
XmlDiffNode _first = elem.FirstChild;
XmlDiffNode _current = elem.FirstChild;
XmlDiffNode _last = elem.LastChild;
elem._firstChild = null;
elem._lastChild = null;
//set flag to ignore child order
bool temp = IgnoreChildOrder;
IgnoreChildOrder = true;
XmlDiffNode _next = null;
do
{
if (_current is XmlDiffElement)
_next = _current._next;
_current._next = null;
InsertChild(elem, _current);
if (_current == _last)
break;
_current = _next;
}
while (true);
//restore flag for ignoring child order
IgnoreChildOrder = temp;
}
}
}
// Navigator over the XmlDiffDocument tree (XPathNavigator-like cursor API).
/// <summary>
/// Cursor over an XmlDiffDocument, modeled after XPathNavigator: MoveTo* methods
/// reposition the cursor and return whether the move succeeded; the name/value
/// properties describe the current node. Namespace-declaration attributes (xmlns)
/// are skipped when iterating attributes.
/// </summary>
public class XmlDiffNavigator
{
private XmlDiffDocument _document;
private XmlDiffNode _currentNode;
public XmlDiffNavigator(XmlDiffDocument doc)
{
_document = doc;
_currentNode = _document;
}
/// <summary>Returns an independent navigator positioned at the same node.</summary>
public XmlDiffNavigator Clone()
{
XmlDiffNavigator _clone = new XmlDiffNavigator(_document);
if (!_clone.MoveTo(this))
throw new Exception("Cannot clone");
return _clone;
}
// Compares document positions using recorded line/position info.
// NOTE(review): nav.CurrentNode is dereferenced before the `is` type test below,
// so a null argument throws NullReferenceException rather than returning Unknown.
public NodePosition ComparePosition(XmlDiffNavigator nav)
{
XmlDiffNode targetNode = ((XmlDiffNavigator)nav).CurrentNode;
if (!(nav is XmlDiffNavigator))
{
return NodePosition.Unknown;
}
if (targetNode == this.CurrentNode)
{
return NodePosition.Same;
}
else
{
if (this.CurrentNode.ParentNode == null) //this is root
{
return NodePosition.After;
}
else if (targetNode.ParentNode == null) //target is root
{
return NodePosition.Before;
}
else //look in the following nodes
{
if (targetNode.LineNumber != 0 && this.CurrentNode.LineNumber != 0)
{
if (targetNode.LineNumber > this.CurrentNode.LineNumber)
{
return NodePosition.Before;
}
else if (targetNode.LineNumber == this.CurrentNode.LineNumber && targetNode.LinePosition > this.CurrentNode.LinePosition)
{
return NodePosition.Before;
}
else
return NodePosition.After;
}
return NodePosition.Before;
}
}
}
/// <summary>Attribute value on the current element, or "" when absent / not on an element.</summary>
public string GetAttribute(string localName, string namespaceURI)
{
if (_currentNode is XmlDiffElement)
{
return ((XmlDiffElement)_currentNode).GetAttributeValue(localName, namespaceURI);
}
return "";
}
// Not implemented; asserts in debug builds and returns "".
public string GetNamespace(string name)
{
Debug.Assert(false, "GetNamespace is NYI");
return "";
}
public bool IsSamePosition(XmlDiffNavigator other)
{
if (other is XmlDiffNavigator)
{
if (_currentNode == ((XmlDiffNavigator)other).CurrentNode)
return true;
}
return false;
}
public bool MoveTo(XmlDiffNavigator other)
{
if (other is XmlDiffNavigator)
{
_currentNode = ((XmlDiffNavigator)other).CurrentNode;
return true;
}
return false;
}
public bool MoveToAttribute(string localName, string namespaceURI)
{
if (_currentNode is XmlDiffElement)
{
XmlDiffAttribute _attr = ((XmlDiffElement)_currentNode).GetAttribute(localName, namespaceURI);
if (_attr != null)
{
_currentNode = _attr;
return true;
}
}
return false;
}
// Moves to the first sibling of the current node.
// NOTE(review): when the cursor is already on the first child this moves to the
// SECOND child (FirstChild._next) instead of staying put — confirm intended.
public bool MoveToFirst()
{
if (!(_currentNode is XmlDiffAttribute))
{
if (_currentNode.ParentNode.FirstChild == _currentNode)
{
if (_currentNode.ParentNode.FirstChild._next != null)
{
_currentNode = _currentNode.ParentNode.FirstChild._next;
return true;
}
}
else
{
_currentNode = _currentNode.ParentNode.FirstChild;
return true;
}
}
return false;
}
// Moves to the first non-xmlns attribute of the current element.
public bool MoveToFirstAttribute()
{
if (_currentNode is XmlDiffElement)
{
if (((XmlDiffElement)_currentNode).FirstAttribute != null)
{
XmlDiffAttribute _attr = ((XmlDiffElement)_currentNode).FirstAttribute;
while (_attr != null && IsNamespaceNode(_attr))
{
_attr = (XmlDiffAttribute)_attr._next;
}
if (_attr != null)
{
_currentNode = _attr;
return true;
}
}
}
return false;
}
public bool MoveToFirstChild()
{
if ((_currentNode is XmlDiffDocument || _currentNode is XmlDiffElement) && _currentNode.FirstChild != null)
{
_currentNode = _currentNode.FirstChild;
return true;
}
return false;
}
// Not implemented; asserts in debug builds.
public bool MoveToId(string id)
{
Debug.Assert(false, "MoveToId is NYI");
return false;
}
// Not implemented; asserts in debug builds.
public bool MoveToNamespace(string name)
{
Debug.Assert(false, "MoveToNamespace is NYI");
return false;
}
public bool MoveToNext()
{
if (!(_currentNode is XmlDiffAttribute) && _currentNode._next != null)
{
_currentNode = _currentNode._next;
return true;
}
return false;
}
// Moves to the next non-xmlns attribute; only valid while positioned on an attribute.
public bool MoveToNextAttribute()
{
if (_currentNode is XmlDiffAttribute)
{
XmlDiffAttribute _attr = (XmlDiffAttribute)_currentNode._next;
while (_attr != null && IsNamespaceNode(_attr))
{
_attr = (XmlDiffAttribute)_attr._next;
}
if (_attr != null)
{
_currentNode = _attr;
return true;
}
}
return false;
}
// True for namespace declarations (xmlns="..." or xmlns:p="...").
private bool IsNamespaceNode(XmlDiffAttribute attr)
{
return attr.LocalName.ToLower() == "xmlns" ||
attr.Prefix.ToLower() == "xmlns";
}
public bool MoveToParent()
{
if (!(_currentNode is XmlDiffDocument))
{
_currentNode = _currentNode.ParentNode;
return true;
}
return false;
}
// Linear scan from the first sibling to find the predecessor (singly-linked list).
public bool MoveToPrevious()
{
if (_currentNode != _currentNode.ParentNode.FirstChild)
{
XmlDiffNode _current = _currentNode.ParentNode.FirstChild;
XmlDiffNode _prev = _currentNode.ParentNode.FirstChild;
while (_current != _currentNode)
{
_prev = _current;
_current = _current._next;
}
_currentNode = _prev;
return true;
}
return false;
}
public void MoveToRoot()
{
_currentNode = _document;
}
/// <summary>Local name of the current element/attribute, PI target, or "" otherwise.</summary>
public string LocalName
{
get
{
if (_currentNode.NodeType == XmlDiffNodeType.Element)
{
return ((XmlDiffElement)_currentNode).LocalName;
}
else if (_currentNode.NodeType == XmlDiffNodeType.Attribute)
{
return ((XmlDiffAttribute)_currentNode).LocalName;
}
else if (_currentNode.NodeType == XmlDiffNodeType.PI)
{
return ((XmlDiffProcessingInstruction)_currentNode).Name;
}
return "";
}
}
/// <summary>Qualified name; element names are resolved through the document's name table.</summary>
public string Name
{
get
{
if (_currentNode.NodeType == XmlDiffNodeType.Element)
{
return _document.nameTable.Get(((XmlDiffElement)_currentNode).Name);
}
else if (_currentNode.NodeType == XmlDiffNodeType.Attribute)
{
return ((XmlDiffAttribute)_currentNode).Name;
}
else if (_currentNode.NodeType == XmlDiffNodeType.PI)
{
return ((XmlDiffProcessingInstruction)_currentNode).Name;
}
return "";
}
}
public string NamespaceURI
{
get
{
if (_currentNode is XmlDiffElement)
{
return ((XmlDiffElement)_currentNode).NamespaceURI;
}
else if (_currentNode is XmlDiffAttribute)
{
return ((XmlDiffAttribute)_currentNode).NamespaceURI;
}
return "";
}
}
public string Value
{
get
{
if (_currentNode is XmlDiffAttribute)
{
return ((XmlDiffAttribute)_currentNode).Value;
}
else if (_currentNode is XmlDiffCharacterData)
{
return ((XmlDiffCharacterData)_currentNode).Value;
}
else if (_currentNode is XmlDiffElement)
{
return ((XmlDiffElement)_currentNode).Value;
}
return "";
}
}
public string Prefix
{
get
{
if (_currentNode is XmlDiffElement)
{
return ((XmlDiffElement)_currentNode).Prefix;
}
else if (_currentNode is XmlDiffAttribute)
{
return ((XmlDiffAttribute)_currentNode).Prefix;
}
return "";
}
}
// Not supported; asserts in debug builds and returns "".
public string BaseURI
{
get
{
Debug.Assert(false, "BaseURI is NYI");
return "";
}
}
// Not supported; asserts in debug builds and returns "".
public string XmlLang
{
get
{
Debug.Assert(false, "XmlLang not supported");
return "";
}
}
public bool HasAttributes
{
get
{
return (_currentNode is XmlDiffElement && ((XmlDiffElement)_currentNode).FirstAttribute != null) ? true : false;
}
}
// NOTE(review): this tests `_next` (next SIBLING), not FirstChild — it reports
// "has a following sibling", not "has children". Likely should be
// `_currentNode.FirstChild != null`; confirm against callers before changing.
public bool HasChildren
{
get
{
return _currentNode._next != null ? true : false;
}
}
public bool IsEmptyElement
{
get
{
return _currentNode is XmlDiffEmptyElement ? true : false;
}
}
public XmlNameTable NameTable
{
get
{
return _document.nameTable;
}
}
public XmlDiffNode CurrentNode
{
get
{
return _currentNode;
}
}
// NOTE(review): the constructor and MoveToRoot set _currentNode to _document,
// never null, so this always returns false — possibly meant `== _document`.
public bool IsOnRoot()
{
return _currentNode == null ? true : false;
}
}
// String-keyed property bag used by XmlDiffNode.ExtendedProperties (MyDict is a project type).
public class PropertyCollection : MyDict<string, object> { }
public abstract class XmlDiffNode
{
internal XmlDiffNode _next;
internal XmlDiffNode _firstChild;
internal XmlDiffNode _lastChild;
internal XmlDiffNode _parent;
internal int _lineNumber, _linePosition;
internal bool _bIgnoreValue;
private PropertyCollection _extendedProperties;
public XmlDiffNode()
{
this._next = null;
this._firstChild = null;
this._lastChild = null;
this._parent = null;
_lineNumber = 0;
_linePosition = 0;
}
public XmlDiffNode FirstChild
{
get
{
return this._firstChild;
}
}
public XmlDiffNode LastChild
{
get
{
return this._lastChild;
}
}
public XmlDiffNode NextSibling
{
get
{
return this._next;
}
}
public XmlDiffNode ParentNode
{
get
{
return this._parent;
}
}
public virtual bool IgnoreValue
{
get
{
return _bIgnoreValue;
}
set
{
_bIgnoreValue = value;
XmlDiffNode current = this._firstChild;
while (current != null)
{
current.IgnoreValue = value;
current = current._next;
}
}
}
public abstract XmlDiffNodeType NodeType { get; }
public virtual string OuterXml
{
get
{
StringWriter sw = new StringWriter();
XmlWriterSettings xws = new XmlWriterSettings();
xws.ConformanceLevel = ConformanceLevel.Auto;
xws.CheckCharacters = false;
XmlWriter xw = XmlWriter.Create(sw, xws);
WriteTo(xw);
xw.Dispose();
return sw.ToString();
}
}
public virtual string InnerXml
{
get
{
StringWriter sw = new StringWriter();
XmlWriterSettings xws = new XmlWriterSettings();
xws.ConformanceLevel = ConformanceLevel.Auto;
xws.CheckCharacters = false;
XmlWriter xw = XmlWriter.Create(sw, xws);
WriteTo(xw);
xw.Dispose();
return sw.ToString();
}
}
public abstract void WriteTo(XmlWriter w);
public abstract void WriteContentTo(XmlWriter w);
public PropertyCollection ExtendedProperties
{
get
{
if (_extendedProperties == null)
_extendedProperties = new PropertyCollection();
return _extendedProperties;
}
}
public virtual void InsertChildAfter(XmlDiffNode child, XmlDiffNode newChild)
{
Debug.Assert(newChild != null);
newChild._parent = this;
if (child == null)
{
newChild._next = this._firstChild;
this._firstChild = newChild;
}
else
{
Debug.Assert(child._parent == this);
newChild._next = child._next;
child._next = newChild;
}
if (newChild._next == null)
this._lastChild = newChild;
}
public virtual void DeleteChild(XmlDiffNode child)
{
if (child == this.FirstChild)//delete head
{
_firstChild = this.FirstChild.NextSibling;
}
else
{
XmlDiffNode current = this.FirstChild;
XmlDiffNode previous = null;
while (current != child)
{
previous = current;
current = current.NextSibling;
}
Debug.Assert(current != null);
if (current == this.LastChild) //tail being deleted
{
this._lastChild = current.NextSibling;
}
previous._next = current.NextSibling;
}
}
// Source position of this node (populated by whichever component builds the
// diff tree — not visible in this file).
public int LineNumber
{
    get { return this._lineNumber; }
    set { this._lineNumber = value; }
}
public int LinePosition
{
    get { return this._linePosition; }
    set { this._linePosition = value; }
}
}
/// <summary>
/// Element node in the diff tree: a qualified name, a singly linked list of
/// attributes, the child list inherited from <see cref="XmlDiffNode"/>, and
/// the source position of the end tag.
/// </summary>
public class XmlDiffElement : XmlDiffNode
{
    private string _lName;
    private string _prefix;
    private string _ns;
    private XmlDiffAttribute _firstAttribute;  // head of the attribute list
    private XmlDiffAttribute _lastAttribute;   // tail of the attribute list
    private int _attrC;                        // cached attribute count; -1 = not computed / stale
    private int _endLineNumber, _endLinePosition;

    public XmlDiffElement(string localName, string prefix, string ns)
        : base()
    {
        this._lName = localName;
        this._prefix = prefix;
        this._ns = ns;
        this._firstAttribute = null;
        this._lastAttribute = null;
        this._attrC = -1;
    }

    public override XmlDiffNodeType NodeType { get { return XmlDiffNodeType.Element; } }
    public string LocalName { get { return this._lName; } }
    public string NamespaceURI { get { return this._ns; } }
    public string Prefix { get { return this._prefix; } }

    /// <summary>Qualified name: "prefix:localName" when a prefix is present.</summary>
    public string Name
    {
        get
        {
            if (this._prefix.Length > 0)
                return Prefix + ":" + LocalName;
            else
                return LocalName;
        }
    }

    public XmlDiffAttribute FirstAttribute
    {
        get
        {
            return this._firstAttribute;
        }
    }

    public XmlDiffAttribute LastAttribute
    {
        get
        {
            return this._lastAttribute;
        }
    }

    /// <summary>
    /// Value of the attribute with the given local name and namespace, or ""
    /// when the element carries no such attribute.
    /// </summary>
    public string GetAttributeValue(string LocalName, string NamespaceUri)
    {
        XmlDiffAttribute attr = GetAttribute(LocalName, NamespaceUri);
        return (attr != null) ? attr.Value : "";
    }

    /// <summary>
    /// Attribute with the given local name and namespace, or null if absent.
    /// </summary>
    public XmlDiffAttribute GetAttribute(string LocalName, string NamespaceUri)
    {
        // BUG FIX: the original compared each attribute's namespace against the
        // element's own NamespaceURI property (the parameter is spelled
        // "NamespaceUri", so it never shadowed the property), and its do/while
        // loop both skipped the last attribute and dereferenced null when the
        // list held a single entry. Walk to the null terminator and compare
        // against the parameter.
        for (XmlDiffAttribute current = _firstAttribute;
             current != null;
             current = (XmlDiffAttribute)current._next)
        {
            if (current.LocalName == LocalName && current.NamespaceURI == NamespaceUri)
            {
                return current;
            }
        }
        return null;
    }

    /// <summary>
    /// Links <paramref name="newAttr"/> into the attribute list directly after
    /// <paramref name="attr"/>; pass null to insert at the head.
    /// </summary>
    internal void InsertAttributeAfter(XmlDiffAttribute attr, XmlDiffAttribute newAttr)
    {
        Debug.Assert(newAttr != null);
        newAttr._ownerElement = this;
        if (attr == null)
        {
            newAttr._next = this._firstAttribute;
            this._firstAttribute = newAttr;
        }
        else
        {
            Debug.Assert(attr._ownerElement == this);
            newAttr._next = attr._next;
            attr._next = newAttr;
        }
        if (newAttr._next == null)
            this._lastAttribute = newAttr;
        // BUG FIX: invalidate the cached count so AttributeCount stays correct.
        this._attrC = -1;
    }

    /// <summary>
    /// Unlinks <paramref name="attr"/> from the attribute list (the attribute
    /// is assumed to be present).
    /// </summary>
    internal void DeleteAttribute(XmlDiffAttribute attr)
    {
        if (attr == this.FirstAttribute)//delete head
        {
            if (attr == this.LastAttribute) //sole attribute: list becomes empty
            {
                this._lastAttribute = null;
            }
            _firstAttribute = (XmlDiffAttribute)this.FirstAttribute.NextSibling;
        }
        else
        {
            XmlDiffAttribute current = this.FirstAttribute;
            XmlDiffAttribute previous = null;
            while (current != attr)
            {
                previous = current;
                current = (XmlDiffAttribute)current.NextSibling;
            }
            Debug.Assert(current != null);
            if (current == this.LastAttribute) //tail being deleted
            {
                // BUG FIX: the tail's NextSibling is always null; the node
                // before it is the new tail.
                this._lastAttribute = previous;
            }
            previous._next = current.NextSibling;
        }
        // BUG FIX: invalidate the cached count so AttributeCount stays correct.
        this._attrC = -1;
    }

    /// <summary>Number of attributes; computed on demand and cached until the list changes.</summary>
    public int AttributeCount
    {
        get
        {
            if (this._attrC != -1)
                return this._attrC;
            XmlDiffAttribute attr = this._firstAttribute;
            this._attrC = 0;
            while (attr != null)
            {
                this._attrC++;
                attr = (XmlDiffAttribute)attr.NextSibling;
            }
            return this._attrC;
        }
    }

    /// <summary>Propagates IgnoreValue to children (via base) and to attributes.</summary>
    public override bool IgnoreValue
    {
        set
        {
            base.IgnoreValue = value;
            XmlDiffAttribute current = this._firstAttribute;
            while (current != null)
            {
                current.IgnoreValue = value;
                current = (XmlDiffAttribute)current._next;
            }
        }
    }

    // Source position of the element's end tag.
    public int EndLineNumber
    {
        get { return this._endLineNumber; }
        set { this._endLineNumber = value; }
    }

    public int EndLinePosition
    {
        get { return this._endLinePosition; }
        set { this._endLinePosition = value; }
    }

    /// <summary>Writes the start tag, all attributes, the content and a full end tag.</summary>
    public override void WriteTo(XmlWriter w)
    {
        w.WriteStartElement(Prefix, LocalName, NamespaceURI);
        XmlDiffAttribute attr = this._firstAttribute;
        while (attr != null)
        {
            attr.WriteTo(w);
            attr = (XmlDiffAttribute)(attr.NextSibling);
        }
        WriteContentTo(w);
        w.WriteFullEndElement();
    }

    /// <summary>Writes each child in document order.</summary>
    public override void WriteContentTo(XmlWriter w)
    {
        XmlDiffNode child = FirstChild;
        while (child != null)
        {
            child.WriteTo(w);
            child = child.NextSibling;
        }
    }

    /// <summary>
    /// Concatenated text of the subtree: character data (comments and PIs
    /// excluded) plus nested element values. Reads as "" while IgnoreValue is set.
    /// </summary>
    public string Value
    {
        get
        {
            if (this.IgnoreValue)
            {
                return "";
            }
            if (_firstChild != null)
            {
                StringBuilder _bldr = new StringBuilder();
                XmlDiffNode _current = _firstChild;
                do
                {
                    if (_current is XmlDiffCharacterData && _current.NodeType != XmlDiffNodeType.Comment && _current.NodeType != XmlDiffNodeType.PI)
                    {
                        _bldr.Append(((XmlDiffCharacterData)_current).Value);
                    }
                    else if (_current is XmlDiffElement)
                    {
                        _bldr.Append(((XmlDiffElement)_current).Value);
                    }
                    _current = _current._next;
                }
                while (_current != null);
                return _bldr.ToString();
            }
            return "";
        }
    }
}
/// <summary>
/// Marker subclass for elements that appeared in empty-element form
/// (presumably &lt;a/&gt; vs. &lt;a&gt;&lt;/a&gt; — the distinction is made by the tree
/// builder, which is outside this file's view). Adds no state or behavior.
/// </summary>
public class XmlDiffEmptyElement : XmlDiffElement
{
    public XmlDiffEmptyElement(string localName, string prefix, string ns) : base(localName, prefix, ns) { }
}
/// <summary>
/// Attribute node attached to an <see cref="XmlDiffElement"/>. Stores the
/// qualified name and the attribute value; the owning element is maintained
/// by the element's attribute-list operations.
/// </summary>
public class XmlDiffAttribute : XmlDiffNode
{
    internal XmlDiffElement _ownerElement;
    private string _lName;
    private string _prefix;
    private string _ns;
    private string _value;

    public XmlDiffAttribute(string localName, string prefix, string ns, string value)
        : base()
    {
        _lName = localName;
        _prefix = prefix;
        _ns = ns;
        _value = value;
    }

    /// <summary>Attribute value; reads as "" while IgnoreValue is set.</summary>
    public string Value
    {
        get { return this.IgnoreValue ? "" : this._value; }
    }

    public string LocalName { get { return _lName; } }
    public string NamespaceURI { get { return _ns; } }
    public string Prefix { get { return _prefix; } }

    /// <summary>Qualified name: "prefix:localName" when a prefix is present.</summary>
    public string Name
    {
        get { return (_prefix.Length > 0) ? _prefix + ":" + _lName : _lName; }
    }

    public override XmlDiffNodeType NodeType
    {
        get { return XmlDiffNodeType.Attribute; }
    }

    /// <summary>Writes the full attribute (name and value) to the writer.</summary>
    public override void WriteTo(XmlWriter w)
    {
        w.WriteStartAttribute(Prefix, LocalName, NamespaceURI);
        WriteContentTo(w);
        w.WriteEndAttribute();
    }

    /// <summary>Writes just the attribute value.</summary>
    public override void WriteContentTo(XmlWriter w)
    {
        w.WriteString(Value);
    }
}
/// <summary>
/// Entity-reference node. Children, when supplied by the tree builder, hold
/// the reference's expansion.
/// </summary>
public class XmlDiffEntityReference : XmlDiffNode
{
    private string _name; // entity name without the surrounding '&' and ';'

    public XmlDiffEntityReference(string name)
        : base()
    {
        this._name = name;
    }

    public override XmlDiffNodeType NodeType
    {
        get { return XmlDiffNodeType.ER; }
    }

    public string Name
    {
        get { return this._name; }
    }

    /// <summary>Writes the reference itself (e.g. "&amp;name;").</summary>
    public override void WriteTo(XmlWriter w)
    {
        w.WriteEntityRef(this._name);
    }

    /// <summary>Writes the expansion: each child node in order.</summary>
    public override void WriteContentTo(XmlWriter w)
    {
        for (XmlDiffNode node = this.FirstChild; node != null; node = node.NextSibling)
        {
            node.WriteTo(w);
        }
    }
}
/// <summary>
/// Text-like node: text, CDATA, comment or whitespace (PI via a subclass).
/// The flavor is carried in <see cref="NodeType"/>.
/// </summary>
public class XmlDiffCharacterData : XmlDiffNode
{
    private string _value;
    private XmlDiffNodeType _nodetype; // which text-like flavor this node is

    public XmlDiffCharacterData(string value, XmlDiffNodeType nt, bool NormalizeNewline)
        : base()
    {
        // Optionally strip CR and LF entirely so comparisons ignore line breaks.
        this._value = NormalizeNewline
            ? value.Replace("\n", "").Replace("\r", "")
            : value;
        this._nodetype = nt;
    }

    /// <summary>Node text; reads as "" while IgnoreValue is set.</summary>
    public string Value
    {
        get { return this.IgnoreValue ? "" : this._value; }
        set { _value = value; }
    }

    public override XmlDiffNodeType NodeType
    {
        get { return _nodetype; }
    }

    /// <summary>Emits the value with the markup appropriate to this node's flavor.</summary>
    public override void WriteTo(XmlWriter w)
    {
        if (this._nodetype == XmlDiffNodeType.Comment)
        {
            w.WriteComment(Value);
        }
        else if (this._nodetype == XmlDiffNodeType.CData)
        {
            w.WriteCData(Value);
        }
        else if (this._nodetype == XmlDiffNodeType.WS || this._nodetype == XmlDiffNodeType.Text)
        {
            w.WriteString(Value);
        }
        else
        {
            Debug.Assert(false, "Wrong type for text-like node : " + this._nodetype.ToString());
        }
    }

    // Character data has no children; all output happens in WriteTo.
    public override void WriteContentTo(XmlWriter w) { }
}
/// <summary>
/// Processing-instruction node: the PI target is exposed via <see cref="Name"/>,
/// the PI data via the inherited Value.
/// </summary>
public class XmlDiffProcessingInstruction : XmlDiffCharacterData
{
    private string _name; // PI target
    /// <summary>Creates a PI node; newline normalization is never applied to PI data.</summary>
    public XmlDiffProcessingInstruction(string name, string value)
        : base(value, XmlDiffNodeType.PI, false)
    {
        this._name = name;
    }
    public string Name { get { return this._name; } }
    public override void WriteTo(XmlWriter w)
    {
        w.WriteProcessingInstruction(this._name, Value);
    }
    // PIs have no children; WriteTo emits everything.
    public override void WriteContentTo(XmlWriter w) { }
}
}
| |
using System;
using NUnit.Framework;
using OpenQA.Selenium.Environment;
namespace OpenQA.Selenium
{
/// <summary>
/// Tests of page-load behavior: page-load strategies (none/eager/normal),
/// redirects, fragment navigation, history navigation and page-load timeouts.
/// </summary>
[TestFixture]
public class PageLoadingTest : DriverTestFixture
{
    // Driver created with a non-default page-load strategy; quit after each test.
    private IWebDriver localDriver;

    [SetUp]
    public void RestartOriginalDriver()
    {
        driver = EnvironmentManager.Instance.GetCurrentDriver();
    }

    [TearDown]
    public void QuitAdditionalDriver()
    {
        if (localDriver != null)
        {
            localDriver.Quit();
            localDriver = null;
        }
    }

    [Test]
    public void NoneStrategyShouldNotWaitForPageToLoad()
    {
        InitLocalDriver(PageLoadStrategy.None);
        string slowPage = EnvironmentManager.Instance.UrlBuilder.WhereIs("sleep?time=5");
        DateTime start = DateTime.Now;
        localDriver.Url = slowPage;
        DateTime end = DateTime.Now;
        TimeSpan duration = end - start;
        // The slow loading resource on that page takes 6 seconds to return,
        // but with 'none' page loading strategy 'get' operation should not wait.
        Assert.That(duration.TotalMilliseconds, Is.LessThan(1000), "Took too long to load page: " + duration.TotalMilliseconds);
    }

    [Test]
    public void NoneStrategyShouldNotWaitForPageToRefresh()
    {
        InitLocalDriver(PageLoadStrategy.None);
        string slowPage = EnvironmentManager.Instance.UrlBuilder.WhereIs("sleep?time=5");
        // BUG FIX: slowPage was computed but never loaded, so the test
        // refreshed the browser's start page instead of the slow page.
        localDriver.Url = slowPage;
        // We discard the element, but want a check to make sure the page is loaded
        WaitFor(() => localDriver.FindElement(By.TagName("body")), TimeSpan.FromSeconds(10), "did not find body");
        DateTime start = DateTime.Now;
        localDriver.Navigate().Refresh();
        DateTime end = DateTime.Now;
        TimeSpan duration = end - start;
        // The slow loading resource on that page takes 6 seconds to return,
        // but with 'none' page loading strategy 'refresh' operation should not wait.
        Assert.That(duration.TotalMilliseconds, Is.LessThan(1000), "Took too long to load page: " + duration.TotalMilliseconds);
    }

    [Test]
    [IgnoreBrowser(Browser.Chrome, "Chrome driver does not support eager page load strategy")]
    public void EagerStrategyShouldNotWaitForResources()
    {
        InitLocalDriver(PageLoadStrategy.Eager);
        string slowPage = EnvironmentManager.Instance.UrlBuilder.WhereIs("slowLoadingResourcePage.html");
        DateTime start = DateTime.Now;
        localDriver.Url = slowPage;
        // We discard the element, but want a check to make sure the GET actually
        // completed.
        WaitFor(() => localDriver.FindElement(By.Id("peas")), TimeSpan.FromSeconds(10), "did not find element");
        DateTime end = DateTime.Now;
        // The slow loading resource on that page takes 6 seconds to return. If we
        // waited for it, our load time should be over 6 seconds.
        TimeSpan duration = end - start;
        Assert.That(duration.TotalMilliseconds, Is.LessThan(5 * 1000), "Took too long to load page: " + duration.TotalMilliseconds);
    }

    [Test]
    [IgnoreBrowser(Browser.Chrome, "Chrome driver does not support eager page load strategy")]
    public void EagerStrategyShouldNotWaitForResourcesOnRefresh()
    {
        InitLocalDriver(PageLoadStrategy.Eager);
        string slowPage = EnvironmentManager.Instance.UrlBuilder.WhereIs("slowLoadingResourcePage.html");
        localDriver.Url = slowPage;
        // We discard the element, but want a check to make sure the GET actually
        // completed.
        WaitFor(() => localDriver.FindElement(By.Id("peas")), TimeSpan.FromSeconds(10), "did not find element");
        DateTime start = DateTime.Now;
        localDriver.Navigate().Refresh();
        // We discard the element, but want a check to make sure the GET actually
        // completed.
        WaitFor(() => localDriver.FindElement(By.Id("peas")), TimeSpan.FromSeconds(10), "did not find element");
        DateTime end = DateTime.Now;
        // The slow loading resource on that page takes 6 seconds to return. If we
        // waited for it, our load time should be over 6 seconds.
        TimeSpan duration = end - start;
        Assert.That(duration.TotalMilliseconds, Is.LessThan(5 * 1000), "Took too long to load page: " + duration.TotalMilliseconds);
    }

    [Test]
    [IgnoreBrowser(Browser.Chrome, "Chrome driver does not support eager page load strategy")]
    public void EagerStrategyShouldWaitForDocumentToBeLoaded()
    {
        InitLocalDriver(PageLoadStrategy.Eager);
        string slowPage = EnvironmentManager.Instance.UrlBuilder.WhereIs("sleep?time=3");
        localDriver.Url = slowPage;
        // We discard the element, but want a check to make sure the GET actually completed.
        WaitFor(() => localDriver.FindElement(By.TagName("body")), TimeSpan.FromSeconds(10), "did not find body");
    }

    [Test]
    public void NormalStrategyShouldWaitForDocumentToBeLoaded()
    {
        driver.Url = simpleTestPage;
        // NOTE: Assert.AreEqual takes (expected, actual); several calls in this
        // fixture had them reversed, which garbles failure messages.
        Assert.AreEqual("Hello WebDriver", driver.Title);
    }

    [Test]
    public void ShouldFollowRedirectsSentInTheHttpResponseHeaders()
    {
        driver.Url = redirectPage;
        Assert.AreEqual("We Arrive Here", driver.Title);
    }

    [Test]
    public void ShouldFollowMetaRedirects()
    {
        driver.Url = metaRedirectPage;
        WaitFor(() => { return driver.Title == "We Arrive Here"; }, "Browser title was not 'We Arrive Here'");
        Assert.AreEqual("We Arrive Here", driver.Title);
    }

    [Test]
    [IgnoreBrowser(Browser.Firefox, "Browser doesn't see subsequent navigation to a fragment as a new navigation.")]
    public void ShouldBeAbleToGetAFragmentOnTheCurrentPage()
    {
        driver.Url = xhtmlTestPage;
        driver.Url = xhtmlTestPage + "#text";
        driver.FindElement(By.Id("id1"));
    }

    [Test]
    public void ShouldReturnWhenGettingAUrlThatDoesNotResolve()
    {
        try
        {
            // Of course, we're up the creek if this ever does get registered
            driver.Url = "http://www.thisurldoesnotexist.comx/";
        }
        catch (Exception e)
        {
            if (!IsIeDriverTimedOutException(e))
            {
                // BUG FIX: 'throw e;' resets the stack trace; rethrow with 'throw;'.
                throw;
            }
        }
    }

    [Test]
    [IgnoreBrowser(Browser.Safari, "Safari driver does not throw on malformed URL, causing long delay awaiting timeout")]
    [NeedsFreshDriver(IsCreatedBeforeTest = true)]
    public void ShouldThrowIfUrlIsMalformed()
    {
        Assert.That(() => driver.Url = "www.test.com", Throws.InstanceOf<WebDriverException>());
    }

    [Test]
    [IgnoreBrowser(Browser.Safari, "Safari driver does not throw on malformed URL")]
    [NeedsFreshDriver(IsCreatedBeforeTest = true)]
    public void ShouldThrowIfUrlIsMalformedInPortPart()
    {
        Assert.That(() => driver.Url = "http://localhost:30001bla", Throws.InstanceOf<WebDriverException>());
    }

    [Test]
    public void ShouldReturnWhenGettingAUrlThatDoesNotConnect()
    {
        // Here's hoping that there's nothing here. There shouldn't be
        driver.Url = "http://localhost:3001";
    }

    [Test]
    public void ShouldReturnUrlOnNotExistedPage()
    {
        string url = EnvironmentManager.Instance.UrlBuilder.WhereIs("not_existed_page.html");
        driver.Url = url;
        Assert.AreEqual(url, driver.Url);
    }

    [Test]
    public void ShouldBeAbleToLoadAPageWithFramesetsAndWaitUntilAllFramesAreLoaded()
    {
        driver.Url = framesetPage;
        driver.SwitchTo().Frame(0);
        IWebElement pageNumber = driver.FindElement(By.XPath("//span[@id='pageNumber']"));
        Assert.AreEqual("1", pageNumber.Text.Trim());
        driver.SwitchTo().DefaultContent().SwitchTo().Frame(1);
        pageNumber = driver.FindElement(By.XPath("//span[@id='pageNumber']"));
        Assert.AreEqual("2", pageNumber.Text.Trim());
    }

    [Test]
    [NeedsFreshDriver(IsCreatedBeforeTest = true)]
    public void ShouldDoNothingIfThereIsNothingToGoBackTo()
    {
        string originalTitle = driver.Title;
        driver.Url = formsPage;
        driver.Navigate().Back();
        // We may have returned to the browser's home page
        string currentTitle = driver.Title;
        Assert.That(currentTitle, Is.EqualTo(originalTitle).Or.EqualTo("We Leave From Here"));
        if (driver.Title == originalTitle)
        {
            driver.Navigate().Back();
            Assert.AreEqual(originalTitle, driver.Title);
        }
    }

    [Test]
    public void ShouldBeAbleToNavigateBackInTheBrowserHistory()
    {
        driver.Url = formsPage;
        driver.FindElement(By.Id("imageButton")).Submit();
        WaitFor(TitleToBeEqualTo("We Arrive Here"), "Browser title was not 'We Arrive Here'");
        Assert.AreEqual("We Arrive Here", driver.Title);
        driver.Navigate().Back();
        WaitFor(TitleToBeEqualTo("We Leave From Here"), "Browser title was not 'We Leave From Here'");
        Assert.AreEqual("We Leave From Here", driver.Title);
    }

    [Test]
    public void ShouldBeAbleToNavigateBackInTheBrowserHistoryInPresenceOfIframes()
    {
        driver.Url = xhtmlTestPage;
        driver.FindElement(By.Name("sameWindow")).Click();
        WaitFor(TitleToBeEqualTo("This page has iframes"), "Browser title was not 'This page has iframes'");
        Assert.AreEqual("This page has iframes", driver.Title);
        driver.Navigate().Back();
        WaitFor(TitleToBeEqualTo("XHTML Test Page"), "Browser title was not 'XHTML Test Page'");
        Assert.AreEqual("XHTML Test Page", driver.Title);
    }

    [Test]
    public void ShouldBeAbleToNavigateForwardsInTheBrowserHistory()
    {
        driver.Url = formsPage;
        driver.FindElement(By.Id("imageButton")).Submit();
        WaitFor(TitleToBeEqualTo("We Arrive Here"), "Browser title was not 'We Arrive Here'");
        Assert.AreEqual("We Arrive Here", driver.Title);
        driver.Navigate().Back();
        WaitFor(TitleToBeEqualTo("We Leave From Here"), "Browser title was not 'We Leave From Here'");
        Assert.AreEqual("We Leave From Here", driver.Title);
        driver.Navigate().Forward();
        WaitFor(TitleToBeEqualTo("We Arrive Here"), "Browser title was not 'We Arrive Here'");
        Assert.AreEqual("We Arrive Here", driver.Title);
    }

    [Test]
    [IgnoreBrowser(Browser.IE, "Browser does not support using insecure SSL certs")]
    [IgnoreBrowser(Browser.Safari, "Browser does not support using insecure SSL certs")]
    [IgnoreBrowser(Browser.Edge, "Browser does not support using insecure SSL certs")]
    public void ShouldBeAbleToAccessPagesWithAnInsecureSslCertificate()
    {
        String url = EnvironmentManager.Instance.UrlBuilder.WhereIsSecure("simpleTest.html");
        driver.Url = url;
        // This should work
        Assert.AreEqual("Hello WebDriver", driver.Title);
    }

    [Test]
    public void ShouldBeAbleToRefreshAPage()
    {
        driver.Url = xhtmlTestPage;
        driver.Navigate().Refresh();
        Assert.AreEqual("XHTML Test Page", driver.Title);
    }

    /// <summary>
    /// see <a href="http://code.google.com/p/selenium/issues/detail?id=208">Issue 208</a>
    /// </summary>
    [Test]
    [IgnoreBrowser(Browser.IE, "Browser does, in fact, hang in this case.")]
    [IgnoreBrowser(Browser.Firefox, "Browser does, in fact, hang in this case.")]
    public void ShouldNotHangIfDocumentOpenCallIsNeverFollowedByDocumentCloseCall()
    {
        driver.Url = documentWrite;
        // If this command succeeds, then all is well.
        driver.FindElement(By.XPath("//body"));
    }

    [Test]
    [IgnoreBrowser(Browser.Safari)]
    [NeedsFreshDriver(IsCreatedAfterTest = true)]
    public void PageLoadTimeoutCanBeChanged()
    {
        TestPageLoadTimeoutIsEnforced(2);
        TestPageLoadTimeoutIsEnforced(3);
    }

    [Test]
    [IgnoreBrowser(Browser.Safari)]
    [NeedsFreshDriver(IsCreatedAfterTest = true)]
    public void CanHandleSequentialPageLoadTimeouts()
    {
        long pageLoadTimeout = 2;
        long pageLoadTimeBuffer = 10;
        string slowLoadingPageUrl = EnvironmentManager.Instance.UrlBuilder.WhereIs("sleep?time=" + (pageLoadTimeout + pageLoadTimeBuffer));
        driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(2);
        AssertPageLoadTimeoutIsEnforced(() => driver.Url = slowLoadingPageUrl, pageLoadTimeout, pageLoadTimeBuffer);
        AssertPageLoadTimeoutIsEnforced(() => driver.Url = slowLoadingPageUrl, pageLoadTimeout, pageLoadTimeBuffer);
    }

    [Test]
    [IgnoreBrowser(Browser.Opera, "Not implemented for browser")]
    [NeedsFreshDriver(IsCreatedAfterTest = true)]
    public void ShouldTimeoutIfAPageTakesTooLongToLoad()
    {
        try
        {
            TestPageLoadTimeoutIsEnforced(2);
        }
        finally
        {
            driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(300);
        }
        // Load another page after get() timed out but before test HTTP server served previous page.
        driver.Url = xhtmlTestPage;
        WaitFor(TitleToBeEqualTo("XHTML Test Page"), "Title was not expected value");
    }

    [Test]
    [IgnoreBrowser(Browser.Opera, "Not implemented for browser")]
    [IgnoreBrowser(Browser.Edge, "Not implemented for browser")]
    [NeedsFreshDriver(IsCreatedAfterTest = true)]
    public void ShouldTimeoutIfAPageTakesTooLongToLoadAfterClick()
    {
        driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(2);
        driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("page_with_link_to_slow_loading_page.html");
        IWebElement link = WaitFor(() => driver.FindElement(By.Id("link-to-slow-loading-page")), "Could not find link");
        try
        {
            AssertPageLoadTimeoutIsEnforced(() => link.Click(), 2, 3);
        }
        finally
        {
            driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(300);
        }
        // Load another page after get() timed out but before test HTTP server served previous page.
        driver.Url = xhtmlTestPage;
        WaitFor(TitleToBeEqualTo("XHTML Test Page"), "Title was not expected value");
    }

    [Test]
    [IgnoreBrowser(Browser.Opera, "Not implemented for browser")]
    [NeedsFreshDriver(IsCreatedAfterTest = true)]
    public void ShouldTimeoutIfAPageTakesTooLongToRefresh()
    {
        // Get the sleeping servlet with a pause of 5 seconds
        long pageLoadTimeout = 2;
        long pageLoadTimeBuffer = 0;
        string slowLoadingPageUrl = EnvironmentManager.Instance.UrlBuilder.WhereIs("sleep?time=" + (pageLoadTimeout + pageLoadTimeBuffer));
        driver.Url = slowLoadingPageUrl;
        driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(2);
        try
        {
            AssertPageLoadTimeoutIsEnforced(() => driver.Navigate().Refresh(), 2, 4);
        }
        finally
        {
            driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(300);
        }
        // Load another page after get() timed out but before test HTTP server served previous page.
        driver.Url = xhtmlTestPage;
        WaitFor(TitleToBeEqualTo("XHTML Test Page"), "Title was not expected value");
    }

    [Test]
    [IgnoreBrowser(Browser.Edge, "Test hangs browser.")]
    [IgnoreBrowser(Browser.Chrome, "Chrome driver does, in fact, stop loading page after a timeout.")]
    [IgnoreBrowser(Browser.Opera, "Not implemented for browser")]
    [IgnoreBrowser(Browser.Safari, "Safari behaves correctly with page load timeout, but getting text does not propertly trim, leading to a test run time of over 30 seconds")]
    [NeedsFreshDriver(IsCreatedAfterTest = true)]
    public void ShouldNotStopLoadingPageAfterTimeout()
    {
        try
        {
            TestPageLoadTimeoutIsEnforced(1);
        }
        finally
        {
            driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(300);
        }
        WaitFor(() =>
        {
            try
            {
                string text = driver.FindElement(By.TagName("body")).Text;
                return text == "Slept for 11s";
            }
            catch (NoSuchElementException)
            {
            }
            catch (StaleElementReferenceException)
            {
            }
            return false;
        }, TimeSpan.FromSeconds(30), "Did not find expected text");
    }

    // Wait predicate: true once the browser title equals the expected string.
    private Func<bool> TitleToBeEqualTo(string expectedTitle)
    {
        return () => { return driver.Title == expectedTitle; };
    }

    /// <summary>
    /// Sets the given pageLoadTimeout on the driver and asserts that navigating
    /// to a page that takes much longer (10 seconds longer) to load results in
    /// a timeout exception.
    /// Side effects: 1) the driver is configured to use the given pageLoadTimeout,
    /// 2) the test HTTP server still didn't serve the page to the browser (some
    /// browsers may still be waiting for the page to load despite the fact that
    /// the driver responded with the timeout).
    /// </summary>
    private void TestPageLoadTimeoutIsEnforced(long webDriverPageLoadTimeoutInSeconds)
    {
        // Test page will load this many seconds longer than WD pageLoadTimeout.
        long pageLoadTimeBufferInSeconds = 10;
        string slowLoadingPageUrl = EnvironmentManager.Instance.UrlBuilder.WhereIs("sleep?time=" + (webDriverPageLoadTimeoutInSeconds + pageLoadTimeBufferInSeconds));
        driver.Manage().Timeouts().PageLoad = TimeSpan.FromSeconds(webDriverPageLoadTimeoutInSeconds);
        AssertPageLoadTimeoutIsEnforced(() => driver.Url = slowLoadingPageUrl, webDriverPageLoadTimeoutInSeconds, pageLoadTimeBufferInSeconds);
    }

    /// <summary>
    /// Runs the delegate and asserts it throws WebDriverTimeoutException within
    /// the window (timeout, timeout + buffer) seconds.
    /// </summary>
    private void AssertPageLoadTimeoutIsEnforced(TestDelegate delegateToTest, long webDriverPageLoadTimeoutInSeconds, long pageLoadTimeBufferInSeconds)
    {
        DateTime start = DateTime.Now;
        Assert.That(delegateToTest, Throws.InstanceOf<WebDriverTimeoutException>(), "I should have timed out after " + webDriverPageLoadTimeoutInSeconds + " seconds");
        DateTime end = DateTime.Now;
        TimeSpan duration = end - start;
        Assert.That(duration.TotalSeconds, Is.GreaterThan(webDriverPageLoadTimeoutInSeconds));
        Assert.That(duration.TotalSeconds, Is.LessThan(webDriverPageLoadTimeoutInSeconds + pageLoadTimeBufferInSeconds));
    }

    // Replaces the shared driver with a fresh one using the requested strategy.
    private void InitLocalDriver(PageLoadStrategy strategy)
    {
        EnvironmentManager.Instance.CloseCurrentDriver();
        if (localDriver != null)
        {
            localDriver.Quit();
        }
        PageLoadStrategyOptions options = new PageLoadStrategyOptions();
        options.PageLoadStrategy = strategy;
        localDriver = EnvironmentManager.Instance.CreateDriverInstance(options);
    }

    // Minimal DriverOptions carrier: only PageLoadStrategy matters here.
    private class PageLoadStrategyOptions : DriverOptions
    {
        [Obsolete]
        public override void AddAdditionalCapability(string capabilityName, object capabilityValue)
        {
        }

        public override ICapabilities ToCapabilities()
        {
            return null;
        }
    }
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
//==========================================================================
// File: SdlChannelSink.cs
//
// Summary: Sdl channel sink for generating sdl dynamically on the server.
//
//==========================================================================
using System;
using System.Collections;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Runtime.Remoting.Channels;
using System.Runtime.Remoting.Channels.Http;
using System.Runtime.Remoting.Messaging;
using System.Text;
#if !FEATURE_PAL
using System.Web;
using System.Security.Permissions;
#endif
namespace System.Runtime.Remoting.MetadataServices
{
/// <summary>
/// Provider that places an <see cref="SdlChannelSink"/> into a server
/// channel's sink chain. Recognized configuration properties:
/// "remoteApplicationMetadataEnabled" and "metadataEnabled".
/// </summary>
public class SdlChannelSinkProvider : IServerChannelSinkProvider
{
    private IServerChannelSinkProvider _next = null;
    private bool _bRemoteApplicationMetadataEnabled = false;
    private bool _bMetadataEnabled = true;

    public SdlChannelSinkProvider()
    {
    }

    public SdlChannelSinkProvider(IDictionary properties, ICollection providerData)
    {
        if (properties == null)
            return;
        foreach (DictionaryEntry entry in properties)
        {
            String key = (String)entry.Key;
            if (key == "remoteApplicationMetadataEnabled")
            {
                _bRemoteApplicationMetadataEnabled = Convert.ToBoolean(entry.Value, CultureInfo.InvariantCulture);
            }
            else if (key == "metadataEnabled")
            {
                _bMetadataEnabled = Convert.ToBoolean(entry.Value, CultureInfo.InvariantCulture);
            }
            else
            {
                // Unknown keys are reported, not treated as fatal.
                CoreChannel.ReportUnknownProviderConfigProperty(
                    this.GetType().Name, key);
            }
        }
    }

    [SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
    public void GetChannelData(IChannelDataStore localChannelData)
    {
        // This provider contributes no channel data.
    }

    [SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
    public IServerChannelSink CreateSink(IChannelReceiver channel)
    {
        // Build the rest of the chain first, then put our sink in front of it.
        IServerChannelSink next = (_next == null) ? null : _next.CreateSink(channel);
        SdlChannelSink sink = new SdlChannelSink(channel, next);
        sink.RemoteApplicationMetadataEnabled = _bRemoteApplicationMetadataEnabled;
        sink.MetadataEnabled = _bMetadataEnabled;
        return sink;
    }

    public IServerChannelSinkProvider Next
    {
        [SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
        get { return _next; }
        [SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
        set { _next = value; }
    }
} // class SdlChannelSinkProvider
public class SdlChannelSink : IServerChannelSink
{
private IChannelReceiver _receiver;
private IServerChannelSink _nextSink;
private bool _bRemoteApplicationMetadataEnabled = false;
private bool _bMetadataEnabled = false;
/// <summary>
/// Builds the sink with the channel it serves (used later to resolve object
/// uris to urls) and the next sink in the chain.
/// </summary>
public SdlChannelSink(IChannelReceiver receiver, IServerChannelSink nextSink)
{
    _receiver = receiver;
    _nextSink = nextSink;
} // SdlChannelSink
// Config switch: allow metadata for the special "RemoteApplicationMetadata.rem" uri.
internal bool RemoteApplicationMetadataEnabled
{
    get { return _bRemoteApplicationMetadataEnabled; }
    set { _bRemoteApplicationMetadataEnabled = value; }
}
// Config switch: master toggle for serving any SDL/WSDL metadata at all.
internal bool MetadataEnabled
{
    get { return _bMetadataEnabled; }
    set { _bMetadataEnabled = value; }
}
/// <summary>
/// Server-sink entry point: when the request is a metadata query
/// (GET with ?sdl/?sdlx/?wsdl), generates the schema and completes the call;
/// otherwise delegates to the next sink unchanged.
/// </summary>
[SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
public ServerProcessing ProcessMessage(IServerChannelSinkStack sinkStack,
    IMessage requestMsg,
    ITransportHeaders requestHeaders, Stream requestStream,
    out IMessage responseMsg, out ITransportHeaders responseHeaders,
    out Stream responseStream)
{
    if (requestMsg != null)
    {
        // The message has already been deserialized so delegate to the next sink.
        return _nextSink.ProcessMessage(
            sinkStack,
            requestMsg, requestHeaders, requestStream,
            out responseMsg, out responseHeaders, out responseStream);
    }
    SdlType sdlType;
    if (!ShouldIntercept(requestHeaders, out sdlType))
        return _nextSink.ProcessMessage(sinkStack, null, requestHeaders, requestStream,
            out responseMsg, out responseHeaders, out responseStream);
    // generate sdl and return it
    responseHeaders = new TransportHeaders();
    GenerateSdl(sdlType, sinkStack, requestHeaders, responseHeaders, out responseStream);
    responseMsg = null;
    return ServerProcessing.Complete;
} // ProcessMessage
/// <summary>
/// Intentionally a no-op: this sink never pushes itself onto the sink stack,
/// so it never receives async completions.
/// </summary>
[SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
public void AsyncProcessResponse(IServerResponseChannelSinkStack sinkStack, Object state,
    IMessage msg, ITransportHeaders headers, Stream stream)
{
    // We don't need to implement this because we never push ourselves to the sink
    // stack.
} // AsyncProcessResponse
/// <summary>
/// Not supported for the same reason as <see cref="AsyncProcessResponse"/>.
/// </summary>
[SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
public Stream GetResponseStream(IServerResponseChannelSinkStack sinkStack, Object state,
    IMessage msg, ITransportHeaders headers)
{
    // We don't need to implement this because we never push ourselves
    // to the sink stack.
    throw new NotSupportedException();
} // GetResponseStream
/// <summary>Next sink in the server channel sink chain (may be null at the end).</summary>
public IServerChannelSink NextChannelSink
{
    [SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
    get { return _nextSink; }
}
/// <summary>This sink exposes no runtime-queryable properties.</summary>
public IDictionary Properties
{
    [SecurityPermission(SecurityAction.LinkDemand, Flags=SecurityPermissionFlag.Infrastructure, Infrastructure=true)]
    get { return null; }
} // Properties
// Should we intercept the call and return some SDL
private bool ShouldIntercept(ITransportHeaders requestHeaders, out SdlType sdlType)
{
sdlType = SdlType.Sdl;
String requestVerb = requestHeaders["__RequestVerb"] as String;
String requestURI = requestHeaders["__RequestUri"] as String;
// http verb must be "GET" to return sdl (and request uri must be set)
if ((requestURI == null) ||
(requestVerb == null) || !requestVerb.Equals("GET"))
return false;
// find last index of ? and look for "sdl" or "sdlx"
int index = requestURI.LastIndexOf('?');
if (index == -1)
return false; // no query string
String queryString = requestURI.Substring(index).ToLower(CultureInfo.InvariantCulture);
// sdl?
if ((String.CompareOrdinal(queryString, "?sdl") == 0) ||
(String.CompareOrdinal(queryString, "?sdlx") == 0))
{
sdlType = SdlType.Sdl;
return true;
}
// wsdl?
if (String.CompareOrdinal(queryString, "?wsdl") == 0)
{
sdlType = SdlType.Wsdl;
return true;
}
return false;
} // ShouldIntercept
/// <summary>
/// Produces SDL/WSDL describing either every registered remoted type (for the
/// special "RemoteApplicationMetadata.rem" uri) or the single type behind the
/// requested object uri, and writes the schema XML to the response stream.
/// </summary>
/// <exception cref="RemotingException">
/// Metadata generation is disabled, application-wide metadata was requested
/// without being enabled, or the uri names no server object.
/// </exception>
private void GenerateSdl(SdlType sdlType,
    IServerResponseChannelSinkStack sinkStack,
    ITransportHeaders requestHeaders,
    ITransportHeaders responseHeaders,
    out Stream outputStream)
{
    if (!MetadataEnabled)
        throw new RemotingException(CoreChannel.GetResourceString("Remoting_MetadataNotEnabled"));
    String requestUri = requestHeaders[CommonTransportKeys.RequestUri] as String;
    String objectUri = HttpChannelHelper.GetObjectUriFromRequestUri(requestUri);
    // Application-wide metadata is opt-in; refuse it unless explicitly enabled.
    if (!RemoteApplicationMetadataEnabled &&
        (String.Compare(objectUri, "RemoteApplicationMetadata.rem", StringComparison.OrdinalIgnoreCase) == 0))
        throw new RemotingException(CoreChannel.GetResourceString("Remoting_RemoteApplicationMetadataNotEnabled"));
    // If the host header is present, we will use this in the generated uri's
    String hostName = (String)requestHeaders["Host"];
    if (hostName != null)
    {
        // filter out port number if present
        int index = hostName.IndexOf(':');
        if (index != -1)
            hostName = hostName.Substring(0, index);
    }
#if !FEATURE_PAL
    // For IIS, we will sync the scheme://hostname:port with the incoming value
    String iisHostOverride = SetupUrlBashingForIisIfNecessary(hostName);
#endif
    ServiceType[] types = null;
    if (String.Compare(objectUri, "RemoteApplicationMetadata.rem", StringComparison.OrdinalIgnoreCase) == 0)
    {
        // get the service description for all registered service types
        ActivatedServiceTypeEntry[] activatedTypes =
            RemotingConfiguration.GetRegisteredActivatedServiceTypes();
        WellKnownServiceTypeEntry[] wellKnownTypes =
            RemotingConfiguration.GetRegisteredWellKnownServiceTypes();
        // determine total number of types
        int typeCount = 0;
        if (activatedTypes != null)
            typeCount += activatedTypes.Length;
        if (wellKnownTypes != null)
            typeCount += wellKnownTypes.Length;
        types = new ServiceType[typeCount];
        // collect data
        int co = 0;
        if (activatedTypes != null)
        {
            // Activated types carry no fixed endpoint url.
            foreach (ActivatedServiceTypeEntry entry in activatedTypes)
            {
                types[co++] = new ServiceType(entry.ObjectType, null);
            }
        }
        if (wellKnownTypes != null)
        {
            foreach (WellKnownServiceTypeEntry entry in wellKnownTypes)
            {
                String[] urls = _receiver.GetUrlsForUri(entry.ObjectUri);
                String url = urls[0];
#if !FEATURE_PAL
                if (iisHostOverride != null)
                    url = HttpChannelHelper.ReplaceChannelUriWithThisString(url, iisHostOverride);
                else
#endif
                if (hostName != null)
                    url = HttpChannelHelper.ReplaceMachineNameWithThisString(url, hostName);
                types[co++] = new ServiceType(entry.ObjectType, url);
            }
        }
        InternalRemotingServices.RemotingAssert(co == typeCount, "Not all types were processed.");
    }
    else
    {
        // get the service description for a particular object
        Type objectType = RemotingServices.GetServerTypeForUri(objectUri);
        if (objectType == null)
        {
            throw new RemotingException(
                String.Format(
                    CultureInfo.CurrentCulture, "Object with uri '{0}' does not exist at server.",
                    objectUri));
        }
        String[] urls = _receiver.GetUrlsForUri(objectUri);
        String url = urls[0];
#if !FEATURE_PAL
        if (iisHostOverride != null)
            url = HttpChannelHelper.ReplaceChannelUriWithThisString(url, iisHostOverride);
        else
#endif
        if (hostName != null)
            url = HttpChannelHelper.ReplaceMachineNameWithThisString(url, hostName);
        types = new ServiceType[1];
        types[0] = new ServiceType(objectType, url);
    }
    responseHeaders["Content-Type"] = "text/xml";
    // Prefer streaming straight to the transport; fall back to buffering in
    // memory when the transport cannot provide a stream up front.
    bool bMemStream = false;
    outputStream = sinkStack.GetResponseStream(null, responseHeaders);
    if (outputStream == null)
    {
        outputStream = new MemoryStream(1024);
        bMemStream = true;
    }
    MetaData.ConvertTypesToSchemaToStream(types, sdlType, outputStream);
    if (bMemStream)
        outputStream.Position = 0; // rewind so the caller reads the buffered schema from the start
} // GenerateXmlForUri
// SetupUrlBashingForIisIfNecessaryWorker wrapper.
// Prevents System.Web type load for client sku installations.
internal static string SetupUrlBashingForIisIfNecessary(string hostName)
{
    // Client SKUs do not carry System.Web, so never touch the worker there.
    if (CoreChannel.IsClientSKUInstallation)
        return null;

    return SetupUrlBashingForIisIfNecessaryWorker(hostName);
} // SetupUrlBashingForIisIfNecessary
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.NoInlining)]
private static string SetupUrlBashingForIisIfNecessaryWorker(string hostName)
{
    // Outside of IIS there is no HttpContext, so there is nothing to override.
    HttpContext context = HttpContext.Current;
    if (context == null)
        return null;

    // Rebuild "scheme://host:port" from the incoming request so that the
    // uris we generate reflect what the client actually connected to.
    String scheme = context.Request.IsSecureConnection ? "https" : "http";
    String host = (hostName != null) ? hostName : CoreChannel.GetMachineName();
    int port = context.Request.Url.Port;

    StringBuilder builder = new StringBuilder(100);
    builder.Append(scheme);
    builder.Append("://");
    builder.Append(host);
    builder.Append(":");
    builder.Append(port.ToString(CultureInfo.InvariantCulture));
    return builder.ToString();
} // SetupUrlBashingForIisIfNecessaryWorker
} // class SdlChannelSink
} // namespace System.Runtime.Remoting.Channnels
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics; //StackFrame
using System.IO;
using System.Text;
namespace OLEDB.Test.ModuleCore
{
////////////////////////////////////////////////////////////////
// CError
//
////////////////////////////////////////////////////////////////
// Static logging / verification helper used by LTM-based tests. Routes all
// output to the active IError / ITestConsole implementation and provides the
// Compare / Equals / Warning / Skip family of verification helpers.
public class CError
{
    //Data
    private static IError s_rIError;
    private static ITestConsole s_rITestConsole;
    private static CLTMConsole s_rLTMConsole;

    //Constructor
    public CError()
    {
    }

    // Active error sink. Assigning it also creates the LTM console redirector
    // (so plain Console.WriteLine output reaches LTM), captures the optional
    // ITestConsole interface, and disables asserts.
    static public IError Error
    {
        set
        {
            //Set the static Error interface...
            s_rIError = value;

            //Setup the standard c# Console to log to LTM instead...
            //ie: Console.WriteLine will automatically log to LTM
            if (s_rLTMConsole == null)
            {
                s_rLTMConsole = new CLTMConsole();
            }

            //The Error object may also support additional interfaces
            s_rITestConsole = value as ITestConsole;

            //Disable Asserts
            DisableAsserts();
        }
        get { return s_rIError; }
    }

    // TextWriter view of the LTM console (null until Error has been set).
    static public TextWriter Out
    {
        get { return s_rLTMConsole; }
    }

    // Richer console interface; null when the Error object doesn't implement it.
    static public ITestConsole TestConsole
    {
        get { return s_rITestConsole; }
    }

    // Releases the cached interfaces and listeners.
    static internal void Dispose()
    {
        //Reset the info.
        s_rIError = null;
        s_rITestConsole = null;

        //Remove listeners
        s_rLTMConsole = null;
    }

    // Intentionally a no-op on this platform.
    static internal void DisableAsserts()
    {
    }

    //Helpers

    // Increments the error count on the active sink (throws if none attached).
    static public void Increment()
    {
        Error.Increment();
    }

    // Error level of the active sink; HR_STRICT when no sink is attached.
    static public tagERRORLEVEL ErrorLevel
    {
        get
        {
            if (Error != null)
                return Error.GetErrorLevel();
            return tagERRORLEVEL.HR_STRICT;
        }
        set
        {
            if (Error != null)
                Error.SetErrorLevel(value);
        }
    }

    static public void Transmit(string text)
    {
        Write(text);
    }

    // Newline sequence used for transmitted text.
    static public string NewLine
    {
        get { return "\n"; }
    }

    static public void Write(object value)
    {
        if (value != null)
            Write(value.ToString());
    }

    static public void WriteLine(object value)
    {
        Write(value);
        WriteLine();
    }

    static public void Write(string text)
    {
        Write(tagCONSOLEFLAGS.CONSOLE_TEXT, text);
    }

    static public void WriteLine(string text)
    {
        Write(tagCONSOLEFLAGS.CONSOLE_TEXT, text);
        WriteLine();
    }

    static public void Write(string text, params object[] args)
    {
        //Delegate
        Write(String.Format(text, args));
    }

    static public void WriteLine(string text, params object[] args)
    {
        //Delegate
        WriteLine(String.Format(text, args));
    }

    static public void Write(char[] value)
    {
        //Delegate
        if (value != null)
            Write(new string(value));
    }

    static public void WriteLine(char[] value)
    {
        //Delegate (note: the newline is emitted even when value is null)
        if (value != null)
            Write(new string(value));
        WriteLine();
    }

    static public void WriteXml(string text)
    {
        Write(tagCONSOLEFLAGS.CONSOLE_XML, text);
    }

    static public void WriteRaw(string text)
    {
        Write(tagCONSOLEFLAGS.CONSOLE_RAW, text);
    }

    static public void WriteIgnore(string text)
    {
        Write(tagCONSOLEFLAGS.CONSOLE_IGNORE, text);
    }

    static public void WriteLineIgnore(string text)
    {
        Write(tagCONSOLEFLAGS.CONSOLE_IGNORE, text + CError.NewLine);
    }

    // Core write: CONSOLE_TEXT gets XML-sanitized, then the text is routed to
    // ITestConsole when available, otherwise to IError.Transmit.
    static public void Write(tagCONSOLEFLAGS flags, string text)
    {
        if (flags == tagCONSOLEFLAGS.CONSOLE_TEXT)
        {
            text = FixupXml(text);
        }

        //NOTE:You can also simply use Console.WriteLine and have it show up in LTM...
        //Is the new ITestConsole interface available (using a new LTM)
        if (TestConsole != null)
        {
            TestConsole.Write(flags, text);
        }
        else if (Error != null)
        {
            //Otherwise
            Error.Transmit(text);
        }
    }

    static public void WriteLine()
    {
        if (TestConsole != null)
            TestConsole.WriteLine();
        else if (Error != null)
            Error.Transmit(CError.NewLine);
    }

    // Compare throws CTestFailedException on mismatch, halting the test; use
    // Equals when the test should log the failure and continue.
    static public bool Compare(bool equal, string message)
    {
        if (equal)
            return true;
        return Compare(false, true, message);
    }

    static public bool Compare(object actual, object expected, string message)
    {
        if (InternalEquals(actual, expected))
            return true;

        //Compare not only compares but throws - so your test stops processing
        //This way processing stops upon the first error, so you don't have to check return
        //values or validate values afterwards. If you have other items to do, then use the
        //CError.Equals instead of CError.Compare
        Console.WriteLine("ERROR: {0}", message);
        Console.WriteLine("Expected: {0}", expected);
        Console.WriteLine("Actual : {0}", actual);
        throw new CTestFailedException(message, actual, expected, null);
    }

    // Two-expected-value variant; passes when actual matches either expected.
    static public bool Compare(object actual, object expected1, object expected2, string message)
    {
        if (InternalEquals(actual, expected1) || InternalEquals(actual, expected2))
            return true;

        //Compare not only compares but throws - so your test stops processing
        //This way processing stops upon the first error, so you don't have to check return
        //values or validate values afterwards. If you have other items to do, then use the
        //CError.Equals instead of CError.Compare
        Console.WriteLine("expected1: " + expected1);
        Console.WriteLine("expected2: " + expected2);
        throw new CTestFailedException(message, actual, expected1, null);
        //return false;
    }

    static public bool Equals(object actual, object expected, string message)
    {
        try
        {
            //Equals is identical to Compare, except that Equals doesn't throw.
            //This way if We still want to throw the exception so we get the logging and comapre block
            //but the test wants to continue to do other things.
            return CError.Compare(actual, expected, message);
        }
        catch (Exception e)
        {
            CTestBase.HandleException(e);
            return false;
        }
    }

    static public bool Equals(bool equal, string message)
    {
        try
        {
            //Equals is identical to Compare, except that Equals doesn't throw.
            //This way if We still want to throw the exception so we get the logging and comapre block
            //but the test wants to continue to do other things.
            return CError.Compare(equal, message);
        }
        catch (Exception e)
        {
            CTestBase.HandleException(e);
            return false;
        }
    }

    static public bool Warning(bool equal, string message)
    {
        return Warning(equal, true, message, null);
    }

    static public bool Warning(object actual, object expected, string message)
    {
        return Warning(actual, expected, message, null);
    }

    // Logs a TEST_WARNING on mismatch but never halts the test.
    static public bool Warning(object actual, object expected, string message, Exception inner)
    {
        //See if these are equal
        bool equal = InternalEquals(actual, expected);
        if (equal)
            return true;

        try
        {
            //Throw a warning exception
            throw new CTestException(CTestBase.TEST_WARNING, message, actual, expected, inner);
        }
        catch (Exception e)
        {
            //Warning should continue - not halt test progress
            CTestBase.HandleException(e);
            return false;
        }
    }

    static public bool Skip(string message)
    {
        //Delegate
        return Skip(true, message);
    }

    // Throws CTestSkippedException when skip is true; otherwise returns false.
    static public bool Skip(bool skip, string message)
    {
        if (skip)
            throw new CTestSkippedException(message);
        return false;
    }

    // Null-safe equality: both null => equal; one null => not equal;
    // otherwise defer to expected.Equals(actual).
    static internal bool InternalEquals(object actual, object expected)
    {
        //Handle null comparison
        if (actual == null && expected == null)
            return true;
        else if (actual == null || expected == null)
            return false;

        //Otherwise
        return expected.Equals(actual);
    }

    // Logs a full failure record (actual/expected/source/details) at the given
    // error level, temporarily overriding the sink's current level.
    // Always returns false so callers can use it directly in a return statement.
    static public bool Log(object actual, object expected, string source, string message, string details, tagERRORLEVEL eErrorLevel)
    {
        //Obtain the error level
        tagERRORLEVEL rSavedLevel = ErrorLevel;

        //Set the error level
        ErrorLevel = eErrorLevel;

        try
        {
            //Get caller function, 0=current
            //StackTrace rStackTrace = new StackTrace();
            //StackFrame rStackFrame = rStackTrace.GetFrame(1);

            //Log the error
            if (TestConsole != null)
            {
                //ITestConsole.Log
                TestConsole.Log(Common.Format(actual), //actual
                    Common.Format(expected), //expected
                    source, //source
                    message, //message
                    details, //details
                    tagCONSOLEFLAGS.CONSOLE_TEXT, //flags
                    "fake_filename",
                    999
                    );
            }
            else if (Error != null)
            {
                //We call IError::Compare, which logs the error AND increments the error count...
                Console.WriteLine("Message:\t" + message);
                Console.WriteLine("Source:\t\t" + source);
                Console.WriteLine("Expected:\t" + expected);
                Console.WriteLine("Received:\t" + actual);
                Console.WriteLine("Details:" + CError.NewLine + details);
            }
        }
        finally
        {
            //Restore the error level
            ErrorLevel = rSavedLevel;
        }
        return false;
    }

    // Sanitizes text for CONSOLE_TEXT output: optionally escapes XML markup
    // characters (escapeXmlStuff is currently hard-coded off) and replaces
    // control / non-ASCII characters with an "_xHH_" hex placeholder.
    // BUGFIX: the escape branches previously appended the raw characters
    // (the entity strings had been decoded, and the '"' case did not even
    // compile); restored the proper XML entities &amp; &lt; &gt; &quot; &apos;.
    private static String FixupXml(String value)
    {
        bool escapeXmlStuff = false;
        if (value == null) return null;
        StringBuilder b = new StringBuilder();

        for (int i = 0; i < value.Length; i++)
        {
            switch (value[i])
            {
                case '&':
                    if (escapeXmlStuff) b.Append("&amp;"); else b.Append('&');
                    break;
                case '<':
                    if (escapeXmlStuff) b.Append("&lt;"); else b.Append('<');
                    break;
                case '>':
                    if (escapeXmlStuff) b.Append("&gt;"); else b.Append('>');
                    break;
                case '"':
                    if (escapeXmlStuff) b.Append("&quot;"); else b.Append('"');
                    break;
                case '\'':
                    if (escapeXmlStuff) b.Append("&apos;"); else b.Append('\'');
                    break;
                case '\t':
                    b.Append('\t');
                    break;
                case '\r':
                    b.Append('\r');
                    break;
                case '\n':
                    b.Append('\n');
                    break;
                default:
                    // Escape control and non-ASCII characters so the log stays clean
                    if ((value[i] < 0x20) || value[i] >= 0x80)
                    {
                        b.Append(PrintUnknownCharacter(value[i]));
                    }
                    else
                    {
                        b.Append(value[i]);
                    }
                    break;
            }
        }
        return b.ToString();
    }

    // Formats a character as "_xH..._" using uppercase hex digits ("_x0_" for NUL).
    private static string PrintUnknownCharacter(char ch)
    {
        int number = ch;
        string result = string.Empty;

        if (number == 0)
        {
            result = "0";
        }

        while (number > 0)
        {
            int n = number % 16;
            number = number / 16;

            if (n < 10)
            {
                result = (char)(n + (int)'0') + result;
            }
            else
            {
                result = (char)(n - 10 + (int)'A') + result;
            }
        }
        return "_x" + result + "_";
    }
}
}
| |
using System;
using System.Collections.Generic;
using Android.Runtime;
namespace Com.Umeng.Common {
// Managed binding for the Java class com.umeng.common.Log (Umeng SDK).
// Auto-generated Xamarin.Android JNI wrapper: each member lazily resolves and
// caches its JNI field/method id on first use, then forwards the call through
// JNIEnv. Do not hand-edit the JNI call sequences.
// Metadata.xml XPath class reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']"
[global::Android.Runtime.Register ("com/umeng/common/Log", DoNotGenerateAcw=true)]
public partial class Log : global::Java.Lang.Object {
// Cached JNI id for the static boolean field "LOG"; resolved lazily.
static IntPtr LOG_jfieldId;
// Maps the Java static boolean field "LOG" onto a managed property.
// Metadata.xml XPath field reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/field[@name='LOG']"
[Register ("LOG")]
public static bool Logs {
get {
if (LOG_jfieldId == IntPtr.Zero)
LOG_jfieldId = JNIEnv.GetStaticFieldID (class_ref, "LOG", "Z");
return JNIEnv.GetStaticBooleanField (class_ref, LOG_jfieldId);
}
set {
if (LOG_jfieldId == IntPtr.Zero)
LOG_jfieldId = JNIEnv.GetStaticFieldID (class_ref, "LOG", "Z");
JNIEnv.SetStaticField (class_ref, LOG_jfieldId, value);
}
}
// Cached global reference to the Java class com/umeng/common/Log.
internal static IntPtr java_class_handle;
internal static IntPtr class_ref {
get {
return JNIEnv.FindClass ("com/umeng/common/Log", ref java_class_handle);
}
}
protected override IntPtr ThresholdClass {
get { return class_ref; }
}
protected override global::System.Type ThresholdType {
get { return typeof (Log); }
}
// Wraps an existing Java object reference (used when surfacing Java instances).
protected Log (IntPtr javaReference, JniHandleOwnership transfer) : base (javaReference, transfer) {}
static IntPtr id_ctor;
// Default constructor: creates the Java peer, dispatching through
// JNIEnv.CreateInstance for managed subclasses.
// Metadata.xml XPath constructor reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/constructor[@name='Log' and count(parameter)=0]"
[Register (".ctor", "()V", "")]
public Log () : base (IntPtr.Zero, JniHandleOwnership.DoNotTransfer)
{
if (Handle != IntPtr.Zero)
return;
if (GetType () != typeof (Log)) {
SetHandle (global::Android.Runtime.JNIEnv.CreateInstance (GetType (), "()V"), JniHandleOwnership.TransferLocalRef);
return;
}
if (id_ctor == IntPtr.Zero)
id_ctor = JNIEnv.GetMethodID (class_ref, "<init>", "()V");
SetHandle (JNIEnv.NewObject (class_ref, id_ctor), JniHandleOwnership.TransferLocalRef);
}
static IntPtr id_a_Ljava_lang_String_Ljava_lang_String_;
// Binding for the Java static method a(String, String).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='a' and count(parameter)=2 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String']]"
[Register ("a", "(Ljava/lang/String;Ljava/lang/String;)V", "")]
public static void A (string p0, string p1)
{
if (id_a_Ljava_lang_String_Ljava_lang_String_ == IntPtr.Zero)
id_a_Ljava_lang_String_Ljava_lang_String_ = JNIEnv.GetStaticMethodID (class_ref, "a", "(Ljava/lang/String;Ljava/lang/String;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_a_Ljava_lang_String_Ljava_lang_String_, new JValue (native_p0), new JValue (native_p1));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_a_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_;
// Binding for the Java static method a(String, String, Exception).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='a' and count(parameter)=3 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String'] and parameter[3][@type='java.lang.Exception']]"
[Register ("a", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V", "")]
public static void A (string p0, string p1, global::Java.Lang.Exception p2)
{
if (id_a_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ == IntPtr.Zero)
id_a_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ = JNIEnv.GetStaticMethodID (class_ref, "a", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_a_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_, new JValue (native_p0), new JValue (native_p1), new JValue (p2));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_b_Ljava_lang_String_Ljava_lang_String_;
// Binding for the Java static method b(String, String).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='b' and count(parameter)=2 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String']]"
[Register ("b", "(Ljava/lang/String;Ljava/lang/String;)V", "")]
public static void B (string p0, string p1)
{
if (id_b_Ljava_lang_String_Ljava_lang_String_ == IntPtr.Zero)
id_b_Ljava_lang_String_Ljava_lang_String_ = JNIEnv.GetStaticMethodID (class_ref, "b", "(Ljava/lang/String;Ljava/lang/String;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_b_Ljava_lang_String_Ljava_lang_String_, new JValue (native_p0), new JValue (native_p1));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_b_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_;
// Binding for the Java static method b(String, String, Exception).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='b' and count(parameter)=3 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String'] and parameter[3][@type='java.lang.Exception']]"
[Register ("b", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V", "")]
public static void B (string p0, string p1, global::Java.Lang.Exception p2)
{
if (id_b_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ == IntPtr.Zero)
id_b_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ = JNIEnv.GetStaticMethodID (class_ref, "b", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_b_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_, new JValue (native_p0), new JValue (native_p1), new JValue (p2));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_c_Ljava_lang_String_Ljava_lang_String_;
// Binding for the Java static method c(String, String).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='c' and count(parameter)=2 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String']]"
[Register ("c", "(Ljava/lang/String;Ljava/lang/String;)V", "")]
public static void C (string p0, string p1)
{
if (id_c_Ljava_lang_String_Ljava_lang_String_ == IntPtr.Zero)
id_c_Ljava_lang_String_Ljava_lang_String_ = JNIEnv.GetStaticMethodID (class_ref, "c", "(Ljava/lang/String;Ljava/lang/String;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_c_Ljava_lang_String_Ljava_lang_String_, new JValue (native_p0), new JValue (native_p1));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_c_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_;
// Binding for the Java static method c(String, String, Exception).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='c' and count(parameter)=3 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String'] and parameter[3][@type='java.lang.Exception']]"
[Register ("c", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V", "")]
public static void C (string p0, string p1, global::Java.Lang.Exception p2)
{
if (id_c_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ == IntPtr.Zero)
id_c_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ = JNIEnv.GetStaticMethodID (class_ref, "c", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_c_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_, new JValue (native_p0), new JValue (native_p1), new JValue (p2));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_d_Ljava_lang_String_Ljava_lang_String_;
// Binding for the Java static method d(String, String).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='d' and count(parameter)=2 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String']]"
[Register ("d", "(Ljava/lang/String;Ljava/lang/String;)V", "")]
public static void D (string p0, string p1)
{
if (id_d_Ljava_lang_String_Ljava_lang_String_ == IntPtr.Zero)
id_d_Ljava_lang_String_Ljava_lang_String_ = JNIEnv.GetStaticMethodID (class_ref, "d", "(Ljava/lang/String;Ljava/lang/String;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_d_Ljava_lang_String_Ljava_lang_String_, new JValue (native_p0), new JValue (native_p1));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_d_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_;
// Binding for the Java static method d(String, String, Exception).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='d' and count(parameter)=3 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String'] and parameter[3][@type='java.lang.Exception']]"
[Register ("d", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V", "")]
public static void D (string p0, string p1, global::Java.Lang.Exception p2)
{
if (id_d_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ == IntPtr.Zero)
id_d_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ = JNIEnv.GetStaticMethodID (class_ref, "d", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_d_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_, new JValue (native_p0), new JValue (native_p1), new JValue (p2));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_e_Ljava_lang_String_Ljava_lang_String_;
// Binding for the Java static method e(String, String).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='e' and count(parameter)=2 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String']]"
[Register ("e", "(Ljava/lang/String;Ljava/lang/String;)V", "")]
public static void E (string p0, string p1)
{
if (id_e_Ljava_lang_String_Ljava_lang_String_ == IntPtr.Zero)
id_e_Ljava_lang_String_Ljava_lang_String_ = JNIEnv.GetStaticMethodID (class_ref, "e", "(Ljava/lang/String;Ljava/lang/String;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_e_Ljava_lang_String_Ljava_lang_String_, new JValue (native_p0), new JValue (native_p1));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
static IntPtr id_e_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_;
// Binding for the Java static method e(String, String, Exception).
// Metadata.xml XPath method reference: path="/api/package[@name='com.umeng.common']/class[@name='Log']/method[@name='e' and count(parameter)=3 and parameter[1][@type='java.lang.String'] and parameter[2][@type='java.lang.String'] and parameter[3][@type='java.lang.Exception']]"
[Register ("e", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V", "")]
public static void E (string p0, string p1, global::Java.Lang.Exception p2)
{
if (id_e_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ == IntPtr.Zero)
id_e_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_ = JNIEnv.GetStaticMethodID (class_ref, "e", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Exception;)V");
IntPtr native_p0 = JNIEnv.NewString (p0);
IntPtr native_p1 = JNIEnv.NewString (p1);
JNIEnv.CallStaticVoidMethod (class_ref, id_e_Ljava_lang_String_Ljava_lang_String_Ljava_lang_Exception_, new JValue (native_p0), new JValue (native_p1), new JValue (p2));
JNIEnv.DeleteLocalRef (native_p0);
JNIEnv.DeleteLocalRef (native_p1);
}
}
}
| |
using UnityEngine;
using System.Collections.Generic;
// Base component for procedurally generated asteroid belts. Subclasses supply
// the asteroid list via CalculateAsteroids; this class sorts the asteroids
// into texture groups, builds billboard quad meshes for them, and keeps the
// group materials (color, lights, shadows, animation age) up to date.
public abstract class SgtBelt : MonoBehaviour
{
    // All currently enabled belts (maintained by OnEnable/OnDisable).
    public static List<SgtBelt> AllBelts = new List<SgtBelt>();

    // Lights and shadows written into every group material.
    public List<Light> Lights = new List<Light>();

    public List<SgtShadow> Shadows = new List<SgtShadow>();

    // Tint color and brightness multiplier applied to all asteroids.
    public Color Color = Color.white;

    public float Brightness = 1.0f;

    // Base render queue plus offset used by the generated materials.
    public SgtRenderQueue RenderQueue = SgtRenderQueue.Geometry;

    public int RenderQueueOffset;

    // Simulation time driving orbit animation; advanced by TimeScale each frame.
    public float Age;

    public float TimeScale = 1.0f;

    // When true, a dirty mesh is rebuilt automatically during Update.
    public bool AutoRegenerate = true;

    // Asteroid groups, one per MainTex/HeightTex texture combination.
    [SerializeField]
    protected List<SgtBeltGroup> groups = new List<SgtBeltGroup>();

    protected int lightCount;

    // Scratch list reused when writing shader keywords.
    protected static List<string> keywords = new List<string>();

    [System.NonSerialized]
    private bool meshDirty = true;

    [System.NonSerialized]
    private bool meshGenerated;

    // Flags the belt mesh for regeneration (performed in UpdateDirty).
    public void MarkMeshAsDirty()
    {
#if UNITY_EDITOR
        SgtHelper.SetDirty(this);
#endif
        meshDirty = true;
    }

    public SgtCustomBelt MakeEditableCopy(Transform parent = null)
    {
        return MakeEditableCopy(parent, Vector3.zero, Quaternion.identity, Vector3.one);
    }

    // Creates an SgtCustomBelt clone of this belt whose asteroid list can be
    // edited by hand. The clone receives copies of this belt's asteroids plus
    // its Color and Brightness settings.
    public SgtCustomBelt MakeEditableCopy(Transform parent, Vector3 localPosition, Quaternion localRotation, Vector3 localScale)
    {
#if UNITY_EDITOR
        SgtHelper.BeginUndo("Create Editable Belt Copy");
#endif
        var gameObject = SgtHelper.CreateGameObject("Editable Belt Copy", parent, localPosition, localRotation, localScale);
        var customBelt = SgtHelper.AddComponent<SgtCustomBelt>(gameObject, false);
        var asteroids  = default(List<SgtBeltAsteroid>);
        var pool       = default(bool);

        CalculateAsteroids(out asteroids, out pool);

        if (asteroids != null)
        {
            if (pool == true)
            {
                // The list is temporary, so the clone can take ownership directly
                customBelt.Asteroids = asteroids;
            }
            else
            {
                // The list is persistent, so deep-copy each asteroid into the clone
                for (var i = 0; i < asteroids.Count; i++)
                {
                    var asteroid = asteroids[i];

                    if (asteroid != null)
                    {
                        var newAsteroid = SgtClassPool<SgtBeltAsteroid>.Pop() ?? new SgtBeltAsteroid();

                        newAsteroid.CopyFrom(asteroid);

                        // BUGFIX: add the copy, not the source asteroid - previously
                        // the pooled copy was filled in and then discarded, and the
                        // clone shared the original asteroid instances
                        customBelt.Asteroids.Add(newAsteroid);
                    }
                }
            }
        }

        customBelt.Color      = Color;
        customBelt.Brightness = Brightness;

        return customBelt;
    }

    // Called before a camera renders this belt; feeds the camera roll angle to
    // each group material so the asteroid billboards stay view-aligned.
    public void ObserverPreCull(SgtObserver observer)
    {
        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            if (group != null && group.Material != null)
            {
                group.Material.SetFloat("_CameraRollAngle", observer.RollAngle * Mathf.Deg2Rad);
            }
        }
    }

    public void UpdateState()
    {
        UpdateDirty();
        UpdateGroups();
    }

    // Rebuilds all asteroid meshes from CalculateAsteroids.
    [ContextMenu("Regenerate")]
    public void Regenerate()
    {
        meshDirty     = false;
        meshGenerated = true;

        BeginRegeneration();
        {
            RegenerateMeshes();
        }
        EndRegeneration();
    }

#if UNITY_EDITOR
    [ContextMenu("Make Editable Copy")]
    public void MakeEditableCopyContext()
    {
        var customBelt = MakeEditableCopy(transform.parent, transform.localPosition, transform.localRotation, transform.localScale);

        SgtHelper.SelectAndPing(customBelt);
    }
#endif

    protected virtual void Update()
    {
        Age += Time.deltaTime * TimeScale;

        UpdateState();
    }

    protected virtual void OnEnable()
    {
#if UNITY_EDITOR
        // First belt coming online - refresh the editor views
        if (AllBelts.Count == 0)
        {
            SgtHelper.RepaintAll();
        }
#endif
        AllBelts.Add(this);

        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            if (group != null)
            {
                group.gameObject.SetActive(true);
            }
        }
    }

    protected virtual void OnDisable()
    {
        AllBelts.Remove(this);

        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            if (group != null)
            {
                group.gameObject.SetActive(false);
            }
        }
    }

    protected virtual void OnDestroy()
    {
        for (var i = groups.Count - 1; i >= 0; i--)
        {
            SgtBeltGroup.MarkForDestruction(groups[i]);
        }

        groups.Clear();
    }

    // Produces the asteroid list; pool == true when the list is temporary and
    // may be returned to the class pool after use.
    protected abstract void CalculateAsteroids(out List<SgtBeltAsteroid> asteroids, out bool pool); // pool == true when they are temporary

    // Writes textures, color, lights, shadows, and keywords into the group's
    // material, creating the material on first use.
    protected virtual void UpdateGroupMaterial(SgtBeltGroup group)
    {
        if (group.Material == null) group.Material = SgtHelper.CreateTempMaterial(SgtHelper.ShaderNamePrefix + "Belt");

        var scale       = transform.lossyScale.x;
        var color       = SgtHelper.Brighten(Color, Brightness);
        var renderQueue = (int)RenderQueue + RenderQueueOffset;
        var lightCount  = SgtHelper.WriteLights(Lights, 2, transform.position, null, null, group.Material);
        var shadowCount = SgtHelper.WriteShadows(Shadows, 2, group.Material);

        SgtHelper.WriteLightKeywords(Lights.Count > 0, lightCount, keywords);
        SgtHelper.WriteShadowKeywords(shadowCount, keywords);

        group.Material.renderQueue = renderQueue;

        group.Material.SetTexture("_MainTex", group.MainTex);
        group.Material.SetTexture("_HeightTex", group.HeightTex);
        group.Material.SetColor("_Color", color);
        group.Material.SetFloat("_Scale", scale);
        group.Material.SetFloat("_Age", Age);
    }

    private void UpdateDirty()
    {
        if (meshDirty == true)
        {
            // Regenerate automatically, or at least once if never generated
            if (AutoRegenerate == true || meshGenerated == false)
            {
                Regenerate();
            }
        }
    }

    private void UpdateGroups()
    {
        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            if (group != null)
            {
                UpdateGroupMaterial(group);

                SgtHelper.SetKeywords(group.Material, keywords); keywords.Clear();

                group.ManualUpdate();
            }
        }
    }

    // Clears per-group asteroid lists and pools existing meshes before a rebuild.
    private void BeginRegeneration()
    {
        groups.RemoveAll(g => g == null);

        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            group.Models.RemoveAll(m => m == null);

            group.Asteroids.Clear();

            for (var j = group.Models.Count - 1; j >= 0; j--)
            {
                group.Models[j].PoolMeshNow();
            }
        }
    }

    // Pools any groups/models that ended up unused after the rebuild.
    private void EndRegeneration()
    {
        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            if (group.Asteroids.Count > 0)
            {
                group.Asteroids.Clear(); // No longer needed, and they've already been pooled in RegenerateMeshes()

                for (var j = group.Models.Count - 1; j >= 0; j--)
                {
                    var model = group.Models[j];

                    if (model.Mesh == null)
                    {
                        SgtBeltModel.Pool(model);

                        group.Models.RemoveAt(j);
                    }
                }
            }
            else
            {
                SgtBeltGroup.Pool(group);

                groups.RemoveAt(i);
            }
        }
    }

    // Sorts the calculated asteroids into texture groups, then builds one quad
    // per asteroid (position/color/normal/tangent/uv/size vertex streams),
    // splitting into multiple meshes when vertex limits require it.
    private void RegenerateMeshes()
    {
        var asteroids = default(List<SgtBeltAsteroid>);
        var pool      = default(bool);

        CalculateAsteroids(out asteroids, out pool);

        if (asteroids != null)
        {
            // Sort asteroids into groups
            for (var i = asteroids.Count - 1; i >= 0; i--)
            {
                var asteroid = asteroids[i];

                if (asteroid != null)
                {
                    var group = GetGroup(asteroid.MainTex, asteroid.HeightTex);

                    group.Asteroids.Add(asteroid);
                }
            }

            // Pool asteroids?
            if (pool == true)
            {
                SgtClassPool<SgtBeltAsteroid>.Add(asteroids);
            }

            // Build groups
            for (var i = groups.Count - 1; i >= 0; i--)
            {
                var group          = groups[i];
                var groupAsteroids = group.Asteroids;
                var maxWidth       = 0.0f;
                var maxHeight      = 0.0f;

                SgtProceduralMesh.Clear();

                for (var j = groupAsteroids.Count - 1; j >= 0; j--)
                {
                    var asteroid = groupAsteroids[j];
                    var radius   = asteroid.Radius;
                    var distance = asteroid.OrbitDistance;
                    var height   = asteroid.Height;
                    var uv       = SgtHelper.CalculateSpriteUV(asteroid.MainTex);

                    // Track the belt extents for the mesh bounds below
                    maxWidth  = Mathf.Max(maxWidth, distance + radius);
                    maxHeight = Mathf.Max(maxHeight, height + radius);

                    SgtProceduralMesh.PushPosition(asteroid.OrbitAngle, distance, asteroid.OrbitSpeed, 4);

                    SgtProceduralMesh.PushColor(asteroid.Color, 4);

                    // Normals encode the quad corner offsets for the billboard shader
                    SgtProceduralMesh.PushNormal(-1.0f, 1.0f, 0.0f);
                    SgtProceduralMesh.PushNormal( 1.0f, 1.0f, 0.0f);
                    SgtProceduralMesh.PushNormal(-1.0f, -1.0f, 0.0f);
                    SgtProceduralMesh.PushNormal( 1.0f, -1.0f, 0.0f);

                    SgtProceduralMesh.PushTangent(asteroid.Angle / Mathf.PI, asteroid.Spin / Mathf.PI, 0.0f, 0.0f, 4);

                    SgtProceduralMesh.PushCoord1(uv.x, uv.y);
                    SgtProceduralMesh.PushCoord1(uv.z, uv.y);
                    SgtProceduralMesh.PushCoord1(uv.x, uv.w);
                    SgtProceduralMesh.PushCoord1(uv.z, uv.w);

                    SgtProceduralMesh.PushCoord2(radius, height, 4);
                }

                var bounds = new Bounds(Vector3.zero, new Vector3(maxWidth * 2.0f, maxHeight * 2.0f, maxWidth * 2.0f));

                SgtProceduralMesh.SplitQuads(HideFlags.DontSave);

                var meshCount = SgtProceduralMesh.Count;

                // Copy meshes
                for (var j = 0; j < meshCount; j++)
                {
                    var mesh  = SgtProceduralMesh.Pop();
                    var model = group.Models.Count > j ? group.Models[j] : SgtBeltModel.Create(group);

                    mesh.bounds = bounds;

                    model.Mesh = mesh;
                }
            }
        }
    }

    // Finds or creates the group matching this texture combination.
    private SgtBeltGroup GetGroup(Sprite diffuseSprite, Sprite depthSprite)
    {
        var diffuseTexture = diffuseSprite != null ? diffuseSprite.texture : null;
        var depthTexture   = depthSprite   != null ? depthSprite.texture   : null;

        for (var i = groups.Count - 1; i >= 0; i--)
        {
            var group = groups[i];

            if (group.MainTex == diffuseTexture && group.HeightTex == depthTexture)
            {
                return group;
            }
        }

        var newGroup = SgtBeltGroup.Create(this); groups.Add(newGroup);

        newGroup.MainTex   = diffuseTexture;
        newGroup.HeightTex = depthTexture;

        return newGroup;
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    // Driver for the Avx.MaskStore(Double*, Vector256<Double>, Vector256<Double>) test.
    // When AVX is supported it runs every scenario (direct, reflection, class/struct
    // field variants); otherwise it verifies that PlatformNotSupportedException is
    // thrown. Any scenario failure is surfaced as a thrown Exception at the end.
    private static void MaskStoreDouble()
    {
        var test = new StoreBinaryOpTest__MaskStoreDouble();

        if (test.IsSupported)
        {
            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();

            if (Avx.IsSupported)
            {
                // Validates basic functionality works, using Load
                test.RunBasicScenario_Load();

                // Validates basic functionality works, using LoadAligned
                test.RunBasicScenario_LoadAligned();
            }

            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();

            if (Avx.IsSupported)
            {
                // Validates calling via reflection works, using Load
                test.RunReflectionScenario_Load();

                // Validates calling via reflection works, using LoadAligned
                test.RunReflectionScenario_LoadAligned();
            }

            // Validates passing a static member works
            test.RunClsVarScenario();

            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();

            if (Avx.IsSupported)
            {
                // Validates passing a local works, using Load
                test.RunLclVarScenario_Load();

                // Validates passing a local works, using LoadAligned
                test.RunLclVarScenario_LoadAligned();
            }

            // Validates passing the field of a local class works
            test.RunClassLclFldScenario();

            // Validates passing an instance member of a class works
            test.RunClassFldScenario();

            // Validates passing the field of a local struct works
            test.RunStructLclFldScenario();

            // Validates passing an instance member of a struct works
            test.RunStructFldScenario();
        }
        else
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
// Test harness for Avx.MaskStore(Double*, Vector256<Double>, Vector256<Double>)
// (VMASKMOVPD): an element of the source vector is stored to memory only when
// the corresponding mask element has its most-significant (sign) bit set;
// other destination elements are left untouched.
public sealed unsafe class StoreBinaryOpTest__MaskStoreDouble
{
    private struct TestStruct
    {
        public Vector256<Double> _fld1;
        public Vector256<Double> _fld2;

        // Builds a TestStruct whose vector fields are loaded from freshly
        // randomized copies of the shared static data arrays.
        public static TestStruct Create()
        {
            var testStruct = new TestStruct();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref testStruct._fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Double>>());
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref testStruct._fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Double>>());

            return testStruct;
        }

        // Validates that passing instance fields of a struct works when the
        // intrinsic is invoked from inside the struct itself.
        public void RunStructFldScenario(StoreBinaryOpTest__MaskStoreDouble testClass)
        {
            Avx.MaskStore((Double*)testClass._dataTable.outArrayPtr, _fld1, _fld2);
            testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
        }
    }

    // 32 bytes: the size of a Vector256 register.
    private static readonly int LargestVectorSize = 32;

    private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Double>>() / sizeof(Double);
    private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<Double>>() / sizeof(Double);
    private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<Double>>() / sizeof(Double);

    // Scratch arrays that each scenario re-randomizes before loading vectors.
    private static Double[] _data1 = new Double[Op1ElementCount];
    private static Double[] _data2 = new Double[Op2ElementCount];

    // Static (class-variable) operands for RunClsVarScenario.
    private static Vector256<Double> _clsVar1;
    private static Vector256<Double> _clsVar2;

    // Instance-field operands for RunClassFldScenario.
    private Vector256<Double> _fld1;
    private Vector256<Double> _fld2;

    // Pinned, aligned in/out buffers shared by all scenarios.
    private SimpleBinaryOpTest__DataTable<Double, Double, Double> _dataTable;

    static StoreBinaryOpTest__MaskStoreDouble()
    {
        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Double>>());
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Double>>());
    }

    public StoreBinaryOpTest__MaskStoreDouble()
    {
        Succeeded = true;

        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Double>>());
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Double>>());

        for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetDouble(); }
        for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetDouble(); }
        _dataTable = new SimpleBinaryOpTest__DataTable<Double, Double, Double>(_data1, _data2, new Double[RetElementCount], LargestVectorSize);
    }

    public bool IsSupported => Avx.IsSupported;

    public bool Succeeded { get; set; }

    // Validates basic functionality, reading the operands via Unsafe.Read.
    public void RunBasicScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));

        Avx.MaskStore(
            (Double*)_dataTable.outArrayPtr,
            Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr),
            Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr)
        );

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Validates basic functionality, reading the operands via Avx.LoadVector256.
    public void RunBasicScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));

        Avx.MaskStore(
            (Double*)_dataTable.outArrayPtr,
            Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr)),
            Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr))
        );

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Validates basic functionality, reading the operands via LoadAlignedVector256.
    public void RunBasicScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));

        Avx.MaskStore(
            (Double*)_dataTable.outArrayPtr,
            Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr)),
            Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr))
        );

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Validates that the intrinsic can be invoked through reflection.
    public void RunReflectionScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));

        typeof(Avx).GetMethod(nameof(Avx.MaskStore), new Type[] { typeof(Double*), typeof(Vector256<Double>), typeof(Vector256<Double>) })
                   .Invoke(null, new object[] {
                       Pointer.Box(_dataTable.outArrayPtr, typeof(Double*)),
                       Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr),
                       Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr)
                   });

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));

        typeof(Avx).GetMethod(nameof(Avx.MaskStore), new Type[] { typeof(Double*), typeof(Vector256<Double>), typeof(Vector256<Double>) })
                   .Invoke(null, new object[] {
                       Pointer.Box(_dataTable.outArrayPtr, typeof(Double*)),
                       Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr)),
                       Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr))
                   });

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    public void RunReflectionScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));

        typeof(Avx).GetMethod(nameof(Avx.MaskStore), new Type[] { typeof(Double*), typeof(Vector256<Double>), typeof(Vector256<Double>) })
                   .Invoke(null, new object[] {
                       Pointer.Box(_dataTable.outArrayPtr, typeof(Double*)),
                       Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr)),
                       Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr))
                   });

        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Validates passing static fields as operands.
    public void RunClsVarScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));

        Avx.MaskStore(
            (Double*)_dataTable.outArrayPtr,
            _clsVar1,
            _clsVar2
        );

        ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
    }

    // Validates passing locals as operands, using Unsafe.Read.
    public void RunLclVarScenario_UnsafeRead()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));

        var left = Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr);
        var right = Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr);

        Avx.MaskStore((Double*)_dataTable.outArrayPtr, left, right);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_Load()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));

        var left = Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr));
        var right = Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr));

        Avx.MaskStore((Double*)_dataTable.outArrayPtr, left, right);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    public void RunLclVarScenario_LoadAligned()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));

        var left = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr));
        var right = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr));

        Avx.MaskStore((Double*)_dataTable.outArrayPtr, left, right);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Validates passing the instance fields of a freshly constructed local class.
    public void RunClassLclFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));

        var test = new StoreBinaryOpTest__MaskStoreDouble();
        Avx.MaskStore((Double*)_dataTable.outArrayPtr, test._fld1, test._fld2);

        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }

    // Validates passing this instance's own fields.
    public void RunClassFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));

        Avx.MaskStore((Double*)_dataTable.outArrayPtr, _fld1, _fld2);
        ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
    }

    // Validates passing the fields of a local struct.
    public void RunStructLclFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));

        var test = TestStruct.Create();
        Avx.MaskStore((Double*)_dataTable.outArrayPtr, test._fld1, test._fld2);

        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }

    // Validates passing struct fields when the call happens inside the struct.
    public void RunStructFldScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));

        var test = TestStruct.Create();
        test.RunStructFldScenario(this);
    }

    // Validates that PlatformNotSupportedException is thrown on unsupported hardware.
    public void RunUnsupportedScenario()
    {
        TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));

        bool succeeded = false;

        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            succeeded = true;
        }

        if (!succeeded)
        {
            Succeeded = false;
        }
    }

    private void ValidateResult(Vector256<Double> left, Vector256<Double> right, void* result, [CallerMemberName] string method = "")
    {
        Double[] inArray1 = new Double[Op1ElementCount];
        Double[] inArray2 = new Double[Op2ElementCount];
        Double[] outArray = new Double[RetElementCount];

        Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), left);
        Unsafe.WriteUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), right);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Double>>());

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
    {
        Double[] inArray1 = new Double[Op1ElementCount];
        Double[] inArray2 = new Double[Op2ElementCount];
        Double[] outArray = new Double[RetElementCount];

        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), (uint)Unsafe.SizeOf<Vector256<Double>>());
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), (uint)Unsafe.SizeOf<Vector256<Double>>());
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Double>>());

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    // Checks the output against MaskStore semantics: result[i] must equal
    // right[i] when the mask element left[i] has its sign bit set, and is
    // otherwise unconstrained (the store leaves it untouched). Comparisons
    // are done on raw bit patterns so NaN payloads and -0.0 compare exactly.
    //
    // Bug fix: the generated code previously wrote the "unchanged" arm of the
    // ternary as BitConverter.DoubleToInt64Bits(result[i]) inside a Double
    // expression, which implicitly converted the bit pattern back to a double
    // value and re-extracted its bits - corrupting the comparison and failing
    // spuriously for every unmasked element. The expected value is now
    // computed purely in bit space.
    private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
    {
        bool succeeded = true;

        for (var i = 0; i < RetElementCount; i++)
        {
            long actualBits = BitConverter.DoubleToInt64Bits(result[i]);
            long expectedBits = (BitConverter.DoubleToInt64Bits(left[i]) < 0)
                ? BitConverter.DoubleToInt64Bits(right[i])
                : actualBits;

            if (actualBits != expectedBits)
            {
                succeeded = false;
                break;
            }
        }

        if (!succeeded)
        {
            TestLibrary.TestFramework.LogInformation($"{nameof(Avx)}.{nameof(Avx.MaskStore)}<Double>(Vector256<Double>, Vector256<Double>): {method} failed:");
            TestLibrary.TestFramework.LogInformation($"    left: ({string.Join(", ", left)})");
            TestLibrary.TestFramework.LogInformation($"   right: ({string.Join(", ", right)})");
            TestLibrary.TestFramework.LogInformation($"  result: ({string.Join(", ", result)})");
            TestLibrary.TestFramework.LogInformation(string.Empty);

            Succeeded = false;
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Net.Sockets;
using System.Net.Test.Common;
using System.Runtime.InteropServices;
using Xunit;
namespace System.Net.NameResolution.PalTests
{
// Exercises the platform-specific name-resolution layer (NameResolutionPal)
// behind System.Net.Dns: forward lookups (GetHostByName / TryGetAddrInfo),
// reverse lookups (GetHostByAddr / TryGetNameInfo), and round trips between
// the two. These tests assume a working network stack and DNS configuration
// on the test machine.
public class NameResolutionPalTests
{
    static NameResolutionPalTests()
    {
        // Sockets (Winsock on Windows) must be initialized before any PAL call.
        NameResolutionPal.EnsureSocketsAreInitialized();
    }

    [Fact]
    public void HostName_NotNull()
    {
        Assert.NotNull(NameResolutionPal.GetHostName());
    }

    // Resolving "localhost" must succeed and populate every part of the entry.
    [Fact]
    public void GetHostByName_LocalHost()
    {
        IPHostEntry hostEntry = NameResolutionPal.GetHostByName("localhost");
        Assert.NotNull(hostEntry);
        Assert.NotNull(hostEntry.HostName);
        Assert.NotNull(hostEntry.AddressList);
        Assert.NotNull(hostEntry.Aliases);
    }

    // Syntactically invalid host names that must fail resolution.
    public static object[][] InvalidHostNames = new object[][] {
        new object[] { ":" },
        new object[] { "..." }
    };

    [Theory, MemberData(nameof(InvalidHostNames))]
    public void GetHostByName_InvalidHostName_Throws(string hostName)
    {
        Assert.ThrowsAny<SocketException>(() => NameResolutionPal.GetHostByName(hostName));
    }

    // The machine's own host name must resolve to a complete entry.
    [Fact]
    public void GetHostByName_HostName()
    {
        string hostName = NameResolutionPal.GetHostName();
        Assert.NotNull(hostName);

        IPHostEntry hostEntry = NameResolutionPal.GetHostByName(hostName);
        Assert.NotNull(hostEntry);
        Assert.NotNull(hostEntry.HostName);
        Assert.NotNull(hostEntry.AddressList);
        Assert.NotNull(hostEntry.Aliases);
    }

    [Fact]
    public void GetHostByAddr_LocalHost()
    {
        // 0x0100007f is 127.0.0.1 (IPv4 loopback) in the network byte order
        // expected by the IPAddress(long) constructor.
        Assert.NotNull(NameResolutionPal.GetHostByAddr(new IPAddress(0x0100007f)));
    }

    // Round trip: every address resolved for "localhost" must also appear in
    // the reverse lookup of its first address.
    [Fact]
    public void GetHostByName_LocalHost_GetHostByAddr()
    {
        IPHostEntry hostEntry1 = NameResolutionPal.GetHostByName("localhost");
        Assert.NotNull(hostEntry1);
        IPHostEntry hostEntry2 = NameResolutionPal.GetHostByAddr(hostEntry1.AddressList[0]);
        Assert.NotNull(hostEntry2);

        IPAddress[] list1 = hostEntry1.AddressList;
        IPAddress[] list2 = hostEntry2.AddressList;

        for (int i = 0; i < list1.Length; i++)
        {
            Assert.NotEqual(-1, Array.IndexOf(list2, list1[i]));
        }
    }

    // Round trip against a well-known external host taken from the shared
    // test configuration; assumes the reverse lookup returns a subset of the
    // forward lookup's addresses.
    [Fact]
    public void GetHostByName_HostName_GetHostByAddr()
    {
        IPHostEntry hostEntry1 = NameResolutionPal.GetHostByName(System.Net.Test.Common.Configuration.Http.Http2Host);
        Assert.NotNull(hostEntry1);

        IPAddress[] list1 = hostEntry1.AddressList;
        Assert.InRange(list1.Length, 1, Int32.MaxValue);

        foreach (IPAddress addr1 in list1)
        {
            IPHostEntry hostEntry2 = NameResolutionPal.GetHostByAddr(addr1);
            Assert.NotNull(hostEntry2);

            IPAddress[] list2 = hostEntry2.AddressList;
            Assert.InRange(list2.Length, 1, list1.Length);

            foreach (IPAddress addr2 in list2)
            {
                Assert.NotEqual(-1, Array.IndexOf(list1, addr2));
            }
        }
    }

    // getaddrinfo-based lookup of "localhost" must succeed.
    [Fact]
    public void TryGetAddrInfo_LocalHost()
    {
        IPHostEntry hostEntry;
        int nativeErrorCode;
        SocketError error = NameResolutionPal.TryGetAddrInfo("localhost", out hostEntry, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);
        Assert.NotNull(hostEntry.HostName);
        Assert.NotNull(hostEntry.AddressList);
        Assert.NotNull(hostEntry.Aliases);
    }

    [Fact]
    public void TryGetAddrInfo_HostName()
    {
        string hostName = NameResolutionPal.GetHostName();
        Assert.NotNull(hostName);

        IPHostEntry hostEntry;
        int nativeErrorCode;
        SocketError error = NameResolutionPal.TryGetAddrInfo(hostName, out hostEntry, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);
        Assert.NotNull(hostEntry.HostName);
        Assert.NotNull(hostEntry.AddressList);
        Assert.NotNull(hostEntry.Aliases);
    }

    // getnameinfo-based reverse lookup of the IPv4 loopback address.
    [Fact]
    public void TryGetNameInfo_LocalHost_IPv4()
    {
        SocketError error;
        int nativeErrorCode;
        string name = NameResolutionPal.TryGetNameInfo(new IPAddress(new byte[] { 127, 0, 0, 1 }), out error, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(name);
    }

    // getnameinfo-based reverse lookup of the IPv6 loopback address (::1).
    [Fact]
    public void TryGetNameInfo_LocalHost_IPv6()
    {
        SocketError error;
        int nativeErrorCode;
        string name = NameResolutionPal.TryGetNameInfo(new IPAddress(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }), out error, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(name);
    }

    // Forward lookup of "localhost", then reverse lookup of its first address.
    [Fact]
    public void TryGetAddrInfo_LocalHost_TryGetNameInfo()
    {
        IPHostEntry hostEntry;
        int nativeErrorCode;
        SocketError error = NameResolutionPal.TryGetAddrInfo("localhost", out hostEntry, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);

        string name = NameResolutionPal.TryGetNameInfo(hostEntry.AddressList[0], out error, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(name);
    }

    // Forward + reverse lookup of the machine's own host name. HostNotFound
    // is tolerated on Unix where local discovery / reverse DNS entries may
    // legitimately be absent.
    [Fact]
    public void TryGetAddrInfo_HostName_TryGetNameInfo()
    {
        string hostName = NameResolutionPal.GetHostName();
        Assert.NotNull(hostName);

        IPHostEntry hostEntry;
        int nativeErrorCode;
        SocketError error = NameResolutionPal.TryGetAddrInfo(hostName, out hostEntry, out nativeErrorCode);
        if (error == SocketError.HostNotFound)
        {
            // On Unix, getaddrinfo returns host not found, if all the machine discovery settings on the local network
            // is turned off. Hence dns lookup for it's own hostname fails.
            Assert.True(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) || RuntimeInformation.IsOSPlatform(OSPlatform.OSX));
            return;
        }

        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);

        string name = NameResolutionPal.TryGetNameInfo(hostEntry.AddressList[0], out error, out nativeErrorCode);
        if (error == SocketError.HostNotFound)
        {
            // On Unix, getaddrinfo returns private ipv4 address for hostname. If the OS doesn't have the
            // reverse dns lookup entry for this address, getnameinfo returns host not found.
            Assert.True(RuntimeInformation.IsOSPlatform(OSPlatform.Linux) || RuntimeInformation.IsOSPlatform(OSPlatform.OSX));
            return;
        }

        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(name);
    }

    // Resolution of a public external host; requires internet connectivity.
    [Fact]
    public void TryGetAddrInfo_ExternalHost()
    {
        string hostName = "microsoft.com";

        IPHostEntry hostEntry;
        int nativeErrorCode;
        SocketError error = NameResolutionPal.TryGetAddrInfo(hostName, out hostEntry, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);
    }

    // Reverse lookup of IPv4 loopback, then forward lookup of the result.
    [Fact]
    public void TryGetNameInfo_LocalHost_IPv4_TryGetAddrInfo()
    {
        SocketError error;
        int nativeErrorCode;
        string name = NameResolutionPal.TryGetNameInfo(new IPAddress(new byte[] { 127, 0, 0, 1 }), out error, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(name);

        IPHostEntry hostEntry;
        error = NameResolutionPal.TryGetAddrInfo(name, out hostEntry, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);
    }

    // Reverse lookup of IPv6 loopback, then forward lookup of the result.
    [Fact]
    public void TryGetNameInfo_LocalHost_IPv6_TryGetAddrInfo()
    {
        SocketError error;
        int nativeErrorCode;
        string name = NameResolutionPal.TryGetNameInfo(new IPAddress(new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }), out error, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(name);

        IPHostEntry hostEntry;
        error = NameResolutionPal.TryGetAddrInfo(name, out hostEntry, out nativeErrorCode);
        Assert.Equal(SocketError.Success, error);
        Assert.NotNull(hostEntry);
    }
}
}
| |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
using System;
using Windows.Devices.Gpio.Provider;
using Windows.Foundation;
namespace Windows.Devices.Gpio
{
/// <summary>
/// Represents a general-purpose I/O (GPIO) pin.
/// </summary>
/// <summary>
/// Represents a general-purpose I/O (GPIO) pin.
/// </summary>
public sealed class GpioPin : IDisposable
{
    // Underlying platform provider. Null is also used as the "disposed" marker
    // (see ThrowIfDisposed / Dispose).
    private IGpioPinProvider _provider;

    // Cached drive mode; the pin starts as an input.
    private GpioPinDriveMode _driveMode = GpioPinDriveMode.Input;

    // Last value written via Write(); latched so it can be driven onto the pin
    // when the drive mode later switches to Output.
    private GpioPinValue _lastOutputValue = GpioPinValue.Low;

    // Combined delegate of all ValueChanged subscribers; null when there are none.
    private TypedEventHandler<GpioPin, GpioPinValueChangedEventArgs> _callbacks = null;

    // Instances are created internally; the provider is injected afterwards
    // via the PinProvider setter.
    internal GpioPin()
    {
    }

    ~GpioPin()
    {
        Dispose(false);
    }

    /// <summary>
    /// Occurs when the value of the general-purpose I/O (GPIO) pin changes, either because of an external stimulus
    /// when the pin is configured as an input, or when a value is written to the pin when the pin is configured as
    /// an output.
    /// </summary>
    public event TypedEventHandler<GpioPin, GpioPinValueChangedEventArgs> ValueChanged
    {
        add
        {
            ThrowIfDisposed();

            // Hook the provider's event only on the first subscriber, so the
            // provider is not asked to report changes nobody listens to.
            var callbacksOld = _callbacks;
            _callbacks += value;

            if (callbacksOld == null)
            {
                _provider.ValueChanged += HandlePinChangedEvent;
            }
        }
        remove
        {
            ThrowIfDisposed();

            // Unhook from the provider once the last subscriber is removed.
            _callbacks -= value;

            if( _callbacks == null )
            {
                _provider.ValueChanged -= HandlePinChangedEvent;
            }
        }
    }

    /// <summary>
    /// Gets or sets the debounce timeout for the general-purpose I/O (GPIO) pin, which is an interval during which
    /// changes to the value of the pin are filtered out and do not generate <c>ValueChanged</c> events.
    /// </summary>
    /// <value> The debounce timeout for the GPIO pin, which is an interval during which changes to the value of the
    ///     pin are filtered out and do not generate <c>ValueChanged</c> events. If the length of this interval is
    ///     0, all changes to the value of the pin generate <c>ValueChanged</c> events.</value>
    public TimeSpan DebounceTimeout
    {
        get
        {
            ThrowIfDisposed();
            return _provider.DebounceTimeout;
        }

        set
        {
            ThrowIfDisposed();
            _provider.DebounceTimeout = value;
        }
    }

    /// <summary>
    /// Gets the pin number of the general-purpose I/O (GPIO) pin.
    /// </summary>
    /// <value>The pin number of the GPIO pin.</value>
    public int PinNumber
    {
        get
        {
            ThrowIfDisposed();
            return _provider.PinNumber;
        }
    }

    /// <summary>
    /// Gets the sharing mode in which the general-purpose I/O (GPIO) pin is open.
    /// </summary>
    /// <value>The sharing mode in which the GPIO pin is open.</value>
    public GpioSharingMode SharingMode => GpioSharingMode.Exclusive;

    // Injection point for the platform provider; presumably set by the
    // controller that opens the pin (caller not visible here - confirm).
    internal IGpioPinProvider PinProvider
    {
        set
        {
            _provider = value;
        }
    }

    /// <summary>
    /// Reads the current value of the general-purpose I/O (GPIO) pin.
    /// </summary>
    /// <returns>The current value of the GPIO pin. If the pin is configured as an output, this value is the last
    ///     value written to the pin.</returns>
    public GpioPinValue Read()
    {
        ThrowIfDisposed();
        return (GpioPinValue)_provider.Read();
    }

    /// <summary>
    /// Drives the specified value onto the general purpose I/O (GPIO) pin according to the current drive mode for
    /// the pin if the pin is configured as an output, or updates the latched output value for the pin if the pin is
    /// configured as an input.
    /// </summary>
    /// <param name="value">The enumeration value to write to the GPIO pin.
    ///     <para>If the GPIO pin is configured as an output, the method drives the specified value onto the pin
    ///         according to the current drive mode for the pin.</para>
    ///     <para>If the GPIO pin is configured as an input, the method updates the latched output value for the pin.
    ///         The latched output value is driven onto the pin when the configuration for the pin changes to
    ///         output.</para></param>
    /// <remarks>If the pin drive mode is not currently set to output, this will latch <paramref name="value"/>
    ///     and drive the signal the when the mode is set.</remarks>
    public void Write(GpioPinValue value)
    {
        ThrowIfDisposed();

        // Remember this value in case we switch drive mode
        _lastOutputValue = value;
        if (_driveMode == GpioPinDriveMode.Output)
        {
            _provider.Write(value);
        }
    }

    /// <summary>
    /// Gets whether the general-purpose I/O (GPIO) pin supports the specified drive mode.
    /// </summary>
    /// <param name="driveMode">The drive mode to check for support.</param>
    /// <returns>True if the GPIO pin supports the drive mode that driveMode specifies; otherwise false. If you
    ///     specify a drive mode for which this method returns false when you call SetDriveMode, SetDriveMode
    ///     generates an exception.</returns>
    public bool IsDriveModeSupported(GpioPinDriveMode driveMode)
    {
        // Only the four basic input/output modes are supported here; modes such
        // as open-drain variants report false.
        switch (driveMode)
        {
        case GpioPinDriveMode.Input:
        case GpioPinDriveMode.Output:
        case GpioPinDriveMode.InputPullUp:
        case GpioPinDriveMode.InputPullDown:
            return true;
        }

        return false;
    }

    /// <summary>
    /// Gets the current drive mode for the general-purpose I/O (GPIO) pin. The drive mode specifies whether the pin
    /// is configured as an input or an output, and determines how values are driven onto the pin.
    /// </summary>
    /// <returns>An enumeration value that indicates the current drive mode for the GPIO pin. The drive mode
    ///     specifies whether the pin is configured as an input or an output, and determines how values are driven
    ///     onto the pin.</returns>
    public GpioPinDriveMode GetDriveMode()
    {
        ThrowIfDisposed();
        return _driveMode;
    }

    /// <summary>
    /// Sets the drive mode of the general-purpose I/O (GPIO) pin. The drive mode specifies whether the pin is
    /// configured as an input or an output, and determines how values are driven onto the pin.
    /// </summary>
    /// <param name="driveMode">An enumeration value that specifies drive mode to use for the GPIO pin. The drive
    ///     mode specifies whether the pin is configured as an input or an output, and determines how values are
    ///     driven onto the pin.</param>
    public void SetDriveMode(GpioPinDriveMode driveMode)
    {
        ThrowIfDisposed();

        if (driveMode != _driveMode)
        {
            _provider.SetPinDriveMode((GpioDriveMode)driveMode);

            // When switching to output, drive the value latched by Write().
            if (driveMode == GpioPinDriveMode.Output)
            {
                _provider.Write(_lastOutputValue);
            }

            _driveMode = driveMode;
        }
    }

    /// <summary>
    /// Closes the general-purpose I/O (GPIO) pin and releases the resources associated with it.
    /// </summary>
    public void Dispose()
    {
        // Idempotent: a second call sees _provider == null and does nothing.
        if (_provider != null)
        {
            Dispose(true);
            _provider = null;
            GC.SuppressFinalize(this);
        }
    }

    // Forwards provider change notifications to subscribers; dropped if the
    // pin has already been disposed.
    private void HandlePinChangedEvent(GpioPin sender, GpioPinValueChangedEventArgs eventArgs)
    {
        if (_provider != null)
        {
            _callbacks?.Invoke(this, eventArgs);
        }
    }

    /// <summary>
    /// Releases internal resources held by the GPIO pin.
    /// </summary>
    /// <param name="disposing">True if called from Dispose, false if called from the finalizer.</param>
    private void Dispose(bool disposing)
    {
        if (disposing)
        {
            if (_provider != null)
            {
                _provider.Dispose();
            }
        }
    }

    // Guard used by all public members; a null provider marks a disposed pin.
    private void ThrowIfDisposed()
    {
        if (_provider == null)
        {
            throw new ObjectDisposedException(this.GetType().FullName);
        }
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.Differencing;
using Microsoft.CodeAnalysis.Emit;
using Microsoft.CodeAnalysis.Test.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.EditAndContinue.UnitTests
{
internal abstract class EditAndContinueTestHelpers
{
// The language-specific analyzer under test; supplied by derived helpers.
public abstract AbstractEditAndContinueAnalyzer Analyzer { get; }

// Finds the syntax node in <paramref name="root"/> corresponding to the given span.
public abstract SyntaxNode FindNode(SyntaxNode root, TextSpan span);

// Parses the given source text with the language-specific parser.
public abstract SyntaxTree ParseText(string source);

// Creates a library compilation over the given trees for semantic tests.
public abstract Compilation CreateLibraryCompilation(string name, IEnumerable<SyntaxTree> trees);

// Returns the declarator nodes of the given method symbol.
public abstract ImmutableArray<SyntaxNode> GetDeclarators(ISymbol method);
// Runs the analyzer over a document that has not been edited and verifies
// that the reported active statements and exception regions match the
// expected spans.
internal void VerifyUnchangedDocument(
    string source,
    ActiveStatementSpan[] oldActiveStatements,
    TextSpan?[] trackingSpansOpt,
    TextSpan[] expectedNewActiveStatements,
    ImmutableArray<TextSpan>[] expectedOldExceptionRegions,
    ImmutableArray<TextSpan>[] expectedNewExceptionRegions)
{
    var text = SourceText.From(source);
    var tree = ParseText(source);
    var root = tree.GetRoot();

    // The test source must parse without errors for the analysis to be meaningful.
    tree.GetDiagnostics().Where(d => d.Severity == DiagnosticSeverity.Error).Verify();

    var documentId = DocumentId.CreateNewId(ProjectId.CreateNewId("TestEnCProject"), "TestEnCDocument");

    // A tracking service is only simulated when the test supplies tracking spans.
    var trackingService = trackingSpansOpt != null
        ? new TestActiveStatementTrackingService(documentId, trackingSpansOpt)
        : null;

    var actualNewActiveStatements = new LinePositionSpan[oldActiveStatements.Length];
    var actualNewExceptionRegions = new ImmutableArray<LinePositionSpan>[oldActiveStatements.Length];

    Analyzer.AnalyzeUnchangedDocument(
        oldActiveStatements.AsImmutable(),
        text,
        root,
        documentId,
        trackingService,
        actualNewActiveStatements,
        actualNewExceptionRegions);

    // check active statements:
    AssertSpansEqual(expectedNewActiveStatements, actualNewActiveStatements, source, text);

    // check new exception regions:
    Assert.Equal(expectedNewExceptionRegions.Length, actualNewExceptionRegions.Length);
    for (int i = 0; i < expectedNewExceptionRegions.Length; i++)
    {
        AssertSpansEqual(expectedNewExceptionRegions[i], actualNewExceptionRegions[i], source, text);
    }
}
// Runs syntactic EnC analysis over the given edit script and verifies the
// produced rude-edit diagnostics, the mapped active statements, and (when no
// diagnostics were reported) the old and new exception regions. When the
// description supplies tracking spans, also verifies that the simulated
// tracking service ends up aligned with the new active statements.
internal void VerifyRudeDiagnostics(
    EditScript<SyntaxNode> editScript,
    ActiveStatementsDescription description,
    RudeEditDiagnosticDescription[] expectedDiagnostics)
{
    var oldActiveStatements = description.OldSpans;

    // Tracking spans, when present, must pair up 1:1 with active statements.
    if (description.OldTrackingSpans != null)
    {
        Assert.Equal(oldActiveStatements.Length, description.OldTrackingSpans.Length);
    }

    string newSource = editScript.Match.NewRoot.SyntaxTree.ToString();
    string oldSource = editScript.Match.OldRoot.SyntaxTree.ToString();

    var oldText = SourceText.From(oldSource);
    var newText = SourceText.From(newSource);

    var diagnostics = new List<RudeEditDiagnostic>();
    var actualNewActiveStatements = new LinePositionSpan[oldActiveStatements.Length];
    var actualNewExceptionRegions = new ImmutableArray<LinePositionSpan>[oldActiveStatements.Length];
    var updatedActiveMethodMatches = new List<AbstractEditAndContinueAnalyzer.UpdatedMemberInfo>();
    var editMap = Analyzer.BuildEditMap(editScript);

    DocumentId documentId = DocumentId.CreateNewId(ProjectId.CreateNewId("TestEnCProject"), "TestEnCDocument");

    // Only simulate a tracking service when the test provides tracking spans.
    TestActiveStatementTrackingService trackingService;
    if (description.OldTrackingSpans != null)
    {
        trackingService = new TestActiveStatementTrackingService(documentId, description.OldTrackingSpans);
    }
    else
    {
        trackingService = null;
    }

    Analyzer.AnalyzeSyntax(
        editScript,
        editMap,
        oldText,
        newText,
        documentId,
        trackingService,
        oldActiveStatements.AsImmutable(),
        actualNewActiveStatements,
        actualNewExceptionRegions,
        updatedActiveMethodMatches,
        diagnostics);

    diagnostics.Verify(newSource, expectedDiagnostics);

    // check active statements:
    AssertSpansEqual(description.NewSpans, actualNewActiveStatements, newSource, newText);

    if (diagnostics.Count == 0)
    {
        // check old exception regions:
        for (int i = 0; i < oldActiveStatements.Length; i++)
        {
            var actualOldExceptionRegions = Analyzer.GetExceptionRegions(
                oldText,
                editScript.Match.OldRoot,
                oldActiveStatements[i].Span,
                isLeaf: (oldActiveStatements[i].Flags & ActiveStatementFlags.LeafFrame) != 0);
            AssertSpansEqual(description.OldRegions[i], actualOldExceptionRegions, oldSource, oldText);
        }

        // check new exception regions:
        Assert.Equal(description.NewRegions.Length, actualNewExceptionRegions.Length);
        for (int i = 0; i < description.NewRegions.Length; i++)
        {
            AssertSpansEqual(description.NewRegions[i], actualNewExceptionRegions[i], newSource, newText);
        }
    }
    else
    {
        // When rude edits were reported no exception regions are expected.
        for (int i = 0; i < oldActiveStatements.Length; i++)
        {
            Assert.Equal(0, description.NewRegions[i].Length);
        }
    }

    if (description.OldTrackingSpans != null)
    {
        // Verify that the new tracking spans are equal to the new active statements.
        AssertEx.Equal(trackingService.TrackingSpans, description.NewSpans.Select(s => (TextSpan?)s));
    }
}
/// <summary>
/// Runs EnC trivia analysis over the edit script and verifies the reported
/// diagnostics, line edits and updated nodes against the expectations.
/// </summary>
internal void VerifyLineEdits(
    EditScript<SyntaxNode> editScript,
    IEnumerable<LineChange> expectedLineEdits,
    IEnumerable<string> expectedNodeUpdates,
    RudeEditDiagnosticDescription[] expectedDiagnostics)
{
    // Render both syntax trees back to text so trivia analysis can map positions.
    string sourceBefore = editScript.Match.OldRoot.SyntaxTree.ToString();
    string sourceAfter = editScript.Match.NewRoot.SyntaxTree.ToString();
    var textBefore = SourceText.From(sourceBefore);
    var textAfter = SourceText.From(sourceAfter);

    var editMap = Analyzer.BuildEditMap(editScript);

    // Buffers that AnalyzeTrivia fills in.
    var reportedDiagnostics = new List<RudeEditDiagnostic>();
    var reportedTriviaEdits = new List<KeyValuePair<SyntaxNode, SyntaxNode>>();
    var reportedLineEdits = new List<LineChange>();

    Analyzer.AnalyzeTrivia(
        textBefore,
        textAfter,
        editScript.Match,
        editMap,
        reportedTriviaEdits,
        reportedLineEdits,
        reportedDiagnostics,
        default(CancellationToken));

    // Diagnostics, line edits and updated nodes must all match the expectations.
    reportedDiagnostics.Verify(sourceAfter, expectedDiagnostics);
    AssertEx.Equal(expectedLineEdits, reportedLineEdits, itemSeparator: ",\r\n");

    // Each updated node is identified by the first line of its new text.
    var reportedNodeUpdates = reportedTriviaEdits.Select(edit => edit.Value.ToString().ToLines().First());
    AssertEx.Equal(expectedNodeUpdates, reportedNodeUpdates, itemSeparator: ",\r\n");
}
/// <summary>
/// Runs the full EnC pipeline (syntax, trivia and semantic analysis) over the edit
/// script and verifies the produced semantic edits and rude-edit diagnostics.
/// </summary>
/// <param name="additionalOldSources">Extra source documents for the old compilation; may be null.</param>
/// <param name="additionalNewSources">Extra source documents for the new compilation; may be null.</param>
/// <param name="expectedSemanticEdits">Expected edits, or null to skip semantic-edit verification.</param>
internal void VerifySemantics(
    EditScript<SyntaxNode> editScript,
    ActiveStatementsDescription activeStatements,
    IEnumerable<string> additionalOldSources,
    IEnumerable<string> additionalNewSources,
    SemanticEditDescription[] expectedSemanticEdits,
    RudeEditDiagnosticDescription[] expectedDiagnostics)
{
    var editMap = Analyzer.BuildEditMap(editScript);

    var oldRoot = editScript.Match.OldRoot;
    var newRoot = editScript.Match.NewRoot;

    var oldSource = oldRoot.SyntaxTree.ToString();
    var newSource = newRoot.SyntaxTree.ToString();

    var oldText = SourceText.From(oldSource);
    var newText = SourceText.From(newSource);

    // Materialized lists (rather than deferred LINQ sequences) so the diagnostic
    // checks below inspect the very same tree instances fed into the compilations;
    // the previous deferred Concat/Select re-parsed the additional sources on each
    // enumeration, producing fresh tree objects.
    var oldTrees = new List<SyntaxTree> { oldRoot.SyntaxTree };
    var newTrees = new List<SyntaxTree> { newRoot.SyntaxTree };

    if (additionalOldSources != null)
    {
        oldTrees.AddRange(additionalOldSources.Select(s => ParseText(s)));
    }

    // BUG FIX: this guard previously tested additionalOldSources, which either
    // dropped the additional new sources or dereferenced a null sequence.
    if (additionalNewSources != null)
    {
        newTrees.AddRange(additionalNewSources.Select(s => ParseText(s)));
    }

    var oldCompilation = CreateLibraryCompilation("Old", oldTrees);
    var newCompilation = CreateLibraryCompilation("New", newTrees);

    if (oldCompilation is CSharpCompilation)
    {
        oldCompilation.GetDiagnostics().Where(d => d.Severity == DiagnosticSeverity.Error).Verify();
        newCompilation.GetDiagnostics().Where(d => d.Severity == DiagnosticSeverity.Error).Verify();
    }
    else
    {
        // TODO: verify all compilation diagnostics like C# does (tests need to be updated)
        oldTrees.SelectMany(tree => tree.GetDiagnostics()).Where(d => d.Severity == DiagnosticSeverity.Error).Verify();
        newTrees.SelectMany(tree => tree.GetDiagnostics()).Where(d => d.Severity == DiagnosticSeverity.Error).Verify();
    }

    var oldModel = oldCompilation.GetSemanticModel(oldRoot.SyntaxTree);
    var newModel = newCompilation.GetSemanticModel(newRoot.SyntaxTree);

    var oldActiveStatements = activeStatements.OldSpans.AsImmutable();
    var updatedActiveMethodMatches = new List<AbstractEditAndContinueAnalyzer.UpdatedMemberInfo>();
    var triviaEdits = new List<KeyValuePair<SyntaxNode, SyntaxNode>>();
    var actualLineEdits = new List<LineChange>();
    var actualSemanticEdits = new List<SemanticEdit>();
    var diagnostics = new List<RudeEditDiagnostic>();

    var actualNewActiveStatements = new LinePositionSpan[activeStatements.OldSpans.Length];
    var actualNewExceptionRegions = new ImmutableArray<LinePositionSpan>[activeStatements.OldSpans.Length];

    // Phase 1: syntax analysis. Must produce no rude edits before continuing.
    Analyzer.AnalyzeSyntax(
        editScript,
        editMap,
        oldText,
        newText,
        null,
        null,
        oldActiveStatements,
        actualNewActiveStatements,
        actualNewExceptionRegions,
        updatedActiveMethodMatches,
        diagnostics);

    diagnostics.Verify(newSource);

    // Phase 2: trivia analysis.
    Analyzer.AnalyzeTrivia(
        oldText,
        newText,
        editScript.Match,
        editMap,
        triviaEdits,
        actualLineEdits,
        diagnostics,
        default(CancellationToken));

    diagnostics.Verify(newSource);

    // Phase 3: semantic analysis; only here may the expected diagnostics appear.
    Analyzer.AnalyzeSemantics(
        editScript,
        editMap,
        oldText,
        oldActiveStatements,
        triviaEdits,
        updatedActiveMethodMatches,
        oldModel,
        newModel,
        actualSemanticEdits,
        diagnostics,
        default(CancellationToken));

    diagnostics.Verify(newSource, expectedDiagnostics);

    if (expectedSemanticEdits == null)
    {
        return;
    }

    Assert.Equal(expectedSemanticEdits.Length, actualSemanticEdits.Count);

    for (int i = 0; i < actualSemanticEdits.Count; i++)
    {
        var editKind = expectedSemanticEdits[i].Kind;
        Assert.Equal(editKind, actualSemanticEdits[i].Kind);

        // Only updates carry an old symbol.
        var expectedOldSymbol = (editKind == SemanticEditKind.Update) ? expectedSemanticEdits[i].SymbolProvider(oldCompilation) : null;
        var expectedNewSymbol = expectedSemanticEdits[i].SymbolProvider(newCompilation);
        var actualOldSymbol = actualSemanticEdits[i].OldSymbol;
        var actualNewSymbol = actualSemanticEdits[i].NewSymbol;

        Assert.Equal(expectedOldSymbol, actualOldSymbol);
        Assert.Equal(expectedNewSymbol, actualNewSymbol);

        var expectedSyntaxMap = expectedSemanticEdits[i].SyntaxMap;
        var actualSyntaxMap = actualSemanticEdits[i].SyntaxMap;

        Assert.Equal(expectedSemanticEdits[i].PreserveLocalVariables, actualSemanticEdits[i].PreserveLocalVariables);

        if (expectedSyntaxMap != null)
        {
            Assert.NotNull(actualSyntaxMap);
            Assert.True(expectedSemanticEdits[i].PreserveLocalVariables);

            // Verify each expected new->old span mapping against the actual map.
            // (An unused accumulator list was removed here.)
            foreach (var expectedSpanMapping in expectedSyntaxMap)
            {
                var newNode = FindNode(newRoot, expectedSpanMapping.Value);
                var expectedOldNode = FindNode(oldRoot, expectedSpanMapping.Key);
                var actualOldNode = actualSyntaxMap(newNode);

                Assert.Equal(expectedOldNode, actualOldNode);
            }
        }
        else if (!expectedSemanticEdits[i].PreserveLocalVariables)
        {
            Assert.Null(actualSyntaxMap);
        }
    }
}
/// <summary>
/// Asserts that the expected absolute text spans match the reported line/column
/// spans, printing the spanned source text on failure.
/// </summary>
private static void AssertSpansEqual(IList<TextSpan> expected, IList<LinePositionSpan> actual, string newSource, SourceText newText)
{
    // Convert the reported line/column spans into absolute text spans before comparing.
    var actualTextSpans = actual.Select(lineSpan => newText.Lines.GetTextSpan(lineSpan));

    AssertEx.Equal(
        expected,
        actualTextSpans,
        itemSeparator: "\r\n",
        itemInspector: span => DisplaySpan(newSource, span));
}
/// <summary>Formats a span as "span: [text]" with the spanned source collapsed onto one line.</summary>
private static string DisplaySpan(string source, TextSpan span)
{
    // Collapse CRLF line breaks so the preview stays on a single line.
    var snippet = source.Substring(span.Start, span.Length).Replace("\r\n", " ");
    return span + ": [" + snippet + "]";
}
// Computes the statement-level match for a method body pair and returns every matched
// node pair except the body roots themselves (which are asserted to map to each other).
internal static IEnumerable<KeyValuePair<SyntaxNode, SyntaxNode>> GetMethodMatches(AbstractEditAndContinueAnalyzer analyzer, Match<SyntaxNode> bodyMatch)
{
Dictionary<SyntaxNode, AbstractEditAndContinueAnalyzer.LambdaInfo> lazyActiveOrMatchedLambdas = null;
// No active statements are supplied; only the raw body-to-body map is requested.
var map = analyzer.ComputeMap(bodyMatch, Array.Empty<AbstractEditAndContinueAnalyzer.ActiveNode>(), ref lazyActiveOrMatchedLambdas, new List<RudeEditDiagnostic>());
var result = new Dictionary<SyntaxNode, SyntaxNode>();
foreach (var pair in map.Forward)
{
// The roots must match each other and are excluded from the result.
if (pair.Value == bodyMatch.NewRoot)
{
Assert.Same(pair.Key, bodyMatch.OldRoot);
continue;
}
result.Add(pair.Key, pair.Value);
}
return result;
}
/// <summary>Converts a match to displayable pairs, omitting the implied root-to-root match.</summary>
public static MatchingPairs ToMatchingPairs(Match<SyntaxNode> match)
{
    var interiorMatches = match.Matches.Where(partners => partners.Key != match.OldRoot);
    return ToMatchingPairs(interiorMatches);
}

/// <summary>
/// Orders matched pairs by source position (widest node first when several start at
/// the same offset) and collapses each node's text onto a single line for display.
/// </summary>
public static MatchingPairs ToMatchingPairs(IEnumerable<KeyValuePair<SyntaxNode, SyntaxNode>> matches)
{
    var orderedPairs =
        from partners in matches
        orderby partners.Key.GetLocation().SourceSpan.Start, partners.Key.Span.Length descending
        select new MatchingPair
        {
            Old = partners.Key.ToString().Replace("\r\n", " ").Replace("\n", " "),
            New = partners.Value.ToString().Replace("\r\n", " ").Replace("\n", " ")
        };

    return new MatchingPairs(orderedPairs);
}
/// <summary>Lazily flips each (value, key) pair of the input into a (key, value) pair.</summary>
private static IEnumerable<KeyValuePair<K, V>> ReverseMapping<K, V>(IEnumerable<KeyValuePair<V, K>> mapping)
{
    // Deferred, same as the original iterator implementation.
    return mapping.Select(pair => new KeyValuePair<K, V>(pair.Value, pair.Key));
}
}
internal static class EditScriptTestUtils
{
    /// <summary>
    /// Asserts that the script's edits, rendered via their debugger display,
    /// match <paramref name="expected"/> exactly and in order.
    /// </summary>
    public static void VerifyEdits<TNode>(this EditScript<TNode> actual, params string[] expected)
    {
        var actualDisplay = actual.Edits.Select(edit => edit.GetDebuggerDisplay());
        AssertEx.Equal(expected, actualDisplay, itemSeparator: ",\r\n");
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace Microsoft.Xml.Serialization
{
using System.Reflection;
using System.Collections;
using System.IO;
using Microsoft.Xml.Schema;
using System;
using System.Text;
using System.Threading;
using System.Globalization;
using System.Security;
// using System.Security.Permissions;
// using System.Security.Policy;
//using Microsoft.Xml.Serialization.Configuration;
using System.Diagnostics;
using Microsoft.CodeDom.Compiler;
using System.Runtime.Versioning;
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents"]/*' />
/// <devdoc>
///    <para>Bundles the callbacks raised during deserialization when unknown or
///    unreferenced XML content is encountered.</para>
/// </devdoc>
public struct XmlDeserializationEvents
{
    private XmlNodeEventHandler _unknownNodeHandler;
    private XmlAttributeEventHandler _unknownAttributeHandler;
    private XmlElementEventHandler _unknownElementHandler;
    private UnreferencedObjectEventHandler _unreferencedObjectHandler;

    // The serializer instance reported as the sender of the callbacks above.
    internal object sender;

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnknownNode"]/*' />
    public XmlNodeEventHandler OnUnknownNode
    {
        get { return _unknownNodeHandler; }
        set { _unknownNodeHandler = value; }
    }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnknownAttribute"]/*' />
    public XmlAttributeEventHandler OnUnknownAttribute
    {
        get { return _unknownAttributeHandler; }
        set { _unknownAttributeHandler = value; }
    }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnknownElement"]/*' />
    public XmlElementEventHandler OnUnknownElement
    {
        get { return _unknownElementHandler; }
        set { _unknownElementHandler = value; }
    }

    /// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlDeserializationEvents.OnUnreferencedObject"]/*' />
    public UnreferencedObjectEventHandler OnUnreferencedObject
    {
        get { return _unreferencedObjectHandler; }
        set { _unreferencedObjectHandler = value; }
    }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation"]/*' />
///<internalonly/>
/// <devdoc>
///    <para>Contract implemented by generated serialization assemblies; exposes the
///    pre-generated readers, writers and typed serializers for a set of types.
///    Every member throws NotSupportedException unless overridden by the generated code.</para>
/// </devdoc>
public abstract class XmlSerializerImplementation
{
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.Reader"]/*' />
public virtual XmlSerializationReader Reader { get { throw new NotSupportedException(); } }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.Writer"]/*' />
public virtual XmlSerializationWriter Writer { get { throw new NotSupportedException(); } }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.ReadMethods"]/*' />
public virtual Hashtable ReadMethods { get { throw new NotSupportedException(); } }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.WriteMethods"]/*' />
public virtual Hashtable WriteMethods { get { throw new NotSupportedException(); } }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.TypedSerializers"]/*' />
public virtual Hashtable TypedSerializers { get { throw new NotSupportedException(); } }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.CanSerialize"]/*' />
public virtual bool CanSerialize(Type type) { throw new NotSupportedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializerImplementation.GetSerializer"]/*' />
public virtual XmlSerializer GetSerializer(Type type) { throw new NotSupportedException(); }
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer"]/*' />
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public class XmlSerializer
{
// Wrapper around the generated (or pre-generated/cached) serialization assembly; null for primitives.
private TempAssembly _tempAssembly;
// True when this instance came from a typed-serializer contract (see SetTempAssembly).
private bool _typedSerializer;
// Set instead of a temp assembly when the serialized type is a known primitive.
private Type _primitiveType;
private XmlMapping _mapping;
private XmlDeserializationEvents _events = new XmlDeserializationEvents();
// Process-wide cache of temp assemblies, keyed by (defaultNamespace, type).
private static TempAssemblyCache s_cache = new TempAssemblyCache();
private static volatile XmlSerializerNamespaces s_defaultNamespaces;
// Lazily-created default namespace set (xsi/xsd prefixes). The unsynchronized
// initialization is a benign race: the field is volatile and a losing thread
// simply discards its freshly built instance.
private static XmlSerializerNamespaces DefaultNamespaces
{
get
{
if (s_defaultNamespaces == null)
{
XmlSerializerNamespaces nss = new XmlSerializerNamespaces();
nss.AddInternal("xsi", XmlSchema.InstanceNamespace);
nss.AddInternal("xsd", XmlSchema.Namespace);
if (s_defaultNamespaces == null)
{
s_defaultNamespaces = nss;
}
}
return s_defaultNamespaces;
}
}
// Per-type serializer cache used by GetSerializersFromCache: Type -> (mapping key -> XmlSerializer).
private static Hashtable s_xmlSerializerTable = new Hashtable();
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer8"]/*' />
///<internalonly/>
protected XmlSerializer()
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer"]/*' />
/// <devdoc>
///    <para>Creates a serializer with attribute overrides, extra types, a root element
///    override and a default namespace; delegates to the full internal constructor.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlAttributeOverrides overrides, Type[] extraTypes, XmlRootAttribute root, string defaultNamespace) :
this(type, overrides, extraTypes, root, defaultNamespace, null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer2"]/*' />
/// <devdoc>
///    <para>Creates a serializer for <paramref name="type"/> with a root element override.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlRootAttribute root) : this(type, null, new Type[0], root, null, null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer3"]/*' />
/// <devdoc>
///    <para>Creates a serializer for <paramref name="type"/> that can also serialize the given extra types.</para>
/// </devdoc>
public XmlSerializer(Type type, Type[] extraTypes) : this(type, null, extraTypes, null, null, null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer4"]/*' />
/// <devdoc>
///    <para>Creates a serializer for <paramref name="type"/> with attribute overrides applied.</para>
/// </devdoc>
public XmlSerializer(Type type, XmlAttributeOverrides overrides) : this(type, overrides, new Type[0], null, null, null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer5"]/*' />
/// <devdoc>
///    <para>Creates a serializer from a pre-built type mapping; generates the temp assembly immediately.</para>
/// </devdoc>
public XmlSerializer(XmlTypeMapping xmlTypeMapping)
{
_tempAssembly = GenerateTempAssembly(xmlTypeMapping);
_mapping = xmlTypeMapping;
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer6"]/*' />
/// <devdoc>
///    <para>Creates a serializer for <paramref name="type"/> using the default namespace.</para>
/// </devdoc>
public XmlSerializer(Type type) : this(type, (string)null)
{
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer1"]/*' />
/// <devdoc>
///    <para>Creates a serializer for <paramref name="type"/> in the given default namespace.
///    Primitive types short-circuit to a known mapping; all other types are served from the
///    process-wide temp-assembly cache, which is populated under a lock (double-checked).</para>
/// </devdoc>
public XmlSerializer(Type type, string defaultNamespace)
{
if (type == null)
throw new ArgumentNullException("type");
// Known primitives need no generated assembly at all.
_mapping = GetKnownMapping(type, defaultNamespace);
if (_mapping != null)
{
_primitiveType = type;
return;
}
// Double-checked cache lookup: re-read under the lock before generating.
_tempAssembly = s_cache[defaultNamespace, type];
if (_tempAssembly == null)
{
lock (s_cache)
{
_tempAssembly = s_cache[defaultNamespace, type];
if (_tempAssembly == null)
{
XmlSerializerImplementation contract = null;
Assembly assembly = TempAssembly.LoadGeneratedAssembly(type, defaultNamespace, out contract);
if (assembly == null)
{
// need to reflect and generate new serialization assembly
XmlReflectionImporter importer = new XmlReflectionImporter(defaultNamespace);
_mapping = importer.ImportTypeMapping(type, null, defaultNamespace);
_tempAssembly = GenerateTempAssembly(_mapping, type, defaultNamespace);
}
else
{
// we found the pre-generated assembly, now make sure that the assembly has the right serializer
// try to avoid the reflection step, need to get ElementName, namespace and the Key form the type
_mapping = XmlReflectionImporter.GetTopLevelMapping(type, defaultNamespace);
_tempAssembly = new TempAssembly(new XmlMapping[] { _mapping }, assembly, contract);
}
}
// NOTE(review): Add runs even when the second cache read hit, re-adding the
// same entry; presumably TempAssemblyCache.Add tolerates/replaces duplicates
// (this mirrors the desktop reference source) — TODO confirm.
s_cache.Add(defaultNamespace, type, _tempAssembly);
}
}
// A cache hit skips the import above, so build the top-level mapping here.
if (_mapping == null)
{
_mapping = XmlReflectionImporter.GetTopLevelMapping(type, defaultNamespace);
}
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.XmlSerializer7"]/*' />
/// <devdoc>
///    <para>Full constructor: reflects over <paramref name="type"/> (plus any extra types)
///    with the given overrides and immediately generates the temp assembly, optionally
///    persisting it at <paramref name="location"/>.</para>
/// </devdoc>
internal XmlSerializer(Type type, XmlAttributeOverrides overrides, Type[] extraTypes, XmlRootAttribute root, string defaultNamespace, string location)
{
    if (type == null)
        throw new ArgumentNullException("type");

    // Build the mapping up front via reflection, registering every extra type first.
    XmlReflectionImporter importer = new XmlReflectionImporter(overrides, defaultNamespace);
    if (extraTypes != null)
    {
        foreach (Type extraType in extraTypes)
            importer.IncludeType(extraType);
    }
    _mapping = importer.ImportTypeMapping(type, root, defaultNamespace);

    // A caller-supplied output location requires the trust check first.
    if (location != null)
    {
        DemandForUserLocationOrEvidence();
    }

    _tempAssembly = GenerateTempAssembly(_mapping, type, defaultNamespace, location);
}
// [PermissionSet(SecurityAction.Demand, Name = "FullTrust")]
// Appears to be a legacy CAS placeholder (see the commented PermissionSet attribute):
// intentionally a no-op on this platform.
private void DemandForUserLocationOrEvidence()
{
// Ensure full trust before asserting full file access to the user-provided location or evidence
}
// Generates a temp assembly for a single mapping with no associated type or namespace.
internal static TempAssembly GenerateTempAssembly(XmlMapping xmlMapping)
{
return GenerateTempAssembly(xmlMapping, null, null);
}
// Generates an in-memory temp assembly for a single mapping/type pair.
internal static TempAssembly GenerateTempAssembly(XmlMapping xmlMapping, Type type, string defaultNamespace)
{
if (xmlMapping == null)
throw new ArgumentNullException("xmlMapping");
return new TempAssembly(new XmlMapping[] { xmlMapping }, new Type[] { type }, defaultNamespace, null);
}
// Generates a temp assembly, optionally persisting it at the given location.
internal static TempAssembly GenerateTempAssembly(XmlMapping xmlMapping, Type type, string defaultNamespace, string location)
{
return new TempAssembly(new XmlMapping[] { xmlMapping }, new Type[] { type }, defaultNamespace, location);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize"]/*' />
/// <devdoc>
///    <para>Serializes <paramref name="o"/> to the text writer with no extra namespaces.</para>
/// </devdoc>
public void Serialize(TextWriter textWriter, object o)
{
Serialize(textWriter, o, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize1"]/*' />
/// <devdoc>
///    <para>Serializes to a text writer using an indented (2-space) XML writer.</para>
/// </devdoc>
public void Serialize(TextWriter textWriter, object o, XmlSerializerNamespaces namespaces)
{
XmlTextWriter xmlWriter = new XmlTextWriter(textWriter);
xmlWriter.Formatting = Formatting.Indented;
xmlWriter.Indentation = 2;
Serialize(xmlWriter, o, namespaces);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize2"]/*' />
/// <devdoc>
///    <para>Serializes <paramref name="o"/> to the stream with no extra namespaces.</para>
/// </devdoc>
public void Serialize(Stream stream, object o)
{
Serialize(stream, o, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize3"]/*' />
/// <devdoc>
///    <para>Serializes to a stream using an indented (2-space) XML writer; encoding is writer-default.</para>
/// </devdoc>
public void Serialize(Stream stream, object o, XmlSerializerNamespaces namespaces)
{
XmlTextWriter xmlWriter = new XmlTextWriter(stream, null);
xmlWriter.Formatting = Formatting.Indented;
xmlWriter.Indentation = 2;
Serialize(xmlWriter, o, namespaces);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize4"]/*' />
/// <devdoc>
///    <para>Serializes <paramref name="o"/> to the XML writer with no extra namespaces.</para>
/// </devdoc>
public void Serialize(XmlWriter xmlWriter, object o)
{
Serialize(xmlWriter, o, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize5"]/*' />
/// <devdoc>
///    <para>Serializes with explicit namespaces and no encoding style.</para>
/// </devdoc>
public void Serialize(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces)
{
Serialize(xmlWriter, o, namespaces, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize6"]/*' />
public void Serialize(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces, string encodingStyle)
{
Serialize(xmlWriter, o, namespaces, encodingStyle, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize6"]/*' />
/// <devdoc>
///    <para>Core serialization entry point: dispatches to the primitive writer, a typed
///    (generated) writer, or the temp assembly, then flushes the XML writer. Failures are
///    wrapped in InvalidOperationException, except OutOfMemoryException which is rethrown.</para>
/// </devdoc>
public void Serialize(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces, string encodingStyle, string id)
{
try
{
if (_primitiveType != null)
{
// Primitive serialization does not support SOAP encoding styles.
if (encodingStyle != null && encodingStyle.Length > 0)
{
throw new InvalidOperationException(string.Format(ResXml.XmlInvalidEncodingNotEncoded1, encodingStyle));
}
SerializePrimitive(xmlWriter, o, namespaces);
}
else if (_tempAssembly == null || _typedSerializer)
{
// Typed serializer path: a derived class supplies the writer.
XmlSerializationWriter writer = CreateWriter();
writer.Init(xmlWriter, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id, _tempAssembly);
try
{
Serialize(o, writer);
}
finally
{
writer.Dispose();
}
}
else
_tempAssembly.InvokeWriter(_mapping, xmlWriter, o, namespaces == null || namespaces.Count == 0 ? DefaultNamespaces : namespaces, encodingStyle, id);
}
catch (Exception e)
{
if (/*e is ThreadAbortException || e is StackOverflowException ||*/ e is OutOfMemoryException)
{
throw;
}
// NOTE(review): if a TargetInvocationException carries no inner exception, e
// becomes null here and the wrapper below has a null inner exception — TODO confirm intended.
if (e is TargetInvocationException)
e = e.InnerException;
throw new InvalidOperationException(ResXml.XmlGenError, e);
}
xmlWriter.Flush();
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize"]/*' />
/// <devdoc>
///    <para>Deserializes from a stream. The reader normalizes text, keeps significant
///    whitespace only, and has its XmlResolver cleared (no external resources are resolved).</para>
/// </devdoc>
public object Deserialize(Stream stream)
{
XmlTextReader xmlReader = new XmlTextReader(stream);
xmlReader.WhitespaceHandling = WhitespaceHandling.Significant;
xmlReader.Normalization = true;
xmlReader.XmlResolver = null;
return Deserialize(xmlReader, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize1"]/*' />
/// <devdoc>
///    <para>Deserializes from a text reader; same reader configuration as the stream overload.</para>
/// </devdoc>
public object Deserialize(TextReader textReader)
{
XmlTextReader xmlReader = new XmlTextReader(textReader);
xmlReader.WhitespaceHandling = WhitespaceHandling.Significant;
xmlReader.Normalization = true;
xmlReader.XmlResolver = null;
return Deserialize(xmlReader, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize2"]/*' />
/// <devdoc>
///    <para>Deserializes from an XML reader with no encoding style.</para>
/// </devdoc>
public object Deserialize(XmlReader xmlReader)
{
return Deserialize(xmlReader, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize3"]/*' />
public object Deserialize(XmlReader xmlReader, XmlDeserializationEvents events)
{
return Deserialize(xmlReader, null, events);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize4"]/*' />
public object Deserialize(XmlReader xmlReader, string encodingStyle)
{
return Deserialize(xmlReader, encodingStyle, _events);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize5"]/*' />
/// <devdoc>
///    <para>Core deserialization entry point: dispatches to the primitive reader, a typed
///    (generated) reader, or the temp assembly. Failures are wrapped in
///    InvalidOperationException (with line/position when available); OutOfMemoryException is rethrown.</para>
/// </devdoc>
public object Deserialize(XmlReader xmlReader, string encodingStyle, XmlDeserializationEvents events)
{
// This serializer is reported as the sender of the unknown-content callbacks.
events.sender = this;
try
{
if (_primitiveType != null)
{
// Primitive deserialization does not support SOAP encoding styles.
if (encodingStyle != null && encodingStyle.Length > 0)
{
throw new InvalidOperationException(string.Format(ResXml.XmlInvalidEncodingNotEncoded1, encodingStyle));
}
return DeserializePrimitive(xmlReader, events);
}
else if (_tempAssembly == null || _typedSerializer)
{
// Typed serializer path: a derived class supplies the reader.
XmlSerializationReader reader = CreateReader();
reader.Init(xmlReader, events, encodingStyle, _tempAssembly);
try
{
return Deserialize(reader);
}
finally
{
reader.Dispose();
}
}
else
{
return _tempAssembly.InvokeReader(_mapping, xmlReader, events, encodingStyle);
}
}
catch (Exception e)
{
if (/*e is ThreadAbortException || e is StackOverflowException ||*/ e is OutOfMemoryException)
{
throw;
}
// NOTE(review): if a TargetInvocationException carries no inner exception, e
// becomes null below — TODO confirm intended.
if (e is TargetInvocationException)
e = e.InnerException;
// Include line/position information when the reader can supply it.
if (xmlReader is IXmlLineInfo)
{
IXmlLineInfo lineInfo = (IXmlLineInfo)xmlReader;
throw new InvalidOperationException(string.Format(ResXml.XmlSerializeErrorDetails, lineInfo.LineNumber.ToString(), lineInfo.LinePosition.ToString()), e);
}
else
{
throw new InvalidOperationException(ResXml.XmlSerializeError, e);
}
}
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.CanDeserialize"]/*' />
/// <devdoc>
///    <para>Returns true when the reader is positioned on an element this serializer can read.</para>
/// </devdoc>
public virtual bool CanDeserialize(XmlReader xmlReader)
{
    // Primitive serializers check for the primitive's element name directly.
    if (_primitiveType != null)
    {
        TypeDesc typeDesc = (TypeDesc)TypeScope.PrimtiveTypes[_primitiveType];
        return xmlReader.IsStartElement(typeDesc.DataType.Name, string.Empty);
    }

    // Otherwise defer to the generated assembly, if one exists.
    if (_tempAssembly != null)
    {
        return _tempAssembly.CanRead(_mapping, xmlReader);
    }

    return false;
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.FromMappings"]/*' />
/// <devdoc>
///    <para>Creates one serializer per mapping with no pre-generated assembly lookup type.</para>
/// </devdoc>
// [PermissionSet(SecurityAction.LinkDemand, Name="FullTrust")]
public static XmlSerializer[] FromMappings(XmlMapping[] mappings)
{
return FromMappings(mappings, (Type)null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.FromMappings1"]/*' />
/// <devdoc>
///    <para>Creates one serializer per mapping. When <paramref name="type"/> is supplied, a
///    pre-generated serialization assembly is preferred and results are cached per type;
///    otherwise a fresh temp assembly is built for the batch.</para>
/// </devdoc>
// [PermissionSet(SecurityAction.LinkDemand, Name="FullTrust")]
public static XmlSerializer[] FromMappings(XmlMapping[] mappings, Type type)
{
if (mappings == null || mappings.Length == 0) return new XmlSerializer[0];
XmlSerializerImplementation contract = null;
Assembly assembly = type == null ? null : TempAssembly.LoadGeneratedAssembly(type, null, out contract);
TempAssembly tempAssembly = null;
if (assembly == null)
{
// Shallow mappings carry no member information and cannot be compiled.
if (XmlMapping.IsShallow(mappings))
{
return new XmlSerializer[0];
}
else
{
if (type == null)
{
// NOTE(review): type is null here, so the Type[] contains a single null
// entry; presumably TempAssembly tolerates that — TODO confirm.
tempAssembly = new TempAssembly(mappings, new Type[] { type }, null, null);
XmlSerializer[] serializers = new XmlSerializer[mappings.Length];
contract = tempAssembly.Contract;
for (int i = 0; i < serializers.Length; i++)
{
serializers[i] = (XmlSerializer)contract.TypedSerializers[mappings[i].Key];
serializers[i].SetTempAssembly(tempAssembly, mappings[i]);
}
return serializers;
}
else
{
// Use XmlSerializer cache when the type is not null.
return GetSerializersFromCache(mappings, type);
}
}
}
else
{
// Pre-generated assembly found: pull typed serializers straight from its contract.
XmlSerializer[] serializers = new XmlSerializer[mappings.Length];
for (int i = 0; i < serializers.Length; i++)
serializers[i] = (XmlSerializer)contract.TypedSerializers[mappings[i].Key];
return serializers;
}
}
// Returns one serializer per mapping for the given type, reusing cached instances and
// generating a single temp assembly for any mappings not yet cached. Two locks are used:
// the outer table lock only to get/create the per-type table, then the per-type table
// lock for the (potentially slow) generation, so distinct types do not serialize each other.
private static XmlSerializer[] GetSerializersFromCache(XmlMapping[] mappings, Type type)
{
XmlSerializer[] serializers = new XmlSerializer[mappings.Length];
Hashtable typedMappingTable = null;
lock (s_xmlSerializerTable)
{
typedMappingTable = s_xmlSerializerTable[type] as Hashtable;
if (typedMappingTable == null)
{
typedMappingTable = new Hashtable();
s_xmlSerializerTable[type] = typedMappingTable;
}
}
lock (typedMappingTable)
{
// First pass: satisfy what we can from the cache; remember cache misses with their index.
Hashtable pendingKeys = new Hashtable();
for (int i = 0; i < mappings.Length; i++)
{
XmlSerializerMappingKey mappingKey = new XmlSerializerMappingKey(mappings[i]);
serializers[i] = typedMappingTable[mappingKey] as XmlSerializer;
if (serializers[i] == null)
{
pendingKeys.Add(mappingKey, i);
}
}
// Second pass: build one temp assembly covering all misses, then cache the results.
if (pendingKeys.Count > 0)
{
XmlMapping[] pendingMappings = new XmlMapping[pendingKeys.Count];
int index = 0;
foreach (XmlSerializerMappingKey mappingKey in pendingKeys.Keys)
{
pendingMappings[index++] = mappingKey.Mapping;
}
TempAssembly tempAssembly = new TempAssembly(pendingMappings, new Type[] { type }, null, null);
XmlSerializerImplementation contract = tempAssembly.Contract;
foreach (XmlSerializerMappingKey mappingKey in pendingKeys.Keys)
{
index = (int)pendingKeys[mappingKey];
serializers[index] = (XmlSerializer)contract.TypedSerializers[mappingKey.Mapping.Key];
serializers[index].SetTempAssembly(tempAssembly, mappingKey.Mapping);
typedMappingTable[mappingKey] = serializers[index];
}
}
}
return serializers;
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.FromTypes"]/*' />
/// <devdoc>
///    <para>Creates one serializer per type, importing all mappings through a single
///    shared importer before delegating to FromMappings.</para>
/// </devdoc>
public static XmlSerializer[] FromTypes(Type[] types)
{
    if (types == null)
        return new XmlSerializer[0];

    // One importer for the whole batch so imported state is shared across types.
    XmlReflectionImporter importer = new XmlReflectionImporter();
    var mappings = new XmlTypeMapping[types.Length];
    for (int index = 0; index < mappings.Length; index++)
    {
        mappings[index] = importer.ImportTypeMapping(types[index]);
    }
    return FromMappings(mappings);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.GetXmlSerializerAssemblyName"]/*' />
/// <devdoc>
///    <para>Returns the name of the pre-generated serialization assembly for
///    <paramref name="type"/> in the default namespace.</para>
/// </devdoc>
// [PermissionSet(SecurityAction.Demand, Name="FullTrust")]
public static string GetXmlSerializerAssemblyName(Type type)
{
return GetXmlSerializerAssemblyName(type, null);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.GetXmlSerializerAssemblyName"]/*' />
/// <devdoc>
///    <para>Returns the name of the pre-generated serialization assembly for
///    <paramref name="type"/> and <paramref name="defaultNamespace"/>.</para>
/// </devdoc>
// [PermissionSet(SecurityAction.Demand, Name="FullTrust")]
public static string GetXmlSerializerAssemblyName(Type type, string defaultNamespace)
{
if (type == null)
{
throw new ArgumentNullException("type");
}
return Compiler.GetTempAssemblyName(type.GetTypeInfo().Assembly.GetName(), defaultNamespace);
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnknownNode"]/*' />
/// <devdoc>
///    <para>Raised when an unknown XML node is encountered during deserialization.</para>
/// </devdoc>
public event XmlNodeEventHandler UnknownNode
{
add
{
_events.OnUnknownNode += value;
}
remove
{
_events.OnUnknownNode -= value;
}
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnknownAttribute"]/*' />
/// <devdoc>
///    <para>Raised when an unknown XML attribute is encountered during deserialization.</para>
/// </devdoc>
public event XmlAttributeEventHandler UnknownAttribute
{
add
{
_events.OnUnknownAttribute += value;
}
remove
{
_events.OnUnknownAttribute -= value;
}
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnknownElement"]/*' />
// Raised when an unknown XML element is encountered during deserialization.
public event XmlElementEventHandler UnknownElement
{
add
{
_events.OnUnknownElement += value;
}
remove
{
_events.OnUnknownElement -= value;
}
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.UnreferencedObject"]/*' />
// Raised for objects deserialized but never referenced (SOAP-encoded streams).
public event UnreferencedObjectEventHandler UnreferencedObject
{
add
{
_events.OnUnreferencedObject += value;
}
remove
{
_events.OnUnreferencedObject -= value;
}
}
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.CreateReader"]/*' />
///<internalonly/>
// Overridden by generated typed serializers to supply their reader.
protected virtual XmlSerializationReader CreateReader() { throw new NotImplementedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Deserialize4"]/*' />
///<internalonly/>
// Overridden by generated typed serializers to perform the actual read.
protected virtual object Deserialize(XmlSerializationReader reader) { throw new NotImplementedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.CreateWriter"]/*' />
///<internalonly/>
// Overridden by generated typed serializers to supply their writer.
protected virtual XmlSerializationWriter CreateWriter() { throw new NotImplementedException(); }
/// <include file='doc\XmlSerializer.uex' path='docs/doc[@for="XmlSerializer.Serialize7"]/*' />
///<internalonly/>
// Overridden by generated typed serializers to perform the actual write.
protected virtual void Serialize(object o, XmlSerializationWriter writer) { throw new NotImplementedException(); }
// Binds this instance to an already-generated temp assembly and marks it as typed
// (see the _typedSerializer checks in Serialize/Deserialize).
internal void SetTempAssembly(TempAssembly tempAssembly, XmlMapping mapping)
{
_tempAssembly = tempAssembly;
_mapping = mapping;
_typedSerializer = true;
}
/// <summary>
/// Returns a mapping for a known primitive type, or null when the type is not a
/// primitive or a non-empty namespace was requested.
/// </summary>
private static XmlTypeMapping GetKnownMapping(Type type, string ns)
{
    // Known primitive mappings only apply to the empty/default namespace.
    if (!string.IsNullOrEmpty(ns))
        return null;

    TypeDesc typeDesc = (TypeDesc)TypeScope.PrimtiveTypes[type];
    if (typeDesc == null)
        return null;

    ElementAccessor element = new ElementAccessor();
    element.Name = typeDesc.DataType.Name;

    XmlTypeMapping mapping = new XmlTypeMapping(null, element);
    mapping.SetKeyInternal(XmlMapping.GenerateKey(type, null, null));
    return mapping;
}
// Fast path used when the serializer was constructed for a single primitive
// type (_primitiveType): writes the value through the hand-written
// XmlSerializationPrimitiveWriter instead of a generated serialization
// assembly.  Dispatches on the runtime TypeCode of _primitiveType.
private void SerializePrimitive(XmlWriter xmlWriter, object o, XmlSerializerNamespaces namespaces)
{
    XmlSerializationPrimitiveWriter writer = new XmlSerializationPrimitiveWriter();
    writer.Init(xmlWriter, namespaces, null, null, null);
    switch (Type.GetTypeCode(_primitiveType))
    {
        case TypeCode.String:
            writer.Write_string(o);
            break;
        case TypeCode.Int32:
            writer.Write_int(o);
            break;
        case TypeCode.Boolean:
            writer.Write_boolean(o);
            break;
        case TypeCode.Int16:
            writer.Write_short(o);
            break;
        case TypeCode.Int64:
            writer.Write_long(o);
            break;
        case TypeCode.Single:
            writer.Write_float(o);
            break;
        case TypeCode.Double:
            writer.Write_double(o);
            break;
        case TypeCode.Decimal:
            writer.Write_decimal(o);
            break;
        case TypeCode.DateTime:
            writer.Write_dateTime(o);
            break;
        case TypeCode.Char:
            writer.Write_char(o);
            break;
        case TypeCode.Byte:
            writer.Write_unsignedByte(o);
            break;
        case TypeCode.SByte:
            writer.Write_byte(o);
            break;
        case TypeCode.UInt16:
            writer.Write_unsignedShort(o);
            break;
        case TypeCode.UInt32:
            writer.Write_unsignedInt(o);
            break;
        case TypeCode.UInt64:
            writer.Write_unsignedLong(o);
            break;
        default:
            // Primitives that do not have a TypeCode of their own.
            if (_primitiveType == typeof(XmlQualifiedName))
            {
                writer.Write_QName(o);
            }
            else if (_primitiveType == typeof(byte[]))
            {
                writer.Write_base64Binary(o);
            }
            else if (_primitiveType == typeof(Guid))
            {
                writer.Write_guid(o);
            }
            else
            {
                // Reaching here means the primitive fast path was selected
                // for a type it does not support.
                throw new InvalidOperationException(string.Format(ResXml.XmlUnxpectedType, _primitiveType.FullName));
            }
            break;
    }
}
// Fast path used when the serializer was constructed for a single primitive
// type (_primitiveType): reads the value through the hand-written
// XmlSerializationPrimitiveReader instead of a generated serialization
// assembly.  Dispatches on the runtime TypeCode of _primitiveType.
private object DeserializePrimitive(XmlReader xmlReader, XmlDeserializationEvents events)
{
    XmlSerializationPrimitiveReader reader = new XmlSerializationPrimitiveReader();
    reader.Init(xmlReader, events, null, null);
    object o;
    switch (Type.GetTypeCode(_primitiveType))
    {
        case TypeCode.String:
            o = reader.Read_string();
            break;
        case TypeCode.Int32:
            o = reader.Read_int();
            break;
        case TypeCode.Boolean:
            o = reader.Read_boolean();
            break;
        case TypeCode.Int16:
            o = reader.Read_short();
            break;
        case TypeCode.Int64:
            o = reader.Read_long();
            break;
        case TypeCode.Single:
            o = reader.Read_float();
            break;
        case TypeCode.Double:
            o = reader.Read_double();
            break;
        case TypeCode.Decimal:
            o = reader.Read_decimal();
            break;
        case TypeCode.DateTime:
            o = reader.Read_dateTime();
            break;
        case TypeCode.Char:
            o = reader.Read_char();
            break;
        case TypeCode.Byte:
            o = reader.Read_unsignedByte();
            break;
        case TypeCode.SByte:
            o = reader.Read_byte();
            break;
        case TypeCode.UInt16:
            o = reader.Read_unsignedShort();
            break;
        case TypeCode.UInt32:
            o = reader.Read_unsignedInt();
            break;
        case TypeCode.UInt64:
            o = reader.Read_unsignedLong();
            break;
        default:
            // Primitives that do not have a TypeCode of their own.
            if (_primitiveType == typeof(XmlQualifiedName))
            {
                o = reader.Read_QName();
            }
            else if (_primitiveType == typeof(byte[]))
            {
                o = reader.Read_base64Binary();
            }
            else if (_primitiveType == typeof(Guid))
            {
                o = reader.Read_guid();
            }
            else
            {
                // Reaching here means the primitive fast path was selected
                // for a type it does not support.
                throw new InvalidOperationException(string.Format(ResXml.XmlUnxpectedType, _primitiveType.FullName));
            }
            break;
    }
    return o;
}
// Cache key identifying a serializer by its mapping's key, element name,
// namespace, and SOAP-ness.  Two keys compare equal only when all four
// components match.
private class XmlSerializerMappingKey
{
    public XmlMapping Mapping;

    public XmlSerializerMappingKey(XmlMapping mapping)
    {
        this.Mapping = mapping;
    }

    public override bool Equals(object obj)
    {
        XmlSerializerMappingKey other = obj as XmlSerializerMappingKey;
        if (other == null)
            return false;

        return this.Mapping.Key == other.Mapping.Key
            && this.Mapping.ElementName == other.Mapping.ElementName
            && this.Mapping.Namespace == other.Mapping.Namespace
            && this.Mapping.IsSoap == other.Mapping.IsSoap;
    }

    public override int GetHashCode()
    {
        // Seed with the SOAP flag, then fold in each non-null component.
        int hashCode = this.Mapping.IsSoap ? 0 : 1;
        hashCode ^= this.Mapping.Key?.GetHashCode() ?? 0;
        hashCode ^= this.Mapping.ElementName?.GetHashCode() ?? 0;
        hashCode ^= this.Mapping.Namespace?.GetHashCode() ?? 0;
        return hashCode;
    }
}
}
}
| |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
namespace Microsoft.Zelig.Tools.IRViewer
{
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
/// <summary>
/// Parsed representation of one method in the IR dump: its name plus the
/// variables, basic blocks, and control-flow edges found under its XML node.
/// </summary>
public class Method
{
    //
    // State
    //

    public string Name; // "Name" attribute of the Method element
    public List< Variable > Variables = new List< Variable >();
    public List< BasicBlock > BasicBlocks = new List< BasicBlock >();
    public List< BasicBlockEdge > BasicBlockEdges = new List< BasicBlockEdge >();
}
/// <summary>
/// One local variable of a method, as listed in the IR dump.
/// </summary>
public class Variable
{
    //
    // State
    //

    public string Name; // "Name" attribute of the Variable element
    public string Type; // "Type" attribute of the Variable element
}
/// <summary>
/// One basic block of a method: identity attributes plus the operators it
/// contains and the reaching-definition / handler records attached to it.
/// </summary>
public class BasicBlock
{
    //
    // State
    //

    public string Id;    // "Id" attribute; used to resolve Edge references
    public string Index; // "Index" attribute
    public string Type;  // "Type" attribute
    public List< Operator > Operators = new List< Operator >();
    public List< ReachingDefinition > ReachingDefinitions = new List< ReachingDefinition >();
    public List< HandlerFor > Handlers = new List< HandlerFor >();
}
/// <summary>
/// Reaching-definition record for one variable at a basic block: the
/// operators referenced by the block's Definition child elements.
/// </summary>
public class ReachingDefinition
{
    //
    // State
    //

    public string Variable; // "Variable" attribute
    public List< Operator > Definitions = new List< Operator >();
}
/// <summary>
/// Marks the enclosing basic block as a handler; Eh records the "Type"
/// attribute of the HandlerFor element.
/// </summary>
public class HandlerFor
{
    //
    // State
    //

    public string Eh; // "Type" attribute of the HandlerFor element
}
/// <summary>
/// One IR operator inside a basic block.
/// </summary>
public class Operator
{
    //
    // State
    //

    public int Index;    // numeric "Index" attribute; keys the per-method operator lookup
    public string Value; // inner text of the Operator element
    public string Call;  // "Call" attribute (null when the attribute is absent)
    public Debug Debug;  // parsed Debug child element, or null when not present
}
/// <summary>
/// Source-location information attached to an operator, parsed from a
/// Debug child element.
/// </summary>
public class Debug
{
    //
    // State
    //

    public string File;      // "File" attribute
    public int BeginLine;    // "BeginLine" attribute
    public int BeginColumn;  // "BeginColumn" attribute
    public int EndLine;      // "EndLine" attribute
    public int EndColumn;    // "EndColumn" attribute
}
/// <summary>
/// Directed control-flow edge between two basic blocks of the same method.
/// </summary>
public class BasicBlockEdge
{
    //
    // State
    //

    public BasicBlock From; // block resolved from the "From" attribute
    public BasicBlock To;   // block resolved from the "To" attribute
    public string Kind;     // "Kind" attribute
}
/// <summary>
/// Parses an IR dump: reads every Method element under the supplied node and
/// exposes the results by method name.
/// </summary>
public class Parser
{
    //
    // State
    //

    // Parsed methods, keyed by method name.
    private Dictionary< string, Method > m_methods = new Dictionary< string, Method >();

    //
    // Constructor Methods
    //

    public Parser( XmlNode node )
    {
        foreach(XmlNode child in node.SelectNodes( "Method" ))
        {
            Method method = ParseMethod( child );

            m_methods[ method.Name ] = method;
        }
    }

    public Dictionary< string, Method > Methods
    {
        get { return m_methods; }
    }

    //--//

    // Builds a Method in two passes: pass 1 creates every basic block and
    // operator (so later references can be resolved by id/index), pass 2
    // fills in reaching definitions, handlers, and edges.
    private Method ParseMethod( XmlNode node )
    {
        Method method = new Method();

        Dictionary< string, BasicBlock > blocksById       = new Dictionary< string, BasicBlock >();
        Dictionary< string, XmlNode    > nodesById        = new Dictionary< string, XmlNode    >();
        Dictionary< int   , Operator   > operatorsByIndex = new Dictionary< int   , Operator   >();

        method.Name = GetAttribute( node, "Name" );

        foreach(XmlNode child in node.SelectNodes( "Variable" ))
        {
            method.Variables.Add( ParseVariable( child ) );
        }

        // Pass 1: create blocks and operators.
        foreach(XmlNode child in node.SelectNodes( "BasicBlock" ))
        {
            method.BasicBlocks.Add( ParseBasicBlock( child, blocksById, nodesById, operatorsByIndex ) );
        }

        // Pass 2: resolve cross-references now that every block exists.
        foreach(string id in nodesById.Keys)
        {
            ParseBasicBlock( nodesById[ id ], method, blocksById[ id ], blocksById, operatorsByIndex );
        }

        return method;
    }

    private Variable ParseVariable( XmlNode node )
    {
        Variable variable = new Variable();

        variable.Name = GetAttribute( node, "Name" );
        variable.Type = GetAttribute( node, "Type" );

        return variable;
    }

    // Pass-1 parse: creates the block and its operators, and records both
    // the block and its XML node for the second pass.
    private BasicBlock ParseBasicBlock( XmlNode                          node             ,
                                        Dictionary< string, BasicBlock > blocksById       ,
                                        Dictionary< string, XmlNode    > nodesById        ,
                                        Dictionary< int   , Operator   > operatorsByIndex )
    {
        BasicBlock bb = new BasicBlock();

        bb.Id    = GetAttribute( node, "Id"    );
        bb.Index = GetAttribute( node, "Index" );
        bb.Type  = GetAttribute( node, "Type"  );

        blocksById[ bb.Id ] = bb;
        nodesById [ bb.Id ] = node;

        foreach(XmlNode child in node.SelectNodes( "Operator" ))
        {
            bb.Operators.Add( ParseOperator( child, operatorsByIndex ) );
        }

        return bb;
    }

    private Operator ParseOperator( XmlNode                    node             ,
                                    Dictionary< int, Operator > operatorsByIndex )
    {
        Operator op = new Operator();

        op.Index = int.Parse( GetAttribute( node, "Index" ) );
        op.Call  = GetAttribute( node, "Call" );
        op.Value = node.InnerText;

        XmlNode debugNode = node.SelectSingleNode( "Debug" );
        if(debugNode != null)
        {
            op.Debug = ParseDebug( debugNode );
        }

        operatorsByIndex[ op.Index ] = op;

        return op;
    }

    private Debug ParseDebug( XmlNode node )
    {
        Debug dbg = new Debug();

        dbg.File        = GetAttribute( node, "File" );
        dbg.BeginLine   = int.Parse( GetAttribute( node, "BeginLine"   ) );
        dbg.BeginColumn = int.Parse( GetAttribute( node, "BeginColumn" ) );
        dbg.EndLine     = int.Parse( GetAttribute( node, "EndLine"     ) );
        dbg.EndColumn   = int.Parse( GetAttribute( node, "EndColumn"   ) );

        return dbg;
    }

    // Pass-2 parse: attaches reaching definitions, handlers, and edges to an
    // already-created block.
    private void ParseBasicBlock( XmlNode                          node             ,
                                  Method                           method           ,
                                  BasicBlock                       bb               ,
                                  Dictionary< string, BasicBlock > blocksById       ,
                                  Dictionary< int   , Operator   > operatorsByIndex )
    {
        foreach(XmlNode child in node.SelectNodes( "ReachingDefinition" ))
        {
            bb.ReachingDefinitions.Add( ParseReachingDefinition( child, operatorsByIndex ) );
        }

        foreach(XmlNode child in node.SelectNodes( "HandlerFor" ))
        {
            bb.Handlers.Add( ParseHandler( child ) );
        }

        foreach(XmlNode child in node.SelectNodes( "Edge" ))
        {
            method.BasicBlockEdges.Add( ParseBasicBlockEdge( child, blocksById ) );
        }
    }

    private ReachingDefinition ParseReachingDefinition( XmlNode                     node             ,
                                                        Dictionary< int, Operator > operatorsByIndex )
    {
        ReachingDefinition rd = new ReachingDefinition();

        rd.Variable = GetAttribute( node, "Variable" );

        // Each Definition child points at an operator by numeric index.
        foreach(XmlNode child in node.SelectNodes( "Definition" ))
        {
            rd.Definitions.Add( operatorsByIndex[ int.Parse( GetAttribute( child, "Index" ) ) ] );
        }

        return rd;
    }

    private HandlerFor ParseHandler( XmlNode node )
    {
        HandlerFor handler = new HandlerFor();

        handler.Eh = GetAttribute( node, "Type" );

        return handler;
    }

    private BasicBlockEdge ParseBasicBlockEdge( XmlNode                          node       ,
                                                Dictionary< string, BasicBlock > blocksById )
    {
        BasicBlockEdge edge = new BasicBlockEdge();

        edge.From = blocksById[ GetAttribute( node, "From" ) ];
        edge.To   = blocksById[ GetAttribute( node, "To"   ) ];
        edge.Kind = GetAttribute( node, "Kind" );

        return edge;
    }

    //--//

    // Returns the named attribute's value, or null when the attribute is
    // absent from the node.
    private static string GetAttribute( XmlNode node ,
                                        string  name )
    {
        XmlAttribute attrib = node.Attributes.GetNamedItem( name ) as XmlAttribute;

        return attrib != null ? attrib.Value : null;
    }
}
}
| |
#region File Description
//-----------------------------------------------------------------------------
// Uni2Container.cs
//
// Snailium Library (http://www.snailium.net)
// Copyright (C) Snailium. All rights reserved.
//-----------------------------------------------------------------------------
#endregion
#region Using Statements
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
#endregion
namespace Snailium.Lib.Containers
{
#region Structs
/// <summary>
/// The struct of a UNI2 file table item (one 16-byte file-table entry).
/// </summary>
public struct Uni2FileItem
{
    #region Fields
    /// <summary>
    /// ID of the file.
    /// </summary>
    public uint fileId;

    /// <summary>
    /// Which cluster the file data starts.
    /// Note: this is an offset from the beginning of data section.
    /// </summary>
    public uint startCluster;

    /// <summary>
    /// How many clusters occupied for the file data.
    /// </summary>
    public uint lengthCluster;

    /// <summary>
    /// How many real bytes are consumed by the file data.
    /// </summary>
    public uint lengthByte;
    #endregion

    #region Utilities
    /// <summary>
    /// Check if this item is valid. An item whose fields are all zero (0) is
    /// the end-of-table sentinel and therefore invalid.
    /// </summary>
    /// <returns>If this item is valid.</returns>
    public bool isValid()
    {
        // Valid as soon as any field carries a non-zero value.
        return fileId != 0 || startCluster != 0 || lengthCluster != 0 || lengthByte != 0;
    }
    #endregion
}
#endregion
/// <summary>
/// Class to access a UNI2 file.
/// A UNI2 container is divided into fixed-size clusters: a header, a file
/// table of 16-byte entries, and a data section.
/// </summary>
public class Uni2Container : FileContainer
{
    #region Fields
    /// <summary>
    /// The number of files extracted from header.
    /// </summary>
    private uint numFiles;

    /// <summary>
    /// The cluster size used in UNI2 file.
    /// Default: 0x800 (2KB)
    /// </summary>
    private uint clusterSize = 0x800;

    /// <summary>
    /// The start location (in cluster) of file table.
    /// Default: 1 (Cluster 1)
    /// </summary>
    private uint tableCluster = 1; // Default: File table is located from Cluster 1

    /// <summary>
    /// The start location (in cluster) of file data.
    /// Default: 2 (Cluster 2)
    /// </summary>
    private uint dataCluster = 2; // Default: Data section is located from Cluster 2
    #endregion

    #region Properties
    /// <summary>
    /// The cluster size used in UNI2 file.
    /// Default: 0x800 (2KB)
    /// </summary>
    public uint ClusterSize
    {
        set { this.clusterSize = value; }
        get { return this.clusterSize; }
    }

    /// <summary>
    /// The start location (in cluster) of file table.
    /// Default: 1 (Cluster 1)
    /// </summary>
    public uint TableCluster
    {
        set { this.tableCluster = value; }
        get { return this.tableCluster; }
    }

    /// <summary>
    /// The start location (in cluster) of file data.
    /// Default: 2 (Cluster 2)
    /// </summary>
    public uint DataCluster
    {
        set { this.dataCluster = value; }
        get { return this.dataCluster; }
    }
    #endregion

    #region Initialization
    /// <summary>
    /// Construct a UNI2 file.
    /// </summary>
    /// <param name="file">File stream.</param>
    /// <exception cref="FormatException">
    /// Thrown when the stream does not start with the 'UNI2' identifier.
    /// </exception>
    public Uni2Container(FileStream file) : base(file)
    {
        long offset = 0;

        // Confirm 'UNI2' identifier (0x55 0x4e 0x49 0x32 == "UNI2").
        uint identifier = StreamUtility.ReadUIntFromStream(this.container, offset);
        offset += 4;
        if (identifier != 0x554e4932) // If file identifier is not UNI2
            throw new FormatException(container.Name + ": File is not UNI2 format!");

        // Skip the next 32-bit header value; its meaning is unknown.
        StreamUtility.ReadUIntFromStream(this.container, offset);
        offset += 4;

        // Get number of files
        this.numFiles = StreamUtility.ReadUIntFromStream(this.container, offset);
        offset += 4;

        // Get file table position
        this.tableCluster = StreamUtility.ReadUIntFromStream(this.container, offset);
        offset += 4;

        this.containerType = "Union 2";
    }
    #endregion

    #region Utilities
    /// <summary>
    /// Parse the file table.
    /// </summary>
    /// <returns>Number of files found.</returns>
    /// <exception cref="FormatException">
    /// Thrown when the number of table entries does not match the count in
    /// the header.
    /// </exception>
    public override int ParseFileTable()
    {
        // The data section begins right after the table: numFiles entries of
        // 16 bytes each, rounded up to a whole number of clusters.
        this.dataCluster = this.tableCluster +
            ((this.numFiles * 16) / this.clusterSize);
        if ((this.numFiles * 16) % this.clusterSize != 0)
            this.dataCluster++;

        long offset = this.clusterSize * this.tableCluster;
        this.fileTable = new List<FileItem>();

        // Base the generated file names on the container's own name,
        // stripped of directory and extension.
        string fileNameBase;
        {
            int fileNameStart = this.container.Name.LastIndexOf('\\') + 1;
            int fileNameEnd = this.container.Name.LastIndexOf('.');
            if (fileNameEnd == -1) fileNameEnd = this.container.Name.Length;
            fileNameBase = this.container.Name.Substring(fileNameStart, fileNameEnd - fileNameStart);
        }

        while (true)
        {
            // Read one 16-byte table entry.
            Uni2FileItem item = new Uni2FileItem();
            item.fileId = StreamUtility.ReadUIntFromStream(this.container, offset);
            item.startCluster = StreamUtility.ReadUIntFromStream(this.container, offset + 4);
            item.lengthCluster = StreamUtility.ReadUIntFromStream(this.container, offset + 8);
            item.lengthByte = StreamUtility.ReadUIntFromStream(this.container, offset + 12);
            offset += 16;

            // An all-zero entry terminates the table.
            if (!item.isValid())
                break;

            // Bug fix: the id suffix used to be built via Convert.ToSingle(),
            // which is lossy for ids above 2^24 and can produce scientific
            // notation; format the uint id directly instead.
            FileItem fileItem = ConvertUni2ToGeneral(item, fileNameBase + "-" + item.fileId.ToString());

            // Sniff the file's leading 32-bit magic number to pick an extension.
            uint fileIdentifier = StreamUtility.ReadUIntFromStream(this.container, fileItem.FileOffset);
            string fileExt = StreamUtility.GetFileExtension(fileIdentifier);
            fileItem.FileName += "." + fileExt;
            fileTable.Add(fileItem);
        }

        if (fileTable.Count != this.numFiles)
            throw new FormatException(this.container.Name + ": The real number of files doesn't match the header value!" + System.Environment.NewLine +
                "The header indicated: " + Convert.ToString(this.numFiles) + System.Environment.NewLine +
                "The real number of files: " + Convert.ToString(fileTable.Count));

        return fileTable.Count;
    }

    /// <summary>
    /// Convert a UNI2 table entry into a generic FileItem with an absolute
    /// byte offset and a byte length.
    /// </summary>
    private FileItem ConvertUni2ToGeneral(Uni2FileItem item, string fileName)
    {
        FileItem fileItem = new FileItem(this.container);
        fileItem.FileName = fileName;
        // startCluster is relative to the start of the data section.
        fileItem.FileOffset = (item.startCluster + this.dataCluster) * this.clusterSize;
        fileItem.FileSize = item.lengthByte;
        return fileItem;
    }

    /// <summary>
    /// Convert a generic FileItem back into a UNI2 table entry.
    /// </summary>
    private Uni2FileItem ConvertGeneralToUni2(FileItem item, uint fileId)
    {
        Uni2FileItem uni2Item = new Uni2FileItem();
        uni2Item.fileId = fileId;
        uni2Item.startCluster = (uint) (item.FileOffset / this.clusterSize - this.dataCluster);
        // Round the byte length up to a whole number of clusters.
        uni2Item.lengthCluster = item.FileSize / this.clusterSize;
        if (item.FileSize % this.clusterSize != 0) uni2Item.lengthCluster++;
        uni2Item.lengthByte = item.FileSize;
        return uni2Item;
    }
    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Entity;
using System.Linq;
using System.Threading.Tasks;
using Abp;
using Abp.Configuration.Startup;
using Abp.Domain.Uow;
using Abp.Runtime.Session;
using Abp.TestBase;
using NorthLion.Zero.EntityFramework;
using NorthLion.Zero.Migrations.SeedData;
using NorthLion.Zero.MultiTenancy;
using NorthLion.Zero.Users;
using Castle.MicroKernel.Registration;
using Effort;
using EntityFramework.DynamicFilters;
namespace NorthLion.Zero.Tests
{
/// <summary>
/// Base class for Zero integration tests.  On construction it seeds the host
/// database and the default tenant, then logs in as the default tenant's
/// admin user.  Derived tests use the UsingDbContext helpers for direct data
/// access and the Login*/Logout* helpers to switch the session identity.
/// </summary>
public abstract class ZeroTestBase : AbpIntegratedTestBase<ZeroTestModule>
{
    // Transient (in-memory, Effort-backed) connection for the host database.
    private DbConnection _hostDb;
    private Dictionary<int, DbConnection> _tenantDbs; //only used for db per tenant architecture

    protected ZeroTestBase()
    {
        //Seed initial data for host
        AbpSession.TenantId = null;
        UsingDbContext(context =>
        {
            new InitialHostDbBuilder(context).Create();
            new DefaultTenantCreator(context).Create();
        });

        //Seed initial data for default tenant
        AbpSession.TenantId = 1;
        UsingDbContext(context =>
        {
            new TenantRoleAndUserBuilder(context, 1).Create();
        });

        LoginAsDefaultTenantAdmin();
    }

    protected override void PreInitialize()
    {
        base.PreInitialize();

        /* You can switch database architecture here: */
        UseSingleDatabase();
        //UseDatabasePerTenant();
    }

    /* Uses single database for host and all tenants.
     */
    private void UseSingleDatabase()
    {
        _hostDb = DbConnectionFactory.CreateTransient();

        // Every resolve of DbConnection gets the same host connection.
        LocalIocManager.IocContainer.Register(
            Component.For<DbConnection>()
                .UsingFactoryMethod(() => _hostDb)
                .LifestyleSingleton()
            );
    }

    /* Uses single database for host and Default tenant,
     * but dedicated databases for all other tenants.
     */
    private void UseDatabasePerTenant()
    {
        _hostDb = DbConnectionFactory.CreateTransient();
        _tenantDbs = new Dictionary<int, DbConnection>();

        LocalIocManager.IocContainer.Register(
            Component.For<DbConnection>()
                .UsingFactoryMethod((kernel) =>
                {
                    // Lazily create one transient connection per tenant;
                    // guarded by a lock because resolves can be concurrent.
                    lock (_tenantDbs)
                    {
                        var currentUow = kernel.Resolve<ICurrentUnitOfWorkProvider>().Current;
                        var abpSession = kernel.Resolve<IAbpSession>();

                        // Prefer the unit-of-work's tenant, falling back to the session's.
                        var tenantId = currentUow != null ? currentUow.GetTenantId() : abpSession.TenantId;

                        if (tenantId == null || tenantId == 1) //host and default tenant are stored in host db
                        {
                            return _hostDb;
                        }

                        if (!_tenantDbs.ContainsKey(tenantId.Value))
                        {
                            _tenantDbs[tenantId.Value] = DbConnectionFactory.CreateTransient();
                        }

                        return _tenantDbs[tenantId.Value];
                    }
                }, true)
                .LifestyleTransient()
            );
    }

    #region UsingDbContext

    /// <summary>
    /// Temporarily switches the session to the given tenant; disposing the
    /// returned object restores the previous tenant.
    /// </summary>
    protected IDisposable UsingTenantId(int? tenantId)
    {
        var previousTenantId = AbpSession.TenantId;
        AbpSession.TenantId = tenantId;
        return new DisposeAction(() => AbpSession.TenantId = previousTenantId);
    }

    /// <summary>
    /// Runs the action against a fresh DbContext for the current session tenant.
    /// </summary>
    protected void UsingDbContext(Action<ZeroDbContext> action)
    {
        UsingDbContext(AbpSession.TenantId, action);
    }

    protected Task UsingDbContextAsync(Func<ZeroDbContext, Task> action)
    {
        return UsingDbContextAsync(AbpSession.TenantId, action);
    }

    protected T UsingDbContext<T>(Func<ZeroDbContext, T> func)
    {
        return UsingDbContext(AbpSession.TenantId, func);
    }

    protected Task<T> UsingDbContextAsync<T>(Func<ZeroDbContext, Task<T>> func)
    {
        return UsingDbContextAsync(AbpSession.TenantId, func);
    }

    /// <summary>
    /// Runs the action against a fresh DbContext for the given tenant, with
    /// all dynamic filters disabled, and saves changes afterwards.
    /// </summary>
    protected void UsingDbContext(int? tenantId, Action<ZeroDbContext> action)
    {
        using (UsingTenantId(tenantId))
        {
            using (var context = LocalIocManager.Resolve<ZeroDbContext>())
            {
                // Disable multi-tenancy/soft-delete filters so tests see all rows.
                context.DisableAllFilters();
                action(context);
                context.SaveChanges();
            }
        }
    }

    protected async Task UsingDbContextAsync(int? tenantId, Func<ZeroDbContext, Task> action)
    {
        using (UsingTenantId(tenantId))
        {
            using (var context = LocalIocManager.Resolve<ZeroDbContext>())
            {
                context.DisableAllFilters();
                await action(context);
                await context.SaveChangesAsync();
            }
        }
    }

    protected T UsingDbContext<T>(int? tenantId, Func<ZeroDbContext, T> func)
    {
        T result;

        using (UsingTenantId(tenantId))
        {
            using (var context = LocalIocManager.Resolve<ZeroDbContext>())
            {
                context.DisableAllFilters();
                result = func(context);
                context.SaveChanges();
            }
        }

        return result;
    }

    protected async Task<T> UsingDbContextAsync<T>(int? tenantId, Func<ZeroDbContext, Task<T>> func)
    {
        T result;

        using (UsingTenantId(tenantId))
        {
            using (var context = LocalIocManager.Resolve<ZeroDbContext>())
            {
                context.DisableAllFilters();
                result = await func(context);
                await context.SaveChangesAsync();
            }
        }

        return result;
    }

    #endregion

    #region Login

    protected void LoginAsHostAdmin()
    {
        LoginAsHost(User.AdminUserName);
    }

    protected void LoginAsDefaultTenantAdmin()
    {
        LoginAsTenant(Tenant.DefaultTenantName, User.AdminUserName);
    }

    protected void LogoutAsDefaultTenant()
    {
        LogoutAsTenant(Tenant.DefaultTenantName);
    }

    /// <summary>
    /// Sets the session to the host side (TenantId = null) and to the named
    /// host user.  Throws when the user does not exist.
    /// </summary>
    protected void LoginAsHost(string userName)
    {
        AbpSession.TenantId = null;

        var user =
            UsingDbContext(
                context =>
                    context.Users.FirstOrDefault(u => u.TenantId == AbpSession.TenantId && u.UserName == userName));
        if (user == null)
        {
            throw new Exception("There is no user: " + userName + " for host.");
        }

        AbpSession.UserId = user.Id;
    }

    /// <summary>
    /// Clears the session user while staying on the host side.
    /// </summary>
    protected void LogoutAsHost()
    {
        Resolve<IMultiTenancyConfig>().IsEnabled = true;
        AbpSession.TenantId = null;
        AbpSession.UserId = null;
    }

    /// <summary>
    /// Sets the session to the named tenant and user.  Throws when either
    /// does not exist.
    /// </summary>
    protected void LoginAsTenant(string tenancyName, string userName)
    {
        var tenant = UsingDbContext(context => context.Tenants.FirstOrDefault(t => t.TenancyName == tenancyName));
        if (tenant == null)
        {
            throw new Exception("There is no tenant: " + tenancyName);
        }

        AbpSession.TenantId = tenant.Id;

        var user =
            UsingDbContext(
                context =>
                    context.Users.FirstOrDefault(u => u.TenantId == AbpSession.TenantId && u.UserName == userName));
        if (user == null)
        {
            throw new Exception("There is no user: " + userName + " for tenant: " + tenancyName);
        }

        AbpSession.UserId = user.Id;
    }

    /// <summary>
    /// Clears the session user while staying on the named tenant.  Throws
    /// when the tenant does not exist.
    /// </summary>
    protected void LogoutAsTenant(string tenancyName)
    {
        var tenant = UsingDbContext(context => context.Tenants.FirstOrDefault(t => t.TenancyName == tenancyName));
        if (tenant == null)
        {
            throw new Exception("There is no tenant: " + tenancyName);
        }

        AbpSession.TenantId = tenant.Id;
        AbpSession.UserId = null;
    }

    #endregion

    /// <summary>
    /// Gets current user if <see cref="IAbpSession.UserId"/> is not null.
    /// Throws exception if it's null.
    /// </summary>
    protected async Task<User> GetCurrentUserAsync()
    {
        var userId = AbpSession.GetUserId();
        return await UsingDbContext(context => context.Users.SingleAsync(u => u.Id == userId));
    }

    /// <summary>
    /// Gets current tenant if <see cref="IAbpSession.TenantId"/> is not null.
    /// Throws exception if there is no current tenant.
    /// </summary>
    protected async Task<Tenant> GetCurrentTenantAsync()
    {
        var tenantId = AbpSession.GetTenantId();
        return await UsingDbContext(context => context.Tenants.SingleAsync(t => t.Id == tenantId));
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
//
//
// Description:
// Wrapper that allows a ReaderWriterLock to work with C#'s using() clause
//
//
//
//
using System;
using System.Runtime.ConstrainedExecution;
using System.Security;
using System.Threading;
using System.Windows.Threading;
using MS.Internal.WindowsBase;
namespace MS.Internal
{
// Wrapper that allows a ReaderWriterLock to work with C#'s using() clause
// ------ CAUTION --------
// This uses a non-pumping wait while acquiring and releasing the lock, which
// avoids re-entrancy that leads to deadlock
// However, it means that the code protected by the lock must not do anything
// that can re-enter or pump messages; otherwise there could be deadlock.
// In effect, the protected code is limited to lock-free code that operates
// on simple data structures - no async calls, no COM, no Dispatcher messaging,
// no raising events, no calling out to user code, etc.
// !!! It is the caller's responsibility to obey this rule. !!!
// ------------------------
[FriendAccessAllowed] // Built into Base, used by Core and Framework.
internal class ReaderWriterLockWrapper
{
    //------------------------------------------------------
    //
    //  Constructors
    //
    //------------------------------------------------------

    #region Constructors

    internal ReaderWriterLockWrapper()
    {
        // ideally we'd like to use the NoRecursion policy, but RWLock supports
        // recursion so we allow recursion for compat. It's needed for at least
        // one pattern - a weak event manager for an event A that delegates to
        // a second event B via a second weak event manager. There's at least
        // one instance of this within WPF (CanExecuteChanged delegates to
        // RequerySuggested), and it could also happen in user code.
        _rwLock = new ReaderWriterLockSlim(LockRecursionPolicy.SupportsRecursion);
        _defaultSynchronizationContext = new NonPumpingSynchronizationContext();
        Initialize(!MS.Internal.BaseAppContextSwitches.EnableWeakEventMemoryImprovements);
    }

    // Selects between two allocation strategies for the IDisposable tokens
    // returned by ReadLock/WriteLock:
    //  - legacy: struct-based tokens, with a lambda allocated per acquisition;
    //  - improved: class-based tokens plus enter/exit delegates cached once here.
    private void Initialize(bool useLegacyMemoryBehavior)
    {
        if (useLegacyMemoryBehavior)
        {
            _awr = new AutoWriterRelease(this);
            _arr = new AutoReaderRelease(this);
        }
        else
        {
            _awrc = new AutoWriterReleaseClass(this);
            _arrc = new AutoReaderReleaseClass(this);
            // Cache the delegates so each acquire/release avoids a fresh allocation.
            _enterReadAction = _rwLock.EnterReadLock;
            _exitReadAction = _rwLock.ExitReadLock;
            _enterWriteAction = _rwLock.EnterWriteLock;
            _exitWriteAction = _rwLock.ExitWriteLock;
        }
    }

    #endregion Constructors

    //------------------------------------------------------
    //
    //  Internal Properties
    //
    //------------------------------------------------------

    #region Internal Properties

    // Acquires the write lock (with a non-pumping wait); dispose the returned
    // token to release it.  Intended for use with a using() statement.
    internal IDisposable WriteLock
    {
        get
        {
            if (!MS.Internal.BaseAppContextSwitches.EnableWeakEventMemoryImprovements)
            {
                CallWithNonPumpingWait(()=>{_rwLock.EnterWriteLock();});
                return _awr;
            }
            else
            {
                CallWithNonPumpingWait(_enterWriteAction);
                return _awrc;
            }
        }
    }

    // Acquires the read lock (with a non-pumping wait); dispose the returned
    // token to release it.  Intended for use with a using() statement.
    internal IDisposable ReadLock
    {
        get
        {
            if (!MS.Internal.BaseAppContextSwitches.EnableWeakEventMemoryImprovements)
            {
                CallWithNonPumpingWait(()=>{_rwLock.EnterReadLock();});
                return _arr;
            }
            else
            {
                CallWithNonPumpingWait(_enterReadAction);
                return _arrc;
            }
        }
    }

    #endregion Internal Properties

    //------------------------------------------------------
    //
    //  Private Methods
    //
    //------------------------------------------------------

    #region Private Methods

    // called when AutoWriterRelease is disposed
    private void ReleaseWriterLock()
    {
        CallWithNonPumpingWait(()=>{_rwLock.ExitWriteLock();});
    }

    // called when AutoReaderRelease is disposed
    private void ReleaseReaderLock()
    {
        CallWithNonPumpingWait(()=>{_rwLock.ExitReadLock();});
    }

    // called when AutoWriterRelease is disposed
    private void ReleaseWriterLock2()
    {
        CallWithNonPumpingWait(_exitWriteAction);
    }

    // called when AutoReaderRelease is disposed
    private void ReleaseReaderLock2()
    {
        CallWithNonPumpingWait(_exitReadAction);
    }

    // Runs the callback with a NonPumpingSynchronizationContext installed, so
    // any wait inside the callback cannot pump messages (avoiding re-entrancy
    // deadlocks).  The previously installed context is always restored.
    private void CallWithNonPumpingWait(Action callback)
    {
        SynchronizationContext oldSynchronizationContext = SynchronizationContext.Current;
        // Claim the cached context; Exchange returns null when another thread
        // is currently using it.
        NonPumpingSynchronizationContext nonPumpingSynchronizationContext =
            Interlocked.Exchange<NonPumpingSynchronizationContext>(ref _defaultSynchronizationContext, null);

        // if the default non-pumping context is in use, allocate a new one
        bool usingDefaultContext = (nonPumpingSynchronizationContext != null);

        if (!usingDefaultContext)
        {
            nonPumpingSynchronizationContext = new NonPumpingSynchronizationContext();
        }

        try
        {
            // install the non-pumping context
            nonPumpingSynchronizationContext.Parent = oldSynchronizationContext;
            SynchronizationContext.SetSynchronizationContext(nonPumpingSynchronizationContext);

            // invoke the callback
            callback();
        }
        finally
        {
            // restore the old context
            SynchronizationContext.SetSynchronizationContext(oldSynchronizationContext);

            // put the default non-pumping context back into play
            if (usingDefaultContext)
            {
                Interlocked.Exchange<NonPumpingSynchronizationContext>(ref _defaultSynchronizationContext, nonPumpingSynchronizationContext);
            }
        }
    }

    #endregion Private Methods

    //------------------------------------------------------
    //
    //  Private Fields
    //
    //------------------------------------------------------

    #region Private Fields

    private ReaderWriterLockSlim _rwLock;

    // Legacy struct-based release tokens (see Initialize).
    private AutoReaderRelease _arr;
    private AutoWriterRelease _awr;

    // Class-based release tokens and cached lock delegates (see Initialize).
    private AutoReaderReleaseClass _arrc;
    private AutoWriterReleaseClass _awrc;
    private Action _enterReadAction;
    private Action _exitReadAction;
    private Action _enterWriteAction;
    private Action _exitWriteAction;

    // Shared context instance, claimed and restored by CallWithNonPumpingWait.
    private NonPumpingSynchronizationContext _defaultSynchronizationContext;

    #endregion Private Fields

    //------------------------------------------------------
    //
    //  Private Classes & Structs
    //
    //------------------------------------------------------

    #region Private Classes & Structs

    // Disposable token that releases the write lock (legacy struct form).
    private struct AutoWriterRelease : IDisposable
    {
        public AutoWriterRelease(ReaderWriterLockWrapper wrapper)
        {
            _wrapper = wrapper;
        }

        public void Dispose()
        {
            _wrapper.ReleaseWriterLock();
        }

        private ReaderWriterLockWrapper _wrapper;
    }

    // Disposable token that releases the read lock (legacy struct form).
    private struct AutoReaderRelease : IDisposable
    {
        public AutoReaderRelease(ReaderWriterLockWrapper wrapper)
        {
            _wrapper = wrapper;
        }

        public void Dispose()
        {
            _wrapper.ReleaseReaderLock();
        }

        private ReaderWriterLockWrapper _wrapper;
    }

    // Disposable token that releases the write lock (class form, allocated once).
    private class AutoWriterReleaseClass : IDisposable
    {
        public AutoWriterReleaseClass(ReaderWriterLockWrapper wrapper)
        {
            _wrapper = wrapper;
        }

        public void Dispose()
        {
            _wrapper.ReleaseWriterLock2();
        }

        private ReaderWriterLockWrapper _wrapper;
    }

    // Disposable token that releases the read lock (class form, allocated once).
    private class AutoReaderReleaseClass : IDisposable
    {
        public AutoReaderReleaseClass(ReaderWriterLockWrapper wrapper)
        {
            _wrapper = wrapper;
        }

        public void Dispose()
        {
            _wrapper.ReleaseReaderLock2();
        }

        private ReaderWriterLockWrapper _wrapper;
    }

    // This SynchronizationContext waits without pumping messages, like
    // DispatcherSynchronizationContext when dispatcher is disabled.  This
    // avoids re-entrancy that leads to deadlock
    // It delegates all other functionality to its Parent (the context it
    // replaced), although if used properly those methods should never be called.
    private class NonPumpingSynchronizationContext : SynchronizationContext
    {
        public NonPumpingSynchronizationContext()
        {
            // Tell the CLR to call us when blocking.
            SetWaitNotificationRequired();
        }

        // The context this one replaced; Send/Post are forwarded to it.
        public SynchronizationContext Parent { get; set; }

        /// <summary>
        /// Wait for a set of handles.
        /// </summary>
#pragma warning disable SYSLIB0004 // The Constrained Execution Region (CER) feature is not supported.
        [PrePrepareMethod]
#pragma warning restore SYSLIB0004 // The Constrained Execution Region (CER) feature is not supported.
        public override int Wait(IntPtr[] waitHandles, bool waitAll, int millisecondsTimeout)
        {
            return MS.Win32.UnsafeNativeMethods.WaitForMultipleObjectsEx(waitHandles.Length, waitHandles, waitAll, millisecondsTimeout, false);
        }

        /// <summary>
        /// Synchronously invoke the callback in the SynchronizationContext.
        /// </summary>
        public override void Send(SendOrPostCallback d, Object state)
        {
            Parent.Send(d, state);
        }

        /// <summary>
        /// Asynchronously invoke the callback in the SynchronizationContext.
        /// </summary>
        public override void Post(SendOrPostCallback d, Object state)
        {
            Parent.Post(d, state);
        }

        /// <summary>
        /// Create a copy of this SynchronizationContext.
        /// </summary>
        public override SynchronizationContext CreateCopy()
        {
            return this;
        }
    }

    #endregion Private Classes
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Reflection;
using System.Threading;
using System.Text;
using System.Xml;
using System.Xml.Linq;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Serialization;
using OpenSim.Framework.Serialization.External;
using OpenSim.Region.CoreModules.World.Archiver;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.Framework.Scenes.Serialization;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Services.Interfaces;
namespace OpenSim.Region.CoreModules.Avatar.Inventory.Archiver
{
/// <summary>
/// Handles loading (dearchiving) an Inventory Archive (IAR) into a user's inventory.
/// </summary>
public class InventoryArchiveReadRequest
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
/// <summary>
/// The maximum major version of archive that we can read. Minor versions shouldn't need a max number since version
/// bumps here should be compatible.
/// </summary>
public static int MAX_MAJOR_VERSION = 1;
// Reader over the (possibly gzip-decompressed) IAR tar stream.
protected TarArchiveReader archive;
// Account of the user whose inventory receives the load.
private UserAccount m_userInfo;
// Inventory path under which the archive contents are loaded.
private string m_invPath;
/// <value>
/// ID of this request
/// </value>
protected UUID m_id;
/// <summary>
/// Do we want to merge this load with existing inventory?
/// </summary>
protected bool m_merge;
protected IInventoryService m_InventoryService;
protected IAssetService m_AssetService;
protected IUserAccountService m_UserAccountService;
// Module used to fire the archive-loaded event; may be null (e.g. library module or tests).
private InventoryArchiverModule m_module;
/// <value>
/// The stream from which the inventory archive will be loaded.
/// </value>
private Stream m_loadStream;
/// <summary>
/// Has the control file been loaded for this archive?
/// </summary>
public bool ControlFileLoaded { get; private set; }
/// <summary>
/// Do we want to enforce the check. IAR versions before 0.2 and 1.1 do not guarantee this order, so we can't
/// enforce.
/// </summary>
public bool EnforceControlFileCheck { get; private set; }
// Progress/state flags and counters accumulated while reading the archive.
protected bool m_assetsLoaded;
protected bool m_inventoryNodesLoaded;
protected int m_successfulAssetRestores;
protected int m_failedAssetRestores;
protected int m_successfulItemRestores;
/// <summary>
/// Root destination folder for the IAR load.
/// </summary>
protected InventoryFolderBase m_rootDestinationFolder;
/// <summary>
/// Inventory nodes loaded from the iar.
/// </summary>
protected HashSet<InventoryNodeBase> m_loadedNodes = new HashSet<InventoryNodeBase>();
/// <summary>
/// In order to load identically named folders, we need to keep track of the folders that we have already
/// resolved.
/// </summary>
Dictionary <string, InventoryFolderBase> m_resolvedFolders = new Dictionary<string, InventoryFolderBase>();
/// <summary>
/// Record the creator id that should be associated with an asset. This is used to adjust asset creator ids
/// after OSP resolution (since OSP creators are only stored in the item, not the asset).
/// </summary>
protected Dictionary<UUID, UUID> m_creatorIdForAssetId = new Dictionary<UUID, UUID>();
/// <summary>
/// Convenience constructor with no request id or module; delegates to the main constructor.
/// </summary>
public InventoryArchiveReadRequest(
IInventoryService inv, IAssetService assets, IUserAccountService uacc, UserAccount userInfo, string invPath, string loadPath, bool merge)
: this(UUID.Zero, null,
inv,
assets,
uacc,
userInfo,
invPath,
loadPath,
merge)
{
}
/// <summary>
/// Constructor that opens the IAR from a file path, wrapping it in a gzip decompression stream.
/// </summary>
public InventoryArchiveReadRequest(
UUID id, InventoryArchiverModule module, IInventoryService inv, IAssetService assets, IUserAccountService uacc, UserAccount userInfo, string invPath, string loadPath, bool merge)
: this(
id,
module,
inv,
assets,
uacc,
userInfo,
invPath,
new GZipStream(ArchiveHelpers.GetStream(loadPath), CompressionMode.Decompress),
merge)
{
}
/// <summary>
/// Main constructor: loads the IAR from an already-open stream.
/// </summary>
/// <param name="id">ID of this request.</param>
/// <param name="module">Module used to fire the archive-loaded event; may be null.</param>
/// <param name="inv">Inventory service used to create folders and items.</param>
/// <param name="assets">Asset service used to store loaded assets.</param>
/// <param name="uacc">User account service used for OSPA creator resolution.</param>
/// <param name="userInfo">Account of the user receiving the inventory.</param>
/// <param name="invPath">Inventory path under which to load.</param>
/// <param name="loadStream">Stream from which the archive is read.</param>
/// <param name="merge">Whether to merge with existing inventory.</param>
public InventoryArchiveReadRequest(
UUID id, InventoryArchiverModule module, IInventoryService inv, IAssetService assets, IUserAccountService uacc, UserAccount userInfo, string invPath, Stream loadStream, bool merge)
{
m_id = id;
m_InventoryService = inv;
m_AssetService = assets;
m_UserAccountService = uacc;
m_merge = merge;
m_userInfo = userInfo;
m_invPath = invPath;
m_loadStream = loadStream;
m_module = module;
// FIXME: Do not perform this check since older versions of OpenSim do save the control file after other things
// (I thought they weren't). We will need to bump the version number and perform this check on all
// subsequent IAR versions only
ControlFileLoaded = true;
}
/// <summary>
/// Execute the request
/// </summary>
/// <remarks>
/// Only call this once. To load another IAR, construct another request object.
/// NOTE(review): exceptions are caught, reported via the module's loaded event (when m_module
/// is non-null), and NOT rethrown; callers receive the nodes loaded so far.
/// </remarks>
/// <returns>
/// A list of the inventory nodes loaded. If folders were loaded then only the root folders are
/// returned
/// </returns>
public HashSet<InventoryNodeBase> Execute()
{
try
{
Exception reportedException = null;
string filePath = "ERROR";
// Resolve the destination folder from the requested inventory path.
List<InventoryFolderBase> folderCandidates
= InventoryArchiveUtils.FindFoldersByPath(
m_InventoryService, m_userInfo.PrincipalID, m_invPath);
if (folderCandidates.Count == 0)
{
// Possibly provide an option later on to automatically create this folder if it does not exist
m_log.ErrorFormat("[INVENTORY ARCHIVER]: Inventory path {0} does not exist", m_invPath);
return m_loadedNodes;
}
m_rootDestinationFolder = folderCandidates[0];
archive = new TarArchiveReader(m_loadStream);
byte[] data;
TarArchiveReader.TarEntryType entryType;
// Dispatch each tar entry to the appropriate loader by path prefix.
while ((data = archive.ReadEntry(out filePath, out entryType)) != null)
{
if (filePath == ArchiveConstants.CONTROL_FILE_PATH)
{
LoadControlFile(filePath, data);
}
else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
{
LoadAssetFile(filePath, data);
}
else if (filePath.StartsWith(ArchiveConstants.INVENTORY_PATH))
{
LoadInventoryFile(filePath, entryType, data);
}
}
archive.Close();
m_log.DebugFormat(
"[INVENTORY ARCHIVER]: Successfully loaded {0} assets with {1} failures",
m_successfulAssetRestores, m_failedAssetRestores);
//Alicia: When this is called by LibraryModule or Tests, m_module will be null as event is not required
if(m_module != null)
m_module.TriggerInventoryArchiveLoaded(m_id, true, m_userInfo, m_invPath, m_loadStream, reportedException, m_successfulItemRestores);
return m_loadedNodes;
}
catch(Exception Ex)
{
// Trigger saved event with failed result and exception data
if (m_module != null)
m_module.TriggerInventoryArchiveLoaded(m_id, false, m_userInfo, m_invPath, m_loadStream, Ex, 0);
return m_loadedNodes;
}
finally
{
m_loadStream.Close();
}
}
/// <summary>
/// Close the stream from which the archive was being loaded, if one is open.
/// </summary>
public void Close()
{
// Capture the field once; closing is skipped when no stream was supplied.
Stream stream = m_loadStream;
if (stream != null)
stream.Close();
}
/// <summary>
/// Replicate the inventory paths in the archive to the user's inventory as necessary.
/// </summary>
/// <param name="iarPath">The item archive path to replicate</param>
/// <param name="rootDestFolder">The root folder for the inventory load</param>
/// <param name="resolvedFolders">
/// The folders that we have resolved so far for a given archive path.
/// This method will add more folders if necessary
/// </param>
/// <param name="loadedNodes">
/// Track the inventory nodes created.
/// </param>
/// <returns>The last user inventory folder created or found for the archive path</returns>
public InventoryFolderBase ReplicateArchivePathToUserInventory(
string iarPath,
InventoryFolderBase rootDestFolder,
Dictionary <string, InventoryFolderBase> resolvedFolders,
HashSet<InventoryNodeBase> loadedNodes)
{
string iarPathExisting = iarPath;
// m_log.DebugFormat(
// "[INVENTORY ARCHIVER]: Loading folder {0} {1}", rootDestFolder.Name, rootDestFolder.ID);
// Find the deepest folder on the archive path that already exists in the user's inventory...
InventoryFolderBase destFolder
= ResolveDestinationFolder(rootDestFolder, ref iarPathExisting, resolvedFolders);
// m_log.DebugFormat(
// "[INVENTORY ARCHIVER]: originalArchivePath [{0}], section already loaded [{1}]",
// iarPath, iarPathExisting);
// ...then create the remaining, not-yet-existing portion of the path beneath it.
string iarPathToCreate = iarPath.Substring(iarPathExisting.Length);
CreateFoldersForPath(destFolder, iarPathExisting, iarPathToCreate, resolvedFolders, loadedNodes);
return destFolder;
}
/// <summary>
/// Resolve a destination folder
/// </summary>
///
/// We require here a root destination folder (usually the root of the user's inventory) and the archive
/// path. We also pass in a list of previously resolved folders in case we've found this one previously.
///
/// <param name="archivePath">
/// The item archive path to resolve. The portion of the path passed back is that
/// which corresponds to the resolved destination folder.
/// </param>
/// <param name="rootDestFolder">
/// The root folder for the inventory load
/// </param>
/// <param name="resolvedFolders">
/// The folders that we have resolved so far for a given archive path.
/// </param>
/// <returns>
/// The folder in the user's inventory that matches best the archive path given. If no such folder was found
/// then the passed in root destination folder is returned.
/// </returns>
protected InventoryFolderBase ResolveDestinationFolder(
InventoryFolderBase rootDestFolder,
ref string archivePath,
Dictionary <string, InventoryFolderBase> resolvedFolders)
{
// Strip trailing path components until we hit a previously resolved (or, when merging,
// a pre-existing) folder, or until the path is exhausted.
while (archivePath.Length > 0)
{
InventoryFolderBase previouslyResolved;
// Single dictionary lookup instead of ContainsKey followed by the indexer.
if (resolvedFolders.TryGetValue(archivePath, out previouslyResolved))
{
return previouslyResolved;
}
else
{
if (m_merge)
{
// TODO: Using m_invPath is totally wrong - what we need to do is strip the uuid from the
// iar name and try to find that instead.
string plainPath = ArchiveConstants.ExtractPlainPathFromIarPath(archivePath);
List<InventoryFolderBase> folderCandidates
= InventoryArchiveUtils.FindFoldersByPath(
m_InventoryService, m_userInfo.PrincipalID, plainPath);
if (folderCandidates.Count != 0)
{
InventoryFolderBase destFolder = folderCandidates[0];
resolvedFolders[archivePath] = destFolder;
return destFolder;
}
}
// Don't include the last slash so find the penultimate one
int penultimateSlashIndex = archivePath.LastIndexOf("/", archivePath.Length - 2);
if (penultimateSlashIndex >= 0)
{
// Remove the last section of path so that we can see if we've already resolved the parent
archivePath = archivePath.Remove(penultimateSlashIndex + 1);
}
else
{
// No resolvable prefix at all - everything loads relative to the root destination folder.
archivePath = string.Empty;
return rootDestFolder;
}
}
}
return rootDestFolder;
}
/// <summary>
/// Create a set of folders for the given path.
/// </summary>
/// <param name="destFolder">
/// The root folder from which the creation will take place.
/// </param>
/// <param name="iarPathExisting">
/// the part of the iar path that already exists
/// </param>
/// <param name="iarPathToReplicate">
/// The path to replicate in the user's inventory from iar
/// </param>
/// <param name="resolvedFolders">
/// The folders that we have resolved so far for a given archive path.
/// </param>
/// <param name="loadedNodes">
/// Track the inventory nodes created.
/// </param>
protected void CreateFoldersForPath(
InventoryFolderBase destFolder,
string iarPathExisting,
string iarPathToReplicate,
Dictionary <string, InventoryFolderBase> resolvedFolders,
HashSet<InventoryNodeBase> loadedNodes)
{
string[] rawDirsToCreate = iarPathToReplicate.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
for (int i = 0; i < rawDirsToCreate.Length; i++)
{
// m_log.DebugFormat("[INVENTORY ARCHIVER]: Creating folder {0} from IAR", rawDirsToCreate[i]);
// Skip components lacking the name/uuid separator - not valid iar folder names.
if (!rawDirsToCreate[i].Contains(ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR))
continue;
int identicalNameIdentifierIndex
= rawDirsToCreate[i].LastIndexOf(
ArchiveConstants.INVENTORY_NODE_NAME_COMPONENT_SEPARATOR);
// Strip the uuid component, leaving just the (escaped) folder name.
string newFolderName = rawDirsToCreate[i].Remove(identicalNameIdentifierIndex);
newFolderName = InventoryArchiveUtils.UnescapeArchivePath(newFolderName);
UUID newFolderId = UUID.Random();
destFolder
= new InventoryFolderBase(
newFolderId, newFolderName, m_userInfo.PrincipalID,
(short)FolderType.None, destFolder.ID, 1);
m_InventoryService.AddFolder(destFolder);
// Record that we have now created this folder
iarPathExisting += rawDirsToCreate[i] + "/";
m_log.DebugFormat("[INVENTORY ARCHIVER]: Created folder {0} from IAR", iarPathExisting);
resolvedFolders[iarPathExisting] = destFolder;
// Only the topmost newly created folder is reported back as a loaded node.
if (0 == i)
loadedNodes.Add(destFolder);
}
}
/// <summary>
/// Load an item from the archive
/// </summary>
/// <param name="data">The raw serialized item data</param>
/// <param name="loadFolder">The folder in which to place the loaded item</param>
/// <returns>The item created in the user's inventory</returns>
protected InventoryItemBase LoadItem(byte[] data, InventoryFolderBase loadFolder)
{
InventoryItemBase item = UserInventoryItemSerializer.Deserialize(data);
// Don't use the item ID that's in the file
item.ID = UUID.Random();
UUID ospResolvedId = OspResolver.ResolveOspa(item.CreatorId, m_UserAccountService);
if (UUID.Zero != ospResolvedId) // The user exists in this grid
{
// m_log.DebugFormat("[INVENTORY ARCHIVER]: Found creator {0} via OSPA resolution", ospResolvedId);
// item.CreatorIdAsUuid = ospResolvedId;
// Don't preserve the OSPA in the creator id (which actually gets persisted to the
// database). Instead, replace with the UUID that we found.
item.CreatorId = ospResolvedId.ToString();
item.CreatorData = string.Empty;
}
else if (string.IsNullOrEmpty(item.CreatorData))
{
// No resolvable creator and no creator data - fall back to the loading user as creator.
item.CreatorId = m_userInfo.PrincipalID.ToString();
// item.CreatorIdAsUuid = new UUID(item.CreatorId);
}
item.Owner = m_userInfo.PrincipalID;
// Reset folder ID to the one in which we want to load it
item.Folder = loadFolder.ID;
// Record the creator id for the item's asset so that we can use it later, if necessary, when the asset
// is loaded.
// FIXME: This relies on the items coming before the assets in the TAR file. Need to create stronger
// checks for this, and maybe even an external tool for creating OARs which enforces this, rather than
// relying on native tar tools.
m_creatorIdForAssetId[item.AssetID] = item.CreatorIdAsUuid;
if (!m_InventoryService.AddItem(item))
m_log.WarnFormat("[INVENTORY ARCHIVER]: Unable to save item {0} in folder {1}", item.Name, item.Folder);
return item;
}
/// <summary>
/// Load an asset
/// </summary>
/// <param name="assetPath">Archive path of the asset; the UUID and type extension are derived from it</param>
/// <param name="data">Raw asset data</param>
/// <returns>true if asset was successfully loaded, false otherwise</returns>
private bool LoadAsset(string assetPath, byte[] data)
{
//IRegionSerialiser serialiser = scene.RequestModuleInterface<IRegionSerialiser>();
// Right now we're nastily obtaining the UUID from the filename
string filename = assetPath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
int i = filename.LastIndexOf(ArchiveConstants.ASSET_EXTENSION_SEPARATOR);
if (i == -1)
{
m_log.ErrorFormat(
"[INVENTORY ARCHIVER]: Could not find extension information in asset path {0} since it's missing the separator {1}. Skipping",
assetPath, ArchiveConstants.ASSET_EXTENSION_SEPARATOR);
return false;
}
string extension = filename.Substring(i);
string rawUuid = filename.Remove(filename.Length - extension.Length);
UUID assetId = new UUID(rawUuid);
sbyte assetType;
// Single lookup instead of ContainsKey followed by the indexer.
if (ArchiveConstants.EXTENSION_TO_ASSET_TYPE.TryGetValue(extension, out assetType))
{
if (assetType == (sbyte)AssetType.Unknown)
{
m_log.WarnFormat("[INVENTORY ARCHIVER]: Importing {0} byte asset {1} with unknown type", data.Length, assetId);
}
else if (assetType == (sbyte)AssetType.Object)
{
UUID creatorId;
if (m_creatorIdForAssetId.TryGetValue(assetId, out creatorId))
{
// Rewrite creator ids on parts that carry no creator data, using the id recorded
// earlier when the owning item was loaded.
data = SceneObjectSerializer.ModifySerializedObject(assetId, data,
sog => {
bool modified = false;
foreach (SceneObjectPart sop in sog.Parts)
{
if (string.IsNullOrEmpty(sop.CreatorData))
{
sop.CreatorID = creatorId;
modified = true;
}
}
return modified;
});
if (data == null)
return false;
}
}
//m_log.DebugFormat("[INVENTORY ARCHIVER]: Importing asset {0}, type {1}", uuid, assetType);
AssetBase asset = new AssetBase(assetId, "From IAR", assetType, UUID.Zero.ToString());
asset.Data = data;
m_AssetService.Store(asset);
return true;
}
else
{
m_log.ErrorFormat(
"[INVENTORY ARCHIVER]: Tried to dearchive data with path {0} with an unknown type extension {1}",
assetPath, extension);
return false;
}
}
/// <summary>
/// Load control file
/// </summary>
/// <param name="path">Archive path of the control file (not used for parsing)</param>
/// <param name="data">Raw control file bytes (XML)</param>
/// <exception cref="Exception">
/// Thrown if the archive's major version exceeds MAX_MAJOR_VERSION.
/// </exception>
public void LoadControlFile(string path, byte[] data)
{
// Decode as UTF-8 rather than ASCII: identical for the usual ASCII control files,
// but does not mangle (to '?') any non-ASCII bytes another archiver may have written.
XDocument doc = XDocument.Parse(Encoding.UTF8.GetString(data));
XElement archiveElement = doc.Element("archive");
int majorVersion = int.Parse(archiveElement.Attribute("major_version").Value);
int minorVersion = int.Parse(archiveElement.Attribute("minor_version").Value);
string version = string.Format("{0}.{1}", majorVersion, minorVersion);
if (majorVersion > MAX_MAJOR_VERSION)
{
throw new Exception(
string.Format(
"The IAR you are trying to load has major version number of {0} but this version of OpenSim can only load IARs with major version number {1} and below",
majorVersion, MAX_MAJOR_VERSION));
}
ControlFileLoaded = true;
m_log.InfoFormat("[INVENTORY ARCHIVER]: Loading IAR with version {0}", version);
}
/// <summary>
/// Load inventory file
/// </summary>
/// <param name="path">Archive path of the inventory entry</param>
/// <param name="entryType">Tar entry type (file or directory)</param>
/// <param name="data">Raw entry data (serialized item for file entries)</param>
protected void LoadInventoryFile(string path, TarArchiveReader.TarEntryType entryType, byte[] data)
{
// Enforce archive ordering: control file first, all inventory before any assets.
if (!ControlFileLoaded)
throw new Exception(
string.Format(
"The IAR you are trying to load does not list {0} before {1}. Aborting load",
ArchiveConstants.CONTROL_FILE_PATH, ArchiveConstants.INVENTORY_PATH));
if (m_assetsLoaded)
throw new Exception(
string.Format(
"The IAR you are trying to load does not list all {0} before {1}. Aborting load",
ArchiveConstants.INVENTORY_PATH, ArchiveConstants.ASSETS_PATH));
path = path.Substring(ArchiveConstants.INVENTORY_PATH.Length);
// Trim off the file portion if we aren't already dealing with a directory path
if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
path = path.Remove(path.LastIndexOf("/") + 1);
InventoryFolderBase foundFolder
= ReplicateArchivePathToUserInventory(
path, m_rootDestinationFolder, m_resolvedFolders, m_loadedNodes);
if (TarArchiveReader.TarEntryType.TYPE_DIRECTORY != entryType)
{
InventoryItemBase item = LoadItem(data, foundFolder);
if (item != null)
{
m_successfulItemRestores++;
// If we aren't loading the folder containing the item then we'll need to update the
// viewer separately for that item.
if (!m_loadedNodes.Contains(foundFolder))
m_loadedNodes.Add(item);
}
}
m_inventoryNodesLoaded = true;
}
/// <summary>
/// Load asset file
/// </summary>
/// <param name="path">Archive path of the asset entry</param>
/// <param name="data">Raw asset data</param>
protected void LoadAssetFile(string path, byte[] data)
{
// Enforce archive ordering: control file first, all inventory before any assets.
if (!ControlFileLoaded)
throw new Exception(
string.Format(
"The IAR you are trying to load does not list {0} before {1}. Aborting load",
ArchiveConstants.CONTROL_FILE_PATH, ArchiveConstants.ASSETS_PATH));
if (!m_inventoryNodesLoaded)
throw new Exception(
string.Format(
"The IAR you are trying to load does not list all {0} before {1}. Aborting load",
ArchiveConstants.INVENTORY_PATH, ArchiveConstants.ASSETS_PATH));
if (LoadAsset(path, data))
m_successfulAssetRestores++;
else
m_failedAssetRestores++;
// Progress logging every 50 successful restores.
if ((m_successfulAssetRestores) % 50 == 0)
m_log.DebugFormat(
"[INVENTORY ARCHIVER]: Loaded {0} assets...",
m_successfulAssetRestores);
m_assetsLoaded = true;
}
}
}
| |
namespace SandcastleBuilder.Gui.ContentEditors
{
partial class EntityReferenceWindow
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if(disposing && (components != null))
{
// Dispose designer-managed components before releasing base window resources.
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(EntityReferenceWindow));
this.label1 = new System.Windows.Forms.Label();
this.txtFindName = new System.Windows.Forms.TextBox();
this.sbStatusBarText = new SandcastleBuilder.Utils.Controls.StatusBarTextProvider(this.components);
this.tvEntities = new System.Windows.Forms.TreeView();
this.tsbRefresh = new System.Windows.Forms.ToolStripButton();
this.cboContentType = new System.Windows.Forms.ToolStripComboBox();
this.ilImages = new System.Windows.Forms.ImageList(this.components);
this.lblLoading = new System.Windows.Forms.Label();
this.pbWait = new System.Windows.Forms.PictureBox();
this.tsOptions = new System.Windows.Forms.ToolStrip();
this.toolStripSeparator1 = new System.Windows.Forms.ToolStripSeparator();
this.toolStripLabel1 = new System.Windows.Forms.ToolStripLabel();
((System.ComponentModel.ISupportInitialize)(this.pbWait)).BeginInit();
this.tsOptions.SuspendLayout();
this.SuspendLayout();
//
// label1
//
this.label1.Location = new System.Drawing.Point(2, 28);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(40, 23);
this.label1.TabIndex = 0;
this.label1.Text = "Fin&d";
this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
//
// txtFindName
//
this.txtFindName.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.txtFindName.Enabled = false;
this.txtFindName.Location = new System.Drawing.Point(48, 28);
this.txtFindName.MaxLength = 256;
this.txtFindName.Name = "txtFindName";
this.txtFindName.Size = new System.Drawing.Size(312, 22);
this.sbStatusBarText.SetStatusBarText(this.txtFindName, "Find: Enter the ID or, for code entities, a regular expression for which to searc" +
"h");
this.txtFindName.TabIndex = 1;
this.txtFindName.KeyDown += new System.Windows.Forms.KeyEventHandler(this.txtFindName_KeyDown);
//
// tvEntities
//
this.tvEntities.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.tvEntities.Enabled = false;
this.tvEntities.FullRowSelect = true;
this.tvEntities.Location = new System.Drawing.Point(1, 56);
this.tvEntities.Name = "tvEntities";
this.tvEntities.ShowLines = false;
this.tvEntities.ShowNodeToolTips = true;
this.tvEntities.ShowPlusMinus = false;
this.tvEntities.ShowRootLines = false;
this.tvEntities.Size = new System.Drawing.Size(360, 203);
this.sbStatusBarText.SetStatusBarText(this.tvEntities, "Entities: Drag an item and drop it in the topic");
this.tvEntities.TabIndex = 2;
this.tvEntities.NodeMouseDoubleClick += new System.Windows.Forms.TreeNodeMouseClickEventHandler(this.tvEntities_NodeMouseDoubleClick);
this.tvEntities.BeforeCollapse += new System.Windows.Forms.TreeViewCancelEventHandler(this.tvEntities_BeforeCollapse);
this.tvEntities.KeyDown += new System.Windows.Forms.KeyEventHandler(this.tvEntities_KeyDown);
this.tvEntities.ItemDrag += new System.Windows.Forms.ItemDragEventHandler(this.tvEntities_ItemDrag);
//
// tsbRefresh
//
this.tsbRefresh.DisplayStyle = System.Windows.Forms.ToolStripItemDisplayStyle.Image;
this.tsbRefresh.Image = global::SandcastleBuilder.Gui.Properties.Resources.Refresh;
this.tsbRefresh.ImageTransparentColor = System.Drawing.Color.Magenta;
this.tsbRefresh.Name = "tsbRefresh";
this.tsbRefresh.Size = new System.Drawing.Size(23, 22);
this.sbStatusBarText.SetStatusBarText(this.tsbRefresh, "Refresh the content list using the current project settings");
this.tsbRefresh.ToolTipText = "Refresh the content lists";
this.tsbRefresh.Click += new System.EventHandler(this.tsbRefresh_Click);
//
// cboContentType
//
this.cboContentType.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cboContentType.Font = new System.Drawing.Font("Microsoft Sans Serif", 7.8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.cboContentType.Items.AddRange(new object[] {
"Tokens",
"Images",
"Code Snippets",
"Code Entities"});
this.cboContentType.Name = "cboContentType";
this.cboContentType.Size = new System.Drawing.Size(121, 25);
this.sbStatusBarText.SetStatusBarText(this.cboContentType, "Select the type of entity reference");
this.cboContentType.SelectedIndexChanged += new System.EventHandler(this.cboContentType_SelectedIndexChanged);
//
// ilImages
//
this.ilImages.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject("ilImages.ImageStream")));
this.ilImages.TransparentColor = System.Drawing.Color.Magenta;
this.ilImages.Images.SetKeyName(0, "TokenFile.bmp");
this.ilImages.Images.SetKeyName(1, "ImageFile.bmp");
this.ilImages.Images.SetKeyName(2, "SnippetsFile.bmp");
this.ilImages.Images.SetKeyName(3, "NormalTopic.bmp");
//
// lblLoading
//
this.lblLoading.AutoSize = true;
this.lblLoading.BackColor = System.Drawing.SystemColors.Window;
this.lblLoading.Location = new System.Drawing.Point(50, 75);
this.lblLoading.Name = "lblLoading";
this.lblLoading.Size = new System.Drawing.Size(140, 17);
this.lblLoading.TabIndex = 3;
this.lblLoading.Text = "Indexing comments...";
this.lblLoading.Visible = false;
//
// pbWait
//
this.pbWait.BackColor = System.Drawing.SystemColors.Window;
this.pbWait.Image = ((System.Drawing.Image)(resources.GetObject("pbWait.Image")));
this.pbWait.Location = new System.Drawing.Point(12, 67);
this.pbWait.Name = "pbWait";
this.pbWait.Size = new System.Drawing.Size(32, 32);
this.pbWait.SizeMode = System.Windows.Forms.PictureBoxSizeMode.AutoSize;
this.pbWait.TabIndex = 9;
this.pbWait.TabStop = false;
this.pbWait.Visible = false;
//
// tsOptions
//
this.tsOptions.Font = new System.Drawing.Font("Microsoft Sans Serif", 7.8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.tsOptions.GripStyle = System.Windows.Forms.ToolStripGripStyle.Hidden;
this.tsOptions.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
this.tsbRefresh,
this.toolStripSeparator1,
this.toolStripLabel1,
this.cboContentType});
this.tsOptions.Location = new System.Drawing.Point(0, 0);
this.tsOptions.Name = "tsOptions";
this.tsOptions.Size = new System.Drawing.Size(362, 25);
this.tsOptions.TabIndex = 4;
//
// toolStripSeparator1
//
this.toolStripSeparator1.Name = "toolStripSeparator1";
this.toolStripSeparator1.Size = new System.Drawing.Size(6, 25);
//
// toolStripLabel1
//
this.toolStripLabel1.Name = "toolStripLabel1";
this.toolStripLabel1.Size = new System.Drawing.Size(40, 22);
this.toolStripLabel1.Text = "&Type";
//
// EntityReferenceWindow
//
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Inherit;
this.ClientSize = new System.Drawing.Size(362, 260);
this.Controls.Add(this.tsOptions);
this.Controls.Add(this.lblLoading);
this.Controls.Add(this.pbWait);
this.Controls.Add(this.label1);
this.Controls.Add(this.txtFindName);
this.Controls.Add(this.tvEntities);
this.DockAreas = ((WeifenLuo.WinFormsUI.Docking.DockAreas)(((((WeifenLuo.WinFormsUI.Docking.DockAreas.Float | WeifenLuo.WinFormsUI.Docking.DockAreas.DockLeft)
| WeifenLuo.WinFormsUI.Docking.DockAreas.DockRight)
| WeifenLuo.WinFormsUI.Docking.DockAreas.DockTop)
| WeifenLuo.WinFormsUI.Docking.DockAreas.DockBottom)));
this.HideOnClose = true;
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.Name = "EntityReferenceWindow";
this.ShowHint = WeifenLuo.WinFormsUI.Docking.DockState.DockRight;
this.TabText = "Entity References";
this.Text = "Entity References";
this.VisibleChanged += new System.EventHandler(this.EntityReferenceWindow_VisibleChanged);
this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.EntityReferenceWindow_FormClosing);
((System.ComponentModel.ISupportInitialize)(this.pbWait)).EndInit();
this.tsOptions.ResumeLayout(false);
this.tsOptions.PerformLayout();
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
// Designer-managed control fields; maintained by the Windows Forms designer.
private System.Windows.Forms.Label label1;
private System.Windows.Forms.TextBox txtFindName;
private SandcastleBuilder.Utils.Controls.StatusBarTextProvider sbStatusBarText;
private System.Windows.Forms.Label lblLoading;
private System.Windows.Forms.PictureBox pbWait;
private System.Windows.Forms.TreeView tvEntities;
private System.Windows.Forms.ToolStrip tsOptions;
private System.Windows.Forms.ToolStripButton tsbRefresh;
private System.Windows.Forms.ToolStripSeparator toolStripSeparator1;
private System.Windows.Forms.ToolStripComboBox cboContentType;
private System.Windows.Forms.ImageList ilImages;
private System.Windows.Forms.ToolStripLabel toolStripLabel1;
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Newtonsoft.Json;
using notifier.Languages;
using notifier.Properties;
namespace notifier {
/// <summary>
/// Manages the user-configured per-day synchronization time slots.
/// </summary>
class Scheduler {
#region #attributes
/// <summary>
/// Day list using Monday as first day of week
/// </summary>
public readonly List<DayOfWeek> Days = Enum.GetValues(typeof(DayOfWeek)).Cast<DayOfWeek>().OrderBy(day => { return (day - DayOfWeek.Monday + 7) % 7; }).ToList();
/// <summary>
/// Time type possibilities
/// </summary>
public enum TimeType : uint {
Start = 0,
End = 1
}
/// <summary>
/// List of slots for the scheduler
/// </summary>
private readonly List<TimeSlot> Slots;
/// <summary>
/// Reference to the main interface
/// </summary>
private readonly Main UI;
#endregion
#region #methods
/// <summary>
/// Class constructor
/// </summary>
/// <param name="form">Reference to the application main window</param>
public Scheduler(ref Main form) {
UI = form;
// init the slots depending on the user settings (time slots are persisted as JSON)
Slots = string.IsNullOrEmpty(Settings.Default.SchedulerTimeSlot) ? new List<TimeSlot>() : JsonConvert.DeserializeObject<List<TimeSlot>>(Settings.Default.SchedulerTimeSlot);
// display the default day of week based on today
UI.fieldDayOfWeek.SelectedIndex = Days.IndexOf(DateTime.Now.DayOfWeek);
// display the start time and end time for today
DisplayTimeSlotProperties(GetTimeSlot());
}
/// <summary>
/// Display the time slot properties on the interface
/// </summary>
/// <param name="slot">Time slot to display, or null when no slot is defined for the day</param>
public void DisplayTimeSlotProperties(TimeSlot slot) {
// if no time slot is defined, the synchronization will work all the day
if (slot == null) {
UI.fieldStartTime.SelectedIndex = 0;
UI.fieldEndTime.SelectedIndex = 0;
UI.labelDuration.Text = Translation.theday;
return;
}
// if time slot of 0 hours is defined, the synchronization will be paused all the day
if (slot.TotalHours == 0) {
UI.fieldStartTime.SelectedIndex = 1;
UI.fieldEndTime.SelectedIndex = 1;
UI.labelDuration.Text = Translation.off;
return;
}
// display the time slot properties
UI.fieldStartTime.Text = slot.Start.ToString(@"h\:mm");
UI.fieldEndTime.Text = slot.End.ToString(@"h\:mm");
UI.labelDuration.Text = $"{slot.TotalHours.ToString(CultureInfo.CurrentCulture)} {Translation.hours}";
}
/// <summary>
/// Update the scheduler properties depending on the type of time
/// </summary>
/// <param name="type">Type of time (start or end) that the user just changed</param>
public async Task Update(TimeType type) {
// get the selected day of week
DayOfWeek day = GetDayOfWeek(UI.fieldDayOfWeek.SelectedIndex);
// get the selected indexes
int start = UI.fieldStartTime.SelectedIndex;
int end = UI.fieldEndTime.SelectedIndex;
// check for the infinite or off time options (indexes 0 and 1 are reserved for them)
bool infiniteOption = (type == TimeType.Start && start == 0) || (type == TimeType.End && end == 0);
bool offOption = (type == TimeType.Start && start == 1) || (type == TimeType.End && end == 1);
// remove the time slot for the selected day
if (infiniteOption || offOption) {
// infinite option: drop the slot entirely so synchronization runs all day
if (infiniteOption) {
// remove the time slot from the scheduler
RemoveTimeSlot(day);
// update the interface
UI.fieldStartTime.SelectedIndex = 0;
UI.fieldEndTime.SelectedIndex = 0;
UI.labelDuration.Text = Translation.theday;
} else {
// set the time slot to 0 in the scheduler (mean no synchronization)
SetTimeSlot(day, new TimeSpan(0, 0, 0), new TimeSpan(0, 0, 0));
// update the interface
UI.fieldStartTime.SelectedIndex = 1;
UI.fieldEndTime.SelectedIndex = 1;
UI.labelDuration.Text = Translation.off;
}
// synchronize the inbox if the selected day of week is today
if (GetDayOfWeek(UI.fieldDayOfWeek.SelectedIndex) == DateTime.Now.DayOfWeek) {
await UI.GmailService.Inbox.Sync();
}
return;
}
// update the end time depending on the start time
if (type == TimeType.Start && (start > end || end == 0 || end == 1)) {
UI.fieldEndTime.SelectedIndex = start;
}
// update the start time depending on the end time
if (type == TimeType.End && (end < start || start == 0 || start == 1)) {
UI.fieldStartTime.SelectedIndex = end;
}
// add or update the time slot based on selected start/end time
TimeSlot slot = SetTimeSlot(day, TimeSpan.Parse(UI.fieldStartTime.Text), TimeSpan.Parse(UI.fieldEndTime.Text));
// update the duration label
UI.labelDuration.Text = $"{slot.TotalHours.ToString(CultureInfo.CurrentCulture)} {Translation.hours}";
// synchronize the inbox if the selected day of week is today
if (GetDayOfWeek(UI.fieldDayOfWeek.SelectedIndex) == DateTime.Now.DayOfWeek) {
await UI.GmailService.Inbox.Sync();
}
}
/// <summary>
/// Pause the synchronization
/// </summary>
public void PauseSync() {
    // get the time slot of today
    TimeSlot slot = GetTimeSlot();
    // display the timeout icon
    UI.notifyIcon.Icon = Resources.timeout;
    // NOTE(review): 'slot' is dereferenced below and would throw if no slot is
    // defined for today — presumably PauseSync is only reached when a slot
    // exists (ScheduledSync returns true for a null slot); confirm with callers.
    // build the icon tooltip depending on the slot duration
    string day = CultureInfo.CurrentUICulture.DateTimeFormat.GetDayName(DateTime.Now.DayOfWeek);
    if (slot.TotalHours != 0) {
        // non-empty slot: tooltip shows the scheduled sync window for today
        UI.notifyIcon.Text = Translation.syncScheduled.Replace("{day}", day).Replace("{start}", slot.Start.ToString(@"h\:mm")).Replace("{end}", slot.End.ToString(@"h\:mm"));
    } else {
        // zero-length slot: synchronization is off for the whole day
        UI.notifyIcon.Text = Translation.syncOff.Replace("{day}", day);
    }
    // disable the menu actions that require an active synchronization
    UI.menuItemSynchronize.Enabled = false;
    UI.menuItemMarkAsRead.Enabled = false;
    UI.menuItemTimout.Enabled = false;
    UI.menuItemSettings.Enabled = true;
    // reset the mark-as-read caption
    UI.menuItemMarkAsRead.Text = Translation.markAsRead;
}
/// <summary>
/// Get a specific day of week by index using Monday as reference for the starting day of week
/// </summary>
/// <param name="index">Index of the day in the list</param>
/// <returns>The day of week at the specific index</returns>
public DayOfWeek GetDayOfWeek(int index) {
    // Days is ordered starting from Monday; index straight into it.
    return Days[index];
}
/// <summary>
/// Search a time slot for today
/// </summary>
/// <returns>The time slot of today</returns>
public TimeSlot GetTimeSlot() {
    // delegate to the day-specific lookup, using today's day of week
    return GetTimeSlot(DateTime.Now.DayOfWeek);
}
/// <summary>
/// Search a time slot for a specific day
/// </summary>
/// <param name="day">The day for which to find a time slot</param>
/// <returns>The time slot of the day</returns>
public TimeSlot GetTimeSlot(DayOfWeek day) {
    // returns null when no slot is registered for the given day
    return Slots.Find(slot => slot.Day == day);
}
/// <summary>
/// Add or update a time slot to the scheduler
/// </summary>
/// <param name="day">Day of week</param>
/// <param name="start">Start time of the time slot</param>
/// <param name="end">End time of the time slot</param>
/// <returns>The time slot that was added or updated</returns>
public TimeSlot SetTimeSlot(DayOfWeek day, TimeSpan start, TimeSpan end) {
    // look for an existing slot for this day
    TimeSlot slot = GetTimeSlot(day);
    if (slot != null) {
        // update the existing slot in place
        slot.Start = start;
        slot.End = end;
    } else {
        // no slot yet for this day: create and register one
        slot = new TimeSlot(day, start, end);
        Slots.Add(slot);
    }
    // save all slots
    Settings.Default.SchedulerTimeSlot = JsonConvert.SerializeObject(Slots);
    // return the added or updated time slot directly instead of searching
    // the list a second time (the previous implementation did a redundant
    // GetTimeSlot(day) lookup for an object it already held)
    return slot;
}
/// <summary>
/// Remove a time slot from the list
/// </summary>
/// <param name="day">The day for which the time slot is defined</param>
public void RemoveTimeSlot(DayOfWeek day) {
    // nothing to do when no slot is registered for the given day
    TimeSlot slot = GetTimeSlot(day);
    if (slot != null) {
        Slots.Remove(slot);
        // persist the updated slot list
        Settings.Default.SchedulerTimeSlot = JsonConvert.SerializeObject(Slots);
    }
}
/// <summary>
/// Detect if the synchronization is scheduled
/// </summary>
/// <returns>A flag that tells if the inbox can be synced</returns>
public bool ScheduledSync() {
    // get the time slot of today
    TimeSlot slot = GetTimeSlot();
    // allow inbox syncing if there is no slot defined for today
    if (slot == null) {
        return true;
    }
    // compare the current time of day directly against the slot bounds;
    // the previous Convert.ToDateTime(slot.Start.ToString()) round-trip was
    // culture-dependent string parsing for a value we already have as a TimeSpan
    TimeSpan now = DateTime.Now.TimeOfDay;
    return now >= slot.Start && now <= slot.End;
}
#endregion
#region #accessors
#endregion
}
}
| |
using System;
using System.Drawing;
using Microsoft.DirectX;
using D3D = Microsoft.DirectX.Direct3D;
namespace Voyage.Terraingine.DataCore.VertexFormats
{
/// <summary>
/// A struct for providing a custom vertex format that provides vertex position and normal information.
/// Also contains information for eight sets of texture coordinates.
/// </summary>
public struct PositionNormalTextured8
{
    #region Data Members
    /// <summary>X-coordinate of the vertex.</summary>
    public float X;

    /// <summary>Y-coordinate of the vertex.</summary>
    public float Y;

    /// <summary>Z-coordinate of the vertex.</summary>
    public float Z;

    /// <summary>X-coordinate of the vertex normal.</summary>
    public float Nx;

    /// <summary>Y-coordinate of the vertex normal.</summary>
    public float Ny;

    /// <summary>Z-coordinate of the vertex normal.</summary>
    public float Nz;

    /// <summary>U-coordinate of the first texture.</summary>
    public float Tu1;

    /// <summary>V-coordinate of the first texture.</summary>
    public float Tv1;

    /// <summary>U-coordinate of the second texture.</summary>
    public float Tu2;

    /// <summary>V-coordinate of the second texture.</summary>
    public float Tv2;

    /// <summary>U-coordinate of the third texture.</summary>
    public float Tu3;

    /// <summary>V-coordinate of the third texture.</summary>
    public float Tv3;

    /// <summary>U-coordinate of the fourth texture.</summary>
    public float Tu4;

    /// <summary>V-coordinate of the fourth texture.</summary>
    public float Tv4;

    /// <summary>U-coordinate of the fifth texture.</summary>
    public float Tu5;

    /// <summary>V-coordinate of the fifth texture.</summary>
    public float Tv5;

    /// <summary>U-coordinate of the sixth texture.</summary>
    public float Tu6;

    /// <summary>V-coordinate of the sixth texture.</summary>
    public float Tv6;

    /// <summary>U-coordinate of the seventh texture.</summary>
    public float Tu7;

    /// <summary>V-coordinate of the seventh texture.</summary>
    public float Tv7;

    /// <summary>U-coordinate of the eighth texture.</summary>
    public float Tu8;

    /// <summary>V-coordinate of the eighth texture.</summary>
    public float Tv8;

    /// <summary>
    /// Format of the Direct3D vertex (position + normal + eight texture coordinate sets).
    /// </summary>
    public static readonly D3D.VertexFormats Format =
        D3D.VertexFormats.Position | D3D.VertexFormats.Normal | D3D.VertexFormats.Texture8;

    /// <summary>
    /// Stride size of the Direct3D vertex, in bytes.
    /// </summary>
    public static readonly int StrideSize = DXHelp.GetTypeSize( typeof( PositionNormalTextured8 ) );

    /// <summary>
    /// Number of textures the vertex can hold.
    /// </summary>
    public static readonly int numTextures = 8;
    #endregion

    #region Properties
    /// <summary>
    /// Gets or sets the position of the vertex.
    /// </summary>
    public Vector3 Position
    {
        get { return new Vector3( X, Y, Z ); }
        set
        {
            X = value.X;
            Y = value.Y;
            Z = value.Z;
        }
    }

    /// <summary>
    /// Gets or sets the vertex normal.
    /// </summary>
    public Vector3 Normal
    {
        get { return new Vector3( Nx, Ny, Nz ); }
        set
        {
            Nx = value.X;
            Ny = value.Y;
            Nz = value.Z;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the first texture.
    /// </summary>
    public Vector2 Vector21
    {
        get { return new Vector2( Tu1, Tv1 ); }
        set
        {
            Tu1 = value.X;
            Tv1 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the second texture.
    /// </summary>
    public Vector2 Vector22
    {
        get { return new Vector2( Tu2, Tv2 ); }
        set
        {
            Tu2 = value.X;
            Tv2 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the third texture.
    /// </summary>
    public Vector2 Vector23
    {
        get { return new Vector2( Tu3, Tv3 ); }
        set
        {
            Tu3 = value.X;
            Tv3 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the fourth texture.
    /// </summary>
    public Vector2 Vector24
    {
        get { return new Vector2( Tu4, Tv4 ); }
        set
        {
            Tu4 = value.X;
            Tv4 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the fifth texture.
    /// </summary>
    public Vector2 Vector25
    {
        get { return new Vector2( Tu5, Tv5 ); }
        set
        {
            Tu5 = value.X;
            Tv5 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the sixth texture.
    /// </summary>
    public Vector2 Vector26
    {
        get { return new Vector2( Tu6, Tv6 ); }
        set
        {
            Tu6 = value.X;
            Tv6 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the seventh texture.
    /// </summary>
    public Vector2 Vector27
    {
        get { return new Vector2( Tu7, Tv7 ); }
        set
        {
            Tu7 = value.X;
            Tv7 = value.Y;
        }
    }

    /// <summary>
    /// Gets or sets the texture coordinates for the eighth texture.
    /// </summary>
    public Vector2 Vector28
    {
        get { return new Vector2( Tu8, Tv8 ); }
        set
        {
            Tu8 = value.X;
            Tv8 = value.Y;
        }
    }
    #endregion

    #region Methods
    /// <summary>
    /// Creates a vertex with a position, a vertex normal, and eight sets of texture coordinates.
    /// </summary>
    /// <param name="x">X-coordinate of the vertex position.</param>
    /// <param name="y">Y-coordinate of the vertex position.</param>
    /// <param name="z">Z-coordinate of the vertex position.</param>
    /// <param name="nX">X-coordinate of the vertex normal direction.</param>
    /// <param name="nY">Y-coordinate of the vertex normal direction.</param>
    /// <param name="nZ">Z-coordinate of the vertex normal direction.</param>
    /// <param name="u1">U-coordinate for the first texture.</param>
    /// <param name="v1">V-coordinate for the first texture.</param>
    /// <param name="u2">U-coordinate for the second texture.</param>
    /// <param name="v2">V-coordinate for the second texture.</param>
    /// <param name="u3">U-coordinate for the third texture.</param>
    /// <param name="v3">V-coordinate for the third texture.</param>
    /// <param name="u4">U-coordinate for the fourth texture.</param>
    /// <param name="v4">V-coordinate for the fourth texture.</param>
    /// <param name="u5">U-coordinate for the fifth texture.</param>
    /// <param name="v5">V-coordinate for the fifth texture.</param>
    /// <param name="u6">U-coordinate for the sixth texture.</param>
    /// <param name="v6">V-coordinate for the sixth texture.</param>
    /// <param name="u7">U-coordinate for the seventh texture.</param>
    /// <param name="v7">V-coordinate for the seventh texture.</param>
    /// <param name="u8">U-coordinate for the eighth texture.</param>
    /// <param name="v8">V-coordinate for the eighth texture.</param>
    public PositionNormalTextured8( float x, float y, float z, float nX, float nY, float nZ,
        float u1, float v1, float u2, float v2, float u3, float v3, float u4, float v4,
        float u5, float v5, float u6, float v6, float u7, float v7, float u8, float v8 )
    {
        X = x;
        Y = y;
        Z = z;
        Nx = nX;
        Ny = nY;
        Nz = nZ;
        Tu1 = u1;
        Tv1 = v1;
        Tu2 = u2;
        Tv2 = v2;
        Tu3 = u3;
        Tv3 = v3;
        Tu4 = u4;
        Tv4 = v4;
        Tu5 = u5;
        Tv5 = v5;
        Tu6 = u6;
        Tv6 = v6;
        Tu7 = u7;
        Tv7 = v7;
        Tu8 = u8;
        Tv8 = v8;
    }

    /// <summary>
    /// Creates a vertex with a position, a vertex normal, and eight sets of texture coordinates.
    /// </summary>
    /// <param name="position">Position of the vertex.</param>
    /// <param name="normal">Vertex normal direction for the vertex.</param>
    /// <param name="texCoords1">Coordinates for the first texture of the vertex.</param>
    /// <param name="texCoords2">Coordinates for the second texture of the vertex.</param>
    /// <param name="texCoords3">Coordinates for the third texture of the vertex.</param>
    /// <param name="texCoords4">Coordinates for the fourth texture of the vertex.</param>
    /// <param name="texCoords5">Coordinates for the fifth texture of the vertex.</param>
    /// <param name="texCoords6">Coordinates for the sixth texture of the vertex.</param>
    /// <param name="texCoords7">Coordinates for the seventh texture of the vertex.</param>
    /// <param name="texCoords8">Coordinates for the eighth texture of the vertex.</param>
    public PositionNormalTextured8( Vector3 position, Vector3 normal, Vector2 texCoords1,
        Vector2 texCoords2, Vector2 texCoords3, Vector2 texCoords4,
        Vector2 texCoords5, Vector2 texCoords6, Vector2 texCoords7,
        Vector2 texCoords8 )
    {
        X = position.X;
        Y = position.Y;
        Z = position.Z;
        Nx = normal.X;
        Ny = normal.Y;
        Nz = normal.Z;
        // BUG FIX: Tu1-Tu3 were previously assigned texCoordsN.Y (copy-paste
        // error), which corrupted the U coordinate of the first three textures.
        Tu1 = texCoords1.X;
        Tv1 = texCoords1.Y;
        Tu2 = texCoords2.X;
        Tv2 = texCoords2.Y;
        Tu3 = texCoords3.X;
        Tv3 = texCoords3.Y;
        Tu4 = texCoords4.X;
        Tv4 = texCoords4.Y;
        Tu5 = texCoords5.X;
        Tv5 = texCoords5.Y;
        Tu6 = texCoords6.X;
        Tv6 = texCoords6.Y;
        Tu7 = texCoords7.X;
        Tv7 = texCoords7.Y;
        Tu8 = texCoords8.X;
        Tv8 = texCoords8.Y;
    }

    /// <summary>
    /// Gets the texture coordinates for the specified texture.
    /// </summary>
    /// <param name="texture">Zero-based index of the texture to get coordinates for.</param>
    /// <returns>Texture coordinates for the specified texture; (0, 0) for an out-of-range index.</returns>
    public Vector2 GetVector2( int texture )
    {
        Vector2 texCoords;

        switch ( texture + 1 )
        {
            case 1:
                texCoords = this.Vector21;
                break;
            case 2:
                texCoords = this.Vector22;
                break;
            case 3:
                texCoords = this.Vector23;
                break;
            case 4:
                texCoords = this.Vector24;
                break;
            case 5:
                texCoords = this.Vector25;
                break;
            case 6:
                texCoords = this.Vector26;
                break;
            case 7:
                texCoords = this.Vector27;
                break;
            case 8:
                texCoords = this.Vector28;
                break;
            default:
                texCoords = new Vector2(0, 0);
                break;
        }

        return texCoords;
    }

    /// <summary>
    /// Sets the texture coordinates for the specified texture.
    /// </summary>
    /// <param name="texCoords">Texture coordinates for the texture.</param>
    /// <param name="texture">Zero-based index of the texture to update; out-of-range indices are ignored.</param>
    public void SetTextureCoordinates( Vector2 texCoords, int texture )
    {
        switch ( texture + 1 )
        {
            case 1:
                this.Vector21 = texCoords;
                break;
            case 2:
                this.Vector22 = texCoords;
                break;
            case 3:
                this.Vector23 = texCoords;
                break;
            case 4:
                this.Vector24 = texCoords;
                break;
            case 5:
                this.Vector25 = texCoords;
                break;
            case 6:
                this.Vector26 = texCoords;
                break;
            case 7:
                this.Vector27 = texCoords;
                break;
            case 8:
                this.Vector28 = texCoords;
                break;
            default:
                break;
        }
    }

    /// <summary>
    /// Sets the position of the vertex.
    /// </summary>
    /// <param name="position">The new position of the vertex.</param>
    public void SetPosition( Vector3 position )
    {
        Position = position;
    }

    /// <summary>
    /// Sets the normal of the vertex.
    /// </summary>
    /// <param name="normal">The new normal of the vertex.</param>
    public void SetNormal( Vector3 normal )
    {
        Normal = normal;
    }
    #endregion
};
}
| |
// Copyright 2011 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using JetBrains.Annotations;
using System;
using System.Diagnostics;
using System.Globalization;
namespace NodaTime.Utility
{
/// <summary>
/// Helper static methods for argument/state validation.
/// </summary>
internal static class Preconditions
{
    /// <summary>
    /// Returns the given argument after checking whether it's null. This is useful for putting
    /// nullity checks in parameters which are passed to base class constructors.
    /// </summary>
    [ContractAnnotation("argument:null => halt")]
    internal static T CheckNotNull<T>(T argument, [InvokerParameterName] string paramName) where T : class
    {
        if (argument is null)
        {
            throw new ArgumentNullException(paramName);
        }
        return argument;
    }

    /// <summary>
    /// Like <see cref="CheckNotNull{T}"/>, but only checked in debug builds. (This means it can't return anything...)
    /// </summary>
    [Conditional("DEBUG")]
    [ContractAnnotation("argument:null => halt")]
    internal static void DebugCheckNotNull<T>(T argument, [InvokerParameterName] string paramName) where T : class
    {
#if DEBUG
        // Throws DebugPreconditionException rather than ArgumentNullException so tests
        // that expect argument exceptions can't accidentally pass via a debug-only check.
        if (argument is null)
        {
            throw new DebugPreconditionException($"{paramName} is null");
        }
#endif
    }

    // Note: this overload exists for performance reasons. It would be reasonable to call the
    // version using "long" values, but we'd incur conversions on every call. This method
    // may well be called very often.
    /// <summary>
    /// Checks that <paramref name="value"/> lies within the inclusive range
    /// [<paramref name="minInclusive"/>, <paramref name="maxInclusive"/>].
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">The value is outside the range.</exception>
    internal static void CheckArgumentRange([InvokerParameterName] string paramName, int value, int minInclusive, int maxInclusive)
    {
        if (value < minInclusive || value > maxInclusive)
        {
            ThrowArgumentOutOfRangeException(paramName, value, minInclusive, maxInclusive);
        }
    }

    /// <summary>
    /// Checks that <paramref name="value"/> lies within the inclusive range
    /// [<paramref name="minInclusive"/>, <paramref name="maxInclusive"/>].
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">The value is outside the range.</exception>
    internal static void CheckArgumentRange([InvokerParameterName] string paramName, long value, long minInclusive, long maxInclusive)
    {
        if (value < minInclusive || value > maxInclusive)
        {
            ThrowArgumentOutOfRangeException(paramName, value, minInclusive, maxInclusive);
        }
    }

    /// <summary>
    /// Checks that <paramref name="value"/> lies within the inclusive range
    /// [<paramref name="minInclusive"/>, <paramref name="maxInclusive"/>]. NaN is always rejected.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">The value is outside the range, or is NaN.</exception>
    internal static void CheckArgumentRange([InvokerParameterName] string paramName, double value, double minInclusive, double maxInclusive)
    {
        // NaN compares false against everything, so it needs an explicit check.
        if (value < minInclusive || value > maxInclusive || double.IsNaN(value))
        {
            ThrowArgumentOutOfRangeException(paramName, value, minInclusive, maxInclusive);
        }
    }

    // Shared throw helper, kept out of the range-check callers above so that they stay small.
    private static void ThrowArgumentOutOfRangeException<T>([InvokerParameterName] string paramName, T value, T minInclusive, T maxInclusive)
    {
        throw new ArgumentOutOfRangeException(paramName, value,
            $"Value should be in range [{minInclusive}-{maxInclusive}]");
    }

    // This method exists for cases where we know we want to throw an exception, but we need the compiler to think it
    // *could* return something. (Typically switch expressions.)
    internal static T ThrowArgumentOutOfRangeExceptionWithReturn<T>([InvokerParameterName] string paramName, T value, T minInclusive, T maxInclusive)
    {
        throw new ArgumentOutOfRangeException(paramName, value,
            $"Value should be in range [{minInclusive}-{maxInclusive}]");
    }

    /// <summary>
    /// Range change to perform just within debug builds. This is typically for internal sanity checking, where we normally
    /// trusting the argument value to be valid, and adding a check just for the sake of documentation - and to help find
    /// internal bugs during development.
    /// </summary>
    [Conditional("DEBUG")]
    internal static void DebugCheckArgumentRange([InvokerParameterName] string paramName, int value, int minInclusive, int maxInclusive)
    {
#if DEBUG
        if (value < minInclusive || value > maxInclusive)
        {
            throw new DebugPreconditionException($"Value {value} for {paramName} is out of range [{minInclusive}-{maxInclusive}]");
        }
#endif
    }

    /// <summary>
    /// Range change to perform just within debug builds. This is typically for internal sanity checking, where we normally
    /// trusting the argument value to be valid, and adding a check just for the sake of documentation - and to help find
    /// internal bugs during development.
    /// </summary>
    [Conditional("DEBUG")]
    internal static void DebugCheckArgumentRange([InvokerParameterName] string paramName, long value, long minInclusive, long maxInclusive)
    {
#if DEBUG
        if (value < minInclusive || value > maxInclusive)
        {
            throw new DebugPreconditionException($"Value {value} for {paramName} is out of range [{minInclusive}-{maxInclusive}]");
        }
#endif
    }

    /// <summary>
    /// Debug-only argument check with a composite-formatted failure message.
    /// </summary>
    [ContractAnnotation("expression:false => halt")]
    [Conditional("DEBUG")]
    internal static void DebugCheckArgument(bool expression, [InvokerParameterName] string parameter, string messageFormat, params object[] messageArgs)
    {
#if DEBUG
        if (!expression)
        {
            string message = string.Format(CultureInfo.CurrentCulture, messageFormat, messageArgs);
            throw new DebugPreconditionException($"{message} (parameter name: {parameter})");
        }
#endif
    }

    /// <summary>
    /// Throws <see cref="ArgumentException"/> with the given message when the condition is false.
    /// </summary>
    [ContractAnnotation("expression:false => halt")]
    internal static void CheckArgument(bool expression, [InvokerParameterName] string parameter, string message)
    {
        if (!expression)
        {
            throw new ArgumentException(message, parameter);
        }
    }

    /// <summary>
    /// As <see cref="CheckArgument(bool, string, string)"/>, formatting the message with a single argument.
    /// The generic overloads avoid a params-array allocation for the common cases.
    /// </summary>
    [ContractAnnotation("expression:false => halt")]
    [StringFormatMethod("messageFormat")]
    internal static void CheckArgument<T>(bool expression, [InvokerParameterName] string parameter, string messageFormat, T messageArg)
    {
        if (!expression)
        {
            string message = string.Format(CultureInfo.CurrentCulture, messageFormat, messageArg);
            throw new ArgumentException(message, parameter);
        }
    }

    /// <summary>
    /// As <see cref="CheckArgument(bool, string, string)"/>, formatting the message with two arguments.
    /// </summary>
    [ContractAnnotation("expression:false => halt")]
    [StringFormatMethod("messageFormat")]
    internal static void CheckArgument<T1, T2>(bool expression, string parameter, string messageFormat, T1 messageArg1, T2 messageArg2)
    {
        if (!expression)
        {
            string message = string.Format(CultureInfo.CurrentCulture, messageFormat, messageArg1, messageArg2);
            throw new ArgumentException(message, parameter);
        }
    }

    /// <summary>
    /// Throws <see cref="InvalidOperationException"/> with the given message when the condition is false.
    /// </summary>
    internal static void CheckState(bool expression, string message)
    {
        if (!expression)
        {
            throw new InvalidOperationException(message);
        }
    }

    /// <summary>
    /// Debug-only state check; throws <see cref="DebugPreconditionException"/> when the condition is false.
    /// </summary>
    [ContractAnnotation("expression:false => halt")]
    [Conditional("DEBUG")]
    internal static void DebugCheckState(bool expression, string message)
    {
#if DEBUG
        if (!expression)
        {
            throw new DebugPreconditionException(message);
        }
#endif
    }
}
#if DEBUG
// This is an internal exception very deliberately, and we don't need other constructor forms.
#pragma warning disable CA1032 // Standard exception constructors
#pragma warning disable CA1064 // Exceptions should be public
/// <summary>
/// Exception which occurs only for preconditions violated in debug mode. This is
/// thrown from the Preconditions.Debug* methods to avoid them throwing exceptions
/// which might cause tests to pass. The type doesn't even exist in non-debug configurations,
/// so even though the Preconditions.Debug* methods *do* exist, they can't actually do anything.
/// That's fine, as Preconditions is an internal class; we don't expect to be building
/// an assembly which might use this in debug configuration against a non-debug Noda Time or vice versa.
/// </summary>
internal class DebugPreconditionException : Exception
{
    // Message-only constructor; other standard exception constructor forms are deliberately omitted.
    internal DebugPreconditionException(string message) : base(message)
    {
    }
}
#pragma warning restore CA1064
#pragma warning restore CA1032
#endif
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
//
// Symbolic names of BCD Elements taken from Geoff Chappell's website:
// http://www.geoffchappell.com/viewer.htm?doc=notes/windows/boot/bcd/elements.htm
//
//
using System;
using System.Globalization;
namespace DiscUtils.BootConfig
{
/// <summary>
/// Represents an element in a Boot Configuration Database object.
/// </summary>
public class Element
{
    // Identity of the element: the application type of the owning object, the
    // 32-bit element identifier (which encodes both element class and value
    // format), the owning object's GUID, and the storage that backs the BCD.
    private readonly ApplicationType _appType;
    private readonly int _identifier;
    private readonly Guid _obj;
    private readonly BaseStorage _storage;

    // Lazily-loaded cached value; populated on first access of Value.
    private ElementValue _value;

    /// <summary>
    /// Initializes a new element bound to a BCD object in the given storage.
    /// </summary>
    /// <param name="storage">The storage backing the BCD.</param>
    /// <param name="obj">Identity of the BCD object containing the element.</param>
    /// <param name="appType">Application type of the containing object (affects friendly naming).</param>
    /// <param name="identifier">The 32-bit element identifier.</param>
    internal Element(BaseStorage storage, Guid obj, ApplicationType appType, int identifier)
    {
        _storage = storage;
        _obj = obj;
        _appType = appType;
        _identifier = identifier;
    }

    /// <summary>
    /// Gets the class of the element.
    /// </summary>
    public ElementClass Class
    {
        // bits 28-31 of the identifier encode the element class
        get { return (ElementClass)((_identifier >> 28) & 0xF); }
    }

    /// <summary>
    /// Gets the element's format.
    /// </summary>
    public ElementFormat Format
    {
        // bits 24-27 of the identifier encode the value format
        get { return (ElementFormat)((_identifier >> 24) & 0xF); }
    }

    /// <summary>
    /// Gets the friendly name of the element, if any.
    /// </summary>
    public string FriendlyName
    {
        get { return "{" + IdentifierToName(_appType, _identifier) + "}"; }
    }

    /// <summary>
    /// Gets or sets the element's value.
    /// </summary>
    public ElementValue Value
    {
        get
        {
            // load lazily from storage on first access
            if (_value == null)
            {
                _value = LoadValue();
            }

            return _value;
        }

        set
        {
            // the assigned value's format must match the format encoded in the identifier
            if (Format != value.Format)
            {
                throw new ArgumentException(string.Format(CultureInfo.InvariantCulture,
                    "Attempt to assign {1} value to {0} format element", Format, value.Format));
            }

            _value = value;
            WriteValue();
        }
    }

    /// <summary>
    /// Gets the element's id as a hex string.
    /// </summary>
    /// <returns>A hex string.</returns>
    public override string ToString()
    {
        return _identifier.ToString("X8", CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Maps a well-known element identifier to its symbolic BCD name (e.g. "device",
    /// "osdevice"), falling back to the identifier's hex string when unknown.
    /// Application-class identifiers are interpreted according to the application type.
    /// </summary>
    private static string IdentifierToName(ApplicationType appType, int identifier)
    {
        ElementClass idClass = GetClass(identifier);
        if (idClass == ElementClass.Library)
        {
            switch (identifier)
            {
                case 0x11000001:
                    return "device";
                case 0x12000002:
                    return "path";
                case 0x12000004:
                    return "description";
                case 0x12000005:
                    return "locale";
                case 0x14000006:
                    return "inherit";
                case 0x15000007:
                    return "truncatememory";
                case 0x14000008:
                    return "recoverysequence";
                case 0x16000009:
                    return "recoveryenabled";
                case 0x1700000A:
                    return "badmemorylist";
                case 0x1600000B:
                    return "badmemoryaccess";
                case 0x1500000C:
                    return "firstmegabytepolicy";
                case 0x16000010:
                    return "bootdebug";
                case 0x15000011:
                    return "debugtype";
                case 0x15000012:
                    return "debugaddress";
                case 0x15000013:
                    return "debugport";
                case 0x15000014:
                    return "baudrate";
                case 0x15000015:
                    return "channel";
                case 0x12000016:
                    return "targetname";
                case 0x16000017:
                    return "noumex";
                case 0x15000018:
                    return "debugstart";
                case 0x16000020:
                    return "bootems";
                case 0x15000022:
                    return "emsport";
                case 0x15000023:
                    return "emsbaudrate";
                case 0x12000030:
                    return "loadoptions";
                case 0x16000040:
                    return "advancedoptions";
                case 0x16000041:
                    return "optionsedit";
                case 0x15000042:
                    return "keyringaddress";
                case 0x16000046:
                    return "graphicsmodedisabled";
                case 0x15000047:
                    return "configaccesspolicy";
                case 0x16000048:
                    return "nointegritychecks";
                case 0x16000049:
                    return "testsigning";
                case 0x16000050:
                    return "extendedinput";
                case 0x15000051:
                    return "initialconsoleinput";
            }
        }
        else if (idClass == ElementClass.Application)
        {
            // application-class identifiers are scoped by the owning application type
            switch (appType)
            {
                case ApplicationType.FirmwareBootManager:
                case ApplicationType.BootManager:
                    switch (identifier)
                    {
                        case 0x24000001:
                            return "displayorder";
                        case 0x24000002:
                            return "bootsequence";
                        case 0x23000003:
                            return "default";
                        case 0x25000004:
                            return "timeout";
                        case 0x26000005:
                            return "resume";
                        case 0x23000006:
                            return "resumeobject";
                        case 0x24000010:
                            return "toolsdisplayorder";
                        case 0x26000020:
                            return "displaybootmenu";
                        case 0x26000021:
                            return "noerrordisplay";
                        case 0x21000022:
                            return "bcddevice";
                        case 0x22000023:
                            return "bcdfilepath";
                        case 0x27000030:
                            return "customactions";
                    }

                    break;
                case ApplicationType.OsLoader:
                    switch (identifier)
                    {
                        case 0x21000001:
                            return "osdevice";
                        case 0x22000002:
                            return "systemroot";
                        case 0x23000003:
                            return "resumeobject";
                        case 0x26000010:
                            return "detecthal";
                        case 0x22000011:
                            return "kernel";
                        case 0x22000012:
                            return "hal";
                        case 0x22000013:
                            return "dbgtransport";
                        case 0x25000020:
                            return "nx";
                        case 0x25000021:
                            return "pae";
                        case 0x26000022:
                            return "winpe";
                        case 0x26000024:
                            return "nocrashautoreboot";
                        case 0x26000025:
                            return "lastknowngood";
                        case 0x26000026:
                            return "oslnointegritychecks";
                        case 0x26000027:
                            return "osltestsigning";
                        case 0x26000030:
                            return "nolowmem";
                        case 0x25000031:
                            return "removememory";
                        case 0x25000032:
                            return "increaseuserva";
                        case 0x25000033:
                            return "perfmem";
                        case 0x26000040:
                            return "vga";
                        case 0x26000041:
                            return "quietboot";
                        case 0x26000042:
                            return "novesa";
                        case 0x25000050:
                            return "clustermodeaddressing";
                        case 0x26000051:
                            return "usephysicaldestination";
                        case 0x25000052:
                            return "restrictapiccluster";
                        case 0x26000060:
                            return "onecpu";
                        case 0x25000061:
                            return "numproc";
                        case 0x26000062:
                            return "maxproc";
                        case 0x25000063:
                            return "configflags";
                        case 0x26000070:
                            return "usefirmwarepcisettings";
                        case 0x25000071:
                            return "msi";
                        case 0x25000072:
                            return "pciexpress";
                        case 0x25000080:
                            return "safeboot";
                        case 0x26000081:
                            return "safebootalternateshell";
                        case 0x26000090:
                            return "bootlog";
                        case 0x26000091:
                            return "sos";
                        case 0x260000A0:
                            return "debug";
                        case 0x260000A1:
                            return "halbreakpoint";
                        case 0x260000B0:
                            return "ems";
                        case 0x250000C0:
                            return "forcefailure";
                        case 0x250000C1:
                            return "driverloadfailurepolicy";
                        case 0x250000E0:
                            return "bootstatuspolicy";
                    }

                    break;
                case ApplicationType.Resume:
                    switch (identifier)
                    {
                        case 0x21000001:
                            return "filedevice";
                        case 0x22000002:
                            return "filepath";
                        case 0x26000003:
                            return "customsettings";
                        case 0x26000004:
                            return "pae";
                        case 0x21000005:
                            return "associatedosdevice";
                        case 0x26000006:
                            return "debugoptionenabled";
                    }

                    break;
                case ApplicationType.MemoryDiagnostics:
                    switch (identifier)
                    {
                        case 0x25000001:
                            return "passcount";
                        case 0x25000002:
                            return "testmix";
                        case 0x25000003:
                            return "failurecount";
                        case 0x25000004:
                            return "testtofail";
                    }

                    break;
                case ApplicationType.NtLoader:
                case ApplicationType.SetupLoader:
                    switch (identifier)
                    {
                        case 0x22000001:
                            return "bpbstring";
                    }

                    break;
                case ApplicationType.Startup:
                    switch (identifier)
                    {
                        case 0x26000001:
                            return "pxesoftreboot";
                        case 0x22000002:
                            return "applicationname";
                    }

                    break;
            }
        }
        else if (idClass == ElementClass.Device)
        {
            switch (identifier)
            {
                case 0x35000001:
                    return "ramdiskimageoffset";
                case 0x35000002:
                    return "ramdisktftpclientport";
                case 0x31000003:
                    return "ramdisksdidevice";
                case 0x32000004:
                    return "ramdisksdipath";
                case 0x35000005:
                    return "ramdiskimagelength";
                case 0x36000006:
                    return "exportascd";
                case 0x35000007:
                    return "ramdisktftpblocksize";
            }
        }
        else if (idClass == ElementClass.Hidden)
        {
            switch (identifier)
            {
                case 0x45000001:
                    return "devicetype";
                case 0x42000002:
                    return "apprelativepath";
                case 0x42000003:
                    return "ramdiskdevicerelativepath";
                case 0x46000004:
                    return "omitosloaderelements";
                case 0x46000010:
                    return "recoveryos";
            }
        }

        // unknown identifier: fall back to its 8-digit hex representation
        return identifier.ToString("X8", CultureInfo.InvariantCulture);
    }

    /// <summary>
    /// Extracts the element class from bits 28-31 of an identifier.
    /// </summary>
    private static ElementClass GetClass(int identifier)
    {
        return (ElementClass)((identifier >> 28) & 0xF);
    }

    /// <summary>
    /// Reads the element's value from storage, decoding it according to the
    /// format encoded in the identifier.
    /// </summary>
    private ElementValue LoadValue()
    {
        switch (Format)
        {
            case ElementFormat.Boolean:
                return new BooleanElementValue(_storage.GetBinary(_obj, _identifier));

            case ElementFormat.Device:
                return new DeviceElementValue(_storage.GetBinary(_obj, _identifier));

            case ElementFormat.Guid:
                return new GuidElementValue(_storage.GetString(_obj, _identifier));

            case ElementFormat.GuidList:
                return new GuidListElementValue(_storage.GetMultiString(_obj, _identifier));

            case ElementFormat.Integer:
                return new IntegerElementValue(_storage.GetBinary(_obj, _identifier));

            case ElementFormat.IntegerList:
                return new IntegerListElementValue(_storage.GetBinary(_obj, _identifier));

            case ElementFormat.String:
                return new StringElementValue(_storage.GetString(_obj, _identifier));

            default:
                throw new NotImplementedException("Unknown element format: " + Format);
        }
    }

    /// <summary>
    /// Writes the cached value back to storage, encoding it according to its format.
    /// </summary>
    private void WriteValue()
    {
        switch (_value.Format)
        {
            case ElementFormat.Boolean:
                _storage.SetBinary(_obj, _identifier, ((BooleanElementValue)_value).GetBytes());
                break;

            case ElementFormat.Device:
                _storage.SetBinary(_obj, _identifier, ((DeviceElementValue)_value).GetBytes());
                break;

            case ElementFormat.GuidList:
                _storage.SetMultiString(_obj, _identifier, ((GuidListElementValue)_value).GetGuidStrings());
                break;

            case ElementFormat.Integer:
                _storage.SetBinary(_obj, _identifier, ((IntegerElementValue)_value).GetBytes());
                break;

            case ElementFormat.IntegerList:
                _storage.SetBinary(_obj, _identifier, ((IntegerListElementValue)_value).GetBytes());
                break;

            case ElementFormat.Guid:
            case ElementFormat.String:
                // both formats are persisted via their string representation
                _storage.SetString(_obj, _identifier, _value.ToString());
                break;

            default:
                throw new NotImplementedException("Unknown element format: " + Format);
        }
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.