context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
//! \file ImageGCC.cs
//! \date Mon Jun 29 05:12:05 2015
//! \brief Ai5Win engine image format.
//
// Copyright (C) 2015 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.IO;
using System.Windows.Media;
using GameRes.Compression;
using GameRes.Utility;
namespace GameRes.Formats.Elf
{
internal class GccMetaData : ImageMetaData
{
public uint Signature;
}
[Export(typeof(ImageFormat))]
public class GccFormat : ImageFormat
{
public override string Tag { get { return "GCC"; } }
public override string Description { get { return "AI5WIN engine image format"; } }
public override uint Signature { get { return 0x6d343252; } } // 'R24m'
public GccFormat ()
{
// 'R24m', 'R24n', 'G24m', 'G24n'
Signatures = new uint[] { 0x6d343252, 0x6E343252, 0x6D343247, 0x6E343247 };
}
public override ImageMetaData ReadMetaData (IBinaryStream stream)
{
var header = stream.ReadHeader (12);
return new GccMetaData
{
Width = header.ToUInt16 (8),
Height = header.ToUInt16 (10),
BPP = 'm' == header[3] ? 32 : 24,
OffsetX = header.ToInt16 (4),
OffsetY = header.ToInt16 (6),
Signature = header.ToUInt32 (0),
};
}
public override ImageData Read (IBinaryStream stream, ImageMetaData info)
{
var meta = (GccMetaData)info;
var reader = new Reader (stream.AsStream, meta);
reader.Unpack();
return ImageData.Create (info, reader.Format, null, reader.Data);
}
public override void Write (Stream file, ImageData image)
{
throw new NotImplementedException ("GccFormat.Write not implemented");
}
internal class Reader
{
byte[] m_input;
GccMetaData m_info;
byte[] m_output;
int m_width;
int m_height;
int m_alpha_w;
int m_alpha_h;
public PixelFormat Format { get; private set; }
public byte[] Data { get { return m_output; } }
public Reader (Stream input, GccMetaData info)
{
m_input = new byte[input.Length];
input.Read (m_input, 0, m_input.Length);
m_info = info;
m_width = (int)m_info.Width;
m_height = (int)m_info.Height;
}
public void Unpack ()
{
switch (m_info.Signature)
{
case 0x6E343247: UnpackNormal (LzssUnpack); break; // G24n
case 0x6D343247: UnpackMasked (LzssUnpack); break; // G24m
case 0x6E343252: UnpackNormal (AltUnpack); break; // R24n
case 0x6D343252: UnpackMasked (AltUnpack); break; // R24m
default: throw new NotSupportedException();
}
}
private void UnpackNormal (Action<int> unpacker)
{
unpacker (0x14);
FlipPixels (m_width*3);
Format = PixelFormats.Bgr24;
}
private void UnpackMasked (Action<int> unpacker)
{
unpacker (0x20);
var alpha = UnpackAlpha();
if (m_alpha_w < (m_info.OffsetX + m_width) || m_alpha_h < (m_info.OffsetY + m_height))
{
FlipPixels (m_width*3);
Format = PixelFormats.Bgr24;
}
else
{
Convert24To32 (alpha);
Format = PixelFormats.Bgra32;
}
}
private void FlipPixels (int stride)
{
// flip pixels vertically
var pixels = new byte[m_output.Length];
int dst = 0;
for (int src = stride * (m_height-1); src >= 0; src -= stride)
{
Buffer.BlockCopy (m_output, src, pixels, dst, stride);
dst += stride;
}
m_output = pixels;
}
private void Convert24To32 (byte[] alpha)
{
Debug.Assert (m_alpha_w >= (m_info.OffsetX + m_width) && m_alpha_h >= (m_info.OffsetY + m_height));
int src_stride = m_width * 3;
var pixels = new byte[m_width * m_height * 4];
int dst = 0;
int alpha_row = m_alpha_w * (m_alpha_h - m_info.OffsetY - 1);
for (int row = m_width * (m_height-1); row >= 0; row -= m_width)
{
int src = row*3;
for (int x = 0; x < m_width; ++x)
{
pixels[dst++] = m_output[src++];
pixels[dst++] = m_output[src++];
pixels[dst++] = m_output[src++];
pixels[dst++] = alpha[alpha_row + m_info.OffsetX + x];
}
alpha_row -= m_alpha_w;
}
m_output = pixels;
}
void LzssUnpack (int offset)
{
int out_length = m_width * m_height * 3;
using (var input = new MemoryStream (m_input, offset, m_input.Length-offset))
using (var lzss = new LzssReader (input, (int)input.Length, out_length))
{
lzss.Unpack();
m_output = lzss.Data;
}
}
int m_index;
int m_current;
int m_mask;
void ResetBitInput (int idx)
{
m_index = idx;
m_mask = 0x80;
}
bool NextBit ()
{
m_mask <<= 1;
if (0x100 == m_mask)
{
m_current = m_input[m_index++];
m_mask = 1;
}
return 0 != (m_current & m_mask);
}
byte[] UnpackAlpha () // sub_444FF0
{
m_alpha_w = LittleEndian.ToUInt16 (m_input, 0x18);
m_alpha_h = LittleEndian.ToUInt16 (m_input, 0x1A);
int total = m_alpha_w * m_alpha_h;
var alpha = new byte[total];
int offset = 0x20 + LittleEndian.ToInt32 (m_input, 0x0C);
ResetBitInput (offset);
int src = offset + LittleEndian.ToInt32 (m_input, 0x1C);
int dst = 0;
while (dst < total)
{
if (NextBit())
{
int count = ReadCount();
byte v = m_input[src++];
for (int i = 0; i < count; ++ i)
{
alpha[dst++] = v;
}
}
else
{
alpha[dst++] = m_input[src++];
}
}
return alpha;
}
int ReadCount () // sub_444F60
{
int result = 1;
int bit_count = 0;
while (!NextBit())
++bit_count;
while (bit_count != 0)
{
--bit_count;
result <<= 1;
if (NextBit())
result |= 1;
}
return result;
}
int m_dst;
private void AltUnpack (int offset) // sub_445620
{
byte[] chunk = new byte[0x10001];
int src = offset + LittleEndian.ToInt32 (m_input, 0x10); // within m_input
ResetBitInput (offset);
int total = 3 * m_width * m_height;
m_output = new byte[total];
m_dst = 0;
int dst = 0;
while (dst < total)
{
int chunk_size = Math.Min (total - dst, 0xffff);
if (NextBit())
{
src = ReadCompressedChunk (src, chunk, chunk_size + 2);
DecodeChunk (chunk, chunk_size);
}
else
{
src = ReadRawChunk (src, chunk_size);
}
dst += chunk_size;
}
return;
}
ushort[] v15 = new ushort[0x100];
ushort[] v16 = new ushort[0x100];
ushort[] v17 = new ushort[0x10000];
void DecodeChunk (byte[] chunk, int chunk_size) // sub_444E40
{
for (int i = 0; i < v15.Length; ++i)
v15[i] = 0;
for (int i = 0; i < chunk_size; ++i)
++v15[chunk[2+i]];
ushort v7 = 0;
for (int r = 0; r < 0x100; ++r)
{
v16[r] = v7;
v7 += v15[r];
v15[r] = 0;
}
for (int v9 = 0; v9 < chunk_size; ++v9)
{
int v10 = chunk[2+v9];
int r = v15[v10] + v16[v10];
v17[r] = (ushort)v9;
v15[v10]++;
}
int a3 = LittleEndian.ToUInt16 (chunk, 0);
int v12 = v17[a3];
for (int i = 0; i < chunk_size; ++i)
{
m_output[m_dst++] = chunk[2+v12];
v12 = v17[v12];
}
}
int ReadCompressedChunk (int src, byte[] chunk, int chunk_size) // sub_4450E0
{
byte[] v33 = new byte[0x10];
byte[] v35 = new byte[0x10];
for (byte v6 = 0; v6 < 0x10; ++v6)
{
v33[v6] = v6;
v35[v6] = v6;
}
int v31 = 0;
sbyte v5 = -1;
while ( v31 < chunk_size )
{
int v16;
int v26;
if (!NextBit())
{
if (NextBit())
{
v26 = ReadCount();
v16 = v35[v26];
chunk[v31++] = (byte)v16;
}
else
{
if (NextBit())
{
int v27 = ReadCount();
if (NextBit())
v16 = (v5 - v27) & 0xff;
else
v16 = (v5 + v27) & 0xff;
}
else
{
v16 = m_input[src++];
}
chunk[v31++] = (byte)v16;
v26 = 0;
while (v35[v26] != v16)
{
++v26;
if (v26 >= 0x10)
{
v26 = 0xff;
break;
}
}
}
}
else
{
int v17;
int count = ReadCount();
if (NextBit())
{
v17 = 0;
v16 = v33[0];
}
else if (NextBit())
{
v17 = ReadCount();
v16 = v33[v17];
}
else
{
if (NextBit())
{
int v20 = ReadCount();
if (NextBit())
v16 = (v5 - v20) & 0xff;
else
v16 = (v5 + v20) & 0xff;
}
else
{
v16 = m_input[src++];
}
v17 = 0;
while (v33[v17] != v16)
{
++v17;
if (v17 >= 0x10)
{
v17 = 0xff;
break;
}
}
}
if (v17 != 0)
{
for (int i = v17 & 0xF; i != 0; --i)
v33[i] = v33[i-1];
v33[0] = (byte)v16;
}
for (int n = 0; n < count; ++n)
chunk[v31++] = (byte)v16;
v26 = 0;
while (v35[v26] != v16)
{
++v26;
if (v26 >= 0x10)
{
v26 = 0xff;
break;
}
}
}
if (0 != (byte)v26)
{
for (int k = v26 & 0xF; k != 0; --k)
v35[k] = v35[k-1];
v35[0] = (byte)v16;
}
v5 = (sbyte)v16;
}
return src;
}
int ReadRawChunk (int src, int chunk_size) // sub_445400
{
int n = 0;
while (n < chunk_size)
{
if (!NextBit())
{
m_output[m_dst++] = m_input[src++];
m_output[m_dst++] = m_input[src++];
m_output[m_dst++] = m_input[src++];
n += 3;
}
else
{
int count = ReadCount();
byte b = m_input[src++];
byte g = m_input[src++];
byte r = m_input[src++];
for (int i = 0; i < count; ++i)
{
m_output[m_dst++] = b;
m_output[m_dst++] = g;
m_output[m_dst++] = r;
}
n += 3 * count;
}
}
return src;
}
}
}
}
| |
using Microsoft.IdentityModel;
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.S2S.Tokens;
using Microsoft.SharePoint.Client;
using Microsoft.SharePoint.Client.EventReceivers;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IdentityModel.Selectors;
using System.IdentityModel.Tokens;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography.X509Certificates;
using System.Security.Principal;
using System.ServiceModel;
using System.Text;
using System.Web;
using System.Web.Configuration;
using System.Web.Script.Serialization;
using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction;
using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException;
using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration;
using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials;
namespace ECM.RecordsManagementWeb
{
public static class TokenHelper
{
#region public fields
/// <summary>
/// SharePoint principal.
/// </summary>
public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000";
/// <summary>
/// Lifetime of HighTrust access token, 12 hours.
/// </summary>
public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0);
#endregion public fields
#region public methods
/// <summary>
/// Retrieves the context token string from the specified request by looking for well-known parameter names in the
/// POSTed form parameters and the querystring. Returns null if no context token is found.
/// </summary>
/// <param name="request">HttpRequest in which to look for a context token</param>
/// <returns>The context token string</returns>
public static string GetContextTokenFromRequest(HttpRequest request)
{
return GetContextTokenFromRequest(new HttpRequestWrapper(request));
}
/// <summary>
/// Retrieves the context token string from the specified request by looking for well-known parameter names in the
/// POSTed form parameters and the querystring. Returns null if no context token is found.
/// </summary>
/// <param name="request">HttpRequest in which to look for a context token</param>
/// <returns>The context token string</returns>
public static string GetContextTokenFromRequest(HttpRequestBase request)
{
string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" };
foreach (string paramName in paramNames)
{
if (!string.IsNullOrEmpty(request.Form[paramName]))
{
return request.Form[paramName];
}
if (!string.IsNullOrEmpty(request.QueryString[paramName]))
{
return request.QueryString[paramName];
}
}
return null;
}
/// <summary>
/// Validate that a specified context token string is intended for this application based on the parameters
/// specified in web.config. Parameters used from web.config used for validation include ClientId,
/// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present,
/// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not
/// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an
/// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents
/// and a JsonWebSecurityToken based on the context token is returned.
/// </summary>
/// <param name="contextTokenString">The context token to validate</param>
/// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation.
/// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used
/// for validation instead of <paramref name="appHostName"/> .</param>
/// <returns>A JsonWebSecurityToken based on the context token.</returns>
public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null)
{
JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler();
SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString);
JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken;
SharePointContextToken token = SharePointContextToken.Create(jsonToken);
string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority;
int firstDot = stsAuthority.IndexOf('.');
GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot);
AcsHostUrl = stsAuthority.Substring(firstDot + 1);
tokenHandler.ValidateToken(jsonToken);
string[] acceptableAudiences;
if (!String.IsNullOrEmpty(HostedAppHostNameOverride))
{
acceptableAudiences = HostedAppHostNameOverride.Split(';');
}
else if (appHostName == null)
{
acceptableAudiences = new[] { HostedAppHostName };
}
else
{
acceptableAudiences = new[] { appHostName };
}
bool validationSuccessful = false;
string realm = Realm ?? token.Realm;
foreach (var audience in acceptableAudiences)
{
string principal = GetFormattedPrincipal(ClientId, audience, realm);
if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal))
{
validationSuccessful = true;
break;
}
}
if (!validationSuccessful)
{
throw new AudienceUriValidationFailedException(
String.Format(CultureInfo.CurrentCulture,
"\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience));
}
return token;
}
/// <summary>
/// Retrieves an access token from ACS to call the source of the specified context token at the specified
/// targetHost. The targetHost must be registered for the principal that sent the context token.
/// </summary>
/// <param name="contextToken">Context token issued by the intended access token audience</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <returns>An access token with an audience matching the context token's source</returns>
public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost)
{
string targetPrincipalName = contextToken.TargetPrincipalName;
// Extract the refreshToken from the context token
string refreshToken = contextToken.RefreshToken;
if (String.IsNullOrEmpty(refreshToken))
{
return null;
}
string targetRealm = Realm ?? contextToken.Realm;
return GetAccessToken(refreshToken,
targetPrincipalName,
targetHost,
targetRealm);
}
/// <summary>
/// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="authorizationCode">Authorization code to exchange for access token</param>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registerd for this app</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAccessToken(
string authorizationCode,
string targetPrincipalName,
string targetHost,
string targetRealm,
Uri redirectUri)
{
if (targetRealm == null)
{
targetRealm = Realm;
}
string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
string clientId = GetFormattedPrincipal(ClientId, null, targetRealm);
// Create request for token. The RedirectUri is null here. This will fail if redirect uri is registered
OAuth2AccessTokenRequest oauth2Request =
OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode(
clientId,
ClientSecret,
authorizationCode,
redirectUri,
resource);
// Get token
OAuth2S2SClient client = new OAuth2S2SClient();
OAuth2AccessTokenResponse oauth2Response;
try
{
oauth2Response =
client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
}
catch (WebException wex)
{
using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
{
string responseText = sr.ReadToEnd();
throw new WebException(wex.Message + " - " + responseText, wex);
}
}
return oauth2Response;
}
/// <summary>
/// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="refreshToken">Refresh token to exchange for access token</param>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAccessToken(
string refreshToken,
string targetPrincipalName,
string targetHost,
string targetRealm)
{
if (targetRealm == null)
{
targetRealm = Realm;
}
string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
string clientId = GetFormattedPrincipal(ClientId, null, targetRealm);
OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource);
// Get token
OAuth2S2SClient client = new OAuth2S2SClient();
OAuth2AccessTokenResponse oauth2Response;
try
{
oauth2Response =
client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
}
catch (WebException wex)
{
using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
{
string responseText = sr.ReadToEnd();
throw new WebException(wex.Message + " - " + responseText, wex);
}
}
return oauth2Response;
}
/// <summary>
/// Retrieves an app-only access token from ACS to call the specified principal
/// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is
/// null, the "Realm" setting in web.config will be used instead.
/// </summary>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAppOnlyAccessToken(
string targetPrincipalName,
string targetHost,
string targetRealm)
{
if (targetRealm == null)
{
targetRealm = Realm;
}
string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
string clientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm);
OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(clientId, ClientSecret, resource);
oauth2Request.Resource = resource;
// Get token
OAuth2S2SClient client = new OAuth2S2SClient();
OAuth2AccessTokenResponse oauth2Response;
try
{
oauth2Response =
client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse;
}
catch (WebException wex)
{
using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream()))
{
string responseText = sr.ReadToEnd();
throw new WebException(wex.Message + " - " + responseText, wex);
}
}
return oauth2Response;
}
/// <summary>
/// Creates a client context based on the properties of a remote event receiver
/// </summary>
/// <param name="properties">Properties of a remote event receiver</param>
/// <returns>A ClientContext ready to call the web where the event originated</returns>
public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties)
{
Uri sharepointUrl;
if (properties.ListEventProperties != null)
{
sharepointUrl = new Uri(properties.ListEventProperties.WebUrl);
}
else if (properties.ItemEventProperties != null)
{
sharepointUrl = new Uri(properties.ItemEventProperties.WebUrl);
}
else if (properties.WebEventProperties != null)
{
sharepointUrl = new Uri(properties.WebEventProperties.FullUrl);
}
else
{
return null;
}
if (IsHighTrustApp())
{
return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null);
}
return CreateAcsClientContextForUrl(properties, sharepointUrl);
}
/// <summary>
/// Creates a client context based on the properties of an app event
/// </summary>
/// <param name="properties">Properties of an app event</param>
/// <param name="useAppWeb">True to target the app web, false to target the host web</param>
/// <returns>A ClientContext ready to call the app web or the parent web</returns>
public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb)
{
if (properties.AppEventProperties == null)
{
return null;
}
Uri sharepointUrl = useAppWeb ? properties.AppEventProperties.AppWebFullUrl : properties.AppEventProperties.HostWebFullUrl;
if (IsHighTrustApp())
{
return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null);
}
return CreateAcsClientContextForUrl(properties, sharepointUrl);
}
/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="redirectUri">Redirect URI registerd for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
string targetUrl,
string authorizationCode,
Uri redirectUri)
{
return GetClientContextWithAuthorizationCode(targetUrl, SharePointPrincipal, authorizationCode, GetRealmFromTargetUrl(new Uri(targetUrl)), redirectUri);
}
/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="targetPrincipalName">Name of the target SharePoint principal</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registerd for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
string targetUrl,
string targetPrincipalName,
string authorizationCode,
string targetRealm,
Uri redirectUri)
{
Uri targetUri = new Uri(targetUrl);
string accessToken =
GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri).AccessToken;
return GetClientContextWithAccessToken(targetUrl, accessToken);
}
/// <summary>
/// Uses the specified access token to create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="accessToken">Access token to be used when calling the specified targetUrl</param>
/// <returns>A ClientContext ready to call targetUrl with the specified access token</returns>
public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken)
{
ClientContext clientContext = new ClientContext(targetUrl);
clientContext.AuthenticationMode = ClientAuthenticationMode.Anonymous;
clientContext.FormDigestHandlingEnabled = false;
clientContext.ExecutingWebRequest +=
delegate(object oSender, WebRequestEventArgs webRequestEventArgs)
{
webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] =
"Bearer " + accessToken;
};
return clientContext;
}
/// <summary>
/// Retrieves an access token from ACS using the specified context token, and uses that access token to create
/// a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="contextTokenString">Context token received from the target SharePoint site</param>
/// <param name="appHostUrl">Url authority of the hosted app. If this is null, the value in the HostedAppHostName
/// of web.config will be used instead</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithContextToken(
string targetUrl,
string contextTokenString,
string appHostUrl)
{
SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl);
Uri targetUri = new Uri(targetUrl);
string accessToken = GetAccessToken(contextToken, targetUri.Authority).AccessToken;
return GetClientContextWithAccessToken(targetUrl, accessToken);
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope)
{
return string.Format(
"{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code",
EnsureTrailingSlash(contextUrl),
AuthorizationPage,
ClientId,
scope);
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is
/// granted</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri)
{
return string.Format(
"{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code&redirect_uri={4}",
EnsureTrailingSlash(contextUrl),
AuthorizationPage,
ClientId,
scope,
redirectUri);
}
/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request a new context token.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param>
/// <returns>Url of the SharePoint site's context token redirect page</returns>
public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri)
{
return string.Format(
"{0}{1}?client_id={2}&redirect_uri={3}",
EnsureTrailingSlash(contextUrl),
RedirectPage,
ClientId,
redirectUri);
}
/// <summary>
/// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified
/// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in
/// web.config, an auth challenge will be issued to the targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>An access token with an audience of the target principal</returns>
public static string GetS2SAccessTokenWithWindowsIdentity(
Uri targetApplicationUri,
WindowsIdentity identity)
{
string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;
JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;
return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
}
/// <summary>
/// Retrieves an S2S client context with an access token signed by the application's private certificate on
/// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the
/// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the
/// targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>A ClientContext using an access token with an audience of the target application</returns>
public static ClientContext GetS2SClientContextWithWindowsIdentity(
Uri targetApplicationUri,
WindowsIdentity identity)
{
string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;
JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;
string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken);
}
/// <summary>
/// Get authentication realm from SharePoint
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <returns>String representation of the realm GUID</returns>
public static string GetRealmFromTargetUrl(Uri targetApplicationUri)
{
WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc");
request.Headers.Add("Authorization: Bearer ");
try
{
using (request.GetResponse())
{
}
}
catch (WebException e)
{
if (e.Response == null)
{
return null;
}
string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"];
if (string.IsNullOrEmpty(bearerResponseHeader))
{
return null;
}
const string bearer = "Bearer realm=\"";
int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal);
if (bearerIndex < 0)
{
return null;
}
int realmIndex = bearerIndex + bearer.Length;
if (bearerResponseHeader.Length >= realmIndex + 36)
{
string targetRealm = bearerResponseHeader.Substring(realmIndex, 36);
Guid realmGuid;
if (Guid.TryParse(targetRealm, out realmGuid))
{
return targetRealm;
}
}
}
return null;
}
/// <summary>
/// Determines if this is a high trust app.
/// </summary>
/// <returns>True if this is a high trust app.</returns>
public static bool IsHighTrustApp()
{
return SigningCredentials != null;
}
/// <summary>
/// Ensures that the specified URL ends with '/' if it is not null or empty.
/// </summary>
/// <param name="url">The url.</param>
/// <returns>The url ending with '/' if it is not null or empty.</returns>
public static string EnsureTrailingSlash(string url)
{
if (!string.IsNullOrEmpty(url) && url[url.Length - 1] != '/')
{
return url + "/";
}
return url;
}
#endregion
#region private fields
//
// Configuration Constants
//
private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx";
private const string RedirectPage = "_layouts/15/AppRedirect.aspx";
private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000";
private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1";
private const string S2SProtocol = "OAuth2";
private const string DelegationIssuance = "DelegationIssuance1.0";
private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier;
private const string TrustedForImpersonationClaimType = "trustedfordelegation";
private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken;
//
// Environment Constants
//
private static string GlobalEndPointPrefix = "accounts";
private static string AcsHostUrl = "accesscontrol.windows.net";
//
// Hosted app configuration
//
private static readonly string ClientId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId")) ? WebConfigurationManager.AppSettings.Get("HostedAppName") : WebConfigurationManager.AppSettings.Get("ClientId");
private static readonly string IssuerId = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId")) ? ClientId : WebConfigurationManager.AppSettings.Get("IssuerId");
private static readonly string HostedAppHostNameOverride = WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride");
private static readonly string HostedAppHostName = WebConfigurationManager.AppSettings.Get("HostedAppHostName");
private static readonly string ClientSecret = string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret")) ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey") : WebConfigurationManager.AppSettings.Get("ClientSecret");
private static readonly string SecondaryClientSecret = WebConfigurationManager.AppSettings.Get("SecondaryClientSecret");
private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm");
private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm");
private static readonly string ClientSigningCertificatePath = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath");
private static readonly string ClientSigningCertificatePassword = WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword");
private static readonly X509Certificate2 ClientCertificate = (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword)) ? null : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword);
private static readonly X509SigningCredentials SigningCredentials = (ClientCertificate == null) ? null : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest);
#endregion
#region private methods
private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl)
{
string contextTokenString = properties.ContextToken;
if (String.IsNullOrEmpty(contextTokenString))
{
return null;
}
SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, OperationContext.Current.IncomingMessageHeaders.To.Host);
string accessToken = GetAccessToken(contextToken, sharepointUrl.Authority).AccessToken;
return GetClientContextWithAccessToken(sharepointUrl.ToString(), accessToken);
}
private static string GetAcsMetadataEndpointUrl()
{
return Path.Combine(GetAcsGlobalEndpointUrl(), AcsMetadataEndPointRelativeUrl);
}
private static string GetFormattedPrincipal(string principalName, string hostName, string realm)
{
if (!String.IsNullOrEmpty(hostName))
{
return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm);
}
return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm);
}
private static string GetAcsPrincipalName(string realm)
{
return GetFormattedPrincipal(AcsPrincipalName, new Uri(GetAcsGlobalEndpointUrl()).Host, realm);
}
private static string GetAcsGlobalEndpointUrl()
{
return String.Format(CultureInfo.InvariantCulture, "https://{0}.{1}/", GlobalEndPointPrefix, AcsHostUrl);
}
private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler()
{
JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler();
handler.Configuration = new SecurityTokenHandlerConfiguration();
handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never);
handler.Configuration.CertificateValidator = X509CertificateValidator.None;
List<byte[]> securityKeys = new List<byte[]>();
securityKeys.Add(Convert.FromBase64String(ClientSecret));
if (!string.IsNullOrEmpty(SecondaryClientSecret))
{
securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret));
}
List<SecurityToken> securityTokens = new List<SecurityToken>();
securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys));
handler.Configuration.IssuerTokenResolver =
SecurityTokenResolver.CreateDefaultSecurityTokenResolver(
new ReadOnlyCollection<SecurityToken>(securityTokens),
false);
SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry();
foreach (byte[] securitykey in securityKeys)
{
issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace));
}
handler.Configuration.IssuerNameRegistry = issuerNameRegistry;
return handler;
}
private static string GetS2SAccessTokenWithClaims(
string targetApplicationHostName,
string targetRealm,
IEnumerable<JsonWebTokenClaim> claims)
{
return IssueToken(
ClientId,
IssuerId,
targetRealm,
SharePointPrincipal,
targetRealm,
targetApplicationHostName,
true,
claims,
claims == null);
}
private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity)
{
JsonWebTokenClaim[] claims = new JsonWebTokenClaim[]
{
new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()),
new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory")
};
return claims;
}
private static string IssueToken(
string sourceApplication,
string issuerApplication,
string sourceRealm,
string targetApplication,
string targetRealm,
string targetApplicationHostName,
bool trustedForDelegation,
IEnumerable<JsonWebTokenClaim> claims,
bool appOnly = false)
{
if (null == SigningCredentials)
{
throw new InvalidOperationException("SigningCredentials was not initialized");
}
#region Actor token
string issuer = string.IsNullOrEmpty(sourceRealm) ? issuerApplication : string.Format("{0}@{1}", issuerApplication, sourceRealm);
string nameid = string.IsNullOrEmpty(sourceRealm) ? sourceApplication : string.Format("{0}@{1}", sourceApplication, sourceRealm);
string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm);
List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>();
actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid));
if (trustedForDelegation && !appOnly)
{
actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true"));
}
// Create token
JsonWebSecurityToken actorToken = new JsonWebSecurityToken(
issuer: issuer,
audience: audience,
validFrom: DateTime.UtcNow,
validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
signingCredentials: SigningCredentials,
claims: actorClaims);
string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken);
if (appOnly)
{
// App-only token is the same as actor token for delegated case
return actorTokenString;
}
#endregion Actor token
#region Outer token
List<JsonWebTokenClaim> outerClaims = null == claims ? new List<JsonWebTokenClaim>() : new List<JsonWebTokenClaim>(claims);
outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString));
JsonWebSecurityToken jsonToken = new JsonWebSecurityToken(
nameid, // outer token issuer should match actor token nameid
audience,
DateTime.UtcNow,
DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
outerClaims);
string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken);
#endregion Outer token
return accessToken;
}
#endregion
#region AcsMetadataParser
// This class is used to get MetaData document from the global STS endpoint. It contains
// methods to parse the MetaData document and get endpoints and STS certificate.
public static class AcsMetadataParser
{
public static X509Certificate2 GetAcsSigningCert(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
if (null != document.keys && document.keys.Count > 0)
{
JsonKey signingKey = document.keys[0];
if (null != signingKey && null != signingKey.keyValue)
{
return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value));
}
}
throw new Exception("Metadata document does not contain ACS signing certificate.");
}
public static string GetDelegationServiceUrl(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance);
if (null != delegationEndpoint)
{
return delegationEndpoint.location;
}
throw new Exception("Metadata document does not contain Delegation Service endpoint Url");
}
private static JsonMetadataDocument GetMetadataDocument(string realm)
{
string acsMetadataEndpointUrlWithRealm = String.Format(CultureInfo.InvariantCulture, "{0}?realm={1}",
GetAcsMetadataEndpointUrl(),
realm);
byte[] acsMetadata;
using (WebClient webClient = new WebClient())
{
acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm);
}
string jsonResponseString = Encoding.UTF8.GetString(acsMetadata);
JavaScriptSerializer serializer = new JavaScriptSerializer();
JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString);
if (null == document)
{
throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm);
}
return document;
}
public static string GetStsUrl(string realm)
{
JsonMetadataDocument document = GetMetadataDocument(realm);
JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol);
if (null != s2sEndpoint)
{
return s2sEndpoint.location;
}
throw new Exception("Metadata document does not contain STS endpoint url");
}
private class JsonMetadataDocument
{
public string serviceName { get; set; }
public List<JsonEndpoint> endpoints { get; set; }
public List<JsonKey> keys { get; set; }
}
private class JsonEndpoint
{
public string location { get; set; }
public string protocol { get; set; }
public string usage { get; set; }
}
private class JsonKeyValue
{
public string type { get; set; }
public string value { get; set; }
}
private class JsonKey
{
public string usage { get; set; }
public JsonKeyValue keyValue { get; set; }
}
}
#endregion
}
/// <summary>
/// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token
/// </summary>
public class SharePointContextToken : JsonWebSecurityToken
{
public static SharePointContextToken Create(JsonWebSecurityToken contextToken)
{
return new SharePointContextToken(contextToken.Issuer, contextToken.Audience, contextToken.ValidFrom, contextToken.ValidTo, contextToken.Claims);
}
public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims)
: base(issuer, audience, validFrom, validTo, claims)
{
}
public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken)
: base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken)
{
}
public SharePointContextToken(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable<JsonWebTokenClaim> claims, SigningCredentials signingCredentials)
: base(issuer, audience, validFrom, validTo, claims, signingCredentials)
{
}
public string NameId
{
get
{
return GetClaimValue(this, "nameid");
}
}
/// <summary>
/// The principal name portion of the context token's "appctxsender" claim
/// </summary>
public string TargetPrincipalName
{
get
{
string appctxsender = GetClaimValue(this, "appctxsender");
if (appctxsender == null)
{
return null;
}
return appctxsender.Split('@')[0];
}
}
/// <summary>
/// The context token's "refreshtoken" claim
/// </summary>
public string RefreshToken
{
get
{
return GetClaimValue(this, "refreshtoken");
}
}
/// <summary>
/// The context token's "CacheKey" claim
/// </summary>
public string CacheKey
{
get
{
string appctx = GetClaimValue(this, "appctx");
if (appctx == null)
{
return null;
}
ClientContext ctx = new ClientContext("http://tempuri.org");
Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
string cacheKey = (string)dict["CacheKey"];
return cacheKey;
}
}
/// <summary>
/// The context token's "SecurityTokenServiceUri" claim
/// </summary>
public string SecurityTokenServiceUri
{
get
{
string appctx = GetClaimValue(this, "appctx");
if (appctx == null)
{
return null;
}
ClientContext ctx = new ClientContext("http://tempuri.org");
Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
string securityTokenServiceUri = (string)dict["SecurityTokenServiceUri"];
return securityTokenServiceUri;
}
}
/// <summary>
/// The realm portion of the context token's "audience" claim
/// </summary>
public string Realm
{
get
{
string aud = Audience;
if (aud == null)
{
return null;
}
string tokenRealm = aud.Substring(aud.IndexOf('@') + 1);
return tokenRealm;
}
}
private static string GetClaimValue(JsonWebSecurityToken token, string claimType)
{
if (token == null)
{
throw new ArgumentNullException("token");
}
foreach (JsonWebTokenClaim claim in token.Claims)
{
if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType))
{
return claim.Value;
}
}
return null;
}
}
/// <summary>
/// Represents a security token which contains multiple security keys that are generated using symmetric algorithms.
/// </summary>
public class MultipleSymmetricKeySecurityToken : SecurityToken
{
/// <summary>
/// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
/// </summary>
/// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys)
: this(UniqueId.CreateUniqueId(), keys)
{
}
/// <summary>
/// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
/// </summary>
/// <param name="tokenId">The unique identifier of the security token.</param>
/// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys)
{
if (keys == null)
{
throw new ArgumentNullException("keys");
}
if (String.IsNullOrEmpty(tokenId))
{
throw new ArgumentException("Value cannot be a null or empty string.", "tokenId");
}
foreach (byte[] key in keys)
{
if (key.Length <= 0)
{
throw new ArgumentException("The key length must be greater then zero.", "keys");
}
}
id = tokenId;
effectiveTime = DateTime.UtcNow;
securityKeys = CreateSymmetricSecurityKeys(keys);
}
/// <summary>
/// Gets the unique identifier of the security token.
/// </summary>
public override string Id
{
get
{
return id;
}
}
/// <summary>
/// Gets the cryptographic keys associated with the security token.
/// </summary>
public override ReadOnlyCollection<SecurityKey> SecurityKeys
{
get
{
return securityKeys.AsReadOnly();
}
}
/// <summary>
/// Gets the first instant in time at which this security token is valid.
/// </summary>
public override DateTime ValidFrom
{
get
{
return effectiveTime;
}
}
/// <summary>
/// Gets the last instant in time at which this security token is valid.
/// </summary>
public override DateTime ValidTo
{
get
{
// Never expire
return DateTime.MaxValue;
}
}
/// <summary>
/// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier.
/// </summary>
/// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param>
/// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns>
public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
{
if (keyIdentifierClause == null)
{
throw new ArgumentNullException("keyIdentifierClause");
}
// Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the
// presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later
// when the key is matched to the issuer.
if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause)
{
return true;
}
return base.MatchesKeyIdentifierClause(keyIdentifierClause);
}
#region private members
private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys)
{
List<SecurityKey> symmetricKeys = new List<SecurityKey>();
foreach (byte[] key in keys)
{
symmetricKeys.Add(new InMemorySymmetricSecurityKey(key));
}
return symmetricKeys;
}
private string id;
private DateTime effectiveTime;
private List<SecurityKey> securityKeys;
#endregion
}
}
| |
#region Copyright & License
//
// Copyright 2001-2005 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections;
using System.Runtime.Remoting;
using System.Runtime.Remoting.Channels;
using System.Runtime.Remoting.Channels.Tcp;
using log4net.Core;
using log4net.Appender;
using IRemoteLoggingSink = log4net.Appender.RemotingAppender.IRemoteLoggingSink;
using NUnit.Framework;
namespace log4net.Tests.Appender
{
/// <summary>
/// Used for internal unit testing the <see cref="RemotingAppender"/> class.
/// </summary>
/// <remarks>
/// Used for internal unit testing the <see cref="RemotingAppender"/> class.
/// </remarks>
[TestFixture] public class RemotingAppenderTest
{
private IChannel m_remotingChannel = null;
/// <summary>
/// Test that the Message property is correctly remoted
/// </summary>
[Test] public void TestRemotedMessage()
{
// Setup the remoting appender
ConfigureRootAppender(FixFlags.Partial);
RemoteLoggingSinkImpl.Instance.Reset();
log4net.Repository.Hierarchy.Logger root = null;
root = ((log4net.Repository.Hierarchy.Hierarchy)LogManager.GetRepository()).Root;
string testMessage = string.Format("test message [ {0} ]", (new Random()).Next());
// Log a message that will be remoted
root.Log(Level.Debug, testMessage, null);
// Wait for the remoted object to be delivered
System.Threading.Thread.Sleep(1000);
LoggingEvent[] events = RemoteLoggingSinkImpl.Instance.Events;
Assert.AreEqual(1, events.Length, "Expect to receive 1 remoted event");
Assert.AreEqual(testMessage, events[0].RenderedMessage, "Expect Message match after remoting event");
}
/// <summary>
/// Test that the UserName property is not remoted when doing a Fix.Partial
/// </summary>
[Test] public void TestPartialFix()
{
// Setup the remoting appender
ConfigureRootAppender(FixFlags.Partial);
RemoteLoggingSinkImpl.Instance.Reset();
log4net.Repository.Hierarchy.Logger root = null;
root = ((log4net.Repository.Hierarchy.Hierarchy)LogManager.GetRepository()).Root;
// Log a message that will be remoted
root.Log(Level.Debug, "test message", null);
// Wait for the remoted object to be delivered
System.Threading.Thread.Sleep(1000);
LoggingEvent[] events = RemoteLoggingSinkImpl.Instance.Events;
Assert.AreEqual(1, events.Length, "Expect to receive 1 remoted event");
// Grab the event data
LoggingEventData eventData = GetLoggingEventData(events[0]);
Assert.IsNull(eventData.UserName, "Expect username to be null because only doing a partial fix");
}
/// <summary>
/// Test that the UserName property is remoted when doing a Fix.All
/// </summary>
[Test] public void TestFullFix()
{
// Setup the remoting appender
ConfigureRootAppender(FixFlags.All);
RemoteLoggingSinkImpl.Instance.Reset();
log4net.Repository.Hierarchy.Logger root = null;
root = ((log4net.Repository.Hierarchy.Hierarchy)LogManager.GetRepository()).Root;
// Log a message that will be remoted
root.Log(Level.Debug, "test message", null);
// Wait for the remoted object to be delivered
System.Threading.Thread.Sleep(1000);
LoggingEvent[] events = RemoteLoggingSinkImpl.Instance.Events;
Assert.AreEqual(1, events.Length, "Expect to receive 1 remoted event");
// Grab the event data
LoggingEventData eventData = GetLoggingEventData(events[0]);
Assert.IsNotNull(eventData.UserName, "Expect username to not be null because doing a full fix");
}
/// <summary>
/// Test that the Message property is correctly remoted
/// </summary>
[Test] public void TestRemotedMessageNdcPushPop()
{
// Setup the remoting appender
ConfigureRootAppender(FixFlags.Partial);
RemoteLoggingSinkImpl.Instance.Reset();
log4net.Repository.Hierarchy.Logger root = null;
root = ((log4net.Repository.Hierarchy.Hierarchy)LogManager.GetRepository()).Root;
string testMessage = string.Format("test message [ {0} ]", (new Random()).Next());
using(NDC.Push("value")) {}
// Log a message that will be remoted
root.Log(Level.Debug, testMessage, null);
// Wait for the remoted object to be delivered
System.Threading.Thread.Sleep(1000);
LoggingEvent[] events = RemoteLoggingSinkImpl.Instance.Events;
Assert.AreEqual(1, events.Length, "Expect to receive 1 remoted event");
Assert.AreEqual(testMessage, events[0].RenderedMessage, "Expect Message match after remoting event");
}
[Test] public void TestNestedNdc()
{
// This test can suffer from timing and ordering issues as the RemotingAppender does dispatch events asynchronously
// Setup the remoting appender
ConfigureRootAppender(FixFlags.Partial);
RemoteLoggingSinkImpl.Instance.Reset();
log4net.Tests.Appender.Remoting.UserInterfaces.TestService t;
t = new log4net.Tests.Appender.Remoting.UserInterfaces.TestService();
t.Test();
// Wait for the remoted objects to be delivered
System.Threading.Thread.Sleep(3000);
LoggingEvent[] events = RemoteLoggingSinkImpl.Instance.Events;
Assert.AreEqual(5, events.Length, "Expect to receive 5 remoted event");
Assert.AreEqual("begin test", events[0].RenderedMessage, "Verify event 1 RenderedMessage");
Assert.AreEqual("feature", events[1].RenderedMessage, "Verify event 2 RenderedMessage");
Assert.AreEqual("return", events[2].RenderedMessage, "Verify event 3 RenderedMessage");
Assert.AreEqual("return", events[3].RenderedMessage, "Verify event 4 RenderedMessage");
Assert.AreEqual("end test", events[4].RenderedMessage, "Verify event 5 RenderedMessage");
Assert.IsNull(events[0].Properties["NDC"], "Verify event 1 Properties");
Assert.AreEqual("test1", events[1].Properties["NDC"], "Verify event 2 Properties");
Assert.AreEqual("test1 test2", events[2].Properties["NDC"], "Verify event 3 Properties");
Assert.AreEqual("test1", events[3].Properties["NDC"], "Verify event 4 Properties");
Assert.IsNull(events[4].Properties["NDC"], "Verify event 5 Properties");
}
private void RegisterRemotingServerChannel()
{
if (m_remotingChannel == null)
{
m_remotingChannel = new TcpChannel(8085);
// Setup remoting server
try
{
#if NET_2_0
ChannelServices.RegisterChannel(m_remotingChannel, false);
#else
ChannelServices.RegisterChannel(m_remotingChannel);
#endif
}
catch(Exception)
{
}
// Marshal the sink object
RemotingServices.Marshal(RemoteLoggingSinkImpl.Instance, "LoggingSink", typeof(IRemoteLoggingSink));
}
}
/// <summary>
/// Shuts down any loggers in the hierarchy, along
/// with all appenders.
/// </summary>
private void ResetRepository()
{
// Regular users should not use the clear method lightly!
LogManager.GetRepository().ResetConfiguration();
LogManager.GetRepository().Shutdown();
((log4net.Repository.Hierarchy.Hierarchy)LogManager.GetRepository()).Clear();
}
/// <summary>
/// Any initialization that happens before each test can
/// go here
/// </summary>
[SetUp] public void SetUp()
{
ResetRepository();
RegisterRemotingServerChannel();
}
/// <summary>
/// Any steps that happen after each test go here
/// </summary>
[TearDown] public void TearDown()
{
ResetRepository();
}
/// <summary>
/// Configures the root appender for counting and rolling
/// </summary>
private void ConfigureRootAppender(FixFlags fixFlags)
{
log4net.Repository.Hierarchy.Logger root = null;
root = ((log4net.Repository.Hierarchy.Hierarchy)LogManager.GetRepository()).Root;
root.Level = Level.Debug;
root.AddAppender(CreateAppender(fixFlags));
root.Repository.Configured = true;
}
private RemotingAppender CreateAppender(FixFlags fixFlags)
{
RemotingAppender appender = new RemotingAppender();
appender.Sink = "tcp://localhost:8085/LoggingSink";
appender.Lossy = false;
appender.BufferSize = 1;
appender.Fix = fixFlags;
appender.ActivateOptions();
return appender;
}
public class RemoteLoggingSinkImpl : MarshalByRefObject, IRemoteLoggingSink
{
public static readonly RemoteLoggingSinkImpl Instance = new RemoteLoggingSinkImpl();
private ArrayList m_events = new ArrayList();
#region Public Instance Constructors
private RemoteLoggingSinkImpl()
{
}
#endregion Public Instance Constructors
#region Implementation of IRemoteLoggingSink
/// <summary>
/// Logs the events to to an internal buffer
/// </summary>
/// <param name="events">The events to log.</param>
/// <remarks>
/// Logs the events to to an internal buffer. The logged events can
/// be retrieved via the <see cref="Events"/> property. To clear
/// the buffer call the <see cref="Reset"/> method.
/// </remarks>
public void LogEvents(LoggingEvent[] events)
{
m_events.AddRange(events);
}
#endregion Implementation of IRemoteLoggingSink
#region Override implementation of MarshalByRefObject
/// <summary>
/// Obtains a lifetime service object to control the lifetime
/// policy for this instance.
/// </summary>
/// <returns>
/// <c>null</c> to indicate that this instance should live
/// forever.
/// </returns>
public override object InitializeLifetimeService()
{
return null;
}
#endregion Override implementation of MarshalByRefObject
public void Reset()
{
m_events.Clear();
}
public LoggingEvent[] Events
{
get
{
return (LoggingEvent[])m_events.ToArray(typeof(LoggingEvent));
}
}
}
//
// Helper functions to dig into the appender
//
private static LoggingEventData GetLoggingEventData(LoggingEvent loggingEvent)
{
return (LoggingEventData)Utils.GetField(loggingEvent, "m_data");
}
}
}
// helper for TestNestedNdc
namespace log4net.Tests.Appender.Remoting.UserInterfaces
{
public class TestService
{
static ILog log = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
public void Test()
{
log.Info("begin test");
System.Threading.Thread.Sleep(100);
Feature f = new Feature();
f.Test();
log.Info("end test");
System.Threading.Thread.Sleep(100);
}
}
}
// helper for TestNestedNdc
namespace log4net.Tests.Appender.Remoting
{
public class Feature
{
static ILog log = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
public void Test()
{
using(NDC.Push("test1"))
{
log.Info("feature");
System.Threading.Thread.Sleep(100);
log4net.Tests.Appender.Remoting.Data.Dal d = new log4net.Tests.Appender.Remoting.Data.Dal();
d.Test();
log.Info("return");
System.Threading.Thread.Sleep(100);
}
}
}
}
// helper for TestNestedNdc
namespace log4net.Tests.Appender.Remoting.Data
{
public class Dal
{
static ILog log = LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);
public void Test()
{
using(NDC.Push("test2"))
{
log.Info("return");
System.Threading.Thread.Sleep(100);
}
}
}
}
| |
using System;
using System.Data;
using PCSComUtils.Admin.DS;
using PCSComUtils.Common;
using PCSComUtils.PCSExc;
namespace PCSComUtils.Admin.BO
{
public class VisibilityBO
{
private const string THIS = "PCSComUtils.Admin.BO.ISys_VisibilityItemBO";
public VisibilityBO()
{
}
/// <summary>
/// This method checks business rule and call Add() method of DS class
/// </summary>
/// <param name="pobjObjectVO">VisibityItemVO object</param>
/// <Author> Hung LA, Dec 13, 2004</Author>
public void Add(object pobjObjectVO)
{
try
{
Sys_VisibilityItemDS templateDS = new Sys_VisibilityItemDS();
templateDS.Add(pobjObjectVO);
}
catch (PCSDBException ex)
{
throw ex;
}
}
/// <summary>
/// This method not implements yet
/// </summary>
/// <param name="pintID"></param>
/// <param name="VOclass"></param>
/// <returns></returns>
public object GetObjectVO(int pintID, string VOclass)
{
const string METHOD_NAME = THIS + ".GetObjectVO()";
throw new PCSException(ErrorCode.NOT_IMPLEMENT, METHOD_NAME, new Exception());
}
public void UpdateDataSet(DataSet dstData)
{
throw new NotImplementedException();
}
/// <summary>
/// This method not implements yet
/// </summary>
/// <param name="pObjectVO"></param>
public void Delete(object pObjectVO)
{
const string METHOD_NAME = THIS + ".Delete()";
throw new PCSException(ErrorCode.NOT_IMPLEMENT, METHOD_NAME, new Exception());
}
/// <summary>
/// Delete a record from Database. This method checks business rule and call Delete() method of DS class
/// </summary>
/// <param name="pintID"></param>
/// <Author> Hung LA, Dec 13, 2004</Author>
public void Delete(int pintID)
{
try
{
Sys_VisibilityItemDS templateDS = new Sys_VisibilityItemDS();
templateDS.Delete(pintID);
}
catch (PCSDBException ex)
{
throw ex;
}
}
/// <summary>
/// This method uses to get Sys_VisibilityItemVO object
/// </summary>
/// <param name="pintID">Sys_VisibilityItem identity</param>
/// <returns></returns>
/// <Author> Hung LA, Dec 13, 2004</Author>
public object GetObjectVO(int pintID)
{
try
{
Sys_VisibilityItemDS templateDS = new Sys_VisibilityItemDS();
return templateDS.GetObjectVO(pintID);
}
catch (PCSDBException ex)
{
throw ex;
}
}
/// <summary>
/// This method uses to update data
/// </summary>
/// <param name="pobjObjecVO"></param>
/// <Author> Hung LA, Dec 13, 2004</Author>
public void Update(object pobjObjecVO)
{
try
{
Sys_VisibilityItemDS templateDS = new Sys_VisibilityItemDS();
templateDS.Update(pobjObjecVO);
}
catch (PCSDBException ex)
{
throw ex;
}
}
/// <summary>
/// This method uses to get all data
/// </summary>
/// <returns>Dataset</returns>
/// <Author> Hung LA, Dec 13, 2004</Author>
public DataSet List()
{
try
{
Sys_VisibilityItemDS templateDS = new Sys_VisibilityItemDS();
return templateDS.List();
}
catch (PCSDBException ex)
{
throw ex;
}
}
/// <summary>
/// This method uses to update a DataSet
/// </summary>
/// <param name="pData"></param>
/// <Author> SonHT, Dec 13, 2004</Author>
public void UpdateDataSetRoleAndItem(DataSet pData)
{
Sys_VisibilityGroupDS templateDS = new Sys_VisibilityGroupDS();
templateDS.UpdateDataSetRoleAndItem(pData);
}
/// <summary>
/// This method uses to get all visibility data
/// </summary>
/// <returns>DataSet</returns>
/// <Author> Hung LA, Dec 13, 2004</Author>
public DataSet GetVisibilityData()
{
const string METHOD_NAME = THIS + "GetVisibilityData()";
// TODO : Remove after constant approved
const int CANNOT_GET_VISIBILITY_DATA = -1;
DataSet dstData = new DataSet();
try
{
Sys_VisibilityGroupDS dsVisibilityGroup = new Sys_VisibilityGroupDS();
dstData.Merge(dsVisibilityGroup.List().Tables[0]);
Sys_VisibilityItemDS dsVisibilityItem = new Sys_VisibilityItemDS();
dstData.Merge(dsVisibilityItem.List().Tables[0]);
Sys_VisibilityGroup_RoleDS dsVisibilityGroup_Role = new Sys_VisibilityGroup_RoleDS();
dstData.Merge(dsVisibilityGroup_Role.List().Tables[0]);
Sys_RoleDS dsRole = new Sys_RoleDS();
dstData.Merge(dsRole.ListAll().Tables[0]);
}
catch(PCSDBException ex)
{
throw new PCSBOException(CANNOT_GET_VISIBILITY_DATA,METHOD_NAME,ex);
}
catch (Exception ex)
{
throw ex;
}
return dstData;
}
/// <summary>
/// Add new Visibility group and return it's Identity
/// </summary>
/// <param name="pobjVO">Sys_VisibilityGroupVO object</param>
/// <returns></returns>
/// <Author> Hung LA, Dec 13, 2004</Author>
public int AddAndReturnID(Sys_VisibilityGroupVO pobjVO)
{
const string METHOD_NAME = THIS + "GetVisibilityData()";
const int CANNOT_INSERT_VISIBILITY_DATA = -1;
try
{
Sys_VisibilityGroupDS dsVisibilityGroup = new Sys_VisibilityGroupDS();
return dsVisibilityGroup.AddAndReturnID(pobjVO);
}
catch(PCSDBException ex)
{
throw new PCSBOException(CANNOT_INSERT_VISIBILITY_DATA,METHOD_NAME,ex);
}
catch (Exception ex)
{
throw ex;
}
}
/// <summary>
/// Update data in Sys_Visibility dataset
/// </summary>
/// <param name="pdstData">Sys_Visibility dataset</param>
/// <Author> Hung LA, Dec 13, 2004</Author>
public void UpdateAllDataSet(DataSet pdstData)
{
Sys_VisibilityGroupDS dsVisibilityGroup = new Sys_VisibilityGroupDS();
dsVisibilityGroup.UpdateAllDataSet(pdstData);
}
/// <summary>
/// Get visible control
/// </summary>
/// <param name="pstrFormName"></param>
/// <param name="pstrRoleIDs"></param>
/// <returns></returns>
/// <Authors>SonHT</Authors>
public DataTable GetVisibleControl(string pstrFormName,string[] pstrRoleIDs)
{
Sys_VisibilityGroupDS dsHidden = new Sys_VisibilityGroupDS();
return dsHidden.GetVisibleControl(pstrFormName, string.Empty, pstrRoleIDs);
}
/// <summary>
/// Get visible control
/// </summary>
/// <returns></returns>
/// <Authors>SonHT </Authors>
public void UpdateAllRole()
{
(new Sys_VisibilityGroup_RoleDS()).UpdateAllRole();
}
}
}
| |
/***************************************************************************
copyright : (C) 2005 by Brian Nickel
email : brian.nickel@gmail.com
based on : id3v2frame.cpp from TagLib
***************************************************************************/
/***************************************************************************
* This library is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Lesser General Public License version *
* 2.1 as published by the Free Software Foundation. *
* *
* This library is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public *
* License along with this library; if not, write to the Free Software *
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
* USA *
***************************************************************************/
using System;
namespace TagLib.Asf
{
public enum DataType
{
Unicode = 0,
Bytes = 1,
Bool = 2,
DWord = 3,
QWord = 4,
Word = 5
}
public class ContentDescriptor
{
//////////////////////////////////////////////////////////////////////////
// private properties
//////////////////////////////////////////////////////////////////////////
private DataType type;
private string name;
private string sValue;
private ByteVector bvValue;
private long lValue;
//////////////////////////////////////////////////////////////////////////
// public methods
//////////////////////////////////////////////////////////////////////////
public ContentDescriptor (string name, string value) : this ()
{
this.name = name;
this.type = DataType.Unicode;
this.sValue = value;
}
public ContentDescriptor (string name, ByteVector value) : this ()
{
this.name = name;
this.type = DataType.Bytes;
this.bvValue = new ByteVector (value);
}
public ContentDescriptor (string name, uint value) : this ()
{
this.name = name;
this.type = DataType.DWord;
this.lValue = value;
}
public ContentDescriptor (string name, long value) : this ()
{
this.name = name;
this.type = DataType.QWord;
this.lValue = value;
}
public ContentDescriptor (string name, short value) : this ()
{
this.name = name;
this.type = DataType.Word;
this.lValue = value;
}
public ContentDescriptor (string name, bool value) : this ()
{
this.name = name;
this.type = DataType.Bool;
this.lValue = value ? 1 : 0;
}
public string Name {get {return name;}}
public DataType Type {get {return type;}}
public override string ToString ()
{
return sValue;
}
public ByteVector ToByteVector ()
{
return bvValue;
}
public bool ToBool ()
{
return lValue != 0;
}
public uint ToDWord ()
{
if (type == DataType.Unicode && sValue != null)
try
{
return System.UInt32.Parse (sValue);
}
catch {}
return (uint) lValue;
}
public long ToQWord ()
{
if (type == DataType.Unicode && sValue != null)
try
{
return System.Int64.Parse (sValue);
}
catch {}
return (long) lValue;
}
public short ToWord ()
{
if (type == DataType.Unicode && sValue != null)
try
{
return System.Int16.Parse (sValue);
}
catch {}
return (short) lValue;
}
public ByteVector Render ()
{
ByteVector v = Object.RenderUnicode (name);
ByteVector data = Object.RenderWord ((short) v.Count);
data.Add (v);
data.Add (Object.RenderWord ((short) type));
switch (type)
{
case DataType.Unicode:
v = Object.RenderUnicode (sValue);
data.Add (Object.RenderWord ((short) v.Count));
data.Add (v);
break;
case DataType.Bytes:
data.Add (Object.RenderWord ((short) bvValue.Count));
data.Add (bvValue);
break;
case DataType.Bool:
data.Add (Object.RenderWord (4));
data.Add (Object.RenderDWord ((uint) lValue));
break;
case DataType.DWord:
data.Add (Object.RenderWord (4));
data.Add (Object.RenderDWord ((uint) lValue));
break;
case DataType.QWord:
data.Add (Object.RenderWord (8));
data.Add (Object.RenderQWord (lValue));
break;
case DataType.Word:
data.Add (Object.RenderWord (2));
data.Add (Object.RenderWord ((short) lValue));
break;
default:
return null;
}
return data;
}
//////////////////////////////////////////////////////////////////////////
// protected methods
//////////////////////////////////////////////////////////////////////////
protected ContentDescriptor ()
{
type = DataType.Unicode;
name = null;
sValue = null;
bvValue = null;
lValue = 0;
}
protected internal ContentDescriptor (Asf.File file) : this ()
{
Parse (file);
}
protected bool Parse (Asf.File file)
{
int size = file.ReadWord ();
name = file.ReadUnicode (size);
type = (DataType) file.ReadWord ();
size = file.ReadWord ();
switch (type)
{
case DataType.Word:
lValue = file.ReadWord ();
break;
case DataType.Bool:
lValue = file.ReadDWord ();
break;
case DataType.DWord:
lValue = file.ReadDWord ();
break;
case DataType.QWord:
lValue = file.ReadQWord ();
break;
case DataType.Unicode:
sValue = file.ReadUnicode (size);
break;
case DataType.Bytes:
bvValue = file.ReadBlock (size);
break;
default:
return false;
}
return true;
}
}
}
| |
//
// CustomTreeModel.cs
//
// Author:
// Lluis Sanchez <lluis@xamarin.com>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Gtk;
#if XWT_GTK3
using TreeModelImplementor = Gtk.ITreeModelImplementor;
#endif
namespace Xwt.GtkBackend
{
public class CustomListModel: GLib.Object, TreeModelImplementor
{
IListDataSource source;
Dictionary<int,int> nodeHash = new Dictionary<int,int> ();
Dictionary<int,int> handleHash = new Dictionary<int,int> ();
Type[] colTypes;
int counter = 1;
Gtk.TreeModelAdapter adapter;
Gtk.Widget parent;
public CustomListModel (IntPtr p): base (p)
{
}
public CustomListModel (IListDataSource source, Gtk.Widget w)
{
parent = w;
this.source = source;
adapter = new Gtk.TreeModelAdapter (this);
colTypes = source.ColumnTypes;
source.RowChanged += HandleRowChanged;
source.RowDeleted += HandleRowDeleted;
source.RowInserted += HandleRowInserted;
source.RowsReordered += HandleRowsReordered;
}
void HandleRowsReordered (object sender, ListRowOrderEventArgs e)
{
var p = new Gtk.TreePath (new int[] { e.Row });
var it = IterFromNode (e.Row);
adapter.EmitRowsReordered (p, it, e.ChildrenOrder);
parent.QueueResize ();
}
void HandleRowInserted (object sender, ListRowEventArgs e)
{
var p = new Gtk.TreePath (new int[] { e.Row });
var it = IterFromNode (e.Row);
adapter.EmitRowInserted (p, it);
parent.QueueResize ();
}
void HandleRowDeleted (object sender, ListRowEventArgs e)
{
var p = new Gtk.TreePath (new int[] { e.Row });
adapter.EmitRowDeleted (p);
parent.QueueResize ();
}
void HandleRowChanged (object sender, ListRowEventArgs e)
{
var p = new Gtk.TreePath (new int[] { e.Row });
var it = IterFromNode (e.Row);
adapter.EmitRowChanged (p, it);
parent.QueueResize ();
}
public Gtk.TreeModelAdapter Store {
get { return adapter; }
}
Gtk.TreeIter IterFromNode (int node)
{
int gch;
if (!handleHash.TryGetValue (node, out gch)) {
gch = counter++;
handleHash [node] = gch;
nodeHash [gch] = node;
}
Gtk.TreeIter result = Gtk.TreeIter.Zero;
result.UserData = (IntPtr)gch;
return result;
}
int NodeFromIter (Gtk.TreeIter iter)
{
int node;
int gch = (int)iter.UserData;
nodeHash.TryGetValue (gch, out node);
return node;
}
#region TreeModelImplementor implementation
public GLib.GType GetColumnType (int index)
{
return (GLib.GType)colTypes [index];
}
public bool GetIter (out Gtk.TreeIter iter, Gtk.TreePath path)
{
iter = Gtk.TreeIter.Zero;
if (path.Indices.Length == 0)
return false;
int row = path.Indices [0];
if (row >= source.RowCount) {
return false;
}
iter = IterFromNode (row);
return true;
}
public Gtk.TreePath GetPath (Gtk.TreeIter iter)
{
int row = NodeFromIter (iter);
return new Gtk.TreePath (new int[] { row });
}
public void GetValue (Gtk.TreeIter iter, int column, ref GLib.Value value)
{
int row = NodeFromIter (iter);
var v = source.GetValue (row, column);
value = v != null ? new GLib.Value (v) : GLib.Value.Empty;
}
public bool IterNext (ref Gtk.TreeIter iter)
{
int row = NodeFromIter (iter);
if (++row < source.RowCount) {
iter = IterFromNode (row);
return true;
} else
return false;
}
#if XWT_GTK3
public bool IterPrevious (ref Gtk.TreeIter iter)
{
int row = NodeFromIter (iter);
if (--row >= 0) {
iter = IterFromNode (row);
return true;
} else
return false;
}
#endif
public bool IterChildren (out Gtk.TreeIter iter, Gtk.TreeIter parent)
{
iter = Gtk.TreeIter.Zero;
return false;
}
public bool IterHasChild (Gtk.TreeIter iter)
{
return false;
}
public int IterNChildren (Gtk.TreeIter iter)
{
if (iter.Equals (Gtk.TreeIter.Zero))
return source.RowCount;
else
return 0;
}
public bool IterNthChild (out Gtk.TreeIter iter, Gtk.TreeIter parent, int n)
{
if (parent.Equals (Gtk.TreeIter.Zero)) {
iter = IterFromNode (n);
return true;
} else {
iter = Gtk.TreeIter.Zero;
return false;
}
}
public bool IterParent (out Gtk.TreeIter iter, Gtk.TreeIter child)
{
iter = Gtk.TreeIter.Zero;
return false;
}
public void RefNode (Gtk.TreeIter iter)
{
}
public void UnrefNode (Gtk.TreeIter iter)
{
}
public Gtk.TreeModelFlags Flags {
get {
return Gtk.TreeModelFlags.ItersPersist | Gtk.TreeModelFlags.ListOnly;
}
}
public int NColumns {
get {
return colTypes.Length;
}
}
#endregion
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Android.Service.Wallpaper.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Android.Service.Wallpaper
{
/// <summary>
/// <para>A wallpaper service is responsible for showing a live wallpaper behind applications that would like to sit on top of it. This service object itself does very little its only purpose is to generate instances of Engine as needed. Implementing a wallpaper thus involves subclassing from this, subclassing an Engine implementation, and implementing onCreateEngine() to return a new instance of your engine. </para>
/// </summary>
/// <java-name>
/// android/service/wallpaper/WallpaperService
/// </java-name>
[Dot42.DexImport("android/service/wallpaper/WallpaperService", AccessFlags = 1057)]
public abstract partial class WallpaperService : global::Android.App.Service
/* scope: __dot42__ */
{
/// <summary>
/// <para>The Intent that must be declared as handled by the service. To be supported, the service must also require the android.Manifest.permission#BIND_WALLPAPER permission so that other applications can not abuse it. </para>
/// </summary>
/// <java-name>
/// SERVICE_INTERFACE
/// </java-name>
[Dot42.DexImport("SERVICE_INTERFACE", "Ljava/lang/String;", AccessFlags = 25)]
public const string SERVICE_INTERFACE = "android.service.wallpaper.WallpaperService";
/// <summary>
/// <para>Name under which a WallpaperService component publishes information about itself. This meta-data must reference an XML resource containing a <code><wallpaper></code> tag. </para>
/// </summary>
/// <java-name>
/// SERVICE_META_DATA
/// </java-name>
[Dot42.DexImport("SERVICE_META_DATA", "Ljava/lang/String;", AccessFlags = 25)]
public const string SERVICE_META_DATA = "android.service.wallpaper";
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public WallpaperService() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called by the system when the service is first created. Do not call this method directly. </para>
/// </summary>
/// <java-name>
/// onCreate
/// </java-name>
[Dot42.DexImport("onCreate", "()V", AccessFlags = 1)]
public override void OnCreate() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called by the system to notify a Service that it is no longer used and is being removed. The service should clean up any resources it holds (threads, registered receivers, etc) at this point. Upon return, there will be no more calls in to this Service object and it is effectively dead. Do not call this method directly. </para>
/// </summary>
/// <java-name>
/// onDestroy
/// </java-name>
[Dot42.DexImport("onDestroy", "()V", AccessFlags = 1)]
public override void OnDestroy() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Implement to return the implementation of the internal accessibility service interface. Subclasses should not override. </para>
/// </summary>
/// <java-name>
/// onBind
/// </java-name>
[Dot42.DexImport("onBind", "(Landroid/content/Intent;)Landroid/os/IBinder;", AccessFlags = 17)]
public override global::Android.Os.IBinder OnBind(global::Android.Content.Intent intent) /* MethodBuilder.Create */
{
return default(global::Android.Os.IBinder);
}
/// <summary>
/// <para>Must be implemented to return a new instance of the wallpaper's engine. Note that multiple instances may be active at the same time, such as when the wallpaper is currently set as the active wallpaper and the user is in the wallpaper picker viewing a preview of it as well. </para>
/// </summary>
/// <java-name>
/// onCreateEngine
/// </java-name>
[Dot42.DexImport("onCreateEngine", "()Landroid/service/wallpaper/WallpaperService$Engine;", AccessFlags = 1025)]
public abstract global::Android.Service.Wallpaper.WallpaperService.Engine OnCreateEngine() /* MethodBuilder.Create */ ;
/// <summary>
/// <para>The actual implementation of a wallpaper. A wallpaper service may have multiple instances running (for example as a real wallpaper and as a preview), each of which is represented by its own Engine instance. You must implement WallpaperService#onCreateEngine() to return your concrete Engine implementation. </para>
/// </summary>
/// <java-name>
/// android/service/wallpaper/WallpaperService$Engine
/// </java-name>
[Dot42.DexImport("android/service/wallpaper/WallpaperService$Engine", AccessFlags = 1)]
public partial class Engine
/* scope: __dot42__ */
{
/// <java-name>
/// this$0
/// </java-name>
[Dot42.DexImport("this$0", "Landroid/service/wallpaper/WallpaperService;", AccessFlags = 4112)]
internal readonly global::Android.Service.Wallpaper.WallpaperService This_0;
[Dot42.DexImport("<init>", "(Landroid/service/wallpaper/WallpaperService;)V", AccessFlags = 1)]
public Engine(global::Android.Service.Wallpaper.WallpaperService wallpaperService) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Provides access to the surface in which this wallpaper is drawn. </para>
/// </summary>
/// <java-name>
/// getSurfaceHolder
/// </java-name>
[Dot42.DexImport("getSurfaceHolder", "()Landroid/view/SurfaceHolder;", AccessFlags = 1)]
public virtual global::Android.View.ISurfaceHolder GetSurfaceHolder() /* MethodBuilder.Create */
{
return default(global::Android.View.ISurfaceHolder);
}
/// <summary>
/// <para>Convenience for WallpaperManager.getDesiredMinimumWidth(), returning the width that the system would like this wallpaper to run in. </para>
/// </summary>
/// <java-name>
/// getDesiredMinimumWidth
/// </java-name>
[Dot42.DexImport("getDesiredMinimumWidth", "()I", AccessFlags = 1)]
public virtual int GetDesiredMinimumWidth() /* MethodBuilder.Create */
{
return default(int);
}
/// <summary>
/// <para>Convenience for WallpaperManager.getDesiredMinimumHeight(), returning the height that the system would like this wallpaper to run in. </para>
/// </summary>
/// <java-name>
/// getDesiredMinimumHeight
/// </java-name>
[Dot42.DexImport("getDesiredMinimumHeight", "()I", AccessFlags = 1)]
public virtual int GetDesiredMinimumHeight() /* MethodBuilder.Create */
{
return default(int);
}
/// <summary>
/// <para>Return whether the wallpaper is currently visible to the user, this is the last value supplied to onVisibilityChanged(boolean). </para>
/// </summary>
/// <java-name>
/// isVisible
/// </java-name>
[Dot42.DexImport("isVisible", "()Z", AccessFlags = 1)]
public virtual bool IsVisible() /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Returns true if this engine is running in preview mode that is, it is being shown to the user before they select it as the actual wallpaper. </para>
/// </summary>
/// <java-name>
/// isPreview
/// </java-name>
[Dot42.DexImport("isPreview", "()Z", AccessFlags = 1)]
public virtual bool IsPreview() /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Control whether this wallpaper will receive raw touch events from the window manager as the user interacts with the window that is currently displaying the wallpaper. By default they are turned off. If enabled, the events will be received in onTouchEvent(MotionEvent). </para>
/// </summary>
/// <java-name>
/// setTouchEventsEnabled
/// </java-name>
[Dot42.DexImport("setTouchEventsEnabled", "(Z)V", AccessFlags = 1)]
public virtual void SetTouchEventsEnabled(bool enabled) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called once to initialize the engine. After returning, the engine's surface will be created by the framework. </para>
/// </summary>
/// <java-name>
/// onCreate
/// </java-name>
[Dot42.DexImport("onCreate", "(Landroid/view/SurfaceHolder;)V", AccessFlags = 1)]
public virtual void OnCreate(global::Android.View.ISurfaceHolder surfaceHolder) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called right before the engine is going away. After this the surface will be destroyed and this Engine object is no longer valid. </para>
/// </summary>
/// <java-name>
/// onDestroy
/// </java-name>
[Dot42.DexImport("onDestroy", "()V", AccessFlags = 1)]
public virtual void OnDestroy() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called to inform you of the wallpaper becoming visible or hidden. <b>It is very important that a wallpaper only use CPU while it is visible.</b>. </para>
/// </summary>
/// <java-name>
/// onVisibilityChanged
/// </java-name>
[Dot42.DexImport("onVisibilityChanged", "(Z)V", AccessFlags = 1)]
public virtual void OnVisibilityChanged(bool visible) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called as the user performs touch-screen interaction with the window that is currently showing this wallpaper. Note that the events you receive here are driven by the actual application the user is interacting with, so if it is slow you will get fewer move events. </para>
/// </summary>
/// <java-name>
/// onTouchEvent
/// </java-name>
[Dot42.DexImport("onTouchEvent", "(Landroid/view/MotionEvent;)V", AccessFlags = 1)]
public virtual void OnTouchEvent(global::Android.View.MotionEvent @event) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Called to inform you of the wallpaper's offsets changing within its contain, corresponding to the container's call to WallpaperManager.setWallpaperOffsets(). </para>
/// </summary>
/// <java-name>
/// onOffsetsChanged
/// </java-name>
[Dot42.DexImport("onOffsetsChanged", "(FFFFII)V", AccessFlags = 1)]
public virtual void OnOffsetsChanged(float xOffset, float yOffset, float xOffsetStep, float yOffsetStep, int xPixelOffset, int yPixelOffset) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Process a command that was sent to the wallpaper with WallpaperManager#sendWallpaperCommand. The default implementation does nothing, and always returns null as the result.</para><para></para>
/// </summary>
/// <returns>
/// <para>If returning a result, create a Bundle and place the result data in to it. Otherwise return null. </para>
/// </returns>
/// <java-name>
/// onCommand
/// </java-name>
[Dot42.DexImport("onCommand", "(Ljava/lang/String;IIILandroid/os/Bundle;Z)Landroid/os/Bundle;", AccessFlags = 1)]
public virtual global::Android.Os.Bundle OnCommand(string action, int x, int y, int z, global::Android.Os.Bundle extras, bool resultRequested) /* MethodBuilder.Create */
{
return default(global::Android.Os.Bundle);
}
/// <summary>
/// <para>Called when an application has changed the desired virtual size of the wallpaper. </para>
/// </summary>
/// <java-name>
/// onDesiredSizeChanged
/// </java-name>
[Dot42.DexImport("onDesiredSizeChanged", "(II)V", AccessFlags = 1)]
public virtual void OnDesiredSizeChanged(int desiredWidth, int desiredHeight) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Convenience for SurfaceHolder.Callback.surfaceChanged(). </para>
/// </summary>
/// <java-name>
/// onSurfaceChanged
/// </java-name>
[Dot42.DexImport("onSurfaceChanged", "(Landroid/view/SurfaceHolder;III)V", AccessFlags = 1)]
public virtual void OnSurfaceChanged(global::Android.View.ISurfaceHolder holder, int format, int width, int height) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Convenience for SurfaceHolder.Callback.surfaceRedrawNeeded(). </para>
/// </summary>
/// <java-name>
/// onSurfaceRedrawNeeded
/// </java-name>
[Dot42.DexImport("onSurfaceRedrawNeeded", "(Landroid/view/SurfaceHolder;)V", AccessFlags = 1)]
public virtual void OnSurfaceRedrawNeeded(global::Android.View.ISurfaceHolder holder) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Convenience for SurfaceHolder.Callback.surfaceCreated(). </para>
/// </summary>
/// <java-name>
/// onSurfaceCreated
/// </java-name>
[Dot42.DexImport("onSurfaceCreated", "(Landroid/view/SurfaceHolder;)V", AccessFlags = 1)]
public virtual void OnSurfaceCreated(global::Android.View.ISurfaceHolder holder) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Convenience for SurfaceHolder.Callback.surfaceDestroyed(). </para>
/// </summary>
/// <java-name>
/// onSurfaceDestroyed
/// </java-name>
[Dot42.DexImport("onSurfaceDestroyed", "(Landroid/view/SurfaceHolder;)V", AccessFlags = 1)]
public virtual void OnSurfaceDestroyed(global::Android.View.ISurfaceHolder holder) /* MethodBuilder.Create */
{
}
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal Engine() /* TypeBuilder.AddDefaultConstructor */
{
}
/// <summary>
/// <para>Provides access to the surface in which this wallpaper is drawn. </para>
/// </summary>
/// <java-name>
/// getSurfaceHolder
/// </java-name>
public global::Android.View.ISurfaceHolder SurfaceHolder
{
[Dot42.DexImport("getSurfaceHolder", "()Landroid/view/SurfaceHolder;", AccessFlags = 1)]
get{ return GetSurfaceHolder(); }
}
/// <summary>
/// <para>Convenience for WallpaperManager.getDesiredMinimumWidth(), returning the width that the system would like this wallpaper to run in. </para>
/// </summary>
/// <java-name>
/// getDesiredMinimumWidth
/// </java-name>
public int DesiredMinimumWidth
{
[Dot42.DexImport("getDesiredMinimumWidth", "()I", AccessFlags = 1)]
get{ return GetDesiredMinimumWidth(); }
}
/// <summary>
/// <para>Convenience for WallpaperManager.getDesiredMinimumHeight(), returning the height that the system would like this wallpaper to run in. </para>
/// </summary>
/// <java-name>
/// getDesiredMinimumHeight
/// </java-name>
public int DesiredMinimumHeight
{
[Dot42.DexImport("getDesiredMinimumHeight", "()I", AccessFlags = 1)]
get{ return GetDesiredMinimumHeight(); }
}
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics.Contracts;
using Microsoft.Research.DataStructures;
namespace Microsoft.Research.CodeAnalysis
{
public class FactQueryForOverflow<Variable>
: IFactQueryForOverflow<BoxedExpression>
{
#region Private state
private readonly IFactQuery<BoxedExpression, Variable> FactQuery;
#endregion
#region Constructor
public FactQueryForOverflow(IFactQuery<BoxedExpression, Variable> facts)
{
Contract.Requires(facts != null);
this.FactQuery = facts;
}
#endregion
#region Implementation of the interface
public bool CanOverflow(APC pc, BoxedExpression exp)
{
var visitor = new CanOverflowVisitor(pc, this.FactQuery);
exp.Dispatch(visitor);
return visitor.CanOverflow;
}
public bool CanUnderflow(APC pc, BoxedExpression exp)
{
var visitor = new CanUndeflowVisitor(pc, this.FactQuery);
exp.Dispatch(visitor);
return visitor.CanUnderflow;
}
#endregion
#region Visitors
abstract class OverflowVisitorBase : IBoxedExpressionVisitor
{
#region State
protected bool Overflow { get; set; }
protected readonly APC pc;
protected readonly IFactQuery<BoxedExpression, Variable> Facts;
#endregion
#region Constructor
public OverflowVisitorBase(APC pc, IFactQuery<BoxedExpression, Variable> facts)
{
this.Overflow = true;
this.pc = pc;
this.Facts = facts;
}
#endregion
#region Special cases
abstract public void Binary(BinaryOperator binaryOperator, BoxedExpression left, BoxedExpression right, BoxedExpression parent);
abstract public void Unary(UnaryOperator unaryOperator, BoxedExpression argument, BoxedExpression parent);
#endregion
#region Common cases
public void Variable(object var, PathElement[] path, BoxedExpression parent)
{
this.Overflow = false;
}
public void Constant<Type>(Type type, object value, BoxedExpression parent)
{
this.Overflow = false;
}
public void SizeOf<Type>(Type type, int sizeAsConstant, BoxedExpression parent)
{
this.Overflow = false;
}
public void IsInst<Type>(Type type, BoxedExpression argument, BoxedExpression parent)
{
argument.Dispatch(this);
}
public void ArrayIndex<Type>(Type type, BoxedExpression array, BoxedExpression index, BoxedExpression parent)
{
index.Dispatch(this);
}
public void Result<Type>(Type type, BoxedExpression parent)
{
this.Overflow = false;
}
public void Old<Type>(Type type, BoxedExpression expression, BoxedExpression parent)
{
// should we use the entry point PC???
expression.Dispatch(this);
}
public void ValueAtReturn<Type>(Type type, BoxedExpression expression, BoxedExpression parent)
{
expression.Dispatch(this);
}
public void Assert(BoxedExpression condition, BoxedExpression parent)
{
condition.Dispatch(this);
}
public void Assume(BoxedExpression condition, BoxedExpression parent)
{
condition.Dispatch(this);
}
public void StatementSequence(IIndexable<BoxedExpression> statements, BoxedExpression parent)
{
for (var i = 0; i < statements.Count; i++)
{
statements[i].Dispatch(this);
if (this.Overflow == true)
return;
}
}
public void ForAll(BoxedExpression boundVariable, BoxedExpression lower, BoxedExpression upper, BoxedExpression body, BoxedExpression parent)
{
boundVariable.Dispatch(this);
if (this.Overflow == true)
{
return;
}
lower.Dispatch(this);
if (this.Overflow == true)
{
return;
}
upper.Dispatch(this);
if (this.Overflow == true)
{
return;
}
body.Dispatch(this);
if (this.Overflow == true)
{
return;
}
}
#endregion
}
class CanOverflowVisitor : OverflowVisitorBase
{
public bool CanOverflow { get { return this.Overflow; } }
public CanOverflowVisitor(APC pc, IFactQuery<BoxedExpression, Variable> facts)
: base(pc, facts)
{
}
override public void Binary(BinaryOperator binaryOperator, BoxedExpression left, BoxedExpression right, BoxedExpression parent)
{
left.Dispatch(this);
if (this.CanOverflow)
return;
right.Dispatch(this);
if (this.CanOverflow)
return;
switch (binaryOperator)
{
case BinaryOperator.Add:
case BinaryOperator.Add_Ovf:
case BinaryOperator.Add_Ovf_Un:
{
// different signs or not negative ==> no overflow
int leftSign, rightSign;
if (Facts.TrySign(pc, left, out leftSign) && Facts.TrySign(pc, right, out rightSign))
{
if ((leftSign <= 0 && rightSign <= 0) || leftSign * rightSign == -1)
{
this.Overflow = false;
return;
}
}
// TODO: improve using upper bounds
this.Overflow = true;
return;
}
case BinaryOperator.Div:
case BinaryOperator.Div_Un:
case BinaryOperator.Rem:
case BinaryOperator.Rem_Un:
{
// TODO: improve using upper bounds
if (Facts.IsNonZero(pc, right) == ProofOutcome.True)
{
this.Overflow = false;
}
this.Overflow = true;
return;
}
case BinaryOperator.Mul:
case BinaryOperator.Mul_Ovf:
case BinaryOperator.Mul_Ovf_Un:
{
this.Overflow = true;
return;
}
case BinaryOperator.Sub:
case BinaryOperator.Sub_Ovf:
case BinaryOperator.Sub_Ovf_Un:
{
int rightSign;
if (Facts.TrySign(pc, right, out rightSign) && rightSign >= 0)
{
this.Overflow = false;
return;
}
this.Overflow = true;
return;
}
case BinaryOperator.And:
case BinaryOperator.Ceq:
case BinaryOperator.Cge:
case BinaryOperator.Cge_Un:
case BinaryOperator.Cgt:
case BinaryOperator.Cgt_Un:
case BinaryOperator.Cle:
case BinaryOperator.Cle_Un:
case BinaryOperator.Clt:
case BinaryOperator.Clt_Un:
case BinaryOperator.Cne_Un:
case BinaryOperator.Cobjeq:
case BinaryOperator.LogicalAnd:
case BinaryOperator.LogicalOr:
case BinaryOperator.Or:
case BinaryOperator.Shl:
case BinaryOperator.Shr:
case BinaryOperator.Shr_Un:
case BinaryOperator.Xor:
{
this.Overflow = false;
return;
}
default:
{
this.Overflow = true;
return;
}
}
}
override public void Unary(UnaryOperator unaryOperator, BoxedExpression argument, BoxedExpression parent)
{
argument.Dispatch(this);
}
}
class CanUndeflowVisitor : OverflowVisitorBase
{
public bool CanUnderflow { get { return this.Overflow; } }
public CanUndeflowVisitor(APC pc, IFactQuery<BoxedExpression, Variable> facts)
: base(pc, facts)
{
}
public override void Binary(BinaryOperator binaryOperator, BoxedExpression left, BoxedExpression right, BoxedExpression parent)
{
left.Dispatch(this);
if (this.Overflow)
return;
right.Dispatch(this);
if (this.Overflow)
return;
switch (binaryOperator)
{
case BinaryOperator.Add:
case BinaryOperator.Add_Ovf:
case BinaryOperator.Add_Ovf_Un:
{
// different signs, or both positive ==> no overflow
int leftSign, rightSign;
if (Facts.TrySign(pc, left, out leftSign) && Facts.TrySign(pc, right, out rightSign))
{
if (leftSign >= 0 || rightSign >= 0)
{
this.Overflow = false;
return;
}
}
// TODO: improve using upper bounds
this.Overflow = true;
return;
}
case BinaryOperator.Div:
case BinaryOperator.Div_Un:
case BinaryOperator.Rem:
case BinaryOperator.Rem_Un:
{
// TODO: improve using upper bounds
if (Facts.IsNonZero(pc, right) == ProofOutcome.True)
{
this.Overflow = false;
}
this.Overflow = true;
return;
}
case BinaryOperator.Mul:
case BinaryOperator.Mul_Ovf:
case BinaryOperator.Mul_Ovf_Un:
{
// TODO: improve using upper bounds
this.Overflow = true;
return;
}
case BinaryOperator.Sub:
case BinaryOperator.Sub_Ovf:
case BinaryOperator.Sub_Ovf_Un:
{
int leftSign, rightSign;
// if left is non-negative or right is non-positive ==> no undeflow
if (Facts.TrySign(pc, left, out leftSign) && Facts.TrySign(pc, right, out rightSign))
{
if (leftSign >= 0 || rightSign <= 0)
{
this.Overflow = false;
return;
}
}
this.Overflow = true;
return;
}
case BinaryOperator.And:
case BinaryOperator.Ceq:
case BinaryOperator.Cge:
case BinaryOperator.Cge_Un:
case BinaryOperator.Cgt:
case BinaryOperator.Cgt_Un:
case BinaryOperator.Cle:
case BinaryOperator.Cle_Un:
case BinaryOperator.Clt:
case BinaryOperator.Clt_Un:
case BinaryOperator.Cne_Un:
case BinaryOperator.Cobjeq:
case BinaryOperator.LogicalAnd:
case BinaryOperator.LogicalOr:
case BinaryOperator.Or:
case BinaryOperator.Shl:
case BinaryOperator.Shr:
case BinaryOperator.Shr_Un:
case BinaryOperator.Xor:
{
this.Overflow = false;
return;
}
default:
{
this.Overflow = true;
return;
}
}
}
public override void Unary(UnaryOperator unaryOperator, BoxedExpression argument, BoxedExpression parent)
{
// TODO: check for negation of minValue
argument.Dispatch(this);
}
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics;
using System.IO;
using System.Net.Security;
using System.Runtime.InteropServices;
using System.Security.Cryptography.X509Certificates;
using SafeWinHttpHandle = Interop.WinHttp.SafeWinHttpHandle;
namespace System.Net.Http
{
/// <summary>
/// Static class containing the WinHttp global callback and associated routines.
/// </summary>
internal static class WinHttpRequestCallback
{
public static Interop.WinHttp.WINHTTP_STATUS_CALLBACK StaticCallbackDelegate =
new Interop.WinHttp.WINHTTP_STATUS_CALLBACK(WinHttpCallback);
public static void WinHttpCallback(
IntPtr handle,
IntPtr context,
uint internetStatus,
IntPtr statusInformation,
uint statusInformationLength)
{
WinHttpTraceHelper.TraceCallbackStatus("WinHttpCallback", handle, context, internetStatus);
if (Environment.HasShutdownStarted)
{
WinHttpTraceHelper.Trace("WinHttpCallback: Environment.HasShutdownStarted returned True");
return;
}
if (context == IntPtr.Zero)
{
return;
}
WinHttpRequestState state = WinHttpRequestState.FromIntPtr(context);
Debug.Assert(state != null, "WinHttpCallback must have a non-null state object");
RequestCallback(handle, state, internetStatus, statusInformation, statusInformationLength);
}
private static void RequestCallback(
IntPtr handle,
WinHttpRequestState state,
uint internetStatus,
IntPtr statusInformation,
uint statusInformationLength)
{
try
{
switch (internetStatus)
{
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_HANDLE_CLOSING:
OnRequestHandleClosing(state);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_SENDREQUEST_COMPLETE:
OnRequestSendRequestComplete(state);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_DATA_AVAILABLE:
Debug.Assert(statusInformationLength == Marshal.SizeOf<int>());
int bytesAvailable = Marshal.ReadInt32(statusInformation);
OnRequestDataAvailable(state, bytesAvailable);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_READ_COMPLETE:
OnRequestReadComplete(state, statusInformationLength);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_WRITE_COMPLETE:
OnRequestWriteComplete(state);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_HEADERS_AVAILABLE:
OnRequestReceiveResponseHeadersComplete(state);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_REDIRECT:
string redirectUriString = Marshal.PtrToStringUni(statusInformation);
var redirectUri = new Uri(redirectUriString);
OnRequestRedirect(state, redirectUri);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_SENDING_REQUEST:
OnRequestSendingRequest(state);
return;
case Interop.WinHttp.WINHTTP_CALLBACK_STATUS_REQUEST_ERROR:
Debug.Assert(
statusInformationLength == Marshal.SizeOf<Interop.WinHttp.WINHTTP_ASYNC_RESULT>(),
"RequestCallback: statusInformationLength=" + statusInformationLength +
" must be sizeof(WINHTTP_ASYNC_RESULT)=" + Marshal.SizeOf<Interop.WinHttp.WINHTTP_ASYNC_RESULT>());
var asyncResult = Marshal.PtrToStructure<Interop.WinHttp.WINHTTP_ASYNC_RESULT>(statusInformation);
OnRequestError(state, asyncResult);
return;
default:
return;
}
}
catch (Exception ex)
{
Interop.WinHttp.WinHttpCloseHandle(handle);
state.SavedException = ex;
}
}
private static void OnRequestHandleClosing(WinHttpRequestState state)
{
Debug.Assert(state != null, "OnRequestSendRequestComplete: state is null");
// This is the last notification callback that WinHTTP will send. Therefore, we can
// now explicitly dispose the state object which will free its corresponding GCHandle.
// This will then allow the state object to be garbage collected.
state.Dispose();
}
private static void OnRequestSendRequestComplete(WinHttpRequestState state)
{
Debug.Assert(state != null, "OnRequestSendRequestComplete: state is null");
Debug.Assert(state.TcsSendRequest != null, "OnRequestSendRequestComplete: TcsSendRequest is null");
Debug.Assert(!state.TcsSendRequest.Task.IsCompleted, "OnRequestSendRequestComplete: TcsSendRequest.Task is completed");
state.TcsSendRequest.TrySetResult(true);
}
private static void OnRequestDataAvailable(WinHttpRequestState state, int bytesAvailable)
{
Debug.Assert(state != null, "OnRequestDataAvailable: state is null");
Debug.Assert(state.TcsQueryDataAvailable != null, "TcsQueryDataAvailable is null");
Debug.Assert(!state.TcsQueryDataAvailable.Task.IsCompleted, "TcsQueryDataAvailable.Task is completed");
state.TcsQueryDataAvailable.TrySetResult(bytesAvailable);
}
private static void OnRequestReadComplete(WinHttpRequestState state, uint bytesRead)
{
Debug.Assert(state != null, "OnRequestReadComplete: state is null");
Debug.Assert(state.TcsReadFromResponseStream != null, "TcsReadFromResponseStream is null");
Debug.Assert(!state.TcsReadFromResponseStream.Task.IsCompleted, "TcsReadFromResponseStream.Task is completed");
state.DisposeCtrReadFromResponseStream();
// If we read to the end of the stream and we're using 'Content-Length' semantics on the response body,
// then verify we read at least the number of bytes required.
if (bytesRead == 0
&& state.ExpectedBytesToRead.HasValue
&& state.CurrentBytesRead < state.ExpectedBytesToRead.Value)
{
state.TcsReadFromResponseStream.TrySetException(
new IOException(string.Format(
SR.net_http_io_read_incomplete,
state.ExpectedBytesToRead.Value,
state.CurrentBytesRead)).InitializeStackTrace());
}
else
{
state.CurrentBytesRead += (long)bytesRead;
state.TcsReadFromResponseStream.TrySetResult((int)bytesRead);
}
}
private static void OnRequestWriteComplete(WinHttpRequestState state)
{
Debug.Assert(state != null, "OnRequestWriteComplete: state is null");
Debug.Assert(state.TcsInternalWriteDataToRequestStream != null, "TcsInternalWriteDataToRequestStream is null");
Debug.Assert(!state.TcsInternalWriteDataToRequestStream.Task.IsCompleted, "TcsInternalWriteDataToRequestStream.Task is completed");
state.TcsInternalWriteDataToRequestStream.TrySetResult(true);
}
private static void OnRequestReceiveResponseHeadersComplete(WinHttpRequestState state)
{
Debug.Assert(state != null, "OnRequestReceiveResponseHeadersComplete: state is null");
Debug.Assert(state.TcsReceiveResponseHeaders != null, "TcsReceiveResponseHeaders is null");
Debug.Assert(!state.TcsReceiveResponseHeaders.Task.IsCompleted, "TcsReceiveResponseHeaders.Task is completed");
state.TcsReceiveResponseHeaders.TrySetResult(true);
}
private static void OnRequestRedirect(WinHttpRequestState state, Uri redirectUri)
{
Debug.Assert(state != null, "OnRequestRedirect: state is null");
Debug.Assert(redirectUri != null, "OnRequestRedirect: redirectUri is null");
Debug.Assert(state.TcsReceiveResponseHeaders != null, "TcsReceiveResponseHeaders is null");
Debug.Assert(!state.TcsReceiveResponseHeaders.Task.IsCompleted, "TcsReceiveResponseHeaders.Task is completed");
// If we're manually handling cookies, we need to reset them based on the new URI.
if (state.Handler.CookieUsePolicy == CookieUsePolicy.UseSpecifiedCookieContainer)
{
// Add any cookies that may have arrived with redirect response.
WinHttpCookieContainerAdapter.AddResponseCookiesToContainer(state);
// Reset cookie request headers based on redirectUri.
WinHttpCookieContainerAdapter.ResetCookieRequestHeaders(state, redirectUri);
}
state.RequestMessage.RequestUri = redirectUri;
// Redirection to a new uri may require a new connection through a potentially different proxy.
// If so, we will need to respond to additional 407 proxy auth demands and re-attach any
// proxy credentials. The ProcessResponse() method looks at the state.LastStatusCode
// before attaching proxy credentials and marking the HTTP request to be re-submitted.
// So we need to reset the LastStatusCode remembered. Otherwise, it will see additional 407
// responses as an indication that proxy auth failed and won't retry the HTTP request.
if (state.LastStatusCode == HttpStatusCode.ProxyAuthenticationRequired)
{
state.LastStatusCode = 0;
}
// For security reasons, we drop the server credential if it is a
// NetworkCredential. But we allow credentials in a CredentialCache
// since they are specifically tied to URI's.
if (!(state.ServerCredentials is CredentialCache))
{
state.ServerCredentials = null;
}
}
private static void OnRequestSendingRequest(WinHttpRequestState state)
{
Debug.Assert(state != null, "OnRequestSendingRequest: state is null");
Debug.Assert(state.RequestHandle != null, "OnRequestSendingRequest: state.RequestHandle is null");
if (state.RequestMessage.RequestUri.Scheme != UriScheme.Https)
{
// Not SSL/TLS.
return;
}
// Grab the channel binding token (CBT) information from the request handle and put it into
// the TransportContext object.
state.TransportContext.SetChannelBinding(state.RequestHandle);
if (state.ServerCertificateValidationCallback != null)
{
IntPtr certHandle = IntPtr.Zero;
uint certHandleSize = (uint)IntPtr.Size;
if (!Interop.WinHttp.WinHttpQueryOption(
state.RequestHandle,
Interop.WinHttp.WINHTTP_OPTION_SERVER_CERT_CONTEXT,
ref certHandle,
ref certHandleSize))
{
int lastError = Marshal.GetLastWin32Error();
throw WinHttpException.CreateExceptionUsingError(lastError);
}
// Create a managed wrapper around the certificate handle. Since this results in duplicating
// the handle, we will close the original handle after creating the wrapper.
var serverCertificate = new X509Certificate2(certHandle);
Interop.Crypt32.CertFreeCertificateContext(certHandle);
X509Chain chain = null;
SslPolicyErrors sslPolicyErrors;
try
{
WinHttpCertificateHelper.BuildChain(
serverCertificate,
state.RequestMessage.RequestUri.Host,
state.CheckCertificateRevocationList,
out chain,
out sslPolicyErrors);
bool result = state.ServerCertificateValidationCallback(
state.RequestMessage,
serverCertificate,
chain,
sslPolicyErrors);
if (!result)
{
throw WinHttpException.CreateExceptionUsingError(
(int)Interop.WinHttp.ERROR_WINHTTP_SECURE_FAILURE);
}
}
finally
{
if (chain != null)
{
chain.Dispose();
}
serverCertificate.Dispose();
}
}
}
private static void OnRequestError(WinHttpRequestState state, Interop.WinHttp.WINHTTP_ASYNC_RESULT asyncResult)
{
WinHttpTraceHelper.TraceAsyncError("OnRequestError", asyncResult);
Debug.Assert(state != null, "OnRequestError: state is null");
var innerException = WinHttpException.CreateExceptionUsingError((int)asyncResult.dwError).InitializeStackTrace();
switch ((uint)asyncResult.dwResult.ToInt32())
{
case Interop.WinHttp.API_SEND_REQUEST:
state.TcsSendRequest.TrySetException(innerException);
break;
case Interop.WinHttp.API_RECEIVE_RESPONSE:
if (asyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_RESEND_REQUEST)
{
state.RetryRequest = true;
state.TcsReceiveResponseHeaders.TrySetResult(false);
}
else if (asyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_CLIENT_AUTH_CERT_NEEDED)
{
// WinHttp will automatically drop any client SSL certificates that we
// have pre-set into the request handle including the NULL certificate
// (which means we have no certs to send). For security reasons, we don't
// allow the certificate to be re-applied. But we need to tell WinHttp
// explicitly that we don't have any certificate to send.
Debug.Assert(state.RequestHandle != null, "OnRequestError: state.RequestHandle is null");
WinHttpHandler.SetNoClientCertificate(state.RequestHandle);
state.RetryRequest = true;
state.TcsReceiveResponseHeaders.TrySetResult(false);
}
else if (asyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_OPERATION_CANCELLED)
{
state.TcsReceiveResponseHeaders.TrySetCanceled(state.CancellationToken);
}
else
{
state.TcsReceiveResponseHeaders.TrySetException(innerException);
}
break;
case Interop.WinHttp.API_QUERY_DATA_AVAILABLE:
if (asyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_OPERATION_CANCELLED)
{
// TODO: Issue #2165. We need to pass in the cancellation token from the
// user's ReadAsync() call into the TrySetCanceled().
Debug.WriteLine("RequestCallback: QUERY_DATA_AVAILABLE - ERROR_WINHTTP_OPERATION_CANCELLED");
state.TcsQueryDataAvailable.TrySetCanceled();
}
else
{
state.TcsQueryDataAvailable.TrySetException(
new IOException(SR.net_http_io_read, innerException));
}
break;
case Interop.WinHttp.API_READ_DATA:
state.DisposeCtrReadFromResponseStream();
if (asyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_OPERATION_CANCELLED)
{
// TODO: Issue #2165. We need to pass in the cancellation token from the
// user's ReadAsync() call into the TrySetCanceled().
Debug.WriteLine("RequestCallback: API_READ_DATA - ERROR_WINHTTP_OPERATION_CANCELLED");
state.TcsReadFromResponseStream.TrySetCanceled();
}
else
{
state.TcsReadFromResponseStream.TrySetException(
new IOException(SR.net_http_io_read, innerException));
}
break;
case Interop.WinHttp.API_WRITE_DATA:
if (asyncResult.dwError == Interop.WinHttp.ERROR_WINHTTP_OPERATION_CANCELLED)
{
// TODO: Issue #2165. We need to pass in the cancellation token from the
// user's WriteAsync() call into the TrySetCanceled().
Debug.WriteLine("RequestCallback: API_WRITE_DATA - ERROR_WINHTTP_OPERATION_CANCELLED");
state.TcsInternalWriteDataToRequestStream.TrySetCanceled();
}
else
{
state.TcsInternalWriteDataToRequestStream.TrySetException(
new IOException(SR.net_http_io_write, innerException));
}
break;
default:
Debug.Fail(
"OnRequestError: Result (" + asyncResult.dwResult + ") is not expected.",
"Error code: " + asyncResult.dwError + " (" + innerException.Message + ")");
break;
}
}
}
}
| |
namespace Azure.Management.Resources
{
public partial class DeploymentOperations
{
protected DeploymentOperations() { }
public virtual Azure.Response<Azure.Management.Resources.Models.DeploymentOperation> Get(string resourceGroupName, string deploymentName, string operationId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.DeploymentOperation>> GetAsync(string resourceGroupName, string deploymentName, string operationId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.DeploymentOperation> List(string resourceGroupName, string deploymentName, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.DeploymentOperation> ListAsync(string resourceGroupName, string deploymentName, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class DeploymentsCreateOrUpdateOperation : Azure.Operation<Azure.Management.Resources.Models.DeploymentExtended>
{
internal DeploymentsCreateOrUpdateOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Management.Resources.Models.DeploymentExtended Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.DeploymentExtended>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.DeploymentExtended>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class DeploymentsDeleteOperation : Azure.Operation<Azure.Response>
{
internal DeploymentsDeleteOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Response Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class DeploymentsOperations
{
protected DeploymentsOperations() { }
public virtual Azure.Response<Azure.Management.Resources.Models.TemplateHashResult> CalculateTemplateHash(object template, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.TemplateHashResult>> CalculateTemplateHashAsync(object template, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response Cancel(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CancelAsync(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response CheckExistence(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CheckExistenceAsync(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.DeploymentExportResult> ExportTemplate(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.DeploymentExportResult>> ExportTemplateAsync(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.DeploymentExtended> Get(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.DeploymentExtended>> GetAsync(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.DeploymentExtended> ListByResourceGroup(string resourceGroupName, string filter = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.DeploymentExtended> ListByResourceGroupAsync(string resourceGroupName, string filter = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.DeploymentsCreateOrUpdateOperation StartCreateOrUpdate(string resourceGroupName, string deploymentName, Azure.Management.Resources.Models.Deployment parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.DeploymentsCreateOrUpdateOperation> StartCreateOrUpdateAsync(string resourceGroupName, string deploymentName, Azure.Management.Resources.Models.Deployment parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.DeploymentsDeleteOperation StartDelete(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.DeploymentsDeleteOperation> StartDeleteAsync(string resourceGroupName, string deploymentName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.DeploymentValidateResult> Validate(string resourceGroupName, string deploymentName, Azure.Management.Resources.Models.Deployment parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.DeploymentValidateResult>> ValidateAsync(string resourceGroupName, string deploymentName, Azure.Management.Resources.Models.Deployment parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ProvidersOperations
{
protected ProvidersOperations() { }
public virtual Azure.Response<Azure.Management.Resources.Models.Provider> Get(string resourceProviderNamespace, string expand = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.Provider>> GetAsync(string resourceProviderNamespace, string expand = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.Provider> List(int? top = default(int?), string expand = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.Provider> ListAsync(int? top = default(int?), string expand = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.Provider> Register(string resourceProviderNamespace, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.Provider>> RegisterAsync(string resourceProviderNamespace, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.Provider> Unregister(string resourceProviderNamespace, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.Provider>> UnregisterAsync(string resourceProviderNamespace, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourceGroupsDeleteOperation : Azure.Operation<Azure.Response>
{
internal ResourceGroupsDeleteOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Response Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourceGroupsOperations
{
protected ResourceGroupsOperations() { }
public virtual Azure.Response CheckExistence(string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CheckExistenceAsync(string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.ResourceGroup> CreateOrUpdate(string resourceGroupName, Azure.Management.Resources.Models.ResourceGroup parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.ResourceGroup>> CreateOrUpdateAsync(string resourceGroupName, Azure.Management.Resources.Models.ResourceGroup parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.ResourceGroupExportResult> ExportTemplate(string resourceGroupName, Azure.Management.Resources.Models.ExportTemplateRequest parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.ResourceGroupExportResult>> ExportTemplateAsync(string resourceGroupName, Azure.Management.Resources.Models.ExportTemplateRequest parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.ResourceGroup> Get(string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.ResourceGroup>> GetAsync(string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.ResourceGroup> List(string filter = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.ResourceGroup> ListAsync(string filter = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourceGroupsDeleteOperation StartDelete(string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourceGroupsDeleteOperation> StartDeleteAsync(string resourceGroupName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.ResourceGroup> Update(string resourceGroupName, Azure.Management.Resources.Models.ResourceGroupPatchable parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.ResourceGroup>> UpdateAsync(string resourceGroupName, Azure.Management.Resources.Models.ResourceGroupPatchable parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesCreateOrUpdateByIdOperation : Azure.Operation<Azure.Management.Resources.Models.GenericResource>
{
internal ResourcesCreateOrUpdateByIdOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Management.Resources.Models.GenericResource Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesCreateOrUpdateOperation : Azure.Operation<Azure.Management.Resources.Models.GenericResource>
{
internal ResourcesCreateOrUpdateOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Management.Resources.Models.GenericResource Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesDeleteByIdOperation : Azure.Operation<Azure.Response>
{
internal ResourcesDeleteByIdOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Response Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesDeleteOperation : Azure.Operation<Azure.Response>
{
internal ResourcesDeleteOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Response Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesManagementClient
{
protected ResourcesManagementClient() { }
public ResourcesManagementClient(string subscriptionId, Azure.Core.TokenCredential tokenCredential, Azure.Management.Resources.ResourcesManagementClientOptions options = null) { }
public ResourcesManagementClient(string subscriptionId, System.Uri endpoint, Azure.Core.TokenCredential tokenCredential, Azure.Management.Resources.ResourcesManagementClientOptions options = null) { }
public virtual Azure.Management.Resources.DeploymentOperations Deployment { get { throw null; } }
public virtual Azure.Management.Resources.DeploymentsOperations Deployments { get { throw null; } }
public virtual Azure.Management.Resources.ProvidersOperations Providers { get { throw null; } }
public virtual Azure.Management.Resources.ResourceGroupsOperations ResourceGroups { get { throw null; } }
public virtual Azure.Management.Resources.ResourcesOperations Resources { get { throw null; } }
public virtual Azure.Management.Resources.TagsOperations Tags { get { throw null; } }
}
public partial class ResourcesManagementClientOptions : Azure.Core.ClientOptions
{
public ResourcesManagementClientOptions() { }
}
public partial class ResourcesMoveResourcesOperation : Azure.Operation<Azure.Response>
{
internal ResourcesMoveResourcesOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Response Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesOperations
{
protected ResourcesOperations() { }
public virtual Azure.Response CheckExistence(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CheckExistenceAsync(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response CheckExistenceById(string resourceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> CheckExistenceByIdAsync(string resourceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.GenericResource> Get(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.GenericResource>> GetAsync(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.GenericResource> GetById(string resourceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.GenericResource>> GetByIdAsync(string resourceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.GenericResourceExpanded> List(string filter = null, string expand = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.GenericResourceExpanded> ListAsync(string filter = null, string expand = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.GenericResourceExpanded> ListByResourceGroup(string resourceGroupName, string filter = null, string expand = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.GenericResourceExpanded> ListByResourceGroupAsync(string resourceGroupName, string filter = null, string expand = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesCreateOrUpdateOperation StartCreateOrUpdate(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesCreateOrUpdateOperation> StartCreateOrUpdateAsync(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesCreateOrUpdateByIdOperation StartCreateOrUpdateById(string resourceId, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesCreateOrUpdateByIdOperation> StartCreateOrUpdateByIdAsync(string resourceId, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesDeleteOperation StartDelete(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesDeleteOperation> StartDeleteAsync(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesDeleteByIdOperation StartDeleteById(string resourceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesDeleteByIdOperation> StartDeleteByIdAsync(string resourceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesMoveResourcesOperation StartMoveResources(string sourceResourceGroupName, Azure.Management.Resources.Models.ResourcesMoveInfo parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesMoveResourcesOperation> StartMoveResourcesAsync(string sourceResourceGroupName, Azure.Management.Resources.Models.ResourcesMoveInfo parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesUpdateOperation StartUpdate(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesUpdateOperation> StartUpdateAsync(string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesUpdateByIdOperation StartUpdateById(string resourceId, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesUpdateByIdOperation> StartUpdateByIdAsync(string resourceId, Azure.Management.Resources.Models.GenericResource parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Management.Resources.ResourcesValidateMoveResourcesOperation StartValidateMoveResources(string sourceResourceGroupName, Azure.Management.Resources.Models.ResourcesMoveInfo parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Management.Resources.ResourcesValidateMoveResourcesOperation> StartValidateMoveResourcesAsync(string sourceResourceGroupName, Azure.Management.Resources.Models.ResourcesMoveInfo parameters, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesUpdateByIdOperation : Azure.Operation<Azure.Management.Resources.Models.GenericResource>
{
internal ResourcesUpdateByIdOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Management.Resources.Models.GenericResource Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesUpdateOperation : Azure.Operation<Azure.Management.Resources.Models.GenericResource>
{
internal ResourcesUpdateOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Management.Resources.Models.GenericResource Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Management.Resources.Models.GenericResource>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class ResourcesValidateMoveResourcesOperation : Azure.Operation<Azure.Response>
{
internal ResourcesValidateMoveResourcesOperation() { }
public override bool HasCompleted { get { throw null; } }
public override bool HasValue { get { throw null; } }
public override string Id { get { throw null; } }
public override Azure.Response Value { get { throw null; } }
public override Azure.Response GetRawResponse() { throw null; }
public override Azure.Response UpdateStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response> UpdateStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public override System.Threading.Tasks.ValueTask<Azure.Response<Azure.Response>> WaitForCompletionAsync(System.TimeSpan pollingInterval, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
public partial class TagsOperations
{
protected TagsOperations() { }
public virtual Azure.Response<Azure.Management.Resources.Models.TagDetails> CreateOrUpdate(string tagName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.TagDetails>> CreateOrUpdateAsync(string tagName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response<Azure.Management.Resources.Models.TagValue> CreateOrUpdateValue(string tagName, string tagValue, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response<Azure.Management.Resources.Models.TagValue>> CreateOrUpdateValueAsync(string tagName, string tagValue, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response Delete(string tagName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> DeleteAsync(string tagName, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Response DeleteValue(string tagName, string tagValue, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual System.Threading.Tasks.Task<Azure.Response> DeleteValueAsync(string tagName, string tagValue, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.Pageable<Azure.Management.Resources.Models.TagDetails> List(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
public virtual Azure.AsyncPageable<Azure.Management.Resources.Models.TagDetails> ListAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
}
}
namespace Azure.Management.Resources.Models
{
public partial class AliasPathType
{
internal AliasPathType() { }
public System.Collections.Generic.IReadOnlyList<string> ApiVersions { get { throw null; } }
public string Path { get { throw null; } }
}
public partial class AliasType
{
internal AliasType() { }
public string Name { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.AliasPathType> Paths { get { throw null; } }
}
public partial class BasicDependency
{
internal BasicDependency() { }
public string Id { get { throw null; } }
public string ResourceName { get { throw null; } }
public string ResourceType { get { throw null; } }
}
public partial class DebugSetting
{
public DebugSetting() { }
public string DetailLevel { get { throw null; } set { } }
}
public partial class Dependency
{
internal Dependency() { }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.BasicDependency> DependsOn { get { throw null; } }
public string Id { get { throw null; } }
public string ResourceName { get { throw null; } }
public string ResourceType { get { throw null; } }
}
public partial class Deployment
{
public Deployment(Azure.Management.Resources.Models.DeploymentProperties properties) { }
public Azure.Management.Resources.Models.DeploymentProperties Properties { get { throw null; } }
}
public partial class DeploymentExportResult
{
internal DeploymentExportResult() { }
public object Template { get { throw null; } }
}
public partial class DeploymentExtended
{
internal DeploymentExtended() { }
public string Id { get { throw null; } }
public string Name { get { throw null; } }
public Azure.Management.Resources.Models.DeploymentPropertiesExtended Properties { get { throw null; } }
}
public partial class DeploymentListResult
{
internal DeploymentListResult() { }
public string NextLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.DeploymentExtended> Value { get { throw null; } }
}
public enum DeploymentMode
{
Incremental = 0,
Complete = 1,
}
public partial class DeploymentOperation
{
internal DeploymentOperation() { }
public string Id { get { throw null; } }
public string OperationId { get { throw null; } }
public Azure.Management.Resources.Models.DeploymentOperationProperties Properties { get { throw null; } }
}
public partial class DeploymentOperationProperties
{
internal DeploymentOperationProperties() { }
public string ProvisioningState { get { throw null; } }
public Azure.Management.Resources.Models.HttpMessage Request { get { throw null; } }
public Azure.Management.Resources.Models.HttpMessage Response { get { throw null; } }
public string ServiceRequestId { get { throw null; } }
public string StatusCode { get { throw null; } }
public object StatusMessage { get { throw null; } }
public Azure.Management.Resources.Models.TargetResource TargetResource { get { throw null; } }
public System.DateTimeOffset? Timestamp { get { throw null; } }
}
public partial class DeploymentOperationsListResult
{
internal DeploymentOperationsListResult() { }
public string NextLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.DeploymentOperation> Value { get { throw null; } }
}
public partial class DeploymentProperties
{
public DeploymentProperties(Azure.Management.Resources.Models.DeploymentMode mode) { }
public Azure.Management.Resources.Models.DebugSetting DebugSetting { get { throw null; } set { } }
public Azure.Management.Resources.Models.DeploymentMode Mode { get { throw null; } }
public string Parameters { get { throw null; } set { } }
public Azure.Management.Resources.Models.ParametersLink ParametersLink { get { throw null; } set { } }
public string Template { get { throw null; } set { } }
public Azure.Management.Resources.Models.TemplateLink TemplateLink { get { throw null; } set { } }
}
public partial class DeploymentPropertiesExtended
{
internal DeploymentPropertiesExtended() { }
public string CorrelationId { get { throw null; } }
public Azure.Management.Resources.Models.DebugSetting DebugSetting { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.Dependency> Dependencies { get { throw null; } }
public Azure.Management.Resources.Models.DeploymentMode? Mode { get { throw null; } }
public object Outputs { get { throw null; } }
public object Parameters { get { throw null; } }
public Azure.Management.Resources.Models.ParametersLink ParametersLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.Provider> Providers { get { throw null; } }
public string ProvisioningState { get { throw null; } }
public object Template { get { throw null; } }
public Azure.Management.Resources.Models.TemplateLink TemplateLink { get { throw null; } }
public System.DateTimeOffset? Timestamp { get { throw null; } }
}
public partial class DeploymentValidateResult
{
internal DeploymentValidateResult() { }
public Azure.Management.Resources.Models.ResourceManagementErrorWithDetails Error { get { throw null; } }
public Azure.Management.Resources.Models.DeploymentPropertiesExtended Properties { get { throw null; } }
}
public partial class ExportTemplateRequest
{
public ExportTemplateRequest() { }
public string Options { get { throw null; } set { } }
public System.Collections.Generic.IList<string> Resources { get { throw null; } set { } }
}
public partial class GenericResource : Azure.Management.Resources.Models.Resource
{
public GenericResource() { }
public Azure.Management.Resources.Models.Identity Identity { get { throw null; } set { } }
public string Kind { get { throw null; } set { } }
public string ManagedBy { get { throw null; } set { } }
public Azure.Management.Resources.Models.Plan Plan { get { throw null; } set { } }
public object Properties { get { throw null; } set { } }
public Azure.Management.Resources.Models.Sku Sku { get { throw null; } set { } }
}
public partial class GenericResourceExpanded : Azure.Management.Resources.Models.GenericResource
{
public GenericResourceExpanded() { }
public System.DateTimeOffset? ChangedTime { get { throw null; } }
public System.DateTimeOffset? CreatedTime { get { throw null; } }
public string ProvisioningState { get { throw null; } }
}
public partial class HttpMessage
{
internal HttpMessage() { }
public object Content { get { throw null; } }
}
public partial class Identity
{
public Identity() { }
public string PrincipalId { get { throw null; } }
public string TenantId { get { throw null; } }
public string Type { get { throw null; } set { } }
}
public partial class ParametersLink
{
public ParametersLink(string uri) { }
public string ContentVersion { get { throw null; } set { } }
public string Uri { get { throw null; } set { } }
}
public partial class Plan
{
public Plan() { }
public string Name { get { throw null; } set { } }
public string Product { get { throw null; } set { } }
public string PromotionCode { get { throw null; } set { } }
public string Publisher { get { throw null; } set { } }
public string Version { get { throw null; } set { } }
}
public partial class Provider
{
internal Provider() { }
public string Id { get { throw null; } }
public string Namespace { get { throw null; } }
public string RegistrationState { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.ProviderResourceType> ResourceTypes { get { throw null; } }
}
public partial class ProviderListResult
{
internal ProviderListResult() { }
public string NextLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.Provider> Value { get { throw null; } }
}
public partial class ProviderResourceType
{
internal ProviderResourceType() { }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.AliasType> Aliases { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> ApiVersions { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<string> Locations { get { throw null; } }
public System.Collections.Generic.IReadOnlyDictionary<string, string> Properties { get { throw null; } }
public string ResourceType { get { throw null; } }
}
public partial class Resource
{
public Resource() { }
public string Id { get { throw null; } }
public string Location { get { throw null; } set { } }
public string Name { get { throw null; } }
public System.Collections.Generic.IDictionary<string, string> Tags { get { throw null; } set { } }
public string Type { get { throw null; } }
}
public partial class ResourceGroup
{
public ResourceGroup(string location) { }
public string Id { get { throw null; } }
public string Location { get { throw null; } set { } }
public string ManagedBy { get { throw null; } set { } }
public string Name { get { throw null; } set { } }
public Azure.Management.Resources.Models.ResourceGroupProperties Properties { get { throw null; } set { } }
public System.Collections.Generic.IDictionary<string, string> Tags { get { throw null; } set { } }
}
public partial class ResourceGroupExportResult
{
internal ResourceGroupExportResult() { }
public Azure.Management.Resources.Models.ResourceManagementErrorWithDetails Error { get { throw null; } }
public object Template { get { throw null; } }
}
public partial class ResourceGroupListResult
{
internal ResourceGroupListResult() { }
public string NextLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.ResourceGroup> Value { get { throw null; } }
}
public partial class ResourceGroupPatchable
{
public ResourceGroupPatchable() { }
public string ManagedBy { get { throw null; } set { } }
public string Name { get { throw null; } set { } }
public Azure.Management.Resources.Models.ResourceGroupProperties Properties { get { throw null; } set { } }
public System.Collections.Generic.IDictionary<string, string> Tags { get { throw null; } set { } }
}
public partial class ResourceGroupProperties
{
public ResourceGroupProperties() { }
public string ProvisioningState { get { throw null; } }
}
public partial class ResourceListResult
{
internal ResourceListResult() { }
public string NextLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.GenericResourceExpanded> Value { get { throw null; } }
}
public partial class ResourceManagementErrorWithDetails
{
internal ResourceManagementErrorWithDetails() { }
public string Code { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.ResourceManagementErrorWithDetails> Details { get { throw null; } }
public string Message { get { throw null; } }
public string Target { get { throw null; } }
}
public partial class ResourcesMoveInfo
{
public ResourcesMoveInfo() { }
public System.Collections.Generic.IList<string> Resources { get { throw null; } set { } }
public string TargetResourceGroup { get { throw null; } set { } }
}
public partial class Sku
{
public Sku() { }
public int? Capacity { get { throw null; } set { } }
public string Family { get { throw null; } set { } }
public string Model { get { throw null; } set { } }
public string Name { get { throw null; } set { } }
public string Size { get { throw null; } set { } }
public string Tier { get { throw null; } set { } }
}
public partial class TagCount
{
internal TagCount() { }
public string Type { get { throw null; } }
public int? Value { get { throw null; } }
}
public partial class TagDetails
{
internal TagDetails() { }
public Azure.Management.Resources.Models.TagCount Count { get { throw null; } }
public string Id { get { throw null; } }
public string TagName { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.TagValue> Values { get { throw null; } }
}
public partial class TagsListResult
{
internal TagsListResult() { }
public string NextLink { get { throw null; } }
public System.Collections.Generic.IReadOnlyList<Azure.Management.Resources.Models.TagDetails> Value { get { throw null; } }
}
public partial class TagValue
{
internal TagValue() { }
public Azure.Management.Resources.Models.TagCount Count { get { throw null; } }
public string Id { get { throw null; } }
public string TagValueValue { get { throw null; } }
}
public partial class TargetResource
{
internal TargetResource() { }
public string Id { get { throw null; } }
public string ResourceName { get { throw null; } }
public string ResourceType { get { throw null; } }
}
public partial class TemplateHashResult
{
internal TemplateHashResult() { }
public string MinifiedTemplate { get { throw null; } }
public string TemplateHash { get { throw null; } }
}
public partial class TemplateLink
{
public TemplateLink(string uri) { }
public string ContentVersion { get { throw null; } set { } }
public string Uri { get { throw null; } set { } }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
public enum databaseTable
{
PrimaryAccount,
Sessions,
CoinBank,
TotalCoins,
CoinOrder,
Logs,
}
namespace data.construct
{
public class tableFormats
{
public int tableCount = 6;
public string[][][] allTables()
{
string[][][] dbInfo = new string[tableCount][][];
dbInfo[0] = PrimaryAccount();
dbInfo[1] = Sessions();
dbInfo[2] = CoinBank();
dbInfo[3] = TotalCoins();
dbInfo[4] = CoinOrder();
dbInfo[5] = Logs();
return dbInfo;
}
#region Tables
public string[][] PrimaryAccount()
{
string[][] value = new string[][]
{
// Table Name
new string[]
{
"PrimaryAccount"
},
// Table Keys
new string[]
{
"id",
"email",
"username",
"password",
"salt",
"memberLevel",
"adminTrust",
"accountBalance",
"coinBankVal1",
"coinBankVal2"
},
// Table Key Features
new string[]
{
"int PRIMARY KEY IDENTITY",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"varchar(255)"
}
};
return value;
}
public string[][] Sessions()
{
string[][] value = new string[][]
{
// Table Name
new string[]
{
"Sessions"
},
// Table Keys
new string[]
{
"id",
"expiration",
"uid",
"sessionVal"
},
// Table Key Features
new string[]
{
"int PRIMARY KEY IDENTITY",
"DateTime",
"int",
"varchar(255)"
}
};
return value;
}
public string[][] CoinBank()
{
string[][] value = new string[][]
{
// Table Name
new string[]
{
"CoinBank"
},
// Table Keys
new string[]
{
"id",
"CT",
"coinBankVal",
"totalCoinString"
},
// Table Key Features
new string[]
{
"int PRIMARY KEY IDENTITY",
"varchar(255)",
"varchar(255)",
"varchar(255)"
}
};
return value;
}
public string[][] TotalCoins()
{
string[][] value = new string[][]
{
// Table Name
new string[]
{
"TotalCoins"
},
// Table Keys
new string[]
{
"id",
"totalCoinString",
"amount"
},
// Table Key Features
new string[]
{
"int PRIMARY KEY IDENTITY",
"varchar(255)",
"decimal(6,3)"
}
};
return value;
}
public string[][] CoinOrder()
{
string[][] value = new string[][]
{
// Table Name
new string[]
{
"CoinOrder"
},
// Table Keys
new string[]
{
"id",
"OT",
"CT",
"coinBankVal",
"perCoin",
"coinsToBeTraded"
},
// Table Key Features
new string[]
{
"int PRIMARY KEY IDENTITY",
"varchar(255)",
"varchar(255)",
"varchar(255)",
"decimal(6,3)",
"decimal(6,3)"
}
};
return value;
}
public string[][] Logs()
{
string[][] value = new string[][]
{
// Table Name
new string[]
{
"Logs"
},
// Table Keys
new string[]
{
"id",
"LT",
"timeOfEvent",
"uid1",
"uid2",
"logData"
},
// Table Key Features
new string[]
{
"int PRIMARY KEY IDENTITY",
"varchar(255)",
"DateTime",
"int",
"int",
"varchar(255)"
}
};
return value;
}
#endregion Tables
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq.Expressions;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
namespace RefactoringEssentials.CSharp.Diagnostics
{
[DiagnosticAnalyzer(LanguageNames.CSharp)]
public class RedundantToStringCallAnalyzer : DiagnosticAnalyzer
{
static Tuple<int, int> onlyFirst = Tuple.Create(0, 0);
static IDictionary<Tuple<string, int>, Tuple<int, int>> membersCallingToString = new Dictionary<Tuple<string, int>, Tuple<int, int>> {
{ Tuple.Create("System.IO.TextWriter.Write", 1), onlyFirst },
{ Tuple.Create("System.IO.TextWriter.WriteLine", 1), onlyFirst },
{ Tuple.Create("System.Console.Write", 1), onlyFirst },
{ Tuple.Create("System.Console.WriteLine", 1), onlyFirst }
};
static readonly DiagnosticDescriptor descriptor1 = new DiagnosticDescriptor(
CSharpDiagnosticIDs.RedundantToStringCallAnalyzerID,
GettextCatalog.GetString("Finds calls to ToString() which would be generated automatically by the compiler"),
GettextCatalog.GetString("Redundant 'ToString()' call"),
DiagnosticAnalyzerCategories.RedundanciesInCode,
DiagnosticSeverity.Info,
isEnabledByDefault: true,
helpLinkUri: HelpLink.CreateFor(CSharpDiagnosticIDs.RedundantToStringCallAnalyzerID),
customTags: DiagnosticCustomTags.Unnecessary
);
static readonly DiagnosticDescriptor descriptor2 = new DiagnosticDescriptor(
CSharpDiagnosticIDs.RedundantToStringCallAnalyzer_ValueTypesID,
GettextCatalog.GetString("Finds calls to ToString() which would be generated automatically by the compiler"),
GettextCatalog.GetString("Redundant 'ToString()' call"),
DiagnosticAnalyzerCategories.RedundanciesInCode,
DiagnosticSeverity.Warning,
isEnabledByDefault: true,
helpLinkUri: HelpLink.CreateFor(CSharpDiagnosticIDs.RedundantToStringCallAnalyzerID),
customTags: DiagnosticCustomTags.Unnecessary
);
public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(descriptor1, descriptor2);
public override void Initialize(AnalysisContext context)
{
context.RegisterSyntaxNodeAction(
AnalyzeBinaryExpression,
new SyntaxKind[] { SyntaxKind.AddExpression }
);
context.RegisterSyntaxNodeAction(
AnalyzeInvocationExpression,
new SyntaxKind[] { SyntaxKind.InvocationExpression }
);
}
static void AnalyzeBinaryExpression(SyntaxNodeAnalysisContext nodeContext)
{
if (nodeContext.IsFromGeneratedCode())
return;
var node = nodeContext.Node as BinaryExpressionSyntax;
var visitor = new BinaryExpressionVisitor(nodeContext);
visitor.Visit(node);
}
static void AnalyzeInvocationExpression(SyntaxNodeAnalysisContext nodeContext)
{
if (nodeContext.IsFromGeneratedCode())
return;
var invocationExpression = nodeContext.Node as InvocationExpressionSyntax;
if (invocationExpression.Parent is BinaryExpressionSyntax)
return;
var member = nodeContext.SemanticModel.GetSymbolInfo(invocationExpression).Symbol;
if (member == null)
return;
var invocationResolveResult = nodeContext.SemanticModel.GetTypeInfo(invocationExpression).Type;
// "".ToString()
CheckTargetedObject(nodeContext, invocationExpression, invocationResolveResult, member);
// Check list of members that call ToString() automatically
CheckAutomaticToStringCallers(nodeContext, invocationExpression, member);
// Check formatting calls
// CheckFormattingCall(invocationExpression, invocationResolveResult));
}
class BinaryExpressionVisitor : CSharpSyntaxWalker
{
readonly SyntaxNodeAnalysisContext nodeContext;
int stringExpressionCount;
ExpressionSyntax firstStringExpression;
HashSet<SyntaxNode> processedNodes = new HashSet<SyntaxNode>();
public BinaryExpressionVisitor(SyntaxNodeAnalysisContext nodeContext)
{
this.nodeContext = nodeContext;
}
public void Reset()
{
stringExpressionCount = 0;
firstStringExpression = null;
}
void Check(ExpressionSyntax expression)
{
if (stringExpressionCount <= 1)
{
var resolvedType = nodeContext.SemanticModel.GetTypeInfo(expression).Type;
if (resolvedType != null && resolvedType.SpecialType == SpecialType.System_String)
{
stringExpressionCount++;
if (stringExpressionCount == 1)
{
firstStringExpression = expression;
}
else {
CheckExpressionInAutoCallContext(firstStringExpression);
CheckExpressionInAutoCallContext(expression);
}
}
}
else {
CheckExpressionInAutoCallContext(expression);
}
}
public override void VisitBinaryExpression(BinaryExpressionSyntax node)
{
Check(node.Left);
Check(node.Right);
}
public override void VisitBaseExpression(BaseExpressionSyntax node)
{
base.VisitBaseExpression(node);
}
void CheckExpressionInAutoCallContext(ExpressionSyntax expression)
{
if (expression is InvocationExpressionSyntax && !processedNodes.Contains(expression))
{
CheckInvocationInAutoCallContext((InvocationExpressionSyntax)expression);
}
}
void CheckInvocationInAutoCallContext(InvocationExpressionSyntax invocationExpression)
{
var memberExpression = invocationExpression.Expression as MemberAccessExpressionSyntax;
if (memberExpression == null)
{
return;
}
if (memberExpression.Name.ToString() != "ToString" || invocationExpression.ArgumentList.Arguments.Any())
{
return;
}
var resolveResult = nodeContext.SemanticModel.GetSymbolInfo(invocationExpression).Symbol;
if (resolveResult == null)
{
return;
}
var type = nodeContext.SemanticModel.GetTypeInfo(memberExpression.Expression).Type;
AddRedundantToStringIssue(memberExpression, invocationExpression, type?.IsValueType == true);
}
void AddRedundantToStringIssue(MemberAccessExpressionSyntax memberExpression, InvocationExpressionSyntax invocationExpression, bool isValueType)
{
// Simon Lindgren 2012-09-14: Previously there was a check here to see if the node had already been processed
// This has been moved out to the callers, to check it earlier for a 30-40% run time reduction
processedNodes.Add(invocationExpression);
nodeContext.ReportDiagnostic(Diagnostic.Create(isValueType ? descriptor2 : descriptor1, memberExpression.Name.GetLocation()));
}
}
#region Invocation expression
static void CheckTargetedObject(SyntaxNodeAnalysisContext nodeContext, InvocationExpressionSyntax invocationExpression, ITypeSymbol type, ISymbol member)
{
var memberExpression = invocationExpression.Expression as MemberAccessExpressionSyntax;
if (memberExpression != null)
{
if (type.SpecialType == SpecialType.System_String && member.Name == "ToString")
{
nodeContext.ReportDiagnostic(Diagnostic.Create(descriptor1, memberExpression.Name.GetLocation()));
}
}
}
static void CheckAutomaticToStringCallers(SyntaxNodeAnalysisContext nodeContext, InvocationExpressionSyntax invocationExpression, ISymbol member)
{
if (member.IsOverride)
{
member = member.OverriddenMember();
if (member == null)
{
return;
}
}
var key = new Tuple<string, int>(member.GetDocumentationCommentId(), invocationExpression.ArgumentList.Arguments.Count);
Tuple<int, int> checkInfo;
if (membersCallingToString.TryGetValue(key, out checkInfo))
{
var arguments = invocationExpression.ArgumentList.Arguments;
for (int i = checkInfo.Item1; i < Math.Min(arguments.Count, checkInfo.Item2 + 1); ++i)
{
CheckExpressionInAutoCallContext(nodeContext, arguments[i].Expression);
}
}
}
static void CheckExpressionInAutoCallContext(SyntaxNodeAnalysisContext nodeContext, ExpressionSyntax expression)
{
var invocationExpressionSyntax = expression as InvocationExpressionSyntax;
if (invocationExpressionSyntax != null)
{
CheckInvocationInAutoCallContext(nodeContext, invocationExpressionSyntax);
}
}
static void CheckInvocationInAutoCallContext(SyntaxNodeAnalysisContext nodeContext, InvocationExpressionSyntax invocationExpression)
{
var memberExpression = invocationExpression.Expression as MemberAccessExpressionSyntax;
if (memberExpression == null)
{
return;
}
if (memberExpression.Name.ToString() != "ToString" || invocationExpression.ArgumentList.Arguments.Any())
{
return;
}
var resolveResult = nodeContext.SemanticModel.GetSymbolInfo(invocationExpression).Symbol;
if (resolveResult == null)
{
return;
}
var type = nodeContext.SemanticModel.GetTypeInfo(memberExpression.Expression).Type;
if (type?.IsValueType == true)
nodeContext.ReportDiagnostic(Diagnostic.Create(descriptor1, memberExpression.Name.GetLocation()));
}
//static void CheckFormattingCall(InvocationExpressionSyntax invocationExpression, CSharpInvocationResolveResult invocationResolveResult)
//{
// Expression formatArgument;
// IList<Expression> formatArguments;
// // Only check parameters that are of type object: String means it is neccessary, others
// // means that there is another problem (ie no matching overload of the method).
// Func<IParameter, Expression, bool> predicate = (parameter, argument) =>
// {
// var type = parameter.Type;
// if (type is TypeWithElementType && parameter.IsParams)
// {
// type = ((TypeWithElementType)type).ElementType;
// }
// var typeDefinition = type.GetDefinition();
// if (typeDefinition == null)
// return false;
// return typeDefinition.IsKnownType(KnownTypeCode.Object);
// };
// if (FormatStringHelper.TryGetFormattingParameters(invocationResolveResult, invocationExpression,
// out formatArgument, out formatArguments, predicate))
// {
// foreach (var argument in formatArguments)
// {
// CheckExpressionInAutoCallContext(argument);
// }
// }
//}
#endregion
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.Automation;
using Microsoft.Azure.Management.Automation.Models;
namespace Microsoft.Azure.Management.Automation
{
public static partial class TestJobOperationsExtensions
{
/// <summary>
/// Create a test job of the runbook. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='parameters'>
/// Required. The parameters supplied to the create test job operation.
/// </param>
/// <returns>
/// The response model for the create test job operation.
/// </returns>
public static TestJobCreateResponse Create(this ITestJobOperations operations, string resourceGroupName, string automationAccount, TestJobCreateParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((ITestJobOperations)s).CreateAsync(resourceGroupName, automationAccount, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Create a test job of the runbook. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='parameters'>
/// Required. The parameters supplied to the create test job operation.
/// </param>
/// <returns>
/// The response model for the create test job operation.
/// </returns>
public static Task<TestJobCreateResponse> CreateAsync(this ITestJobOperations operations, string resourceGroupName, string automationAccount, TestJobCreateParameters parameters)
{
return operations.CreateAsync(resourceGroupName, automationAccount, parameters, CancellationToken.None);
}
/// <summary>
/// Retrieve the test job for the specified runbook. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// The response model for the get test job operation.
/// </returns>
public static TestJobGetResponse Get(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return Task.Factory.StartNew((object s) =>
{
return ((ITestJobOperations)s).GetAsync(resourceGroupName, automationAccount, runbookName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Retrieve the test job for the specified runbook. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// The response model for the get test job operation.
/// </returns>
public static Task<TestJobGetResponse> GetAsync(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return operations.GetAsync(resourceGroupName, automationAccount, runbookName, CancellationToken.None);
}
/// <summary>
/// Resume the test job. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Resume(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return Task.Factory.StartNew((object s) =>
{
return ((ITestJobOperations)s).ResumeAsync(resourceGroupName, automationAccount, runbookName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Resume the test job. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> ResumeAsync(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return operations.ResumeAsync(resourceGroupName, automationAccount, runbookName, CancellationToken.None);
}
/// <summary>
/// Stop the test job. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Stop(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return Task.Factory.StartNew((object s) =>
{
return ((ITestJobOperations)s).StopAsync(resourceGroupName, automationAccount, runbookName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Stop the test job. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> StopAsync(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return operations.StopAsync(resourceGroupName, automationAccount, runbookName, CancellationToken.None);
}
/// <summary>
/// Suspend the test job. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Suspend(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return Task.Factory.StartNew((object s) =>
{
return ((ITestJobOperations)s).SuspendAsync(resourceGroupName, automationAccount, runbookName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Suspend the test job. (see
/// http://aka.ms/azureautomationsdk/testjoboperations for more
/// information)
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Automation.ITestJobOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group
/// </param>
/// <param name='automationAccount'>
/// Required. The automation account name.
/// </param>
/// <param name='runbookName'>
/// Required. The runbook name.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> SuspendAsync(this ITestJobOperations operations, string resourceGroupName, string automationAccount, string runbookName)
{
return operations.SuspendAsync(resourceGroupName, automationAccount, runbookName, CancellationToken.None);
}
}
}
| |
// Copyright 2010-2014 Google
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using Google.OrTools.LinearSolver;
public class CsTestLp
{
static int error_count = 0;
static void Check(bool test, String message)
{
if (!test)
{
Console.WriteLine("Error: " + message);
error_count++;
}
}
static void CheckDoubleEq(double v1, double v2, String message)
{
if (v1 != v2)
{
Console.WriteLine("Error: " + v1 + " != " + v2 + " " + message);
error_count++;
}
}
static void TestVarOperator()
{
Console.WriteLine("Running TestVarOperator");
Solver solver = new Solver("TestVarOperator",
Solver.CLP_LINEAR_PROGRAMMING);
Variable x = solver.MakeNumVar(0.0, 100.0, "x");
Constraint ct1 = solver.Add(x >= 1);
Constraint ct2 = solver.Add(x <= 1);
Constraint ct3 = solver.Add(x == 1);
Constraint ct4 = solver.Add(1 >= x);
Constraint ct5 = solver.Add(1 <= x);
Constraint ct6 = solver.Add(1 == x);
CheckDoubleEq(ct1.GetCoefficient(x), 1.0, "test1");
CheckDoubleEq(ct2.GetCoefficient(x), 1.0, "test2");
CheckDoubleEq(ct3.GetCoefficient(x), 1.0, "test3");
CheckDoubleEq(ct4.GetCoefficient(x), 1.0, "test4");
CheckDoubleEq(ct5.GetCoefficient(x), 1.0, "test5");
CheckDoubleEq(ct6.GetCoefficient(x), 1.0, "test6");
CheckDoubleEq(ct1.Lb(), 1.0, "test7");
CheckDoubleEq(ct1.Ub(), double.PositiveInfinity, "test8");
CheckDoubleEq(ct2.Lb(), double.NegativeInfinity, "test9");
CheckDoubleEq(ct2.Ub(), 1.0, "test10");
CheckDoubleEq(ct3.Lb(), 1.0, "test11");
CheckDoubleEq(ct3.Ub(), 1.0, "test12");
CheckDoubleEq(ct4.Lb(), double.NegativeInfinity, "test13");
CheckDoubleEq(ct4.Ub(), 1.0, "test14");
CheckDoubleEq(ct5.Lb(), 1.0, "test15");
CheckDoubleEq(ct5.Ub(), double.PositiveInfinity, "test16");
CheckDoubleEq(ct6.Lb(), 1.0, "test17");
CheckDoubleEq(ct6.Ub(), 1.0, "test18");
}
static void TestVarAddition()
{
Console.WriteLine("Running TestVarAddition");
Solver solver = new Solver("TestVarAddition",
Solver.CLP_LINEAR_PROGRAMMING);
Variable x = solver.MakeNumVar(0.0, 100.0, "x");
Variable y = solver.MakeNumVar(0.0, 100.0, "y");
Constraint ct1 = solver.Add(x + y == 1);
CheckDoubleEq(ct1.GetCoefficient(x), 1.0, "test1");
CheckDoubleEq(ct1.GetCoefficient(y), 1.0, "test2");
Constraint ct2 = solver.Add(x + x == 1);
CheckDoubleEq(ct2.GetCoefficient(x), 2.0, "test3");
Constraint ct3 = solver.Add(x + (y + x) == 1);
CheckDoubleEq(ct3.GetCoefficient(x), 2.0, "test4");
CheckDoubleEq(ct3.GetCoefficient(y), 1.0, "test5");
Constraint ct4 = solver.Add(x + (y + x + 3) == 1);
CheckDoubleEq(ct4.GetCoefficient(x), 2.0, "test4");
CheckDoubleEq(ct4.GetCoefficient(y), 1.0, "test5");
CheckDoubleEq(ct4.Lb(), -2.0, "test6");
CheckDoubleEq(ct4.Ub(), -2.0, "test7");
}
static void TestVarMultiplication()
{
Console.WriteLine("Running TestVarMultiplication");
Solver solver = new Solver("TestVarMultiplication",
Solver.CLP_LINEAR_PROGRAMMING);
Variable x = solver.MakeNumVar(0.0, 100.0, "x");
Variable y = solver.MakeNumVar(0.0, 100.0, "y");
Constraint ct1 = solver.Add(3 * x == 1);
CheckDoubleEq(ct1.GetCoefficient(x), 3.0, "test1");
Constraint ct2 = solver.Add(x * 3 == 1);
CheckDoubleEq(ct2.GetCoefficient(x), 3.0, "test2");
Constraint ct3 = solver.Add(x + (2 * y + 3 * x) == 1);
CheckDoubleEq(ct3.GetCoefficient(x), 4.0, "test3");
CheckDoubleEq(ct3.GetCoefficient(y), 2.0, "test4");
Constraint ct4 = solver.Add(x + 5 * (y + x + 3) == 1);
CheckDoubleEq(ct4.GetCoefficient(x), 6.0, "test5");
CheckDoubleEq(ct4.GetCoefficient(y), 5.0, "test6");
CheckDoubleEq(ct4.Lb(), -14.0, "test7");
CheckDoubleEq(ct4.Ub(), -14.0, "test8");
Constraint ct5 = solver.Add(x + (2 * y + x + 3) * 3 == 1);
CheckDoubleEq(ct5.GetCoefficient(x), 4.0, "test9");
CheckDoubleEq(ct5.GetCoefficient(y), 6.0, "test10");
CheckDoubleEq(ct5.Lb(), -8.0, "test11");
CheckDoubleEq(ct5.Ub(), -8.0, "test12");
}
static void TestBinaryOperations()
{
Console.WriteLine("Running TestBinaryOperations");
Solver solver = new Solver("TestBinaryOperations",
Solver.CLP_LINEAR_PROGRAMMING);
Variable x = solver.MakeNumVar(0.0, 100.0, "x");
Variable y = solver.MakeNumVar(0.0, 100.0, "y");
Constraint ct1 = solver.Add(x == y);
CheckDoubleEq(ct1.GetCoefficient(x), 1.0, "test1");
CheckDoubleEq(ct1.GetCoefficient(y), -1.0, "test2");
Constraint ct2 = solver.Add(x == 3 * y + 5);
CheckDoubleEq(ct2.GetCoefficient(x), 1.0, "test3");
CheckDoubleEq(ct2.GetCoefficient(y), -3.0, "test4");
CheckDoubleEq(ct2.Lb(), 5.0, "test5");
CheckDoubleEq(ct2.Ub(), 5.0, "test6");
Constraint ct3 = solver.Add(2 * x - 9 == y);
CheckDoubleEq(ct3.GetCoefficient(x), 2.0, "test7");
CheckDoubleEq(ct3.GetCoefficient(y), -1.0, "test8");
CheckDoubleEq(ct3.Lb(), 9.0, "test9");
CheckDoubleEq(ct3.Ub(), 9.0, "test10");
Check(x == x, "test11");
Check(!(x == y), "test12");
Check(!(x != x), "test13");
Check((x != y), "test14");
}
static void TestInequalities()
{
Console.WriteLine("Running TestInequalities");
Solver solver = new Solver("TestInequalities",
Solver.CLP_LINEAR_PROGRAMMING);
Variable x = solver.MakeNumVar(0.0, 100.0, "x");
Variable y = solver.MakeNumVar(0.0, 100.0, "y");
Constraint ct1 = solver.Add(2 * (x + 3) + 5 * (y + x -1) >= 3);
CheckDoubleEq(ct1.GetCoefficient(x), 7.0, "test1");
CheckDoubleEq(ct1.GetCoefficient(y), 5.0, "test2");
CheckDoubleEq(ct1.Lb(), 2.0, "test3");
CheckDoubleEq(ct1.Ub(), double.PositiveInfinity, "test4");
Constraint ct2 = solver.Add(2 * (x + 3) + 5 * (y + x -1) <= 3);
CheckDoubleEq(ct2.GetCoefficient(x), 7.0, "test5");
CheckDoubleEq(ct2.GetCoefficient(y), 5.0, "test6");
CheckDoubleEq(ct2.Lb(), double.NegativeInfinity, "test7");
CheckDoubleEq(ct2.Ub(), 2.0, "test8");
Constraint ct3 = solver.Add(2 * (x + 3) + 5 * (y + x -1) >= 3 - x - y);
CheckDoubleEq(ct3.GetCoefficient(x), 8.0, "test9");
CheckDoubleEq(ct3.GetCoefficient(y), 6.0, "test10");
CheckDoubleEq(ct3.Lb(), 2.0, "test11");
CheckDoubleEq(ct3.Ub(), double.PositiveInfinity, "test12");
Constraint ct4 = solver.Add(2 * (x + 3) + 5 * (y + x -1) <= -x - y + 3);
CheckDoubleEq(ct4.GetCoefficient(x), 8.0, "test13");
CheckDoubleEq(ct4.GetCoefficient(y), 6.0, "test14");
CheckDoubleEq(ct4.Lb(), double.NegativeInfinity, "test15");
CheckDoubleEq(ct4.Ub(), 2.0, "test16");
}
static void TestSumArray()
{
Console.WriteLine("Running TestSumArray");
Solver solver = new Solver("TestSumArray", Solver.CLP_LINEAR_PROGRAMMING);
Variable[] x = solver.MakeBoolVarArray(10, "x");
Constraint ct1 = solver.Add(x.Sum() == 3);
CheckDoubleEq(ct1.GetCoefficient(x[0]), 1.0, "test1");
Constraint ct2 = solver.Add(-2 * x.Sum() == 3);
CheckDoubleEq(ct2.GetCoefficient(x[0]), -2.0, "test2");
LinearExpr[] array = new LinearExpr[] { x[0]+ 2.0, x[0] + 3, x[0] + 4 };
Constraint ct3 = solver.Add(array.Sum() == 1);
CheckDoubleEq(ct3.GetCoefficient(x[0]), 3.0, "test3");
CheckDoubleEq(ct3.Lb(), -8.0, "test4");
CheckDoubleEq(ct3.Ub(), -8.0, "test5");
}
static void TestObjective()
{
Console.WriteLine("Running TestObjective");
Solver solver = new Solver("TestObjective", Solver.CLP_LINEAR_PROGRAMMING);
Variable x = solver.MakeNumVar(0.0, 100.0, "x");
Variable y = solver.MakeNumVar(0.0, 100.0, "y");
solver.Maximize(x);
CheckDoubleEq(0.0, solver.Objective().Offset(), "test1");
CheckDoubleEq(1.0, solver.Objective().GetCoefficient(x), "test2");
Check(solver.Objective().Maximization(), "test3");
solver.Minimize(-x - 2 * y + 3);
CheckDoubleEq(3.0, solver.Objective().Offset(), "test4");
CheckDoubleEq(-1.0, solver.Objective().GetCoefficient(x), "test5");
CheckDoubleEq(-2.0, solver.Objective().GetCoefficient(y), "test6");
Check(solver.Objective().Minimization(), "test7");
}
static void Main()
{
TestVarOperator();
TestVarAddition();
TestVarMultiplication();
TestBinaryOperations();
TestInequalities();
TestSumArray();
TestObjective();
if (error_count != 0) {
Console.WriteLine("Found " + error_count + " errors.");
Environment.Exit(1);
}
}
}
| |
/*
** $Id: ltablib.c,v 1.38.1.3 2008/02/14 16:46:58 roberto Exp $
** Library for table Manipulation
** See Copyright Notice in lua.h
*/
using System;
using System.Collections.Generic;
using System.Text;
namespace SharpLua
{
using lua_Number = System.Double;
public partial class Lua
{
private static int aux_getn(LuaState L, int n) { luaL_checktype(L, n, LUA_TTABLE); return luaL_getn(L, n); }
private static int foreachi(LuaState L)
{
int i;
int n = aux_getn(L, 1);
luaL_checktype(L, 2, LUA_TFUNCTION);
for (i = 1; i <= n; i++)
{
lua_pushvalue(L, 2); /* function */
lua_pushinteger(L, i); /* 1st argument */
lua_rawgeti(L, 1, i); /* 2nd argument */
lua_call(L, 2, 1);
if (!lua_isnil(L, -1))
return 1;
lua_pop(L, 1); /* remove nil result */
}
return 0;
}
private static int _foreach(LuaState L)
{
luaL_checktype(L, 1, LUA_TTABLE);
luaL_checktype(L, 2, LUA_TFUNCTION);
lua_pushnil(L); /* first key */
while (lua_next(L, 1) != 0)
{
lua_pushvalue(L, 2); /* function */
lua_pushvalue(L, -3); /* key */
lua_pushvalue(L, -3); /* value */
lua_call(L, 2, 1);
if (!lua_isnil(L, -1))
return 1;
lua_pop(L, 2); /* remove value and result */
}
return 0;
}
private static int maxn(LuaState L)
{
lua_Number max = 0;
luaL_checktype(L, 1, LUA_TTABLE);
lua_pushnil(L); /* first key */
while (lua_next(L, 1) != 0)
{
lua_pop(L, 1); /* remove value */
if (lua_type(L, -1) == LUA_TNUMBER)
{
lua_Number v = lua_tonumber(L, -1);
if (v > max) max = v;
}
}
lua_pushnumber(L, max);
return 1;
}
private static int getn(LuaState L)
{
lua_pushinteger(L, aux_getn(L, 1));
return 1;
}
private static int setn(LuaState L)
{
luaL_checktype(L, 1, LUA_TTABLE);
//#ifndef luaL_setn
//luaL_setn(L, 1, luaL_checkint(L, 2));
//#else
luaL_error(L, LUA_QL("setn") + " is obsolete");
//#endif
lua_pushvalue(L, 1);
return 1;
}
private static int tinsert(LuaState L)
{
int e = aux_getn(L, 1) + 1; /* first empty element */
int pos; /* where to insert new element */
switch (lua_gettop(L))
{
case 2:
{ /* called with only 2 arguments */
pos = e; /* insert new element at the end */
break;
}
case 3:
{
int i;
pos = luaL_checkint(L, 2); /* 2nd argument is the position */
if (pos > e) e = pos; /* `grow' array if necessary */
for (i = e; i > pos; i--)
{ /* move up elements */
lua_rawgeti(L, 1, i - 1);
lua_rawseti(L, 1, i); /* t[i] = t[i-1] */
}
break;
}
default:
{
return luaL_error(L, "wrong number of arguments to " + LUA_QL("insert"));
}
}
luaL_setn(L, 1, e); /* new size */
lua_rawseti(L, 1, pos); /* t[pos] = v */
return 0;
}
private static int tremove(LuaState L)
{
int e = aux_getn(L, 1);
int pos = luaL_optint(L, 2, e);
if (!(1 <= pos && pos <= e)) /* position is outside bounds? */
return 0; /* nothing to remove */
luaL_setn(L, 1, e - 1); /* t.n = n-1 */
lua_rawgeti(L, 1, pos); /* result = t[pos] */
for (; pos < e; pos++)
{
lua_rawgeti(L, 1, pos + 1);
lua_rawseti(L, 1, pos); /* t[pos] = t[pos+1] */
}
lua_pushnil(L);
lua_rawseti(L, 1, e); /* t[e] = nil */
return 1;
}
private static void addfield(LuaState L, luaL_Buffer b, int i)
{
lua_rawgeti(L, 1, i);
if (lua_isstring(L, -1) == 0)
luaL_error(L, "invalid value (%s) at index %d in table for " +
LUA_QL("concat"), luaL_typename(L, -1), i);
luaL_addvalue(b);
}
private static int tconcat(LuaState L)
{
luaL_Buffer b = new luaL_Buffer();
uint lsep;
int i, last;
CharPtr sep = luaL_optlstring(L, 2, "", out lsep);
luaL_checktype(L, 1, LUA_TTABLE);
i = luaL_optint(L, 3, 1);
last = luaL_opt_integer(L, luaL_checkint, 4, luaL_getn(L, 1));
luaL_buffinit(L, b);
for (; i < last; i++)
{
addfield(L, b, i);
luaL_addlstring(b, sep, lsep);
}
if (i == last) /* add last value (if interval was not empty) */
addfield(L, b, i);
luaL_pushresult(b);
return 1;
}
/*
** {======================================================
** Quicksort
** (based on `Algorithms in MODULA-3', Robert Sedgewick;
** Addison-Wesley, 1993.)
*/
private static void set2(LuaState L, int i, int j)
{
lua_rawseti(L, 1, i);
lua_rawseti(L, 1, j);
}
private static int sort_comp(LuaState L, int a, int b)
{
if (!lua_isnil(L, 2))
{ /* function? */
int res;
lua_pushvalue(L, 2);
lua_pushvalue(L, a - 1); /* -1 to compensate function */
lua_pushvalue(L, b - 2); /* -2 to compensate function and `a' */
lua_call(L, 2, 1);
res = lua_toboolean(L, -1);
lua_pop(L, 1);
return res;
}
else /* a < b? */
return lua_lessthan(L, a, b);
}
private static int auxsort_loop1(LuaState L, ref int i)
{
lua_rawgeti(L, 1, ++i);
return sort_comp(L, -1, -2);
}
private static int auxsort_loop2(LuaState L, ref int j)
{
lua_rawgeti(L, 1, --j);
return sort_comp(L, -3, -1);
}
private static void auxsort(LuaState L, int l, int u)
{
while (l < u)
{ /* for tail recursion */
int i, j;
/* sort elements a[l], a[(l+u)/2] and a[u] */
lua_rawgeti(L, 1, l);
lua_rawgeti(L, 1, u);
if (sort_comp(L, -1, -2) != 0) /* a[u] < a[l]? */
set2(L, l, u); /* swap a[l] - a[u] */
else
lua_pop(L, 2);
if (u - l == 1) break; /* only 2 elements */
i = (l + u) / 2;
lua_rawgeti(L, 1, i);
lua_rawgeti(L, 1, l);
if (sort_comp(L, -2, -1) != 0) /* a[i]<a[l]? */
set2(L, i, l);
else
{
lua_pop(L, 1); /* remove a[l] */
lua_rawgeti(L, 1, u);
if (sort_comp(L, -1, -2) != 0) /* a[u]<a[i]? */
set2(L, i, u);
else
lua_pop(L, 2);
}
if (u - l == 2) break; /* only 3 elements */
lua_rawgeti(L, 1, i); /* Pivot */
lua_pushvalue(L, -1);
lua_rawgeti(L, 1, u - 1);
set2(L, i, u - 1);
/* a[l] <= P == a[u-1] <= a[u], only need to sort from l+1 to u-2 */
i = l; j = u - 1;
for (; ; )
{ /* invariant: a[l..i] <= P <= a[j..u] */
/* repeat ++i until a[i] >= P */
while (auxsort_loop1(L, ref i) != 0)
{
if (i > u) luaL_error(L, "invalid order function for sorting");
lua_pop(L, 1); /* remove a[i] */
}
/* repeat --j until a[j] <= P */
while (auxsort_loop2(L, ref j) != 0)
{
if (j < l) luaL_error(L, "invalid order function for sorting");
lua_pop(L, 1); /* remove a[j] */
}
if (j < i)
{
lua_pop(L, 3); /* pop pivot, a[i], a[j] */
break;
}
set2(L, i, j);
}
lua_rawgeti(L, 1, u - 1);
lua_rawgeti(L, 1, i);
set2(L, u - 1, i); /* swap pivot (a[u-1]) with a[i] */
/* a[l..i-1] <= a[i] == P <= a[i+1..u] */
/* adjust so that smaller half is in [j..i] and larger one in [l..u] */
if (i - l < u - i)
{
j = l; i = i - 1; l = i + 2;
}
else
{
j = i + 1; i = u; u = j - 2;
}
auxsort(L, j, i); /* call recursively the smaller one */
} /* repeat the routine for the larger one */
}
private static int sort(LuaState L)
{
int n = aux_getn(L, 1);
luaL_checkstack(L, 40, ""); /* assume array is smaller than 2^40 */
if (!lua_isnoneornil(L, 2)) /* is there a 2nd argument? */
luaL_checktype(L, 2, LUA_TFUNCTION);
lua_settop(L, 2); /* make sure there is two arguments */
auxsort(L, 1, n);
return 0;
}
private static int unpack(LuaState L)
{
int i, e, n;
luaL_checktype(L, 1, LUA_TTABLE);
i = luaL_optint(L, 2, 1);
e = luaL_opt_integer(L, luaL_checkint, 3, luaL_getn(L, 1));
if (i > e) return 0; /* empty range */
n = e - i + 1; /* number of elements */
if (n <= 0 || (lua_checkstack(L, n) == 0)) /* n <= 0 means arith. overflow */
return luaL_error(L, "too many results to unpack");
lua_rawgeti(L, 1, i); /* push arg[i] (avoiding overflow problems) */
while (i++ < e) /* push arg[i + 1...e] */
lua_rawgeti(L, 1, i);
return n;
}
/* }====================================================== */
private readonly static luaL_Reg[] tab_funcs = {
new luaL_Reg("concat", tconcat),
new luaL_Reg("foreach", _foreach),
new luaL_Reg("foreachi", foreachi),
new luaL_Reg("getn", getn),
new luaL_Reg("maxn", maxn),
new luaL_Reg("insert", tinsert),
new luaL_Reg("remove", tremove),
new luaL_Reg("setn", setn),
new luaL_Reg("sort", sort),
new luaL_Reg("unpack", unpack),
new luaL_Reg(null, null)
};
public static int luaopen_table(LuaState L)
{
luaL_register(L, LUA_TABLIBNAME, tab_funcs);
return 1;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*****************************************************************************************************
Rules for Multiple Nested Parent, enforce following constraints
1) At all times, only 1(ONE) FK can be NON-Null in a row.
2) NULL FK values are not associated with PARENT(x), even if if PK is NULL in Parent
3) Enforce <rule 1> when
a) Any FK value is changed
b) A relation created that result in Multiple Nested Child
WriteXml
1) WriteXml will throw if <rule 1> is violated
2) if NON-Null FK has parentRow (boolean check) print as Nested, else it will get written as normal row
additional notes:
We decided to enforce the rule 1 just if Xml being persisted
******************************************************************************************************/
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Data.Common;
using System.Collections.Generic;
using System.Threading;
namespace System.Data
{
[DefaultProperty(nameof(RelationName))]
[TypeConverter(typeof(RelationshipConverter))]
public class DataRelation
{
// properties
private DataSet _dataSet = null;
internal PropertyCollection _extendedProperties = null;
internal string _relationName = string.Empty;
// state
private DataKey _childKey;
private DataKey _parentKey;
private UniqueConstraint _parentKeyConstraint = null;
private ForeignKeyConstraint _childKeyConstraint = null;
// Design time serialization
internal string[] _parentColumnNames = null;
internal string[] _childColumnNames = null;
internal string _parentTableName = null;
internal string _childTableName = null;
internal string _parentTableNamespace = null;
internal string _childTableNamespace = null;
/// <summary>
/// This stores whether the child element appears beneath the parent in the XML persisted files.
/// </summary>
internal bool _nested = false;
/// <summary>
/// This stores whether the relationship should make sure that KeyConstraints and ForeignKeyConstraints
/// exist when added to the ConstraintsCollections of the table.
/// </summary>
internal bool _createConstraints;
private bool _checkMultipleNested = true;
private static int s_objectTypeCount; // Bid counter
private readonly int _objectID = Interlocked.Increment(ref s_objectTypeCount);
/// <summary>
/// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name,
/// parent, and child columns.
/// </summary>
public DataRelation(string relationName, DataColumn parentColumn, DataColumn childColumn) :
this(relationName, parentColumn, childColumn, true)
{
}
/// <summary>
/// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name, parent, and child columns, and
/// value to create constraints.
/// </summary>
public DataRelation(string relationName, DataColumn parentColumn, DataColumn childColumn, bool createConstraints)
{
DataCommonEventSource.Log.Trace("<ds.DataRelation.DataRelation|API> {0}, relationName='{1}', parentColumn={2}, childColumn={3}, createConstraints={4}",
ObjectID, relationName, (parentColumn != null) ? parentColumn.ObjectID : 0, (childColumn != null) ? childColumn.ObjectID : 0,
createConstraints);
DataColumn[] parentColumns = new DataColumn[1];
parentColumns[0] = parentColumn;
DataColumn[] childColumns = new DataColumn[1];
childColumns[0] = childColumn;
Create(relationName, parentColumns, childColumns, createConstraints);
}
/// <summary>
/// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name
/// and matched arrays of parent and child columns.
/// </summary>
public DataRelation(string relationName, DataColumn[] parentColumns, DataColumn[] childColumns) :
this(relationName, parentColumns, childColumns, true)
{
}
/// <summary>
/// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name, matched arrays of parent
/// and child columns, and value to create constraints.
/// </summary>
public DataRelation(string relationName, DataColumn[] parentColumns, DataColumn[] childColumns, bool createConstraints)
{
Create(relationName, parentColumns, childColumns, createConstraints);
}
[Browsable(false)] // design-time ctor
public DataRelation(string relationName, string parentTableName, string childTableName, string[] parentColumnNames, string[] childColumnNames, bool nested)
{
_relationName = relationName;
_parentColumnNames = parentColumnNames;
_childColumnNames = childColumnNames;
_parentTableName = parentTableName;
_childTableName = childTableName;
_nested = nested;
}
[Browsable(false)] // design-time ctor
public DataRelation(string relationName, string parentTableName, string parentTableNamespace, string childTableName, string childTableNamespace, string[] parentColumnNames, string[] childColumnNames, bool nested)
{
_relationName = relationName;
_parentColumnNames = parentColumnNames;
_childColumnNames = childColumnNames;
_parentTableName = parentTableName;
_childTableName = childTableName;
_parentTableNamespace = parentTableNamespace;
_childTableNamespace = childTableNamespace;
_nested = nested;
}
/// <summary>
/// Gets the child columns of this relation.
/// </summary>
public virtual DataColumn[] ChildColumns
{
get
{
CheckStateForProperty();
return _childKey.ToArray();
}
}
internal DataColumn[] ChildColumnsReference
{
get
{
CheckStateForProperty();
return _childKey.ColumnsReference;
}
}
/// <summary>
/// The internal Key object for the child table.
/// </summary>
internal DataKey ChildKey
{
get
{
CheckStateForProperty();
return _childKey;
}
}
/// <summary>
/// Gets the child table of this relation.
/// </summary>
public virtual DataTable ChildTable
{
get
{
CheckStateForProperty();
return _childKey.Table;
}
}
/// <summary>
/// Gets the <see cref='System.Data.DataSet'/> to which the relations' collection belongs to.
/// </summary>
[DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden), Browsable(false)]
public virtual DataSet DataSet
{
get
{
CheckStateForProperty();
return _dataSet;
}
}
internal string[] ParentColumnNames => _parentKey.GetColumnNames();
internal string[] ChildColumnNames => _childKey.GetColumnNames();
private static bool IsKeyNull(object[] values)
{
for (int i = 0; i < values.Length; i++)
{
if (!DataStorage.IsObjectNull(values[i]))
{
return false;
}
}
return true;
}
/// <summary>
/// Gets the child rows for the parent row across the relation using the version given
/// </summary>
internal static DataRow[] GetChildRows(DataKey parentKey, DataKey childKey, DataRow parentRow, DataRowVersion version)
{
object[] values = parentRow.GetKeyValues(parentKey, version);
if (IsKeyNull(values))
{
return childKey.Table.NewRowArray(0);
}
Index index = childKey.GetSortIndex((version == DataRowVersion.Original) ? DataViewRowState.OriginalRows : DataViewRowState.CurrentRows);
return index.GetRows(values);
}
/// <summary>
/// Gets the parent rows for the given child row across the relation using the version given
/// </summary>
internal static DataRow[] GetParentRows(DataKey parentKey, DataKey childKey, DataRow childRow, DataRowVersion version)
{
object[] values = childRow.GetKeyValues(childKey, version);
if (IsKeyNull(values))
{
return parentKey.Table.NewRowArray(0);
}
Index index = parentKey.GetSortIndex((version == DataRowVersion.Original) ? DataViewRowState.OriginalRows : DataViewRowState.CurrentRows);
return index.GetRows(values);
}
internal static DataRow GetParentRow(DataKey parentKey, DataKey childKey, DataRow childRow, DataRowVersion version)
{
if (!childRow.HasVersion((version == DataRowVersion.Original) ? DataRowVersion.Original : DataRowVersion.Current))
{
if (childRow._tempRecord == -1)
{
return null;
}
}
object[] values = childRow.GetKeyValues(childKey, version);
if (IsKeyNull(values))
{
return null;
}
Index index = parentKey.GetSortIndex((version == DataRowVersion.Original) ? DataViewRowState.OriginalRows : DataViewRowState.CurrentRows);
Range range = index.FindRecords(values);
if (range.IsNull)
{
return null;
}
if (range.Count > 1)
{
throw ExceptionBuilder.MultipleParents();
}
return parentKey.Table._recordManager[index.GetRecord(range.Min)];
}
/// <summary>
/// Internally sets the DataSet pointer.
/// </summary>
internal void SetDataSet(DataSet dataSet)
{
if (_dataSet != dataSet)
{
_dataSet = dataSet;
}
}
internal void SetParentRowRecords(DataRow childRow, DataRow parentRow)
{
object[] parentKeyValues = parentRow.GetKeyValues(ParentKey);
if (childRow._tempRecord != -1)
{
ChildTable._recordManager.SetKeyValues(childRow._tempRecord, ChildKey, parentKeyValues);
}
if (childRow._newRecord != -1)
{
ChildTable._recordManager.SetKeyValues(childRow._newRecord, ChildKey, parentKeyValues);
}
if (childRow._oldRecord != -1)
{
ChildTable._recordManager.SetKeyValues(childRow._oldRecord, ChildKey, parentKeyValues);
}
}
/// <summary>
/// Gets the parent columns of this relation.
/// </summary>
public virtual DataColumn[] ParentColumns
{
get
{
CheckStateForProperty();
return _parentKey.ToArray();
}
}
internal DataColumn[] ParentColumnsReference => _parentKey.ColumnsReference;
/// <summary>
/// The internal constraint object for the parent table.
/// </summary>
internal DataKey ParentKey
{
get
{
CheckStateForProperty();
return _parentKey;
}
}
/// <summary>
/// Gets the parent table of this relation.
/// </summary>
public virtual DataTable ParentTable
{
get
{
CheckStateForProperty();
return _parentKey.Table;
}
}
/// <summary>
/// Gets or sets the name used to look up this relation in the parent
/// data set's <see cref='System.Data.DataRelationCollection'/>.
/// </summary>
[DefaultValue("")]
public virtual string RelationName
{
get
{
CheckStateForProperty();
return _relationName;
}
set
{
long logScopeId = DataCommonEventSource.Log.EnterScope("<ds.DataRelation.set_RelationName|API> {0}, '{1}'", ObjectID, value);
try
{
if (value == null)
{
value = string.Empty;
}
CultureInfo locale = (_dataSet != null ? _dataSet.Locale : CultureInfo.CurrentCulture);
if (string.Compare(_relationName, value, true, locale) != 0)
{
if (_dataSet != null)
{
if (value.Length == 0)
{
throw ExceptionBuilder.NoRelationName();
}
_dataSet.Relations.RegisterName(value);
if (_relationName.Length != 0)
{
_dataSet.Relations.UnregisterName(_relationName);
}
}
_relationName = value;
((DataRelationCollection.DataTableRelationCollection)(ParentTable.ChildRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
((DataRelationCollection.DataTableRelationCollection)(ChildTable.ParentRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
}
else if (string.Compare(_relationName, value, false, locale) != 0)
{
_relationName = value;
((DataRelationCollection.DataTableRelationCollection)(ParentTable.ChildRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
((DataRelationCollection.DataTableRelationCollection)(ChildTable.ParentRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
}
}
finally
{
DataCommonEventSource.Log.ExitScope(logScopeId);
}
}
}
internal void CheckNamespaceValidityForNestedRelations(string ns)
{
foreach (DataRelation rel in ChildTable.ParentRelations)
{
if (rel == this || rel.Nested)
{
if (rel.ParentTable.Namespace != ns)
{
throw ExceptionBuilder.InValidNestedRelation(ChildTable.TableName);
}
}
}
}
internal void CheckNestedRelations()
{
DataCommonEventSource.Log.Trace("<ds.DataRelation.CheckNestedRelations|INFO> {0}", ObjectID);
Debug.Assert(DataSet == null || !_nested, "this relation supposed to be not in dataset or not nested");
// 1. There is no other relation (R) that has this.ChildTable as R.ChildTable
// This is not valid for Whidbey anymore so the code has been removed
// 2. There is no loop in nested relations
#if DEBUG
int numTables = ParentTable.DataSet.Tables.Count;
#endif
DataTable dt = ParentTable;
if (ChildTable == ParentTable)
{
if (string.Compare(ChildTable.TableName, ChildTable.DataSet.DataSetName, true, ChildTable.DataSet.Locale) == 0)
throw ExceptionBuilder.SelfnestedDatasetConflictingName(ChildTable.TableName);
return; //allow self join tables.
}
List<DataTable> list = new List<DataTable>();
list.Add(ChildTable);
// We have already checked for nested relaion UP
for (int i = 0; i < list.Count; ++i)
{
DataRelation[] relations = list[i].NestedParentRelations;
foreach (DataRelation rel in relations)
{
if (rel.ParentTable == ChildTable && rel.ChildTable != ChildTable)
{
throw ExceptionBuilder.LoopInNestedRelations(ChildTable.TableName);
}
if (!list.Contains(rel.ParentTable))
{
// check for self nested
list.Add(rel.ParentTable);
}
}
}
}
/********************
The Namespace of a table nested inside multiple parents can be
1. Explicitly specified
2. Inherited from Parent Table
3. Empty (Form = unqualified case)
However, Schema does not allow (3) to be a global element and multiple nested child has to be a global element.
Therefore we'll reduce case (3) to (2) if all parents have same namespace else throw.
********************/
/// <summary>
/// Gets or sets a value indicating whether relations are nested.
/// </summary>
[DefaultValue(false)]
public virtual bool Nested
{
get
{
CheckStateForProperty();
return _nested;
}
set
{
long logScopeId = DataCommonEventSource.Log.EnterScope("<ds.DataRelation.set_Nested|API> {0}, {1}", ObjectID, value);
try
{
if (_nested != value)
{
if (_dataSet != null)
{
if (value)
{
if (ChildTable.IsNamespaceInherited())
{ // if not added to collection, don't do this check
CheckNamespaceValidityForNestedRelations(ParentTable.Namespace);
}
Debug.Assert(ChildTable != null, "On a DataSet, but not on Table. Bad state");
ForeignKeyConstraint constraint = ChildTable.Constraints.FindForeignKeyConstraint(ChildKey.ColumnsReference, ParentKey.ColumnsReference);
if (constraint != null)
{
constraint.CheckConstraint();
}
ValidateMultipleNestedRelations();
}
}
if (!value && (_parentKey.ColumnsReference[0].ColumnMapping == MappingType.Hidden))
{
throw ExceptionBuilder.RelationNestedReadOnly();
}
if (value)
{
ParentTable.Columns.RegisterColumnName(ChildTable.TableName, null);
}
else
{
ParentTable.Columns.UnregisterName(ChildTable.TableName);
}
RaisePropertyChanging(nameof(Nested));
if (value)
{
CheckNestedRelations();
if (DataSet != null)
if (ParentTable == ChildTable)
{
foreach (DataRow row in ChildTable.Rows)
{
row.CheckForLoops(this);
}
if (ChildTable.DataSet != null && (string.Compare(ChildTable.TableName, ChildTable.DataSet.DataSetName, true, ChildTable.DataSet.Locale) == 0))
{
throw ExceptionBuilder.DatasetConflictingName(_dataSet.DataSetName);
}
ChildTable._fNestedInDataset = false;
}
else
{
foreach (DataRow row in ChildTable.Rows)
{
row.GetParentRow(this);
}
}
ParentTable.ElementColumnCount++;
}
else
{
ParentTable.ElementColumnCount--;
}
_nested = value;
ChildTable.CacheNestedParent();
if (value)
{
if (string.IsNullOrEmpty(ChildTable.Namespace) && ((ChildTable.NestedParentsCount > 1) ||
((ChildTable.NestedParentsCount > 0) && !(ChildTable.DataSet.Relations.Contains(RelationName)))))
{
string parentNs = null;
foreach (DataRelation rel in ChildTable.ParentRelations)
{
if (rel.Nested)
{
if (null == parentNs)
{
parentNs = rel.ParentTable.Namespace;
}
else
{
if (!string.Equals(parentNs, rel.ParentTable.Namespace, StringComparison.Ordinal))
{
_nested = false;
throw ExceptionBuilder.InvalidParentNamespaceinNestedRelation(ChildTable.TableName);
}
}
}
}
// if not already in memory , form == unqualified
if (CheckMultipleNested && ChildTable._tableNamespace != null && ChildTable._tableNamespace.Length == 0)
{
throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName);
}
ChildTable._tableNamespace = null; // if we dont throw, then let it inherit the Namespace
}
}
}
}
finally
{
DataCommonEventSource.Log.ExitScope(logScopeId);
}
}
}
/// <summary>
/// Gets the constraint which ensures values in a column are unique.
/// </summary>
public virtual UniqueConstraint ParentKeyConstraint
{
get
{
CheckStateForProperty();
return _parentKeyConstraint;
}
}
internal void SetParentKeyConstraint(UniqueConstraint value)
{
Debug.Assert(_parentKeyConstraint == null || value == null, "ParentKeyConstraint should not have been set already.");
_parentKeyConstraint = value;
}
/// <summary>
/// Gets the <see cref='System.Data.ForeignKeyConstraint'/> for the relation.
/// </summary>
public virtual ForeignKeyConstraint ChildKeyConstraint
{
get
{
CheckStateForProperty();
return _childKeyConstraint;
}
}
/// <summary>
/// Gets the collection of custom user information.
/// </summary>
[Browsable(false)]
public PropertyCollection ExtendedProperties => _extendedProperties ?? (_extendedProperties = new PropertyCollection());
internal bool CheckMultipleNested
{
get { return _checkMultipleNested; }
set { _checkMultipleNested = value; }
}
internal void SetChildKeyConstraint(ForeignKeyConstraint value)
{
Debug.Assert(_childKeyConstraint == null || value == null, "ChildKeyConstraint should not have been set already.");
_childKeyConstraint = value;
}
internal event PropertyChangedEventHandler PropertyChanging;
// If we're not in a dataSet relations collection, we need to verify on every property get that we're
// still a good relation object.
internal void CheckState()
{
if (_dataSet == null)
{
_parentKey.CheckState();
_childKey.CheckState();
if (_parentKey.Table.DataSet != _childKey.Table.DataSet)
{
throw ExceptionBuilder.RelationDataSetMismatch();
}
if (_childKey.ColumnsEqual(_parentKey))
{
throw ExceptionBuilder.KeyColumnsIdentical();
}
for (int i = 0; i < _parentKey.ColumnsReference.Length; i++)
{
if ((_parentKey.ColumnsReference[i].DataType != _childKey.ColumnsReference[i].DataType) ||
((_parentKey.ColumnsReference[i].DataType == typeof(DateTime)) &&
(_parentKey.ColumnsReference[i].DateTimeMode != _childKey.ColumnsReference[i].DateTimeMode) &&
((_parentKey.ColumnsReference[i].DateTimeMode & _childKey.ColumnsReference[i].DateTimeMode) != DataSetDateTime.Unspecified)))
{
// allow unspecified and unspecifiedlocal
throw ExceptionBuilder.ColumnsTypeMismatch();
}
}
}
}
/// <summary>
/// Checks to ensure the DataRelation is a valid object, even if it doesn't
/// belong to a <see cref='System.Data.DataSet'/>.
/// </summary>
protected void CheckStateForProperty()
{
try
{
CheckState();
}
catch (Exception e) when (ADP.IsCatchableExceptionType(e))
{
throw ExceptionBuilder.BadObjectPropertyAccess(e.Message);
}
}
private void Create(string relationName, DataColumn[] parentColumns, DataColumn[] childColumns, bool createConstraints)
{
long logScopeId = DataCommonEventSource.Log.EnterScope("<ds.DataRelation.Create|INFO> {0}, relationName='{1}', createConstraints={2}", ObjectID, relationName, createConstraints);
try
{
_parentKey = new DataKey(parentColumns, true);
_childKey = new DataKey(childColumns, true);
if (parentColumns.Length != childColumns.Length)
{
throw ExceptionBuilder.KeyLengthMismatch();
}
for (int i = 0; i < parentColumns.Length; i++)
{
if ((parentColumns[i].Table.DataSet == null) || (childColumns[i].Table.DataSet == null))
{
throw ExceptionBuilder.ParentOrChildColumnsDoNotHaveDataSet();
}
}
CheckState();
_relationName = (relationName == null ? "" : relationName);
_createConstraints = createConstraints;
}
finally
{
DataCommonEventSource.Log.ExitScope(logScopeId);
}
}
internal DataRelation Clone(DataSet destination)
{
DataCommonEventSource.Log.Trace("<ds.DataRelation.Clone|INFO> {0}, destination={1}", ObjectID, (destination != null) ? destination.ObjectID : 0);
DataTable parent = destination.Tables[ParentTable.TableName, ParentTable.Namespace];
DataTable child = destination.Tables[ChildTable.TableName, ChildTable.Namespace];
int keyLength = _parentKey.ColumnsReference.Length;
DataColumn[] parentColumns = new DataColumn[keyLength];
DataColumn[] childColumns = new DataColumn[keyLength];
for (int i = 0; i < keyLength; i++)
{
parentColumns[i] = parent.Columns[ParentKey.ColumnsReference[i].ColumnName];
childColumns[i] = child.Columns[ChildKey.ColumnsReference[i].ColumnName];
}
DataRelation clone = new DataRelation(_relationName, parentColumns, childColumns, false);
clone.CheckMultipleNested = false; // disable the check in clone as it is already created
clone.Nested = Nested;
clone.CheckMultipleNested = true; // enable the check
// ...Extended Properties
if (_extendedProperties != null)
{
foreach (object key in _extendedProperties.Keys)
{
clone.ExtendedProperties[key] = _extendedProperties[key];
}
}
return clone;
}
protected internal void OnPropertyChanging(PropertyChangedEventArgs pcevent)
{
if (PropertyChanging != null)
{
DataCommonEventSource.Log.Trace("<ds.DataRelation.OnPropertyChanging|INFO> {0}", ObjectID);
PropertyChanging(this, pcevent);
}
}
protected internal void RaisePropertyChanging(string name)
{
OnPropertyChanging(new PropertyChangedEventArgs(name));
}
/// <summary>
/// </summary>
public override string ToString() => RelationName;
internal void ValidateMultipleNestedRelations()
{
// find all nested relations that this child table has
// if this relation is the only relation it has, then fine,
// otherwise check if all relations are created from XSD, without using Key/KeyRef
// check all keys to see autogenerated
if (!Nested || !CheckMultipleNested) // no need for this verification
{
return;
}
if (0 < ChildTable.NestedParentRelations.Length)
{
DataColumn[] childCols = ChildColumns;
if (childCols.Length != 1 || !IsAutoGenerated(childCols[0]))
{
throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName);
}
if (!XmlTreeGen.AutoGenerated(this))
{
throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName);
}
foreach (Constraint cs in ChildTable.Constraints)
{
if (cs is ForeignKeyConstraint)
{
ForeignKeyConstraint fk = (ForeignKeyConstraint)cs;
if (!XmlTreeGen.AutoGenerated(fk, true))
{
throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName);
}
}
else
{
UniqueConstraint unique = (UniqueConstraint)cs;
if (!XmlTreeGen.AutoGenerated(unique))
{
throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName);
}
}
}
}
}
private bool IsAutoGenerated(DataColumn col)
{
if (col.ColumnMapping != MappingType.Hidden)
{
return false;
}
if (col.DataType != typeof(int))
{
return false;
}
string generatedname = col.Table.TableName + "_Id";
if ((col.ColumnName == generatedname) || (col.ColumnName == generatedname + "_0"))
{
return true;
}
generatedname = ParentColumnsReference[0].Table.TableName + "_Id";
if ((col.ColumnName == generatedname) || (col.ColumnName == generatedname + "_0"))
{
return true;
}
return false;
}
internal int ObjectID => _objectID;
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.SolutionCrawler;
using Microsoft.Isam.Esent.Interop;
using Microsoft.VisualStudio.LanguageServices.Implementation.Esent;
using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem;
using Microsoft.VisualStudio.LanguageServices.Implementation.SolutionSize;
using Roslyn.Utilities;
namespace Microsoft.VisualStudio.LanguageServices.Implementation
{
/// <summary>
/// A service that enables storing and retrieving of information associated with solutions,
/// projects or documents across runtime sessions.
/// </summary>
internal partial class PersistentStorageService : IPersistentStorageService
{
/// <summary>
/// threshold to start to use esent (50MB)
/// </summary>
private const int SolutionSizeThreshold = 50 * 1024 * 1024;
internal static readonly IPersistentStorage NoOpPersistentStorageInstance = new NoOpPersistentStorage();
private readonly IOptionService _optionService;
private readonly SolutionSizeTracker _solutionSizeTracker;
private readonly object _lookupAccessLock;
private readonly Dictionary<string, AbstractPersistentStorage> _lookup;
private readonly bool _testing;
private string _lastSolutionPath;
private SolutionId _primarySolutionId;
private AbstractPersistentStorage _primarySolutionStorage;
public PersistentStorageService(
IOptionService optionService,
SolutionSizeTracker solutionSizeTracker)
{
_optionService = optionService;
_solutionSizeTracker = solutionSizeTracker;
_lookupAccessLock = new object();
_lookup = new Dictionary<string, AbstractPersistentStorage>();
_lastSolutionPath = null;
_primarySolutionId = null;
_primarySolutionStorage = null;
}
public PersistentStorageService(IOptionService optionService, bool testing) : this(optionService)
{
_testing = true;
}
public PersistentStorageService(IOptionService optionService) : this(optionService, null)
{
}
public IPersistentStorage GetStorage(Solution solution)
{
if (!ShouldUseEsent(solution))
{
return NoOpPersistentStorageInstance;
}
// can't use cached information
if (!string.Equals(solution.FilePath, _lastSolutionPath, StringComparison.OrdinalIgnoreCase))
{
// check whether the solution actually exist on disk
if (!File.Exists(solution.FilePath))
{
return NoOpPersistentStorageInstance;
}
}
// cache current result.
_lastSolutionPath = solution.FilePath;
// get working folder path
var workingFolderPath = GetWorkingFolderPath(solution);
if (workingFolderPath == null)
{
// we don't have place to save esent file. don't use esent
return NoOpPersistentStorageInstance;
}
return GetStorage(solution, workingFolderPath);
}
private IPersistentStorage GetStorage(Solution solution, string workingFolderPath)
{
lock (_lookupAccessLock)
{
// see whether we have something we can use
AbstractPersistentStorage storage;
if (_lookup.TryGetValue(solution.FilePath, out storage))
{
// previous attempt to create esent storage failed.
if (storage == null && !SolutionSizeAboveThreshold(solution))
{
return NoOpPersistentStorageInstance;
}
// everything seems right, use what we have
if (storage?.WorkingFolderPath == workingFolderPath)
{
storage.AddRefUnsafe();
return storage;
}
}
// either this is the first time, or working folder path has changed.
// remove existing one
_lookup.Remove(solution.FilePath);
var dbFile = EsentPersistentStorage.GetDatabaseFile(workingFolderPath);
if (!File.Exists(dbFile) && !SolutionSizeAboveThreshold(solution))
{
_lookup.Add(solution.FilePath, storage);
return NoOpPersistentStorageInstance;
}
// try create new one
storage = TryCreateEsentStorage(workingFolderPath, solution.FilePath);
_lookup.Add(solution.FilePath, storage);
if (storage != null)
{
RegisterPrimarySolutionStorageIfNeeded(solution, storage);
storage.AddRefUnsafe();
return storage;
}
return NoOpPersistentStorageInstance;
}
}
private bool ShouldUseEsent(Solution solution)
{
if (_testing)
{
return true;
}
// we only use esent for primary solution. (Ex, forked solution will not use esent)
if (solution.BranchId != solution.Workspace.PrimaryBranchId || solution.FilePath == null)
{
return false;
}
return true;
}
private bool SolutionSizeAboveThreshold(Solution solution)
{
if (_testing)
{
return true;
}
if (_solutionSizeTracker == null)
{
return false;
}
var size = _solutionSizeTracker.GetSolutionSize(solution.Workspace, solution.Id);
return size > SolutionSizeThreshold;
}
private void RegisterPrimarySolutionStorageIfNeeded(Solution solution, AbstractPersistentStorage storage)
{
if (_primarySolutionStorage != null || solution.Id != _primarySolutionId)
{
return;
}
// hold onto the primary solution when it is used the first time.
_primarySolutionStorage = storage;
storage.AddRefUnsafe();
}
private string GetWorkingFolderPath(Solution solution)
{
if (_testing)
{
return Path.Combine(Path.GetDirectoryName(solution.FilePath), ".vs", Path.GetFileNameWithoutExtension(solution.FilePath));
}
var vsWorkspace = solution.Workspace as VisualStudioWorkspaceImpl;
if (vsWorkspace == null)
{
return null;
}
return vsWorkspace.ProjectTracker.GetWorkingFolderPath(solution);
}
private AbstractPersistentStorage TryCreateEsentStorage(string workingFolderPath, string solutionPath)
{
AbstractPersistentStorage esentStorage;
if (TryCreateEsentStorage(workingFolderPath, solutionPath, out esentStorage))
{
return esentStorage;
}
// first attempt could fail if there was something wrong with existing esent db.
// try one more time in case the first attempt fixed the problem.
if (TryCreateEsentStorage(workingFolderPath, solutionPath, out esentStorage))
{
return esentStorage;
}
// okay, can't recover, then use no op persistent service
// so that things works old way (cache everything in memory)
return null;
}
private bool TryCreateEsentStorage(string workingFolderPath, string solutionPath, out AbstractPersistentStorage esentStorage)
{
esentStorage = null;
EsentPersistentStorage esent = null;
try
{
esent = new EsentPersistentStorage(_optionService, workingFolderPath, solutionPath, this.Release);
esent.Initialize();
esentStorage = esent;
return true;
}
catch (EsentAccessDeniedException ex)
{
// esent db is already in use by someone.
if (esent != null)
{
esent.Close();
}
EsentLogger.LogException(ex);
return false;
}
catch (Exception ex)
{
if (esent != null)
{
esent.Close();
}
EsentLogger.LogException(ex);
}
try
{
if (esent != null)
{
Directory.Delete(esent.EsentDirectory, recursive: true);
}
}
catch
{
// somehow, we couldn't delete the directory.
}
return false;
}
private void Release(AbstractPersistentStorage storage)
{
lock (_lookupAccessLock)
{
if (storage.ReleaseRefUnsafe())
{
_lookup.Remove(storage.SolutionFilePath);
storage.Close();
}
}
}
public void RegisterPrimarySolution(SolutionId solutionId)
{
// don't create esent storage file right away. it will be
// created when first C#/VB project is added
lock (_lookupAccessLock)
{
Contract.ThrowIfTrue(_primarySolutionStorage != null);
// just reset solutionId as long as there is no storage has created.
_primarySolutionId = solutionId;
}
}
public void UnregisterPrimarySolution(SolutionId solutionId, bool synchronousShutdown)
{
AbstractPersistentStorage storage = null;
lock (_lookupAccessLock)
{
if (_primarySolutionId == null)
{
// primary solution is never registered or already unregistered
Contract.ThrowIfTrue(_primarySolutionStorage != null);
return;
}
Contract.ThrowIfFalse(_primarySolutionId == solutionId);
_primarySolutionId = null;
if (_primarySolutionStorage == null)
{
// primary solution is registered but no C#/VB project was added
return;
}
storage = _primarySolutionStorage;
_primarySolutionStorage = null;
}
if (storage != null)
{
if (synchronousShutdown)
{
// dispose storage outside of the lock
storage.Dispose();
}
else
{
// make it to shutdown asynchronously
Task.Run(() => storage.Dispose());
}
}
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Threading;
using log4net.Config;
using NUnit.Framework;
using NUnit.Framework.SyntaxHelpers;
using OpenMetaverse;
using OpenMetaverse.Assets;
using OpenSim.Framework;
using OpenSim.Framework.Serialization;
using OpenSim.Framework.Serialization.External;
using OpenSim.Region.CoreModules.World.Serialiser;
using OpenSim.Region.CoreModules.World.Terrain;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.Framework.Scenes.Serialization;
using OpenSim.Tests.Common;
using OpenSim.Tests.Common.Mock;
using OpenSim.Tests.Common.Setup;
using ArchiveConstants = OpenSim.Framework.Serialization.ArchiveConstants;
using TarArchiveReader = OpenSim.Framework.Serialization.TarArchiveReader;
using TarArchiveWriter = OpenSim.Framework.Serialization.TarArchiveWriter;
namespace OpenSim.Region.CoreModules.World.Archiver.Tests
{
[TestFixture]
public class ArchiverTests
{
private Guid m_lastRequestId;
private string m_lastErrorMessage;
protected TestScene m_scene;
protected ArchiverModule m_archiverModule;
protected TaskInventoryItem m_soundItem;
[SetUp]
public void SetUp()
{
m_archiverModule = new ArchiverModule();
SerialiserModule serialiserModule = new SerialiserModule();
TerrainModule terrainModule = new TerrainModule();
m_scene = SceneSetupHelpers.SetupScene("useraccounts");
SceneSetupHelpers.SetupSceneModules(m_scene, m_archiverModule, serialiserModule, terrainModule);
}
private void LoadCompleted(Guid requestId, string errorMessage)
{
lock (this)
{
m_lastRequestId = requestId;
m_lastErrorMessage = errorMessage;
Console.WriteLine("About to pulse ArchiverTests on LoadCompleted");
Monitor.PulseAll(this);
}
}
private void SaveCompleted(Guid requestId, string errorMessage)
{
lock (this)
{
m_lastRequestId = requestId;
m_lastErrorMessage = errorMessage;
Console.WriteLine("About to pulse ArchiverTests on SaveCompleted");
Monitor.PulseAll(this);
}
}
protected SceneObjectPart CreateSceneObjectPart1()
{
string partName = "My Little Pony";
UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000015");
PrimitiveBaseShape shape = PrimitiveBaseShape.CreateSphere();
Vector3 groupPosition = new Vector3(10, 20, 30);
Quaternion rotationOffset = new Quaternion(20, 30, 40, 50);
Vector3 offsetPosition = new Vector3(5, 10, 15);
return new SceneObjectPart(ownerId, shape, groupPosition, rotationOffset, offsetPosition) { Name = partName };
}
protected SceneObjectPart CreateSceneObjectPart2()
{
string partName = "Action Man";
UUID ownerId = UUID.Parse("00000000-0000-0000-0000-000000000016");
PrimitiveBaseShape shape = PrimitiveBaseShape.CreateCylinder();
Vector3 groupPosition = new Vector3(90, 80, 70);
Quaternion rotationOffset = new Quaternion(60, 70, 80, 90);
Vector3 offsetPosition = new Vector3(20, 25, 30);
return new SceneObjectPart(ownerId, shape, groupPosition, rotationOffset, offsetPosition) { Name = partName };
}
/// <summary>
/// Test saving a V0.2 OpenSim Region Archive.
/// </summary>
[Test]
public void TestSaveOarV0_2()
{
TestHelper.InMethod();
//log4net.Config.XmlConfigurator.Configure();
SceneObjectPart part1 = CreateSceneObjectPart1();
SceneObjectGroup sog1 = new SceneObjectGroup(part1);
m_scene.AddNewSceneObject(sog1, false);
SceneObjectPart part2 = CreateSceneObjectPart2();
AssetNotecard nc = new AssetNotecard("Hello World!");
UUID ncAssetUuid = new UUID("00000000-0000-0000-1000-000000000000");
UUID ncItemUuid = new UUID("00000000-0000-0000-1100-000000000000");
AssetBase ncAsset
= AssetHelpers.CreateAsset(ncAssetUuid, AssetType.Notecard, nc.AssetData, UUID.Zero);
m_scene.AssetService.Store(ncAsset);
SceneObjectGroup sog2 = new SceneObjectGroup(part2);
TaskInventoryItem ncItem
= new TaskInventoryItem { Name = "ncItem", AssetID = ncAssetUuid, ItemID = ncItemUuid };
part2.Inventory.AddInventoryItem(ncItem, true);
m_scene.AddNewSceneObject(sog2, false);
MemoryStream archiveWriteStream = new MemoryStream();
m_scene.EventManager.OnOarFileSaved += SaveCompleted;
Guid requestId = new Guid("00000000-0000-0000-0000-808080808080");
lock (this)
{
m_archiverModule.ArchiveRegion(archiveWriteStream, requestId);
//AssetServerBase assetServer = (AssetServerBase)scene.CommsManager.AssetCache.AssetServer;
//while (assetServer.HasWaitingRequests())
// assetServer.ProcessNextRequest();
Monitor.Wait(this, 60000);
}
Assert.That(m_lastRequestId, Is.EqualTo(requestId));
byte[] archive = archiveWriteStream.ToArray();
MemoryStream archiveReadStream = new MemoryStream(archive);
TarArchiveReader tar = new TarArchiveReader(archiveReadStream);
bool gotControlFile = false;
bool gotNcAssetFile = false;
string expectedNcAssetFileName = string.Format("{0}_{1}", ncAssetUuid, "notecard.txt");
List<string> foundPaths = new List<string>();
List<string> expectedPaths = new List<string>();
expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog1));
expectedPaths.Add(ArchiveHelpers.CreateObjectPath(sog2));
string filePath;
TarArchiveReader.TarEntryType tarEntryType;
while (tar.ReadEntry(out filePath, out tarEntryType) != null)
{
if (ArchiveConstants.CONTROL_FILE_PATH == filePath)
{
gotControlFile = true;
}
else if (filePath.StartsWith(ArchiveConstants.ASSETS_PATH))
{
string fileName = filePath.Remove(0, ArchiveConstants.ASSETS_PATH.Length);
Assert.That(fileName, Is.EqualTo(expectedNcAssetFileName));
gotNcAssetFile = true;
}
else if (filePath.StartsWith(ArchiveConstants.OBJECTS_PATH))
{
foundPaths.Add(filePath);
}
}
Assert.That(gotControlFile, Is.True, "No control file in archive");
Assert.That(gotNcAssetFile, Is.True, "No notecard asset file in archive");
Assert.That(foundPaths, Is.EquivalentTo(expectedPaths));
// TODO: Test presence of more files and contents of files.
}
/// <summary>
/// Test loading a V0.2 OpenSim Region Archive.
/// </summary>
[Test]
public void TestLoadOarV0_2()
{
TestHelper.InMethod();
//log4net.Config.XmlConfigurator.Configure();
MemoryStream archiveWriteStream = new MemoryStream();
TarArchiveWriter tar = new TarArchiveWriter(archiveWriteStream);
// Put in a random blank directory to check that this doesn't upset the load process
tar.WriteDir("ignoreme");
// Also check that direct entries which will also have a file entry containing that directory doesn't
// upset load
tar.WriteDir(ArchiveConstants.TERRAINS_PATH);
tar.WriteFile(ArchiveConstants.CONTROL_FILE_PATH, ArchiveWriteRequestExecution.Create0p2ControlFile());
SceneObjectPart part1 = CreateSceneObjectPart1();
SceneObjectGroup object1 = new SceneObjectGroup(part1);
// Let's put some inventory items into our object
string soundItemName = "sound-item1";
UUID soundItemUuid = UUID.Parse("00000000-0000-0000-0000-000000000002");
Type type = GetType();
Assembly assembly = type.Assembly;
string soundDataResourceName = null;
string[] names = assembly.GetManifestResourceNames();
foreach (string name in names)
{
if (name.EndsWith(".Resources.test-sound.wav"))
soundDataResourceName = name;
}
Assert.That(soundDataResourceName, Is.Not.Null);
byte[] soundData;
Console.WriteLine("Loading " + soundDataResourceName);
using (Stream resource = assembly.GetManifestResourceStream(soundDataResourceName))
{
using (BinaryReader br = new BinaryReader(resource))
{
// FIXME: Use the inspector insteadthere are so many forums and lists already, though admittedly none of them are suitable for cross virtual-enivornemnt discussion
soundData = br.ReadBytes(99999999);
UUID soundUuid = UUID.Parse("00000000-0000-0000-0000-000000000001");
string soundAssetFileName
= ArchiveConstants.ASSETS_PATH + soundUuid
+ ArchiveConstants.ASSET_TYPE_TO_EXTENSION[(sbyte)AssetType.SoundWAV];
tar.WriteFile(soundAssetFileName, soundData);
/*
AssetBase soundAsset = AssetHelpers.CreateAsset(soundUuid, soundData);
scene.AssetService.Store(soundAsset);
asset1FileName = ArchiveConstants.ASSETS_PATH + soundUuid + ".wav";
*/
TaskInventoryItem item1
= new TaskInventoryItem { AssetID = soundUuid, ItemID = soundItemUuid, Name = soundItemName };
part1.Inventory.AddInventoryItem(item1, true);
}
}
m_scene.AddNewSceneObject(object1, false);
string object1FileName = string.Format(
"{0}_{1:000}-{2:000}-{3:000}__{4}.xml",
part1.Name,
Math.Round(part1.GroupPosition.X), Math.Round(part1.GroupPosition.Y), Math.Round(part1.GroupPosition.Z),
part1.UUID);
tar.WriteFile(ArchiveConstants.OBJECTS_PATH + object1FileName, SceneObjectSerializer.ToXml2Format(object1));
tar.Close();
MemoryStream archiveReadStream = new MemoryStream(archiveWriteStream.ToArray());
lock (this)
{
m_scene.EventManager.OnOarFileLoaded += LoadCompleted;
m_archiverModule.DearchiveRegion(archiveReadStream);
}
Assert.That(m_lastErrorMessage, Is.Null);
SceneObjectPart object1PartLoaded = m_scene.GetSceneObjectPart(part1.Name);
Assert.That(object1PartLoaded, Is.Not.Null, "object1 was not loaded");
Assert.That(object1PartLoaded.Name, Is.EqualTo(part1.Name), "object1 names not identical");
Assert.That(object1PartLoaded.GroupPosition, Is.EqualTo(part1.GroupPosition), "object1 group position not equal");
Assert.That(
object1PartLoaded.RotationOffset, Is.EqualTo(part1.RotationOffset), "object1 rotation offset not equal");
Assert.That(
object1PartLoaded.OffsetPosition, Is.EqualTo(part1.OffsetPosition), "object1 offset position not equal");
TaskInventoryItem loadedSoundItem = object1PartLoaded.Inventory.GetInventoryItems(soundItemName)[0];
Assert.That(loadedSoundItem, Is.Not.Null, "loaded sound item was null");
AssetBase loadedSoundAsset = m_scene.AssetService.Get(loadedSoundItem.AssetID.ToString());
Assert.That(loadedSoundAsset, Is.Not.Null, "loaded sound asset was null");
Assert.That(loadedSoundAsset.Data, Is.EqualTo(soundData), "saved and loaded sound data do not match");
// Temporary
Console.WriteLine("Successfully completed {0}", MethodBase.GetCurrentMethod());
}
/// <summary>
/// Test loading the region settings of a V0.2 OpenSim Region Archive.
/// </summary>
[Test]
public void TestLoadOarV0_2RegionSettings()
{
TestHelper.InMethod();
//log4net.Config.XmlConfigurator.Configure();
MemoryStream archiveWriteStream = new MemoryStream();
TarArchiveWriter tar = new TarArchiveWriter(archiveWriteStream);
tar.WriteDir(ArchiveConstants.TERRAINS_PATH);
tar.WriteFile(ArchiveConstants.CONTROL_FILE_PATH, ArchiveWriteRequestExecution.Create0p2ControlFile());
RegionSettings rs = new RegionSettings();
rs.AgentLimit = 17;
rs.AllowDamage = true;
rs.AllowLandJoinDivide = true;
rs.AllowLandResell = true;
rs.BlockFly = true;
rs.BlockShowInSearch = true;
rs.BlockTerraform = true;
rs.DisableCollisions = true;
rs.DisablePhysics = true;
rs.DisableScripts = true;
rs.Elevation1NW = 15.9;
rs.Elevation1NE = 45.3;
rs.Elevation1SE = 49;
rs.Elevation1SW = 1.9;
rs.Elevation2NW = 4.5;
rs.Elevation2NE = 19.2;
rs.Elevation2SE = 9.2;
rs.Elevation2SW = 2.1;
rs.FixedSun = true;
rs.ObjectBonus = 1.4;
rs.RestrictPushing = true;
rs.TerrainLowerLimit = 0.4;
rs.TerrainRaiseLimit = 17.9;
rs.TerrainTexture1 = UUID.Parse("00000000-0000-0000-0000-000000000020");
rs.TerrainTexture2 = UUID.Parse("00000000-0000-0000-0000-000000000040");
rs.TerrainTexture3 = UUID.Parse("00000000-0000-0000-0000-000000000060");
rs.TerrainTexture4 = UUID.Parse("00000000-0000-0000-0000-000000000080");
rs.UseEstateSun = true;
rs.WaterHeight = 23;
tar.WriteFile(ArchiveConstants.SETTINGS_PATH + "region1.xml", RegionSettingsSerializer.Serialize(rs));
tar.Close();
MemoryStream archiveReadStream = new MemoryStream(archiveWriteStream.ToArray());
lock (this)
{
m_scene.EventManager.OnOarFileLoaded += LoadCompleted;
m_archiverModule.DearchiveRegion(archiveReadStream);
}
Assert.That(m_lastErrorMessage, Is.Null);
RegionSettings loadedRs = m_scene.RegionInfo.RegionSettings;
Assert.That(loadedRs.AgentLimit, Is.EqualTo(17));
Assert.That(loadedRs.AllowDamage, Is.True);
Assert.That(loadedRs.AllowLandJoinDivide, Is.True);
Assert.That(loadedRs.AllowLandResell, Is.True);
Assert.That(loadedRs.BlockFly, Is.True);
Assert.That(loadedRs.BlockShowInSearch, Is.True);
Assert.That(loadedRs.BlockTerraform, Is.True);
Assert.That(loadedRs.DisableCollisions, Is.True);
Assert.That(loadedRs.DisablePhysics, Is.True);
Assert.That(loadedRs.DisableScripts, Is.True);
Assert.That(loadedRs.Elevation1NW, Is.EqualTo(15.9));
Assert.That(loadedRs.Elevation1NE, Is.EqualTo(45.3));
Assert.That(loadedRs.Elevation1SE, Is.EqualTo(49));
Assert.That(loadedRs.Elevation1SW, Is.EqualTo(1.9));
Assert.That(loadedRs.Elevation2NW, Is.EqualTo(4.5));
Assert.That(loadedRs.Elevation2NE, Is.EqualTo(19.2));
Assert.That(loadedRs.Elevation2SE, Is.EqualTo(9.2));
Assert.That(loadedRs.Elevation2SW, Is.EqualTo(2.1));
Assert.That(loadedRs.FixedSun, Is.True);
Assert.That(loadedRs.ObjectBonus, Is.EqualTo(1.4));
Assert.That(loadedRs.RestrictPushing, Is.True);
Assert.That(loadedRs.TerrainLowerLimit, Is.EqualTo(0.4));
Assert.That(loadedRs.TerrainRaiseLimit, Is.EqualTo(17.9));
Assert.That(loadedRs.TerrainTexture1, Is.EqualTo(UUID.Parse("00000000-0000-0000-0000-000000000020")));
Assert.That(loadedRs.TerrainTexture2, Is.EqualTo(UUID.Parse("00000000-0000-0000-0000-000000000040")));
Assert.That(loadedRs.TerrainTexture3, Is.EqualTo(UUID.Parse("00000000-0000-0000-0000-000000000060")));
Assert.That(loadedRs.TerrainTexture4, Is.EqualTo(UUID.Parse("00000000-0000-0000-0000-000000000080")));
Assert.That(loadedRs.UseEstateSun, Is.True);
Assert.That(loadedRs.WaterHeight, Is.EqualTo(23));
}
/// <summary>
/// Test merging a V0.2 OpenSim Region Archive into an existing scene
/// </summary>
//[Test]
public void TestMergeOarV0_2()
{
TestHelper.InMethod();
//XmlConfigurator.Configure();
MemoryStream archiveWriteStream = new MemoryStream();
// string part2Name = "objectMerge";
// PrimitiveBaseShape part2Shape = PrimitiveBaseShape.CreateCylinder();
// Vector3 part2GroupPosition = new Vector3(90, 80, 70);
// Quaternion part2RotationOffset = new Quaternion(60, 70, 80, 90);
// Vector3 part2OffsetPosition = new Vector3(20, 25, 30);
SceneObjectPart part2 = CreateSceneObjectPart2();
// Create an oar file that we can use for the merge
{
ArchiverModule archiverModule = new ArchiverModule();
SerialiserModule serialiserModule = new SerialiserModule();
TerrainModule terrainModule = new TerrainModule();
Scene scene = SceneSetupHelpers.SetupScene();
SceneSetupHelpers.SetupSceneModules(scene, archiverModule, serialiserModule, terrainModule);
m_scene.AddNewSceneObject(new SceneObjectGroup(part2), false);
// Write out this scene
scene.EventManager.OnOarFileSaved += SaveCompleted;
lock (this)
{
m_archiverModule.ArchiveRegion(archiveWriteStream);
Monitor.Wait(this, 60000);
}
}
{
SceneObjectPart part1 = CreateSceneObjectPart1();
m_scene.AddNewSceneObject(new SceneObjectGroup(part1), false);
// Merge in the archive we created earlier
byte[] archive = archiveWriteStream.ToArray();
MemoryStream archiveReadStream = new MemoryStream(archive);
m_archiverModule.DearchiveRegion(archiveReadStream, true, false, Guid.Empty);
SceneObjectPart object1Existing = m_scene.GetSceneObjectPart(part1.Name);
Assert.That(object1Existing, Is.Not.Null, "object1 was not present after merge");
Assert.That(object1Existing.Name, Is.EqualTo(part1.Name), "object1 names not identical after merge");
Assert.That(object1Existing.GroupPosition, Is.EqualTo(part1.GroupPosition), "object1 group position not equal after merge");
SceneObjectPart object2PartMerged = m_scene.GetSceneObjectPart(part2.Name);
Assert.That(object2PartMerged, Is.Not.Null, "object2 was not present after merge");
Assert.That(object2PartMerged.Name, Is.EqualTo(part2.Name), "object2 names not identical after merge");
Assert.That(object2PartMerged.GroupPosition, Is.EqualTo(part2.GroupPosition), "object2 group position not equal after merge");
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Text;
/// <summary>
/// StringBuilder.ctor(String,Int32,Int32,Int32)
/// </summary>
public class StringBuilderctor6
{
public static int Main()
{
StringBuilderctor6 sbctor6 = new StringBuilderctor6();
TestLibrary.TestFramework.BeginTestCase("StringBuilderctor6");
if (sbctor6.RunTests())
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("PASS");
return 100;
}
else
{
TestLibrary.TestFramework.EndTestCase();
TestLibrary.TestFramework.LogInformation("FAIL");
return 0;
}
}
public bool RunTests()
{
bool retVal = true;
TestLibrary.TestFramework.LogInformation("[Positive]");
retVal = PosTest1() && retVal;
retVal = PosTest2() && retVal;
retVal = PosTest3() && retVal;
retVal = PosTest4() && retVal;
retVal = PosTest5() && retVal;
TestLibrary.TestFramework.LogInformation("[Negative]");
retVal = NegTest1() && retVal;
retVal = NegTest2() && retVal;
return retVal;
}
#region PositiveTest
public bool PosTest1()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest1:Initialize StringBuilder with substring and capacity 1");
try
{
string strValue = null;
int capacity = this.GetInt32(1, 256);
StringBuilder sb = new StringBuilder(strValue, 0, 0, capacity);
if (sb == null || sb.ToString() != string.Empty || sb.Capacity != capacity)
{
TestLibrary.TestFramework.LogError("001", "The ExpectResult is not the ActualResult");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("002", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
public bool PosTest2()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest2:Initialize StringBuilder with substring and capacity 2");
try
{
string strValue = string.Empty;
int startIndex = 0;
int length = 0;
int capacity = this.GetInt32(1, 256);
StringBuilder sb = new StringBuilder(strValue, startIndex, length, capacity);
if (sb == null || sb.ToString() != string.Empty || sb.Capacity != capacity)
{
TestLibrary.TestFramework.LogError("003", "The ExpectResult is not the ActualResult");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("004", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
public bool PosTest3()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest3:Initialize StringBuilder with substring and capacity 3");
try
{
string strValue = TestLibrary.Generator.GetString(-55, false, 8, 256);
int startIndex = 0;
int length = strValue.Length;
int capacity = this.GetInt32(1, 256);
StringBuilder sb = new StringBuilder(strValue, startIndex, length, capacity);
if (sb == null || sb.ToString() != strValue)
{
TestLibrary.TestFramework.LogError("005", "The ExpectResult is not the ActualResult");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("006", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
public bool PosTest4()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest4:Initialize StringBuilder with substring and capacity 4");
try
{
string strValue = TestLibrary.Generator.GetString(-55, false, 8, 256);
int startIndex = this.GetInt32(0, strValue.Length);
int length = strValue.Length - startIndex;
int capacity = this.GetInt32(1, 256);
StringBuilder sb = new StringBuilder(strValue, startIndex, length, capacity);
if (sb == null || sb.ToString() != strValue.Substring(startIndex,length))
{
TestLibrary.TestFramework.LogError("007", "The ExpectResult is not the ActualResult");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("008", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
public bool PosTest5()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("PosTest5:Initialize StringBuilder with substring and capacity 5");
try
{
string strValue = string.Empty;
int startIndex = 0;
int length = strValue.Length;
int capacity = 0;
StringBuilder sb = new StringBuilder(strValue, startIndex, length, capacity);
if (sb == null || sb.ToString() != string.Empty || sb.Capacity != 16)
{
TestLibrary.TestFramework.LogError("009", "The ExpectResult is not the ActualResult");
retVal = false;
}
}
catch (Exception e)
{
TestLibrary.TestFramework.LogError("010", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
#endregion
#region NegativeTest
public bool NegTest1()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest1:The capacity is less than zero");
try
{
string strValue = TestLibrary.Generator.GetString(-55, false, 8, 256);
int startIndex = 0;
int length = strValue.Length;
int capacity = this.GetInt32(1, Int32.MaxValue) * (-1);
StringBuilder sb = new StringBuilder(strValue,startIndex,length,capacity);
TestLibrary.TestFramework.LogError("N001", "The capacity is less than zero but not throw exception");
retVal = false;
}
catch (ArgumentOutOfRangeException) { }
catch (Exception e)
{
TestLibrary.TestFramework.LogError("N002", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
public bool NegTest2()
{
bool retVal = true;
TestLibrary.TestFramework.BeginScenario("NegTest2:The startIndex plus length is not a position within value");
try
{
string strValue = TestLibrary.Generator.GetString(-55, false, 8, 256);
int startIndex = 1;
int length = strValue.Length;
int capacity = this.GetInt32(0, 256);
StringBuilder sb = new StringBuilder(strValue, startIndex, length, capacity);
TestLibrary.TestFramework.LogError("N003", "The startIndex plus length is not a position within value but not throw exception");
retVal = false;
}
catch (ArgumentOutOfRangeException) { }
catch (Exception e)
{
TestLibrary.TestFramework.LogError("N004", "Unexpect exception:" + e);
retVal = false;
}
return retVal;
}
#endregion
#region HelpMethod
private Int32 GetInt32(Int32 minValue, Int32 maxValue)
{
try
{
if (minValue == maxValue)
{
return minValue;
}
if (minValue < maxValue)
{
return minValue + TestLibrary.Generator.GetInt32(-55) % (maxValue - minValue);
}
}
catch
{
throw;
}
return minValue;
}
#endregion
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Net.Http.Headers;
using Xunit;
namespace System.Net.Http.Tests
{
public class AuthenticationHeaderValueTest
{
[Fact]
public void Ctor_SetBothSchemeAndParameters_MatchExpectation()
{
AuthenticationHeaderValue auth = new AuthenticationHeaderValue("Basic", "realm=\"contoso.com\"");
Assert.Equal("Basic", auth.Scheme);
Assert.Equal("realm=\"contoso.com\"", auth.Parameter);
Assert.Throws<ArgumentException>(() => { new AuthenticationHeaderValue(null, "x"); });
Assert.Throws<ArgumentException>(() => { new AuthenticationHeaderValue("", "x"); });
Assert.Throws<FormatException>(() => { new AuthenticationHeaderValue(" x", "x"); });
Assert.Throws<FormatException>(() => { new AuthenticationHeaderValue("x ", "x"); });
Assert.Throws<FormatException>(() => { new AuthenticationHeaderValue("x y", "x"); });
}
[Fact]
public void Ctor_SetSchemeOnly_MatchExpectation()
{
// Just verify that this ctor forwards the call to the overload taking 2 parameters.
AuthenticationHeaderValue auth = new AuthenticationHeaderValue("NTLM");
Assert.Equal("NTLM", auth.Scheme);
Assert.Null(auth.Parameter);
}
[Fact]
public void ToString_UseBothNoParameterAndSetParameter_AllSerializedCorrectly()
{
HttpResponseMessage response = new HttpResponseMessage();
string input = string.Empty;
AuthenticationHeaderValue auth = new AuthenticationHeaderValue("Digest",
"qop=\"auth\",algorithm=MD5-sess,nonce=\"+Upgraded+v109e309640b\",charset=utf-8,realm=\"Digest\"");
Assert.Equal(
"Digest qop=\"auth\",algorithm=MD5-sess,nonce=\"+Upgraded+v109e309640b\",charset=utf-8,realm=\"Digest\"",
auth.ToString());
response.Headers.ProxyAuthenticate.Add(auth);
input += auth.ToString();
auth = new AuthenticationHeaderValue("Negotiate");
Assert.Equal("Negotiate", auth.ToString());
response.Headers.ProxyAuthenticate.Add(auth);
input += ", " + auth.ToString();
auth = new AuthenticationHeaderValue("Custom", ""); // empty string should be treated like 'null'.
Assert.Equal("Custom", auth.ToString());
response.Headers.ProxyAuthenticate.Add(auth);
input += ", " + auth.ToString();
string result = response.Headers.ProxyAuthenticate.ToString();
Assert.Equal(input, result);
}
[Fact]
public void Parse_GoodValues_Success()
{
HttpRequestMessage request = new HttpRequestMessage();
string input = " Digest qop=\"auth\",algorithm=MD5-sess,nonce=\"+Upgraded+v109e309640b\",charset=utf-8 ";
request.Headers.Authorization = AuthenticationHeaderValue.Parse(input);
Assert.Equal(input.Trim(), request.Headers.Authorization.ToString());
}
[Fact]
public void TryParse_GoodValues_Success()
{
HttpRequestMessage request = new HttpRequestMessage();
string input = " Digest qop=\"auth\",algorithm=MD5-sess,nonce=\"+Upgraded+v109e309640b\",realm=\"Digest\" ";
AuthenticationHeaderValue parsedValue;
Assert.True(AuthenticationHeaderValue.TryParse(input, out parsedValue));
request.Headers.Authorization = parsedValue;
Assert.Equal(input.Trim(), request.Headers.Authorization.ToString());
}
[Fact]
public void Parse_BadValues_Throws()
{
string input = "D\rigest qop=\"auth\",algorithm=MD5-sess,charset=utf-8,realm=\"Digest\"";
Assert.Throws<FormatException>(() => { AuthenticationHeaderValue.Parse(input); });
}
[Fact]
public void TryParse_BadValues_False()
{
string input = ", Digest qop=\"auth\",nonce=\"+Upgraded+v109e309640b\",charset=utf-8,realm=\"Digest\"";
AuthenticationHeaderValue parsedValue;
Assert.False(AuthenticationHeaderValue.TryParse(input, out parsedValue));
}
[Fact]
public void Add_BadValues_Throws()
{
string x = SR.net_http_message_not_success_statuscode;
string input = "Digest algorithm=MD5-sess,nonce=\"+Upgraded+v109e309640b\",charset=utf-8,realm=\"Digest\", ";
HttpRequestMessage request = new HttpRequestMessage();
Assert.Throws<FormatException>(() => { request.Headers.Add(HttpKnownHeaderNames.Authorization, input); });
}
[Fact]
public void GetHashCode_UseSameAndDifferentAuth_SameOrDifferentHashCodes()
{
AuthenticationHeaderValue auth1 = new AuthenticationHeaderValue("A", "b");
AuthenticationHeaderValue auth2 = new AuthenticationHeaderValue("a", "b");
AuthenticationHeaderValue auth3 = new AuthenticationHeaderValue("A", "B");
AuthenticationHeaderValue auth4 = new AuthenticationHeaderValue("A");
AuthenticationHeaderValue auth5 = new AuthenticationHeaderValue("A", "");
AuthenticationHeaderValue auth6 = new AuthenticationHeaderValue("X", "b");
Assert.Equal(auth1.GetHashCode(), auth2.GetHashCode());
Assert.NotEqual(auth1.GetHashCode(), auth3.GetHashCode());
Assert.NotEqual(auth1.GetHashCode(), auth4.GetHashCode());
Assert.Equal(auth4.GetHashCode(), auth5.GetHashCode());
Assert.NotEqual(auth1.GetHashCode(), auth6.GetHashCode());
}
[Fact]
public void Equals_UseSameAndDifferentAuth_EqualOrNotEqualNoExceptions()
{
AuthenticationHeaderValue auth1 = new AuthenticationHeaderValue("A", "b");
AuthenticationHeaderValue auth2 = new AuthenticationHeaderValue("a", "b");
AuthenticationHeaderValue auth3 = new AuthenticationHeaderValue("A", "B");
AuthenticationHeaderValue auth4 = new AuthenticationHeaderValue("A");
AuthenticationHeaderValue auth5 = new AuthenticationHeaderValue("A", "");
AuthenticationHeaderValue auth6 = new AuthenticationHeaderValue("X", "b");
Assert.False(auth1.Equals(null));
Assert.True(auth1.Equals(auth2));
Assert.False(auth1.Equals(auth3));
Assert.False(auth1.Equals(auth4));
Assert.False(auth4.Equals(auth1));
Assert.False(auth1.Equals(auth5));
Assert.False(auth5.Equals(auth1));
Assert.True(auth4.Equals(auth5));
Assert.True(auth5.Equals(auth4));
Assert.False(auth1.Equals(auth6));
}
[Fact]
public void Clone_Call_CloneFieldsMatchSourceFields()
{
AuthenticationHeaderValue source = new AuthenticationHeaderValue("Basic", "QWxhZGRpbjpvcGVuIHNlc2FtZQ==");
AuthenticationHeaderValue clone = (AuthenticationHeaderValue)((ICloneable)source).Clone();
Assert.Equal(source.Scheme, clone.Scheme);
Assert.Equal(source.Parameter, clone.Parameter);
source = new AuthenticationHeaderValue("Kerberos");
clone = (AuthenticationHeaderValue)((ICloneable)source).Clone();
Assert.Equal(source.Scheme, clone.Scheme);
Assert.Null(clone.Parameter);
}
[Fact]
public void GetAuthenticationLength_DifferentValidScenarios_AllReturnNonZero()
{
CallGetAuthenticationLength(" Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ== ", 1, 37,
new AuthenticationHeaderValue("Basic", "QWxhZGRpbjpvcGVuIHNlc2FtZQ=="));
CallGetAuthenticationLength(" Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ== , ", 1, 37,
new AuthenticationHeaderValue("Basic", "QWxhZGRpbjpvcGVuIHNlc2FtZQ=="));
CallGetAuthenticationLength(" Basic realm=\"example.com\"", 1, 25,
new AuthenticationHeaderValue("Basic", "realm=\"example.com\""));
CallGetAuthenticationLength(" Basic realm=\"exam,,ple.com\",", 1, 27,
new AuthenticationHeaderValue("Basic", "realm=\"exam,,ple.com\""));
CallGetAuthenticationLength(" Basic realm=\"exam,ple.com\",", 1, 26,
new AuthenticationHeaderValue("Basic", "realm=\"exam,ple.com\""));
CallGetAuthenticationLength("NTLM ", 0, 7, new AuthenticationHeaderValue("NTLM"));
CallGetAuthenticationLength("Digest", 0, 6, new AuthenticationHeaderValue("Digest"));
CallGetAuthenticationLength("Digest,,", 0, 6, new AuthenticationHeaderValue("Digest"));
CallGetAuthenticationLength("Digest a=b, c=d,,", 0, 15, new AuthenticationHeaderValue("Digest", "a=b, c=d"));
CallGetAuthenticationLength("Kerberos,", 0, 8, new AuthenticationHeaderValue("Kerberos"));
CallGetAuthenticationLength("Basic,NTLM", 0, 5, new AuthenticationHeaderValue("Basic"));
CallGetAuthenticationLength("Digest a=b,c=\"d\", e=f, NTLM", 0, 21,
new AuthenticationHeaderValue("Digest", "a=b,c=\"d\", e=f"));
CallGetAuthenticationLength("Digest a = b , c = \"d\" , e = f ,NTLM", 0, 32,
new AuthenticationHeaderValue("Digest", "a = b , c = \"d\" , e = f"));
CallGetAuthenticationLength("Digest a = b , c = \"d\" , e = f , NTLM AbCdEf==", 0, 32,
new AuthenticationHeaderValue("Digest", "a = b , c = \"d\" , e = f"));
CallGetAuthenticationLength("Digest a = \"b\", c= \"d\" , e = f,NTLM AbC=,", 0, 31,
new AuthenticationHeaderValue("Digest", "a = \"b\", c= \"d\" , e = f"));
CallGetAuthenticationLength("Digest a=\"b\", c=d", 0, 17,
new AuthenticationHeaderValue("Digest", "a=\"b\", c=d"));
CallGetAuthenticationLength("Digest a=\"b\", c=d,", 0, 17,
new AuthenticationHeaderValue("Digest", "a=\"b\", c=d"));
CallGetAuthenticationLength("Digest a=\"b\", c=d ,", 0, 18,
new AuthenticationHeaderValue("Digest", "a=\"b\", c=d"));
CallGetAuthenticationLength("Digest a=\"b\", c=d ", 0, 19,
new AuthenticationHeaderValue("Digest", "a=\"b\", c=d"));
CallGetAuthenticationLength("Custom \"blob\", c=d,Custom2 \"blob\"", 0, 18,
new AuthenticationHeaderValue("Custom", "\"blob\", c=d"));
CallGetAuthenticationLength("Custom \"blob\", a=b,,,c=d,Custom2 \"blob\"", 0, 24,
new AuthenticationHeaderValue("Custom", "\"blob\", a=b,,,c=d"));
CallGetAuthenticationLength("Custom \"blob\", a=b,c=d,,,Custom2 \"blob\"", 0, 22,
new AuthenticationHeaderValue("Custom", "\"blob\", a=b,c=d"));
CallGetAuthenticationLength("Custom a=b, c=d,,,InvalidNextScheme\u670D", 0, 15,
new AuthenticationHeaderValue("Custom", "a=b, c=d"));
}
[Fact]
public void GetAuthenticationLength_DifferentInvalidScenarios_AllReturnZero()
{
CheckInvalidGetAuthenticationLength(" NTLM", 0); // no leading whitespaces allowed
CheckInvalidGetAuthenticationLength("Basic=", 0);
CheckInvalidGetAuthenticationLength("=Basic", 0);
CheckInvalidGetAuthenticationLength("Digest a=b, \u670D", 0);
CheckInvalidGetAuthenticationLength("Digest a=b, c=d, \u670D", 0);
CheckInvalidGetAuthenticationLength("Digest a=b, c=", 0);
CheckInvalidGetAuthenticationLength("Digest a=\"b, c", 0);
CheckInvalidGetAuthenticationLength("Digest a=\"b", 0);
CheckInvalidGetAuthenticationLength("Digest a=b, c=\u670D", 0);
CheckInvalidGetAuthenticationLength("", 0);
CheckInvalidGetAuthenticationLength(null, 0);
}
#region Helper methods
private static void CallGetAuthenticationLength(string input, int startIndex, int expectedLength,
AuthenticationHeaderValue expectedResult)
{
object result = null;
Assert.Equal(expectedLength, AuthenticationHeaderValue.GetAuthenticationLength(input, startIndex, out result));
Assert.Equal(expectedResult, result);
}
private static void CheckInvalidGetAuthenticationLength(string input, int startIndex)
{
object result = null;
Assert.Equal(0, AuthenticationHeaderValue.GetAuthenticationLength(input, startIndex, out result));
Assert.Null(result);
}
#endregion
}
}
| |
// <copyright file="QRTests.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
// Copyright (c) 2009-2010 Math.NET
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Factorization;
using MathNet.Numerics.LinearAlgebra.Single;
using MathNet.Numerics.LinearAlgebra.Single.Factorization;
using NUnit.Framework;
namespace MathNet.Numerics.UnitTests.LinearAlgebraTests.Single.Factorization
{
/// <summary>
/// QR factorization tests for a dense matrix.
/// </summary>
[TestFixture, Category("LAFactorization")]
public class QRTests
{
/// <summary>
/// Constructor with wide matrix throws <c>ArgumentException</c>.
/// </summary>
[Test]
public void ConstructorWideMatrixThrowsInvalidMatrixOperationException()
{
Assert.That(() => UserQR.Create(new DenseMatrix(3, 4)), Throws.ArgumentException);
}
/// <summary>
/// Can factorize identity matrix.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(10)]
[TestCase(100)]
public void CanFactorizeIdentity(int order)
{
var matrixI = DenseMatrix.CreateIdentity(order);
var factorQR = matrixI.QR();
var r = factorQR.R;
Assert.AreEqual(matrixI.RowCount, r.RowCount);
Assert.AreEqual(matrixI.ColumnCount, r.ColumnCount);
for (var i = 0; i < r.RowCount; i++)
{
for (var j = 0; j < r.ColumnCount; j++)
{
if (i == j)
{
Assert.AreEqual(1.0, Math.Abs(r[i, j]));
}
else
{
Assert.AreEqual(0.0, r[i, j]);
}
}
}
}
/// <summary>
/// Can factorize identity matrix using thin QR.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(10)]
[TestCase(100)]
public void CanFactorizeIdentityUsingThinQR(int order)
{
var matrixI = DenseMatrix.CreateIdentity(order);
var factorQR = matrixI.QR(QRMethod.Thin);
var r = factorQR.R;
Assert.AreEqual(matrixI.ColumnCount, r.RowCount);
Assert.AreEqual(matrixI.ColumnCount, r.ColumnCount);
for (var i = 0; i < r.RowCount; i++)
{
for (var j = 0; j < r.ColumnCount; j++)
{
if (i == j)
{
Assert.AreEqual(1.0, Math.Abs(r[i, j]));
}
else
{
Assert.AreEqual(0.0, r[i, j]);
}
}
}
}
/// <summary>
/// Identity determinant is one.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(10)]
[TestCase(100)]
public void IdentityDeterminantIsOne(int order)
{
var matrixI = DenseMatrix.CreateIdentity(order);
var factorQR = matrixI.QR();
Assert.AreEqual(1.0, factorQR.Determinant);
}
/// <summary>
/// Can factorize a random matrix.
/// </summary>
/// <param name="row">Matrix row number.</param>
/// <param name="column">Matrix column number.</param>
[TestCase(1, 1)]
[TestCase(2, 2)]
[TestCase(5, 5)]
[TestCase(10, 6)]
[TestCase(50, 48)]
[TestCase(100, 98)]
public void CanFactorizeRandomMatrix(int row, int column)
{
var matrixA = Matrix<float>.Build.Random(row, column, 1);
var factorQR = matrixA.QR(QRMethod.Full);
var q = factorQR.Q;
var r = factorQR.R;
// Make sure the R has the right dimensions.
Assert.AreEqual(row, r.RowCount);
Assert.AreEqual(column, r.ColumnCount);
// Make sure the Q has the right dimensions.
Assert.AreEqual(row, q.RowCount);
Assert.AreEqual(row, q.ColumnCount);
// Make sure the R factor is upper triangular.
for (var i = 0; i < r.RowCount; i++)
{
for (var j = 0; j < r.ColumnCount; j++)
{
if (i > j)
{
Assert.AreEqual(0.0, r[i, j]);
}
}
}
// Make sure the Q*R is the original matrix.
var matrixQfromR = q * r;
for (var i = 0; i < matrixQfromR.RowCount; i++)
{
for (var j = 0; j < matrixQfromR.ColumnCount; j++)
{
Assert.AreEqual(matrixA[i, j], matrixQfromR[i, j], 1e-4);
}
}
// Make sure the Q is unitary --> (Q*)x(Q) = I
var matrixQtQ = q.Transpose() * q;
for (var i = 0; i < matrixQtQ.RowCount; i++)
{
for (var j = 0; j < matrixQtQ.ColumnCount; j++)
{
Assert.AreEqual(matrixQtQ[i, j], i == j ? 1.0f : 0.0f, 1e-3f);
}
}
}
/// <summary>
/// Can factorize a random matrix using thin QR.
/// </summary>
/// <param name="row">Matrix row number.</param>
/// <param name="column">Matrix column number.</param>
[TestCase(1, 1)]
[TestCase(2, 2)]
[TestCase(5, 5)]
[TestCase(10, 6)]
[TestCase(50, 48)]
[TestCase(100, 98)]
public void CanFactorizeRandomMatrixUsingThinQR(int row, int column)
{
var matrixA = Matrix<float>.Build.Random(row, column, 1);
var factorQR = matrixA.QR(QRMethod.Thin);
var q = factorQR.Q;
var r = factorQR.R;
// Make sure the R has the right dimensions.
Assert.AreEqual(column, r.RowCount);
Assert.AreEqual(column, r.ColumnCount);
// Make sure the Q has the right dimensions.
Assert.AreEqual(row, q.RowCount);
Assert.AreEqual(column, q.ColumnCount);
// Make sure the R factor is upper triangular.
for (var i = 0; i < r.RowCount; i++)
{
for (var j = 0; j < r.ColumnCount; j++)
{
if (i > j)
{
Assert.AreEqual(0.0, r[i, j]);
}
}
}
// Make sure the Q*R is the original matrix.
var matrixQfromR = q * r;
for (var i = 0; i < matrixQfromR.RowCount; i++)
{
for (var j = 0; j < matrixQfromR.ColumnCount; j++)
{
Assert.AreEqual(matrixA[i, j], matrixQfromR[i, j], 1.0e-4);
}
}
// Make sure the Q is unitary --> (Q*)x(Q) = I
var matrixQtQ = q.Transpose() * q;
for (var i = 0; i < matrixQtQ.RowCount; i++)
{
for (var j = 0; j < matrixQtQ.ColumnCount; j++)
{
Assert.AreEqual(matrixQtQ[i, j], i == j ? 1.0f : 0.0f, 1e-3f);
}
}
}
/// <summary>
/// Can solve a system of linear equations for a random vector (Ax=b).
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomVector(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR();
var vectorb = Vector<float>.Build.Random(order, 1);
var resultx = factorQR.Solve(vectorb);
Assert.AreEqual(matrixA.ColumnCount, resultx.Count);
var matrixBReconstruct = matrixA * resultx;
// Check the reconstruction.
for (var i = 0; i < order; i++)
{
Assert.AreEqual(vectorb[i], matrixBReconstruct[i], 1e-4);
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
}
/// <summary>
/// Can solve a system of linear equations for a random matrix (AX=B).
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomMatrix(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR();
var matrixB = Matrix<float>.Build.Random(order, order, 1);
var matrixX = factorQR.Solve(matrixB);
// The solution X row dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, matrixX.RowCount);
// The solution X has the same number of columns as B
Assert.AreEqual(matrixB.ColumnCount, matrixX.ColumnCount);
var matrixBReconstruct = matrixA * matrixX;
// Check the reconstruction.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixB[i, j], matrixBReconstruct[i, j], 1e-4);
}
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
}
/// <summary>
/// Can solve for a random vector into a result vector.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomVectorWhenResultVectorGiven(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR();
var vectorb = Vector<float>.Build.Random(order, 1);
var vectorbCopy = vectorb.Clone();
var resultx = new DenseVector(order);
factorQR.Solve(vectorb, resultx);
Assert.AreEqual(vectorb.Count, resultx.Count);
var matrixBReconstruct = matrixA * resultx;
// Check the reconstruction.
for (var i = 0; i < vectorb.Count; i++)
{
Assert.AreEqual(vectorb[i], matrixBReconstruct[i], 1e-4);
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
// Make sure b didn't change.
for (var i = 0; i < vectorb.Count; i++)
{
Assert.AreEqual(vectorbCopy[i], vectorb[i]);
}
}
/// <summary>
/// Can solve a system of linear equations for a random matrix (AX=B) into a result matrix.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomMatrixWhenResultMatrixGiven(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR();
var matrixB = Matrix<float>.Build.Random(order, order, 1);
var matrixBCopy = matrixB.Clone();
var matrixX = new DenseMatrix(order, order);
factorQR.Solve(matrixB, matrixX);
// The solution X row dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, matrixX.RowCount);
// The solution X has the same number of columns as B
Assert.AreEqual(matrixB.ColumnCount, matrixX.ColumnCount);
var matrixBReconstruct = matrixA * matrixX;
// Check the reconstruction.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixB[i, j], matrixBReconstruct[i, j], 1e-4);
}
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
// Make sure B didn't change.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixBCopy[i, j], matrixB[i, j]);
}
}
}
/// <summary>
/// Can solve a system of linear equations for a random vector (Ax=b).
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomVectorUsingThinQR(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR(QRMethod.Thin);
var vectorb = Vector<float>.Build.Random(order, 1);
var resultx = factorQR.Solve(vectorb);
Assert.AreEqual(matrixA.ColumnCount, resultx.Count);
var matrixBReconstruct = matrixA * resultx;
// Check the reconstruction.
for (var i = 0; i < order; i++)
{
AssertHelpers.AlmostEqual(vectorb[i], matrixBReconstruct[i], 3);
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
}
/// <summary>
/// Can solve a system of linear equations for a random matrix (AX=B).
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomMatrixUsingThinQR(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR(QRMethod.Thin);
var matrixB = Matrix<float>.Build.Random(order, order, 1);
var matrixX = factorQR.Solve(matrixB);
// The solution X row dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, matrixX.RowCount);
// The solution X has the same number of columns as B
Assert.AreEqual(matrixB.ColumnCount, matrixX.ColumnCount);
var matrixBReconstruct = matrixA * matrixX;
// Check the reconstruction.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixB[i, j], matrixBReconstruct[i, j], 1e-3);
}
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
}
/// <summary>
/// Can solve for a random vector into a result vector.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomVectorWhenResultVectorGivenUsingThinQR(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR(QRMethod.Thin);
var vectorb = Vector<float>.Build.Random(order, 1);
var vectorbCopy = vectorb.Clone();
var resultx = new DenseVector(order);
factorQR.Solve(vectorb, resultx);
Assert.AreEqual(vectorb.Count, resultx.Count);
var matrixBReconstruct = matrixA * resultx;
// Check the reconstruction.
for (var i = 0; i < vectorb.Count; i++)
{
AssertHelpers.AlmostEqual(vectorb[i], matrixBReconstruct[i], 3);
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
// Make sure b didn't change.
for (var i = 0; i < vectorb.Count; i++)
{
Assert.AreEqual(vectorbCopy[i], vectorb[i]);
}
}
/// <summary>
/// Can solve a system of linear equations for a random matrix (AX=B) into a result matrix.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(1)]
[TestCase(2)]
[TestCase(5)]
[TestCase(10)]
[TestCase(50)]
[TestCase(100)]
public void CanSolveForRandomMatrixWhenResultMatrixGivenUsingThinQR(int order)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR(QRMethod.Thin);
var matrixB = Matrix<float>.Build.Random(order, order, 1);
var matrixBCopy = matrixB.Clone();
var matrixX = new DenseMatrix(order, order);
factorQR.Solve(matrixB, matrixX);
// The solution X row dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, matrixX.RowCount);
// The solution X has the same number of columns as B
Assert.AreEqual(matrixB.ColumnCount, matrixX.ColumnCount);
var matrixBReconstruct = matrixA * matrixX;
// Check the reconstruction.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixB[i, j], matrixBReconstruct[i, j], 1e-3);
}
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
// Make sure B didn't change.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixBCopy[i, j], matrixB[i, j]);
}
}
}
/// <summary>
/// Can solve when using a tall matrix.
/// </summary>
/// <param name="method">The QR decomp method to use.</param>
[TestCase(QRMethod.Full)]
[TestCase(QRMethod.Thin)]
public void CanSolveForMatrixWithTallRandomMatrix(QRMethod method)
{
var matrixA = Matrix<float>.Build.Random(20, 10, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR(method);
var matrixB = Matrix<float>.Build.Random(20, 5, 1);
var matrixX = factorQR.Solve(matrixB);
// The solution X row dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, matrixX.RowCount);
// The solution X has the same number of columns as B
Assert.AreEqual(matrixB.ColumnCount, matrixX.ColumnCount);
var test = (matrixA.Transpose() * matrixA).Inverse() * matrixA.Transpose() * matrixB;
for (var i = 0; i < matrixX.RowCount; i++)
{
for (var j = 0; j < matrixX.ColumnCount; j++)
{
AssertHelpers.AlmostEqual(test[i, j], matrixX[i, j], 5);
}
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
}
/// <summary>
/// Can solve when using a tall matrix.
/// </summary>
/// <param name="method">The QR decomp method to use.</param>
[TestCase(QRMethod.Full)]
[TestCase(QRMethod.Thin)]
public void CanSolveForVectorWithTallRandomMatrix(QRMethod method)
{
var matrixA = Matrix<float>.Build.Random(20, 10, 1);
var matrixACopy = matrixA.Clone();
var factorQR = matrixA.QR(method);
var vectorB = Vector<float>.Build.Random(20, 1);
var vectorX = factorQR.Solve(vectorB);
// The solution x dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, vectorX.Count);
var test = (matrixA.Transpose() * matrixA).Inverse() * matrixA.Transpose() * vectorB;
for (var i = 0; i < vectorX.Count; i++)
{
AssertHelpers.AlmostEqual(test[i], vectorX[i], 5);
}
// Make sure A didn't change.
for (var i = 0; i < matrixA.RowCount; i++)
{
for (var j = 0; j < matrixA.ColumnCount; j++)
{
Assert.AreEqual(matrixACopy[i, j], matrixA[i, j]);
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.CompilerServices;
namespace HTLib2
{
using DEBUG = System.Diagnostics.Debug;
public partial class HDebug
{
public static Random rand = new Random();
public static readonly HDebug debug = new HDebug();
[MethodImpl(MethodImplOptions.AggressiveInlining)]
// [System.Diagnostics.Conditional("DEBUG")]
// [System.Diagnostics.DebuggerHiddenAttribute()]
public static bool Check(bool istrue)
{
if(IsDebuggerAttached == false)
#pragma warning disable CS0162
return false;
#pragma warning restore CS0162
return istrue;
}
[System.Diagnostics.Conditional("DEBUG")]
// [System.Diagnostics.DebuggerHiddenAttribute()]
public static void SetEpsilon(IList<double> values)
{
for(int i=0; i<values.Count; i++)
values[i] = double.Epsilon;
}
[System.Diagnostics.Conditional("DEBUG")]
// [System.Diagnostics.DebuggerHiddenAttribute()]
public static void SetEpsilon(double[,] values)
{
for(int i0=0; i0<values.GetLength(0); i0++)
for(int i1=0; i1<values.GetLength(1); i1++)
values[i0,i1] = double.Epsilon;
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void Assert(params bool[] conditions)
{
AssertAnd(conditions);
// System.Diagnostics.Debug.Assert(condition);
}
static bool selftest_IsEqualsAll = true;
public static bool IsEqualsAll<T>(params T[] values)
where T : IEquatable<T>
{
if(IsDebuggerAttached && selftest_IsEqualsAll)
#region selftest
{
selftest_IsEqualsAll = false;
HDebug.Assert(IsEqualsAll(1, 1, 1, 1, 1));
HDebug.Assert(IsEqualsAll(1, 1, 1, 1, 2) == false);
HDebug.Assert(IsEqualsAll(2, 1, 1, 1, 1) == false);
}
#endregion
for(int i=1; i<values.Length; i++)
{
if(values[0].Equals(values[i]) == false)
return false;
}
return true;
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertAllEquals<T>(params T[] values)
where T : IEquatable<T>
{
System.Diagnostics.Debug.Assert(IsEqualsAll(values));
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertDouble(params double[] values)
{
foreach(double value in values)
{
System.Diagnostics.Debug.Assert(double.IsInfinity(value) == false);
System.Diagnostics.Debug.Assert(double.IsNaN(value) == false);
}
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertTolerance(double tolerance, params double[] values)
{
System.Diagnostics.Debug.Assert(CheckTolerance(tolerance, values));
}
public static bool CheckTolerance(double tolerance, params double[] values)
{
for(int i=0; i<values.Length; i++)
if(Math.Abs(values[i]) > tolerance)
return false;
//System.Diagnostics.Debug.Assert(assert);
return true;
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertToleranceIf(bool condition, double tolerance, params double[] values)
{
if(condition)
{
bool assert = true;
for(int i=0; i<values.Length; i++)
assert &= (Math.Abs(values[i]) <= tolerance);
System.Diagnostics.Debug.Assert(assert);
}
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertTolerance(double tolerance, params double[][] values)
{
bool assert = true;
for(int i=0; i<values.Length; i++)
for(int j=0; j<values[i].Length; j++)
assert &= (Math.Abs(values[i][j]) <= tolerance);
System.Diagnostics.Debug.Assert(assert);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertTolerance(double tolerance, double[,] values)
{
bool assert = CheckTolerance(tolerance, values);
System.Diagnostics.Debug.Assert(assert);
}
public static bool CheckTolerance(double tolerance, double[,] values)
{
for(int c=0; c<values.GetLength(0); c++)
for(int r=0; r<values.GetLength(1); r++)
{
double value = values[c, r];
if(Math.Abs(value) > tolerance)
return false;
}
return true;
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertSimilar(double left, double right, double tolerance)
{
System.Diagnostics.Debug.Assert(Math.Abs(left-right) <= tolerance);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertSimilar(double[] left, double[] right, double tolerance)
{
if(left.Length != right.Length) { DEBUG.Assert(false); return; }
for(int i=0; i<left.Length; i++)
{
if(Math.Abs(left[i]-right[i]) <= tolerance)
continue;
DEBUG.Assert(false);
return;
}
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertSimilar(double[,] left, double[,] right, double tolerance)
{
if(left.GetLength(0) != right.GetLength(0)) { DEBUG.Assert(false); return; }
if(left.GetLength(1) != right.GetLength(1)) { DEBUG.Assert(false); return; }
for(int i=0; i<left.GetLength(0); i++)
{
for(int j=0; j<left.GetLength(1); j++)
{
if(Math.Abs(left[i, j]-right[i, j]) <= tolerance)
continue;
DEBUG.Assert(false);
return;
}
}
}
//////////////////////////////////////////////
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertNotSimilar(double left, double right, double tolerance)
{
System.Diagnostics.Debug.Assert(Math.Abs(left-right) > tolerance);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertNotSimilar(double[] left, double[] right, double tolerance)
{
if(left.Length != right.Length) return;
for(int i=0; i<left.Length; i++)
{
if(Math.Abs(left[i]-right[i]) <= tolerance)
continue;
return;
}
DEBUG.Assert(false);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertNotSimilar(double[,] left, double[,] right, double tolerance)
{
if(left.GetLength(0) != right.GetLength(0)) return;
if(left.GetLength(1) != right.GetLength(1)) return;
for(int i=0; i<left.GetLength(0); i++)
{
for(int j=0; j<left.GetLength(1); j++)
{
if(Math.Abs(left[i, j]-right[i, j]) <= tolerance)
continue;
return;
}
}
DEBUG.Assert(false);
}
//////////////////////////////////////////////
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertNotNull<T>(T value)
where T : class
{
System.Diagnostics.Debug.Assert(value != null);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertNotNull<T>(params T[] value)
where T : class
{
bool assert = true;
if(value == null)
assert = false;
else
for(int i=0; i<value.Length; i++)
if(value[i] == null)
assert = false;
DEBUG.Assert(assert);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertNotNull<T>(T[,] value)
where T : class
{
bool assert = true;
if(value == null)
assert = false;
else
for(int i=0; i<value.GetLength(0); i++)
for(int j=0; j<value.GetLength(1); j++)
if(value[i,j] == null)
assert = false;
DEBUG.Assert(assert);
}
//////////////////////////////////////////////
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void Verify(bool condition)
{
System.Diagnostics.Debug.Assert(condition);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertOr(params bool[] conditions)
{
foreach(bool condition in conditions)
{
if(condition == true)
{
return;
}
}
System.Diagnostics.Debug.Assert(false);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertAnd(params bool[] conditions)
{
bool success = true;
foreach(bool condition in conditions)
{
if(condition == false)
{
success = false;
}
}
System.Diagnostics.Debug.Assert(success);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertXor(params bool[] conditions)
{
int numsuccess = 0;
foreach(bool condition in conditions)
{
if(condition == true)
{
numsuccess++;
}
}
System.Diagnostics.Debug.Assert(numsuccess == 1);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertIf(bool condition, params bool[] asserts)
{
if(condition)
{
bool assert = true;
for(int i=0; i<asserts.Length; i++)
assert = assert && asserts[i];
Assert(assert);
}
}
static Dictionary<int, Dictionary<string, bool>> _ConditionalAssert = new Dictionary<int, Dictionary<string, bool>>();
// <hashcode_for_name, <name, is_assert>>
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
public static void AssertIf(string name, bool initial_condition, bool assert)
{
int hash = name.GetHashCode();
if(_ConditionalAssert.ContainsKey(hash) == false)
{
_ConditionalAssert[hash] = new Dictionary<string,bool>();
_ConditionalAssert[hash][name] = initial_condition;
AssertIf(initial_condition, assert);
return;
}
if(_ConditionalAssert[hash].ContainsKey(name) == false)
{
_ConditionalAssert[hash][name] = initial_condition;
AssertIf(initial_condition, assert);
return;
}
AssertIf(_ConditionalAssert[hash][name], assert);
}
public bool this[string name]
{
get
{
int hash = name.GetHashCode();
if(_ConditionalAssert.ContainsKey(hash) == false)
return false;
if(_ConditionalAssert[hash].ContainsKey(name) == false)
return false;
return _ConditionalAssert[hash][name];
}
set
{
int hash = name.GetHashCode();
if(_ConditionalAssert.ContainsKey(hash) == false)
return;
if(_ConditionalAssert[hash].ContainsKey(name) == false)
return;
_ConditionalAssert[hash][name] = value;
}
}
//static public bool IsDebuggerAttached
//{
// get
// {
// return System.Diagnostics.Debugger.IsAttached;
// }
//}
#if DEBUG
public const bool IsDebuggerAttached = true;
#else
public const bool IsDebuggerAttached = false;
#endif
static public bool IsDebuggerAttachedWithProb(double prob)
{
if(System.Diagnostics.Debugger.IsAttached)
{
HDebug.Assert(0<=prob, prob<=1);
double nrand = rand.NextDouble();
return (nrand < prob);
}
return false;
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
static public void Break()
{
System.Diagnostics.Debugger.Break();
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
static public void Break(params bool[] conditions)
{
BreakOr(conditions);
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
static public void BreakAnd(params bool[] conditions)
{
if(conditions.Length >= 1)
{
bool dobreak = true;
foreach(bool condition in conditions)
dobreak = dobreak && condition;
if(dobreak)
System.Diagnostics.Debugger.Break();
}
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
static public void BreakOr(params bool[] conditions)
{
if(conditions.Length >= 1)
{
bool dobreak = false;
foreach(bool condition in conditions)
dobreak = dobreak || condition;
if(dobreak)
System.Diagnostics.Debugger.Break();
}
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
static public void ToDo(params string[] todos)
{
foreach(string todo in todos)
System.Console.Error.WriteLine("TODO: " + todo);
Break();
}
[System.Diagnostics.Conditional("DEBUG")]
[System.Diagnostics.DebuggerHiddenAttribute()]
static public void Depreciated(params string[] messages)
{
foreach(string message in messages)
System.Console.Error.WriteLine("Depreciated: " + message);
Break();
}
public static class Trace
{
public static bool AutoFlush { set{ System.Diagnostics.Trace.AutoFlush = value; } get{ return System.Diagnostics.Trace.AutoFlush ; } }
public static int IndentLevel { set{ System.Diagnostics.Trace.IndentLevel = value; } get{ return System.Diagnostics.Trace.IndentLevel; } }
public static int IndentSize { set{ System.Diagnostics.Trace.IndentSize = value; } get{ return System.Diagnostics.Trace.IndentSize ; } }
public static System.Diagnostics.CorrelationManager CorrelationManager { get{ return System.Diagnostics.Trace.CorrelationManager; } }
public static System.Diagnostics.TraceListenerCollection Listeners { get{ return System.Diagnostics.Trace.Listeners ; } }
[System.Diagnostics.Conditional("TRACE")] public static void Flush() { System.Diagnostics.Trace.Flush(); }
[System.Diagnostics.Conditional("TRACE")] public static void Indent() { System.Diagnostics.Trace.Indent(); }
[System.Diagnostics.Conditional("TRACE")] public static void Unindent() { System.Diagnostics.Trace.Unindent(); }
public static void Refresh() { System.Diagnostics.Trace.Refresh(); }
[System.Diagnostics.Conditional("TRACE")] public static void Write(object value) { System.Diagnostics.Trace.Write(value); }
[System.Diagnostics.Conditional("TRACE")] public static void Write(string message) { System.Diagnostics.Trace.Write(message); }
[System.Diagnostics.Conditional("TRACE")] public static void Write(object value, string category) { System.Diagnostics.Trace.Write(value, category); }
[System.Diagnostics.Conditional("TRACE")] public static void Write(string message, string category) { System.Diagnostics.Trace.Write(message, category); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, object value) { System.Diagnostics.Trace.WriteIf(condition, value); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, string message) { System.Diagnostics.Trace.WriteIf(condition, message); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, object value, string category) { System.Diagnostics.Trace.WriteIf(condition, value, category); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, string message, string category) { System.Diagnostics.Trace.WriteIf(condition, message, category); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(object value) { System.Diagnostics.Trace.WriteLine(value); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(string message) { System.Diagnostics.Trace.WriteLine(message); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(object value, string category) { System.Diagnostics.Trace.WriteLine(value, category); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(string message, string category) { System.Diagnostics.Trace.WriteLine(message, category); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, object value) { System.Diagnostics.Trace.WriteLineIf(condition, value); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, string message) { System.Diagnostics.Trace.WriteLineIf(condition, message); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, object value, string category) { System.Diagnostics.Trace.WriteLineIf(condition, value, category); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, string message, string category) { System.Diagnostics.Trace.WriteLineIf(condition, message, category); }
}
public static class TraceFile
{
static System.IO.StreamWriter writer = System.IO.File.CreateText("TRACE.TXT");
//[System.Diagnostics.Conditional("TRACE")] public static void Write(object value) { writer.Write(value); writer.Flush(); }
[System.Diagnostics.Conditional("TRACE")] public static void Write(string message) { writer.Write(message); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void Write(object value, string category) { writer.Write(value, category); writer.Flush(); }
[System.Diagnostics.Conditional("TRACE")] public static void Write(string message, string category) { writer.Write(message, category); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, object value) { writer.WriteIf(condition, value); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, string message) { writer.WriteIf(condition, message); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, object value, string category) { writer.WriteIf(condition, value, category); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteIf(bool condition, string message, string category) { writer.WriteIf(condition, message, category); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(object value) { writer.WriteLine(value); writer.Flush(); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(string message) { writer.WriteLine(message); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(object value, string category) { writer.WriteLine(value, category); writer.Flush(); }
[System.Diagnostics.Conditional("TRACE")] public static void WriteLine(string message, string category) { writer.WriteLine(message, category); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, object value) { writer.WriteLineIf(condition, value); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, string message) { writer.WriteLineIf(condition, message); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, object value, string category) { writer.WriteLineIf(condition, value, category); writer.Flush(); }
//[System.Diagnostics.Conditional("TRACE")] public static void WriteLineIf(bool condition, string message, string category) { writer.WriteLineIf(condition, message, category); writer.Flush(); }
}
}
}
| |
// dnlib: See LICENSE.txt for more info
using System;
using System.Threading;
using dnlib.DotNet.MD;
namespace dnlib.DotNet {
/// <summary>
/// A high-level representation of a row in the MethodSpec table
/// </summary>
public abstract class MethodSpec : IHasCustomAttribute, IMethod, IContainsGenericParameter {
/// <summary>
/// The row id in its table
/// </summary>
protected uint rid;
/// <inheritdoc/>
public MDToken MDToken {
get { return new MDToken(Table.MethodSpec, rid); }
}
/// <inheritdoc/>
public uint Rid {
get { return rid; }
set { rid = value; }
}
/// <inheritdoc/>
public int HasCustomAttributeTag {
get { return 21; }
}
/// <summary>
/// From column MethodSpec.Method
/// </summary>
public IMethodDefOrRef Method {
get { return method; }
set { method = value; }
}
/// <summary/>
protected IMethodDefOrRef method;
/// <summary>
/// From column MethodSpec.Instantiation
/// </summary>
public CallingConventionSig Instantiation {
get { return instantiation; }
set { instantiation = value; }
}
/// <summary/>
protected CallingConventionSig instantiation;
/// <summary>
/// Gets all custom attributes
/// </summary>
public CustomAttributeCollection CustomAttributes {
get {
if (customAttributes == null)
InitializeCustomAttributes();
return customAttributes;
}
}
/// <summary/>
protected CustomAttributeCollection customAttributes;
/// <summary>Initializes <see cref="customAttributes"/></summary>
protected virtual void InitializeCustomAttributes() {
Interlocked.CompareExchange(ref customAttributes, new CustomAttributeCollection(), null);
}
/// <inheritdoc/>
public bool HasCustomAttributes {
get { return CustomAttributes.Count > 0; }
}
/// <inheritdoc/>
MethodSig IMethod.MethodSig {
get {
var m = method;
return m == null ? null : m.MethodSig;
}
set {
var m = method;
if (m != null)
m.MethodSig = value;
}
}
/// <inheritdoc/>
public UTF8String Name {
get {
var m = method;
return m == null ? UTF8String.Empty : m.Name;
}
set {
var m = method;
if (m != null)
m.Name = value;
}
}
/// <inheritdoc/>
public ITypeDefOrRef DeclaringType {
get {
var m = method;
return m == null ? null : m.DeclaringType;
}
}
/// <summary>
/// Gets/sets the generic instance method sig
/// </summary>
public GenericInstMethodSig GenericInstMethodSig {
get { return instantiation as GenericInstMethodSig; }
set { instantiation = value; }
}
/// <inheritdoc/>
int IGenericParameterProvider.NumberOfGenericParameters {
get {
var sig = GenericInstMethodSig;
return sig == null ? 0 : sig.GenericArguments.Count;
}
}
/// <inheritdoc/>
public ModuleDef Module {
get {
var m = method;
return m == null ? null : m.Module;
}
}
/// <summary>
/// Gets the full name
/// </summary>
public string FullName {
get {
var gims = GenericInstMethodSig;
var methodGenArgs = gims == null ? null : gims.GenericArguments;
var m = method;
var methodDef = m as MethodDef;
if (methodDef != null) {
var declaringType = methodDef.DeclaringType;
return FullNameCreator.MethodFullName(declaringType == null ? null : declaringType.FullName, methodDef.Name, methodDef.MethodSig, null, methodGenArgs, null, null);
}
var memberRef = m as MemberRef;
if (memberRef != null) {
var methodSig = memberRef.MethodSig;
if (methodSig != null) {
var tsOwner = memberRef.Class as TypeSpec;
var gis = tsOwner == null ? null : tsOwner.TypeSig as GenericInstSig;
var typeGenArgs = gis == null ? null : gis.GenericArguments;
return FullNameCreator.MethodFullName(memberRef.GetDeclaringTypeFullName(), memberRef.Name, methodSig, typeGenArgs, methodGenArgs, null, null);
}
}
return string.Empty;
}
}
bool IIsTypeOrMethod.IsType {
get { return false; }
}
bool IIsTypeOrMethod.IsMethod {
get { return true; }
}
bool IMemberRef.IsField {
get { return false; }
}
bool IMemberRef.IsTypeSpec {
get { return false; }
}
bool IMemberRef.IsTypeRef {
get { return false; }
}
bool IMemberRef.IsTypeDef {
get { return false; }
}
bool IMemberRef.IsMethodSpec {
get { return true; }
}
bool IMemberRef.IsMethodDef {
get { return false; }
}
bool IMemberRef.IsMemberRef {
get { return false; }
}
bool IMemberRef.IsFieldDef {
get { return false; }
}
bool IMemberRef.IsPropertyDef {
get { return false; }
}
bool IMemberRef.IsEventDef {
get { return false; }
}
bool IMemberRef.IsGenericParam {
get { return false; }
}
bool IContainsGenericParameter.ContainsGenericParameter {
get { return TypeHelper.ContainsGenericParameter(this); }
}
/// <inheritdoc/>
public override string ToString() {
return FullName;
}
}
/// <summary>
/// A MethodSpec row created by the user and not present in the original .NET file
/// </summary>
public class MethodSpecUser : MethodSpec {
/// <summary>
/// Default constructor
/// </summary>
public MethodSpecUser() {
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="method">The generic method</param>
public MethodSpecUser(IMethodDefOrRef method)
: this(method, null) {
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="method">The generic method</param>
/// <param name="sig">The instantiated method sig</param>
public MethodSpecUser(IMethodDefOrRef method, GenericInstMethodSig sig) {
this.method = method;
this.instantiation = sig;
}
}
/// <summary>
/// Created from a row in the MethodSpec table
/// </summary>
sealed class MethodSpecMD : MethodSpec, IMDTokenProviderMD {
/// <summary>The module where this instance is located</summary>
readonly ModuleDefMD readerModule;
readonly uint origRid;
/// <inheritdoc/>
public uint OrigRid {
get { return origRid; }
}
/// <inheritdoc/>
protected override void InitializeCustomAttributes() {
var list = readerModule.MetaData.GetCustomAttributeRidList(Table.MethodSpec, origRid);
var tmp = new CustomAttributeCollection((int)list.Length, list, (list2, index) => readerModule.ReadCustomAttribute(((RidList)list2)[index]));
Interlocked.CompareExchange(ref customAttributes, tmp, null);
}
/// <summary>
/// Constructor
/// </summary>
/// <param name="readerModule">The module which contains this <c>MethodSpec</c> row</param>
/// <param name="rid">Row ID</param>
/// <param name="gpContext">Generic parameter context</param>
/// <exception cref="ArgumentNullException">If <paramref name="readerModule"/> is <c>null</c></exception>
/// <exception cref="ArgumentException">If <paramref name="rid"/> is invalid</exception>
public MethodSpecMD(ModuleDefMD readerModule, uint rid, GenericParamContext gpContext) {
#if DEBUG
if (readerModule == null)
throw new ArgumentNullException("readerModule");
if (readerModule.TablesStream.MethodSpecTable.IsInvalidRID(rid))
throw new BadImageFormatException(string.Format("MethodSpec rid {0} does not exist", rid));
#endif
this.origRid = rid;
this.rid = rid;
this.readerModule = readerModule;
uint method;
uint instantiation = readerModule.TablesStream.ReadMethodSpecRow(origRid, out method);
this.method = readerModule.ResolveMethodDefOrRef(method, gpContext);
this.instantiation = readerModule.ReadSignature(instantiation, gpContext);
}
}
}
| |
/*
FluorineFx open source library
Copyright (C) 2007 Zoltan Csibi, zoltan@TheSilentGroup.com, FluorineFx.com
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
using System;
using System.Reflection;
using System.Security.Permissions;
using FluorineFx.Collections.Generic;
using FluorineFx.Configuration;
#if !SILVERLIGHT
using FluorineFx.Reflection;
using FluorineFx.Reflection.Lightweight;
using FluorineFx.Util;
using log4net;
#endif
namespace FluorineFx
{
/// <summary>
/// This type supports the Fluorine infrastructure and is not intended to be used directly from your code.
/// </summary>
sealed class ObjectFactory
{
#if !SILVERLIGHT
private static readonly ILog Log = LogManager.GetLogger(typeof(ObjectFactory));
#endif
private static volatile ObjectFactory _instance;
private static readonly object SyncRoot = new Object();
private readonly CopyOnWriteDictionary<string, Type> _typeCache;
#if !SILVERLIGHT
private readonly CopyOnWriteDictionary<Type, ConstructorInvoker> _typeConstructorCache;
#endif
private readonly string[] _lacLocations;
private readonly bool _reflectionEmitPermission;
private ObjectFactory()
{
_lacLocations = TypeHelper.GetLacLocations();
_typeCache = new CopyOnWriteDictionary<string, Type>();
#if !SILVERLIGHT
_typeConstructorCache = new CopyOnWriteDictionary<Type, ConstructorInvoker>();
try
{
new ReflectionPermission(ReflectionPermissionFlag.ReflectionEmit).Demand();
new ReflectionPermission(ReflectionPermissionFlag.MemberAccess).Demand();
new ReflectionPermission(ReflectionPermissionFlag.RestrictedMemberAccess).Demand();
new SecurityPermission(SecurityPermissionFlag.UnmanagedCode).Demand();
_reflectionEmitPermission = true;
}
catch(Exception ex)
{
Unreferenced.Parameter(ex);
_reflectionEmitPermission = false;
}
#endif
}
public static ObjectFactory Instance
{
get
{
if (_instance == null)
{
lock (SyncRoot)
{
if (_instance == null)
_instance = new ObjectFactory();
}
}
return _instance;
}
}
public Type InternalLocate(string typeName)
{
if( string.IsNullOrEmpty(typeName) )
return null;
string mappedTypeName = FluorineConfiguration.Instance.GetMappedTypeName(typeName);
//Lookup first in our cache.
Type type;
if (!_typeCache.TryGetValue(mappedTypeName, out type))
{
type = TypeHelper.Locate(mappedTypeName);
if (type != null)
_typeCache[mappedTypeName] = type;
else
type = InternalLocateInLac(mappedTypeName); // Locate in the LAC
}
return type;
}
public Type InternalLocateInLac(string typeName)
{
if( string.IsNullOrEmpty(typeName) )
return null;
string mappedTypeName = FluorineConfiguration.Instance.GetMappedTypeName(typeName);
//Lookup first in our cache.
Type type;
if (!_typeCache.TryGetValue(mappedTypeName, out type))
{
//Locate in LAC
for (int i = 0; i < _lacLocations.Length; i++)
{
type = TypeHelper.LocateInLac(mappedTypeName, _lacLocations[i]);
if (type != null)
{
_typeCache[mappedTypeName] = type;
return type;
}
}
}
return type;
}
internal void AddTypeToCache(Type type)
{
if (type != null)
_typeCache[type.FullName] = type;
}
public bool ContainsType(string typeName)
{
if (string.IsNullOrEmpty(typeName))
return false;
return _typeCache.ContainsKey(typeName);
}
public object InternalCreateInstance(Type type)
{
return InternalCreateInstance(type, null);
}
public object InternalCreateInstance(string typeName)
{
return InternalCreateInstance(typeName, null);
}
public object InternalCreateInstance(string typeName, object[] args)
{
Type type = InternalLocate(typeName);
return InternalCreateInstance(type, args);
}
public object InternalCreateInstance(Type type, object[] args)
{
if (type != null)
{
if (type.IsAbstract && type.IsSealed)
return type;
#if !SILVERLIGHT
if (_reflectionEmitPermission)
{
ConstructorInvoker invoker;
_typeConstructorCache.TryGetValue(type, out invoker);
if (invoker == null)
{
invoker = ConstructorExtensions.DelegateForCreateInstance(type, args);
_typeConstructorCache[type] = invoker;
}
return invoker(args);
}
return Activator.CreateInstance(type, BindingFlags.CreateInstance | BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static, null, args, null);
#else
return type.InvokeMember(null, BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.Instance | BindingFlags.CreateInstance | BindingFlags.Static, null, null, args);
#endif
}
return null;
}
static public Type Locate(string type)
{
return Instance.InternalLocate(type);
}
static public Type LocateInLac(string type)
{
return Instance.InternalLocateInLac(type);
}
static public object CreateInstance(Type type)
{
return Instance.InternalCreateInstance(type);
}
static public object CreateInstance(string type)
{
return Instance.InternalCreateInstance(type);
}
static public object CreateInstance(Type type, object[] args)
{
return Instance.InternalCreateInstance(type, args);
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyFormData
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// Formdata operations.
/// </summary>
public partial class Formdata : IServiceOperations<AutoRestSwaggerBATFormDataService>, IFormdata
{
/// <summary>
/// Initializes a new instance of the Formdata class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
public Formdata(AutoRestSwaggerBATFormDataService client)
{
if (client == null)
{
throw new ArgumentNullException("client");
}
this.Client = client;
}
/// <summary>
/// Gets a reference to the AutoRestSwaggerBATFormDataService
/// </summary>
public AutoRestSwaggerBATFormDataService Client { get; private set; }
/// <summary>
/// Upload file
/// </summary>
/// <param name='fileContent'>
/// File to upload.
/// </param>
/// <param name='fileName'>
/// File name to upload. Name has to be spelled exactly as written here.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<HttpOperationResponse<System.IO.Stream>> UploadFileWithHttpMessagesAsync(System.IO.Stream fileContent, string fileName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (fileContent == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileContent");
}
if (fileName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileName");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("fileContent", fileContent);
tracingParameters.Add("fileName", fileName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "UploadFile", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "formdata/stream/uploadfile").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
MultipartFormDataContent _multiPartContent = new MultipartFormDataContent();
if (fileContent != null)
{
StreamContent _fileContent = new StreamContent(fileContent);
_fileContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
System.IO.FileStream _fileContentAsFileStream = fileContent as System.IO.FileStream;
if (_fileContentAsFileStream != null)
{
ContentDispositionHeaderValue _contentDispositionHeaderValue = new ContentDispositionHeaderValue("form-data");
_contentDispositionHeaderValue.Name = "fileContent";
_contentDispositionHeaderValue.FileName = _fileContentAsFileStream.Name;
_fileContent.Headers.ContentDisposition = _contentDispositionHeaderValue;
}
_multiPartContent.Add(_fileContent, "fileContent");
}
if (fileName != null)
{
StringContent _fileName = new StringContent(fileName, Encoding.UTF8);
_multiPartContent.Add(_fileName, "fileName");
}
_httpRequest.Content = _multiPartContent;
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<System.IO.Stream>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Upload file
/// </summary>
/// <param name='fileContent'>
/// File to upload.
/// </param>
/// <param name='fileName'>
/// File name to upload. Name has to be spelled exactly as written here.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<HttpOperationResponse<System.IO.Stream>> UploadFileViaBodyWithHttpMessagesAsync(System.IO.Stream fileContent, string fileName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (fileContent == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileContent");
}
if (fileName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "fileName");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("fileContent", fileContent);
tracingParameters.Add("fileName", fileName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "UploadFileViaBody", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "formdata/stream/uploadfile").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
StreamContent _fileStreamContent = new StreamContent(fileContent);
_httpRequest.Content = _fileStreamContent;
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/octet-stream");
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<System.IO.Stream>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_result.Body = await _httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
//Copyright (C) 2004 by Autodesk, Inc.
//
//Permission to use, copy, modify, and distribute this software in
//object code form for any purpose and without fee is hereby granted,
//provided that the above copyright notice appears in all copies and
//that both that copyright notice and the limited warranty and
//restricted rights notice below appear in all supporting
//documentation.
//
//AUTODESK PROVIDES THIS PROGRAM "AS IS" AND WITH ALL FAULTS.
//AUTODESK SPECIFICALLY DISCLAIMS ANY IMPLIED WARRANTY OF
//MERCHANTABILITY OR FITNESS FOR A PARTICULAR USE. AUTODESK, INC.
//DOES NOT WARRANT THAT THE OPERATION OF THE PROGRAM WILL BE
//UNINTERRUPTED OR ERROR FREE.
//
//Use, duplication, or disclosure by the U.S. Government is subject to
//restrictions set forth in FAR 52.227-19 (Commercial Computer
//Software - Restricted Rights) and DFAR 252.227-7013(c)(1)(ii)
//(Rights in Technical Data and Computer Software), as applicable
using System;
using AcadApp = Autodesk.AutoCAD.ApplicationServices.Application;
namespace Autodesk.AutoCAD.Samples.DockingPalette
{
/// <summary>
/// Sample control to be embedded on a palette
/// </summary>
public class TestControl : System.Windows.Forms.UserControl
{
private System.Windows.Forms.Label label1;
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
public TestControl()
{
// This call is required by the Windows.Forms Form Designer.
InitializeComponent();
// TODO: Add any initialization after the InitializeComponent call
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose( bool disposing )
{
if( disposing )
{
if(components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.label1 = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// label1
//
this.label1.Anchor = System.Windows.Forms.AnchorStyles.None;
this.label1.Location = new System.Drawing.Point(72, 128);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(128, 16);
this.label1.TabIndex = 8;
this.label1.Text = "Drag me on the drawing";
this.label1.MouseMove += new System.Windows.Forms.MouseEventHandler(this.label1_MouseMove);
//
// TestControl
//
this.Controls.AddRange(new System.Windows.Forms.Control[] {
this.label1});
this.Name = "TestControl";
this.Size = new System.Drawing.Size(280, 296);
this.GiveFeedback += new System.Windows.Forms.GiveFeedbackEventHandler(this.TestControl_GiveFeedback);
this.QueryContinueDrag += new System.Windows.Forms.QueryContinueDragEventHandler(this.TestControl_QueryContinueDrag);
this.ResumeLayout(false);
}
#endregion
/// <summary>
/// Drop target to control the drop behavior to the AutoCAD window
/// </summary>
public class MyDropTarget : Autodesk.AutoCAD.Windows.DropTarget
{
public override void OnDragEnter(System.Windows.Forms.DragEventArgs e)
{
System.Diagnostics.Debug.WriteLine("DragEnter");
}
public override void OnDragLeave()
{
System.Diagnostics.Debug.WriteLine("DragLeave");
}
public override void OnDragOver(System.Windows.Forms.DragEventArgs e)
{
System.Diagnostics.Debug.WriteLine("DragOver");
}
static string data;
public override void OnDrop(System.Windows.Forms.DragEventArgs e)
{
System.Diagnostics.Debug.WriteLine("Drop");
//let's drop it
//stash away the payload
data = (string)e.Data.GetData(typeof(string)) ;
//start a command to handle the interaction with the user. Don't do it directly from the OnDrop method
AcadApp.DocumentManager.MdiActiveDocument.SendStringToExecute("netdrop\n",false,false,false);
}
//command handler for the netdrop command. This is executed when our payload is
//dropped on the acad window.
[Autodesk.AutoCAD.Runtime.CommandMethod("netdrop")]
public static void netdropCmd()
{
if (data!=null)
{
AcadApp.DocumentManager.MdiActiveDocument.Editor.WriteMessage(data);
data = null;
}
else
AcadApp.DocumentManager.MdiActiveDocument.Editor.WriteMessage("nothing to do.");
}
}
#region Handlers for drag events from control
private void TestControl_GiveFeedback(object sender, System.Windows.Forms.GiveFeedbackEventArgs e)
{
System.Diagnostics.Debug.WriteLine("GiveFeedback");
}
private void TestControl_QueryContinueDrag(object sender, System.Windows.Forms.QueryContinueDragEventArgs e)
{
System.Diagnostics.Debug.WriteLine("QueryContinueDrag");
}
#endregion
private void label1_MouseMove(object sender, System.Windows.Forms.MouseEventArgs e)
{
if (System.Windows.Forms.Control.MouseButtons == System.Windows.Forms.MouseButtons.Left)
{
//start dragDrop operation, MyDropTarget will be called when the cursor enters the AutoCAD view area.
AcadApp.DoDragDrop(this,"Drag & drop successful!!!",System.Windows.Forms.DragDropEffects.All,new MyDropTarget());
}
}
}
public class TestPalettes
{
static Autodesk.AutoCAD.Windows.PaletteSet ps;
[Autodesk.AutoCAD.Runtime.CommandMethod("palettedemo")]
public static void DoIt()
{
if (ps==null)
{
//use constructor with Guid so that we can save/load user data
ps = new Autodesk.AutoCAD.Windows.PaletteSet("Test Palette Set",new Guid("63B8DB5B-10E4-4924-B8A2-A9CF9158E4F6"));
ps.Load+=new Autodesk.AutoCAD.Windows.PalettePersistEventHandler(ps_Load);
ps.Save+=new Autodesk.AutoCAD.Windows.PalettePersistEventHandler(ps_Save);
ps.Style = Autodesk.AutoCAD.Windows.PaletteSetStyles.NameEditable |
Autodesk.AutoCAD.Windows.PaletteSetStyles.ShowPropertiesMenu |
Autodesk.AutoCAD.Windows.PaletteSetStyles.ShowAutoHideButton |
Autodesk.AutoCAD.Windows.PaletteSetStyles.ShowCloseButton;
ps.MinimumSize = new System.Drawing.Size(300,300);
ps.Add("Test Palette 1", new TestControl());
}
bool b = ps.Visible;
ps.Visible = true;
Autodesk.AutoCAD.EditorInput.Editor e = AcadApp.DocumentManager.MdiActiveDocument.Editor;
Autodesk.AutoCAD.EditorInput.PromptResult res = e.GetKeywords("Select a palette set option:","Opacity","TitleBarLocation","Docking");
if (res.Status == Autodesk.AutoCAD.EditorInput.PromptStatus.OK)
{
switch (res.StringResult)
{
case "Opacity":
Autodesk.AutoCAD.EditorInput.PromptIntegerResult resInt;
do
{
resInt = e.GetInteger("Enter opacity:");
if (resInt.Status != Autodesk.AutoCAD.EditorInput.PromptStatus.OK)
break;
if (resInt.Value>=0 && resInt.Value<=100)
break;
e.WriteMessage("Opacity must be between 0 and 100\n");
}
while (true);
ps.Opacity = resInt.Value;
break;
case "TitleBarLocation":
res = e.GetKeywords("Select titlebar location:","Left","Right");
if (res.Status == Autodesk.AutoCAD.EditorInput.PromptStatus.OK)
switch (res.StringResult)
{
case "Left":
ps.TitleBarLocation = Autodesk.AutoCAD.Windows.PaletteSetTitleBarLocation.Left;
break;
case "Right":
ps.TitleBarLocation = Autodesk.AutoCAD.Windows.PaletteSetTitleBarLocation.Right;
break;
}
break;
case "Docking":
{
res = e.GetKeywords("Choose a docking option:","None","Left","Right","Top","Bottom");
if (res.Status == Autodesk.AutoCAD.EditorInput.PromptStatus.OK)
{
switch (res.StringResult)
{
case "None":
ps.Dock = Autodesk.AutoCAD.Windows.DockSides.None;
break;
case "Left":
ps.Dock = Autodesk.AutoCAD.Windows.DockSides.Left;
break;
case "Right":
ps.Dock = Autodesk.AutoCAD.Windows.DockSides.Right;
break;
case "Top":
ps.Dock = Autodesk.AutoCAD.Windows.DockSides.Top;
break;
case "Bottom":
ps.Dock = Autodesk.AutoCAD.Windows.DockSides.Bottom;
break;
}
}
break;
}
}
}
}
private static void ps_Load(object sender, Autodesk.AutoCAD.Windows.PalettePersistEventArgs e)
{
//demo loading user data
double a = (double)e.ConfigurationSection.ReadProperty("whatever",22.3);
}
private static void ps_Save(object sender, Autodesk.AutoCAD.Windows.PalettePersistEventArgs e)
{
//demo saving user data
e.ConfigurationSection.WriteProperty("whatever",32.3);
}
}
}
| |
using Nancy.Diagnostics;
namespace Nancy.Conventions
{
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using Nancy.Helpers;
using Nancy.Responses;
/// <summary>
/// Helper class for defining directory-based conventions for static contents.
/// </summary>
public class StaticContentConventionBuilder
{
private static readonly ConcurrentDictionary<ResponseFactoryCacheKey, Func<NancyContext, Response>> ResponseFactoryCache;
private static readonly Regex PathReplaceRegex = new Regex(@"[/\\]", RegexOptions.Compiled);
static StaticContentConventionBuilder()
{
ResponseFactoryCache = new ConcurrentDictionary<ResponseFactoryCacheKey, Func<NancyContext, Response>>();
}
/// <summary>
/// Adds a directory-based convention for static convention.
/// </summary>
/// <param name="requestedPath">The path that should be matched with the request.</param>
/// <param name="contentPath">The path to where the content is stored in your application, relative to the root. If this is <see langword="null" /> then it will be the same as <paramref name="requestedPath"/>.</param>
/// <param name="allowedExtensions">A list of extensions that is valid for the conventions. If not supplied, all extensions are valid.</param>
/// <returns>A <see cref="GenericFileResponse"/> instance for the requested static contents if it was found, otherwise <see langword="null"/>.</returns>
public static Func<NancyContext, string, Response> AddDirectory(string requestedPath, string contentPath = null, params string[] allowedExtensions)
{
if (!requestedPath.StartsWith("/"))
{
requestedPath = string.Concat("/", requestedPath);
}
return (ctx, root) =>
{
var path =
HttpUtility.UrlDecode(ctx.Request.Path);
var fileName = GetSafeFileName(path);
if (string.IsNullOrEmpty(fileName))
{
return null;
}
var pathWithoutFilename =
GetPathWithoutFilename(fileName, path);
if (!pathWithoutFilename.StartsWith(requestedPath, StringComparison.OrdinalIgnoreCase))
{
(ctx.Trace.TraceLog ?? new NullLog()).WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] The requested resource '", path, "' does not match convention mapped to '", requestedPath, "'" )));
return null;
}
contentPath =
GetContentPath(requestedPath, contentPath);
if (contentPath.Equals("/"))
{
throw new ArgumentException("This is not the security vulnerability you are looking for. Mapping static content to the root of your application is not a good idea.");
}
var responseFactory =
ResponseFactoryCache.GetOrAdd(new ResponseFactoryCacheKey(path, root), BuildContentDelegate(ctx, root, requestedPath, contentPath, allowedExtensions));
return responseFactory.Invoke(ctx);
};
}
/// <summary>
/// Adds a file-based convention for static convention.
/// </summary>
/// <param name="requestedFile">The file that should be matched with the request.</param>
/// <param name="contentFile">The file that should be served when the requested path is matched.</param>
public static Func<NancyContext, string, Response> AddFile(string requestedFile, string contentFile)
{
return (ctx, root) =>
{
var path =
ctx.Request.Path;
if (!path.Equals(requestedFile, StringComparison.OrdinalIgnoreCase))
{
ctx.Trace.TraceLog.WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] The requested resource '", path, "' does not match convention mapped to '", requestedFile, "'")));
return null;
}
var responseFactory =
ResponseFactoryCache.GetOrAdd(new ResponseFactoryCacheKey(path, root), BuildContentDelegate(ctx, root, requestedFile, contentFile, new string[] { }));
return responseFactory.Invoke(ctx);
};
}
private static string GetSafeFileName(string path)
{
try
{
return Path.GetFileName(path);
}
catch (Exception)
{
}
return null;
}
private static string GetContentPath(string requestedPath, string contentPath)
{
contentPath =
contentPath ?? requestedPath;
if (!contentPath.StartsWith("/"))
{
contentPath = string.Concat("/", contentPath);
}
return contentPath;
}
private static Func<ResponseFactoryCacheKey, Func<NancyContext, Response>> BuildContentDelegate(NancyContext context, string applicationRootPath, string requestedPath, string contentPath, string[] allowedExtensions)
{
return pathAndRootPair =>
{
context.Trace.TraceLog.WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] Attempting to resolve static content '", pathAndRootPair, "'")));
var extension =
Path.GetExtension(pathAndRootPair.Path);
if (!string.IsNullOrEmpty(extension))
{
extension = extension.Substring(1);
}
if (allowedExtensions.Length != 0 && !allowedExtensions.Any(e => string.Equals(e.TrimStart(new [] {'.'}), extension, StringComparison.OrdinalIgnoreCase)))
{
context.Trace.TraceLog.WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] The requested extension '", extension, "' does not match any of the valid extensions for the convention '", string.Join(",", allowedExtensions), "'")));
return ctx => null;
}
var transformedRequestPath =
GetSafeRequestPath(pathAndRootPair.Path, requestedPath, contentPath);
transformedRequestPath =
GetEncodedPath(transformedRequestPath);
var fileName =
Path.GetFullPath(Path.Combine(applicationRootPath, transformedRequestPath));
var contentRootPath =
Path.GetFullPath(Path.Combine(applicationRootPath, GetEncodedPath(contentPath)));
if (!IsWithinContentFolder(contentRootPath, fileName))
{
context.Trace.TraceLog.WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] The request '", fileName, "' is trying to access a path outside the content folder '", contentPath, "'")));
return ctx => null;
}
if (!File.Exists(fileName))
{
context.Trace.TraceLog.WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] The requested file '", fileName, "' does not exist")));
return ctx => null;
}
context.Trace.TraceLog.WriteLog(x => x.AppendLine(string.Concat("[StaticContentConventionBuilder] Returning file '", fileName, "'")));
return ctx => new GenericFileResponse(fileName, ctx);
};
}
private static string GetEncodedPath(string path)
{
return PathReplaceRegex.Replace(path.TrimStart(new[] { '/' }), Path.DirectorySeparatorChar.ToString());
}
private static string GetPathWithoutFilename(string fileName, string path)
{
var pathWithoutFileName =
path.Replace(fileName, string.Empty);
return (pathWithoutFileName.Equals("/")) ?
pathWithoutFileName :
pathWithoutFileName.TrimEnd(new[] {'/'});
}
private static string GetSafeRequestPath(string requestPath, string requestedPath, string contentPath)
{
var actualContentPath =
(contentPath.Equals("/") ? string.Empty : contentPath);
if (requestedPath.Equals("/"))
{
return string.Concat(actualContentPath, requestPath);
}
var expression =
new Regex(Regex.Escape(requestedPath), RegexOptions.IgnoreCase);
return expression.Replace(requestPath, actualContentPath, 1);
}
/// <summary>
/// Returns whether the given filename is contained within the content folder
/// </summary>
/// <param name="contentRootPath">Content root path</param>
/// <param name="fileName">Filename requested</param>
/// <returns>True if contained within the content root, false otherwise</returns>
private static bool IsWithinContentFolder(string contentRootPath, string fileName)
{
return fileName.StartsWith(contentRootPath, StringComparison.Ordinal);
}
/// <summary>
/// Used to uniquely identify a request. Needed for when two Nancy applications want to serve up static content of the same
/// name from within the same AppDomain.
/// </summary>
private class ResponseFactoryCacheKey : IEquatable<ResponseFactoryCacheKey>
{
private readonly string path;
private readonly string rootPath;
public ResponseFactoryCacheKey(string path, string rootPath)
{
this.path = path;
this.rootPath = rootPath;
}
/// <summary>
/// The path of the static content for which this response is being issued
/// </summary>
public string Path
{
get { return this.path; }
}
/// <summary>
/// The root folder path of the Nancy application for which this response will be issued
/// </summary>
public string RootPath
{
get { return this.rootPath; }
}
public bool Equals(ResponseFactoryCacheKey other)
{
if (ReferenceEquals(null, other))
{
return false;
}
if (ReferenceEquals(this, other))
{
return true;
}
return string.Equals(this.path, other.path) && string.Equals(this.rootPath, other.rootPath);
}
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj))
{
return false;
}
if (ReferenceEquals(this, obj))
{
return true;
}
if (obj.GetType() != this.GetType())
{
return false;
}
return Equals((ResponseFactoryCacheKey)obj);
}
public override int GetHashCode()
{
unchecked
{
return ((this.path != null ? this.path.GetHashCode() : 0) * 397) ^ (this.rootPath != null ? this.rootPath.GetHashCode() : 0);
}
}
}
}
}
| |
/*******************************************************************************
* Copyright (c) 2013, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
/**
* Created at 7:50:04 AM Jan 20, 2011
*/
using SharpBox2D.Collision.Shapes;
using SharpBox2D.Common;
using SharpBox2D.Dynamics;
using SharpBox2D.Dynamics.Joints;
using SharpBox2D.TestBed.Framework;
namespace SharpBox2D.TestBed.Tests
{
/**
* @author Daniel Murphy
*/
public class ApplyForce : TestbedTest
{
private static long BODY_TAG = 12;
private Body m_body;
public override void initTest(bool deserialized)
{
if (deserialized)
{
return;
}
getWorld().setGravity(new Vec2(0.0f, 0.0f));
float k_restitution = 0.4f;
Body ground;
{
BodyDef bd = new BodyDef();
bd.position.set(0.0f, 20.0f);
ground = getWorld().createBody(bd);
EdgeShape shape = new EdgeShape();
FixtureDef sd = new FixtureDef();
sd.shape = shape;
sd.density = 0.0f;
sd.restitution = k_restitution;
// Left vertical
shape.set(new Vec2(-20.0f, -20.0f), new Vec2(-20.0f, 20.0f));
ground.createFixture(sd);
// Right vertical
shape.set(new Vec2(20.0f, -20.0f), new Vec2(20.0f, 20.0f));
ground.createFixture(sd);
// Top horizontal
shape.set(new Vec2(-20.0f, 20.0f), new Vec2(20.0f, 20.0f));
ground.createFixture(sd);
// Bottom horizontal
shape.set(new Vec2(-20.0f, -20.0f), new Vec2(20.0f, -20.0f));
ground.createFixture(sd);
}
{
Transform xf1 = new Transform();
xf1.q.set(0.3524f*MathUtils.PI);
Rot.mulToOutUnsafe(xf1.q, new Vec2(1.0f, 0.0f), ref xf1.p);
Vec2[] vertices = new Vec2[3];
vertices[0] = Transform.mul(xf1, new Vec2(-1.0f, 0.0f));
vertices[1] = Transform.mul(xf1, new Vec2(1.0f, 0.0f));
vertices[2] = Transform.mul(xf1, new Vec2(0.0f, 0.5f));
PolygonShape poly1 = new PolygonShape();
poly1.set(vertices, 3);
FixtureDef sd1 = new FixtureDef();
sd1.shape = poly1;
sd1.density = 4.0f;
Transform xf2 = new Transform();
xf2.q.set(-0.3524f*MathUtils.PI);
Rot.mulToOut(xf2.q, new Vec2(-1.0f, 0.0f), ref xf2.p);
vertices[0] = Transform.mul(xf2, new Vec2(-1.0f, 0.0f));
vertices[1] = Transform.mul(xf2, new Vec2(1.0f, 0.0f));
vertices[2] = Transform.mul(xf2, new Vec2(0.0f, 0.5f));
PolygonShape poly2 = new PolygonShape();
poly2.set(vertices, 3);
FixtureDef sd2 = new FixtureDef();
sd2.shape = poly2;
sd2.density = 2.0f;
BodyDef bd = new BodyDef();
bd.type = BodyType.DYNAMIC;
bd.angularDamping = 2.0f;
bd.linearDamping = 0.5f;
bd.position.set(0.0f, 2.0f);
bd.angle = MathUtils.PI;
bd.allowSleep = false;
m_body = getWorld().createBody(bd);
m_body.createFixture(sd1);
m_body.createFixture(sd2);
}
{
PolygonShape shape = new PolygonShape();
shape.setAsBox(0.5f, 0.5f);
FixtureDef fd = new FixtureDef();
fd.shape = shape;
fd.density = 1.0f;
fd.friction = 0.3f;
for (int i = 0; i < 10; ++i)
{
BodyDef bd = new BodyDef();
bd.type = BodyType.DYNAMIC;
bd.position.set(0.0f, 5.0f + 1.54f*i);
Body body = getWorld().createBody(bd);
body.createFixture(fd);
float gravity = 10.0f;
float I = body.getInertia();
float mass = body.getMass();
// For a circle: I = 0.5 * m * r * r ==> r = sqrt(2 * I / m)
float radius = MathUtils.sqrt(2.0f*I/mass);
FrictionJointDef jd = new FrictionJointDef();
jd.localAnchorA.setZero();
jd.localAnchorB.setZero();
jd.bodyA = ground;
jd.bodyB = body;
jd.collideConnected = true;
jd.maxForce = mass*gravity;
jd.maxTorque = mass*radius*gravity;
getWorld().createJoint(jd);
}
}
}
public override void keyPressed(char keyCar, int keyCode)
{
// TODO Auto-generated method stub
base.keyPressed(keyCar, keyCode);
}
public override void step(TestbedSettings settings)
{
base.step(settings);
addTextLine("Use 'wasd' to move, 'e' and 's' drift.");
if (getModel().getKeys()['w'])
{
Vec2 f = m_body.getWorldVector(new Vec2(0.0f, -30.0f));
Vec2 p = m_body.getWorldPoint(m_body.getLocalCenter().add(new Vec2(0.0f, 2.0f)));
m_body.applyForce(f, p);
}
else if (getModel().getKeys()['q'])
{
Vec2 f = m_body.getWorldVector(new Vec2(0.0f, -30.0f));
Vec2 p = m_body.getWorldPoint(m_body.getLocalCenter().add(new Vec2(-.2f, 0f)));
m_body.applyForce(f, p);
}
else if (getModel().getKeys()['e'])
{
Vec2 f = m_body.getWorldVector(new Vec2(0.0f, -30.0f));
Vec2 p = m_body.getWorldPoint(m_body.getLocalCenter().add(new Vec2(.2f, 0f)));
m_body.applyForce(f, p);
}
else if (getModel().getKeys()['s'])
{
Vec2 f = m_body.getWorldVector(new Vec2(0.0f, 30.0f));
Vec2 p = m_body.getWorldCenter();
m_body.applyForce(f, p);
}
if (getModel().getKeys()['a'])
{
m_body.applyTorque(20.0f);
}
if (getModel().getKeys()['d'])
{
m_body.applyTorque(-20.0f);
}
}
public override bool isSaveLoadEnabled()
{
return true;
}
public override long getTag(Body body)
{
if (body == m_body)
{
return BODY_TAG;
}
return base.getTag(body);
}
public override void processBody(Body body, long tag)
{
if (tag == BODY_TAG)
{
m_body = body;
}
base.processBody(body, tag);
}
public override string getTestName()
{
return "Apply Force";
}
}
}
| |
// "Therefore those skilled at the unorthodox
// are infinite as heaven and earth,
// inexhaustible as the great rivers.
// When they come to an end,
// they begin again,
// like the days and months;
// they die and are reborn,
// like the four seasons."
//
// - Sun Tsu,
// "The Art of War"
using System;
using System.Drawing;
using System.Text;
namespace TheArtOfDev.HtmlRenderer.Demo.Common
{
/// <summary>
/// HTML syntax highlighting using Rich-Text formatting.<br/>
/// - Handle plain input or already in RTF format.<br/>
/// - Handle if input already contains RTF color table.<br/>
/// - Rich coloring adjusted to Visual Studio HTML coloring.<br/>
/// - Support to provide custom colors.<br/>
/// - High performance (as much as RTF format allows).<br/>
/// </summary>
/// <remarks>
/// The MIT License (MIT) Copyright (c) 2014 Arthur Teplitzki.<br/>
/// Based on work by Alun Evans 2006 (http://www.codeproject.com/Articles/15038/C-Formatting-Text-in-a-RichTextBox-by-Parsing-the).
/// </remarks>
public static class HtmlSyntaxHighlighter
{
#region Fields/Consts
/// <summary>
/// RTF header field
/// </summary>
private const string Header = "\\rtf";
/// <summary>
/// RTF color table
/// </summary>
private const string ColorTbl = "\\colortbl";
/// <summary>
/// cf0 = default
/// cf1 = dark red
/// cf2 = bright red
/// cf3 = green
/// cf4 = blue
/// cf5 = blue
/// cf6 = purple
/// </summary>
private const string DefaultColorScheme = "\\red128\\green0\\blue0;\\red240\\green0\\blue0;\\red0\\green128\\blue0;\\red0\\green0\\blue255;\\red0\\green0\\blue255;\\red128\\green0\\blue171;";
/// <summary>
/// Used to test if a char requires more than 1 byte
/// </summary>
private static readonly char[] _unicodeTest = new char[1];
#endregion
/// <summary>
/// Process the given text to create RTF text with HTML syntax highlighting using default Visual Studio colors.<br/>
/// The given text can be plain HTML or already parsed RTF format.
/// </summary>
/// <param name="text">the text to create color RTF text from</param>
/// <returns>text with RTF formatting for HTML syntax</returns>
public static string Process(string text)
{
return Process(text, DefaultColorScheme);
}
/// <summary>
/// Process the given text to create RTF text with HTML syntax highlighting using custom colors.<br/>
/// The given text can be plain HTML or already parsed RTF format.
/// </summary>
/// <param name="text">the text to create color RTF text from</param>
/// <param name="element">the color for HTML elements</param>
/// <param name="attribute">the color for HTML attributes</param>
/// <param name="comment">the color for HTML comments</param>
/// <param name="chars">the color for HTML special chars: (<![CDATA[<,>,",',=,:]]>)</param>
/// <param name="values">the color for HTML attribute or styles values</param>
/// <param name="style">the color for HTML style attribute</param>
/// <returns>text with RTF formatting for HTML syntax</returns>
public static string Process(string text, Color element, Color attribute, Color comment, Color chars, Color values, Color style)
{
return Process(text, CreateColorScheme(element, attribute, comment, chars, values, style));
}
#region Private/Protected methods
/// <summary>
/// Process the given text to create RTF text with HTML syntax highlighting.
/// </summary>
/// <param name="text">the text to create color RTF text from</param>
/// <param name="colorScheme">the color scheme to add to RTF color table</param>
/// <returns>text with RTF formatting for HTML syntax</returns>
private static string Process(string text, string colorScheme)
{
var sb = new StringBuilder(text.Length * 2);
// add color table used to set color in RTL formatted text
bool rtfFormated;
int i = AddColorTable(sb, text, colorScheme, out rtfFormated);
// Scan through RTF data adding RTF color tags
bool inComment = false;
bool inHtmlTag = false;
bool inAttributeVal = false;
for (; i < text.Length; i++)
{
var c = text[i];
var c2 = text.Length > i + 1 ? text[i + 1] : (char)0;
if (!inComment && c == '<')
{
if (text.Length > i + 3 && c2 == '!' && text[i + 2] == '-' && text[i + 3] == '-')
{
// Comments tag
sb.Append("\\cf3").Append(c);
inComment = true;
}
else
{
// Html start/end tag
sb.Append("\\cf4").Append(c);
if (c2 == '/')
{
sb.Append(c2);
i++;
}
sb.Append("\\cf1 ");
inHtmlTag = true;
}
}
else if (c == '>')
{
//Check for comments tags
if (inComment && text[i - 1] == '-' && text[i - 2] == '-')
{
sb.Append(c).Append("\\cf0 ");
inComment = false;
}
else if (!inComment)
{
sb.Append("\\cf4").Append(c).Append("\\cf0 ");
inHtmlTag = false;
inAttributeVal = false;
}
}
else if (inHtmlTag && !inComment && c == '/' && c2 == '>')
{
sb.Append("\\cf4").Append(c).Append(c2).Append("\\cf0 ");
inHtmlTag = false;
i++;
}
else if (inHtmlTag && !inComment && !inAttributeVal && c == ' ')
{
sb.Append(c).Append("\\cf2 ");
}
else if (inHtmlTag && !inComment && c == '=')
{
sb.Append("\\cf4").Append(c).Append("\\cf6 ");
}
else if (inHtmlTag && !inComment && inAttributeVal && c == ':')
{
sb.Append("\\cf0").Append(c).Append("\\cf5 ");
}
else if (inHtmlTag && !inComment && inAttributeVal && c == ';')
{
sb.Append("\\cf0").Append(c).Append("\\cf6 ");
}
else if (inHtmlTag && !inComment && (c == '"' || c == '\''))
{
sb.Append("\\cf4").Append(c).Append("\\cf6 ");
inAttributeVal = !inAttributeVal;
}
else if (!rtfFormated && c == '\n')
{
sb.Append(c).Append("\\par ");
}
else if (!rtfFormated && (c == '{' || c == '}'))
{
sb.Append('\\').Append(c);
}
else if (!rtfFormated)
{
_unicodeTest[0] = c;
if (Encoding.UTF8.GetByteCount(_unicodeTest, 0, 1) > 1)
sb.Append("\\u" + Convert.ToUInt32(c) + "?");
else
sb.Append(c);
}
else
{
sb.Append(c);
}
}
// close the RTF if we added the header ourselves
if (!rtfFormated)
sb.Append('}');
// return the created colored RTF
return sb.ToString();
}
/// <summary>
/// Add color table used to set color in RTL formatted text.
/// </summary>
/// <param name="sb">the builder to add the RTF string to</param>
/// <param name="text">the original RTF text to build color RTF from</param>
/// <param name="colorScheme">the color scheme to add to RTF color table</param>
/// <param name="rtfFormated">return if the given text is already in RTF format</param>
/// <returns>the index in the given RTF text to start scan from</returns>
private static int AddColorTable(StringBuilder sb, string text, string colorScheme, out bool rtfFormated)
{
// Search for color table, if exists replace it, otherwise add our
rtfFormated = true;
int idx = text.IndexOf(ColorTbl, StringComparison.OrdinalIgnoreCase);
if (idx != -1)
{
sb.Append(text, 0, idx);
// insert our color table at our chosen location
sb.Append(ColorTbl).Append(";").Append(colorScheme).Append("}");
// skip the existing color table
idx = text.IndexOf('}', idx);
}
else
{
// find index of start of header if exists
idx = text.IndexOf(Header, StringComparison.OrdinalIgnoreCase);
if (idx != -1)
{
// append the existing header
idx += Header.Length;
sb.Append(text, 0, idx);
while (text[idx] != '\\' && text[idx] != '{' && text[idx] != '}')
sb.Append(text[idx++]);
}
else
{
// not RTF text, add the RTF header as well
idx = 0;
sb.Append("{").Append(Header);
rtfFormated = false;
}
// insert the color table at our chosen location
sb.Append("{").Append(ColorTbl).Append(";").Append(colorScheme).Append("}");
}
return idx;
}
/// <summary>
/// Create RTF colortbl formatted string for the given colors.
/// </summary>
private static string CreateColorScheme(Color element, Color attribute, Color comment, Color chars, Color values, Color style)
{
var sb = new StringBuilder(DefaultColorScheme.Length);
AppendColorValue(sb, element);
AppendColorValue(sb, attribute);
AppendColorValue(sb, comment);
AppendColorValue(sb, chars);
AppendColorValue(sb, values);
AppendColorValue(sb, style);
return sb.ToString();
}
/// <summary>
/// Append single color in RTF colortbl format.
/// </summary>
private static void AppendColorValue(StringBuilder sb, Color color)
{
sb.Append("\\red").Append(color.R)
.Append("\\green").Append(color.R)
.Append("\\blue").Append(color.R)
.Append(';');
}
#endregion
}
}
| |
using System.Diagnostics;
namespace Lucene.Net.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Base class for sorting algorithms implementations.
/// <para/>
/// @lucene.internal
/// </summary>
public abstract class Sorter
{
internal const int THRESHOLD = 20;
/// <summary>
/// Sole constructor, used for inheritance. </summary>
protected Sorter()
{
}
/// <summary>
/// Compare entries found in slots <paramref name="i"/> and <paramref name="j"/>.
/// The contract for the returned value is the same as
/// <see cref="System.Collections.Generic.IComparer{T}.Compare(T, T)"/>.
/// </summary>
protected abstract int Compare(int i, int j);
/// <summary>
/// Swap values at slots <paramref name="i"/> and <paramref name="j"/>. </summary>
protected abstract void Swap(int i, int j);
/// <summary>
/// Sort the slice which starts at <paramref name="from"/> (inclusive) and ends at
/// <paramref name="to"/> (exclusive).
/// </summary>
public abstract void Sort(int from, int to);
internal virtual void CheckRange(int from, int to)
{
if (to < from)
{
throw new System.ArgumentException("'to' must be >= 'from', got from=" + from + " and to=" + to);
}
}
internal virtual void MergeInPlace(int from, int mid, int to)
{
if (from == mid || mid == to || Compare(mid - 1, mid) <= 0)
{
return;
}
else if (to - from == 2)
{
Swap(mid - 1, mid);
return;
}
while (Compare(from, mid) <= 0)
{
++from;
}
while (Compare(mid - 1, to - 1) <= 0)
{
--to;
}
int first_cut, second_cut;
int len11, len22;
if (mid - from > to - mid)
{
len11 = (int)((uint)(mid - from) >> 1);
first_cut = from + len11;
second_cut = Lower(mid, to, first_cut);
len22 = second_cut - mid;
}
else
{
len22 = (int)((uint)(to - mid) >> 1);
second_cut = mid + len22;
first_cut = Upper(from, mid, second_cut);
len11 = first_cut - from;
}
Rotate(first_cut, mid, second_cut);
int new_mid = first_cut + len22;
MergeInPlace(from, first_cut, new_mid);
MergeInPlace(new_mid, second_cut, to);
}
internal virtual int Lower(int from, int to, int val)
{
int len = to - from;
while (len > 0)
{
int half = (int)((uint)len >> 1);
int mid = from + half;
if (Compare(mid, val) < 0)
{
from = mid + 1;
len = len - half - 1;
}
else
{
len = half;
}
}
return from;
}
internal virtual int Upper(int from, int to, int val)
{
int len = to - from;
while (len > 0)
{
int half = (int)((uint)len >> 1);
int mid = from + half;
if (Compare(val, mid) < 0)
{
len = half;
}
else
{
from = mid + 1;
len = len - half - 1;
}
}
return from;
}
// faster than lower when val is at the end of [from:to[
internal virtual int Lower2(int from, int to, int val)
{
int f = to - 1, t = to;
while (f > from)
{
if (Compare(f, val) < 0)
{
return Lower(f, t, val);
}
int delta = t - f;
t = f;
f -= delta << 1;
}
return Lower(from, t, val);
}
// faster than upper when val is at the beginning of [from:to[
internal virtual int Upper2(int from, int to, int val)
{
int f = from, t = f + 1;
while (t < to)
{
if (Compare(t, val) > 0)
{
return Upper(f, t, val);
}
int delta = t - f;
f = t;
t += delta << 1;
}
return Upper(f, to, val);
}
internal void Reverse(int from, int to)
{
for (--to; from < to; ++from, --to)
{
Swap(from, to);
}
}
internal void Rotate(int lo, int mid, int hi)
{
Debug.Assert(lo <= mid && mid <= hi);
if (lo == mid || mid == hi)
{
return;
}
DoRotate(lo, mid, hi);
}
internal virtual void DoRotate(int lo, int mid, int hi)
{
if (mid - lo == hi - mid)
{
// happens rarely but saves n/2 swaps
while (mid < hi)
{
Swap(lo++, mid++);
}
}
else
{
Reverse(lo, mid);
Reverse(mid, hi);
Reverse(lo, hi);
}
}
internal virtual void InsertionSort(int from, int to)
{
for (int i = from + 1; i < to; ++i)
{
for (int j = i; j > from; --j)
{
if (Compare(j - 1, j) > 0)
{
Swap(j - 1, j);
}
else
{
break;
}
}
}
}
internal virtual void BinarySort(int from, int to)
{
BinarySort(from, to, from + 1);
}
internal virtual void BinarySort(int from, int to, int i)
{
for (; i < to; ++i)
{
int l = from;
int h = i - 1;
while (l <= h)
{
int mid = (int)((uint)(l + h) >> 1);
int cmp = Compare(i, mid);
if (cmp < 0)
{
h = mid - 1;
}
else
{
l = mid + 1;
}
}
switch (i - l)
{
case 2:
Swap(l + 1, l + 2);
Swap(l, l + 1);
break;
case 1:
Swap(l, l + 1);
break;
case 0:
break;
default:
for (int j = i; j > l; --j)
{
Swap(j - 1, j);
}
break;
}
}
}
internal virtual void HeapSort(int from, int to)
{
if (to - from <= 1)
{
return;
}
Heapify(from, to);
for (int end = to - 1; end > from; --end)
{
Swap(from, end);
SiftDown(from, from, end);
}
}
internal virtual void Heapify(int from, int to)
{
for (int i = HeapParent(from, to - 1); i >= from; --i)
{
SiftDown(i, from, to);
}
}
internal virtual void SiftDown(int i, int from, int to)
{
for (int leftChild = HeapChild(from, i); leftChild < to; leftChild = HeapChild(from, i))
{
int rightChild = leftChild + 1;
if (Compare(i, leftChild) < 0)
{
if (rightChild < to && Compare(leftChild, rightChild) < 0)
{
Swap(i, rightChild);
i = rightChild;
}
else
{
Swap(i, leftChild);
i = leftChild;
}
}
else if (rightChild < to && Compare(i, rightChild) < 0)
{
Swap(i, rightChild);
i = rightChild;
}
else
{
break;
}
}
}
internal static int HeapParent(int from, int i)
{
return ((int)((uint)(i - 1 - from) >> 1)) + from;
}
internal static int HeapChild(int from, int i)
{
return ((i - from) << 1) + 1 + from;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Hie.Core.Model;
namespace Hie.Core
{
public interface IApplicationHost
{
void Deploy(Application app);
void StartProcessing();
void StopProcessing();
void PublishMessage(object source, Message message);
void AddPipelineComponent(IEndpoint endpoint, IPipelineComponent pipelineComponent);
void PushPipelineData(IEndpoint endpoint, byte[] data);
void PushPipelineData(IEndpoint endpoint, Message message);
void ProcessInPipeline(IEndpoint source, byte[] data);
void ProcessInPipeline(IEndpoint source, Message message);
}
public class ApplicationHost : IApplicationHost
{
private Dictionary<IEndpoint, Queue<IPipelineComponent>> _pipelines = new Dictionary<IEndpoint, Queue<IPipelineComponent>>();
public IList<Application> Applications { get; private set; }
public ApplicationHost()
{
Applications = new List<Application>();
}
public void Deploy(Application app)
{
// Setup application
app.HostService = this;
foreach (Port port in app.Ports)
{
foreach (var encoder in port.Encoders)
{
AddPipelineComponent(port.Endpoint, encoder);
}
foreach (var assembler in port.Assembers)
{
AddPipelineComponent(port.Endpoint, assembler);
}
}
// Setup channels
foreach (var channel in app.Channels)
{
channel.HostService = this;
foreach (var destination in channel.Destinations)
{
destination.Channel = channel;
}
channel.Source.Channel = channel;
}
// Setup endpoints
foreach (var port in app.Ports)
{
port.Endpoint.Initialize(this, null);
}
Applications.Add(app);
}
public void StartProcessing()
{
foreach (var application in Applications)
{
foreach (var port in application.Ports)
{
port.Endpoint.StartProcessing();
}
}
}
public void StopProcessing()
{
foreach (var application in Applications)
{
foreach (var port in application.Ports)
{
port.Endpoint.StopProcessing();
}
}
}
public virtual void PublishMessage(object source, Message message)
{
// Store message in queue (message box). Not yet implemented, but is what publish/subscribe will do
// Process messages
if (source is IEndpoint)
{
foreach (var application in Applications)
{
foreach (var channel in application.Channels)
{
if (channel.Source.AcceptMessage(source, message))
{
channel.Source.ProcessMessage(source, message.Clone());
}
}
}
}
else if (source is Source)
{
// This is coming from source after transformation
foreach (Destination destination in ((Source) source).Channel.Destinations)
{
if (destination.AcceptMessage((Source) source, message))
{
destination.ProcessMessage((Source) source, message.Clone());
}
}
}
else if (source is Destination)
{
foreach (var application in Applications)
{
foreach (var port in application.Ports)
{
ProcessInPipeline(port.Endpoint, message);
}
}
}
else
{
throw new Exception(string.Format("Illegal route. Source: {0}, Message {2}", source, message.Id));
}
}
public void ProcessInPipeline(IEndpoint source, byte[] data)
{
PushPipelineData(source, data);
}
public void ProcessInPipeline(IEndpoint source, Message message)
{
PushPipelineData(source, message);
}
public void AddPipelineComponent(IEndpoint endpoint, IPipelineComponent pipelineComponent)
{
Queue<IPipelineComponent> pipeline;
if (_pipelines.ContainsKey(endpoint))
{
pipeline = _pipelines[endpoint];
}
else
{
pipeline = new Queue<IPipelineComponent>();
_pipelines.Add(endpoint, pipeline);
}
pipeline.Enqueue(pipelineComponent);
}
public void PushPipelineData(IEndpoint endpoint, byte[] data)
{
// Find pipeline for endpoint and process..
Queue<IPipelineComponent> pipeline;
if (!_pipelines.TryGetValue(endpoint, out pipeline))
{
// Temporary. Remove during refactoring of endpoints.
Message message = new Message("text/plain");
message.SetValueFrom(data);
PublishMessage(endpoint, message);
}
else
{
byte[] decoded = null;
foreach (IDecoder component in pipeline.OfType<IDecoder>())
{
decoded = component.Decode(data);
if (decoded != null) break;
}
if (decoded == null) decoded = data;
foreach (IDisassembler component in pipeline.OfType<IDisassembler>())
{
component.Disassemble(decoded);
Message message;
do
{
message = component.NextMessage();
if (message != null) PublishMessage(endpoint, message);
} while (message != null);
}
}
}
public void PushPipelineData(IEndpoint endpoint, Message message)
{
Queue<IPipelineComponent> pipeline;
if (!_pipelines.TryGetValue(endpoint, out pipeline))
{
//TODO: Temporary for testing
endpoint.ProcessMessage(endpoint, message.GetBytes());
}
else
{
byte[] data = null;
foreach (IAssembler component in pipeline.OfType<IAssembler>())
{
component.AddMessage(message);
data = component.Assemble();
// Decide what to do if data is returned.
// Right now, we break out and go to encoders
if (data != null) break;
}
if (data == null) data = message.GetBytes();
foreach (IEncoder component in pipeline.OfType<IEncoder>())
{
data = component.Encode(data);
}
if (data != null)
{
endpoint.ProcessMessage(endpoint, data);
}
}
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using java = biz.ritter.javapi;
namespace biz.ritter.javapi.io
{
/**
* A specialized {@link Reader} for reading the contents of a char array.
*
* @see CharArrayWriter
*/
public class CharArrayReader : Reader {
/**
* The buffer for characters.
*/
protected char []buf;
/**
* The current buffer position.
*/
protected int pos;
/**
* The current mark position.
*/
protected int markedPos = -1;
/**
* The ending index of the buffer.
*/
protected int count;
/**
* Constructs a CharArrayReader on the char array {@code buf}. The size of
* the reader is set to the length of the buffer and the object to read
* from is set to {@code buf}.
*
* @param buf
* the char array from which to read.
*/
public CharArrayReader(char[] buf) {
this.buf = buf;
this.count = buf.Length;
}
/**
* Constructs a CharArrayReader on the char array {@code buf}. The size of
* the reader is set to {@code length} and the start position from which to
* read the buffer is set to {@code offset}.
*
* @param buf
* the char array from which to read.
* @param offset
* the index of the first character in {@code buf} to read.
* @param length
* the number of characters that can be read from {@code buf}.
* @throws IllegalArgumentException
* if {@code offset < 0} or {@code length < 0}, or if
* {@code offset} is greater than the size of {@code buf} .
*/
public CharArrayReader(char[] buf, int offset, int length) {
/*
* The spec of this constructor is broken. In defining the legal values
* of offset and length, it doesn't consider buffer's length. And to be
* compatible with the broken spec, we must also test whether
* (offset + length) overflows.
*/
if (offset < 0 || offset > buf.Length || length < 0 || offset + length < 0) {
throw new java.lang.IllegalArgumentException();
}
this.buf = buf;
this.pos = offset;
this.markedPos = offset;
/* This is according to spec */
int bufferLength = buf.Length;
this.count = offset + length < bufferLength ? length : bufferLength;
}
/**
* This method closes this CharArrayReader. Once it is closed, you can no
* longer read from it. Only the first invocation of this method has any
* effect.
*/
public override void close() {
lock (lockJ) {
if (isOpen()) {
buf = null;
}
}
}
/**
* Indicates whether this reader is open.
*
* @return {@code true} if the reader is open, {@code false} otherwise.
*/
private bool isOpen() {
return buf != null;
}
/**
* Indicates whether this reader is closed.
*
* @return {@code true} if the reader is closed, {@code false} otherwise.
*/
private bool isClosed() {
return buf == null;
}
/**
* Sets a mark position in this reader. The parameter {@code readLimit} is
* ignored for CharArrayReaders. Calling {@code reset()} will reposition the
* reader back to the marked position provided the mark has not been
* invalidated.
*
* @param readLimit
* ignored for CharArrayReaders.
* @throws IOException
* if this reader is closed.
*/
public override void mark(int readLimit) {//throws IOException {
lock (lockJ) {
if (isClosed()) {
throw new IOException("CharArrayReader is closed."); //$NON-NLS-1$
}
markedPos = pos;
}
}
/**
* Indicates whether this reader supports the {@code mark()} and
* {@code reset()} methods.
*
* @return {@code true} for CharArrayReader.
* @see #mark(int)
* @see #reset()
*/
public override bool markSupported() {
return true;
}
/**
* Reads a single character from this reader and returns it as an integer
* with the two higher-order bytes set to 0. Returns -1 if no more
* characters are available from this reader.
*
* @return the character read as an int or -1 if the end of the reader has
* been reached.
* @throws IOException
* if this reader is closed.
*/
public override int read() {//throws IOException {
lock (lockJ) {
if (isClosed()) {
throw new IOException("CharArrayReader is closed."); //$NON-NLS-1$
}
if (pos == count) {
return -1;
}
return buf[pos++];
}
}
/**
* Reads at most {@code count} characters from this CharArrayReader and
* stores them at {@code offset} in the character array {@code buf}.
* Returns the number of characters actually read or -1 if the end of reader
* was encountered.
*
* @param buffer
* the character array to store the characters read.
* @param offset
* the initial position in {@code buffer} to store the characters
* read from this reader.
* @param len
* the maximum number of characters to read.
* @return number of characters read or -1 if the end of the reader has been
* reached.
* @throws IndexOutOfBoundsException
* if {@code offset < 0} or {@code len < 0}, or if
* {@code offset + len} is bigger than the size of
* {@code buffer}.
* @throws IOException
* if this reader is closed.
*/
public override int read(char[] buffer, int offset, int len) {//throws IOException {
if (offset < 0 || offset > buffer.Length) {
// luni.12=Offset out of bounds \: {0}
throw new java.lang.ArrayIndexOutOfBoundsException(
"Offset out of bounds : "+ offset); //$NON-NLS-1$
}
if (len < 0 || len > buffer.Length - offset) {
// luni.18=Length out of bounds \: {0}
throw new java.lang.ArrayIndexOutOfBoundsException(
"Length out of bounds : "+ len); //$NON-NLS-1$
}
lock (lockJ) {
if (isClosed()) {
throw new IOException("CharArrayReader is closed."); //$NON-NLS-1$
}
if (pos < this.count) {
int bytesRead = pos + len > this.count ? this.count - pos : len;
java.lang.SystemJ.arraycopy(this.buf, pos, buffer, offset, bytesRead);
pos += bytesRead;
return bytesRead;
}
return -1;
}
}
/**
* Indicates whether this reader is ready to be read without blocking.
* Returns {@code true} if the next {@code read} will not block. Returns
* {@code false} if this reader may or may not block when {@code read} is
* called. The implementation in CharArrayReader always returns {@code true}
* even when it has been closed.
*
* @return {@code true} if this reader will not block when {@code read} is
* called, {@code false} if unknown or blocking will occur.
* @throws IOException
* if this reader is closed.
*/
public override bool ready() {//throws IOException {
lock (lockJ) {
if (isClosed()) {
throw new IOException("CharArrayReader is closed."); //$NON-NLS-1$
}
return pos != count;
}
}
/**
* Resets this reader's position to the last {@code mark()} location.
* Invocations of {@code read()} and {@code skip()} will occur from this new
* location. If this reader has not been marked, it is reset to the
* beginning of the string.
*
* @throws IOException
* if this reader is closed.
*/
public override void reset() {//throws IOException {
lock (lockJ) {
if (isClosed()) {
throw new IOException("CharArrayReader is closed."); //$NON-NLS-1$
}
pos = markedPos != -1 ? markedPos : 0;
}
}
/**
* Skips {@code count} number of characters in this reader. Subsequent
* {@code read()}s will not return these characters unless {@code reset()}
* is used. This method does nothing and returns 0 if {@code n} is negative.
*
* @param n
* the number of characters to skip.
* @return the number of characters actually skipped.
* @throws IOException
* if this reader is closed.
*/
public override long skip(long n) {//throws IOException {
lock (lockJ) {
if (isClosed()) {
throw new IOException("CharArrayReader is closed."); //$NON-NLS-1$
}
if (n <= 0) {
return 0;
}
long skipped = 0;
if (n < this.count - pos) {
pos = pos + (int) n;
skipped = n;
} else {
skipped = this.count - pos;
pos = this.count;
}
return skipped;
}
}
}}
| |
using System;
using System.Collections.Generic;
using System.Text;
using NUnit.Framework;
using OpenQA.Selenium.Environment;
namespace OpenQA.Selenium
{
[TestFixture]
public class FormHandlingTests : DriverTestFixture
{
[Test]
public void ShouldClickOnSubmitInputElements()
{
driver.Url = formsPage;
driver.FindElement(By.Id("submitButton")).Click();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ClickingOnUnclickableElementsDoesNothing()
{
driver.Url = formsPage;
driver.FindElement(By.XPath("//body")).Click();
}
[Test]
public void ShouldBeAbleToClickImageButtons()
{
driver.Url = formsPage;
driver.FindElement(By.Id("imageButton")).Click();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ShouldBeAbleToSubmitForms()
{
driver.Url = formsPage;
driver.FindElement(By.Name("login")).Submit();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ShouldSubmitAFormWhenAnyInputElementWithinThatFormIsSubmitted()
{
driver.Url = formsPage;
driver.FindElement(By.Id("checky")).Submit();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
public void ShouldSubmitAFormWhenAnyElementWithinThatFormIsSubmitted()
{
driver.Url = formsPage;
driver.FindElement(By.XPath("//form/p")).Submit();
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual(driver.Title, "We Arrive Here");
}
[Test]
[IgnoreBrowser(Browser.Android)]
[IgnoreBrowser(Browser.Chrome)]
[IgnoreBrowser(Browser.IPhone)]
[IgnoreBrowser(Browser.Opera)]
[IgnoreBrowser(Browser.PhantomJS)]
[IgnoreBrowser(Browser.Safari)]
public void ShouldNotBeAbleToSubmitAFormThatDoesNotExist()
{
driver.Url = formsPage;
Assert.Throws<NoSuchElementException>(() => driver.FindElement(By.Name("SearchableText")).Submit());
}
[Test]
public void ShouldBeAbleToEnterTextIntoATextAreaBySettingItsValue()
{
driver.Url = javascriptPage;
IWebElement textarea = driver.FindElement(By.Id("keyUpArea"));
string cheesey = "Brie and cheddar";
textarea.SendKeys(cheesey);
Assert.AreEqual(textarea.GetAttribute("value"), cheesey);
}
[Test]
public void SendKeysKeepsCapitalization()
{
driver.Url = javascriptPage;
IWebElement textarea = driver.FindElement(By.Id("keyUpArea"));
string cheesey = "BrIe And CheDdar";
textarea.SendKeys(cheesey);
Assert.AreEqual(textarea.GetAttribute("value"), cheesey);
}
[Test]
public void ShouldSubmitAFormUsingTheNewlineLiteral()
{
driver.Url = formsPage;
IWebElement nestedForm = driver.FindElement(By.Id("nested_form"));
IWebElement input = nestedForm.FindElement(By.Name("x"));
input.SendKeys("\n");
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual("We Arrive Here", driver.Title);
Assert.IsTrue(driver.Url.EndsWith("?x=name"));
}
[Test]
public void ShouldSubmitAFormUsingTheEnterKey()
{
driver.Url = formsPage;
IWebElement nestedForm = driver.FindElement(By.Id("nested_form"));
IWebElement input = nestedForm.FindElement(By.Name("x"));
input.SendKeys(Keys.Enter);
WaitFor(TitleToBe("We Arrive Here"), "Browser title is not 'We Arrive Here'");
Assert.AreEqual("We Arrive Here", driver.Title);
Assert.IsTrue(driver.Url.EndsWith("?x=name"));
}
[Test]
public void ShouldEnterDataIntoFormFields()
{
driver.Url = xhtmlTestPage;
IWebElement element = driver.FindElement(By.XPath("//form[@name='someForm']/input[@id='username']"));
String originalValue = element.GetAttribute("value");
Assert.AreEqual(originalValue, "change");
element.Clear();
element.SendKeys("some text");
element = driver.FindElement(By.XPath("//form[@name='someForm']/input[@id='username']"));
String newFormValue = element.GetAttribute("value");
Assert.AreEqual(newFormValue, "some text");
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToAlterTheContentsOfAFileUploadInputElement()
{
driver.Url = formsPage;
IWebElement uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
System.IO.FileInfo inputFile = new System.IO.FileInfo("test.txt");
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
uploadElement.SendKeys(inputFile.FullName);
System.IO.FileInfo outputFile = new System.IO.FileInfo(uploadElement.GetAttribute("value"));
Assert.AreEqual(inputFile.Name, outputFile.Name);
inputFile.Delete();
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToSendKeysToAFileUploadInputElementInAnXhtmlDocument()
{
// IE before 9 doesn't handle pages served with an XHTML content type, and just prompts for to
// download it
if (TestUtilities.IsOldIE(driver))
{
return;
}
driver.Url = xhtmlFormPage;
IWebElement uploadElement = driver.FindElement(By.Id("file"));
Assert.AreEqual(string.Empty, uploadElement.GetAttribute("value"));
System.IO.FileInfo inputFile = new System.IO.FileInfo("test.txt");
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
uploadElement.SendKeys(inputFile.FullName);
System.IO.FileInfo outputFile = new System.IO.FileInfo(uploadElement.GetAttribute("value"));
Assert.AreEqual(inputFile.Name, outputFile.Name);
inputFile.Delete();
}
[Test]
[IgnoreBrowser(Browser.Android, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.IPhone, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.Safari, "Does not yet support file uploads")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support file uploads")]
public void ShouldBeAbleToUploadTheSameFileTwice()
{
System.IO.FileInfo inputFile = new System.IO.FileInfo("test.txt");
System.IO.StreamWriter inputFileWriter = inputFile.CreateText();
inputFileWriter.WriteLine("Hello world");
inputFileWriter.Close();
driver.Url = formsPage;
IWebElement uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
uploadElement.SendKeys(inputFile.FullName);
uploadElement.Submit();
driver.Url = formsPage;
uploadElement = driver.FindElement(By.Id("upload"));
Assert.IsTrue(string.IsNullOrEmpty(uploadElement.GetAttribute("value")));
uploadElement.SendKeys(inputFile.FullName);
uploadElement.Submit();
// If we get this far, then we're all good.
}
[Test]
public void SendingKeyboardEventsShouldAppendTextInInputs()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("working"));
element.SendKeys("Some");
String value = element.GetAttribute("value");
Assert.AreEqual(value, "Some");
element.SendKeys(" text");
value = element.GetAttribute("value");
Assert.AreEqual(value, "Some text");
}
[Test]
public void SendingKeyboardEventsShouldAppendTextInInputsWithExistingValue()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("inputWithText"));
element.SendKeys(". Some text");
string value = element.GetAttribute("value");
Assert.AreEqual("Example text. Some text", value);
}
[Test]
[IgnoreBrowser(Browser.HtmlUnit, "Not implemented going to the end of the line first")]
public void SendingKeyboardEventsShouldAppendTextInTextAreas()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("withText"));
element.SendKeys(". Some text");
String value = element.GetAttribute("value");
Assert.AreEqual(value, "Example text. Some text");
}
[Test]
public void ShouldBeAbleToClearTextFromInputElements()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("working"));
element.SendKeys("Some text");
String value = element.GetAttribute("value");
Assert.IsTrue(value.Length > 0);
element.Clear();
value = element.GetAttribute("value");
Assert.AreEqual(value.Length, 0);
}
[Test]
public void EmptyTextBoxesShouldReturnAnEmptyStringNotNull()
{
driver.Url = formsPage;
IWebElement emptyTextBox = driver.FindElement(By.Id("working"));
Assert.AreEqual(emptyTextBox.GetAttribute("value"), "");
IWebElement emptyTextArea = driver.FindElement(By.Id("emptyTextArea"));
Assert.AreEqual(emptyTextBox.GetAttribute("value"), "");
}
[Test]
public void ShouldBeAbleToClearTextFromTextAreas()
{
driver.Url = formsPage;
IWebElement element = driver.FindElement(By.Id("withText"));
element.SendKeys("Some text");
String value = element.GetAttribute("value");
Assert.IsTrue(value.Length > 0);
element.Clear();
value = element.GetAttribute("value");
Assert.AreEqual(value.Length, 0);
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Opera, "Untested")]
[IgnoreBrowser(Browser.PhantomJS, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.WindowsPhone, "Does not yet support alert handling")]
public void HandleFormWithJavascriptAction()
{
string url = EnvironmentManager.Instance.UrlBuilder.WhereIs("form_handling_js_submit.html");
driver.Url = url;
IWebElement element = driver.FindElement(By.Id("theForm"));
element.Submit();
IAlert alert = driver.SwitchTo().Alert();
string text = alert.Text;
alert.Dismiss();
Assert.AreEqual("Tasty cheese", text);
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
public void CanClickOnASubmitButton()
{
CheckSubmitButton("internal_explicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
public void CanClickOnAnImplicitSubmitButton()
{
CheckSubmitButton("internal_implicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Fails on HtmlUnit")]
[IgnoreBrowser(Browser.IE, "Fails on IE")]
public void CanClickOnAnExternalSubmitButton()
{
CheckSubmitButton("external_explicit_submit");
}
[Test]
[IgnoreBrowser(Browser.Android, "Untested")]
[IgnoreBrowser(Browser.IPhone, "Untested")]
[IgnoreBrowser(Browser.Safari, "Untested")]
[IgnoreBrowser(Browser.HtmlUnit, "Fails on HtmlUnit")]
[IgnoreBrowser(Browser.IE, "Fails on IE")]
public void CanClickOnAnExternalImplicitSubmitButton()
{
CheckSubmitButton("external_implicit_submit");
}
private void CheckSubmitButton(string buttonId)
{
driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("click_tests/html5_submit_buttons.html");
string name = "Gromit";
driver.FindElement(By.Id("name")).SendKeys(name);
driver.FindElement(By.Id(buttonId)).Click();
WaitFor(TitleToBe("Submitted Successfully!"), "Browser title is not 'Submitted Successfully!'");
Assert.That(driver.Url.Contains("name=" + name), "URL does not contain 'name=" + name + "'. Actual URL:" + driver.Url);
}
private Func<bool> TitleToBe(string desiredTitle)
{
return () =>
{
return driver.Title == desiredTitle;
};
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
#region NullTypeInfo
/// <summary>
/// TraceLogging: Type handler for empty or unsupported types.
/// </summary>
/// <typeparam name="DataType">The type to handle.</typeparam>
internal sealed class NullTypeInfo<DataType>
: TraceLoggingTypeInfo<DataType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddGroup(name);
}
public override void WriteData(TraceLoggingDataCollector collector, ref DataType value)
{
return;
}
public override object GetData(object value)
{
return null;
}
}
#endregion
#region Primitive scalars
/// <summary>
/// TraceLogging: Type handler for Boolean.
/// </summary>
internal sealed class BooleanTypeInfo
: TraceLoggingTypeInfo<Boolean>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format8(format, TraceLoggingDataType.Boolean8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Boolean value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Byte.
/// </summary>
internal sealed class ByteTypeInfo
: TraceLoggingTypeInfo<Byte>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format8(format, TraceLoggingDataType.UInt8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Byte value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for SByte.
/// </summary>
internal sealed class SByteTypeInfo
: TraceLoggingTypeInfo<SByte>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format8(format, TraceLoggingDataType.Int8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref SByte value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Int16.
/// </summary>
internal sealed class Int16TypeInfo
: TraceLoggingTypeInfo<Int16>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format16(format, TraceLoggingDataType.Int16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Int16 value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UInt16.
/// </summary>
internal sealed class UInt16TypeInfo
: TraceLoggingTypeInfo<UInt16>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format16(format, TraceLoggingDataType.UInt16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UInt16 value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Int32.
/// </summary>
internal sealed class Int32TypeInfo
: TraceLoggingTypeInfo<Int32>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format32(format, TraceLoggingDataType.Int32));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Int32 value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UInt32.
/// </summary>
internal sealed class UInt32TypeInfo
: TraceLoggingTypeInfo<UInt32>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format32(format, TraceLoggingDataType.UInt32));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UInt32 value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Int64.
/// </summary>
internal sealed class Int64TypeInfo
: TraceLoggingTypeInfo<Int64>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format64(format, TraceLoggingDataType.Int64));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Int64 value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UInt64.
/// </summary>
internal sealed class UInt64TypeInfo
: TraceLoggingTypeInfo<UInt64>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format64(format, TraceLoggingDataType.UInt64));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UInt64 value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for IntPtr.
/// </summary>
internal sealed class IntPtrTypeInfo
: TraceLoggingTypeInfo<IntPtr>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.FormatPtr(format, Statics.IntPtrType));
}
public override void WriteData(TraceLoggingDataCollector collector, ref IntPtr value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UIntPtr.
/// </summary>
internal sealed class UIntPtrTypeInfo
: TraceLoggingTypeInfo<UIntPtr>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.FormatPtr(format, Statics.UIntPtrType));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UIntPtr value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Double.
/// </summary>
internal sealed class DoubleTypeInfo
: TraceLoggingTypeInfo<Double>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format64(format, TraceLoggingDataType.Double));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Double value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Single.
/// </summary>
internal sealed class SingleTypeInfo
: TraceLoggingTypeInfo<Single>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format32(format, TraceLoggingDataType.Float));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Single value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Char.
/// </summary>
internal sealed class CharTypeInfo
: TraceLoggingTypeInfo<Char>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format16(format, TraceLoggingDataType.Char16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Char value)
{
collector.AddScalar(value);
}
}
#endregion
#region Primitive arrays
/// <summary>
/// TraceLogging: Type handler for Boolean[].
/// </summary>
internal sealed class BooleanArrayTypeInfo
: TraceLoggingTypeInfo<Boolean[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format8(format, TraceLoggingDataType.Boolean8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Boolean[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Byte[].
/// </summary>
internal sealed class ByteArrayTypeInfo
: TraceLoggingTypeInfo<Byte[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
switch (format)
{
default:
collector.AddBinary(name, Statics.MakeDataType(TraceLoggingDataType.Binary, format));
break;
case EventFieldFormat.String:
collector.AddBinary(name, TraceLoggingDataType.CountedMbcsString);
break;
case EventFieldFormat.Xml:
collector.AddBinary(name, TraceLoggingDataType.CountedMbcsXml);
break;
case EventFieldFormat.Json:
collector.AddBinary(name, TraceLoggingDataType.CountedMbcsJson);
break;
case EventFieldFormat.Boolean:
collector.AddArray(name, TraceLoggingDataType.Boolean8);
break;
case EventFieldFormat.Hexadecimal:
collector.AddArray(name, TraceLoggingDataType.HexInt8);
break;
#if false
case EventSourceFieldFormat.Signed:
collector.AddArray(name, TraceLoggingDataType.Int8);
break;
case EventSourceFieldFormat.Unsigned:
collector.AddArray(name, TraceLoggingDataType.UInt8);
break;
#endif
}
}
public override void WriteData(TraceLoggingDataCollector collector, ref Byte[] value)
{
collector.AddBinary(value);
}
}
/// <summary>
/// TraceLogging: Type handler for SByte[].
/// </summary>
internal sealed class SByteArrayTypeInfo
: TraceLoggingTypeInfo<SByte[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format8(format, TraceLoggingDataType.Int8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref SByte[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Int16[].
/// </summary>
internal sealed class Int16ArrayTypeInfo
: TraceLoggingTypeInfo<Int16[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format16(format, TraceLoggingDataType.Int16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Int16[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UInt16[].
/// </summary>
internal sealed class UInt16ArrayTypeInfo
: TraceLoggingTypeInfo<UInt16[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format16(format, TraceLoggingDataType.UInt16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UInt16[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Int32[].
/// </summary>
internal sealed class Int32ArrayTypeInfo
: TraceLoggingTypeInfo<Int32[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format32(format, TraceLoggingDataType.Int32));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Int32[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UInt32[].
/// </summary>
internal sealed class UInt32ArrayTypeInfo
: TraceLoggingTypeInfo<UInt32[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format32(format, TraceLoggingDataType.UInt32));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UInt32[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Int64[].
/// </summary>
internal sealed class Int64ArrayTypeInfo
: TraceLoggingTypeInfo<Int64[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format64(format, TraceLoggingDataType.Int64));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Int64[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UInt64[].
/// </summary>
internal sealed class UInt64ArrayTypeInfo
: TraceLoggingTypeInfo<UInt64[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format64(format, TraceLoggingDataType.UInt64));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UInt64[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for IntPtr[].
/// </summary>
internal sealed class IntPtrArrayTypeInfo
: TraceLoggingTypeInfo<IntPtr[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.FormatPtr(format, Statics.IntPtrType));
}
public override void WriteData(TraceLoggingDataCollector collector, ref IntPtr[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for UIntPtr[].
/// </summary>
internal sealed class UIntPtrArrayTypeInfo
: TraceLoggingTypeInfo<UIntPtr[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.FormatPtr(format, Statics.UIntPtrType));
}
public override void WriteData(TraceLoggingDataCollector collector, ref UIntPtr[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Char[].
/// </summary>
internal sealed class CharArrayTypeInfo
: TraceLoggingTypeInfo<Char[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format16(format, TraceLoggingDataType.Char16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Char[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Double[].
/// </summary>
internal sealed class DoubleArrayTypeInfo
: TraceLoggingTypeInfo<Double[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format64(format, TraceLoggingDataType.Double));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Double[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Single[].
/// </summary>
internal sealed class SingleArrayTypeInfo
: TraceLoggingTypeInfo<Single[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.Format32(format, TraceLoggingDataType.Float));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Single[] value)
{
collector.AddArray(value);
}
}
#endregion
#region Enum scalars
internal sealed class EnumByteTypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format8(format, TraceLoggingDataType.UInt8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<Byte>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumSByteTypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format8(format, TraceLoggingDataType.Int8));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<SByte>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumInt16TypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format16(format, TraceLoggingDataType.Int16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<Int16>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumUInt16TypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format16(format, TraceLoggingDataType.UInt16));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<UInt16>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumInt32TypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format32(format, TraceLoggingDataType.Int32));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<Int32>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumUInt32TypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format32(format, TraceLoggingDataType.UInt32));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<UInt32>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumInt64TypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format64(format, TraceLoggingDataType.Int64));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<Int64>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
internal sealed class EnumUInt64TypeInfo<EnumType>
: TraceLoggingTypeInfo<EnumType>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.Format64(format, TraceLoggingDataType.UInt64));
}
public override void WriteData(TraceLoggingDataCollector collector, ref EnumType value)
{
collector.AddScalar(EnumHelper<UInt64>.Cast(value));
}
public override object GetData(object value)
{
return value;
}
}
#endregion
#region Other built-in types
/// <summary>
/// TraceLogging: Type handler for String.
/// </summary>
internal sealed class StringTypeInfo
: TraceLoggingTypeInfo<String>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddBinary(name, Statics.MakeDataType(TraceLoggingDataType.CountedUtf16String, format));
}
public override void WriteData(TraceLoggingDataCollector collector, ref String value)
{
collector.AddBinary(value);
}
public override object GetData(object value)
{
object val = base.GetData(value);
if (null == val)
val = "";
return val;
}
}
/// <summary>
/// TraceLogging: Type handler for Guid.
/// </summary>
internal sealed class GuidTypeInfo
: TraceLoggingTypeInfo<Guid>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.MakeDataType(TraceLoggingDataType.Guid, format));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Guid value)
{
collector.AddScalar(value);
}
}
/// <summary>
/// TraceLogging: Type handler for Guid[].
/// </summary>
internal sealed class GuidArrayTypeInfo
: TraceLoggingTypeInfo<Guid[]>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddArray(name, Statics.MakeDataType(TraceLoggingDataType.Guid, format));
}
public override void WriteData(TraceLoggingDataCollector collector, ref Guid[] value)
{
collector.AddArray(value);
}
}
/// <summary>
/// TraceLogging: Type handler for DateTime.
/// </summary>
internal sealed class DateTimeTypeInfo
: TraceLoggingTypeInfo<DateTime>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.MakeDataType(TraceLoggingDataType.FileTime, format));
}
public override void WriteData(TraceLoggingDataCollector collector, ref DateTime value)
{
var ticks = value.Ticks;
collector.AddScalar(ticks < 504911232000000000 ? 0 : ticks - 504911232000000000);
}
}
/// <summary>
/// TraceLogging: Type handler for DateTimeOffset.
/// </summary>
internal sealed class DateTimeOffsetTypeInfo
: TraceLoggingTypeInfo<DateTimeOffset>
{
public override void WriteMetadata(TraceLoggingMetadataCollector collector, string name, EventFieldFormat format)
{
var group = collector.AddGroup(name);
group.AddScalar("Ticks", Statics.MakeDataType(TraceLoggingDataType.FileTime, format));
group.AddScalar("Offset", TraceLoggingDataType.Int64);
}
public override void WriteData(TraceLoggingDataCollector collector, ref DateTimeOffset value)
{
var ticks = value.Ticks;
collector.AddScalar(ticks < 504911232000000000 ? 0 : ticks - 504911232000000000);
collector.AddScalar(value.Offset.Ticks);
}
}
/// <summary>
/// TraceLogging: Type handler for TimeSpan.
/// </summary>
internal sealed class TimeSpanTypeInfo
: TraceLoggingTypeInfo<TimeSpan>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.MakeDataType(TraceLoggingDataType.Int64, format));
}
public override void WriteData(TraceLoggingDataCollector collector, ref TimeSpan value)
{
collector.AddScalar(value.Ticks);
}
}
/// <summary>
/// TraceLogging: Type handler for Decimal. (Note: not full-fidelity, exposed as Double.)
/// </summary>
internal sealed class DecimalTypeInfo
: TraceLoggingTypeInfo<Decimal>
{
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
collector.AddScalar(name, Statics.MakeDataType(TraceLoggingDataType.Double, format));
}
public override void WriteData(TraceLoggingDataCollector collector, ref decimal value)
{
collector.AddScalar((double)value);
}
}
/// <summary>
/// TraceLogging: Type handler for KeyValuePair.
/// </summary>
/// <typeparam name="K">Type of the KeyValuePair's Key property.</typeparam>
/// <typeparam name="V">Type of the KeyValuePair's Value property.</typeparam>
internal sealed class KeyValuePairTypeInfo<K, V>
: TraceLoggingTypeInfo<KeyValuePair<K, V>>
{
private readonly TraceLoggingTypeInfo<K> keyInfo;
private readonly TraceLoggingTypeInfo<V> valueInfo;
public KeyValuePairTypeInfo(List<Type> recursionCheck)
{
this.keyInfo = TraceLoggingTypeInfo<K>.GetInstance(recursionCheck);
this.valueInfo = TraceLoggingTypeInfo<V>.GetInstance(recursionCheck);
}
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
var group = collector.AddGroup(name);
this.keyInfo.WriteMetadata(group, "Key", EventFieldFormat.Default);
this.valueInfo.WriteMetadata(group, "Value", format);
}
public override void WriteData(
TraceLoggingDataCollector collector,
ref KeyValuePair<K, V> value)
{
var key = value.Key;
var val = value.Value;
this.keyInfo.WriteData(collector, ref key);
this.valueInfo.WriteData(collector, ref val);
}
public override object GetData(object value)
{
var serializedType = new Dictionary<string, object>();
var keyValuePair = (KeyValuePair<K, V>) value;
serializedType.Add("Key", this.keyInfo.GetData(keyValuePair.Key));
serializedType.Add("Value", this.valueInfo.GetData(keyValuePair.Value));
return serializedType;
}
}
/// <summary>
/// TraceLogging: Type handler for Nullable.
/// </summary>
/// <typeparam name="T">Type of the Nullable's Value property.</typeparam>
internal sealed class NullableTypeInfo<T>
: TraceLoggingTypeInfo<Nullable<T>>
where T : struct
{
private readonly TraceLoggingTypeInfo<T> valueInfo;
public NullableTypeInfo(List<Type> recursionCheck)
{
this.valueInfo = TraceLoggingTypeInfo<T>.GetInstance(recursionCheck);
}
public override void WriteMetadata(
TraceLoggingMetadataCollector collector,
string name,
EventFieldFormat format)
{
var group = collector.AddGroup(name);
group.AddScalar("HasValue", TraceLoggingDataType.Boolean8);
this.valueInfo.WriteMetadata(group, "Value", format);
}
public override void WriteData(
TraceLoggingDataCollector collector,
ref Nullable<T> value)
{
var hasValue = value.HasValue;
collector.AddScalar(hasValue);
var val = hasValue ? value.Value : default(T);
this.valueInfo.WriteData(collector, ref val);
}
}
#endregion
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.txt file at the root of this distribution.
*
* You must not remove this notice, or any other, from this software.
*
* ***************************************************************************/
using System;
using System.Diagnostics;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
using IServiceProvider = System.IServiceProvider;
using ShellConstants = Microsoft.VisualStudio.Shell.Interop.Constants;
namespace Microsoft.VisualStudio.Project
{
/// <summary>
/// Defines an abstract class implementing IVsUpdateSolutionEvents interfaces.
/// </summary>
[CLSCompliant(false)]
public abstract class UpdateSolutionEventsListener : IVsUpdateSolutionEvents3, IVsUpdateSolutionEvents2, IDisposable
{
#region fields
/// <summary>
/// The cookie associated to the the events based IVsUpdateSolutionEvents2.
/// </summary>
private uint solutionEvents2Cookie = (uint)ShellConstants.VSCOOKIE_NIL;
/// <summary>
/// The cookie associated to the theIVsUpdateSolutionEvents3 events.
/// </summary>
private uint solutionEvents3Cookie = (uint)ShellConstants.VSCOOKIE_NIL;
/// <summary>
/// The IVsSolutionBuildManager2 object controlling the update solution events.
/// </summary>
private IVsSolutionBuildManager2 solutionBuildManager;
/// <summary>
/// The associated service provider.
/// </summary>
private IServiceProvider serviceProvider;
/// <summary>
/// Flag determining if the object has been disposed.
/// </summary>
private bool isDisposed;
/// <summary>
/// Defines an object that will be a mutex for this object for synchronizing thread calls.
/// </summary>
private static volatile object Mutex = new object();
#endregion
#region ctors
/// <summary>
/// Overloaded constructor.
/// </summary>
/// <param name="serviceProvider">A service provider.</param>
protected UpdateSolutionEventsListener(IServiceProvider serviceProvider)
{
if(serviceProvider == null)
{
throw new ArgumentNullException("serviceProvider");
}
this.serviceProvider = serviceProvider;
ThreadHelper.ThrowIfNotOnUIThread();
this.solutionBuildManager = this.serviceProvider.GetService(typeof(SVsSolutionBuildManager)) as IVsSolutionBuildManager2;
Assumes.Present(solutionBuildManager);
if (this.solutionBuildManager == null)
{
throw new InvalidOperationException();
}
ErrorHandler.ThrowOnFailure(this.solutionBuildManager.AdviseUpdateSolutionEvents(this, out this.solutionEvents2Cookie));
Debug.Assert(this.solutionBuildManager is IVsSolutionBuildManager3, "The solution build manager object implementing IVsSolutionBuildManager2 does not implement IVsSolutionBuildManager3");
ErrorHandler.ThrowOnFailure(this.SolutionBuildManager3.AdviseUpdateSolutionEvents3(this, out this.solutionEvents3Cookie));
}
#endregion
#region properties
/// <summary>
/// The associated service provider.
/// </summary>
protected IServiceProvider ServiceProvider
{
get
{
return this.serviceProvider;
}
}
/// <summary>
/// The solution build manager object controlling the solution events.
/// </summary>
protected IVsSolutionBuildManager2 SolutionBuildManager2
{
get
{
return this.solutionBuildManager;
}
}
/// <summary>
/// The solution build manager object controlling the solution events.
/// </summary>
protected IVsSolutionBuildManager3 SolutionBuildManager3
{
get
{
ThreadHelper.ThrowIfNotOnUIThread();
return (IVsSolutionBuildManager3)this.solutionBuildManager;
}
}
#endregion
#region IVsUpdateSolutionEvents3 Members
/// <summary>
/// Fired after the active solution config is changed (pOldActiveSlnCfg can be NULL).
/// </summary>
/// <param name="oldActiveSlnCfg">Old configuration.</param>
/// <param name="newActiveSlnCfg">New configuration.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int OnAfterActiveSolutionCfgChange(IVsCfg oldActiveSlnCfg, IVsCfg newActiveSlnCfg)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Fired before the active solution config is changed (pOldActiveSlnCfg can be NULL
/// </summary>
/// <param name="oldActiveSlnCfg">Old configuration.</param>
/// <param name="newActiveSlnCfg">New configuration.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int OnBeforeActiveSolutionCfgChange(IVsCfg oldActiveSlnCfg, IVsCfg newActiveSlnCfg)
{
return VSConstants.E_NOTIMPL;
}
#endregion
#region IVsUpdateSolutionEvents2 Members
/// <summary>
/// Called when the active project configuration for a project in the solution has changed.
/// </summary>
/// <param name="hierarchy">The project whose configuration has changed.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int OnActiveProjectCfgChange(IVsHierarchy hierarchy)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Called right before a project configuration begins to build.
/// </summary>
/// <param name="hierarchy">The project that is to be build.</param>
/// <param name="configProject">A configuration project object.</param>
/// <param name="configSolution">A configuration solution object.</param>
/// <param name="action">The action taken.</param>
/// <param name="cancel">A flag indicating cancel.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
/// <remarks>The values for the action are defined in the enum _SLNUPDACTION env\msenv\core\slnupd2.h</remarks>
public int UpdateProjectCfg_Begin(IVsHierarchy hierarchy, IVsCfg configProject, IVsCfg configSolution, uint action, ref int cancel)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Called right after a project configuration is finished building.
/// </summary>
/// <param name="hierarchy">The project that has finished building.</param>
/// <param name="configProject">A configuration project object.</param>
/// <param name="configSolution">A configuration solution object.</param>
/// <param name="action">The action taken.</param>
/// <param name="success">Flag indicating success.</param>
/// <param name="cancel">Flag indicating cancel.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
/// <remarks>The values for the action are defined in the enum _SLNUPDACTION env\msenv\core\slnupd2.h</remarks>
public virtual int UpdateProjectCfg_Done(IVsHierarchy hierarchy, IVsCfg configProject, IVsCfg configSolution, uint action, int success, int cancel)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Called before any build actions have begun. This is the last chance to cancel the build before any building begins.
/// </summary>
/// <param name="cancelUpdate">Flag indicating cancel update.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int UpdateSolution_Begin(ref int cancelUpdate)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Called when a build is being cancelled.
/// </summary>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int UpdateSolution_Cancel()
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Called when a build is completed.
/// </summary>
/// <param name="succeeded">true if no update actions failed.</param>
/// <param name="modified">true if any update action succeeded.</param>
/// <param name="cancelCommand">true if update actions were canceled.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int UpdateSolution_Done(int fSucceeded, int fModified, int fCancelCommand)
{
return VSConstants.E_NOTIMPL;
}
/// <summary>
/// Called before the first project configuration is about to be built.
/// </summary>
/// <param name="cancelUpdate">A flag indicating cancel update.</param>
/// <returns>If the method succeeds, it returns S_OK. If it fails, it returns an error code.</returns>
public virtual int UpdateSolution_StartUpdate(ref int cancelUpdate)
{
return VSConstants.E_NOTIMPL;
}
#endregion
#region IDisposable Members
/// <summary>
/// The IDispose interface Dispose method for disposing the object determinastically.
/// </summary>
public void Dispose()
{
ThreadHelper.ThrowIfNotOnUIThread();
this.Dispose(true);
GC.SuppressFinalize(this);
}
#endregion
#region methods
/// <summary>
/// The method that does the cleanup.
/// </summary>
/// <param name="disposing">true if called from IDispose.Dispose; false if called from Finalizer.</param>
protected virtual void Dispose(bool disposing)
{
// Everybody can go here.
ThreadHelper.ThrowIfNotOnUIThread();
if (!this.isDisposed)
{
// Synchronize calls to the Dispose simultaniously.
lock(Mutex)
{
if(this.solutionEvents2Cookie != (uint)ShellConstants.VSCOOKIE_NIL)
{
this.solutionBuildManager.UnadviseUpdateSolutionEvents(this.solutionEvents2Cookie);
this.solutionEvents2Cookie = (uint)ShellConstants.VSCOOKIE_NIL;
}
if(this.solutionEvents3Cookie != (uint)ShellConstants.VSCOOKIE_NIL)
{
this.SolutionBuildManager3.UnadviseUpdateSolutionEvents3(this.solutionEvents3Cookie);
this.solutionEvents3Cookie = (uint)ShellConstants.VSCOOKIE_NIL;
}
this.isDisposed = true;
}
}
}
#endregion
}
}
| |
/******************************************************************************
* Spine Runtimes Software License
* Version 2.3
*
* Copyright (c) 2013-2015, Esoteric Software
* All rights reserved.
*
* You are granted a perpetual, non-exclusive, non-sublicensable and
* non-transferable license to use, install, execute and perform the Spine
* Runtimes Software (the "Software") and derivative works solely for personal
* or internal use. Without the written permission of Esoteric Software (see
* Section 2 of the Spine Software License Agreement), you may not (a) modify,
* translate, adapt or otherwise create derivative works, improvements of the
* Software or develop new applications using the Software or (b) remove,
* delete, alter or obscure any trademarks or any copyright, trademark, patent
* or other intellectual property or proprietary rights notices on or in the
* Software, including any copy thereof. Redistributions in binary or source
* form must include this license and terms.
*
* THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/
using UnityEngine;
using System.Collections.Generic;
using Spine;
namespace Spine.Unity.Modules {
public class SpriteAttacher : MonoBehaviour {
public bool attachOnStart = true;
public bool keepLoaderInMemory = true;
public Sprite sprite;
[SpineSlot]
public string slot;
private SpriteAttachmentLoader loader;
private RegionAttachment attachment;
void Start () {
if (attachOnStart)
Attach();
}
public void Attach () {
var skeletonRenderer = GetComponent<SkeletonRenderer>();
if (loader == null)
//create loader instance, tell it what sprite and shader to use
loader = new SpriteAttachmentLoader(sprite, Shader.Find("Spine/Skeleton"));
if (attachment == null)
attachment = loader.NewRegionAttachment(null, sprite.name, "");
skeletonRenderer.skeleton.FindSlot(slot).Attachment = attachment;
if (!keepLoaderInMemory)
loader = null;
}
}
public class SpriteAttachmentLoader : AttachmentLoader {
//MITCH: left a todo: Memory cleanup functions
//IMPORTANT: Make sure you clear this when you don't need it anymore. Goodluck.
public static Dictionary<int, AtlasRegion> atlasTable = new Dictionary<int, AtlasRegion>();
//Shouldn't need to clear this, should just prevent redoing premultiply alpha pass on packed atlases
public static List<int> premultipliedAtlasIds = new List<int>();
Sprite sprite;
Shader shader;
public SpriteAttachmentLoader (Sprite sprite, Shader shader) {
if (sprite.packed && sprite.packingMode == SpritePackingMode.Tight) {
Debug.LogError("Tight Packer Policy not supported yet!");
return;
}
this.sprite = sprite;
this.shader = shader;
Texture2D tex = sprite.texture;
//premultiply texture if it hasn't been yet
int instanceId = tex.GetInstanceID();
if (!premultipliedAtlasIds.Contains(instanceId)) {
try {
var colors = tex.GetPixels();
Color c;
float a;
for (int i = 0; i < colors.Length; i++) {
c = colors[i];
a = c.a;
c.r *= a;
c.g *= a;
c.b *= a;
colors[i] = c;
}
tex.SetPixels(colors);
tex.Apply();
premultipliedAtlasIds.Add(instanceId);
} catch {
//texture is not readable! Can't pre-multiply it, you're on your own.
}
}
}
public RegionAttachment NewRegionAttachment (Skin skin, string name, string path) {
RegionAttachment attachment = new RegionAttachment(name);
Texture2D tex = sprite.texture;
int instanceId = tex.GetInstanceID();
AtlasRegion atlasRegion;
// Check cache first
if (atlasTable.ContainsKey(instanceId)) {
atlasRegion = atlasTable[instanceId];
} else {
// Setup new material.
var material = new Material(shader);
if (sprite.packed)
material.name = "Unity Packed Sprite Material";
else
material.name = sprite.name + " Sprite Material";
material.mainTexture = tex;
// Create faux-region to play nice with SkeletonRenderer.
atlasRegion = new AtlasRegion();
AtlasPage page = new AtlasPage();
page.rendererObject = material;
atlasRegion.page = page;
// Cache it.
atlasTable[instanceId] = atlasRegion;
}
Rect texRect = sprite.textureRect;
//normalize rect to UV space of packed atlas
texRect.x = Mathf.InverseLerp(0, tex.width, texRect.x);
texRect.y = Mathf.InverseLerp(0, tex.height, texRect.y);
texRect.width = Mathf.InverseLerp(0, tex.width, texRect.width);
texRect.height = Mathf.InverseLerp(0, tex.height, texRect.height);
Bounds bounds = sprite.bounds;
Vector3 size = bounds.size;
//MITCH: left todo: make sure this rotation thing actually works
bool rotated = false;
if (sprite.packed)
rotated = sprite.packingRotation == SpritePackingRotation.Any;
attachment.SetUVs(texRect.xMin, texRect.yMax, texRect.xMax, texRect.yMin, rotated);
attachment.RendererObject = atlasRegion;
attachment.SetColor(Color.white);
attachment.ScaleX = 1;
attachment.ScaleY = 1;
attachment.RegionOffsetX = sprite.rect.width * (0.5f - InverseLerp(bounds.min.x, bounds.max.x, 0)) / sprite.pixelsPerUnit;
attachment.RegionOffsetY = sprite.rect.height * (0.5f - InverseLerp(bounds.min.y, bounds.max.y, 0)) / sprite.pixelsPerUnit;
attachment.Width = size.x;
attachment.Height = size.y;
attachment.RegionWidth = size.x;
attachment.RegionHeight = size.y;
attachment.RegionOriginalWidth = size.x;
attachment.RegionOriginalHeight = size.y;
attachment.UpdateOffset();
return attachment;
}
public MeshAttachment NewMeshAttachment (Skin skin, string name, string path) {
//MITCH : Left todo: Unity 5 only
return null;
}
public BoundingBoxAttachment NewBoundingBoxAttachment (Skin skin, string name) {
return null;
}
public PathAttachment NewPathAttachment (Skin skin, string name) {
return null;
}
private float InverseLerp(float a, float b, float value)
{
return (value - a) / (b - a);
}
}
public static class SpriteAttachmentExtensions {
public static Attachment AttachUnitySprite (this Skeleton skeleton, string slotName, Sprite sprite, string shaderName = "Spine/Skeleton") {
var att = sprite.ToRegionAttachment(shaderName);
skeleton.FindSlot(slotName).Attachment = att;
return att;
}
public static Attachment AddUnitySprite (this SkeletonData skeletonData, string slotName, Sprite sprite, string skinName = "", string shaderName = "Spine/Skeleton") {
var att = sprite.ToRegionAttachment(shaderName);
var slotIndex = skeletonData.FindSlotIndex(slotName);
Skin skin = skeletonData.defaultSkin;
if (skinName != "")
skin = skeletonData.FindSkin(skinName);
skin.AddAttachment(slotIndex, att.Name, att);
return att;
}
public static RegionAttachment ToRegionAttachment (this Sprite sprite, string shaderName = "Spine/Skeleton") {
var loader = new SpriteAttachmentLoader(sprite, Shader.Find(shaderName));
var att = loader.NewRegionAttachment(null, sprite.name, "");
loader = null;
return att;
}
}
}
| |
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.Text.RegularExpressions;
using Grpc.Core.Internal;
using Grpc.Core.Utils;
namespace Grpc.Core
{
/// <summary>
/// A collection of metadata entries that can be exchanged during a call.
/// gRPC supports these types of metadata:
/// <list type="bullet">
/// <item><term>Request headers</term><description>are sent by the client at the beginning of a remote call before any request messages are sent.</description></item>
/// <item><term>Response headers</term><description>are sent by the server at the beginning of a remote call handler before any response messages are sent.</description></item>
/// <item><term>Response trailers</term><description>are sent by the server at the end of a remote call along with resulting call status.</description></item>
/// </list>
/// </summary>
public sealed class Metadata : IList<Metadata.Entry>
{
/// <summary>
/// All binary headers should have this suffix.
/// </summary>
public const string BinaryHeaderSuffix = "-bin";
/// <summary>
/// An read-only instance of metadata containing no entries.
/// </summary>
public static readonly Metadata Empty = new Metadata().Freeze();
/// <summary>
/// To be used in initial metadata to request specific compression algorithm
/// for given call. Direct selection of compression algorithms is an internal
/// feature and is not part of public API.
/// </summary>
internal const string CompressionRequestAlgorithmMetadataKey = "grpc-internal-encoding-request";
readonly List<Entry> entries;
bool readOnly;
/// <summary>
/// Initializes a new instance of <c>Metadata</c>.
/// </summary>
public Metadata()
{
this.entries = new List<Entry>();
}
/// <summary>
/// Makes this object read-only.
/// </summary>
/// <returns>this object</returns>
internal Metadata Freeze()
{
this.readOnly = true;
return this;
}
// TODO: add support for access by key
#region IList members
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public int IndexOf(Metadata.Entry item)
{
return entries.IndexOf(item);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void Insert(int index, Metadata.Entry item)
{
GrpcPreconditions.CheckNotNull(item);
CheckWriteable();
entries.Insert(index, item);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void RemoveAt(int index)
{
CheckWriteable();
entries.RemoveAt(index);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public Metadata.Entry this[int index]
{
get
{
return entries[index];
}
set
{
GrpcPreconditions.CheckNotNull(value);
CheckWriteable();
entries[index] = value;
}
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void Add(Metadata.Entry item)
{
GrpcPreconditions.CheckNotNull(item);
CheckWriteable();
entries.Add(item);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void Add(string key, string value)
{
Add(new Entry(key, value));
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void Add(string key, byte[] valueBytes)
{
Add(new Entry(key, valueBytes));
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void Clear()
{
CheckWriteable();
entries.Clear();
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public bool Contains(Metadata.Entry item)
{
return entries.Contains(item);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public void CopyTo(Metadata.Entry[] array, int arrayIndex)
{
entries.CopyTo(array, arrayIndex);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public int Count
{
get { return entries.Count; }
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public bool IsReadOnly
{
get { return readOnly; }
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public bool Remove(Metadata.Entry item)
{
CheckWriteable();
return entries.Remove(item);
}
/// <summary>
/// <see cref="T:IList`1"/>
/// </summary>
public IEnumerator<Metadata.Entry> GetEnumerator()
{
return entries.GetEnumerator();
}
IEnumerator System.Collections.IEnumerable.GetEnumerator()
{
return entries.GetEnumerator();
}
private void CheckWriteable()
{
GrpcPreconditions.CheckState(!readOnly, "Object is read only");
}
#endregion
/// <summary>
/// Metadata entry
/// </summary>
public class Entry
{
private static readonly Regex ValidKeyRegex = new Regex("^[.a-z0-9_-]+$");
readonly string key;
readonly string value;
readonly byte[] valueBytes;
private Entry(string key, string value, byte[] valueBytes)
{
this.key = key;
this.value = value;
this.valueBytes = valueBytes;
}
/// <summary>
/// Initializes a new instance of the <see cref="Grpc.Core.Metadata.Entry"/> struct with a binary value.
/// </summary>
/// <param name="key">Metadata key, needs to have suffix indicating a binary valued metadata entry.</param>
/// <param name="valueBytes">Value bytes.</param>
public Entry(string key, byte[] valueBytes)
{
this.key = NormalizeKey(key);
GrpcPreconditions.CheckArgument(HasBinaryHeaderSuffix(this.key),
"Key for binary valued metadata entry needs to have suffix indicating binary value.");
this.value = null;
GrpcPreconditions.CheckNotNull(valueBytes, "valueBytes");
this.valueBytes = new byte[valueBytes.Length];
Buffer.BlockCopy(valueBytes, 0, this.valueBytes, 0, valueBytes.Length); // defensive copy to guarantee immutability
}
/// <summary>
/// Initializes a new instance of the <see cref="Grpc.Core.Metadata.Entry"/> struct holding an ASCII value.
/// </summary>
/// <param name="key">Metadata key, must not use suffix indicating a binary valued metadata entry.</param>
/// <param name="value">Value string. Only ASCII characters are allowed.</param>
public Entry(string key, string value)
{
this.key = NormalizeKey(key);
GrpcPreconditions.CheckArgument(!HasBinaryHeaderSuffix(this.key),
"Key for ASCII valued metadata entry cannot have suffix indicating binary value.");
this.value = GrpcPreconditions.CheckNotNull(value, "value");
this.valueBytes = null;
}
/// <summary>
/// Gets the metadata entry key.
/// </summary>
public string Key
{
get
{
return this.key;
}
}
/// <summary>
/// Gets the binary value of this metadata entry.
/// </summary>
public byte[] ValueBytes
{
get
{
if (valueBytes == null)
{
return MarshalUtils.GetBytesASCII(value);
}
// defensive copy to guarantee immutability
var bytes = new byte[valueBytes.Length];
Buffer.BlockCopy(valueBytes, 0, bytes, 0, valueBytes.Length);
return bytes;
}
}
/// <summary>
/// Gets the string value of this metadata entry.
/// </summary>
public string Value
{
get
{
GrpcPreconditions.CheckState(!IsBinary, "Cannot access string value of a binary metadata entry");
return value ?? MarshalUtils.GetStringASCII(valueBytes);
}
}
/// <summary>
/// Returns <c>true</c> if this entry is a binary-value entry.
/// </summary>
public bool IsBinary
{
get
{
return value == null;
}
}
/// <summary>
/// Returns a <see cref="System.String"/> that represents the current <see cref="Grpc.Core.Metadata.Entry"/>.
/// </summary>
public override string ToString()
{
if (IsBinary)
{
return string.Format("[Entry: key={0}, valueBytes={1}]", key, valueBytes);
}
return string.Format("[Entry: key={0}, value={1}]", key, value);
}
/// <summary>
/// Gets the serialized value for this entry. For binary metadata entries, this leaks
/// the internal <c>valueBytes</c> byte array and caller must not change contents of it.
/// </summary>
internal byte[] GetSerializedValueUnsafe()
{
return valueBytes ?? MarshalUtils.GetBytesASCII(value);
}
/// <summary>
/// Creates a binary value or ascii value metadata entry from data received from the native layer.
/// We trust C core to give us well-formed data, so we don't perform any checks or defensive copying.
/// </summary>
internal static Entry CreateUnsafe(string key, byte[] valueBytes)
{
if (HasBinaryHeaderSuffix(key))
{
return new Entry(key, null, valueBytes);
}
return new Entry(key, MarshalUtils.GetStringASCII(valueBytes), null);
}
private static string NormalizeKey(string key)
{
var normalized = GrpcPreconditions.CheckNotNull(key, "key").ToLowerInvariant();
GrpcPreconditions.CheckArgument(ValidKeyRegex.IsMatch(normalized),
"Metadata entry key not valid. Keys can only contain lowercase alphanumeric characters, underscores, hyphens and dots.");
return normalized;
}
/// <summary>
/// Returns <c>true</c> if the key has "-bin" binary header suffix.
/// </summary>
private static bool HasBinaryHeaderSuffix(string key)
{
// We don't use just string.EndsWith because its implementation is extremely slow
// on CoreCLR and we've seen significant differences in gRPC benchmarks caused by it.
// See https://github.com/dotnet/coreclr/issues/5612
int len = key.Length;
if (len >= 4 &&
key[len - 4] == '-' &&
key[len - 3] == 'b' &&
key[len - 2] == 'i' &&
key[len - 1] == 'n')
{
return true;
}
return false;
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Security.Cryptography;
using System.Security.Cryptography.Apple;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Win32.SafeHandles;
namespace Internal.Cryptography.Pal
{
internal sealed partial class StorePal
{
public static IStorePal FromHandle(IntPtr storeHandle)
{
if (storeHandle == IntPtr.Zero)
throw new ArgumentNullException(nameof(storeHandle));
var keychainHandle = new SafeKeychainHandle(storeHandle);
Interop.CoreFoundation.CFRetain(storeHandle);
return new AppleKeychainStore(keychainHandle, OpenFlags.MaxAllowed);
}
public static ILoaderPal FromBlob(byte[] rawData, SafePasswordHandle password, X509KeyStorageFlags keyStorageFlags)
{
Debug.Assert(password != null);
X509ContentType contentType = X509Certificate2.GetCertContentType(rawData);
SafeKeychainHandle keychain;
bool exportable = true;
if (contentType == X509ContentType.Pkcs12)
{
if ((keyStorageFlags & X509KeyStorageFlags.EphemeralKeySet) == X509KeyStorageFlags.EphemeralKeySet)
{
throw new PlatformNotSupportedException(SR.Cryptography_X509_NoEphemeralPfx);
}
exportable = (keyStorageFlags & X509KeyStorageFlags.Exportable) == X509KeyStorageFlags.Exportable;
bool persist =
(keyStorageFlags & X509KeyStorageFlags.PersistKeySet) == X509KeyStorageFlags.PersistKeySet;
keychain = persist
? Interop.AppleCrypto.SecKeychainCopyDefault()
: Interop.AppleCrypto.CreateTemporaryKeychain();
}
else
{
keychain = SafeTemporaryKeychainHandle.InvalidHandle;
password = SafePasswordHandle.InvalidHandle;
}
// Only dispose tmpKeychain on the exception path, otherwise it's managed by AppleCertLoader.
try
{
SafeCFArrayHandle certs = Interop.AppleCrypto.X509ImportCollection(
rawData,
contentType,
password,
keychain,
exportable);
// If the default keychain was used, null will be passed to the loader.
return new AppleCertLoader(certs, keychain as SafeTemporaryKeychainHandle);
}
catch
{
keychain.Dispose();
throw;
}
}
public static ILoaderPal FromFile(string fileName, SafePasswordHandle password, X509KeyStorageFlags keyStorageFlags)
{
Debug.Assert(password != null);
byte[] fileBytes = File.ReadAllBytes(fileName);
return FromBlob(fileBytes, password, keyStorageFlags);
}
public static IExportPal FromCertificate(ICertificatePalCore cert)
{
return new AppleCertificateExporter(cert);
}
public static IExportPal LinkFromCertificateCollection(X509Certificate2Collection certificates)
{
return new AppleCertificateExporter(certificates);
}
public static IStorePal FromSystemStore(string storeName, StoreLocation storeLocation, OpenFlags openFlags)
{
StringComparer ordinalIgnoreCase = StringComparer.OrdinalIgnoreCase;
switch (storeLocation)
{
case StoreLocation.CurrentUser:
if (ordinalIgnoreCase.Equals("My", storeName))
return AppleKeychainStore.OpenDefaultKeychain(openFlags);
if (ordinalIgnoreCase.Equals("Root", storeName))
return AppleTrustStore.OpenStore(StoreName.Root, storeLocation, openFlags);
if (ordinalIgnoreCase.Equals("Disallowed", storeName))
return AppleTrustStore.OpenStore(StoreName.Disallowed, storeLocation, openFlags);
return FromCustomKeychainStore(storeName, openFlags);
case StoreLocation.LocalMachine:
if (ordinalIgnoreCase.Equals("My", storeName))
return AppleKeychainStore.OpenSystemSharedKeychain(openFlags);
if (ordinalIgnoreCase.Equals("Root", storeName))
return AppleTrustStore.OpenStore(StoreName.Root, storeLocation, openFlags);
if (ordinalIgnoreCase.Equals("Disallowed", storeName))
return AppleTrustStore.OpenStore(StoreName.Disallowed, storeLocation, openFlags);
break;
}
if ((openFlags & OpenFlags.OpenExistingOnly) == OpenFlags.OpenExistingOnly)
throw new CryptographicException(SR.Cryptography_X509_StoreNotFound);
string message = SR.Format(
SR.Cryptography_X509_StoreCannotCreate,
storeName,
storeLocation);
throw new CryptographicException(message, new PlatformNotSupportedException(message));
}
private static IStorePal FromCustomKeychainStore(string storeName, OpenFlags openFlags)
{
string storePath;
if (!IsValidStoreName(storeName))
throw new CryptographicException(SR.Format(SR.Security_InvalidValue, nameof(storeName)));
storePath = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.UserProfile),
"Library",
"Keychains",
storeName.ToLowerInvariant() + ".keychain");
return AppleKeychainStore.CreateOrOpenKeychain(storePath, openFlags);
}
private static bool IsValidStoreName(string storeName)
{
try
{
return !string.IsNullOrWhiteSpace(storeName) && Path.GetFileName(storeName) == storeName;
}
catch (IOException)
{
return false;
}
}
private static void ReadCollection(SafeCFArrayHandle matches, HashSet<X509Certificate2> collection)
{
if (matches.IsInvalid)
{
return;
}
long count = Interop.CoreFoundation.CFArrayGetCount(matches);
for (int i = 0; i < count; i++)
{
IntPtr handle = Interop.CoreFoundation.CFArrayGetValueAtIndex(matches, i);
SafeSecCertificateHandle certHandle;
SafeSecIdentityHandle identityHandle;
if (Interop.AppleCrypto.X509DemuxAndRetainHandle(handle, out certHandle, out identityHandle))
{
X509Certificate2 cert;
if (certHandle.IsInvalid)
{
certHandle.Dispose();
cert = new X509Certificate2(new AppleCertificatePal(identityHandle));
}
else
{
identityHandle.Dispose();
cert = new X509Certificate2(new AppleCertificatePal(certHandle));
}
if (!collection.Add(cert))
{
cert.Dispose();
}
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Media;
using System.Windows.Input;
using System.Windows.Media.Animation;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using Microsoft.Msagl.Core.Geometry.Curves;
using Microsoft.Msagl.Core.Layout;
using Microsoft.Msagl.Drawing;
using Ellipse = Microsoft.Msagl.Core.Geometry.Curves.Ellipse;
using LineSegment = Microsoft.Msagl.Core.Geometry.Curves.LineSegment;
using Node = Microsoft.Msagl.Drawing.Node;
using Point = Microsoft.Msagl.Core.Geometry.Point;
using Rectangle = Microsoft.Msagl.Core.Geometry.Rectangle;
using Shape = Microsoft.Msagl.Drawing.Shape;
using Size = System.Windows.Size;
using Microsoft.VisualStudio.GraphModel;
using System.Globalization;
namespace xCodeMap.xGraphControl
{
internal class XNode : IViewerNodeX, IInvalidatable {
internal Path BoundaryPath;
internal List<XEdge> inEdges = new List<XEdge>();
internal List<XEdge> outEdges = new List<XEdge>();
internal List<XEdge> selfEdges = new List<XEdge>();
public LgNodeInfo LgNodeInfo { get; set; }
private LevelOfDetailsContainer _visualObject;
public FrameworkElement VisualObject
{
get { return _visualObject; }
}
public Node Node { get; private set; }
private GraphNode _vsGraphNodeInfo;
private string _category;
private Brush _fill;
public XNode(Node node, GraphNode gnode = null)
{
Node = node;
_vsGraphNodeInfo = gnode;
_visualObject = new LevelOfDetailsContainer();
Brush strokeBrush = CommonX.BrushFromMsaglColor(Node.Attr.Color);
_fill = CommonX.BrushFromMsaglColor(Node.Attr.FillColor);
if (gnode != null)
{
if (gnode.Categories.Count() > 0)
{
_category = gnode.Categories.ElementAt(0).ToString().Replace("CodeSchema_", "");
_fill = NodeCategories.GetFill(_category);
Brush brush = NodeCategories.GetStroke(_category);
if (brush != null) strokeBrush = brush;
}
}
BoundaryPath = new Path {
//Data = CreatePathFromNodeBoundary(),
Stroke = strokeBrush,
Fill = _fill,
StrokeThickness = Node.Attr.LineWidth,
Tag = this
};
BoundaryCurveIsDirty = true;
Node.Attr.VisualsChanged += AttrLineWidthHasChanged;
//Node.Attr.GeometryNode.LayoutChangeEvent += GeometryNodeBeforeLayoutChangeEvent;
}
double Scale() {
return LgNodeInfo == null ? 1 : LgNodeInfo.Scale;
}
public void GeometryNodeBeforeLayoutChangeEvent(object sender, LayoutChangeEventArgs e) {
var newBoundaryCurve = e.DataAfterChange as ICurve;
if (newBoundaryCurve != null) {
//just compare the bounding boxes for the time being
var nb = newBoundaryCurve.BoundingBox;
var box = Node.BoundingBox;
if (Math.Abs(nb.Width - box.Width) > 0.00001 || Math.Abs(nb.Height - box.Height) > 0.00001)
BoundaryCurveIsDirty = true;
} else
BoundaryCurveIsDirty = true;
}
void AttrLineWidthHasChanged(object sender, EventArgs e) {
BoundaryPath.StrokeThickness = Node.Attr.LineWidth;
}
Geometry DoubleCircle() {
var box = Node.BoundingBox;
double w = box.Width;
double h = box.Height;
var pathGeometry = new PathGeometry();
var r = new Rect(box.Left, box.Bottom, w, h);
pathGeometry.AddGeometry(new EllipseGeometry(r));
var inflation = Math.Min(5.0, Math.Min(w/3, h/3));
r.Inflate(-inflation, -inflation);
pathGeometry.AddGeometry(new EllipseGeometry(r));
return pathGeometry;
}
Geometry CreatePathFromNodeBoundary() {
Geometry geometry;
switch (Node.Attr.Shape) {
case Shape.Box:
case Shape.House:
case Shape.InvHouse:
case Shape.Diamond:
case Shape.Octagon:
geometry = CreateGeometryFromMsaglCurve(Node.GeometryNode.BoundaryCurve);
break;
case Shape.DoubleCircle:
geometry = DoubleCircle();
break;
default:
geometry = GetEllipseGeometry();
break;
}
return geometry;
}
Geometry CreateGeometryFromMsaglCurve(ICurve iCurve) {
var pathGeometry = new PathGeometry();
var pathFigure = new PathFigure {
IsClosed = true,
IsFilled = true,
StartPoint = CommonX.WpfPoint(iCurve.Start)
};
var curve = iCurve as Curve;
if (curve != null) {
AddCurve(pathFigure, curve);
}
else {
var rect = iCurve as RoundedRect;
if (rect != null)
AddCurve(pathFigure, rect.Curve);
else
throw new Exception();
}
pathGeometry.Figures.Add(pathFigure);
return pathGeometry;
}
static void AddCurve(PathFigure pathFigure, Curve curve) {
foreach (ICurve seg in curve.Segments) {
var ls = seg as LineSegment;
if (ls != null)
pathFigure.Segments.Add(new System.Windows.Media.LineSegment(CommonX.WpfPoint(ls.End), true));
else {
var ellipse = seg as Ellipse;
pathFigure.Segments.Add(new ArcSegment(CommonX.WpfPoint(ellipse.End),
new Size(ellipse.AxisA.Length, ellipse.AxisB.Length),
Point.Angle(new Point(1, 0), ellipse.AxisA),
ellipse.ParEnd - ellipse.ParEnd >= Math.PI,
!ellipse.OrientedCounterclockwise()
? SweepDirection.Counterclockwise
: SweepDirection.Clockwise, true));
}
}
}
Geometry GetEllipseGeometry() {
return new EllipseGeometry( CommonX.WpfPoint(Node.BoundingBox.Center), Node.BoundingBox.Width/2,
Node.BoundingBox.Height/2);
}
#region Implementation of IViewerObject
public DrawingObject DrawingObject {
get { return Node; }
}
public bool MarkedForDragging { get; set; }
public event EventHandler MarkedForDraggingEvent;
public event EventHandler UnmarkedForDraggingEvent;
#endregion
public IEnumerable<IViewerEdge> InEdges {
get { return inEdges; }
}
public IEnumerable<IViewerEdge> OutEdges {
get { return outEdges; }
}
public IEnumerable<IViewerEdge> SelfEdges {
get { return selfEdges; }
}
public void SetStrokeFill() {
throw new NotImplementedException();
}
public void AddPort(Port port) {
}
public void RemovePort(Port port) {
}
public void Invalidate(double scale = 1)
{
if (BoundaryCurveIsDirty) {
BoundaryPath.Data = CreatePathFromNodeBoundary();
BoundaryCurveIsDirty = false;
}
double node_scale = Scale();
Rectangle bounds = LgNodeInfo.OriginalCurveOfGeomNode.BoundingBox;
Size real_size = new Size(bounds.Width * node_scale * scale,
bounds.Height * node_scale * scale);
if (node_scale < 0.5 && (real_size.Width<_fontSize || real_size.Height<_fontSize))
{
_visualObject.LevelOfDetail = 0;
}
else
{
if (_visualObject.MaxLevelOfDetail == 0)
{
// First time becoming visible: generate visuals
InitiateContainer();
Visual visual;
Rectangle rect;
rect = CreateIcon(new Point(), out visual);
_visualObject.AddDetail(visual, rect);
rect = CreateTitle(rect.RightBottom, out visual);
_visualObject.AddDetail(visual, rect);
rect = CreateDescription(new Point(rect.Center.X, rect.Top), out visual);
_visualObject.AddDetail(visual, rect);
}
_visualObject.LevelOfDetail = _visualObject.MeasureLevelOfDetail(real_size);
CommonX.PositionElement(_visualObject, _visualObject.BoundingBox, Node.BoundingBox, 1 / scale);
}
}
private double _fontSize = 12;
private void InitiateContainer()
{
_visualObject.ToolTip = _category + " " + Node.LabelText;
_visualObject.MouseEnter += (o, e) => { BoundaryPath.Fill = Brushes.Gold; };
_visualObject.MouseLeave += (o, e) => { BoundaryPath.Fill = _fill; };
if (LgNodeInfo.GeometryNode is Cluster)
{
_fontSize *= 1.25;
_visualObject.VerticalAlignment = VerticalAlignment.Top;
_visualObject.Margin = new Thickness(0, -_fontSize / 2, 0, -_fontSize / 2);
}
}
private TextBlock _textMeasurer = new TextBlock { FontFamily = new FontFamily("Calibri") };
private Rectangle CreateIcon(Point origin, out Visual visual)
{
if (_category != null)
{
ImageSource src = NodeCategories.GetIcon(_category);
if (src != null)
{
DrawingVisual icon = new DrawingVisual();
icon.CacheMode = new BitmapCache(1);
DrawingContext context = icon.RenderOpen();
context.DrawImage(src, new Rect(origin.X, origin.Y, _fontSize, _fontSize));
context.Close();
visual = icon;
return new Rectangle(origin.X, origin.Y, _fontSize, _fontSize);
}
}
visual = null;
return new Rectangle();
}
private Rectangle CreateTitle(Point origin, out Visual visual)
{
DrawingVisual title = new DrawingVisual();
DrawingContext context = title.RenderOpen();
FormattedText fText = new FormattedText(Node.LabelText, CultureInfo.CurrentCulture, FlowDirection.LeftToRight, new Typeface("Calibri"), _fontSize, Brushes.Black);
context.DrawText(fText, CommonX.WpfPoint(origin));
context.Close();
visual = title;
_textMeasurer.FontSize = _fontSize;
_textMeasurer.Text = Node.LabelText;
Size size = CommonX.Measure(_textMeasurer);
return new Rectangle(origin.X, origin.Y, origin.X + size.Width, origin.Y + size.Height);
}
private Rectangle CreateDescription(Point origin, out Visual visual)
{
if (_vsGraphNodeInfo != null)
{
DrawingVisual desc = new DrawingVisual();
DrawingContext context = desc.RenderOpen();
string properties = "";
foreach (KeyValuePair<GraphProperty, object> kvp in _vsGraphNodeInfo.Properties)
{
string name = kvp.Key.ToString();
bool value = (kvp.Value is bool && ((bool)kvp.Value) == true);
if (name.StartsWith("CodeSchemaProperty_Is") && value)
{
properties += (properties.Length > 0 ? " : " : "") + name.Replace("CodeSchemaProperty_Is", "");
}
}
FormattedText fText = new FormattedText(properties, CultureInfo.CurrentCulture, FlowDirection.LeftToRight, new Typeface("Calibri"), _fontSize * 0.8, Brushes.Black);
fText.SetFontStyle(FontStyles.Italic);
fText.TextAlignment = TextAlignment.Center;
context.DrawText(fText, CommonX.WpfPoint(origin));
context.Close();
_textMeasurer.FontSize = _fontSize * 0.8;
_textMeasurer.Text = properties;
Size size = CommonX.Measure(_textMeasurer);
visual = desc;
return new Rectangle(origin.X-size.Width/2, origin.Y, origin.X+size.Width/2, origin.Y+size.Height);
}
visual = null;
return new Rectangle();
}
public override string ToString() {
return Node.Id;
}
protected bool BoundaryCurveIsDirty { get; set; }
internal double BorderPathThickness
{
set
{
BoundaryPath.StrokeThickness = value;
}
}
}
internal static class NodeCategories
{
static VisualBrush _hatchBrush;
internal static Brush GetStroke(string category)
{
switch (category)
{
case "Class": return Brushes.DarkRed;
case "Method": return Brushes.MediumVioletRed;
case "Property": return Brushes.DimGray;
case "Field": return Brushes.MidnightBlue;
case "Namespace": return Brushes.Transparent;
default: return null;
}
}
internal static Brush GetFill(string category)
{
switch (category)
{
case "Namespace":
if (_hatchBrush == null)
{
DrawingVisual visual = new DrawingVisual();
DrawingContext context = visual.RenderOpen();
context.DrawLine(new Pen(Brushes.LightGray, 0.5), new System.Windows.Point(-3, -1), new System.Windows.Point(3, 5));
context.DrawLine(new Pen(Brushes.LightGray, 0.5), new System.Windows.Point(1, -1), new System.Windows.Point(7, 5));
context.Close();
_hatchBrush = new VisualBrush(visual);
_hatchBrush.TileMode = TileMode.Tile;
_hatchBrush.Viewbox = new Rect(0, 0, 4, 4);
_hatchBrush.ViewboxUnits = BrushMappingMode.Absolute;
_hatchBrush.Viewport = new Rect(0, 0, 4, 4);
_hatchBrush.ViewportUnits = BrushMappingMode.Absolute;
}
return _hatchBrush;
default: return Brushes.White;
}
}
internal static ImageSource GetIcon(string category)
{
switch (category)
{
case "Class":
case "Method":
case "Property":
case "Field":
case "Interface":
case "Namespace":
case "Delegate":
case "Event":
case "Solution":
return new BitmapImage(new Uri("Images/Icon_" + category + ".png", UriKind.RelativeOrAbsolute));
default: return null;
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
namespace ParquetSharp.IO.Api
{
using System;
using System.Collections.Generic;
using System.Text;
using ParquetSharp.External;
abstract public class Binary : IComparable<Binary>, IEquatable<Binary>, IComparable
{
protected bool _isBackingBytesReused;
// this isn't really something others should extend
internal Binary() { }
public static readonly Binary EMPTY = fromConstantByteArray(new byte[0]);
public static readonly IComparer<Binary> Comparer = new BinaryComparer();
public static readonly IEqualityComparer<Binary> EqualityComparer = (IEqualityComparer<Binary>)Comparer;
abstract public string toStringUsingUTF8();
abstract public int length();
abstract public void writeTo(OutputStream @out);
abstract public void writeTo(DataOutput @out);
abstract public byte[] getBytes();
/**
* Variant of getBytes() that avoids copying backing data structure by returning
* backing byte[] of the Binary. Do not modify backing byte[] unless you know what
* you are doing.
* @return backing byte[] of correct size, with an offset of 0, if possible, else returns result of getBytes()
*/
abstract public byte[] getBytesUnsafe();
abstract public Binary slice(int start, int length);
internal abstract bool equals(byte[] bytes, int offset, int length);
internal abstract bool equals(ByteBuffer bytes, int offset, int length);
internal abstract bool equals(Binary other);
abstract public int CompareTo(Binary other);
public int CompareTo(object obj)
{
if (obj is Binary)
{
return CompareTo((Binary)obj);
}
if (obj is byte[])
{
return compareTo((byte[])obj, 0, ((byte[])obj).Length);
}
return 0;
}
internal abstract int compareTo(byte[] bytes, int offset, int length);
internal abstract int compareTo(ByteBuffer bytes, int offset, int length);
abstract public ByteBuffer toByteBuffer();
public bool Equals(Binary other)
{
if (other == null)
{
return false;
}
return equals(other);
}
public override bool Equals(object obj)
{
return Equals(obj as Binary);
}
public override string ToString()
{
return "Binary{" +
length() +
(_isBackingBytesReused ? " reused" : " constant") +
" bytes, " +
Arrays.toString(getBytesUnsafe())
+ "}";
}
public Binary copy()
{
if (_isBackingBytesReused)
{
return Binary.fromConstantByteArray(getBytes());
}
else
{
return this;
}
}
/**
* Signals if backing bytes are owned, and can be modified, by producer of the Binary
* @return if backing bytes are held on by producer of the Binary
*/
public bool isBackingBytesReused()
{
return _isBackingBytesReused;
}
private class ByteArraySliceBackedBinary : Binary
{
private readonly byte[] value;
private readonly int offset;
private readonly int _length;
public ByteArraySliceBackedBinary(byte[] value, int offset, int length, bool isBackingBytesReused)
{
this.value = value;
this.offset = offset;
this._length = length;
this._isBackingBytesReused = isBackingBytesReused;
}
public override string toStringUsingUTF8()
{
return Encoding.UTF8.GetString(value, offset, _length);
}
public override int length()
{
return _length;
}
public override void writeTo(OutputStream @out)
{
@out.Write(value, offset, _length);
}
public override byte[] getBytes()
{
return Arrays.copyOfRange(value, offset, offset + _length);
}
public override byte[] getBytesUnsafe()
{
// Backing array is larger than the slice used for this Binary.
return getBytes();
}
public override Binary slice(int start, int length)
{
if (_isBackingBytesReused)
{
return Binary.fromReusedByteArray(value, offset + start, length);
}
else
{
return Binary.fromConstantByteArray(value, offset + start, length);
}
}
public override int GetHashCode()
{
return Binary.hashCode(value, offset, _length);
}
internal override bool equals(Binary other)
{
return other.equals(value, offset, _length);
}
internal override bool equals(byte[] other, int otherOffset, int otherLength)
{
return Binary.equals(value, offset, _length, other, otherOffset, otherLength);
}
internal override bool equals(ByteBuffer bytes, int otherOffset, int otherLength)
{
return Binary.equals(value, offset, _length, bytes, otherOffset, otherLength);
}
public override int CompareTo(Binary other)
{
return other.compareTo(value, offset, _length);
}
internal override int compareTo(byte[] other, int otherOffset, int otherLength)
{
return Binary.compareTwoByteArrays(value, offset, _length, other, otherOffset, otherLength);
}
internal override int compareTo(ByteBuffer bytes, int otherOffset, int otherLength)
{
return Binary.compareByteArrayToByteBuffer(value, offset, _length, bytes, otherOffset, otherLength);
}
public override ByteBuffer toByteBuffer()
{
return ByteBuffer.wrap(value, offset, _length);
}
public override void writeTo(DataOutput @out)
{
@out.write(value, offset, _length);
}
}
private class BinaryComparer : IComparer<Binary>, IEqualityComparer<Binary>
{
public int Compare(Binary x, Binary y)
{
return x.CompareTo(y);
}
public bool Equals(Binary x, Binary y)
{
return x.Equals(y);
}
public int GetHashCode(Binary obj)
{
return obj.GetHashCode();
}
}
private class FromStringBinary : ByteArrayBackedBinary
{
public FromStringBinary(string value)
: base(encodeUTF8(value), false)
{
// reused is false, because we do not
// hold on to the underlying bytes,
// and nobody else has a handle to them
}
private static byte[] encodeUTF8(string value)
{
return Encoding.UTF8.GetBytes(value);
}
public override string ToString()
{
return "Binary{\"" + toStringUsingUTF8() + "\"}";
}
}
public static Binary fromReusedByteArray(byte[] value, int offset, int length)
{
return new ByteArraySliceBackedBinary(value, offset, length, true);
}
public static Binary fromConstantByteArray(byte[] value, int offset,
int length)
{
return new ByteArraySliceBackedBinary(value, offset, length, false);
}
/**
* @deprecated Use @link{fromReusedByteArray} or @link{fromConstantByteArray} instead
*/
[Obsolete]
public static Binary fromByteArray(byte[] value, int offset, int length)
{
return fromReusedByteArray(value, offset, length); // Assume producer intends to reuse byte[]
}
private class ByteArrayBackedBinary : Binary
{
private readonly byte[] value;
public ByteArrayBackedBinary(byte[] value, bool isBackingBytesReused)
{
this.value = value;
this._isBackingBytesReused = isBackingBytesReused;
}
public override string toStringUsingUTF8()
{
return Encoding.UTF8.GetString(value);
}
public override int length()
{
return value.Length;
}
public override void writeTo(OutputStream @out)
{
@out.Write(value);
}
public override byte[] getBytes()
{
return Arrays.copyOfRange(value, 0, value.Length);
}
public override byte[] getBytesUnsafe()
{
return value;
}
public override Binary slice(int start, int length)
{
if (_isBackingBytesReused)
{
return Binary.fromReusedByteArray(value, start, length);
}
else
{
return Binary.fromConstantByteArray(value, start, length);
}
}
public override int GetHashCode()
{
return Binary.hashCode(value, 0, value.Length);
}
internal override bool equals(Binary other)
{
return other.equals(value, 0, value.Length);
}
internal override bool equals(byte[] other, int otherOffset, int otherLength)
{
return Binary.equals(value, 0, value.Length, other, otherOffset, otherLength);
}
internal override bool equals(ByteBuffer bytes, int otherOffset, int otherLength)
{
return Binary.equals(value, 0, value.Length, bytes, otherOffset, otherLength);
}
public override int CompareTo(Binary other)
{
return other.compareTo(value, 0, value.Length);
}
internal override int compareTo(byte[] other, int otherOffset, int otherLength)
{
return Binary.compareTwoByteArrays(value, 0, value.Length, other, otherOffset, otherLength);
}
internal override int compareTo(ByteBuffer bytes, int otherOffset, int otherLength)
{
return Binary.compareByteArrayToByteBuffer(value, 0, value.Length, bytes, otherOffset, otherLength);
}
public override ByteBuffer toByteBuffer()
{
return ByteBuffer.wrap(value);
}
public override void writeTo(DataOutput @out)
{
@out.write(value);
}
}
public static Binary fromReusedByteArray(byte[] value)
{
return new ByteArrayBackedBinary(value, true);
}
public static Binary fromConstantByteArray(byte[] value)
{
return new ByteArrayBackedBinary(value, false);
}
[Obsolete]
/**
* @deprecated Use @link{fromReusedByteArray} or @link{fromConstantByteArray} instead
*/
public static Binary fromByteArray(byte[] value)
{
return fromReusedByteArray(value); // Assume producer intends to reuse byte[]
}
private class ByteBufferBackedBinary : Binary
{
private ByteBuffer value;
private byte[] cachedBytes;
private readonly int offset;
private readonly int _length;
public ByteBufferBackedBinary(ByteBuffer value, int offset, int length, bool isBackingBytesReused)
{
this.value = value;
this.offset = offset;
this._length = length;
this._isBackingBytesReused = isBackingBytesReused;
}
public override string toStringUsingUTF8()
{
return Encoding.UTF8.GetString(value.array(), offset, _length);
}
public override int length()
{
return _length;
}
public override void writeTo(OutputStream @out)
{
if (value.hasArray())
{
@out.Write(value.array(), value.arrayOffset() + offset, _length);
}
else
{
@out.Write(getBytesUnsafe(), 0, _length);
}
}
public override byte[] getBytes()
{
byte[] bytes = new byte[_length];
int limit = value.limit();
value.limit(offset + _length);
int position = value.position();
value.position(offset);
value.get(bytes);
value.limit(limit);
value.position(position);
if (!_isBackingBytesReused)
{ // backing buffer might change
cachedBytes = bytes;
}
return bytes;
}
public override byte[] getBytesUnsafe()
{
return cachedBytes != null ? cachedBytes : getBytes();
}
public override Binary slice(int start, int length)
{
return Binary.fromConstantByteArray(getBytesUnsafe(), start, length);
}
public override int GetHashCode()
{
if (value.hasArray())
{
return Binary.hashCode(value.array(), value.arrayOffset() + offset, _length);
}
else
{
return Binary.hashCode(value, offset, _length);
}
}
internal override bool equals(Binary other)
{
if (value.hasArray())
{
return other.equals(value.array(), value.arrayOffset() + offset, _length);
}
else
{
return other.equals(value, offset, _length);
}
}
internal override bool equals(byte[] other, int otherOffset, int otherLength)
{
if (value.hasArray())
{
return Binary.equals(value.array(), value.arrayOffset() + offset, _length, other, otherOffset, otherLength);
}
else
{
return Binary.equals(other, otherOffset, otherLength, value, offset, _length);
}
}
internal override bool equals(ByteBuffer otherBytes, int otherOffset, int otherLength)
{
return Binary.equals(value, 0, _length, otherBytes, otherOffset, otherLength);
}
public override int CompareTo(Binary other)
{
if (value.hasArray())
{
return other.compareTo(value.array(), value.arrayOffset() + offset, _length);
}
else
{
return other.compareTo(value, offset, _length);
}
}
internal override int compareTo(byte[] other, int otherOffset, int otherLength)
{
if (value.hasArray())
{
return Binary.compareTwoByteArrays(value.array(), value.arrayOffset() + offset, _length,
other, otherOffset, otherLength);
}
{
return Binary.compareByteBufferToByteArray(value, offset, _length, other, otherOffset, otherLength);
}
}
internal override int compareTo(ByteBuffer bytes, int otherOffset, int otherLength)
{
return Binary.compareTwoByteBuffers(value, offset, _length, bytes, otherOffset, otherLength);
}
public override ByteBuffer toByteBuffer()
{
ByteBuffer ret = value.slice();
ret.position(offset);
ret.limit(offset + _length);
return ret;
}
public override void writeTo(DataOutput @out)
{
// TODO: should not have to materialize those bytes
@out.write(getBytesUnsafe());
}
#if false
private void writeObject(java.io.ObjectOutputStream @out)
{
byte[] bytes = getBytesUnsafe();
@out.writeInt(bytes.Length);
@out.write(bytes);
}
private void readObject(java.io.ObjectInputStream @in)
{
int length = @in.readInt();
byte[] bytes = new byte[length];
@in.readFully(bytes, 0, length);
this.value = ByteBuffer.wrap(bytes);
}
#endif
private void readObjectNoData()
{
this.value = ByteBuffer.wrap(new byte[0]);
}
}
public static Binary fromReusedByteBuffer(ByteBuffer value, int offset, int length)
{
return new ByteBufferBackedBinary(value, offset, length, true);
}
public static Binary fromConstantByteBuffer(ByteBuffer value, int offset, int length)
{
return new ByteBufferBackedBinary(value, offset, length, false);
}
public static Binary fromReusedByteBuffer(ByteBuffer value)
{
return new ByteBufferBackedBinary(value, value.position(), value.remaining(), true);
}
public static Binary fromConstantByteBuffer(ByteBuffer value)
{
return new ByteBufferBackedBinary(value, value.position(), value.remaining(), false);
}
/**
* @deprecated Use @link{fromReusedByteBuffer} or @link{fromConstantByteBuffer} instead
*/
[Obsolete]
public static Binary fromByteBuffer(ByteBuffer value)
{
return fromReusedByteBuffer(value); // Assume producer intends to reuse byte[]
}
public static Binary fromString(string value)
{
return new FromStringBinary(value);
}
/**
* @see {@link Arrays#hashCode(byte[])}
* @param array
* @param offset
* @param length
* @return
*/
private static int hashCode(byte[] array, int offset, int length)
{
int result = 1;
for (int i = offset; i < offset + length; i++)
{
byte b = array[i];
result = 31 * result + b;
}
return result;
}
private static int hashCode(ByteBuffer buf, int offset, int length)
{
int result = 1;
for (int i = offset; i < offset + length; i++)
{
byte b = buf.get(i);
result = 31 * result + b;
}
return result;
}
private static bool equals(ByteBuffer buf1, int offset1, int length1, ByteBuffer buf2, int offset2, int length2)
{
if (buf1 == null && buf2 == null) return true;
if (buf1 == null || buf2 == null) return false;
if (length1 != length2) return false;
for (int i = 0; i < length1; i++)
{
if (buf1.get(i + offset1) != buf2.get(i + offset2))
{
return false;
}
}
return true;
}
private static bool equals(byte[] array1, int offset1, int length1, ByteBuffer buf, int offset2, int length2)
{
if (array1 == null && buf == null) return true;
if (array1 == null || buf == null) return false;
if (length1 != length2) return false;
for (int i = 0; i < length1; i++)
{
if (array1[i + offset1] != buf.get(i + offset2))
{
return false;
}
}
return true;
}
/**
* @see {@link Arrays#equals(byte[], byte[])}
* @param array1
* @param offset1
* @param length1
* @param array2
* @param offset2
* @param length2
* @return
*/
private static bool equals(byte[] array1, int offset1, int length1, byte[] array2, int offset2, int length2)
{
if (array1 == null && array2 == null) return true;
if (array1 == null || array2 == null) return false;
if (length1 != length2) return false;
if (array1 == array2 && offset1 == offset2) return true;
for (int i = 0; i < length1; i++)
{
if (array1[i + offset1] != array2[i + offset2])
{
return false;
}
}
return true;
}
private static int compareByteBufferToByteArray(ByteBuffer buf, int offset1, int length1,
byte[] array, int offset2, int length2)
{
return -1 * Binary.compareByteArrayToByteBuffer(array, offset1, length1, buf, offset2, length2);
}
private static int compareByteArrayToByteBuffer(byte[] array1, int offset1, int length1,
ByteBuffer buf, int offset2, int length2)
{
if (array1 == null && buf == null) return 0;
int min_length = (length1 < length2) ? length1 : length2;
for (int i = 0; i < min_length; i++)
{
if (array1[i + offset1] < buf.get(i + offset2))
{
return 1;
}
if (array1[i + offset1] > buf.get(i + offset2))
{
return -1;
}
}
// check remainder
if (length1 == length2) { return 0; }
else if (length1 < length2) { return 1; }
else { return -1; }
}
private static int compareTwoByteBuffers(ByteBuffer buf1, int offset1, int length1,
ByteBuffer buf2, int offset2, int length2)
{
if (buf1 == null && buf2 == null) return 0;
int min_length = (length1 < length2) ? length1 : length2;
for (int i = 0; i < min_length; i++)
{
if (buf1.get(i + offset1) < buf2.get(i + offset2))
{
return 1;
}
if (buf1.get(i + offset1) > buf2.get(i + offset2))
{
return -1;
}
}
// check remainder
if (length1 == length2) { return 0; }
else if (length1 < length2) { return 1; }
else { return -1; }
}
private static int compareTwoByteArrays(byte[] array1, int offset1, int length1,
byte[] array2, int offset2, int length2)
{
if (array1 == null && array2 == null) return 0;
if (array1 == array2 && offset1 == offset2 && length1 == length2) return 0;
int min_length = (length1 < length2) ? length1 : length2;
for (int i = 0; i < min_length; i++)
{
if (array1[i + offset1] < array2[i + offset2])
{
return 1;
}
if (array1[i + offset1] > array2[i + offset2])
{
return -1;
}
}
// check remainder
if (length1 == length2) { return 0; }
else if (length1 < length2) { return 1; }
else { return -1; }
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Security;
namespace System.IO.Pipes
{
public abstract partial class PipeStream : Stream
{
private const string PipeDirectoryPath = "/tmp/corefxnamedpipes/";
internal static string GetPipePath(string serverName, string pipeName)
{
if (serverName != "." && serverName != Interop.libc.gethostname())
{
// Cross-machine pipes are not supported.
throw new PlatformNotSupportedException();
}
if (pipeName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
{
// Since pipes are stored as files in the file system, we don't support
// pipe names that are actually paths or that otherwise have invalid
// filename characters in them.
throw new PlatformNotSupportedException();
}
// Make sure we have the directory in which to put the pipe paths
while (true)
{
int result = Interop.libc.mkdir(PipeDirectoryPath, (int)Interop.libc.Permissions.S_IRWXU);
if (result >= 0)
{
// directory created
break;
}
int errno = Marshal.GetLastWin32Error();
if (errno == Interop.Errors.EINTR)
{
// I/O was interrupted, try again
continue;
}
else if (errno == Interop.Errors.EEXIST)
{
// directory already exists
break;
}
else
{
throw Interop.GetExceptionForIoErrno(errno, PipeDirectoryPath, isDirectory: true);
}
}
// Return the pipe path
return PipeDirectoryPath + pipeName;
}
/// <summary>Throws an exception if the supplied handle does not represent a valid pipe.</summary>
/// <param name="safePipeHandle">The handle to validate.</param>
internal static void ValidateHandleIsPipe(SafePipeHandle safePipeHandle)
{
SysCall(safePipeHandle, (fd, _, __) =>
{
Interop.libcoreclr.fileinfo buf;
int result = Interop.libcoreclr.GetFileInformationFromFd(fd, out buf);
if (result == 0)
{
if ((buf.mode & Interop.libcoreclr.FileTypes.S_IFMT) != Interop.libcoreclr.FileTypes.S_IFIFO)
{
throw new IOException(SR.IO_InvalidPipeHandle);
}
}
return result;
});
}
/// <summary>Initializes the handle to be used asynchronously.</summary>
/// <param name="handle">The handle.</param>
[SecurityCritical]
private void InitializeAsyncHandle(SafePipeHandle handle)
{
// nop
}
private void UninitializeAsyncHandle()
{
// nop
}
[SecurityCritical]
private unsafe int ReadCore(byte[] buffer, int offset, int count)
{
Debug.Assert(_handle != null, "_handle is null");
Debug.Assert(!_handle.IsClosed, "_handle is closed");
Debug.Assert(CanRead, "can't read");
Debug.Assert(buffer != null, "buffer is null");
Debug.Assert(offset >= 0, "offset is negative");
Debug.Assert(count >= 0, "count is negative");
fixed (byte* bufPtr = buffer)
{
return (int)SysCall(_handle, (fd, ptr, len) =>
{
long result = (long)Interop.libc.read(fd, (byte*)ptr, (IntPtr)len);
Debug.Assert(result <= len);
return result;
}, (IntPtr)(bufPtr + offset), count);
}
}
[SecurityCritical]
private unsafe void WriteCore(byte[] buffer, int offset, int count)
{
Debug.Assert(_handle != null, "_handle is null");
Debug.Assert(!_handle.IsClosed, "_handle is closed");
Debug.Assert(CanWrite, "can't write");
Debug.Assert(buffer != null, "buffer is null");
Debug.Assert(offset >= 0, "offset is negative");
Debug.Assert(count >= 0, "count is negative");
fixed (byte* bufPtr = buffer)
{
while (count > 0)
{
int bytesWritten = (int)SysCall(_handle, (fd, ptr, len) =>
{
long result = (long)Interop.libc.write(fd, (byte*)ptr, (IntPtr)len);
Debug.Assert(result <= len);
return result;
}, (IntPtr)(bufPtr + offset), count);
count -= bytesWritten;
offset += bytesWritten;
}
}
}
// Blocks until the other end of the pipe has read in all written buffer.
[SecurityCritical]
public void WaitForPipeDrain()
{
CheckWriteOperations();
if (!CanWrite)
{
throw __Error.GetWriteNotSupported();
}
throw new PlatformNotSupportedException(); // no mechanism for this on Unix
}
// Gets the transmission mode for the pipe. This is virtual so that subclassing types can
// override this in cases where only one mode is legal (such as anonymous pipes)
public virtual PipeTransmissionMode TransmissionMode
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
get
{
CheckPipePropertyOperations();
return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
}
}
// Gets the buffer size in the inbound direction for the pipe. This checks if pipe has read
// access. If that passes, call to GetNamedPipeInfo will succeed.
public virtual int InBufferSize
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
get
{
CheckPipePropertyOperations();
if (!CanRead)
{
throw new NotSupportedException(SR.NotSupported_UnreadableStream);
}
// On Linux this could be retrieved using F_GETPIPE_SZ with fcntl, but that's non-conforming
// and works only on recent versions of Linux. For now, we'll leave this as unsupported.
throw new PlatformNotSupportedException();
}
}
// Gets the buffer size in the outbound direction for the pipe. This uses cached version
// if it's an outbound only pipe because GetNamedPipeInfo requires read access to the pipe.
// However, returning cached is good fallback, especially if user specified a value in
// the ctor.
public virtual int OutBufferSize
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
get
{
CheckPipePropertyOperations();
if (!CanWrite)
{
throw new NotSupportedException(SR.NotSupported_UnwritableStream);
}
// See comments in inBufferSize
throw new PlatformNotSupportedException();
}
}
public virtual PipeTransmissionMode ReadMode
{
[SecurityCritical]
get
{
CheckPipePropertyOperations();
return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
}
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
set
{
CheckPipePropertyOperations();
if (value < PipeTransmissionMode.Byte || value > PipeTransmissionMode.Message)
{
throw new ArgumentOutOfRangeException("value", SR.ArgumentOutOfRange_TransmissionModeByteOrMsg);
}
if (value != PipeTransmissionMode.Byte) // Unix pipes are only byte-based, not message-based
{
throw new PlatformNotSupportedException();
}
// nop, since it's already the only valid value
}
}
// -----------------------------
// ---- PAL layer ends here ----
// -----------------------------
internal static Interop.libc.OpenFlags TranslateFlags(PipeDirection direction, PipeOptions options, HandleInheritability inheritability)
{
// Translate direction
Interop.libc.OpenFlags flags =
direction == PipeDirection.InOut ? Interop.libc.OpenFlags.O_RDWR :
direction == PipeDirection.Out ? Interop.libc.OpenFlags.O_WRONLY :
Interop.libc.OpenFlags.O_RDONLY;
// Translate options
if ((options & PipeOptions.WriteThrough) != 0)
{
flags |= Interop.libc.OpenFlags.O_SYNC;
}
// Translate inheritability.
if ((inheritability & HandleInheritability.Inheritable) == 0)
{
flags |= Interop.libc.OpenFlags.O_CLOEXEC;
}
// PipeOptions.Asynchronous is ignored, at least for now. Asynchronous processing
// is handling just by queueing a work item to do the work synchronously on a pool thread.
return flags;
}
/// <summary>
/// Helper for making system calls that involve the stream's file descriptor.
/// System calls are expected to return greather than or equal to zero on success,
/// and less than zero on failure. In the case of failure, errno is expected to
/// be set to the relevant error code.
/// </summary>
/// <param name="sysCall">A delegate that invokes the system call.</param>
/// <param name="arg1">The first argument to be passed to the system call, after the file descriptor.</param>
/// <param name="arg2">The second argument to be passed to the system call.</param>
/// <returns>The return value of the system call.</returns>
/// <remarks>
/// Arguments are expected to be passed via <paramref name="arg1"/> and <paramref name="arg2"/>
/// so as to avoid delegate and closure allocations at the call sites.
/// </remarks>
private static long SysCall(
SafePipeHandle handle,
Func<int, IntPtr, int, long> sysCall,
IntPtr arg1 = default(IntPtr), int arg2 = default(int))
{
bool gotRefOnHandle = false;
try
{
// Get the file descriptor from the handle. We increment the ref count to help
// ensure it's not closed out from under us.
handle.DangerousAddRef(ref gotRefOnHandle);
Debug.Assert(gotRefOnHandle);
int fd = (int)handle.DangerousGetHandle();
Debug.Assert(fd >= 0);
long result;
while (Interop.CheckIo(result = sysCall(fd, arg1, arg2))) ;
return result;
}
finally
{
if (gotRefOnHandle)
{
handle.DangerousRelease();
}
}
}
}
}
| |
using System;
using System.IO;
using System.Threading.Tasks;
using Godot;
using GodotTools.IdeMessaging;
using GodotTools.IdeMessaging.Requests;
using GodotTools.Internals;
namespace GodotTools.Ides
{
public sealed class GodotIdeManager : Node, ISerializationListener
{
private MessagingServer _messagingServer;
private MonoDevelop.Instance _monoDevelInstance;
private MonoDevelop.Instance _vsForMacInstance;
private MessagingServer GetRunningOrNewServer()
{
if (_messagingServer != null && !_messagingServer.IsDisposed)
return _messagingServer;
_messagingServer?.Dispose();
_messagingServer = new MessagingServer(OS.GetExecutablePath(), ProjectSettings.GlobalizePath(GodotSharpDirs.ResMetadataDir), new GodotLogger());
_ = _messagingServer.Listen();
return _messagingServer;
}
public override void _Ready()
{
_ = GetRunningOrNewServer();
}
public void OnBeforeSerialize()
{
}
public void OnAfterDeserialize()
{
_ = GetRunningOrNewServer();
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
if (disposing)
{
_messagingServer?.Dispose();
}
}
private string GetExternalEditorIdentity(ExternalEditorId editorId)
{
// Manually convert to string to avoid breaking compatibility in case we rename the enum fields.
switch (editorId)
{
case ExternalEditorId.None:
return null;
case ExternalEditorId.VisualStudio:
return "VisualStudio";
case ExternalEditorId.VsCode:
return "VisualStudioCode";
case ExternalEditorId.Rider:
return "Rider";
case ExternalEditorId.VisualStudioForMac:
return "VisualStudioForMac";
case ExternalEditorId.MonoDevelop:
return "MonoDevelop";
default:
throw new NotImplementedException();
}
}
public async Task<EditorPick?> LaunchIdeAsync(int millisecondsTimeout = 10000)
{
var editorId = (ExternalEditorId)GodotSharpEditor.Instance.GetEditorInterface()
.GetEditorSettings().GetSetting("mono/editor/external_editor");
string editorIdentity = GetExternalEditorIdentity(editorId);
var runningServer = GetRunningOrNewServer();
if (runningServer.IsAnyConnected(editorIdentity))
return new EditorPick(editorIdentity);
LaunchIde(editorId, editorIdentity);
var timeoutTask = Task.Delay(millisecondsTimeout);
var completedTask = await Task.WhenAny(timeoutTask, runningServer.AwaitClientConnected(editorIdentity));
if (completedTask != timeoutTask)
return new EditorPick(editorIdentity);
return null;
}
private void LaunchIde(ExternalEditorId editorId, string editorIdentity)
{
switch (editorId)
{
case ExternalEditorId.None:
case ExternalEditorId.VisualStudio:
case ExternalEditorId.VsCode:
case ExternalEditorId.Rider:
throw new NotSupportedException();
case ExternalEditorId.VisualStudioForMac:
goto case ExternalEditorId.MonoDevelop;
case ExternalEditorId.MonoDevelop:
{
MonoDevelop.Instance GetMonoDevelopInstance(string solutionPath)
{
if (Utils.OS.IsMacOS && editorId == ExternalEditorId.VisualStudioForMac)
{
_vsForMacInstance = (_vsForMacInstance?.IsDisposed ?? true ? null : _vsForMacInstance) ??
new MonoDevelop.Instance(solutionPath, MonoDevelop.EditorId.VisualStudioForMac);
return _vsForMacInstance;
}
_monoDevelInstance = (_monoDevelInstance?.IsDisposed ?? true ? null : _monoDevelInstance) ??
new MonoDevelop.Instance(solutionPath, MonoDevelop.EditorId.MonoDevelop);
return _monoDevelInstance;
}
try
{
var instance = GetMonoDevelopInstance(GodotSharpDirs.ProjectSlnPath);
if (instance.IsRunning && !GetRunningOrNewServer().IsAnyConnected(editorIdentity))
{
// After launch we wait up to 30 seconds for the IDE to connect to our messaging server.
var waitAfterLaunch = TimeSpan.FromSeconds(30);
var timeSinceLaunch = DateTime.Now - instance.LaunchTime;
if (timeSinceLaunch > waitAfterLaunch)
{
instance.Dispose();
instance.Execute();
}
}
else if (!instance.IsRunning)
{
instance.Execute();
}
}
catch (FileNotFoundException)
{
string editorName = editorId == ExternalEditorId.VisualStudioForMac ? "Visual Studio" : "MonoDevelop";
GD.PushError($"Cannot find code editor: {editorName}");
}
break;
}
default:
throw new ArgumentOutOfRangeException();
}
}
public readonly struct EditorPick
{
private readonly string _identity;
public EditorPick(string identity)
{
_identity = identity;
}
public bool IsAnyConnected() =>
GodotSharpEditor.Instance.GodotIdeManager.GetRunningOrNewServer().IsAnyConnected(_identity);
private void SendRequest<TResponse>(Request request)
where TResponse : Response, new()
{
// Logs an error if no client is connected with the specified identity
GodotSharpEditor.Instance.GodotIdeManager
.GetRunningOrNewServer()
.BroadcastRequest<TResponse>(_identity, request);
}
public void SendOpenFile(string file)
{
SendRequest<OpenFileResponse>(new OpenFileRequest {File = file});
}
public void SendOpenFile(string file, int line)
{
SendRequest<OpenFileResponse>(new OpenFileRequest {File = file, Line = line});
}
public void SendOpenFile(string file, int line, int column)
{
SendRequest<OpenFileResponse>(new OpenFileRequest {File = file, Line = line, Column = column});
}
}
public EditorPick PickEditor(ExternalEditorId editorId) => new EditorPick(GetExternalEditorIdentity(editorId));
private class GodotLogger : ILogger
{
public void LogDebug(string message)
{
if (OS.IsStdoutVerbose())
Console.WriteLine(message);
}
public void LogInfo(string message)
{
if (OS.IsStdoutVerbose())
Console.WriteLine(message);
}
public void LogWarning(string message)
{
GD.PushWarning(message);
}
public void LogError(string message)
{
GD.PushError(message);
}
public void LogError(string message, Exception e)
{
GD.PushError(message + "\n" + e);
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Win32.SafeHandles;
internal static partial class Interop
{
internal static class OpenSsl
{
#region structures
[StructLayout(LayoutKind.Sequential)]
private struct SslContext
{
internal IntPtr sslPtr;
internal IntPtr readBioPtr;
internal IntPtr writeBioPtr;
internal bool isServer;
}
#endregion
#region internal methods
//TODO (Issue #3362) Set remote certificate options
internal static IntPtr AllocateSslContext(long options, SafeX509Handle certHandle, SafeEvpPKeyHandle certKeyHandle, bool isServer, bool remoteCertRequired)
{
SslContext sslContext = new SslContext
{
isServer = isServer,
};
try
{
IntPtr method = GetSslMethod(isServer, options);
IntPtr contextPtr = libssl.SSL_CTX_new(method);
if (IntPtr.Zero == contextPtr)
{
throw CreateSslException("Failed to allocate SSL/TLS context");
}
libssl.SSL_CTX_ctrl(contextPtr, libssl.SSL_CTRL_OPTIONS, options, IntPtr.Zero);
libssl.SSL_CTX_set_quiet_shutdown(contextPtr, 1);
if (certHandle != null && certKeyHandle != null)
{
SetSslCertificate(contextPtr, certHandle, certKeyHandle);
}
sslContext.sslPtr = libssl.SSL_new(contextPtr);
libssl.SSL_CTX_free(contextPtr);
if (IntPtr.Zero == sslContext.sslPtr)
{
throw CreateSslException("Failed to create SSSL object from SSL context");
}
IntPtr memMethod = libcrypto.BIO_s_mem();
if (IntPtr.Zero == memMethod)
{
throw CreateSslException("Failed to return memory BIO method function");
}
sslContext.readBioPtr = libssl.BIO_new(memMethod);
sslContext.writeBioPtr = libssl.BIO_new(memMethod);
if ((IntPtr.Zero == sslContext.readBioPtr) || (IntPtr.Zero == sslContext.writeBioPtr))
{
FreeBio(sslContext);
throw CreateSslException("Failed to retun new BIO for a given method type");
}
if (isServer)
{
libssl.SSL_set_accept_state(sslContext.sslPtr);
}
else
{
libssl.SSL_set_connect_state(sslContext.sslPtr);
}
libssl.SSL_set_bio(sslContext.sslPtr, sslContext.readBioPtr, sslContext.writeBioPtr);
}
catch
{
Disconnect(sslContext.sslPtr);
throw;
}
IntPtr sslContextPtr = Marshal.AllocHGlobal(Marshal.SizeOf<SslContext>());
Marshal.StructureToPtr(sslContext, sslContextPtr, false);
return sslContextPtr;
}
internal static bool DoSslHandshake(IntPtr sslContextPtr, IntPtr recvPtr, int recvCount, out IntPtr sendPtr, out int sendCount)
{
sendPtr = IntPtr.Zero;
sendCount = 0;
SslContext context = Marshal.PtrToStructure<SslContext>(sslContextPtr);
bool isServer = context.isServer;
if ((IntPtr.Zero != recvPtr) && (recvCount > 0))
{
BioWrite(context.readBioPtr, recvPtr, recvCount);
}
int retVal = libssl.SSL_do_handshake(context.sslPtr);
if ((retVal == 1) && !isServer)
{
return true;
}
int error;
if (retVal != 1)
{
error = GetSslError(context.sslPtr, retVal);
if ((retVal != -1) || (error != libssl.SslErrorCode.SSL_ERROR_WANT_READ))
{
throw CreateSslException(context.sslPtr, "SSL Handshake failed: ", retVal);
}
}
sendCount = libssl.BIO_ctrl_pending(context.writeBioPtr);
if (sendCount > 0)
{
sendPtr = Marshal.AllocHGlobal(sendCount);
sendCount = BioRead(context.writeBioPtr, sendPtr, sendCount);
if (sendCount <= 0)
{
error = sendCount;
Marshal.FreeHGlobal(sendPtr);
sendPtr = IntPtr.Zero;
sendCount = 0;
throw CreateSslException(context.sslPtr, "Read Bio failed: ", error);
}
}
return ((libssl.SSL_state(context.sslPtr) == (int)libssl.SslState.SSL_ST_OK));
}
internal static int Encrypt(IntPtr handlePtr, IntPtr buffer, int offset, int count, int bufferCapacity)
{
SslContext context = Marshal.PtrToStructure<SslContext>(handlePtr);
var retVal = libssl.SSL_write(context.sslPtr, new IntPtr(buffer.ToInt64() + offset), count);
if (retVal != count)
{
int error = GetSslError(context.sslPtr, retVal);
if (libssl.SslErrorCode.SSL_ERROR_ZERO_RETURN == error)
{
return 0; // indicate end-of-file
}
throw CreateSslException("OpenSsl::Encrypt failed");
}
int capacityNeeded = libssl.BIO_ctrl_pending(context.writeBioPtr);
if (retVal == count)
{
if (capacityNeeded > bufferCapacity)
{
throw CreateSslException("OpenSsl::Encrypt capacity needed is more than buffer capacity. capacityNeeded = " + capacityNeeded + "," + "bufferCapacity = " + bufferCapacity);
}
IntPtr outBufferPtr = buffer;
retVal = BioRead(context.writeBioPtr, outBufferPtr, capacityNeeded);
if (retVal < 0)
{
throw CreateSslException("OpenSsl::Encrypt failed");
}
}
return retVal;
}
internal static int Decrypt(IntPtr sslContextPtr, IntPtr outBufferPtr, int count)
{
SslContext context = Marshal.PtrToStructure<SslContext>(sslContextPtr);
int retVal = BioWrite(context.readBioPtr, outBufferPtr, count);
if (retVal == count)
{
retVal = libssl.SSL_read(context.sslPtr, outBufferPtr, retVal);
if (retVal > 0)
{
count = retVal;
}
}
if (retVal != count)
{
int error = GetSslError(context.sslPtr, retVal);
if (libssl.SslErrorCode.SSL_ERROR_ZERO_RETURN == error)
{
return 0; // indicate end-of-file
}
throw CreateSslException("OpenSsl::Decrypt failed");
}
return retVal;
}
internal static IntPtr GetPeerCertificate(IntPtr sslContextPtr)
{
SslContext context = Marshal.PtrToStructure<SslContext>(sslContextPtr);
IntPtr sslPtr = context.sslPtr;
IntPtr certPtr = libssl.SSL_get_peer_certificate(sslPtr);
return certPtr;
}
internal static libssl.SSL_CIPHER GetConnectionInfo(IntPtr sslContextPtr)
{
SslContext context = Marshal.PtrToStructure<SslContext>(sslContextPtr);
IntPtr sslPtr = context.sslPtr;
IntPtr cipherPtr = libssl.SSL_get_current_cipher(sslPtr);
var cipher = new libssl.SSL_CIPHER();
if (IntPtr.Zero != cipherPtr)
{
cipher = Marshal.PtrToStructure<libssl.SSL_CIPHER>(cipherPtr);
}
return cipher;
}
internal static void FreeSslContext(IntPtr sslContextPtr)
{
if (IntPtr.Zero == sslContextPtr)
{
return;
}
SslContext context = Marshal.PtrToStructure<SslContext>(sslContextPtr);
Disconnect(context.sslPtr);
Marshal.FreeHGlobal(sslContextPtr);
sslContextPtr = IntPtr.Zero;
}
#endregion
#region private methods
private static void FreeBio(SslContext sslContext)
{
if (IntPtr.Zero != sslContext.readBioPtr)
{
Interop.libcrypto.BIO_free(sslContext.readBioPtr);
}
if (IntPtr.Zero != sslContext.writeBioPtr)
{
Interop.libcrypto.BIO_free(sslContext.writeBioPtr);
}
}
private static IntPtr GetSslMethod(bool isServer, long options)
{
long protocolMask = libssl.Options.SSL_OP_NO_SSLv2 | libssl.Options.SSL_OP_NO_SSLv3 |
libssl.Options.SSL_OP_NO_TLSv1 | libssl.Options.SSL_OP_NO_TLSv1_1 |
libssl.Options.SSL_OP_NO_TLSv1_2;
options &= protocolMask;
Debug.Assert(options != protocolMask, "All protocols are disabled");
bool noSsl2 = (options & libssl.Options.SSL_OP_NO_SSLv2) != 0;
bool noSsl3 = (options & libssl.Options.SSL_OP_NO_SSLv3) != 0;
bool noTls10 = (options & libssl.Options.SSL_OP_NO_TLSv1) != 0;
bool noTls11 = (options & libssl.Options.SSL_OP_NO_TLSv1_1) != 0;
bool noTls12 = (options & libssl.Options.SSL_OP_NO_TLSv1_2) != 0;
IntPtr method;
if (noSsl2 && noSsl3 && noTls11 && noTls12)
{
method = libssl.TLSv1_method();
}
else if (noSsl2 && noSsl3 && noTls10 && noTls12)
{
method = libssl.TLSv1_1_method();
}
else if (noSsl2 && noSsl3 && noTls10 && noTls11)
{
method = libssl.TLSv1_2_method();
}
else if (noSsl2 && noTls10 && noTls11 && noTls12)
{
method = libssl.SSLv3_method();
}
else
{
method = libssl.SSLv23_method();
}
if (IntPtr.Zero == method)
{
throw CreateSslException("Failed to get SSL method");
}
return method;
}
private static void Disconnect(IntPtr sslPtr)
{
if (IntPtr.Zero != sslPtr)
{
int retVal = libssl.SSL_shutdown(sslPtr);
if (retVal < 0)
{
//TODO (Issue #3362) check this error
libssl.SSL_get_error(sslPtr, retVal);
}
libssl.SSL_free(sslPtr);
}
}
//TODO (Issue #3362) should we check Bio should retry?
private static int BioRead(IntPtr BioPtr, IntPtr buffer, int count)
{
int bytes = libssl.BIO_read(BioPtr, buffer, count);
if (bytes != count)
{
throw CreateSslException("Failed in Read BIO");
}
return bytes;
}
//TODO (Issue #3362) should we check Bio should retry?
private static int BioWrite(IntPtr BioPtr, IntPtr buffer, int count)
{
int bytes = libssl.BIO_write(BioPtr, buffer, count);
if (bytes != count)
{
throw CreateSslException("Failed in Write BIO");
}
return bytes;
}
private static int GetSslError(IntPtr sslPtr, int result)
{
int retVal = libssl.SSL_get_error(sslPtr, result);
if (retVal == libssl.SslErrorCode.SSL_ERROR_SYSCALL)
{
retVal = (int)libssl.ERR_get_error();
}
return retVal;
}
private static void SetSslCertificate(IntPtr contextPtr, SafeX509Handle certPtr, SafeEvpPKeyHandle keyPtr)
{
Debug.Assert(certPtr != null && !certPtr.IsInvalid, "certPtr != null && !certPtr.IsInvalid");
Debug.Assert(keyPtr != null && !keyPtr.IsInvalid, "keyPtr != null && !keyPtr.IsInvalid");
int retVal = libssl.SSL_CTX_use_certificate(contextPtr, certPtr);
if (1 != retVal)
{
throw CreateSslException("Failed to use SSL certificate");
}
retVal = libssl.SSL_CTX_use_PrivateKey(contextPtr, keyPtr);
if (1 != retVal)
{
throw CreateSslException("Failed to use SSL certificate private key");
}
//check private key
retVal = libssl.SSL_CTX_check_private_key(contextPtr);
if (1 != retVal)
{
throw CreateSslException("Certificate pivate key check failed");
}
}
private static SslException CreateSslException(string message)
{
ulong errorVal = libssl.ERR_get_error();
string msg = message + ": " + Marshal.PtrToStringAnsi(libssl.ERR_reason_error_string(errorVal));
return new SslException(msg, (int)errorVal);
}
private static SslException CreateSslException(string message, int error)
{
if (error == libssl.SslErrorCode.SSL_ERROR_SYSCALL)
{
return new SslException(message, error);
}
else if (error == libssl.SslErrorCode.SSL_ERROR_SSL)
{
Exception innerEx = Interop.libcrypto.CreateOpenSslCryptographicException();
return new SslException(innerEx.Message, innerEx);
}
else
{
return new SslException(message + ": " + error, error);
}
}
private static SslException CreateSslException(IntPtr sslPtr, string message, int error)
{
return CreateSslException(message, libssl.SSL_get_error(sslPtr, error));
}
private sealed class SslException : Exception
{
public SslException(string inputMessage, int error): base(inputMessage)
{
HResult = error;
}
public SslException(string inputMessage, Exception ex): base(inputMessage, ex)
{
}
}
#endregion
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="TcpSpec.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Akka.Actor;
using Akka.IO;
using Akka.Pattern;
using Akka.Streams.Dsl;
using Akka.Streams.TestKit;
using Akka.Streams.TestKit.Tests;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;
using Tcp = Akka.Streams.Dsl.Tcp;
namespace Akka.Streams.Tests.IO
{
public class TcpSpec : TcpHelper
{
public TcpSpec(ITestOutputHelper helper) : base("akka.stream.materializer.subscription-timeout.timeout = 2s", helper)
{
}
[Fact(Skip="Fix me")]
public void Outgoing_TCP_stream_must_work_in_the_happy_case()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] {1, 2, 3, 4, 5});
var server = new Server(this);
var tcpReadProbe = new TcpReadProbe(this);
var tcpWriteProbe = new TcpWriteProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
ValidateServerClientCommunication(testData, serverConnection, tcpReadProbe, tcpWriteProbe);
tcpWriteProbe.Close();
tcpReadProbe.Close();
server.Close();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_be_able_to_write_a_sequence_of_ByteStrings()
{
var server = new Server(this);
var testInput = Enumerable.Range(0, 256).Select(i => ByteString.Create(new[] {Convert.ToByte(i)}));
var expectedOutput = ByteString.Create(Enumerable.Range(0, 256).Select(Convert.ToByte).ToArray());
Source.From(testInput)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.Ignore<ByteString>())
.Run(Materializer);
var serverConnection = server.WaitAccept();
serverConnection.Read(256);
serverConnection.WaitRead().ShouldBeEquivalentTo(expectedOutput);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_be_able_to_read_a_sequence_of_ByteStrings()
{
var server = new Server(this);
var testInput = Enumerable.Range(0, 255).Select(i => ByteString.Create(new[] { Convert.ToByte(i) }));
var expectedOutput = ByteString.Create(Enumerable.Range(0, 255).Select(Convert.ToByte).ToArray());
var idle = new TcpWriteProbe(this); //Just register an idle upstream
var resultFuture =
Source.FromPublisher(idle.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.RunAggregate(ByteString.Empty, (acc, input) => acc + input, Materializer);
var serverConnection = server.WaitAccept();
foreach (var input in testInput)
serverConnection.Write(input);
serverConnection.ConfirmedClose();
resultFuture.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
resultFuture.Result.ShouldBeEquivalentTo(expectedOutput);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_work_when_client_closes_write_then_remote_closes_write()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] { 1, 2, 3, 4, 5 });
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
// Client can still write
tcpWriteProbe.Write(testData);
serverConnection.Read(5);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
// Close client side write
tcpWriteProbe.Close();
serverConnection.ExpectClosed(Akka.IO.Tcp.PeerClosed.Instance);
// Server can still write
serverConnection.Write(testData);
tcpReadProbe.Read(5).ShouldBeEquivalentTo(testData);
// Close server side write
serverConnection.ConfirmedClose();
tcpReadProbe.SubscriberProbe.ExpectComplete();
serverConnection.ExpectClosed(Akka.IO.Tcp.ConfirmedClosed.Instance);
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_work_when_remote_closes_write_then_client_closes_write()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] {1, 2, 3, 4, 5});
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
// Server can still write
serverConnection.Write(testData);
tcpReadProbe.Read(5).ShouldBeEquivalentTo(testData);
// Close server side write
serverConnection.ConfirmedClose();
tcpReadProbe.SubscriberProbe.ExpectComplete();
// Client can still write
tcpWriteProbe.Write(testData);
serverConnection.Read(5);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
// Close clint side write
tcpWriteProbe.Close();
serverConnection.ExpectClosed(Akka.IO.Tcp.ConfirmedClosed.Instance);
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_work_when_client_closes_read_then_client_closes_write()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] { 1, 2, 3, 4, 5 });
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
// Server can still write
serverConnection.Write(testData);
tcpReadProbe.Read(5).ShouldBeEquivalentTo(testData);
// Close client side read
tcpReadProbe.TcpReadSubscription.Value.Cancel();
// Client can still write
tcpWriteProbe.Write(testData);
serverConnection.Read(5);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
// Close client side write
tcpWriteProbe.Close();
// Need a write on the server side to detect the close event
AwaitAssert(() =>
{
serverConnection.Write(testData);
serverConnection.ExpectClosed(c=>c.IsErrorClosed, TimeSpan.FromMilliseconds(500));
}, TimeSpan.FromSeconds(5));
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_work_when_client_closes_read_then_server_closes_write_then_client_closes_write()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] { 1, 2, 3, 4, 5 });
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
// Server can still write
serverConnection.Write(testData);
tcpReadProbe.Read(5).ShouldBeEquivalentTo(testData);
// Close client side read
tcpReadProbe.TcpReadSubscription.Value.Cancel();
// Client can still write
tcpWriteProbe.Write(testData);
serverConnection.Read(5);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
serverConnection.ConfirmedClose();
// Close clint side write
tcpWriteProbe.Close();
serverConnection.ExpectClosed(Akka.IO.Tcp.ConfirmedClosed.Instance);
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_shut_everything_down_if_client_signals_error()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] { 1, 2, 3, 4, 5 });
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
// Server can still write
serverConnection.Write(testData);
tcpReadProbe.Read(5).ShouldBeEquivalentTo(testData);
// Client can still write
tcpWriteProbe.Write(testData);
serverConnection.Read(5);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
// Cause error
tcpWriteProbe.TcpWriteSubscription.Value.SendError(new IllegalStateException("test"));
tcpReadProbe.SubscriberProbe.ExpectError();
serverConnection.ExpectClosed(c=>c.IsErrorClosed);
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_shut_everything_down_if_client_signals_error_after_remote_has_closed_write()
{
this.AssertAllStagesStopped(() =>
{
var testData = ByteString.Create(new byte[] { 1, 2, 3, 4, 5 });
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
// Server can still write
serverConnection.Write(testData);
tcpReadProbe.Read(5).ShouldBeEquivalentTo(testData);
// Close remote side write
serverConnection.ConfirmedClose();
tcpReadProbe.SubscriberProbe.ExpectComplete();
// Client can still write
tcpWriteProbe.Write(testData);
serverConnection.Read(5);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
tcpWriteProbe.TcpWriteSubscription.Value.SendError(new IllegalStateException("test"));
serverConnection.ExpectClosed(c => c.IsErrorClosed);
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_shut_down_both_streams_when_connection_is_aborted_remotely()
{
this.AssertAllStagesStopped(() =>
{
// Client gets a PeerClosed event and does not know that the write side is also closed
var server = new Server(this);
var tcpWriteProbe = new TcpWriteProbe(this);
var tcpReadProbe = new TcpReadProbe(this);
Source.FromPublisher(tcpWriteProbe.PublisherProbe)
.Via(Sys.TcpStream().OutgoingConnection(server.Address))
.To(Sink.FromSubscriber(tcpReadProbe.SubscriberProbe))
.Run(Materializer);
var serverConnection = server.WaitAccept();
serverConnection.Abort();
tcpReadProbe.SubscriberProbe.ExpectSubscriptionAndError();
tcpWriteProbe.TcpWriteSubscription.Value.ExpectCancellation();
serverConnection.ExpectTerminated();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_materialize_correctly_when_used_in_multiple_flows()
{
var testData = ByteString.Create(new byte[] { 1, 2, 3, 4, 5 });
var server = new Server(this);
var tcpWriteProbe1 = new TcpWriteProbe(this);
var tcpReadProbe1 = new TcpReadProbe(this);
var tcpWriteProbe2 = new TcpWriteProbe(this);
var tcpReadProbe2 = new TcpReadProbe(this);
var outgoingConnection = new Tcp().CreateExtension(Sys as ExtendedActorSystem).OutgoingConnection(server.Address);
var conn1F = Source.FromPublisher(tcpWriteProbe1.PublisherProbe)
.ViaMaterialized(outgoingConnection, Keep.Both)
.To(Sink.FromSubscriber(tcpReadProbe1.SubscriberProbe))
.Run(Materializer).Item2;
var serverConnection1 = server.WaitAccept();
var conn2F = Source.FromPublisher(tcpWriteProbe2.PublisherProbe)
.ViaMaterialized(outgoingConnection, Keep.Both)
.To(Sink.FromSubscriber(tcpReadProbe2.SubscriberProbe))
.Run(Materializer).Item2;
var serverConnection2 = server.WaitAccept();
ValidateServerClientCommunication(testData, serverConnection1, tcpReadProbe1, tcpWriteProbe1);
ValidateServerClientCommunication(testData, serverConnection2, tcpReadProbe2, tcpWriteProbe2);
conn1F.Wait(TimeSpan.FromSeconds(1)).Should().BeTrue();
conn2F.Wait(TimeSpan.FromSeconds(1)).Should().BeTrue();
var conn1 = conn1F.Result;
var conn2 = conn2F.Result;
// Since we have already communicated over the connections we can have short timeouts for the tasks
((IPEndPoint) conn1.RemoteAddress).Port.Should().Be(((IPEndPoint) server.Address).Port);
((IPEndPoint) conn2.RemoteAddress).Port.Should().Be(((IPEndPoint) server.Address).Port);
((IPEndPoint) conn1.LocalAddress).Port.Should().NotBe(((IPEndPoint) conn2.LocalAddress).Port);
tcpWriteProbe1.Close();
tcpReadProbe1.Close();
server.Close();
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_properly_full_close_if_requested()
{
this.AssertAllStagesStopped(() =>
{
var serverAddress = TestUtils.TemporaryServerAddress();
var writeButIgnoreRead = Flow.FromSinkAndSource(Sink.Ignore<ByteString>(),
Source.Single(ByteString.FromString("Early response")), Keep.Right);
var task =
Sys.TcpStream()
.Bind(serverAddress.Address.ToString(), serverAddress.Port, halfClose: false)
.ToMaterialized(
Sink.ForEach<Tcp.IncomingConnection>(conn => conn.Flow.Join(writeButIgnoreRead).Run(Materializer)),
Keep.Left)
.Run(Materializer);
task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
var binding = task.Result;
var t = Source.Maybe<ByteString>()
.Via(Sys.TcpStream().OutgoingConnection(serverAddress.Address.ToString(), serverAddress.Port))
.ToMaterialized(Sink.Aggregate<ByteString, ByteString>(ByteString.Empty, (s, s1) => s + s1), Keep.Both)
.Run(Materializer);
var promise = t.Item1;
var result = t.Item2;
result.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
result.Result.ShouldBeEquivalentTo(ByteString.FromString("Early response"));
promise.SetResult(null); // close client upstream, no more data
binding.Unbind();
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_Echo_should_work_even_if_server_is_in_full_close_mode()
{
var serverAddress = TestUtils.TemporaryServerAddress();
var task = Sys.TcpStream()
.Bind(serverAddress.Address.ToString(), serverAddress.Port, halfClose: false)
.ToMaterialized(
Sink.ForEach<Tcp.IncomingConnection>(conn => conn.Flow.Join(Flow.Create<ByteString>())),
Keep.Left)
.Run(Materializer);
task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
var binding = task.Result;
var result = Source.From(Enumerable.Repeat(0, 1000)
.Select(i => ByteString.Create(new[] {Convert.ToByte(i)})))
.Via(Sys.TcpStream().OutgoingConnection(serverAddress))
.RunAggregate(0, (i, s) => i + s.Count, Materializer);
result.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
result.Result.Should().Be(1000);
binding.Unbind();
}
[Fact(Skip = "Fix me")]
public void Outgoing_TCP_stream_must_handle_when_connection_actor_terminates_unexpectedly()
{
var system2 = ActorSystem.Create("system2");
var mat2 = ActorMaterializer.Create(system2);
var serverAddress = TestUtils.TemporaryServerAddress();
var binding = system2.TcpStream()
.BindAndHandle(Flow.Create<ByteString>(), mat2, serverAddress.Address.ToString(), serverAddress.Port);
var result = Source.Maybe<ByteString>()
.Via(system2.TcpStream().OutgoingConnection(serverAddress))
.RunAggregate(0, (i, s) => i + s.Count, mat2);
// Getting rid of existing connection actors by using a blunt instrument
system2.ActorSelection(system2.Tcp().Path/"selectors"/"$b"/"*").Tell(Kill.Instance);
result.Invoking(r => r.Wait(TimeSpan.FromSeconds(3))).ShouldThrow<StreamTcpException>();
binding.Result.Unbind().Wait();
system2.Terminate().Wait();
}
private void ValidateServerClientCommunication(ByteString testData, ServerConnection serverConnection, TcpReadProbe readProbe, TcpWriteProbe writeProbe)
{
serverConnection.Write(testData);
serverConnection.Read(5);
readProbe.Read(5).ShouldBeEquivalentTo(testData);
writeProbe.Write(testData);
serverConnection.WaitRead().ShouldBeEquivalentTo(testData);
}
private Sink<Tcp.IncomingConnection, Task> EchoHandler() =>
Sink.ForEach<Tcp.IncomingConnection>(c => c.Flow.Join(Flow.Create<ByteString>()).Run(Materializer));
[Fact(Skip = "Fix me")]
public void Tcp_listen_stream_must_be_able_to_implement_echo()
{
var serverAddress = TestUtils.TemporaryServerAddress();
var t = Sys.TcpStream()
.Bind(serverAddress.Address.ToString(), serverAddress.Port)
.ToMaterialized(EchoHandler(), Keep.Both)
.Run(Materializer);
var bindingFuture = t.Item1;
var echoServerFinish = t.Item2;
// make sure that the server has bound to the socket
bindingFuture.Wait(100).Should().BeTrue();
var binding = bindingFuture.Result;
var testInput = Enumerable.Range(0, 255).Select(i => ByteString.Create(new[] {Convert.ToByte(i)})).ToList();
var expectedOutput = testInput.Aggregate(ByteString.Empty, (agg, b) => agg.Concat(b));
var resultFuture =
Source.From(testInput)
.Via(Sys.TcpStream().OutgoingConnection(serverAddress))
.RunAggregate(ByteString.Empty, (agg, b) => agg.Concat(b), Materializer);
resultFuture.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
resultFuture.Result.ShouldBeEquivalentTo(expectedOutput);
binding.Unbind().Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
echoServerFinish.Wait(TimeSpan.FromSeconds(1)).Should().BeTrue();
}
[Fact(Skip = "Fix me")]
public void Tcp_listen_stream_must_work_with_a_chain_of_echoes()
{
var serverAddress = TestUtils.TemporaryServerAddress();
var t = Sys.TcpStream()
.Bind(serverAddress.Address.ToString(), serverAddress.Port)
.ToMaterialized(EchoHandler(), Keep.Both)
.Run(Materializer);
var bindingFuture = t.Item1;
var echoServerFinish = t.Item2;
// make sure that the server has bound to the socket
bindingFuture.Wait(100).Should().BeTrue();
var binding = bindingFuture.Result;
var echoConnection = Sys.TcpStream().OutgoingConnection(serverAddress);
var testInput = Enumerable.Range(0, 255).Select(i => ByteString.Create(new[] { Convert.ToByte(i) })).ToList();
var expectedOutput = testInput.Aggregate(ByteString.Empty, (agg, b) => agg.Concat(b));
var resultFuture = Source.From(testInput)
.Via(echoConnection) // The echoConnection is reusable
.Via(echoConnection)
.Via(echoConnection)
.Via(echoConnection)
.RunAggregate(ByteString.Empty, (agg, b) => agg.Concat(b), Materializer);
resultFuture.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
resultFuture.Result.ShouldBeEquivalentTo(expectedOutput);
binding.Unbind().Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
echoServerFinish.Wait(TimeSpan.FromSeconds(1)).Should().BeTrue();
}
[Fact(Skip = "On Windows unbinding is not immediate")]
public void Tcp_listen_stream_must_bind_and_unbind_correctly()
{
EventFilter.Exception<BindFailedException>().Expect(2, () =>
{
// if (Helpers.isWindows) {
// info("On Windows unbinding is not immediate")
// pending
//}
//val address = temporaryServerAddress()
//val probe1 = TestSubscriber.manualProbe[Tcp.IncomingConnection]()
//val bind = Tcp(system).bind(address.getHostName, address.getPort) // TODO getHostString in Java7
//// Bind succeeded, we have a local address
//val binding1 = Await.result(bind.to(Sink.fromSubscriber(probe1)).run(), 3.second)
//probe1.expectSubscription()
//val probe2 = TestSubscriber.manualProbe[Tcp.IncomingConnection]()
//val binding2F = bind.to(Sink.fromSubscriber(probe2)).run()
//probe2.expectSubscriptionAndError(BindFailedException)
//val probe3 = TestSubscriber.manualProbe[Tcp.IncomingConnection]()
//val binding3F = bind.to(Sink.fromSubscriber(probe3)).run()
//probe3.expectSubscriptionAndError()
//a[BindFailedException] shouldBe thrownBy { Await.result(binding2F, 1.second) }
//a[BindFailedException] shouldBe thrownBy { Await.result(binding3F, 1.second) }
//// Now unbind first
//Await.result(binding1.unbind(), 1.second)
//probe1.expectComplete()
//val probe4 = TestSubscriber.manualProbe[Tcp.IncomingConnection]()
//// Bind succeeded, we have a local address
//val binding4 = Await.result(bind.to(Sink.fromSubscriber(probe4)).run(), 3.second)
//probe4.expectSubscription()
//// clean up
//Await.result(binding4.unbind(), 1.second)
});
}
[Fact(Skip = "Fix me")]
public void Tcp_listen_stream_must_not_shut_down_connections_after_the_connection_stream_cacelled()
{
this.AssertAllStagesStopped(() =>
{
var serverAddress = TestUtils.TemporaryServerAddress();
Sys.TcpStream()
.Bind(serverAddress.Address.ToString(), serverAddress.Port)
.Take(1).RunForeach(c =>
{
Thread.Sleep(1000);
c.Flow.Join(Flow.Create<ByteString>()).Run(Materializer);
}, Materializer);
var total = Source.From(
Enumerable.Range(0, 1000).Select(_ => ByteString.Create(new byte[] {0})))
.Via(Sys.TcpStream().OutgoingConnection(serverAddress))
.RunAggregate(0, (i, s) => i + s.Count, Materializer);
total.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
total.Result.Should().Be(1000);
}, Materializer);
}
[Fact(Skip = "Fix me")]
public void Tcp_listen_stream_must_shut_down_properly_even_if_some_accepted_connection_Flows_have_not_been_subscribed_to ()
{
this.AssertAllStagesStopped(() =>
{
var serverAddress = TestUtils.TemporaryServerAddress();
var firstClientConnected = new TaskCompletionSource<NotUsed>();
var takeTwoAndDropSecond = Flow.Create<Tcp.IncomingConnection>().Select(c =>
{
firstClientConnected.TrySetResult(NotUsed.Instance);
return c;
}).Grouped(2).Take(1).Select(e => e.First());
Sys.TcpStream()
.Bind(serverAddress.Address.ToString(), serverAddress.Port)
.Via(takeTwoAndDropSecond)
.RunForeach(c => c.Flow.Join(Flow.Create<ByteString>()).Run(Materializer), Materializer);
var folder = Source.From(Enumerable.Range(0, 100).Select(_ => ByteString.Create(new byte[] {0})))
.Via(Sys.TcpStream().OutgoingConnection(serverAddress))
.Aggregate(0, (i, s) => i + s.Count)
.ToMaterialized(Sink.First<int>(), Keep.Right);
var total = folder.Run(Materializer);
firstClientConnected.Task.Wait(TimeSpan.FromSeconds(2)).Should().BeTrue();
var rejected = folder.Run(Materializer);
total.Wait(TimeSpan.FromSeconds(10)).Should().BeTrue();
total.Result.Should().Be(100);
rejected.Wait(TimeSpan.FromSeconds(5)).Should().BeTrue();
rejected.Exception.Flatten().InnerExceptions.Any(e => e is StreamTcpException).Should().BeTrue();
}, Materializer);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using TokenStream = Lucene.Net.Analysis.TokenStream;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using Parameter = Lucene.Net.Util.Parameter;
using StringHelper = Lucene.Net.Util.StringHelper;
namespace Lucene.Net.Documents
{
/// <summary>A field is a section of a Document. Each field has two parts, a name and a
/// value. Values may be free text, provided as a String or as a Reader, or they
/// may be atomic keywords, which are not further processed. Such keywords may
/// be used to represent dates, urls, etc. Fields are optionally stored in the
/// index, so that they may be returned with hits on the document.
/// </summary>
[Serializable]
public sealed class Field:AbstractField, Fieldable
{
/// <summary>Specifies whether and how a field should be stored. </summary>
[Serializable]
public sealed class Store:Parameter
{
internal Store(System.String name):base(name)
{
}
/// <summary>Store the original field value in the index in a compressed form. This is
/// useful for long documents and for binary valued fields.
/// </summary>
/// <deprecated> Please use {@link CompressionTools} instead.
/// For string fields that were previously indexed and stored using compression,
/// the new way to achieve this is: First add the field indexed-only (no store)
/// and additionally using the same field name as a binary, stored field
/// with {@link CompressionTools#compressString}.
/// </deprecated>
public static readonly Store COMPRESS = new Store("COMPRESS");
/// <summary>Store the original field value in the index. This is useful for short texts
/// like a document's title which should be displayed with the results. The
/// value is stored in its original form, i.e. no analyzer is used before it is
/// stored.
/// </summary>
public static readonly Store YES = new Store("YES");
/// <summary>Do not store the field value in the index. </summary>
public static readonly Store NO = new Store("NO");
}
/// <summary>Specifies whether and how a field should be indexed. </summary>
[Serializable]
public sealed class Index:Parameter
{
internal Index(System.String name):base(name)
{
}
/// <summary>Do not index the field value. This field can thus not be searched,
/// but one can still access its contents provided it is
/// {@link Field.Store stored}.
/// </summary>
public static readonly Index NO = new Index("NO");
/// <summary>Index the tokens produced by running the field's
/// value through an Analyzer. This is useful for
/// common text.
/// </summary>
public static readonly Index ANALYZED = new Index("ANALYZED");
/// <deprecated> this has been renamed to {@link #ANALYZED}
/// </deprecated>
[Obsolete("this has been renamed to ANALYZED")]
public static readonly Index TOKENIZED;
/// <summary>Index the field's value without using an Analyzer, so it can be searched.
/// As no analyzer is used the value will be stored as a single term. This is
/// useful for unique Ids like product numbers.
/// </summary>
public static readonly Index NOT_ANALYZED = new Index("NOT_ANALYZED");
/// <deprecated> This has been renamed to {@link #NOT_ANALYZED}
/// </deprecated>
[Obsolete("This has been renamed to NOT_ANALYZED")]
public static readonly Index UN_TOKENIZED;
/// <summary>Expert: Index the field's value without an Analyzer,
/// and also disable the storing of norms. Note that you
/// can also separately enable/disable norms by calling
/// {@link Field#setOmitNorms}. No norms means that
/// index-time field and document boosting and field
/// length normalization are disabled. The benefit is
/// less memory usage as norms take up one byte of RAM
/// per indexed field for every document in the index,
/// during searching. Note that once you index a given
/// field <i>with</i> norms enabled, disabling norms will
/// have no effect. In other words, for this to have the
/// above described effect on a field, all instances of
/// that field must be indexed with NOT_ANALYZED_NO_NORMS
/// from the beginning.
/// </summary>
public static readonly Index NOT_ANALYZED_NO_NORMS = new Index("NOT_ANALYZED_NO_NORMS");
/// <deprecated> This has been renamed to
/// {@link #NOT_ANALYZED_NO_NORMS}
/// </deprecated>
[Obsolete("This has been renamed to NOT_ANALYZED_NO_NORMS")]
public static readonly Index NO_NORMS;
/// <summary>Expert: Index the tokens produced by running the
/// field's value through an Analyzer, and also
/// separately disable the storing of norms. See
/// {@link #NOT_ANALYZED_NO_NORMS} for what norms are
/// and why you may want to disable them.
/// </summary>
public static readonly Index ANALYZED_NO_NORMS = new Index("ANALYZED_NO_NORMS");
static Index()
{
TOKENIZED = ANALYZED;
UN_TOKENIZED = NOT_ANALYZED;
NO_NORMS = NOT_ANALYZED_NO_NORMS;
}
}
/// <summary>Specifies whether and how a field should have term vectors. </summary>
[Serializable]
public sealed class TermVector:Parameter
{
internal TermVector(System.String name):base(name)
{
}
/// <summary>Do not store term vectors. </summary>
public static readonly TermVector NO = new TermVector("NO");
/// <summary>Store the term vectors of each document. A term vector is a list
/// of the document's terms and their number of occurrences in that document.
/// </summary>
public static readonly TermVector YES = new TermVector("YES");
/// <summary> Store the term vector + token position information
///
/// </summary>
/// <seealso cref="YES">
/// </seealso>
public static readonly TermVector WITH_POSITIONS = new TermVector("WITH_POSITIONS");
/// <summary> Store the term vector + Token offset information
///
/// </summary>
/// <seealso cref="YES">
/// </seealso>
public static readonly TermVector WITH_OFFSETS = new TermVector("WITH_OFFSETS");
/// <summary> Store the term vector + Token position and offset information
///
/// </summary>
/// <seealso cref="YES">
/// </seealso>
/// <seealso cref="WITH_POSITIONS">
/// </seealso>
/// <seealso cref="WITH_OFFSETS">
/// </seealso>
public static readonly TermVector WITH_POSITIONS_OFFSETS = new TermVector("WITH_POSITIONS_OFFSETS");
}
/// <summary>The value of the field as a String, or null. If null, the Reader value or
/// binary value is used. Exactly one of stringValue(),
/// readerValue(), and getBinaryValue() must be set.
/// </summary>
public override System.String StringValue()
{
return fieldsData is System.String?(System.String) fieldsData:null;
}
/// <summary>The value of the field as a Reader, or null. If null, the String value or
/// binary value is used. Exactly one of stringValue(),
/// readerValue(), and getBinaryValue() must be set.
/// </summary>
public override System.IO.TextReader ReaderValue()
{
return fieldsData is System.IO.TextReader?(System.IO.TextReader) fieldsData:null;
}
/// <summary>The value of the field in Binary, or null. If null, the Reader value,
/// or String value is used. Exactly one of stringValue(),
/// readerValue(), and getBinaryValue() must be set.
/// </summary>
/// <deprecated> This method must allocate a new byte[] if
/// the {@link AbstractField#GetBinaryOffset()} is non-zero
/// or {@link AbstractField#GetBinaryLength()} is not the
/// full length of the byte[]. Please use {@link
/// AbstractField#GetBinaryValue()} instead, which simply
/// returns the byte[].
/// </deprecated>
[Obsolete("This method must allocate a new byte[] if the AbstractField.GetBinaryOffset() is non-zero or AbstractField.GetBinaryLength() is not the full length of the byte[]. Please use AbstractField.GetBinaryValue() instead, which simply returns the byte[].")]
public override byte[] BinaryValue()
{
if (!isBinary)
return null;
byte[] data = (byte[]) fieldsData;
if (binaryOffset == 0 && data.Length == binaryLength)
return data; //Optimization
byte[] ret = new byte[binaryLength];
Array.Copy(data, binaryOffset, ret, 0, binaryLength);
return ret;
}
/// <summary>The TokesStream for this field to be used when indexing, or null. If null, the Reader value
/// or String value is analyzed to produce the indexed tokens.
/// </summary>
public override TokenStream TokenStreamValue()
{
return tokenStream;
}
/// <summary><p/>Expert: change the value of this field. This can
/// be used during indexing to re-use a single Field
/// instance to improve indexing speed by avoiding GC cost
/// of new'ing and reclaiming Field instances. Typically
/// a single {@link Document} instance is re-used as
/// well. This helps most on small documents.<p/>
///
/// <p/>Each Field instance should only be used once
/// within a single {@link Document} instance. See <a
/// href="http://wiki.apache.org/lucene-java/ImproveIndexingSpeed">ImproveIndexingSpeed</a>
/// for details.<p/>
/// </summary>
public void SetValue(System.String value_Renamed)
{
if (isBinary)
{
throw new System.ArgumentException("cannot set a String value on a binary field");
}
fieldsData = value_Renamed;
}
/// <summary>Expert: change the value of this field. See <a href="#setValue(java.lang.String)">setValue(String)</a>. </summary>
public void SetValue(System.IO.TextReader value_Renamed)
{
if (isBinary)
{
throw new System.ArgumentException("cannot set a Reader value on a binary field");
}
if (isStored)
{
throw new System.ArgumentException("cannot set a Reader value on a stored field");
}
fieldsData = value_Renamed;
}
/// <summary>Expert: change the value of this field. See <a href="#setValue(java.lang.String)">setValue(String)</a>. </summary>
public void SetValue(byte[] value_Renamed)
{
if (!isBinary)
{
throw new System.ArgumentException("cannot set a byte[] value on a non-binary field");
}
fieldsData = value_Renamed;
binaryLength = value_Renamed.Length;
binaryOffset = 0;
}
/// <summary>Expert: change the value of this field. See <a href="#setValue(java.lang.String)">setValue(String)</a>. </summary>
public void SetValue(byte[] value_Renamed, int offset, int length)
{
if (!isBinary)
{
throw new System.ArgumentException("cannot set a byte[] value on a non-binary field");
}
fieldsData = value_Renamed;
binaryLength = length;
binaryOffset = offset;
}
/// <summary>Expert: change the value of this field. See <a href="#setValue(java.lang.String)">setValue(String)</a>.</summary>
/// <deprecated> use {@link #setTokenStream}
/// </deprecated>
[Obsolete("use SetTokenStream ")]
public void SetValue(TokenStream value_Renamed)
{
if (isBinary)
{
throw new System.ArgumentException("cannot set a TokenStream value on a binary field");
}
if (isStored)
{
throw new System.ArgumentException("cannot set a TokenStream value on a stored field");
}
fieldsData = null;
tokenStream = value_Renamed;
}
/// <summary>Expert: sets the token stream to be used for indexing and causes isIndexed() and isTokenized() to return true.
/// May be combined with stored values from stringValue() or binaryValue()
/// </summary>
public void SetTokenStream(TokenStream tokenStream)
{
this.isIndexed = true;
this.isTokenized = true;
this.tokenStream = tokenStream;
}
/// <summary> Create a field by specifying its name, value and how it will
/// be saved in the index. Term vectors will not be stored in the index.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="value">The string to process
/// </param>
/// <param name="store">Whether <code>value</code> should be stored in the index
/// </param>
/// <param name="index">Whether the field should be indexed, and if so, if it should
/// be tokenized before indexing
/// </param>
/// <throws> NullPointerException if name or value is <code>null</code> </throws>
/// <throws> IllegalArgumentException if the field is neither stored nor indexed </throws>
public Field(System.String name, System.String value_Renamed, Store store, Index index):this(name, value_Renamed, store, index, TermVector.NO)
{
}
/// <summary> Create a field by specifying its name, value and how it will
/// be saved in the index.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="value">The string to process
/// </param>
/// <param name="store">Whether <code>value</code> should be stored in the index
/// </param>
/// <param name="index">Whether the field should be indexed, and if so, if it should
/// be tokenized before indexing
/// </param>
/// <param name="termVector">Whether term vector should be stored
/// </param>
/// <throws> NullPointerException if name or value is <code>null</code> </throws>
/// <throws> IllegalArgumentException in any of the following situations: </throws>
/// <summary> <ul>
/// <li>the field is neither stored nor indexed</li>
/// <li>the field is not indexed but termVector is <code>TermVector.YES</code></li>
/// </ul>
/// </summary>
public Field(System.String name, System.String value_Renamed, Store store, Index index, TermVector termVector):this(name, true, value_Renamed, store, index, termVector)
{
}
/// <summary> Create a field by specifying its name, value and how it will
/// be saved in the index.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="internName">Whether to .intern() name or not
/// </param>
/// <param name="value">The string to process
/// </param>
/// <param name="store">Whether <code>value</code> should be stored in the index
/// </param>
/// <param name="index">Whether the field should be indexed, and if so, if it should
/// be tokenized before indexing
/// </param>
/// <param name="termVector">Whether term vector should be stored
/// </param>
/// <throws> NullPointerException if name or value is <code>null</code> </throws>
/// <throws> IllegalArgumentException in any of the following situations: </throws>
/// <summary> <ul>
/// <li>the field is neither stored nor indexed</li>
/// <li>the field is not indexed but termVector is <code>TermVector.YES</code></li>
/// </ul>
/// </summary>
public Field(System.String name, bool internName, System.String value_Renamed, Store store, Index index, TermVector termVector)
{
if (name == null)
throw new System.NullReferenceException("name cannot be null");
if (value_Renamed == null)
throw new System.NullReferenceException("value cannot be null");
if (name.Length == 0 && value_Renamed.Length == 0)
throw new System.ArgumentException("name and value cannot both be empty");
if (index == Index.NO && store == Store.NO)
throw new System.ArgumentException("it doesn't make sense to have a field that " + "is neither indexed nor stored");
if (index == Index.NO && termVector != TermVector.NO)
throw new System.ArgumentException("cannot store term vector information " + "for a field that is not indexed");
if (internName)
// field names are optionally interned
name = StringHelper.Intern(name);
this.name = name;
this.fieldsData = value_Renamed;
if (store == Store.YES)
{
this.isStored = true;
this.isCompressed = false;
}
else if (store == Store.COMPRESS)
{
this.isStored = true;
this.isCompressed = true;
}
else if (store == Store.NO)
{
this.isStored = false;
this.isCompressed = false;
}
else
{
throw new System.ArgumentException("unknown store parameter " + store);
}
if (index == Index.NO)
{
this.isIndexed = false;
this.isTokenized = false;
this.omitTermFreqAndPositions = false;
this.omitNorms = true;
}
else if (index == Index.ANALYZED)
{
this.isIndexed = true;
this.isTokenized = true;
}
else if (index == Index.NOT_ANALYZED)
{
this.isIndexed = true;
this.isTokenized = false;
}
else if (index == Index.NOT_ANALYZED_NO_NORMS)
{
this.isIndexed = true;
this.isTokenized = false;
this.omitNorms = true;
}
else if (index == Index.ANALYZED_NO_NORMS)
{
this.isIndexed = true;
this.isTokenized = true;
this.omitNorms = true;
}
else
{
throw new System.ArgumentException("unknown index parameter " + index);
}
this.isBinary = false;
SetStoreTermVector(termVector);
}
/// <summary> Create a tokenized and indexed field that is not stored. Term vectors will
/// not be stored. The Reader is read only when the Document is added to the index,
/// i.e. you may not close the Reader until {@link IndexWriter#AddDocument(Document)}
/// has been called.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="reader">The reader with the content
/// </param>
/// <throws> NullPointerException if name or reader is <code>null</code> </throws>
public Field(System.String name, System.IO.TextReader reader):this(name, reader, TermVector.NO)
{
}
/// <summary> Create a tokenized and indexed field that is not stored, optionally with
/// storing term vectors. The Reader is read only when the Document is added to the index,
/// i.e. you may not close the Reader until {@link IndexWriter#AddDocument(Document)}
/// has been called.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="reader">The reader with the content
/// </param>
/// <param name="termVector">Whether term vector should be stored
/// </param>
/// <throws> NullPointerException if name or reader is <code>null</code> </throws>
public Field(System.String name, System.IO.TextReader reader, TermVector termVector)
{
if (name == null)
throw new System.NullReferenceException("name cannot be null");
if (reader == null)
throw new System.NullReferenceException("reader cannot be null");
this.name = StringHelper.Intern(name); // field names are interned
this.fieldsData = reader;
this.isStored = false;
this.isCompressed = false;
this.isIndexed = true;
this.isTokenized = true;
this.isBinary = false;
SetStoreTermVector(termVector);
}
/// <summary> Create a tokenized and indexed field that is not stored. Term vectors will
/// not be stored. This is useful for pre-analyzed fields.
/// The TokenStream is read only when the Document is added to the index,
/// i.e. you may not close the TokenStream until {@link IndexWriter#AddDocument(Document)}
/// has been called.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="tokenStream">The TokenStream with the content
/// </param>
/// <throws> NullPointerException if name or tokenStream is <code>null</code> </throws>
public Field(System.String name, TokenStream tokenStream):this(name, tokenStream, TermVector.NO)
{
}
/// <summary> Create a tokenized and indexed field that is not stored, optionally with
/// storing term vectors. This is useful for pre-analyzed fields.
/// The TokenStream is read only when the Document is added to the index,
/// i.e. you may not close the TokenStream until {@link IndexWriter#AddDocument(Document)}
/// has been called.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="tokenStream">The TokenStream with the content
/// </param>
/// <param name="termVector">Whether term vector should be stored
/// </param>
/// <throws> NullPointerException if name or tokenStream is <code>null</code> </throws>
public Field(System.String name, TokenStream tokenStream, TermVector termVector)
{
if (name == null)
throw new System.NullReferenceException("name cannot be null");
if (tokenStream == null)
throw new System.NullReferenceException("tokenStream cannot be null");
this.name = StringHelper.Intern(name); // field names are interned
this.fieldsData = null;
this.tokenStream = tokenStream;
this.isStored = false;
this.isCompressed = false;
this.isIndexed = true;
this.isTokenized = true;
this.isBinary = false;
SetStoreTermVector(termVector);
}
/// <summary> Create a stored field with binary value. Optionally the value may be compressed.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="value">The binary value
/// </param>
/// <param name="store">How <code>value</code> should be stored (compressed or not)
/// </param>
/// <throws> IllegalArgumentException if store is <code>Store.NO</code> </throws>
public Field(System.String name, byte[] value_Renamed, Store store):this(name, value_Renamed, 0, value_Renamed.Length, store)
{
}
/// <summary> Create a stored field with binary value. Optionally the value may be compressed.
///
/// </summary>
/// <param name="name">The name of the field
/// </param>
/// <param name="value">The binary value
/// </param>
/// <param name="offset">Starting offset in value where this Field's bytes are
/// </param>
/// <param name="length">Number of bytes to use for this Field, starting at offset
/// </param>
/// <param name="store">How <code>value</code> should be stored (compressed or not)
/// </param>
/// <throws> IllegalArgumentException if store is <code>Store.NO</code> </throws>
public Field(System.String name, byte[] value_Renamed, int offset, int length, Store store)
{
if (name == null)
throw new System.ArgumentException("name cannot be null");
if (value_Renamed == null)
throw new System.ArgumentException("value cannot be null");
this.name = StringHelper.Intern(name); // field names are interned
fieldsData = value_Renamed;
if (store == Store.YES)
{
isStored = true;
isCompressed = false;
}
else if (store == Store.COMPRESS)
{
isStored = true;
isCompressed = true;
}
else if (store == Store.NO)
throw new System.ArgumentException("binary values can't be unstored");
else
{
throw new System.ArgumentException("unknown store parameter " + store);
}
isIndexed = false;
isTokenized = false;
omitTermFreqAndPositions = false;
omitNorms = true;
isBinary = true;
binaryLength = length;
binaryOffset = offset;
SetStoreTermVector(TermVector.NO);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace WebService.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
private const int DefaultCollectionSize = 3;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Orleans;
using Orleans.Configuration;
using Orleans.Hosting;
using Orleans.Runtime;
using Orleans.Runtime.ReminderService;
using Orleans.TestingHost;
using TestExtensions;
using UnitTests.TestHelper;
using Xunit;
using Xunit.Sdk;
namespace UnitTests.General
{
public class ConsistentRingProviderTests_Silo : TestClusterPerTest
{
private const int numAdditionalSilos = 3;
private readonly TimeSpan failureTimeout = TimeSpan.FromSeconds(30);
private readonly TimeSpan endWait = TimeSpan.FromMinutes(5);
enum Fail { First, Random, Last }
protected override void ConfigureTestCluster(TestClusterBuilder builder)
{
builder.AddSiloBuilderConfigurator<Configurator>();
builder.AddClientBuilderConfigurator<Configurator>();
}
private class Configurator : ISiloConfigurator, IClientBuilderConfigurator
{
public void Configure(ISiloBuilder hostBuilder)
{
hostBuilder.AddMemoryGrainStorage("MemoryStore")
.AddMemoryGrainStorageAsDefault()
.UseInMemoryReminderService();
}
public void Configure(IConfiguration configuration, IClientBuilder clientBuilder)
{
clientBuilder.Configure<GatewayOptions>(
options => options.GatewayListRefreshPeriod = TimeSpan.FromMilliseconds(100));
}
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_Basic()
{
await this.HostedCluster.StartAdditionalSilosAsync(numAdditionalSilos);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
VerificationScenario(0);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_1F_Random()
{
await FailureTest(Fail.Random, 1);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_1F_Beginning()
{
await FailureTest(Fail.First, 1);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_1F_End()
{
await FailureTest(Fail.Last, 1);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_2F_Random()
{
await FailureTest(Fail.Random, 2);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_2F_Beginning()
{
await FailureTest(Fail.First, 2);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_2F_End()
{
await FailureTest(Fail.Last, 2);
}
private async Task FailureTest(Fail failCode, int numOfFailures)
{
await this.HostedCluster.StartAdditionalSilosAsync(numAdditionalSilos);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
List<SiloHandle> failures = await getSilosToFail(failCode, numOfFailures);
foreach (SiloHandle fail in failures) // verify before failure
{
VerificationScenario(PickKey(fail.SiloAddress)); // fail.SiloAddress.GetConsistentHashCode());
}
logger.Info("FailureTest {0}, Code {1}, Stopping silos: {2}", numOfFailures, failCode, Utils.EnumerableToString(failures, handle => handle.SiloAddress.ToString()));
List<uint> keysToTest = new List<uint>();
foreach (SiloHandle fail in failures) // verify before failure
{
keysToTest.Add(PickKey(fail.SiloAddress)); //fail.SiloAddress.GetConsistentHashCode());
await this.HostedCluster.StopSiloAsync(fail);
}
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
AssertEventually(() =>
{
foreach (var key in keysToTest) // verify after failure
{
VerificationScenario(key);
}
}, failureTimeout);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_1J()
{
await JoinTest(1);
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_2J()
{
await JoinTest(2);
}
private async Task JoinTest(int numOfJoins)
{
logger.Info("JoinTest {0}", numOfJoins);
await this.HostedCluster.StartAdditionalSilosAsync(numAdditionalSilos - numOfJoins);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
List<SiloHandle> silos = await this.HostedCluster.StartAdditionalSilosAsync(numOfJoins);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
foreach (SiloHandle sh in silos)
{
VerificationScenario(PickKey(sh.SiloAddress));
}
Thread.Sleep(TimeSpan.FromSeconds(15));
}
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_1F1J()
{
await this.HostedCluster.StartAdditionalSilosAsync(numAdditionalSilos);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
List<SiloHandle> failures = await getSilosToFail(Fail.Random, 1);
uint keyToCheck = PickKey(failures[0].SiloAddress);// failures[0].SiloAddress.GetConsistentHashCode();
List<SiloHandle> joins = null;
// kill a silo and join a new one in parallel
logger.Info("Killing silo {0} and joining a silo", failures[0].SiloAddress);
var tasks = new Task[2]
{
Task.Factory.StartNew(() => this.HostedCluster.StopSiloAsync(failures[0])),
this.HostedCluster.StartAdditionalSilosAsync(1).ContinueWith(t => joins = t.GetAwaiter().GetResult())
};
Task.WaitAll(tasks, endWait);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
AssertEventually(() =>
{
VerificationScenario(keyToCheck); // verify failed silo's key
VerificationScenario(PickKey(joins[0].SiloAddress)); // verify newly joined silo's key
}, failureTimeout);
}
// failing the secondary in this scenario exposed the bug in DomainGrain ... so, we keep it as a separate test than Ring_1F1J
[Fact, TestCategory("Functional"), TestCategory("Ring")]
public async Task Ring_1Fsec1J()
{
await this.HostedCluster.StartAdditionalSilosAsync(numAdditionalSilos);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
//List<SiloHandle> failures = getSilosToFail(Fail.Random, 1);
SiloHandle fail = this.HostedCluster.SecondarySilos.First();
uint keyToCheck = PickKey(fail.SiloAddress); //fail.SiloAddress.GetConsistentHashCode();
List<SiloHandle> joins = null;
// kill a silo and join a new one in parallel
logger.Info("Killing secondary silo {0} and joining a silo", fail.SiloAddress);
var tasks = new Task[2]
{
Task.Factory.StartNew(() => this.HostedCluster.StopSiloAsync(fail)),
this.HostedCluster.StartAdditionalSilosAsync(1).ContinueWith(t => joins = t.GetAwaiter().GetResult())
};
Task.WaitAll(tasks, endWait);
await this.HostedCluster.WaitForLivenessToStabilizeAsync();
AssertEventually(() =>
{
VerificationScenario(keyToCheck); // verify failed silo's key
VerificationScenario(PickKey(joins[0].SiloAddress));
}, failureTimeout);
}
private uint PickKey(SiloAddress responsibleSilo)
{
int iteration = 10000;
var testHooks = this.Client.GetTestHooks(this.HostedCluster.Primary);
for (int i = 0; i < iteration; i++)
{
double next = random.NextDouble();
uint randomKey = (uint)((double)RangeFactory.RING_SIZE * next);
SiloAddress s = testHooks.GetConsistentRingPrimaryTargetSilo(randomKey).Result;
if (responsibleSilo.Equals(s))
return randomKey;
}
throw new Exception(String.Format("Could not pick a key that silo {0} will be responsible for. Primary.Ring = \n{1}",
responsibleSilo, testHooks.GetConsistentRingProviderDiagnosticInfo().Result));
}
private void VerificationScenario(uint testKey)
{
// setup
List<SiloAddress> silos = new List<SiloAddress>();
foreach (var siloHandle in this.HostedCluster.GetActiveSilos())
{
long hash = siloHandle.SiloAddress.GetConsistentHashCode();
int index = silos.FindLastIndex(siloAddr => siloAddr.GetConsistentHashCode() < hash) + 1;
silos.Insert(index, siloHandle.SiloAddress);
}
// verify parameter key
VerifyKey(testKey, silos);
// verify some other keys as well, apart from the parameter key
// some random keys
for (int i = 0; i < 3; i++)
{
VerifyKey((uint)random.Next(), silos);
}
// lowest key
uint lowest = (uint)(silos.First().GetConsistentHashCode() - 1);
VerifyKey(lowest, silos);
// highest key
uint highest = (uint)(silos.Last().GetConsistentHashCode() + 1);
VerifyKey(lowest, silos);
}
private void VerifyKey(uint key, List<SiloAddress> silos)
{
var testHooks = this.Client.GetTestHooks(this.HostedCluster.Primary);
SiloAddress truth = testHooks.GetConsistentRingPrimaryTargetSilo(key).Result; //expected;
//if (truth == null) // if the truth isn't passed, we compute it here
//{
// truth = silos.Find(siloAddr => (key <= siloAddr.GetConsistentHashCode()));
// if (truth == null)
// {
// truth = silos.First();
// }
//}
// lookup for 'key' should return 'truth' on all silos
foreach (var siloHandle in this.HostedCluster.GetActiveSilos()) // do this for each silo
{
testHooks = this.Client.GetTestHooks(siloHandle);
SiloAddress s = testHooks.GetConsistentRingPrimaryTargetSilo((uint)key).Result;
Assert.Equal(truth, s);
}
}
private async Task<List<SiloHandle>> getSilosToFail(Fail fail, int numOfFailures)
{
List<SiloHandle> failures = new List<SiloHandle>();
int count = 0;
// Figure out the primary directory partition and the silo hosting the ReminderTableGrain.
var tableGrain = this.GrainFactory.GetGrain<IReminderTableGrain>(InMemoryReminderTable.ReminderTableGrainId);
// Ping the grain to make sure it is active.
await tableGrain.ReadRows((GrainReference)tableGrain);
var tableGrainId = ((GrainReference)tableGrain).GrainId;
SiloAddress reminderTableGrainPrimaryDirectoryAddress = (await TestUtils.GetDetailedGrainReport(this.HostedCluster.InternalGrainFactory, tableGrainId, this.HostedCluster.Primary)).PrimaryForGrain;
// ask a detailed report from the directory partition owner, and get the actionvation addresses
var address = (await TestUtils.GetDetailedGrainReport(this.HostedCluster.InternalGrainFactory, tableGrainId, this.HostedCluster.GetSiloForAddress(reminderTableGrainPrimaryDirectoryAddress))).LocalDirectoryActivationAddress;
ActivationAddress reminderGrainActivation = address;
SortedList<int, SiloHandle> ids = new SortedList<int, SiloHandle>();
foreach (var siloHandle in this.HostedCluster.GetActiveSilos())
{
SiloAddress siloAddress = siloHandle.SiloAddress;
if (siloAddress.Equals(this.HostedCluster.Primary.SiloAddress))
{
continue;
}
// Don't fail primary directory partition and the silo hosting the ReminderTableGrain.
if (siloAddress.Equals(reminderTableGrainPrimaryDirectoryAddress) || siloAddress.Equals(reminderGrainActivation.Silo))
{
continue;
}
ids.Add(siloHandle.SiloAddress.GetConsistentHashCode(), siloHandle);
}
int index;
// we should not fail the primary!
// we can't guarantee semantics of 'Fail' if it evalutes to the primary's address
switch (fail)
{
case Fail.First:
index = 0;
while (count++ < numOfFailures)
{
while (failures.Contains(ids.Values[index]))
{
index++;
}
failures.Add(ids.Values[index]);
}
break;
case Fail.Last:
index = ids.Count - 1;
while (count++ < numOfFailures)
{
while (failures.Contains(ids.Values[index]))
{
index--;
}
failures.Add(ids.Values[index]);
}
break;
case Fail.Random:
default:
while (count++ < numOfFailures)
{
SiloHandle r = ids.Values[random.Next(ids.Count)];
while (failures.Contains(r))
{
r = ids.Values[random.Next(ids.Count)];
}
failures.Add(r);
}
break;
}
return failures;
}
// for debugging only
private void printSilos(string msg)
{
SortedList<int, SiloAddress> ids = new SortedList<int, SiloAddress>(numAdditionalSilos + 2);
foreach (var siloHandle in this.HostedCluster.GetActiveSilos())
{
ids.Add(siloHandle.SiloAddress.GetConsistentHashCode(), siloHandle.SiloAddress);
}
logger.Info("{0} list of silos: ", msg);
foreach (var id in ids.Keys.ToList())
{
logger.Info("{0} -> {1}", ids[id], id);
}
}
private static void AssertEventually(Action assertion, TimeSpan timeout)
{
AssertEventually(assertion, timeout, TimeSpan.FromMilliseconds(500));
}
private static void AssertEventually(Action assertion, TimeSpan timeout, TimeSpan delayBetweenIterations)
{
var sw = Stopwatch.StartNew();
while (true)
{
try
{
assertion();
return;
}
catch (XunitException)
{
if (sw.ElapsedMilliseconds > timeout.TotalMilliseconds)
{
throw;
}
}
if (delayBetweenIterations > TimeSpan.Zero)
{
Thread.Sleep(delayBetweenIterations);
}
}
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Security.Cryptography.ECDiffieHellmanCng.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Security.Cryptography
{
sealed public partial class ECDiffieHellmanCng : ECDiffieHellman
{
#region Methods and constructors
public override byte[] DeriveKeyMaterial(ECDiffieHellmanPublicKey otherPartyPublicKey)
{
return default(byte[]);
}
public byte[] DeriveKeyMaterial(CngKey otherPartyPublicKey)
{
Contract.Ensures(Contract.Result<byte[]>() != null);
Contract.Ensures(this.Key.Handle != null);
return default(byte[]);
}
public Microsoft.Win32.SafeHandles.SafeNCryptSecretHandle DeriveSecretAgreementHandle(ECDiffieHellmanPublicKey otherPartyPublicKey)
{
Contract.Ensures(this.Key.Handle != null);
return default(Microsoft.Win32.SafeHandles.SafeNCryptSecretHandle);
}
public Microsoft.Win32.SafeHandles.SafeNCryptSecretHandle DeriveSecretAgreementHandle(CngKey otherPartyPublicKey)
{
Contract.Ensures(this.Key.Handle != null);
return default(Microsoft.Win32.SafeHandles.SafeNCryptSecretHandle);
}
protected override void Dispose(bool disposing)
{
}
public ECDiffieHellmanCng(int keySize)
{
Contract.Ensures(this.LegalKeySizesValue != null);
}
public ECDiffieHellmanCng(CngKey key)
{
Contract.Ensures(this.Key.AlgorithmGroup != null);
Contract.Ensures(this.Key.AlgorithmGroup == System.Security.Cryptography.CngAlgorithmGroup.ECDiffieHellman);
Contract.Ensures(this.LegalKeySizesValue != null);
}
public ECDiffieHellmanCng()
{
Contract.Ensures(this.LegalKeySizesValue != null);
}
public void FromXmlString(string xml, ECKeyXmlFormat format)
{
Contract.Ensures(System.Security.Cryptography.CngAlgorithmGroup.ECDiffieHellman == this.Key.AlgorithmGroup);
Contract.Ensures(this.Key.AlgorithmGroup != null);
}
public override void FromXmlString(string xmlString)
{
}
public override string ToXmlString(bool includePrivateParameters)
{
return default(string);
}
public string ToXmlString(ECKeyXmlFormat format)
{
Contract.Ensures(Contract.Result<string>() != null);
return default(string);
}
#endregion
#region Properties and indexers
public CngAlgorithm HashAlgorithm
{
get
{
return default(CngAlgorithm);
}
set
{
}
}
public byte[] HmacKey
{
get
{
return default(byte[]);
}
set
{
}
}
public CngKey Key
{
get
{
Contract.Ensures(Contract.Result<System.Security.Cryptography.CngKey>() != null);
return default(CngKey);
}
private set
{
}
}
public ECDiffieHellmanKeyDerivationFunction KeyDerivationFunction
{
get
{
return default(ECDiffieHellmanKeyDerivationFunction);
}
set
{
}
}
public byte[] Label
{
get
{
return default(byte[]);
}
set
{
}
}
public override ECDiffieHellmanPublicKey PublicKey
{
get
{
return default(ECDiffieHellmanPublicKey);
}
}
public byte[] SecretAppend
{
get
{
return default(byte[]);
}
set
{
}
}
public byte[] SecretPrepend
{
get
{
return default(byte[]);
}
set
{
}
}
public byte[] Seed
{
get
{
return default(byte[]);
}
set
{
}
}
public bool UseSecretAgreementAsHmacKey
{
get
{
Contract.Ensures(Contract.Result<bool>() == (this.HmacKey == null));
return default(bool);
}
}
#endregion
}
}
| |
// Copyright (c) 2012-2014 Sharpex2D - Kevin Scholz (ThuCommix)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the 'Software'), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using Sharpex2D.Audio;
using Sharpex2D.Content;
using Sharpex2D.GameService;
using Sharpex2D.Input;
using Sharpex2D.Rendering;
using Sharpex2D.Rendering.Scene;
using Sharpex2D.Surface;
namespace Sharpex2D
{
[Developer("ThuCommix", "developer@sharpex2d.de")]
[TestState(TestState.Tested)]
public abstract class Game : IUpdateable, IDrawable, IConstructable
{
private RenderTarget _renderTarget;
/// <summary>
/// Initializes a new Game class.
/// </summary>
protected Game()
{
GameComponentManager = new GameComponentManager();
}
/// <summary>
/// Gets the GameComponentManager.
/// </summary>
public GameComponentManager GameComponentManager { private set; get; }
/// <summary>
/// The current InputManager.
/// </summary>
public InputManager Input { get; set; }
/// <summary>
/// The Current AudioManager.
/// </summary>
public AudioManager AudioManager { get; internal set; }
/// <summary>
/// The Current ContentManager.
/// </summary>
public ContentManager Content { get; set; }
/// <summary>
/// The Current SceneManager.
/// </summary>
public SceneManager SceneManager { get; set; }
/// <summary>
/// The Current GameServices.
/// </summary>
public GameServiceContainer GameServices { set; get; }
/// <summary>
/// Sets or gets the TargetFrameTime.
/// </summary>
public float TargetTime
{
get { return SGL.Components.Get<GameLoop>().TargetTime; }
set { SGL.Components.Get<GameLoop>().TargetTime = value; }
}
/// <summary>
/// A value indicating whether the surface is active.
/// </summary>
public bool IsActive
{
get { return _renderTarget.Window.IsActive; }
}
/// <summary>
/// Gets the GameWindow.
/// </summary>
public GameWindow Window
{
get { return _renderTarget.Window; }
}
#region IComponent Implementation
/// <summary>
/// Sets or gets the Guid of the Component.
/// </summary>
public Guid Guid
{
get { return new Guid("6782E502-BE99-4030-9472-C295E822881B"); }
}
#endregion
#region IConstructable Implementation
/// <summary>
/// Constructs the component.
/// </summary>
void IConstructable.Construct()
{
_renderTarget = SGL.Components.Get<RenderTarget>();
}
#endregion
#region IDrawable Implementation
/// <summary>
/// Processes a Render.
/// </summary>
/// <param name="spriteBatch">The SpriteBatch.</param>
/// <param name="gameTime">The GameTime.</param>
void IDrawable.Draw(SpriteBatch spriteBatch, GameTime gameTime)
{
OnDrawing(spriteBatch, gameTime);
}
#endregion
#region IUpdateable Implementation
/// <summary>
/// Updates the object.
/// </summary>
/// <param name="gameTime">The GameTime.</param>
void IUpdateable.Update(GameTime gameTime)
{
OnUpdate(gameTime);
}
#endregion
/// <summary>
/// Updates the object.
/// </summary>
/// <param name="gameTime">The GameTime.</param>
public virtual void OnUpdate(GameTime gameTime)
{
foreach (IGameComponent gameComponent in GameComponentManager)
{
gameComponent.Update(gameTime);
}
}
/// <summary>
/// Processes a Render.
/// </summary>
/// <param name="spriteBatch">The SpriteBatch.</param>
/// <param name="gameTime">The GameTime.</param>
public virtual void OnDrawing(SpriteBatch spriteBatch, GameTime gameTime)
{
spriteBatch.Begin();
foreach (IGameComponent gameComponent in GameComponentManager)
{
gameComponent.Draw(spriteBatch, gameTime);
}
spriteBatch.End();
}
/// <summary>
/// Processes the Game initialization.
/// </summary>
/// <param name="launchParameters">The LaunchParameters.</param>
public abstract EngineConfiguration OnInitialize(LaunchParameters launchParameters);
/// <summary>
/// Processes the Game load.
/// </summary>
public abstract void OnLoadContent();
/// <summary>
/// Processes the Game unload.
/// </summary>
public virtual void OnUnload()
{
}
/// <summary>
/// Processes if the surface is activated.
/// </summary>
public virtual void OnActivation()
{
}
/// <summary>
/// Processes if the surface is deactivated.
/// </summary>
public virtual void OnDeactivation()
{
}
/// <summary>
/// Exits the game.
/// </summary>
public void Exit()
{
SGL.Shutdown();
}
/// <summary>
/// Restarts the Game with the specified LaunchParameters.
/// </summary>
/// <param name="launchParameters">The LaunchParameters.</param>
public void Restart(LaunchParameters launchParameters)
{
SGL.Restart(launchParameters.ToString());
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace appveyortest.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
// Modify this to add more default documentations.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
private Lazy<IModelDocumentationProvider> _documentationProvider;
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
// Change this to provide different name for the member.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
{
return jsonProperty.PropertyName;
}
if (hasDataContractAttribute)
{
DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
{
return dataMember.Name;
}
}
return member.Name;
}
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
bool hasMemberAttribute = member.DeclaringType.IsEnum ?
member.GetCustomAttribute<EnumMemberAttribute>() != null :
member.GetCustomAttribute<DataMemberAttribute>() != null;
// Display member only if all the followings are true:
// no JsonIgnoreAttribute
// no XmlIgnoreAttribute
// no IgnoreDataMemberAttribute
// no NonSerializedAttribute
// no ApiExplorerSettingsAttribute with IgnoreApi set to true
// no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
return jsonIgnore == null &&
xmlIgnore == null &&
ignoreDataMember == null &&
nonSerialized == null &&
(apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
(!hasDataContractAttribute || hasMemberAttribute);
}
private string CreateDefaultDocumentation(Type type)
{
string documentation;
if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
{
return documentation;
}
if (DocumentationProvider != null)
{
documentation = DocumentationProvider.GetDocumentation(type);
}
return documentation;
}
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
if (collectionModelDescription != null)
{
return new CollectionModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
ElementDescription = collectionModelDescription
};
}
return null;
}
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (ShouldDisplayMember(property, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(property, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
}
GenerateAnnotations(property, propertyModel);
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
}
}
FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo field in fields)
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(field, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
}
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
}
}
return complexModelDescription;
}
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new DictionaryModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
EnumValueDescription enumValue = new EnumValueDescription
{
Name = field.Name,
Value = field.GetRawConstantValue().ToString()
};
if (DocumentationProvider != null)
{
enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
}
enumDescription.Values.Add(enumValue);
}
}
GeneratedModels.Add(enumDescription.Name, enumDescription);
return enumDescription;
}
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new KeyValuePairModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
return simpleModelDescription;
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Collections;
using System.Diagnostics;
using System.Drawing;
using System.Windows.Forms;
using System.Text;
using OpenLiveWriter.Controls;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.HtmlEditor;
using OpenLiveWriter.Localization;
using OpenLiveWriter.Mshtml;
using OpenLiveWriter.Interop.Windows;
using OpenLiveWriter.ApplicationFramework ;
using mshtml ;
namespace OpenLiveWriter.PostEditor.Tables
{
internal class TableEditingManager : IDisposable
{
private bool _selectionChangedHooked = false;
public TableEditingManager(IHtmlEditorComponentContext editorContext)
{
_editorContext = editorContext ;
if (!GlobalEditorOptions.SupportsFeature(ContentEditorFeature.Table))
{
_editorContext.SelectionChanged += new EventHandler(_editorContext_SelectionChanged);
_selectionChangedHooked = true;
}
InitializeCommands() ;
}
public MshtmlElementBehavior CreateTableEditingElementBehavior()
{
return new TableEditingElementBehavior(_editorContext, this) ;
}
public MshtmlElementBehavior CreateTableCellEditingElementBehavior()
{
return new TableCellEditingElementBehavior(_editorContext) ;
}
public bool ShowTableContextMenuForElement(IHTMLElement element)
{
IHTMLTable tableElement = TableHelper.GetContainingTableElement(element) ;
if ( tableElement != null )
{
MarkupRange tableMarkupRange = _editorContext.MarkupServices.CreateMarkupRange(tableElement as IHTMLElement) ;
return TableHelper.TableElementIsEditable(tableElement as IHTMLElement, tableMarkupRange) ;
}
else
{
return false ;
}
}
public CommandContextMenuDefinition CreateTableContextMenuDefinition()
{
// make sure enable/disable states are correct based on new selection
ManageCommands() ;
// return menu definition
return new TableContextMenuDefinition(false) ;
}
internal void NotifyTableDetached()
{
ManageCommands() ;
}
private void _editorContext_SelectionChanged(object sender, EventArgs e)
{
ManageCommands() ;
}
private void InitializeCommands()
{
_editorContext.CommandManager.BeginUpdate();
commandTableMenu = new Command(CommandId.TableMenu);
commandTableMenu.CommandBarButtonContextMenuDefinition = new TableContextMenuDefinition() ;
_editorContext.CommandManager.Add(commandTableMenu);
commandTableProperties = new Command(CommandId.TableProperties) ;
commandTableProperties.Execute +=new EventHandler(commandTableProperties_Execute);
_editorContext.CommandManager.Add(commandTableProperties);
commandDeleteTable = new Command(CommandId.DeleteTable);
commandDeleteTable.Execute +=new EventHandler(commandDeleteTable_Execute);
_editorContext.CommandManager.Add(commandDeleteTable);
commandRowProperties = new Command(CommandId.RowProperties) ;
commandRowProperties.Execute +=new EventHandler(commandRowProperties_Execute);
_editorContext.CommandManager.Add(commandRowProperties);
commandInsertRowAbove = new Command(CommandId.InsertRowAbove);
commandInsertRowAbove.Execute += new EventHandler(commandInsertRowAbove_Execute);
_editorContext.CommandManager.Add(commandInsertRowAbove);
commandInsertRowBelow = new Command(CommandId.InsertRowBelow);
commandInsertRowBelow.Execute += new EventHandler(commandInsertRowBelow_Execute);
_editorContext.CommandManager.Add(commandInsertRowBelow);
commandMoveRowUp = new Command(CommandId.MoveRowUp);
commandMoveRowUp.Execute += new EventHandler(commandMoveRowUp_Execute);
_editorContext.CommandManager.Add(commandMoveRowUp);
commandMoveRowDown = new Command(CommandId.MoveRowDown);
commandMoveRowDown.Execute += new EventHandler(commandMoveRowDown_Execute);
_editorContext.CommandManager.Add(commandMoveRowDown);
commandDeleteRow = new Command(CommandId.DeleteRow);
commandDeleteRow.Execute += new EventHandler(commandDeleteRow_Execute);
_editorContext.CommandManager.Add(commandDeleteRow);
commandColumnProperties = new Command(CommandId.ColumnProperties) ;
commandColumnProperties.Execute +=new EventHandler(commandColumnProperties_Execute);
_editorContext.CommandManager.Add(commandColumnProperties);
commandInsertColumnLeft = new Command(CommandId.InsertColumnLeft);
commandInsertColumnLeft.Execute += new EventHandler(commandInsertColumnLeft_Execute);
_editorContext.CommandManager.Add(commandInsertColumnLeft);
commandInsertColumnRight = new Command(CommandId.InsertColumnRight);
commandInsertColumnRight.Execute += new EventHandler(commandInsertColumnRight_Execute);
_editorContext.CommandManager.Add(commandInsertColumnRight);
commandMoveColumnLeft = new Command(CommandId.MoveColumnLeft);
commandMoveColumnLeft.Execute += new EventHandler(commandMoveColumnLeft_Execute);
_editorContext.CommandManager.Add(commandMoveColumnLeft);
commandMoveColumnRight = new Command(CommandId.MoveColumnRight);
commandMoveColumnRight.Execute += new EventHandler(commandMoveColumnRight_Execute);
_editorContext.CommandManager.Add(commandMoveColumnRight);
commandDeleteColumn = new Command(CommandId.DeleteColumn);
commandDeleteColumn.Execute += new EventHandler(commandDeleteColumn_Execute);
_editorContext.CommandManager.Add(commandDeleteColumn);
commandCellProperties = new Command(CommandId.CellProperties);
commandCellProperties.Execute +=new EventHandler(commandCellProperties_Execute);
_editorContext.CommandManager.Add(commandCellProperties);
commandClearCell = new Command(CommandId.ClearCell);
commandClearCell.Execute += new EventHandler(commandClearCell_Execute);
_editorContext.CommandManager.Add(commandClearCell);
_editorContext.CommandManager.EndUpdate();
}
internal void ManageCommands()
{
// state variables
bool editableTableSelected, multipleRowsSelected, multipleColumnsSelected, multipleCellsSelected ;
if ( _editorContext.EditMode )
{
// analyze selection
TableSelection tableSelection = new TableSelection(_editorContext.Selection.SelectedMarkupRange);
editableTableSelected = (tableSelection.Table != null) && (tableSelection.Table as IHTMLElement3).isContentEditable ;
multipleRowsSelected = editableTableSelected && !tableSelection.SingleRowSelected ;
multipleColumnsSelected = editableTableSelected && !tableSelection.SingleColumnSelected ;
multipleCellsSelected = editableTableSelected && tableSelection.HasContiguousSelection ;
}
else
{
editableTableSelected = multipleRowsSelected = multipleColumnsSelected = multipleCellsSelected = false ;
}
commandTableProperties.Enabled = editableTableSelected ;
commandDeleteTable.Enabled = editableTableSelected ;
commandRowProperties.Enabled = editableTableSelected && !multipleRowsSelected;
commandInsertRowAbove.Enabled = editableTableSelected ;
commandInsertRowBelow.Enabled = editableTableSelected ;
commandMoveRowUp.Enabled = editableTableSelected && !multipleRowsSelected;
commandMoveRowDown.Enabled = editableTableSelected && !multipleRowsSelected;
commandDeleteRow.Enabled = editableTableSelected;
// commandDeleteRow.MenuFormatArgs = new object[] { multipleRowsSelected ? "s" : String.Empty };
commandColumnProperties.Enabled = editableTableSelected && !multipleColumnsSelected;
commandInsertColumnLeft.Enabled = editableTableSelected ;
commandInsertColumnRight.Enabled = editableTableSelected ;
commandMoveColumnLeft.Enabled = editableTableSelected && !multipleColumnsSelected;
commandMoveColumnRight.Enabled = editableTableSelected && !multipleColumnsSelected;
commandDeleteColumn.Enabled = editableTableSelected ;
// commandDeleteColumn.MenuFormatArgs = new object[] { multipleColumnsSelected ? "s" : String.Empty };
commandCellProperties.Enabled = editableTableSelected && !multipleCellsSelected ;
commandClearCell.Enabled = editableTableSelected ;
// commandClearCell.MenuFormatArgs = new object[] { multipleCellsSelected ? "s" : String.Empty } ;
}
private void commandTableProperties_Execute(object sender, EventArgs e)
{
using ( TablePropertiesForm tablePropertiesForm = new TablePropertiesForm())
{
// read existing properites
TableProperties existingProperties = TableEditor.GetTableProperties(_editorContext) ;
// show the dialog
TableProperties editedProperties = tablePropertiesForm.EditTable(_editorContext.MainFrameWindow, existingProperties);
// update
if ( editedProperties != null )
{
TableEditor.SetTableProperties(_editorContext, editedProperties) ;
}
}
}
private void commandDeleteTable_Execute(object sender, EventArgs e)
{
if ( DisplayMessage.Show(MessageId.ConfirmDeleteTable, _editorContext.MainFrameWindow) == DialogResult.Yes )
{
TableEditor.DeleteTable(_editorContext);
}
}
private void commandRowProperties_Execute(object sender, EventArgs e)
{
using ( RowPropertiesForm rowPropertiesForm = new RowPropertiesForm(TableEditor.GetRowProperties(_editorContext)) )
{
if ( rowPropertiesForm.ShowDialog(_editorContext.MainFrameWindow) == DialogResult.OK )
{
TableEditor.SetRowProperties(_editorContext,rowPropertiesForm.RowProperties) ;
}
}
}
private void commandInsertRowAbove_Execute(object sender, EventArgs e)
{
TableEditor.InsertRowAbove(_editorContext) ;
}
private void commandInsertRowBelow_Execute(object sender, EventArgs e)
{
TableEditor.InsertRowBelow(_editorContext) ;
}
private void commandMoveRowUp_Execute(object sender, EventArgs e)
{
TableEditor.MoveRowUp(_editorContext);
}
private void commandMoveRowDown_Execute(object sender, EventArgs e)
{
TableEditor.MoveRowDown(_editorContext);
}
private void commandDeleteRow_Execute(object sender, EventArgs e)
{
TableEditor.DeleteRows(_editorContext);
ManageCommands() ;
}
private void commandColumnProperties_Execute(object sender, EventArgs e)
{
using ( ColumnPropertiesForm columnPropertiesForm = new ColumnPropertiesForm(TableEditor.GetColumnProperties(_editorContext)) )
{
if ( columnPropertiesForm.ShowDialog(_editorContext.MainFrameWindow) == DialogResult.OK )
{
TableEditor.SetColumnProperties(_editorContext,columnPropertiesForm.ColumnProperties) ;
}
}
}
private void commandInsertColumnLeft_Execute(object sender, EventArgs e)
{
// In RTL, since the table is flipped, we have to flipp the commands that are right and left aware.
if (_editorContext.IsRTLTemplate)
TableEditor.InsertColumnRight(_editorContext);
else
TableEditor.InsertColumnLeft(_editorContext);
}
private void commandInsertColumnRight_Execute(object sender, EventArgs e)
{
// In RTL, since the table is flipped, we have to flipp the commands that are right and left aware.
if (_editorContext.IsRTLTemplate)
TableEditor.InsertColumnLeft(_editorContext);
else
TableEditor.InsertColumnRight(_editorContext);
}
private void commandMoveColumnLeft_Execute(object sender, EventArgs e)
{
// In RTL, since the table is flipped, we have to flipp the commands that are right and left aware.
if (_editorContext.IsRTLTemplate)
TableEditor.MoveColumnRight(_editorContext);
else
TableEditor.MoveColumnLeft(_editorContext);
}
private void commandMoveColumnRight_Execute(object sender, EventArgs e)
{
// In RTL, since the table is flipped, we have to flipp the commands that are right and left aware.
if (_editorContext.IsRTLTemplate)
TableEditor.MoveColumnLeft(_editorContext);
else
TableEditor.MoveColumnRight(_editorContext);
}
private void commandDeleteColumn_Execute(object sender, EventArgs e)
{
TableEditor.DeleteColumns(_editorContext);
ManageCommands();
}
private void commandCellProperties_Execute(object sender, EventArgs e)
{
using ( CellPropertiesForm cellPropertiesForm = new CellPropertiesForm(TableEditor.GetCellProperties(_editorContext)) )
{
if ( cellPropertiesForm.ShowDialog(_editorContext.MainFrameWindow) == DialogResult.OK )
{
TableEditor.SetCellProperties(_editorContext,cellPropertiesForm.CellProperties) ;
}
}
}
private void commandClearCell_Execute(object sender, EventArgs e)
{
TableSelection tableSelection = new TableSelection(_editorContext.Selection.SelectedMarkupRange);
if ( tableSelection.Table != null )
{
MarkupRange tableMarkupRange = _editorContext.MarkupServices.CreateMarkupRange(tableSelection.Table as IHTMLElement);
using(_editorContext.DamageServices.CreateDamageTracker(tableMarkupRange, false))
TableEditor.ClearCells(_editorContext);
}
}
private Command commandTableMenu ;
private Command commandTableProperties ;
private Command commandDeleteTable ;
private Command commandRowProperties ;
private Command commandInsertRowAbove ;
private Command commandInsertRowBelow ;
private Command commandMoveRowUp ;
private Command commandMoveRowDown ;
private Command commandDeleteRow ;
private Command commandColumnProperties ;
private Command commandInsertColumnLeft ;
private Command commandInsertColumnRight ;
private Command commandMoveColumnLeft ;
private Command commandMoveColumnRight ;
private Command commandDeleteColumn ;
private Command commandCellProperties ;
private Command commandClearCell ;
private IHtmlEditorComponentContext _editorContext ;
public void Dispose()
{
if (_selectionChangedHooked)
{
_editorContext.SelectionChanged -= new EventHandler(_editorContext_SelectionChanged);
_selectionChangedHooked = false;
}
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Web.Script.Services;
using System.Web.Services;
using System.Xml;
using Umbraco.Core;
using Umbraco.Core.Configuration;
using Umbraco.Core.Logging;
using Umbraco.Core.Models;
using Umbraco.Core.Models.EntityBase;
using Umbraco.Web;
using Umbraco.Web.WebServices;
using umbraco.BasePages;
using umbraco.BusinessLogic;
using umbraco.BusinessLogic.Actions;
using umbraco.cms.businesslogic.web;
namespace umbraco.presentation.webservices
{
/// <summary>
/// Summary description for nodeSorter
/// </summary>
[WebService(Namespace = "http://umbraco.org/")]
[WebServiceBinding(ConformsTo = WsiProfiles.BasicProfile1_1)]
[ToolboxItem(false)]
[ScriptService]
public class nodeSorter : UmbracoAuthorizedWebService
{
[WebMethod]
public SortNode GetNodes(string ParentId, string App)
{
if (BasePage.ValidateUserContextID(BasePage.umbracoUserContextID))
{
var nodes = new List<SortNode>();
// "hack for stylesheet"
if (App == "settings")
{
var stylesheet = Services.FileService.GetStylesheetByName(ParentId.EnsureEndsWith(".css"));
if (stylesheet == null) throw new InvalidOperationException("No stylesheet found by name " + ParentId);
var sort = 0;
foreach (var child in stylesheet.Properties)
{
nodes.Add(new SortNode(child.Name.GetHashCode(), sort, child.Name, DateTime.Now));
sort++;
}
return new SortNode()
{
SortNodes = nodes.ToArray()
};
}
else
{
var asInt = int.Parse(ParentId);
var parent = new SortNode { Id = asInt };
var entityService = base.ApplicationContext.Services.EntityService;
// Root nodes?
if (asInt == -1)
{
if (App == "media")
{
var rootMedia = entityService.GetRootEntities(UmbracoObjectTypes.Media);
nodes.AddRange(rootMedia.Select(media => new SortNode(media.Id, media.SortOrder, media.Name, media.CreateDate)));
}
else
{
var rootContent = entityService.GetRootEntities(UmbracoObjectTypes.Document);
nodes.AddRange(rootContent.Select(content => new SortNode(content.Id, content.SortOrder, content.Name, content.CreateDate)));
}
}
else
{
var children = entityService.GetChildren(asInt);
nodes.AddRange(children.Select(child => new SortNode(child.Id, child.SortOrder, child.Name, child.CreateDate)));
}
parent.SortNodes = nodes.ToArray();
return parent;
}
}
throw new ArgumentException("User not logged in");
}
public void UpdateSortOrder(int ParentId, string SortOrder)
{
UpdateSortOrder(ParentId.ToString(), SortOrder);
}
[WebMethod]
public void UpdateSortOrder(string ParentId, string SortOrder)
{
if (AuthorizeRequest() == false) return;
if (SortOrder.Trim().Length <= 0) return;
var isContent = helper.Request("app") == "content" | helper.Request("app") == "";
var isMedia = helper.Request("app") == "media";
//ensure user is authorized for the app requested
if (isContent && AuthorizeRequest(DefaultApps.content.ToString()) == false) return;
if (isMedia && AuthorizeRequest(DefaultApps.media.ToString()) == false) return;
var ids = SortOrder.Split(new[] { "," }, StringSplitOptions.RemoveEmptyEntries);
if (isContent)
{
SortContent(ids, int.Parse(ParentId));
}
else if (isMedia)
{
SortMedia(ids);
}
else
{
SortStylesheetProperties(ParentId, ids);
}
}
private void SortMedia(string[] ids)
{
var mediaService = base.ApplicationContext.Services.MediaService;
var sortedMedia = new List<IMedia>();
try
{
for (var i = 0; i < ids.Length; i++)
{
var id = int.Parse(ids[i]);
var m = mediaService.GetById(id);
sortedMedia.Add(m);
}
// Save Media with new sort order and update content xml in db accordingly
var sorted = mediaService.Sort(sortedMedia);
}
catch (Exception ex)
{
LogHelper.Error<nodeSorter>("Could not update media sort order", ex);
}
}
private void SortStylesheetProperties(string stylesheetName, string[] names)
{
var stylesheet = Services.FileService.GetStylesheetByName(stylesheetName.EnsureEndsWith(".css"));
if (stylesheet == null) throw new InvalidOperationException("No stylesheet found by name " + stylesheetName);
var currProps = stylesheet.Properties.ToArray();
//remove them all first
foreach (var prop in currProps)
{
stylesheet.RemoveProperty(prop.Name);
}
//re-add them in the right order
for (var i = 0; i < names.Length; i++)
{
var found = currProps.Single(x => x.Name == names[i]);
stylesheet.AddProperty(found);
}
Services.FileService.SaveStylesheet(stylesheet);
}
private void SortContent(string[] ids, int parentId)
{
var contentService = base.ApplicationContext.Services.ContentService;
var sortedContent = new List<IContent>();
try
{
for (var i = 0; i < ids.Length; i++)
{
var id = int.Parse(ids[i]);
var c = contentService.GetById(id);
sortedContent.Add(c);
}
// Save content with new sort order and update db+cache accordingly
var sorted = contentService.Sort(sortedContent);
// refresh sort order on cached xml
// but no... this is not distributed - solely relying on content service & events should be enough
//content.Instance.SortNodes(parentId);
//send notifications! TODO: This should be put somewhere centralized instead of hard coded directly here
ApplicationContext.Services.NotificationService.SendNotification(contentService.GetById(parentId), ActionSort.Instance, UmbracoContext, ApplicationContext);
}
catch (Exception ex)
{
LogHelper.Error<nodeSorter>("Could not update content sort order", ex);
}
}
}
[Serializable]
public class SortNode
{
public SortNode()
{
}
private SortNode[] _sortNodes;
public SortNode[] SortNodes
{
get { return _sortNodes; }
set { _sortNodes = value; }
}
public int TotalNodes
{
get { return _sortNodes != null ? _sortNodes.Length : 0; }
set { int test = value; }
}
public SortNode(int Id, int SortOrder, string Name, DateTime CreateDate)
{
_id = Id;
_sortOrder = SortOrder;
_name = Name;
_createDate = CreateDate;
}
private DateTime _createDate;
public DateTime CreateDate
{
get { return _createDate; }
set { _createDate = value; }
}
private string _name;
public string Name
{
get { return _name; }
set { _name = value; }
}
private int _sortOrder;
public int SortOrder
{
get { return _sortOrder; }
set { _sortOrder = value; }
}
private int _id;
public int Id
{
get { return _id; }
set { _id = value; }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;
namespace LZ4Sharp
{
/// <summary>
/// Class for decompressing an LZ4 compressed byte array.
/// </summary>
public unsafe class LZ4Decompressor64 : ILZ4Decompressor
{
const int STEPSIZE = 8;
static byte[] DeBruijnBytePos = new byte[64] { 0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7, 0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7, 7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6, 7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7 };
//**************************************
// Macros
//**************************************
readonly sbyte[] m_DecArray = new sbyte[8] { 0, 3, 2, 3, 0, 0, 0, 0 };
readonly sbyte[] m_Dec2table = new sbyte[8] { 0, 0, 0, -1, 0, 1, 2, 3 };
// Note : The decoding functions LZ4_uncompress() and LZ4_uncompress_unknownOutputSize()
// are safe against "buffer overflow" attack type
// since they will *never* write outside of the provided output buffer :
// they both check this condition *before* writing anything.
// A corrupted packet however can make them *read* within the first 64K before the output buffer.
/// <summary>
/// Decompress.
/// </summary>
/// <param name="source">compressed array</param>
/// <param name="dest">This must be the exact length of the decompressed item</param>
public void DecompressKnownSize(byte[] compressed, byte[] decompressed)
{
int len = DecompressKnownSize(compressed, decompressed, decompressed.Length);
Debug.Assert(len == decompressed.Length);
}
public int DecompressKnownSize(byte[] compressed, byte[] decompressedBuffer, int decompressedSize)
{
fixed (byte* src = compressed)
fixed (byte* dst = decompressedBuffer)
return DecompressKnownSize(src, dst, decompressedSize);
}
public int DecompressKnownSize(byte* compressed, byte* decompressedBuffer, int decompressedSize)
{
fixed (sbyte* dec = m_DecArray)
fixed (sbyte* dec2Ptr = m_Dec2table)
{
// Local Variables
byte* ip = (byte*)compressed;
byte* r;
byte* op = (byte*)decompressedBuffer;
byte* oend = op + decompressedSize;
byte* cpy;
byte token;
int len, length;
// Main Loop
while (true)
{
// get runLength
token = *ip++;
if ((length = (token >> LZ4Util.ML_BITS)) == LZ4Util.RUN_MASK) { for (; (len = *ip++) == 255; length += 255) { } length += len; }
cpy = op + length;
if (cpy > oend - LZ4Util.COPYLENGTH)
{
if (cpy > oend) goto _output_error;
LZ4Util.CopyMemory(op, ip, length);
ip += length;
break;
}
do { *(ulong*)op = *(ulong*)ip; op += 8; ip += 8; } while (op < cpy); ; ip -= (op - cpy); op = cpy;
// get offset
{ r = (cpy) - *(ushort*)ip; }; ip += 2;
if (r < decompressedBuffer) goto _output_error;
// get matchLength
if ((length = (int)(token & LZ4Util.ML_MASK)) == LZ4Util.ML_MASK) { for (; *ip == 255; length += 255) { ip++; } length += *ip++; }
// copy repeated sequence
if (op - r < STEPSIZE)
{
var dec2 = dec2Ptr[(int)(op - r)];
*op++ = *r++;
*op++ = *r++;
*op++ = *r++;
*op++ = *r++;
r -= dec[op - r];
*(uint*)op = *(uint*)r; op += STEPSIZE - 4;
r -= dec2;
}
else { *(ulong*)op = *(ulong*)r; op += 8; r += 8; ; }
cpy = op + length - (STEPSIZE - 4);
if (cpy > oend - LZ4Util.COPYLENGTH)
{
if (cpy > oend) goto _output_error;
if (op < (oend - LZ4Util.COPYLENGTH)) do { *(ulong*)op = *(ulong*)r; op += 8; r += 8; } while (op < (oend - LZ4Util.COPYLENGTH)); ;
while (op < cpy) *op++ = *r++;
op = cpy;
if (op == oend) break;
continue;
}
if (op < cpy) do { *(ulong*)op = *(ulong*)r; op += 8; r += 8; } while (op < cpy); ;
op = cpy; // correction
}
// end of decoding
return (int)(((byte*)ip) - compressed);
// write overflow error detected
_output_error:
return (int)(-(((byte*)ip) - compressed));
}
}
public byte[] Decompress(byte[] compressed)
{
int length = compressed.Length;
int len;
byte[] dest;
const int Multiplier = 4; // Just a number. Determines how fast length should increase.
do
{
length *= Multiplier;
dest = new byte[length];
len = Decompress(compressed, dest, compressed.Length);
}
while (len < 0 || dest.Length < len);
byte[] d = new byte[len];
Buffer.BlockCopy(dest, 0, d, 0, d.Length);
return d;
}
public int Decompress(byte[] compressed, byte[] decompressedBuffer)
{
return Decompress(compressed, decompressedBuffer, compressed.Length);
}
public int Decompress(byte[] compressedBuffer, byte[] decompressedBuffer, int compressedSize)
{
fixed (byte* src = compressedBuffer)
fixed (byte* dst = decompressedBuffer)
return Decompress(src, dst, compressedSize, decompressedBuffer.Length);
}
public int Decompress(byte[] compressedBuffer, int compressedPosition, byte[] decompressedBuffer, int decompressedPosition, int compressedSize)
{
fixed (byte* src = &compressedBuffer[compressedPosition])
fixed (byte* dst = &decompressedBuffer[decompressedPosition])
return Decompress(src, dst, compressedSize, decompressedBuffer.Length);
}
public int Decompress(
byte* compressedBuffer,
byte* decompressedBuffer,
int compressedSize,
int maxDecompressedSize)
{
fixed (sbyte* dec = m_DecArray)
fixed (sbyte* dec2Ptr = m_Dec2table)
{
// Local Variables
byte* ip = (byte*)compressedBuffer;
byte* iend = ip + compressedSize;
byte* r;
byte* op = (byte*)decompressedBuffer;
byte* oend = op + maxDecompressedSize;
byte* cpy;
byte token;
int len, length;
// Main Loop
while (ip < iend)
{
// get runLength
token = *ip++;
if ((length = (token >> LZ4Util.ML_BITS)) == LZ4Util.RUN_MASK) { int s = 255; while ((ip < iend) && (s == 255)) { s = *ip++; length += s; } }
// copy literals
cpy = op + length;
if ((cpy > oend - LZ4Util.COPYLENGTH) || (ip + length > iend - LZ4Util.COPYLENGTH))
{
if (cpy > oend) goto _output_error; // Error : request to write beyond destination buffer
if (ip + length > iend) goto _output_error; // Error : request to read beyond source buffer
LZ4Util.CopyMemory(op, ip, length);
op += length;
ip += length;
if (ip < iend) goto _output_error; // Error : LZ4 format violation
break; //Necessarily EOF
}
do { *(ulong*)op = *(ulong*)ip; op += 8; ip += 8; } while (op < cpy); ; ip -= (op - cpy); op = cpy;
// get offset
{ r = (cpy) - *(ushort*)ip; }; ip += 2;
if (r < decompressedBuffer) goto _output_error;
// get matchlength
if ((length = (int)(token & LZ4Util.ML_MASK)) == LZ4Util.ML_MASK) { while (ip < iend) { int s = *ip++; length += s; if (s == 255) continue; break; } }
// copy repeated sequence
if (op - r < STEPSIZE)
{
var dec2 = dec2Ptr[op - r];
*op++ = *r++;
*op++ = *r++;
*op++ = *r++;
*op++ = *r++;
r -= dec[op - r];
*(uint*)op = *(uint*)r; op += STEPSIZE - 4;
r -= dec2;
}
else { *(ulong*)op = *(ulong*)r; op += 8; r += 8; ; }
cpy = op + length - (STEPSIZE - 4);
if (cpy > oend - LZ4Util.COPYLENGTH)
{
if (cpy > oend) goto _output_error;
if (op < (oend - LZ4Util.COPYLENGTH)) do { *(ulong*)op = *(ulong*)r; op += 8; r += 8; } while (op < (oend - LZ4Util.COPYLENGTH)); ;
while (op < cpy) *op++ = *r++;
op = cpy;
if (op == oend) goto _output_error; // Check EOF (should never happen, since last 5 bytes are supposed to be literals)
continue;
}
if (op < cpy) do { *(ulong*)op = *(ulong*)r; op += 8; r += 8; } while (op < cpy); ;
op = cpy; // correction
}
return (int)(((byte*)op) - decompressedBuffer);
_output_error:
return (int)(-(((byte*)ip) - compressedBuffer));
}
}
}
}
| |
using System;
using System.IO;
using Raksha.Math;
using Raksha.Bcpg.Sig;
using Raksha.Crypto;
using Raksha.Crypto.Parameters;
using Raksha.Security;
using Raksha.Utilities;
namespace Raksha.Bcpg.OpenPgp
{
/// <remarks>Generator for PGP signatures.</remarks>
// TODO Should be able to implement ISigner?
public class PgpSignatureGenerator
{
private static readonly SignatureSubpacket[] EmptySignatureSubpackets = new SignatureSubpacket[0];
private PublicKeyAlgorithmTag keyAlgorithm;
private HashAlgorithmTag hashAlgorithm;
private PgpPrivateKey privKey;
private ISigner sig;
private IDigest dig;
private int signatureType;
private byte lastb;
private SignatureSubpacket[] unhashed = EmptySignatureSubpackets;
private SignatureSubpacket[] hashed = EmptySignatureSubpackets;
/// <summary>Create a generator for the passed in keyAlgorithm and hashAlgorithm codes.</summary>
public PgpSignatureGenerator(
PublicKeyAlgorithmTag keyAlgorithm,
HashAlgorithmTag hashAlgorithm)
{
this.keyAlgorithm = keyAlgorithm;
this.hashAlgorithm = hashAlgorithm;
dig = DigestUtilities.GetDigest(PgpUtilities.GetDigestName(hashAlgorithm));
sig = SignerUtilities.GetSigner(PgpUtilities.GetSignatureName(keyAlgorithm, hashAlgorithm));
}
/// <summary>Initialise the generator for signing.</summary>
public void InitSign(
int sigType,
PgpPrivateKey key)
{
InitSign(sigType, key, null);
}
/// <summary>Initialise the generator for signing.</summary>
public void InitSign(
int sigType,
PgpPrivateKey key,
SecureRandom random)
{
this.privKey = key;
this.signatureType = sigType;
try
{
ICipherParameters cp = key.Key;
if (random != null)
{
cp = new ParametersWithRandom(key.Key, random);
}
sig.Init(true, cp);
}
catch (InvalidKeyException e)
{
throw new PgpException("invalid key.", e);
}
dig.Reset();
lastb = 0;
}
public void Update(
byte b)
{
if (signatureType == PgpSignature.CanonicalTextDocument)
{
doCanonicalUpdateByte(b);
}
else
{
doUpdateByte(b);
}
}
private void doCanonicalUpdateByte(
byte b)
{
if (b == '\r')
{
doUpdateCRLF();
}
else if (b == '\n')
{
if (lastb != '\r')
{
doUpdateCRLF();
}
}
else
{
doUpdateByte(b);
}
lastb = b;
}
private void doUpdateCRLF()
{
doUpdateByte((byte)'\r');
doUpdateByte((byte)'\n');
}
private void doUpdateByte(
byte b)
{
sig.Update(b);
dig.Update(b);
}
public void Update(
params byte[] b)
{
Update(b, 0, b.Length);
}
public void Update(
byte[] b,
int off,
int len)
{
if (signatureType == PgpSignature.CanonicalTextDocument)
{
int finish = off + len;
for (int i = off; i != finish; i++)
{
doCanonicalUpdateByte(b[i]);
}
}
else
{
sig.BlockUpdate(b, off, len);
dig.BlockUpdate(b, off, len);
}
}
public void SetHashedSubpackets(
PgpSignatureSubpacketVector hashedPackets)
{
hashed = hashedPackets == null
? EmptySignatureSubpackets
: hashedPackets.ToSubpacketArray();
}
public void SetUnhashedSubpackets(
PgpSignatureSubpacketVector unhashedPackets)
{
unhashed = unhashedPackets == null
? EmptySignatureSubpackets
: unhashedPackets.ToSubpacketArray();
}
/// <summary>Return the one pass header associated with the current signature.</summary>
public PgpOnePassSignature GenerateOnePassVersion(
bool isNested)
{
return new PgpOnePassSignature(
new OnePassSignaturePacket(
signatureType, hashAlgorithm, keyAlgorithm, privKey.KeyId, isNested));
}
/// <summary>Return a signature object containing the current signature state.</summary>
public PgpSignature Generate()
{
SignatureSubpacket[] hPkts = hashed, unhPkts = unhashed;
if (!packetPresent(hashed, SignatureSubpacketTag.CreationTime))
{
hPkts = insertSubpacket(hPkts, new SignatureCreationTime(false, DateTime.UtcNow));
}
if (!packetPresent(hashed, SignatureSubpacketTag.IssuerKeyId)
&& !packetPresent(unhashed, SignatureSubpacketTag.IssuerKeyId))
{
unhPkts = insertSubpacket(unhPkts, new IssuerKeyId(false, privKey.KeyId));
}
int version = 4;
byte[] hData;
try
{
MemoryStream hOut = new MemoryStream();
for (int i = 0; i != hPkts.Length; i++)
{
hPkts[i].Encode(hOut);
}
byte[] data = hOut.ToArray();
MemoryStream sOut = new MemoryStream(data.Length + 6);
sOut.WriteByte((byte)version);
sOut.WriteByte((byte)signatureType);
sOut.WriteByte((byte)keyAlgorithm);
sOut.WriteByte((byte)hashAlgorithm);
sOut.WriteByte((byte)(data.Length >> 8));
sOut.WriteByte((byte)data.Length);
sOut.Write(data, 0, data.Length);
hData = sOut.ToArray();
}
catch (IOException e)
{
throw new PgpException("exception encoding hashed data.", e);
}
sig.BlockUpdate(hData, 0, hData.Length);
dig.BlockUpdate(hData, 0, hData.Length);
hData = new byte[]
{
(byte) version,
0xff,
(byte)(hData.Length >> 24),
(byte)(hData.Length >> 16),
(byte)(hData.Length >> 8),
(byte) hData.Length
};
sig.BlockUpdate(hData, 0, hData.Length);
dig.BlockUpdate(hData, 0, hData.Length);
byte[] sigBytes = sig.GenerateSignature();
byte[] digest = DigestUtilities.DoFinal(dig);
byte[] fingerPrint = new byte[] { digest[0], digest[1] };
// an RSA signature
bool isRsa = keyAlgorithm == PublicKeyAlgorithmTag.RsaSign
|| keyAlgorithm == PublicKeyAlgorithmTag.RsaGeneral;
MPInteger[] sigValues = isRsa
? PgpUtilities.RsaSigToMpi(sigBytes)
: PgpUtilities.DsaSigToMpi(sigBytes);
return new PgpSignature(
new SignaturePacket(signatureType, privKey.KeyId, keyAlgorithm,
hashAlgorithm, hPkts, unhPkts, fingerPrint, sigValues));
}
/// <summary>Generate a certification for the passed in ID and key.</summary>
/// <param name="id">The ID we are certifying against the public key.</param>
/// <param name="pubKey">The key we are certifying against the ID.</param>
/// <returns>The certification.</returns>
public PgpSignature GenerateCertification(
string id,
PgpPublicKey pubKey)
{
UpdateWithPublicKey(pubKey);
//
// hash in the id
//
UpdateWithIdData(0xb4, Strings.ToByteArray(id));
return Generate();
}
/// <summary>Generate a certification for the passed in userAttributes.</summary>
/// <param name="userAttributes">The ID we are certifying against the public key.</param>
/// <param name="pubKey">The key we are certifying against the ID.</param>
/// <returns>The certification.</returns>
public PgpSignature GenerateCertification(
PgpUserAttributeSubpacketVector userAttributes,
PgpPublicKey pubKey)
{
UpdateWithPublicKey(pubKey);
//
// hash in the attributes
//
try
{
MemoryStream bOut = new MemoryStream();
foreach (UserAttributeSubpacket packet in userAttributes.ToSubpacketArray())
{
packet.Encode(bOut);
}
UpdateWithIdData(0xd1, bOut.ToArray());
}
catch (IOException e)
{
throw new PgpException("cannot encode subpacket array", e);
}
return this.Generate();
}
/// <summary>Generate a certification for the passed in key against the passed in master key.</summary>
/// <param name="masterKey">The key we are certifying against.</param>
/// <param name="pubKey">The key we are certifying.</param>
/// <returns>The certification.</returns>
public PgpSignature GenerateCertification(
PgpPublicKey masterKey,
PgpPublicKey pubKey)
{
UpdateWithPublicKey(masterKey);
UpdateWithPublicKey(pubKey);
return Generate();
}
/// <summary>Generate a certification, such as a revocation, for the passed in key.</summary>
/// <param name="pubKey">The key we are certifying.</param>
/// <returns>The certification.</returns>
public PgpSignature GenerateCertification(
PgpPublicKey pubKey)
{
UpdateWithPublicKey(pubKey);
return Generate();
}
private byte[] GetEncodedPublicKey(
PgpPublicKey pubKey)
{
try
{
return pubKey.publicPk.GetEncodedContents();
}
catch (IOException e)
{
throw new PgpException("exception preparing key.", e);
}
}
private bool packetPresent(
SignatureSubpacket[] packets,
SignatureSubpacketTag type)
{
for (int i = 0; i != packets.Length; i++)
{
if (packets[i].SubpacketType == type)
{
return true;
}
}
return false;
}
private SignatureSubpacket[] insertSubpacket(
SignatureSubpacket[] packets,
SignatureSubpacket subpacket)
{
SignatureSubpacket[] tmp = new SignatureSubpacket[packets.Length + 1];
tmp[0] = subpacket;
packets.CopyTo(tmp, 1);
return tmp;
}
private void UpdateWithIdData(
int header,
byte[] idBytes)
{
this.Update(
(byte) header,
(byte)(idBytes.Length >> 24),
(byte)(idBytes.Length >> 16),
(byte)(idBytes.Length >> 8),
(byte)(idBytes.Length));
this.Update(idBytes);
}
private void UpdateWithPublicKey(
PgpPublicKey key)
{
byte[] keyBytes = GetEncodedPublicKey(key);
this.Update(
(byte) 0x99,
(byte)(keyBytes.Length >> 8),
(byte)(keyBytes.Length));
this.Update(keyBytes);
}
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the cloudfront-2015-04-17.normal.json service model.
*/
using System;
using System.IO;
using System.Text;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Amazon.CloudFront;
using Amazon.CloudFront.Model;
using Amazon.CloudFront.Model.Internal.MarshallTransformations;
using Amazon.Runtime.Internal.Transform;
using Amazon.Util;
using ServiceClientGenerator;
using AWSSDK_DotNet35.UnitTests.TestTools;
namespace AWSSDK_DotNet35.UnitTests.Marshalling
{
[TestClass]
public partial class CloudFrontMarshallingTests
{
static readonly ServiceModel service_model = Utils.LoadServiceModel("cloudfront-2015-04-17.normal.json", "cloudfront.customizations.json");
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void CreateCloudFrontOriginAccessIdentityMarshallTest()
{
var operation = service_model.FindOperation("CreateCloudFrontOriginAccessIdentity");
var request = InstantiateClassGenerator.Execute<CreateCloudFrontOriginAccessIdentityRequest>();
var marshaller = new CreateCloudFrontOriginAccessIdentityRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("CreateCloudFrontOriginAccessIdentity", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"Location","Location_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = CreateCloudFrontOriginAccessIdentityResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as CreateCloudFrontOriginAccessIdentityResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void CreateDistributionMarshallTest()
{
var operation = service_model.FindOperation("CreateDistribution");
var request = InstantiateClassGenerator.Execute<CreateDistributionRequest>();
var marshaller = new CreateDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("CreateDistribution", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"Location","Location_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = CreateDistributionResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as CreateDistributionResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void CreateInvalidationMarshallTest()
{
var operation = service_model.FindOperation("CreateInvalidation");
var request = InstantiateClassGenerator.Execute<CreateInvalidationRequest>();
var marshaller = new CreateInvalidationRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("CreateInvalidation", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"Location","Location_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = CreateInvalidationResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as CreateInvalidationResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void CreateStreamingDistributionMarshallTest()
{
var operation = service_model.FindOperation("CreateStreamingDistribution");
var request = InstantiateClassGenerator.Execute<CreateStreamingDistributionRequest>();
var marshaller = new CreateStreamingDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("CreateStreamingDistribution", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"Location","Location_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = CreateStreamingDistributionResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as CreateStreamingDistributionResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void DeleteCloudFrontOriginAccessIdentityMarshallTest()
{
var operation = service_model.FindOperation("DeleteCloudFrontOriginAccessIdentity");
var request = InstantiateClassGenerator.Execute<DeleteCloudFrontOriginAccessIdentityRequest>();
var marshaller = new DeleteCloudFrontOriginAccessIdentityRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("DeleteCloudFrontOriginAccessIdentity", request, internalRequest, service_model);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void DeleteDistributionMarshallTest()
{
var operation = service_model.FindOperation("DeleteDistribution");
var request = InstantiateClassGenerator.Execute<DeleteDistributionRequest>();
var marshaller = new DeleteDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("DeleteDistribution", request, internalRequest, service_model);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void DeleteStreamingDistributionMarshallTest()
{
var operation = service_model.FindOperation("DeleteStreamingDistribution");
var request = InstantiateClassGenerator.Execute<DeleteStreamingDistributionRequest>();
var marshaller = new DeleteStreamingDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("DeleteStreamingDistribution", request, internalRequest, service_model);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetCloudFrontOriginAccessIdentityMarshallTest()
{
var operation = service_model.FindOperation("GetCloudFrontOriginAccessIdentity");
var request = InstantiateClassGenerator.Execute<GetCloudFrontOriginAccessIdentityRequest>();
var marshaller = new GetCloudFrontOriginAccessIdentityRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetCloudFrontOriginAccessIdentity", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetCloudFrontOriginAccessIdentityResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetCloudFrontOriginAccessIdentityResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetCloudFrontOriginAccessIdentityConfigMarshallTest()
{
var operation = service_model.FindOperation("GetCloudFrontOriginAccessIdentityConfig");
var request = InstantiateClassGenerator.Execute<GetCloudFrontOriginAccessIdentityConfigRequest>();
var marshaller = new GetCloudFrontOriginAccessIdentityConfigRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetCloudFrontOriginAccessIdentityConfig", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetCloudFrontOriginAccessIdentityConfigResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetCloudFrontOriginAccessIdentityConfigResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetDistributionMarshallTest()
{
var operation = service_model.FindOperation("GetDistribution");
var request = InstantiateClassGenerator.Execute<GetDistributionRequest>();
var marshaller = new GetDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetDistribution", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetDistributionResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetDistributionResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetDistributionConfigMarshallTest()
{
var operation = service_model.FindOperation("GetDistributionConfig");
var request = InstantiateClassGenerator.Execute<GetDistributionConfigRequest>();
var marshaller = new GetDistributionConfigRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetDistributionConfig", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetDistributionConfigResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetDistributionConfigResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetInvalidationMarshallTest()
{
var operation = service_model.FindOperation("GetInvalidation");
var request = InstantiateClassGenerator.Execute<GetInvalidationRequest>();
var marshaller = new GetInvalidationRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetInvalidation", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetInvalidationResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetInvalidationResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetStreamingDistributionMarshallTest()
{
var operation = service_model.FindOperation("GetStreamingDistribution");
var request = InstantiateClassGenerator.Execute<GetStreamingDistributionRequest>();
var marshaller = new GetStreamingDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetStreamingDistribution", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetStreamingDistributionResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetStreamingDistributionResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void GetStreamingDistributionConfigMarshallTest()
{
var operation = service_model.FindOperation("GetStreamingDistributionConfig");
var request = InstantiateClassGenerator.Execute<GetStreamingDistributionConfigRequest>();
var marshaller = new GetStreamingDistributionConfigRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("GetStreamingDistributionConfig", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = GetStreamingDistributionConfigResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as GetStreamingDistributionConfigResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void ListCloudFrontOriginAccessIdentitiesMarshallTest()
{
var operation = service_model.FindOperation("ListCloudFrontOriginAccessIdentities");
var request = InstantiateClassGenerator.Execute<ListCloudFrontOriginAccessIdentitiesRequest>();
var marshaller = new ListCloudFrontOriginAccessIdentitiesRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("ListCloudFrontOriginAccessIdentities", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = ListCloudFrontOriginAccessIdentitiesResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as ListCloudFrontOriginAccessIdentitiesResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void ListDistributionsMarshallTest()
{
var operation = service_model.FindOperation("ListDistributions");
var request = InstantiateClassGenerator.Execute<ListDistributionsRequest>();
var marshaller = new ListDistributionsRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("ListDistributions", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = ListDistributionsResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as ListDistributionsResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void ListInvalidationsMarshallTest()
{
var operation = service_model.FindOperation("ListInvalidations");
var request = InstantiateClassGenerator.Execute<ListInvalidationsRequest>();
var marshaller = new ListInvalidationsRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("ListInvalidations", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = ListInvalidationsResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as ListInvalidationsResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void ListStreamingDistributionsMarshallTest()
{
var operation = service_model.FindOperation("ListStreamingDistributions");
var request = InstantiateClassGenerator.Execute<ListStreamingDistributionsRequest>();
var marshaller = new ListStreamingDistributionsRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("ListStreamingDistributions", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = ListStreamingDistributionsResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as ListStreamingDistributionsResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void UpdateCloudFrontOriginAccessIdentityMarshallTest()
{
var operation = service_model.FindOperation("UpdateCloudFrontOriginAccessIdentity");
var request = InstantiateClassGenerator.Execute<UpdateCloudFrontOriginAccessIdentityRequest>();
var marshaller = new UpdateCloudFrontOriginAccessIdentityRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("UpdateCloudFrontOriginAccessIdentity", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = UpdateCloudFrontOriginAccessIdentityResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as UpdateCloudFrontOriginAccessIdentityResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void UpdateDistributionMarshallTest()
{
var operation = service_model.FindOperation("UpdateDistribution");
var request = InstantiateClassGenerator.Execute<UpdateDistributionRequest>();
var marshaller = new UpdateDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("UpdateDistribution", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = UpdateDistributionResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as UpdateDistributionResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
[TestMethod]
[TestCategory("UnitTest")]
[TestCategory("Rest_Xml")]
[TestCategory("CloudFront")]
public void UpdateStreamingDistributionMarshallTest()
{
var operation = service_model.FindOperation("UpdateStreamingDistribution");
var request = InstantiateClassGenerator.Execute<UpdateStreamingDistributionRequest>();
var marshaller = new UpdateStreamingDistributionRequestMarshaller();
var internalRequest = marshaller.Marshall(request);
RequestValidator.Validate("UpdateStreamingDistribution", request, internalRequest, service_model);
var webResponse = new WebResponseData
{
Headers = {
{"ETag","ETag_Value"},
{"x-amzn-RequestId", Guid.NewGuid().ToString()},
{"x-amz-crc32","0"}
}
};
var payloadResponse = new XmlSampleGenerator(service_model, operation).Execute();
var context = new XmlUnmarshallerContext(Utils.CreateStreamFromString(payloadResponse), false, webResponse);
ResponseUnmarshaller unmarshaller = UpdateStreamingDistributionResponseUnmarshaller.Instance;
var response = unmarshaller.Unmarshall(context)
as UpdateStreamingDistributionResponse;
InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
}
}
}
| |
using System.Threading;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Barak.Benchmark.Tests
{
[TestClass]
public class CleanupAndWarmups
{
[TestMethod]
public void WarmupHitOnlyInTheFirstRun()
{
int warmup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatWarmup = false;
bench.DoWarmupInEachThread = false;
bench.SetTest((thread, index) => { });
bench.SetWarmup(() =>
{
Interlocked.Increment(ref warmup);
});
bench.Start();
Assert.AreEqual(1, warmup);
}
[TestMethod]
public void WarmupHitOnEveryTest()
{
int warmup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatWarmup = true;
bench.DoWarmupInEachThread = false;
bench.SetTest((thread, index) => { });
bench.SetWarmup(() =>
{
Interlocked.Increment(ref warmup);
});
bench.Start();
Assert.AreEqual(2, warmup);
}
[TestMethod]
public void WarmupHitOnEveryThreadForOnlySingleTimeTest()
{
int warmup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatWarmup = false;
bench.DoWarmupInEachThread = true;
bench.SetTest((thread, index) => { });
bench.SetWarmup(() =>
{
Interlocked.Increment(ref warmup);
});
bench.Start();
Assert.AreEqual(2, warmup);
}
[TestMethod]
public void WarmupHitOnEveryThreadForEveryThreadTest()
{
int warmup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatWarmup = true;
bench.DoWarmupInEachThread = true;
bench.SetTest((thread, index) => { });
bench.SetWarmup(() =>
{
Interlocked.Increment(ref warmup);
});
bench.Start();
Assert.AreEqual(4, warmup);
}
[TestMethod]
public void CleanupHappendsOnlyInTheLastRun()
{
int cleanUp = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 0;
bench.RepeatCleanup = false;
bench.DoCleanUpInEachThread = false;
int hitCount = 0;
bench.RepeatWarmup = false;
bench.DoWarmupInEachThread = false;
bench.SetTest((thread, index) => { hitCount++; });
bench.SetCleanup(() =>
{
Assert.AreEqual(2, hitCount);
cleanUp++;
});
bench.Start();
Assert.AreEqual(1, cleanUp);
}
[TestMethod]
public void CleanUpHitOnlyOnce()
{
int cleanup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatCleanup = false;
bench.DoCleanUpInEachThread = false;
bench.SetTest((thread, index) => { });
bench.SetCleanup(() =>
{
Interlocked.Increment(ref cleanup);
});
bench.Start();
Assert.AreEqual(1, cleanup);
}
[TestMethod]
public void CleanUpHitOnEveryTest()
{
int cleanup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatCleanup = true;
bench.DoCleanUpInEachThread = false;
bench.SetTest((thread, index) => { });
bench.SetCleanup(() =>
{
Interlocked.Increment(ref cleanup);
});
bench.Start();
Assert.AreEqual(2, cleanup);
}
[TestMethod]
public void CleanupHitOnEveryThreadForOnlySingleTimeTest()
{
int cleanup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatCleanup = false;
bench.DoCleanUpInEachThread = true;
bench.SetTest((thread, index) => { });
bench.SetCleanup(() =>
{
Interlocked.Increment(ref cleanup);
});
bench.Start();
Assert.AreEqual(2, cleanup);
}
[TestMethod]
public void CleanupHitOnEveryThreadForEveryThreadTest()
{
int cleanup = 0;
Bench bench = new Bench();
bench.RepeatCount = 2;
bench.ThreadCount = 2;
bench.RepeatCleanup = true;
bench.DoCleanUpInEachThread = true;
bench.SetTest((thread, index) => { });
bench.SetCleanup(() =>
{
Interlocked.Increment(ref cleanup);
});
bench.Start();
Assert.AreEqual(4, cleanup);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace OpenTokService.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create key and values
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.IO;
using System.Reflection;
using System.ComponentModel;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Management.Automation;
using System.Management.Automation.Provider;
using System.Xml;
using System.Collections;
using System.Collections.Generic;
using System.Management.Automation.Runspaces;
using System.Diagnostics.CodeAnalysis;
using Dbg = System.Management.Automation;
namespace Microsoft.WSMan.Management
{
#region Base class for cmdlets taking credential, authentication, certificatethumbprint
/// <summary>
/// Common base class for all WSMan cmdlets that
/// take Authentication, CertificateThumbprint and Credential parameters.
/// </summary>
public class AuthenticatingWSManCommand : PSCmdlet
{
/// <summary>
/// The following is the definition of the input parameter "Credential".
/// Specifies a user account that has permission to perform this action. The
/// default is the current user.
/// </summary>
[Parameter(ValueFromPipelineByPropertyName = true)]
[ValidateNotNullOrEmpty]
[Credential]
[Alias("cred", "c")]
public virtual PSCredential Credential
{
get { return credential; }
set
{
credential = value;
ValidateSpecifiedAuthentication();
}
}
private PSCredential credential;
/// <summary>
/// The following is the definition of the input parameter "Authentication".
/// This parameter takes a set of authentication methods the user can select
/// from. The available method are an enum called Authentication in the
/// System.Management.Automation.Runspaces namespace. The available options
/// should be as follows:
/// - Default : Use the default authentication (ad defined by the underlying
/// protocol) for establishing a remote connection.
/// - Negotiate
/// - Kerberos
/// - Basic: Use basic authentication for establishing a remote connection.
/// -CredSSP: Use CredSSP authentication for establishing a remote connection
/// which will enable the user to perform credential delegation. (i.e. second
/// hop)
/// </summary>
[Parameter]
[ValidateNotNullOrEmpty]
[Alias("auth", "am")]
public virtual AuthenticationMechanism Authentication
{
get { return authentication; }
set
{
authentication = value;
ValidateSpecifiedAuthentication();
}
}
private AuthenticationMechanism authentication = AuthenticationMechanism.Default;
/// <summary>
/// Specifies the certificate thumbprint to be used to impersonate the user on the
/// remote machine.
/// </summary>
[Parameter]
[ValidateNotNullOrEmpty]
public virtual string CertificateThumbprint
{
get { return thumbPrint; }
set
{
thumbPrint = value;
ValidateSpecifiedAuthentication();
}
}
private string thumbPrint = null;
internal void ValidateSpecifiedAuthentication()
{
WSManHelper.ValidateSpecifiedAuthentication(
this.Authentication,
this.Credential,
this.CertificateThumbprint);
}
}
#endregion
#region Connect-WsMan
/// <summary>
/// Connect wsman cmdlet.
/// </summary>
[Cmdlet(VerbsCommunications.Connect, "WSMan", DefaultParameterSetName = "ComputerName", HelpUri = "https://go.microsoft.com/fwlink/?LinkId=141437")]
public class ConnectWSManCommand : AuthenticatingWSManCommand
{
#region Parameters
/// <summary>
/// The following is the definition of the input parameter "ApplicationName".
/// ApplicationName identifies the remote endpoint.
/// </summary>
[Parameter(ParameterSetName = "ComputerName")]
[ValidateNotNullOrEmpty]
public string ApplicationName
{
get { return applicationname; }
set { applicationname = value; }
}
private string applicationname = null;
/// <summary>
/// The following is the definition of the input parameter "ComputerName".
/// Executes the management operation on the specified computer(s). The default
/// is the local computer. Type the fully qualified domain name, NETBIOS name or
/// IP address to indicate the remote host(s)
/// </summary>
[Parameter(ParameterSetName = "ComputerName", Position = 0)]
[Alias("cn")]
public string ComputerName
{
get { return computername; }
set
{
computername = value;
if ((string.IsNullOrEmpty(computername)) || (computername.Equals(".", StringComparison.OrdinalIgnoreCase)))
{
computername = "localhost";
}
}
}
private string computername = null;
/// <summary>
/// The following is the definition of the input parameter "ConnectionURI".
/// Specifies the transport, server, port, and ApplicationName of the new
/// runspace. The format of this string is:
/// transport://server:port/ApplicationName.
/// </summary>
[Parameter(ParameterSetName = "URI")]
[ValidateNotNullOrEmpty]
[SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "URI")]
public Uri ConnectionURI
{
get { return connectionuri; }
set { connectionuri = value; }
}
private Uri connectionuri;
/// <summary>
/// The following is the definition of the input parameter "OptionSet".
/// OptionSet is a hash table and is used to pass a set of switches to the
/// service to modify or refine the nature of the request.
/// </summary>
[Parameter]
[ValidateNotNullOrEmpty]
[Alias("os")]
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public Hashtable OptionSet
{
get { return optionset; }
set { optionset = value; }
}
private Hashtable optionset;
/// <summary>
/// The following is the definition of the input parameter "Port".
/// Specifies the port to be used when connecting to the ws management service.
/// </summary>
[Parameter]
[ValidateNotNullOrEmpty]
[Parameter(ParameterSetName = "ComputerName")]
[ValidateRange(1, Int32.MaxValue)]
public Int32 Port
{
get { return port; }
set { port = value; }
}
private Int32 port = 0;
/// <summary>
/// The following is the definition of the input parameter "SessionOption".
/// Defines a set of extended options for the WSMan session. This hashtable can
/// be created using New-WSManSessionOption.
/// </summary>
[Parameter]
[ValidateNotNullOrEmpty]
[Alias("so")]
[SuppressMessage("Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly")]
public SessionOption SessionOption
{
get { return sessionoption; }
set { sessionoption = value; }
}
private SessionOption sessionoption;
/// <summary>
/// The following is the definition of the input parameter "UseSSL".
/// Uses the Secure Sockets Layer (SSL) protocol to establish a connection to
/// the remote computer. If SSL is not available on the port specified by the
/// Port parameter, the command fails.
/// </summary>
[Parameter(ParameterSetName = "ComputerName")]
[SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "SSL")]
public SwitchParameter UseSSL
{
get { return usessl; }
set { usessl = value; }
}
private SwitchParameter usessl;
#endregion
/// <summary>
/// BeginProcessing method.
/// </summary>
protected override void BeginProcessing()
{
WSManHelper helper = new WSManHelper(this);
if (connectionuri != null)
{
try
{
// always in the format http://server:port/applicationname
string[] constrsplit = connectionuri.OriginalString.Split(new string[] { ":" + port + "/" + applicationname }, StringSplitOptions.None);
string[] constrsplit1 = constrsplit[0].Split(new string[] { "//" }, StringSplitOptions.None);
computername = constrsplit1[1].Trim();
}
catch (IndexOutOfRangeException)
{
helper.AssertError(helper.GetResourceMsgFromResourcetext("NotProperURI"), false, connectionuri);
}
}
string crtComputerName = computername;
if (crtComputerName == null)
{
crtComputerName = "localhost";
}
if (this.SessionState.Path.CurrentProviderLocation(WSManStringLiterals.rootpath).Path.StartsWith(this.SessionState.Drive.Current.Name + ":" + WSManStringLiterals.DefaultPathSeparator + crtComputerName, StringComparison.OrdinalIgnoreCase))
{
helper.AssertError(helper.GetResourceMsgFromResourcetext("ConnectFailure"), false, computername);
}
helper.CreateWsManConnection(ParameterSetName, connectionuri, port, computername, applicationname, usessl.IsPresent, Authentication, sessionoption, Credential, CertificateThumbprint);
}
}
#endregion
#region Disconnect-WSMAN
/// <summary>
/// The following is the definition of the input parameter "ComputerName".
/// Executes the management operation on the specified computer(s). The default
/// is the local computer. Type the fully qualified domain name, NETBIOS name or
/// IP address to indicate the remote host(s)
/// </summary>
[Cmdlet(VerbsCommunications.Disconnect, "WSMan", HelpUri = "https://go.microsoft.com/fwlink/?LinkId=141439")]
public class DisconnectWSManCommand : PSCmdlet, IDisposable
{
/// <summary>
/// The following is the definition of the input parameter "ComputerName".
/// Executes the management operation on the specified computer(s). The default
/// is the local computer. Type the fully qualified domain name, NETBIOS name or
/// IP address to indicate the remote host(s)
/// </summary>
[Parameter(Position = 0)]
public string ComputerName
{
get { return computername; }
set
{
computername = value;
if ((string.IsNullOrEmpty(computername)) || (computername.Equals(".", StringComparison.OrdinalIgnoreCase)))
{
computername = "localhost";
}
}
}
private string computername = null;
#region IDisposable Members
/// <summary>
/// Public dispose method.
/// </summary>
public
void
Dispose()
{
// CleanUp();
GC.SuppressFinalize(this);
}
/// <summary>
/// Public dispose method.
/// </summary>
public
void
Dispose(object session)
{
session = null;
this.Dispose();
}
#endregion IDisposable Members
/// <summary>
/// BeginProcessing method.
/// </summary>
protected override void BeginProcessing()
{
WSManHelper helper = new WSManHelper(this);
if (computername == null)
{
computername = "localhost";
}
if (this.SessionState.Path.CurrentProviderLocation(WSManStringLiterals.rootpath).Path.StartsWith(WSManStringLiterals.rootpath + ":" + WSManStringLiterals.DefaultPathSeparator + computername, StringComparison.OrdinalIgnoreCase))
{
helper.AssertError(helper.GetResourceMsgFromResourcetext("DisconnectFailure"), false, computername);
}
if (computername.Equals("localhost", StringComparison.OrdinalIgnoreCase))
{
helper.AssertError(helper.GetResourceMsgFromResourcetext("LocalHost"), false, computername);
}
object _ws = helper.RemoveFromDictionary(computername);
if (_ws != null)
{
Dispose(_ws);
}
else
{
helper.AssertError(helper.GetResourceMsgFromResourcetext("InvalidComputerName"), false, computername);
}
}
}
#endregion Disconnect-WSMAN
}
| |
//
// OpCode.cs
//
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2010 Jb Evain
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
namespace Mono.Cecil.Cil {
public enum FlowControl {
Branch,
Break,
Call,
Cond_Branch,
Meta,
Next,
Phi,
Return,
Throw,
}
public enum OpCodeType {
Annotation,
Macro,
Nternal,
Objmodel,
Prefix,
Primitive,
}
public enum OperandType {
InlineBrTarget,
InlineField,
InlineI,
InlineI8,
InlineMethod,
InlineNone,
InlinePhi,
InlineR,
InlineSig,
InlineString,
InlineSwitch,
InlineTok,
InlineType,
InlineVar,
InlineArg,
ShortInlineBrTarget,
ShortInlineI,
ShortInlineR,
ShortInlineVar,
ShortInlineArg,
}
public enum StackBehaviour {
Pop0,
Pop1,
Pop1_pop1,
Popi,
Popi_pop1,
Popi_popi,
Popi_popi8,
Popi_popi_popi,
Popi_popr4,
Popi_popr8,
Popref,
Popref_pop1,
Popref_popi,
Popref_popi_popi,
Popref_popi_popi8,
Popref_popi_popr4,
Popref_popi_popr8,
Popref_popi_popref,
PopAll,
Push0,
Push1,
Push1_push1,
Pushi,
Pushi8,
Pushr4,
Pushr8,
Pushref,
Varpop,
Varpush,
}
public struct OpCode {
readonly byte op1;
readonly byte op2;
readonly byte code;
readonly byte flow_control;
readonly byte opcode_type;
readonly byte operand_type;
readonly byte stack_behavior_pop;
readonly byte stack_behavior_push;
public string Name {
get { return OpCodeNames.names [op1 == 0xff ? op2 : op2 + 256]; }
}
public int Size {
get { return op1 == 0xff ? 1 : 2; }
}
public byte Op1 {
get { return op1; }
}
public byte Op2 {
get { return op2; }
}
public short Value {
get { return (short) ((op1 << 8) | op2); }
}
public Code Code {
get { return (Code) code; }
}
public FlowControl FlowControl {
get { return (FlowControl) flow_control; }
}
public OpCodeType OpCodeType {
get { return (OpCodeType) opcode_type; }
}
public OperandType OperandType {
get { return (OperandType) operand_type; }
}
public StackBehaviour StackBehaviourPop {
get { return (StackBehaviour) stack_behavior_pop; }
}
public StackBehaviour StackBehaviourPush {
get { return (StackBehaviour) stack_behavior_push; }
}
internal OpCode (int x, int y)
{
this.op1 = (byte) ((x >> 0) & 0xff);
this.op2 = (byte) ((x >> 8) & 0xff);
this.code = (byte) ((x >> 16) & 0xff);
this.flow_control = (byte) ((x >> 24) & 0xff);
this.opcode_type = (byte) ((y >> 0) & 0xff);
this.operand_type = (byte) ((y >> 8) & 0xff);
this.stack_behavior_pop = (byte) ((y >> 16) & 0xff);
this.stack_behavior_push = (byte) ((y >> 24) & 0xff);
if (op1 == 0xff)
OpCodes.OneByteOpCode [op2] = this;
else
OpCodes.TwoBytesOpCode [op2] = this;
}
public override int GetHashCode ()
{
return Value;
}
public override bool Equals (object obj)
{
if (!(obj is OpCode))
return false;
var opcode = (OpCode) obj;
return op1 == opcode.op1 && op2 == opcode.op2;
}
public bool Equals (OpCode opcode)
{
return op1 == opcode.op1 && op2 == opcode.op2;
}
public static bool operator == (OpCode one, OpCode other)
{
return one.op1 == other.op1 && one.op2 == other.op2;
}
public static bool operator != (OpCode one, OpCode other)
{
return one.op1 != other.op1 || one.op2 != other.op2;
}
public override string ToString ()
{
return Name;
}
}
static class OpCodeNames {
internal static readonly string [] names = {
"nop",
"break",
"ldarg.0",
"ldarg.1",
"ldarg.2",
"ldarg.3",
"ldloc.0",
"ldloc.1",
"ldloc.2",
"ldloc.3",
"stloc.0",
"stloc.1",
"stloc.2",
"stloc.3",
"ldarg.s",
"ldarga.s",
"starg.s",
"ldloc.s",
"ldloca.s",
"stloc.s",
"ldnull",
"ldc.i4.m1",
"ldc.i4.0",
"ldc.i4.1",
"ldc.i4.2",
"ldc.i4.3",
"ldc.i4.4",
"ldc.i4.5",
"ldc.i4.6",
"ldc.i4.7",
"ldc.i4.8",
"ldc.i4.s",
"ldc.i4",
"ldc.i8",
"ldc.r4",
"ldc.r8",
null,
"dup",
"pop",
"jmp",
"call",
"calli",
"ret",
"br.s",
"brfalse.s",
"brtrue.s",
"beq.s",
"bge.s",
"bgt.s",
"ble.s",
"blt.s",
"bne.un.s",
"bge.un.s",
"bgt.un.s",
"ble.un.s",
"blt.un.s",
"br",
"brfalse",
"brtrue",
"beq",
"bge",
"bgt",
"ble",
"blt",
"bne.un",
"bge.un",
"bgt.un",
"ble.un",
"blt.un",
"switch",
"ldind.i1",
"ldind.u1",
"ldind.i2",
"ldind.u2",
"ldind.i4",
"ldind.u4",
"ldind.i8",
"ldind.i",
"ldind.r4",
"ldind.r8",
"ldind.ref",
"stind.ref",
"stind.i1",
"stind.i2",
"stind.i4",
"stind.i8",
"stind.r4",
"stind.r8",
"add",
"sub",
"mul",
"div",
"div.un",
"rem",
"rem.un",
"and",
"or",
"xor",
"shl",
"shr",
"shr.un",
"neg",
"not",
"conv.i1",
"conv.i2",
"conv.i4",
"conv.i8",
"conv.r4",
"conv.r8",
"conv.u4",
"conv.u8",
"callvirt",
"cpobj",
"ldobj",
"ldstr",
"newobj",
"castclass",
"isinst",
"conv.r.un",
null,
null,
"unbox",
"throw",
"ldfld",
"ldflda",
"stfld",
"ldsfld",
"ldsflda",
"stsfld",
"stobj",
"conv.ovf.i1.un",
"conv.ovf.i2.un",
"conv.ovf.i4.un",
"conv.ovf.i8.un",
"conv.ovf.u1.un",
"conv.ovf.u2.un",
"conv.ovf.u4.un",
"conv.ovf.u8.un",
"conv.ovf.i.un",
"conv.ovf.u.un",
"box",
"newarr",
"ldlen",
"ldelema",
"ldelem.i1",
"ldelem.u1",
"ldelem.i2",
"ldelem.u2",
"ldelem.i4",
"ldelem.u4",
"ldelem.i8",
"ldelem.i",
"ldelem.r4",
"ldelem.r8",
"ldelem.ref",
"stelem.i",
"stelem.i1",
"stelem.i2",
"stelem.i4",
"stelem.i8",
"stelem.r4",
"stelem.r8",
"stelem.ref",
"ldelem.any",
"stelem.any",
"unbox.any",
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
"conv.ovf.i1",
"conv.ovf.u1",
"conv.ovf.i2",
"conv.ovf.u2",
"conv.ovf.i4",
"conv.ovf.u4",
"conv.ovf.i8",
"conv.ovf.u8",
null,
null,
null,
null,
null,
null,
null,
"refanyval",
"ckfinite",
null,
null,
"mkrefany",
null,
null,
null,
null,
null,
null,
null,
null,
null,
"ldtoken",
"conv.u2",
"conv.u1",
"conv.i",
"conv.ovf.i",
"conv.ovf.u",
"add.ovf",
"add.ovf.un",
"mul.ovf",
"mul.ovf.un",
"sub.ovf",
"sub.ovf.un",
"endfinally",
"leave",
"leave.s",
"stind.i",
"conv.u",
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
"prefix7",
"prefix6",
"prefix5",
"prefix4",
"prefix3",
"prefix2",
"prefix1",
"prefixref",
"arglist",
"ceq",
"cgt",
"cgt.un",
"clt",
"clt.un",
"ldftn",
"ldvirtftn",
null,
"ldarg",
"ldarga",
"starg",
"ldloc",
"ldloca",
"stloc",
"localloc",
null,
"endfilter",
"unaligned.",
"volatile.",
"tail.",
"initobj",
"constrained.",
"cpblk",
"initblk",
"no.", // added by spouliot to match Cecil existing definitions
"rethrow",
null,
"sizeof",
"refanytype",
"readonly.", // added by spouliot to match Cecil existing definitions
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
};
}
}
| |
using Orleans.Messaging;
using Orleans.Runtime.Configuration;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Orleans.SqlUtils;
namespace Orleans.Runtime.MembershipService
{
internal class SqlMembershipTable: IMembershipTable, IGatewayListProvider
{
private string deploymentId;
private TimeSpan maxStaleness;
private TraceLogger logger;
private RelationalOrleansQueries orleansQueries;
public async Task InitializeMembershipTable(GlobalConfiguration config, bool tryInitTableVersion, TraceLogger traceLogger)
{
logger = traceLogger;
deploymentId = config.DeploymentId;
if (logger.IsVerbose3) logger.Verbose3("SqlMembershipTable.InitializeMembershipTable called.");
//This initializes all of Orleans operational queries from the database using a well known view
//and assumes the database with appropriate defintions exists already.
orleansQueries = await RelationalOrleansQueries.CreateInstance(config.AdoInvariant, config.DataConnectionString);
// even if I am not the one who created the table,
// try to insert an initial table version if it is not already there,
// so we always have a first table version row, before this silo starts working.
if(tryInitTableVersion)
{
var wasCreated = await InitTableAsync();
if(wasCreated)
{
logger.Info("Created new table version row.");
}
}
}
public async Task InitializeGatewayListProvider(ClientConfiguration config, TraceLogger traceLogger)
{
logger = traceLogger;
if (logger.IsVerbose3) logger.Verbose3("SqlMembershipTable.InitializeGatewayListProvider called.");
deploymentId = config.DeploymentId;
maxStaleness = config.GatewayListRefreshPeriod;
orleansQueries = await RelationalOrleansQueries.CreateInstance(config.AdoInvariant, config.DataConnectionString);
}
public TimeSpan MaxStaleness
{
get { return maxStaleness; }
}
public bool IsUpdatable
{
get { return true; }
}
public async Task<IList<Uri>> GetGateways()
{
if (logger.IsVerbose3) logger.Verbose3("SqlMembershipTable.GetGateways called.");
try
{
return await orleansQueries.ActiveGatewaysAsync(deploymentId);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.Gateways failed {0}", ex);
throw;
}
}
public async Task<MembershipTableData> ReadRow(SiloAddress key)
{
if (logger.IsVerbose3) logger.Verbose3(string.Format("SqlMembershipTable.ReadRow called with key: {0}.", key));
try
{
return await orleansQueries.MembershipReadRowAsync(deploymentId, key);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.ReadRow failed: {0}", ex);
throw;
}
}
public async Task<MembershipTableData> ReadAll()
{
if (logger.IsVerbose3) logger.Verbose3("SqlMembershipTable.ReadAll called.");
try
{
return await orleansQueries.MembershipReadAllAsync(deploymentId);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.ReadAll failed: {0}", ex);
throw;
}
}
public async Task<bool> InsertRow(MembershipEntry entry, TableVersion tableVersion)
{
if (logger.IsVerbose3) logger.Verbose3(string.Format("SqlMembershipTable.InsertRow called with entry {0} and tableVersion {1}.", entry, tableVersion));
//The "tableVersion" parameter should always exist when inserting a row as Init should
//have been called and membership version created and read. This is an optimization to
//not to go through all the way to database to fail a conditional check on etag (which does
//exist for the sake of robustness) as mandated by Orleans membership protocol.
//Likewise, no update can be done without membership entry.
if (entry == null)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.InsertRow aborted due to null check. MembershipEntry is null.");
throw new ArgumentNullException("entry");
}
if (tableVersion == null)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.InsertRow aborted due to null check. TableVersion is null ");
throw new ArgumentNullException("tableVersion");
}
try
{
return await orleansQueries.InsertMembershipRowAsync(deploymentId, entry, tableVersion.VersionEtag);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.InsertRow failed: {0}", ex);
throw;
}
}
public async Task<bool> UpdateRow(MembershipEntry entry, string etag, TableVersion tableVersion)
{
if (logger.IsVerbose3) logger.Verbose3(string.Format("IMembershipTable.UpdateRow called with entry {0}, etag {1} and tableVersion {2}.", entry, etag, tableVersion));
//The "tableVersion" parameter should always exist when updating a row as Init should
//have been called and membership version created and read. This is an optimization to
//not to go through all the way to database to fail a conditional check (which does
//exist for the sake of robustness) as mandated by Orleans membership protocol.
//Likewise, no update can be done without membership entry or an etag.
if (entry == null)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.UpdateRow aborted due to null check. MembershipEntry is null.");
throw new ArgumentNullException("entry");
}
if (tableVersion == null)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.UpdateRow aborted due to null check. TableVersion is null ");
throw new ArgumentNullException("tableVersion");
}
try
{
return await orleansQueries.UpdateMembershipRowAsync(deploymentId, entry, tableVersion.VersionEtag);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.UpdateRow failed: {0}", ex);
throw;
}
}
public async Task UpdateIAmAlive(MembershipEntry entry)
{
if(logger.IsVerbose3) logger.Verbose3(string.Format("IMembershipTable.UpdateIAmAlive called with entry {0}.", entry));
if (entry == null)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.UpdateIAmAlive aborted due to null check. MembershipEntry is null.");
throw new ArgumentNullException("entry");
}
try
{
await orleansQueries.UpdateIAmAliveTimeAsync(deploymentId, entry.SiloAddress, entry.IAmAliveTime);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.UpdateIAmAlive failed: {0}", ex);
throw;
}
}
public async Task DeleteMembershipTableEntries(string deploymentId)
{
if (logger.IsVerbose3) logger.Verbose3(string.Format("IMembershipTable.DeleteMembershipTableEntries called with deploymentId {0}.", deploymentId));
try
{
await orleansQueries.DeleteMembershipTableEntriesAsync(deploymentId);
}
catch(Exception ex)
{
if (logger.IsVerbose) logger.Verbose("SqlMembershipTable.DeleteMembershipTableEntries failed: {0}", ex);
throw;
}
}
private async Task<bool> InitTableAsync()
{
try
{
return await orleansQueries.InsertMembershipVersionRowAsync(deploymentId);
}
catch(Exception ex)
{
if(logger.IsVerbose2) logger.Verbose2("Insert silo membership version failed: {0}", ex.ToString());
throw;
}
}
}
}
| |
using Pathfinding.Util;
using System.Collections.Generic;
using UnityEngine;
namespace Pathfinding {
/** Contains useful functions for working with paths and nodes.
* This class works a lot with the Node class, a useful function to get nodes is AstarPath.GetNearest.
* \see AstarPath.GetNearest
* \see Pathfinding.Utils.GraphUpdateUtilities
* \since Added in version 3.2
* \ingroup utils
*
*/
public static class PathUtilities {
/** Returns if there is a walkable path from \a n1 to \a n2.
* If you are making changes to the graph, areas must first be recaculated using FloodFill()
* \note This might return true for small areas even if there is no possible path if AstarPath.minAreaSize is greater than zero (0).
* So when using this, it is recommended to set AstarPath.minAreaSize to 0. (A* Inspector -> Settings -> Pathfinding)
* \see AstarPath.GetNearest
*/
public static bool IsPathPossible (GraphNode n1, GraphNode n2) {
return n1.Walkable && n2.Walkable && n1.Area == n2.Area;
}
/** Returns if there are walkable paths between all nodes.
* If you are making changes to the graph, areas must first be recaculated using FloodFill()
* \note This might return true for small areas even if there is no possible path if AstarPath.minAreaSize is greater than zero (0).
* So when using this, it is recommended to set AstarPath.minAreaSize to 0. (A* Inspector -> Settings -> Pathfinding)
*
* Returns true for empty lists
*
* \see AstarPath.GetNearest
*/
public static bool IsPathPossible (List<GraphNode> nodes) {
if (nodes.Count == 0) return true;
uint area = nodes[0].Area;
for (int i = 0; i < nodes.Count; i++) if (!nodes[i].Walkable || nodes[i].Area != area) return false;
return true;
}
/** Returns if there are walkable paths between all nodes.
* If you are making changes to the graph, areas should first be recaculated using FloodFill()
*
* This method will actually only check if the first node can reach all other nodes. However this is
* equivalent in 99% of the cases since almost always the graph connections are bidirectional.
* If you are not aware of any cases where you explicitly create unidirectional connections
* this method can be used without worries.
*
* Returns true for empty lists
*
* \warning This method is significantly slower than the IsPathPossible method which does not take a tagMask
*
* \see AstarPath.GetNearest
*/
public static bool IsPathPossible (List<GraphNode> nodes, int tagMask) {
if (nodes.Count == 0) return true;
// Make sure that the first node has a valid tag
if (((tagMask >> (int)nodes[0].Tag) & 1) == 0) return false;
// Fast check first
if (!IsPathPossible(nodes)) return false;
// Make sure that the first node can reach all other nodes
var reachable = GetReachableNodes(nodes[0], tagMask);
bool result = true;
// Make sure that the first node can reach all other nodes
for (int i = 1; i < nodes.Count; i++) {
if (!reachable.Contains(nodes[i])) {
result = false;
break;
}
}
// Pool the temporary list
ListPool<GraphNode>.Release(reachable);
return result;
}
/** Returns all nodes reachable from the seed node.
* This function performs a BFS (breadth-first-search) or flood fill of the graph and returns all nodes which can be reached from
* the seed node. In almost all cases this will be identical to returning all nodes which have the same area as the seed node.
* In the editor areas are displayed as different colors of the nodes.
* The only case where it will not be so is when there is a one way path from some part of the area to the seed node
* but no path from the seed node to that part of the graph.
*
* The returned list is sorted by node distance from the seed node
* i.e distance is measured in the number of nodes the shortest path from \a seed to that node would pass through.
* Note that the distance measurement does not take heuristics, penalties or tag penalties.
*
* Depending on the number of reachable nodes, this function can take quite some time to calculate
* so don't use it too often or it might affect the framerate of your game.
*
* \param seed The node to start the search from
* \param tagMask Optional mask for tags. This is a bitmask.
*
* \returns A List<Node> containing all nodes reachable from the seed node.
* For better memory management the returned list should be pooled, see Pathfinding.Util.ListPool
*/
public static List<GraphNode> GetReachableNodes (GraphNode seed, int tagMask = -1) {
Stack<GraphNode> stack = StackPool<GraphNode>.Claim();
List<GraphNode> list = ListPool<GraphNode>.Claim();
/** \todo Pool */
var map = new HashSet<GraphNode>();
GraphNodeDelegate callback;
if (tagMask == -1) {
callback = delegate(GraphNode node) {
if (node.Walkable && map.Add(node)) {
list.Add(node);
stack.Push(node);
}
};
} else {
callback = delegate(GraphNode node) {
if (node.Walkable && ((tagMask >> (int)node.Tag) & 0x1) != 0 && map.Add(node)) {
list.Add(node);
stack.Push(node);
}
};
}
callback(seed);
while (stack.Count > 0) {
stack.Pop().GetConnections(callback);
}
StackPool<GraphNode>.Release(stack);
return list;
}
static Queue<GraphNode> BFSQueue;
static Dictionary<GraphNode, int> BFSMap;
/** Returns all nodes up to a given node-distance from the seed node.
* This function performs a BFS (breadth-first-search) or flood fill of the graph and returns all nodes within a specified node distance which can be reached from
* the seed node. In almost all cases when \a depth is large enough this will be identical to returning all nodes which have the same area as the seed node.
* In the editor areas are displayed as different colors of the nodes.
* The only case where it will not be so is when there is a one way path from some part of the area to the seed node
* but no path from the seed node to that part of the graph.
*
* The returned list is sorted by node distance from the seed node
* i.e distance is measured in the number of nodes the shortest path from \a seed to that node would pass through.
* Note that the distance measurement does not take heuristics, penalties or tag penalties.
*
* Depending on the number of nodes, this function can take quite some time to calculate
* so don't use it too often or it might affect the framerate of your game.
*
* \param seed The node to start the search from.
* \param depth The maximum node-distance from the seed node.
* \param tagMask Optional mask for tags. This is a bitmask.
*
* \returns A List<Node> containing all nodes reachable up to a specified node distance from the seed node.
* For better memory management the returned list should be pooled, see Pathfinding.Util.ListPool
*
* \warning This method is not thread safe. Only use it from the Unity thread (i.e normal game code).
*/
public static List<GraphNode> BFS (GraphNode seed, int depth, int tagMask = -1) {
BFSQueue = BFSQueue ?? new Queue<GraphNode>();
var que = BFSQueue;
BFSMap = BFSMap ?? new Dictionary<GraphNode, int>();
var map = BFSMap;
// Even though we clear at the end of this function, it is good to
// do it here as well in case the previous invocation of the method
// threw an exception for some reason
// and didn't clear the que and map
que.Clear();
map.Clear();
List<GraphNode> result = ListPool<GraphNode>.Claim();
int currentDist = -1;
GraphNodeDelegate callback;
if (tagMask == -1) {
callback = node => {
if (node.Walkable && !map.ContainsKey(node)) {
map.Add(node, currentDist+1);
result.Add(node);
que.Enqueue(node);
}
};
} else {
callback = node => {
if (node.Walkable && ((tagMask >> (int)node.Tag) & 0x1) != 0 && !map.ContainsKey(node)) {
map.Add(node, currentDist+1);
result.Add(node);
que.Enqueue(node);
}
};
}
callback(seed);
while (que.Count > 0) {
GraphNode n = que.Dequeue();
currentDist = map[n];
if (currentDist >= depth) break;
n.GetConnections(callback);
}
que.Clear();
map.Clear();
return result;
}
/** Returns points in a spiral centered around the origin with a minimum clearance from other points.
* The points are laid out on the involute of a circle
* \see http://en.wikipedia.org/wiki/Involute
* Which has some nice properties.
* All points are separated by \a clearance world units.
* This method is O(n), yes if you read the code you will see a binary search, but that binary search
* has an upper bound on the number of steps, so it does not yield a log factor.
*
* \note Consider recycling the list after usage to reduce allocations.
* \see Pathfinding.Util.ListPool
*/
public static List<Vector3> GetSpiralPoints (int count, float clearance) {
List<Vector3> pts = ListPool<Vector3>.Claim(count);
// The radius of the smaller circle used for generating the involute of a circle
// Calculated from the separation distance between the turns
float a = clearance/(2*Mathf.PI);
float t = 0;
pts.Add(InvoluteOfCircle(a, t));
for (int i = 0; i < count; i++) {
Vector3 prev = pts[pts.Count-1];
// d = -t0/2 + sqrt( t0^2/4 + 2d/a )
// Minimum angle (radians) which would create an arc distance greater than clearance
float d = -t/2 + Mathf.Sqrt(t*t/4 + 2*clearance/a);
// Binary search for separating this point and the previous one
float mn = t + d;
float mx = t + 2*d;
while (mx - mn > 0.01f) {
float mid = (mn + mx)/2;
Vector3 p = InvoluteOfCircle(a, mid);
if ((p - prev).sqrMagnitude < clearance*clearance) {
mn = mid;
} else {
mx = mid;
}
}
pts.Add(InvoluteOfCircle(a, mx));
t = mx;
}
return pts;
}
/** Returns the XZ coordinate of the involute of circle.
* \see http://en.wikipedia.org/wiki/Involute
*/
private static Vector3 InvoluteOfCircle (float a, float t) {
return new Vector3(a*(Mathf.Cos(t) + t*Mathf.Sin(t)), 0, a*(Mathf.Sin(t) - t*Mathf.Cos(t)));
}
/** Will calculate a number of points around \a p which are on the graph and are separated by \a clearance from each other.
* This is like GetPointsAroundPoint except that \a previousPoints are treated as being in world space.
* The average of the points will be found and then that will be treated as the group center.
*/
public static void GetPointsAroundPointWorld (Vector3 p, IRaycastableGraph g, List<Vector3> previousPoints, float radius, float clearanceRadius) {
if (previousPoints.Count == 0) return;
Vector3 avg = Vector3.zero;
for (int i = 0; i < previousPoints.Count; i++) avg += previousPoints[i];
avg /= previousPoints.Count;
for (int i = 0; i < previousPoints.Count; i++) previousPoints[i] -= avg;
GetPointsAroundPoint(p, g, previousPoints, radius, clearanceRadius);
}
/** Will calculate a number of points around \a p which are on the graph and are separated by \a clearance from each other.
* The maximum distance from \a p to any point will be \a radius.
* Points will first be tried to be laid out as \a previousPoints and if that fails, random points will be selected.
* This is great if you want to pick a number of target points for group movement. If you pass all current agent points from e.g the group's average position
* this method will return target points so that the units move very little within the group, this is often aesthetically pleasing and reduces jitter if using
* some kind of local avoidance.
*
* \param p The point to generate points around
* \param g The graph to use for linecasting. If you are only using one graph, you can get this by AstarPath.active.graphs[0] as IRaycastableGraph.
* Note that not all graphs are raycastable, recast, navmesh and grid graphs are raycastable. On recast and navmesh it works the best.
* \param previousPoints The points to use for reference. Note that these should not be in world space. They are treated as relative to \a p.
* \param radius The final points will be at most this distance from \a p.
* \param clearanceRadius The points will if possible be at least this distance from each other.
*/
public static void GetPointsAroundPoint (Vector3 p, IRaycastableGraph g, List<Vector3> previousPoints, float radius, float clearanceRadius) {
if (g == null) throw new System.ArgumentNullException("g");
var graph = g as NavGraph;
if (graph == null) throw new System.ArgumentException("g is not a NavGraph");
NNInfo nn = graph.GetNearestForce(p, NNConstraint.Default);
p = nn.clampedPosition;
if (nn.node == null) {
// No valid point to start from
return;
}
// Make sure the enclosing circle has a radius which can pack circles with packing density 0.5
radius = Mathf.Max(radius, 1.4142f*clearanceRadius*Mathf.Sqrt(previousPoints.Count)); //Mathf.Sqrt(previousPoints.Count*clearanceRadius*2));
clearanceRadius *= clearanceRadius;
for (int i = 0; i < previousPoints.Count; i++) {
Vector3 dir = previousPoints[i];
float magn = dir.magnitude;
if (magn > 0) dir /= magn;
float newMagn = radius;//magn > radius ? radius : magn;
dir *= newMagn;
bool worked = false;
GraphHitInfo hit;
int tests = 0;
do {
Vector3 pt = p + dir;
if (g.Linecast(p, pt, nn.node, out hit)) {
pt = hit.point;
}
for (float q = 0.1f; q <= 1.0f; q += 0.05f) {
Vector3 qt = (pt - p)*q + p;
worked = true;
for (int j = 0; j < i; j++) {
if ((previousPoints[j] - qt).sqrMagnitude < clearanceRadius) {
worked = false;
break;
}
}
if (worked) {
previousPoints[i] = qt;
break;
}
}
if (!worked) {
// Abort after 8 tries
if (tests > 8) {
worked = true;
} else {
clearanceRadius *= 0.9f;
// This will pick points in 2D closer to the edge of the circle with a higher probability
dir = Random.onUnitSphere * Mathf.Lerp(newMagn, radius, tests / 5);
dir.y = 0;
tests++;
}
}
} while (!worked);
}
}
/** Returns randomly selected points on the specified nodes with each point being separated by \a clearanceRadius from each other.
* Selecting points ON the nodes only works for TriangleMeshNode (used by Recast Graph and Navmesh Graph) and GridNode (used by GridGraph).
* For other node types, only the positions of the nodes will be used.
*
* clearanceRadius will be reduced if no valid points can be found.
*/
public static List<Vector3> GetPointsOnNodes (List<GraphNode> nodes, int count, float clearanceRadius = 0) {
if (nodes == null) throw new System.ArgumentNullException("nodes");
if (nodes.Count == 0) throw new System.ArgumentException("no nodes passed");
var rnd = new System.Random();
List<Vector3> pts = ListPool<Vector3>.Claim(count);
// Square
clearanceRadius *= clearanceRadius;
if (nodes[0] is TriangleMeshNode
|| nodes[0] is GridNode
) {
// Accumulated area of all nodes
List<float> accs = ListPool<float>.Claim(nodes.Count);
// Total area of all nodes so far
float tot = 0;
for (int i = 0; i < nodes.Count; i++) {
var tnode = nodes[i] as TriangleMeshNode;
if (tnode != null) {
/** \bug Doesn't this need to be divided by 2? */
float a = System.Math.Abs(VectorMath.SignedTriangleAreaTimes2XZ(tnode.GetVertex(0), tnode.GetVertex(1), tnode.GetVertex(2)));
tot += a;
accs.Add(tot);
} else {
var gnode = nodes[i] as GridNode;
if (gnode != null) {
GridGraph gg = GridNode.GetGridGraph(gnode.GraphIndex);
float a = gg.nodeSize*gg.nodeSize;
tot += a;
accs.Add(tot);
} else {
accs.Add(tot);
}
}
}
for (int i = 0; i < count; i++) {
//Pick point
int testCount = 0;
int testLimit = 10;
bool worked = false;
while (!worked) {
worked = true;
//If no valid points can be found, progressively lower the clearance radius until such a point is found
if (testCount >= testLimit) {
clearanceRadius *= 0.8f;
testLimit += 10;
if (testLimit > 100) clearanceRadius = 0;
}
// Pick a random node among the ones in the list weighted by their area
float tg = (float)rnd.NextDouble()*tot;
int v = accs.BinarySearch(tg);
if (v < 0) v = ~v;
if (v >= nodes.Count) {
// This shouldn't happen, due to NextDouble being smaller than 1... but I don't trust floating point arithmetic.
worked = false;
continue;
}
var node = nodes[v] as TriangleMeshNode;
Vector3 p;
if (node != null) {
// Find a random point inside the triangle
// This generates uniformly distributed trilinear coordinates
// See http://mathworld.wolfram.com/TrianglePointPicking.html
float v1;
float v2;
do {
v1 = (float)rnd.NextDouble();
v2 = (float)rnd.NextDouble();
} while (v1+v2 > 1);
// Pick the point corresponding to the trilinear coordinate
p = ((Vector3)(node.GetVertex(1)-node.GetVertex(0)))*v1 + ((Vector3)(node.GetVertex(2)-node.GetVertex(0)))*v2 + (Vector3)node.GetVertex(0);
} else {
var gnode = nodes[v] as GridNode;
if (gnode != null) {
GridGraph gg = GridNode.GetGridGraph(gnode.GraphIndex);
float v1 = (float)rnd.NextDouble();
float v2 = (float)rnd.NextDouble();
p = (Vector3)gnode.position + new Vector3(v1 - 0.5f, 0, v2 - 0.5f) * gg.nodeSize;
} else {
//Point nodes have no area, so we break directly instead
pts.Add((Vector3)nodes[v].position);
break;
}
}
// Test if it is some distance away from the other points
if (clearanceRadius > 0) {
for (int j = 0; j < pts.Count; j++) {
if ((pts[j]-p).sqrMagnitude < clearanceRadius) {
worked = false;
break;
}
}
}
if (worked) {
pts.Add(p);
break;
}
testCount++;
}
}
ListPool<float>.Release(accs);
} else {
for (int i = 0; i < count; i++) {
pts.Add((Vector3)nodes[rnd.Next(nodes.Count)].position);
}
}
return pts;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Mime;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Infrastructure;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Cache;
using Umbraco.Cms.Core.Configuration.Models;
using Umbraco.Cms.Core.ContentApps;
using Umbraco.Cms.Core.Dictionary;
using Umbraco.Cms.Core.Events;
using Umbraco.Cms.Core.Hosting;
using Umbraco.Cms.Core.IO;
using Umbraco.Cms.Core.Mapping;
using Umbraco.Cms.Core.Media;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Models.ContentEditing;
using Umbraco.Cms.Core.Models.Entities;
using Umbraco.Cms.Core.Models.Validation;
using Umbraco.Cms.Core.Persistence.Querying;
using Umbraco.Cms.Core.PropertyEditors;
using Umbraco.Cms.Core.Security;
using Umbraco.Cms.Core.Serialization;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Core.Strings;
using Umbraco.Cms.Infrastructure.Persistence;
using Umbraco.Cms.Web.BackOffice.ActionResults;
using Umbraco.Cms.Web.BackOffice.Authorization;
using Umbraco.Cms.Web.BackOffice.Extensions;
using Umbraco.Cms.Web.BackOffice.Filters;
using Umbraco.Cms.Web.BackOffice.ModelBinders;
using Umbraco.Cms.Web.Common.ActionsResults;
using Umbraco.Cms.Web.Common.Attributes;
using Umbraco.Cms.Web.Common.Authorization;
using Umbraco.Extensions;
namespace Umbraco.Cms.Web.BackOffice.Controllers
{
/// <remarks>
/// This controller is decorated with the UmbracoApplicationAuthorizeAttribute which means that any user requesting
/// access to ALL of the methods on this controller will need access to the media application.
/// </remarks>
[PluginController(Constants.Web.Mvc.BackOfficeApiArea)]
[Authorize(Policy = AuthorizationPolicies.SectionAccessMedia)]
[ParameterSwapControllerActionSelector(nameof(GetById), "id", typeof(int), typeof(Guid), typeof(Udi))]
[ParameterSwapControllerActionSelector(nameof(GetChildren), "id", typeof(int), typeof(Guid), typeof(Udi))]
public class MediaController : ContentControllerBase
{
private readonly IShortStringHelper _shortStringHelper;
private readonly ContentSettings _contentSettings;
private readonly IMediaTypeService _mediaTypeService;
private readonly IMediaService _mediaService;
private readonly IEntityService _entityService;
private readonly IBackOfficeSecurityAccessor _backofficeSecurityAccessor;
private readonly IUmbracoMapper _umbracoMapper;
private readonly IDataTypeService _dataTypeService;
private readonly ILocalizedTextService _localizedTextService;
private readonly ISqlContext _sqlContext;
private readonly IContentTypeBaseServiceProvider _contentTypeBaseServiceProvider;
private readonly IRelationService _relationService;
private readonly IImageUrlGenerator _imageUrlGenerator;
private readonly IAuthorizationService _authorizationService;
private readonly AppCaches _appCaches;
private readonly ILogger<MediaController> _logger;
public MediaController(
ICultureDictionary cultureDictionary,
ILoggerFactory loggerFactory,
IShortStringHelper shortStringHelper,
IEventMessagesFactory eventMessages,
ILocalizedTextService localizedTextService,
IOptions<ContentSettings> contentSettings,
IMediaTypeService mediaTypeService,
IMediaService mediaService,
IEntityService entityService,
IBackOfficeSecurityAccessor backofficeSecurityAccessor,
IUmbracoMapper umbracoMapper,
IDataTypeService dataTypeService,
ISqlContext sqlContext,
IContentTypeBaseServiceProvider contentTypeBaseServiceProvider,
IRelationService relationService,
PropertyEditorCollection propertyEditors,
MediaFileManager mediaFileManager,
MediaUrlGeneratorCollection mediaUrlGenerators,
IHostingEnvironment hostingEnvironment,
IImageUrlGenerator imageUrlGenerator,
IJsonSerializer serializer,
IAuthorizationService authorizationService,
AppCaches appCaches)
: base(cultureDictionary, loggerFactory, shortStringHelper, eventMessages, localizedTextService, serializer)
{
_shortStringHelper = shortStringHelper;
_contentSettings = contentSettings.Value;
_mediaTypeService = mediaTypeService;
_mediaService = mediaService;
_entityService = entityService;
_backofficeSecurityAccessor = backofficeSecurityAccessor;
_umbracoMapper = umbracoMapper;
_dataTypeService = dataTypeService;
_localizedTextService = localizedTextService;
_sqlContext = sqlContext;
_contentTypeBaseServiceProvider = contentTypeBaseServiceProvider;
_relationService = relationService;
_propertyEditors = propertyEditors;
_mediaFileManager = mediaFileManager;
_mediaUrlGenerators = mediaUrlGenerators;
_hostingEnvironment = hostingEnvironment;
_logger = loggerFactory.CreateLogger<MediaController>();
_imageUrlGenerator = imageUrlGenerator;
_authorizationService = authorizationService;
_appCaches = appCaches;
}
/// <summary>
/// Gets an empty content item for the
/// </summary>
/// <param name="contentTypeAlias"></param>
/// <param name="parentId"></param>
/// <returns></returns>
[OutgoingEditorModelEvent]
public ActionResult<MediaItemDisplay> GetEmpty(string contentTypeAlias, int parentId)
{
var contentType = _mediaTypeService.Get(contentTypeAlias);
if (contentType == null)
{
return NotFound();
}
var emptyContent = _mediaService.CreateMedia("", parentId, contentType.Alias, _backofficeSecurityAccessor.BackOfficeSecurity.GetUserId().ResultOr(Constants.Security.SuperUserId));
var mapped = _umbracoMapper.Map<MediaItemDisplay>(emptyContent);
//remove the listview app if it exists
mapped.ContentApps = mapped.ContentApps.Where(x => x.Alias != "umbListView").ToList();
return mapped;
}
/// <summary>
/// Returns an item to be used to display the recycle bin for media
/// </summary>
/// <returns></returns>
public MediaItemDisplay GetRecycleBin()
{
var apps = new List<ContentApp>();
apps.Add(ListViewContentAppFactory.CreateContentApp(_dataTypeService, _propertyEditors, "recycleBin", "media", Constants.DataTypes.DefaultMediaListView));
apps[0].Active = true;
var display = new MediaItemDisplay
{
Id = Constants.System.RecycleBinMedia,
Alias = "recycleBin",
ParentId = -1,
Name = _localizedTextService.Localize("general", "recycleBin"),
ContentTypeAlias = "recycleBin",
CreateDate = DateTime.Now,
IsContainer = true,
Path = "-1," + Constants.System.RecycleBinMedia,
ContentApps = apps
};
return display;
}
/// <summary>
/// Gets the media item by id
/// </summary>
/// <param name="id"></param>
/// <returns></returns>
[OutgoingEditorModelEvent]
[Authorize(Policy = AuthorizationPolicies.MediaPermissionPathById)]
public MediaItemDisplay GetById(int id)
{
var foundMedia = GetObjectFromRequest(() => _mediaService.GetById(id));
if (foundMedia == null)
{
HandleContentNotFound(id);
//HandleContentNotFound will throw an exception
return null;
}
return _umbracoMapper.Map<MediaItemDisplay>(foundMedia);
}
/// <summary>
/// Gets the media item by id
/// </summary>
/// <param name="id"></param>
/// <returns></returns>
[OutgoingEditorModelEvent]
[Authorize(Policy = AuthorizationPolicies.MediaPermissionPathById)]
public MediaItemDisplay GetById(Guid id)
{
var foundMedia = GetObjectFromRequest(() => _mediaService.GetById(id));
if (foundMedia == null)
{
HandleContentNotFound(id);
//HandleContentNotFound will throw an exception
return null;
}
return _umbracoMapper.Map<MediaItemDisplay>(foundMedia);
}
/// <summary>
/// Gets the media item by id
/// </summary>
/// <param name="id"></param>
/// <returns></returns>
[OutgoingEditorModelEvent]
[Authorize(Policy = AuthorizationPolicies.MediaPermissionPathById)]
public ActionResult<MediaItemDisplay> GetById(Udi id)
{
var guidUdi = id as GuidUdi;
if (guidUdi != null)
{
return GetById(guidUdi.Guid);
}
return NotFound();
}
/// <summary>
/// Return media for the specified ids
/// </summary>
/// <param name="ids"></param>
/// <returns></returns>
[FilterAllowedOutgoingMedia(typeof(IEnumerable<MediaItemDisplay>))]
public IEnumerable<MediaItemDisplay> GetByIds([FromQuery] int[] ids)
{
var foundMedia = _mediaService.GetByIds(ids);
return foundMedia.Select(media => _umbracoMapper.Map<MediaItemDisplay>(media));
}
/// <summary>
/// Returns a paged result of media items known to be of a "Folder" type
/// </summary>
/// <param name="id"></param>
/// <param name="pageNumber"></param>
/// <param name="pageSize"></param>
/// <returns></returns>
public PagedResult<ContentItemBasic<ContentPropertyBasic>> GetChildFolders(int id, int pageNumber = 1, int pageSize = 1000)
{
//Suggested convention for folder mediatypes - we can make this more or less complicated as long as we document it...
//if you create a media type, which has an alias that ends with ...Folder then its a folder: ex: "secureFolder", "bannerFolder", "Folder"
var folderTypes = _mediaTypeService
.GetAll()
.Where(x => x.Alias.EndsWith("Folder"))
.Select(x => x.Id)
.ToArray();
if (folderTypes.Length == 0)
{
return new PagedResult<ContentItemBasic<ContentPropertyBasic>>(0, pageNumber, pageSize);
}
long total;
var children = _mediaService.GetPagedChildren(id, pageNumber - 1, pageSize, out total,
//lookup these content types
_sqlContext.Query<IMedia>().Where(x => folderTypes.Contains(x.ContentTypeId)),
Ordering.By("Name", Direction.Ascending));
return new PagedResult<ContentItemBasic<ContentPropertyBasic>>(total, pageNumber, pageSize)
{
Items = children.Select(_umbracoMapper.Map<IMedia, ContentItemBasic<ContentPropertyBasic>>)
};
}
/// <summary>
/// Returns the root media objects
/// </summary>
[FilterAllowedOutgoingMedia(typeof(IEnumerable<ContentItemBasic<ContentPropertyBasic>>))]
public IEnumerable<ContentItemBasic<ContentPropertyBasic>> GetRootMedia()
{
// TODO: Add permissions check!
return _mediaService.GetRootMedia()
.Select(_umbracoMapper.Map<IMedia, ContentItemBasic<ContentPropertyBasic>>);
}
#region GetChildren
private int[] _userStartNodes;
private readonly PropertyEditorCollection _propertyEditors;
private readonly MediaFileManager _mediaFileManager;
private readonly MediaUrlGeneratorCollection _mediaUrlGenerators;
private readonly IHostingEnvironment _hostingEnvironment;
protected int[] UserStartNodes
{
get { return _userStartNodes ?? (_userStartNodes = _backofficeSecurityAccessor.BackOfficeSecurity.CurrentUser.CalculateMediaStartNodeIds(_entityService, _appCaches)); }
}
/// <summary>
/// Returns the child media objects - using the entity INT id
/// </summary>
[FilterAllowedOutgoingMedia(typeof(IEnumerable<ContentItemBasic<ContentPropertyBasic>>), "Items")]
public PagedResult<ContentItemBasic<ContentPropertyBasic>> GetChildren(int id,
int pageNumber = 0,
int pageSize = 0,
string orderBy = "SortOrder",
Direction orderDirection = Direction.Ascending,
bool orderBySystemField = true,
string filter = "")
{
//if a request is made for the root node data but the user's start node is not the default, then
// we need to return their start nodes
if (id == Constants.System.Root && UserStartNodes.Length > 0 && UserStartNodes.Contains(Constants.System.Root) == false)
{
if (pageNumber > 0)
return new PagedResult<ContentItemBasic<ContentPropertyBasic>>(0, 0, 0);
var nodes = _mediaService.GetByIds(UserStartNodes).ToArray();
if (nodes.Length == 0)
return new PagedResult<ContentItemBasic<ContentPropertyBasic>>(0, 0, 0);
if (pageSize < nodes.Length)
pageSize = nodes.Length; // bah
var pr = new PagedResult<ContentItemBasic<ContentPropertyBasic>>(nodes.Length, pageNumber, pageSize)
{
Items = nodes.Select(_umbracoMapper.Map<IMedia, ContentItemBasic<ContentPropertyBasic>>)
};
return pr;
}
// else proceed as usual
long totalChildren;
List<IMedia> children;
if (pageNumber > 0 && pageSize > 0)
{
IQuery<IMedia> queryFilter = null;
if (filter.IsNullOrWhiteSpace() == false)
{
//add the default text filter
queryFilter = _sqlContext.Query<IMedia>()
.Where(x => x.Name.Contains(filter));
}
children = _mediaService
.GetPagedChildren(
id, (pageNumber - 1), pageSize,
out totalChildren,
queryFilter,
Ordering.By(orderBy, orderDirection, isCustomField: !orderBySystemField)).ToList();
}
else
{
//better to not use this without paging where possible, currently only the sort dialog does
children = _mediaService.GetPagedChildren(id, 0, int.MaxValue, out var total).ToList();
totalChildren = children.Count;
}
if (totalChildren == 0)
{
return new PagedResult<ContentItemBasic<ContentPropertyBasic>>(0, 0, 0);
}
var pagedResult = new PagedResult<ContentItemBasic<ContentPropertyBasic>>(totalChildren, pageNumber, pageSize);
pagedResult.Items = children
.Select(_umbracoMapper.Map<IMedia, ContentItemBasic<ContentPropertyBasic>>);
return pagedResult;
}
/// <summary>
/// Returns the child media objects - using the entity GUID id
/// </summary>
/// <param name="id"></param>
/// <param name="pageNumber"></param>
/// <param name="pageSize"></param>
/// <param name="orderBy"></param>
/// <param name="orderDirection"></param>
/// <param name="orderBySystemField"></param>
/// <param name="filter"></param>
/// <returns></returns>
[FilterAllowedOutgoingMedia(typeof(IEnumerable<ContentItemBasic<ContentPropertyBasic>>), "Items")]
public ActionResult<PagedResult<ContentItemBasic<ContentPropertyBasic>>> GetChildren(Guid id,
int pageNumber = 0,
int pageSize = 0,
string orderBy = "SortOrder",
Direction orderDirection = Direction.Ascending,
bool orderBySystemField = true,
string filter = "")
{
var entity = _entityService.Get(id);
if (entity != null)
{
return GetChildren(entity.Id, pageNumber, pageSize, orderBy, orderDirection, orderBySystemField, filter);
}
return NotFound();
}
/// <summary>
/// Returns the child media objects - using the entity UDI id
/// </summary>
/// <param name="id"></param>
/// <param name="pageNumber"></param>
/// <param name="pageSize"></param>
/// <param name="orderBy"></param>
/// <param name="orderDirection"></param>
/// <param name="orderBySystemField"></param>
/// <param name="filter"></param>
/// <returns></returns>
[FilterAllowedOutgoingMedia(typeof(IEnumerable<ContentItemBasic<ContentPropertyBasic>>), "Items")]
public ActionResult<PagedResult<ContentItemBasic<ContentPropertyBasic>>> GetChildren(Udi id,
int pageNumber = 0,
int pageSize = 0,
string orderBy = "SortOrder",
Direction orderDirection = Direction.Ascending,
bool orderBySystemField = true,
string filter = "")
{
var guidUdi = id as GuidUdi;
if (guidUdi != null)
{
var entity = _entityService.Get(guidUdi.Guid);
if (entity != null)
{
return GetChildren(entity.Id, pageNumber, pageSize, orderBy, orderDirection, orderBySystemField, filter);
}
}
return NotFound();
}
#endregion
/// <summary>
/// Moves an item to the recycle bin, if it is already there then it will permanently delete it
/// </summary>
/// <param name="id"></param>
/// <returns></returns>
[Authorize(Policy = AuthorizationPolicies.MediaPermissionPathById)]
[HttpPost]
public IActionResult DeleteById(int id)
{
var foundMedia = GetObjectFromRequest(() => _mediaService.GetById(id));
if (foundMedia == null)
{
return HandleContentNotFound(id);
}
//if the current item is in the recycle bin
if (foundMedia.Trashed == false)
{
var moveResult = _mediaService.MoveToRecycleBin(foundMedia, _backofficeSecurityAccessor.BackOfficeSecurity.GetUserId().ResultOr(Constants.Security.SuperUserId));
if (moveResult == false)
{
return ValidationProblem();
}
}
else
{
var deleteResult = _mediaService.Delete(foundMedia, _backofficeSecurityAccessor.BackOfficeSecurity.GetUserId().ResultOr(Constants.Security.SuperUserId));
if (deleteResult == false)
{
return ValidationProblem();
}
}
return Ok();
}
/// <summary>
/// Change the sort order for media
/// </summary>
/// <param name="move"></param>
/// <returns></returns>
public async Task<IActionResult> PostMove(MoveOrCopy move)
{
// Authorize...
var requirement = new MediaPermissionsResourceRequirement();
var authorizationResult = await _authorizationService.AuthorizeAsync(User, new MediaPermissionsResource(_mediaService.GetById(move.Id)), requirement);
if (!authorizationResult.Succeeded)
{
return Forbid();
}
var toMoveResult = ValidateMoveOrCopy(move);
var toMove = toMoveResult.Value;
if (toMove is null && toMoveResult is IConvertToActionResult convertToActionResult)
{
return convertToActionResult.Convert();
}
var destinationParentID = move.ParentId;
var sourceParentID = toMove.ParentId;
var moveResult = _mediaService.Move(toMove, move.ParentId, _backofficeSecurityAccessor.BackOfficeSecurity.GetUserId().ResultOr(Constants.Security.SuperUserId));
if (sourceParentID == destinationParentID)
{
return ValidationProblem(new SimpleNotificationModel(new BackOfficeNotification("", _localizedTextService.Localize("media", "moveToSameFolderFailed"), NotificationStyle.Error)));
}
if (moveResult == false)
{
return ValidationProblem();
}
else
{
return Content(toMove.Path, MediaTypeNames.Text.Plain, Encoding.UTF8);
}
}
/// <summary>
/// Saves content
/// </summary>
/// <returns></returns>
[FileUploadCleanupFilter]
[MediaItemSaveValidation]
[OutgoingEditorModelEvent]
public ActionResult<MediaItemDisplay> PostSave(
[ModelBinder(typeof(MediaItemBinder))]
MediaItemSave contentItem)
{
//Recent versions of IE/Edge may send in the full client side file path instead of just the file name.
//To ensure similar behavior across all browsers no matter what they do - we strip the FileName property of all
//uploaded files to being *only* the actual file name (as it should be).
if (contentItem.UploadedFiles != null && contentItem.UploadedFiles.Any())
{
foreach (var file in contentItem.UploadedFiles)
{
file.FileName = Path.GetFileName(file.FileName);
}
}
//If we've reached here it means:
// * Our model has been bound
// * and validated
// * any file attachments have been saved to their temporary location for us to use
// * we have a reference to the DTO object and the persisted object
// * Permissions are valid
//Don't update the name if it is empty
if (contentItem.Name.IsNullOrWhiteSpace() == false)
{
contentItem.PersistedContent.Name = contentItem.Name;
}
MapPropertyValuesForPersistence<IMedia, MediaItemSave>(
contentItem,
contentItem.PropertyCollectionDto,
(save, property) => property.GetValue(), //get prop val
(save, property, v) => property.SetValue(v), //set prop val
null); // media are all invariant
//we will continue to save if model state is invalid, however we cannot save if critical data is missing.
//TODO: Allowing media to be saved when it is invalid is odd - media doesn't have a publish phase so suddenly invalid data is allowed to be 'live'
if (!ModelState.IsValid)
{
//check for critical data validation issues, we can't continue saving if this data is invalid
if (!RequiredForPersistenceAttribute.HasRequiredValuesForPersistence(contentItem))
{
//ok, so the absolute mandatory data is invalid and it's new, we cannot actually continue!
// add the model state to the outgoing object and throw validation response
MediaItemDisplay forDisplay = _umbracoMapper.Map<MediaItemDisplay>(contentItem.PersistedContent);
return ValidationProblem(forDisplay, ModelState);
}
}
//save the item
var saveStatus = _mediaService.Save(contentItem.PersistedContent, _backofficeSecurityAccessor.BackOfficeSecurity.GetUserId().ResultOr(Constants.Security.SuperUserId));
//return the updated model
var display = _umbracoMapper.Map<MediaItemDisplay>(contentItem.PersistedContent);
//lastly, if it is not valid, add the model state to the outgoing object and throw a 403
if (!ModelState.IsValid)
{
return ValidationProblem(display, ModelState, StatusCodes.Status403Forbidden);
}
//put the correct msgs in
switch (contentItem.Action)
{
case ContentSaveAction.Save:
case ContentSaveAction.SaveNew:
if (saveStatus.Success)
{
display.AddSuccessNotification(
_localizedTextService.Localize("speechBubbles", "editMediaSaved"),
_localizedTextService.Localize("speechBubbles", "editMediaSavedText"));
}
else
{
AddCancelMessage(display);
//If the item is new and the operation was cancelled, we need to return a different
// status code so the UI can handle it since it won't be able to redirect since there
// is no Id to redirect to!
if (saveStatus.Result.Result == OperationResultType.FailedCancelledByEvent && IsCreatingAction(contentItem.Action))
{
return ValidationProblem(display);
}
}
break;
}
return display;
}
/// <summary>
/// Empties the recycle bin
/// </summary>
/// <returns></returns>
[HttpDelete]
[HttpPost]
public IActionResult EmptyRecycleBin()
{
_mediaService.EmptyRecycleBin(_backofficeSecurityAccessor.BackOfficeSecurity.GetUserId().ResultOr(Constants.Security.SuperUserId));
return Ok(_localizedTextService.Localize("defaultdialogs", "recycleBinIsEmpty"));
}
/// <summary>
/// Change the sort order for media
/// </summary>
/// <param name="sorted"></param>
/// <returns></returns>
public async Task<IActionResult> PostSort(ContentSortOrder sorted)
{
if (sorted == null)
{
return NotFound();
}
//if there's nothing to sort just return ok
if (sorted.IdSortOrder.Length == 0)
{
return Ok();
}
// Authorize...
var requirement = new MediaPermissionsResourceRequirement();
var resource = new MediaPermissionsResource(sorted.ParentId);
var authorizationResult = await _authorizationService.AuthorizeAsync(User, resource, requirement);
if (!authorizationResult.Succeeded)
{
return Forbid();
}
var sortedMedia = new List<IMedia>();
try
{
sortedMedia.AddRange(sorted.IdSortOrder.Select(_mediaService.GetById));
// Save Media with new sort order and update content xml in db accordingly
if (_mediaService.Sort(sortedMedia) == false)
{
_logger.LogWarning("Media sorting failed, this was probably caused by an event being cancelled");
return ValidationProblem("Media sorting failed, this was probably caused by an event being cancelled");
}
return Ok();
}
catch (Exception ex)
{
_logger.LogError(ex, "Could not update media sort order");
throw;
}
}
public async Task<ActionResult<MediaItemDisplay>> PostAddFolder(PostedFolder folder)
{
var parentIdResult = await GetParentIdAsIntAsync(folder.ParentId, validatePermissions: true);
if (!(parentIdResult.Result is null))
{
return new ActionResult<MediaItemDisplay>(parentIdResult.Result);
}
var parentId = parentIdResult.Value;
if (!parentId.HasValue)
{
return NotFound("The passed id doesn't exist");
}
var isFolderAllowed = IsFolderCreationAllowedHere(parentId.Value);
if (isFolderAllowed == false)
{
return ValidationProblem(_localizedTextService.Localize("speechBubbles", "folderCreationNotAllowed"));
}
var f = _mediaService.CreateMedia(folder.Name, parentId.Value, Constants.Conventions.MediaTypes.Folder);
_mediaService.Save(f, _backofficeSecurityAccessor.BackOfficeSecurity.CurrentUser.Id);
return _umbracoMapper.Map<MediaItemDisplay>(f);
}
/// <summary>
/// Used to submit a media file
/// </summary>
/// <returns></returns>
/// <remarks>
/// We cannot validate this request with attributes (nicely) due to the nature of the multi-part for data.
/// </remarks>
public async Task<IActionResult> PostAddFile([FromForm] string path, [FromForm] string currentFolder, [FromForm] string contentTypeAlias, List<IFormFile> file)
{
var root = _hostingEnvironment.MapPathContentRoot(Constants.SystemDirectories.TempFileUploads);
//ensure it exists
Directory.CreateDirectory(root);
//must have a file
if (file.Count == 0)
{
return NotFound();
}
//get the string json from the request
var parentIdResult = await GetParentIdAsIntAsync(currentFolder, validatePermissions: true);
if (!(parentIdResult.Result is null))
{
return parentIdResult.Result;
}
var parentId = parentIdResult.Value;
if (!parentId.HasValue)
{
return NotFound("The passed id doesn't exist");
}
var tempFiles = new PostedFiles();
//in case we pass a path with a folder in it, we will create it and upload media to it.
if (!string.IsNullOrEmpty(path))
{
if (!IsFolderCreationAllowedHere(parentId.Value))
{
AddCancelMessage(tempFiles, _localizedTextService.Localize("speechBubbles", "folderUploadNotAllowed"));
return Ok(tempFiles);
}
var folders = path.Split(Constants.CharArrays.ForwardSlash);
for (int i = 0; i < folders.Length - 1; i++)
{
var folderName = folders[i];
IMedia folderMediaItem;
//if uploading directly to media root and not a subfolder
if (parentId == Constants.System.Root)
{
//look for matching folder
folderMediaItem =
_mediaService.GetRootMedia().FirstOrDefault(x => x.Name == folderName && x.ContentType.Alias == Constants.Conventions.MediaTypes.Folder);
if (folderMediaItem == null)
{
//if null, create a folder
folderMediaItem = _mediaService.CreateMedia(folderName, -1, Constants.Conventions.MediaTypes.Folder);
_mediaService.Save(folderMediaItem);
}
}
else
{
//get current parent
var mediaRoot = _mediaService.GetById(parentId.Value);
//if the media root is null, something went wrong, we'll abort
if (mediaRoot == null)
return Problem(
"The folder: " + folderName + " could not be used for storing images, its ID: " + parentId +
" returned null");
//look for matching folder
folderMediaItem = FindInChildren(mediaRoot.Id, folderName, Constants.Conventions.MediaTypes.Folder);
if (folderMediaItem == null)
{
//if null, create a folder
folderMediaItem = _mediaService.CreateMedia(folderName, mediaRoot, Constants.Conventions.MediaTypes.Folder);
_mediaService.Save(folderMediaItem);
}
}
//set the media root to the folder id so uploaded files will end there.
parentId = folderMediaItem.Id;
}
}
var mediaTypeAlias = string.Empty;
var allMediaTypes = _mediaTypeService.GetAll().ToList();
var allowedContentTypes = new HashSet<IMediaType>();
if (parentId != Constants.System.Root)
{
var mediaFolderItem = _mediaService.GetById(parentId.Value);
var mediaFolderType = allMediaTypes.FirstOrDefault(x => x.Alias == mediaFolderItem.ContentType.Alias);
if (mediaFolderType != null)
{
IMediaType mediaTypeItem = null;
foreach (ContentTypeSort allowedContentType in mediaFolderType.AllowedContentTypes)
{
IMediaType checkMediaTypeItem = allMediaTypes.FirstOrDefault(x => x.Id == allowedContentType.Id.Value);
allowedContentTypes.Add(checkMediaTypeItem);
var fileProperty = checkMediaTypeItem?.CompositionPropertyTypes.FirstOrDefault(x => x.Alias == Constants.Conventions.Media.File);
if (fileProperty != null)
{
mediaTypeItem = checkMediaTypeItem;
}
}
//Only set the permission-based mediaType if we only allow 1 specific file under this parent.
if (allowedContentTypes.Count == 1 && mediaTypeItem != null)
{
mediaTypeAlias = mediaTypeItem.Alias;
}
}
}
else
{
var typesAllowedAtRoot = allMediaTypes.Where(x => x.AllowedAsRoot).ToList();
allowedContentTypes.UnionWith(typesAllowedAtRoot);
}
//get the files
foreach (var formFile in file)
{
var fileName = formFile.FileName.Trim(Constants.CharArrays.DoubleQuote).TrimEnd();
var safeFileName = fileName.ToSafeFileName(ShortStringHelper);
var ext = safeFileName.Substring(safeFileName.LastIndexOf('.') + 1).ToLower();
if (!_contentSettings.IsFileAllowedForUpload(ext))
{
tempFiles.Notifications.Add(new BackOfficeNotification(
_localizedTextService.Localize("speechBubbles", "operationFailedHeader"),
_localizedTextService.Localize("media", "disallowedFileType"),
NotificationStyle.Warning));
continue;
}
if (string.IsNullOrEmpty(mediaTypeAlias))
{
mediaTypeAlias = Constants.Conventions.MediaTypes.File;
if (contentTypeAlias == Constants.Conventions.MediaTypes.AutoSelect)
{
// Look up MediaTypes
foreach (var mediaTypeItem in allMediaTypes)
{
var fileProperty = mediaTypeItem.CompositionPropertyTypes.FirstOrDefault(x => x.Alias == Constants.Conventions.Media.File);
if (fileProperty == null)
{
continue;
}
var dataTypeKey = fileProperty.DataTypeKey;
var dataType = _dataTypeService.GetDataType(dataTypeKey);
if (dataType == null || dataType.Configuration is not IFileExtensionsConfig fileExtensionsConfig)
{
continue;
}
var fileExtensions = fileExtensionsConfig.FileExtensions;
if (fileExtensions == null || fileExtensions.All(x => x.Value != ext))
{
continue;
}
mediaTypeAlias = mediaTypeItem.Alias;
break;
}
// If media type is still File then let's check if it's an image.
if (mediaTypeAlias == Constants.Conventions.MediaTypes.File && _imageUrlGenerator.SupportedImageFileTypes.Contains(ext))
{
mediaTypeAlias = Constants.Conventions.MediaTypes.Image;
}
}
else
{
mediaTypeAlias = contentTypeAlias;
}
}
if (allowedContentTypes.Any(x => x.Alias == mediaTypeAlias) == false)
{
tempFiles.Notifications.Add(new BackOfficeNotification(
_localizedTextService.Localize("speechBubbles", "operationFailedHeader"),
_localizedTextService.Localize("media", "disallowedMediaType", new[] { mediaTypeAlias }),
NotificationStyle.Warning));
continue;
}
var mediaItemName = fileName.ToFriendlyName();
var createdMediaItem = _mediaService.CreateMedia(mediaItemName, parentId.Value, mediaTypeAlias, _backofficeSecurityAccessor.BackOfficeSecurity.CurrentUser.Id);
await using (var stream = formFile.OpenReadStream())
{
createdMediaItem.SetValue(_mediaFileManager, _mediaUrlGenerators, _shortStringHelper, _contentTypeBaseServiceProvider, Constants.Conventions.Media.File, fileName, stream);
}
var saveResult = _mediaService.Save(createdMediaItem, _backofficeSecurityAccessor.BackOfficeSecurity.CurrentUser.Id);
if (saveResult == false)
{
AddCancelMessage(tempFiles, _localizedTextService.Localize("speechBubbles", "operationCancelledText") + " -- " + mediaItemName);
}
}
//Different response if this is a 'blueimp' request
if (HttpContext.Request.Query.Any(x => x.Key == "origin"))
{
var origin = HttpContext.Request.Query.First(x => x.Key == "origin");
if (origin.Value == "blueimp")
{
return new JsonResult(tempFiles); //Don't output the angular xsrf stuff, blue imp doesn't like that
}
}
return Ok(tempFiles);
}
private bool IsFolderCreationAllowedHere(int parentId)
{
var allMediaTypes = _mediaTypeService.GetAll().ToList();
var isFolderAllowed = false;
if (parentId == Constants.System.Root)
{
var typesAllowedAtRoot = allMediaTypes.Where(ct => ct.AllowedAsRoot).ToList();
isFolderAllowed = typesAllowedAtRoot.Any(x => x.Alias == Constants.Conventions.MediaTypes.Folder);
}
else
{
var parentMediaType = _mediaService.GetById(parentId);
var mediaFolderType = allMediaTypes.FirstOrDefault(x => x.Alias == parentMediaType.ContentType.Alias);
if (mediaFolderType != null)
{
isFolderAllowed =
mediaFolderType.AllowedContentTypes.Any(x => x.Alias == Constants.Conventions.MediaTypes.Folder);
}
}
return isFolderAllowed;
}
private IMedia FindInChildren(int mediaId, string nameToFind, string contentTypeAlias)
{
const int pageSize = 500;
var page = 0;
var total = long.MaxValue;
while (page * pageSize < total)
{
var children = _mediaService.GetPagedChildren(mediaId, page++, pageSize, out total,
_sqlContext.Query<IMedia>().Where(x => x.Name == nameToFind));
var match = children.FirstOrDefault(c => c.ContentType.Alias == contentTypeAlias);
if (match != null)
{
return match;
}
}
return null;
}
/// <summary>
/// Given a parent id which could be a GUID, UDI or an INT, this will resolve the INT
/// </summary>
/// <param name="parentId"></param>
/// <param name="validatePermissions">
/// If true, this will check if the current user has access to the resolved integer parent id
/// and if that check fails an unauthorized exception will occur
/// </param>
/// <returns></returns>
private async Task<ActionResult<int?>> GetParentIdAsIntAsync(string parentId, bool validatePermissions)
{
int intParentId;
// test for udi
if (UdiParser.TryParse(parentId, out GuidUdi parentUdi))
{
parentId = parentUdi.Guid.ToString();
}
//if it's not an INT then we'll check for GUID
if (int.TryParse(parentId, NumberStyles.Integer, CultureInfo.InvariantCulture, out intParentId) == false)
{
// if a guid then try to look up the entity
Guid idGuid;
if (Guid.TryParse(parentId, out idGuid))
{
var entity = _entityService.Get(idGuid);
if (entity != null)
{
intParentId = entity.Id;
}
else
{
return null;
}
}
else
{
return ValidationProblem("The request was not formatted correctly, the parentId is not an integer, Guid or UDI");
}
}
// Authorize...
//ensure the user has access to this folder by parent id!
if (validatePermissions)
{
var requirement = new MediaPermissionsResourceRequirement();
var authorizationResult = await _authorizationService.AuthorizeAsync(User, new MediaPermissionsResource(_mediaService.GetById(intParentId)), requirement);
if (!authorizationResult.Succeeded)
{
return ValidationProblem(
new SimpleNotificationModel(new BackOfficeNotification(
_localizedTextService.Localize("speechBubbles", "operationFailedHeader"),
_localizedTextService.Localize("speechBubbles", "invalidUserPermissionsText"),
NotificationStyle.Warning)),
StatusCodes.Status403Forbidden);
}
}
return intParentId;
}
/// <summary>
/// Ensures the item can be moved/copied to the new location
/// </summary>
/// <param name="model"></param>
/// <returns></returns>
private ActionResult<IMedia> ValidateMoveOrCopy(MoveOrCopy model)
{
if (model == null)
{
return NotFound();
}
var toMove = _mediaService.GetById(model.Id);
if (toMove == null)
{
return NotFound();
}
if (model.ParentId < 0)
{
//cannot move if the content item is not allowed at the root unless there are
//none allowed at root (in which case all should be allowed at root)
var mediaTypeService = _mediaTypeService;
if (toMove.ContentType.AllowedAsRoot == false && mediaTypeService.GetAll().Any(ct => ct.AllowedAsRoot))
{
var notificationModel = new SimpleNotificationModel();
notificationModel.AddErrorNotification(_localizedTextService.Localize("moveOrCopy", "notAllowedAtRoot"), "");
return ValidationProblem(notificationModel);
}
}
else
{
var parent = _mediaService.GetById(model.ParentId);
if (parent == null)
{
return NotFound();
}
//check if the item is allowed under this one
var parentContentType = _mediaTypeService.Get(parent.ContentTypeId);
if (parentContentType.AllowedContentTypes.Select(x => x.Id).ToArray()
.Any(x => x.Value == toMove.ContentType.Id) == false)
{
var notificationModel = new SimpleNotificationModel();
notificationModel.AddErrorNotification(_localizedTextService.Localize("moveOrCopy", "notAllowedByContentType"), "");
return ValidationProblem(notificationModel);
}
// Check on paths
if ((string.Format(",{0},", parent.Path)).IndexOf(string.Format(",{0},", toMove.Id), StringComparison.Ordinal) > -1)
{
var notificationModel = new SimpleNotificationModel();
notificationModel.AddErrorNotification(_localizedTextService.Localize("moveOrCopy", "notAllowedByPath"), "");
return ValidationProblem(notificationModel);
}
}
return new ActionResult<IMedia>(toMove);
}
public PagedResult<EntityBasic> GetPagedReferences(int id, string entityType, int pageNumber = 1, int pageSize = 100)
{
if (pageNumber <= 0 || pageSize <= 0)
{
throw new NotSupportedException("Both pageNumber and pageSize must be greater than zero");
}
var objectType = ObjectTypes.GetUmbracoObjectType(entityType);
var udiType = ObjectTypes.GetUdiType(objectType);
var relations = _relationService.GetPagedParentEntitiesByChildId(id, pageNumber - 1, pageSize, out var totalRecords, objectType);
return new PagedResult<EntityBasic>(totalRecords, pageNumber, pageSize)
{
Items = relations.Cast<ContentEntitySlim>().Select(rel => new EntityBasic
{
Id = rel.Id,
Key = rel.Key,
Udi = Udi.Create(udiType, rel.Key),
Icon = rel.ContentTypeIcon,
Name = rel.Name,
Alias = rel.ContentTypeAlias
})
};
}
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using sys = System;
namespace Google.Ads.GoogleAds.V10.Resources
{
/// <summary>Resource name for the <c>FeedItemSetLink</c> resource.</summary>
public sealed partial class FeedItemSetLinkName : gax::IResourceName, sys::IEquatable<FeedItemSetLinkName>
{
/// <summary>The possible contents of <see cref="FeedItemSetLinkName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>.
/// </summary>
CustomerFeedFeedItemSetFeedItem = 1,
}
private static gax::PathTemplate s_customerFeedFeedItemSetFeedItem = new gax::PathTemplate("customers/{customer_id}/feedItemSetLinks/{feed_id_feed_item_set_id_feed_item_id}");
/// <summary>Creates a <see cref="FeedItemSetLinkName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="FeedItemSetLinkName"/> containing the provided
/// <paramref name="unparsedResourceName"/>.
/// </returns>
public static FeedItemSetLinkName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new FeedItemSetLinkName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="FeedItemSetLinkName"/> with the pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedId">The <c>Feed</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemSetId">The <c>FeedItemSet</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemId">The <c>FeedItem</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="FeedItemSetLinkName"/> constructed from the provided ids.</returns>
public static FeedItemSetLinkName FromCustomerFeedFeedItemSetFeedItem(string customerId, string feedId, string feedItemSetId, string feedItemId) =>
new FeedItemSetLinkName(ResourceNameType.CustomerFeedFeedItemSetFeedItem, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), feedId: gax::GaxPreconditions.CheckNotNullOrEmpty(feedId, nameof(feedId)), feedItemSetId: gax::GaxPreconditions.CheckNotNullOrEmpty(feedItemSetId, nameof(feedItemSetId)), feedItemId: gax::GaxPreconditions.CheckNotNullOrEmpty(feedItemId, nameof(feedItemId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="FeedItemSetLinkName"/> with pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedId">The <c>Feed</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemSetId">The <c>FeedItemSet</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemId">The <c>FeedItem</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="FeedItemSetLinkName"/> with pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>.
/// </returns>
public static string Format(string customerId, string feedId, string feedItemSetId, string feedItemId) =>
FormatCustomerFeedFeedItemSetFeedItem(customerId, feedId, feedItemSetId, feedItemId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="FeedItemSetLinkName"/> with pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedId">The <c>Feed</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemSetId">The <c>FeedItemSet</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemId">The <c>FeedItem</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="FeedItemSetLinkName"/> with pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>.
/// </returns>
public static string FormatCustomerFeedFeedItemSetFeedItem(string customerId, string feedId, string feedItemSetId, string feedItemId) =>
s_customerFeedFeedItemSetFeedItem.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), $"{(gax::GaxPreconditions.CheckNotNullOrEmpty(feedId, nameof(feedId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(feedItemSetId, nameof(feedItemSetId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(feedItemId, nameof(feedItemId)))}");
/// <summary>
/// Parses the given resource name string into a new <see cref="FeedItemSetLinkName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>
/// </description>
/// </item>
/// </list>
/// </remarks>
/// <param name="feedItemSetLinkName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="FeedItemSetLinkName"/> if successful.</returns>
public static FeedItemSetLinkName Parse(string feedItemSetLinkName) => Parse(feedItemSetLinkName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="FeedItemSetLinkName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>
/// </description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="feedItemSetLinkName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="FeedItemSetLinkName"/> if successful.</returns>
public static FeedItemSetLinkName Parse(string feedItemSetLinkName, bool allowUnparsed) =>
TryParse(feedItemSetLinkName, allowUnparsed, out FeedItemSetLinkName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="FeedItemSetLinkName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>
/// </description>
/// </item>
/// </list>
/// </remarks>
/// <param name="feedItemSetLinkName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="FeedItemSetLinkName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string feedItemSetLinkName, out FeedItemSetLinkName result) =>
TryParse(feedItemSetLinkName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="FeedItemSetLinkName"/> instance;
/// optionally allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description>
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>
/// </description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="feedItemSetLinkName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="FeedItemSetLinkName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string feedItemSetLinkName, bool allowUnparsed, out FeedItemSetLinkName result)
{
gax::GaxPreconditions.CheckNotNull(feedItemSetLinkName, nameof(feedItemSetLinkName));
gax::TemplatedResourceName resourceName;
if (s_customerFeedFeedItemSetFeedItem.TryParseName(feedItemSetLinkName, out resourceName))
{
string[] split1 = ParseSplitHelper(resourceName[1], new char[] { '~', '~', });
if (split1 == null)
{
result = null;
return false;
}
result = FromCustomerFeedFeedItemSetFeedItem(resourceName[0], split1[0], split1[1], split1[2]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(feedItemSetLinkName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private static string[] ParseSplitHelper(string s, char[] separators)
{
string[] result = new string[separators.Length + 1];
int i0 = 0;
for (int i = 0; i <= separators.Length; i++)
{
int i1 = i < separators.Length ? s.IndexOf(separators[i], i0) : s.Length;
if (i1 < 0 || i1 == i0)
{
return null;
}
result[i] = s.Substring(i0, i1 - i0);
i0 = i1 + 1;
}
return result;
}
private FeedItemSetLinkName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string customerId = null, string feedId = null, string feedItemId = null, string feedItemSetId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
CustomerId = customerId;
FeedId = feedId;
FeedItemId = feedItemId;
FeedItemSetId = feedItemSetId;
}
/// <summary>
/// Constructs a new instance of a <see cref="FeedItemSetLinkName"/> class from the component parts of pattern
/// <c>customers/{customer_id}/feedItemSetLinks/{feed_id}~{feed_item_set_id}~{feed_item_id}</c>
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedId">The <c>Feed</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemSetId">The <c>FeedItemSet</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="feedItemId">The <c>FeedItem</c> ID. Must not be <c>null</c> or empty.</param>
public FeedItemSetLinkName(string customerId, string feedId, string feedItemSetId, string feedItemId) : this(ResourceNameType.CustomerFeedFeedItemSetFeedItem, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), feedId: gax::GaxPreconditions.CheckNotNullOrEmpty(feedId, nameof(feedId)), feedItemSetId: gax::GaxPreconditions.CheckNotNullOrEmpty(feedItemSetId, nameof(feedItemSetId)), feedItemId: gax::GaxPreconditions.CheckNotNullOrEmpty(feedItemId, nameof(feedItemId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string CustomerId { get; }
/// <summary>
/// The <c>Feed</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string FeedId { get; }
/// <summary>
/// The <c>FeedItem</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string FeedItemId { get; }
/// <summary>
/// The <c>FeedItemSet</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string FeedItemSetId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.CustomerFeedFeedItemSetFeedItem: return s_customerFeedFeedItemSetFeedItem.Expand(CustomerId, $"{FeedId}~{FeedItemSetId}~{FeedItemId}");
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as FeedItemSetLinkName);
/// <inheritdoc/>
public bool Equals(FeedItemSetLinkName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(FeedItemSetLinkName a, FeedItemSetLinkName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(FeedItemSetLinkName a, FeedItemSetLinkName b) => !(a == b);
}
public partial class FeedItemSetLink
{
/// <summary>
/// <see cref="FeedItemSetLinkName"/>-typed view over the <see cref="ResourceName"/> resource name property.
/// </summary>
internal FeedItemSetLinkName ResourceNameAsFeedItemSetLinkName
{
get => string.IsNullOrEmpty(ResourceName) ? null : FeedItemSetLinkName.Parse(ResourceName, allowUnparsed: true);
set => ResourceName = value?.ToString() ?? "";
}
/// <summary>
/// <see cref="FeedItemName"/>-typed view over the <see cref="FeedItem"/> resource name property.
/// </summary>
internal FeedItemName FeedItemAsFeedItemName
{
get => string.IsNullOrEmpty(FeedItem) ? null : FeedItemName.Parse(FeedItem, allowUnparsed: true);
set => FeedItem = value?.ToString() ?? "";
}
/// <summary>
/// <see cref="FeedItemSetName"/>-typed view over the <see cref="FeedItemSet"/> resource name property.
/// </summary>
internal FeedItemSetName FeedItemSetAsFeedItemSetName
{
get => string.IsNullOrEmpty(FeedItemSet) ? null : FeedItemSetName.Parse(FeedItemSet, allowUnparsed: true);
set => FeedItemSet = value?.ToString() ?? "";
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Impl.Binary.IO
{
using System;
using System.IO;
using System.Text;
using Apache.Ignite.Core.Impl.Memory;
/// <summary>
/// Base class for managed and unmanaged data streams.
/// </summary>
internal unsafe abstract class BinaryStreamBase : IBinaryStream
{
/** Byte: zero. */
private const byte ByteZero = 0;
/** Byte: one. */
private const byte ByteOne = 1;
/** LITTLE_ENDIAN flag. */
private static readonly bool LittleEndian = BitConverter.IsLittleEndian;
/** Position. */
protected int Pos;
/** Disposed flag. */
private bool _disposed;
/// <summary>
/// Write byte.
/// </summary>
/// <param name="val">Byte value.</param>
public abstract void WriteByte(byte val);
/// <summary>
/// Read byte.
/// </summary>
/// <returns>
/// Byte value.
/// </returns>
public abstract byte ReadByte();
/// <summary>
/// Write byte array.
/// </summary>
/// <param name="val">Byte array.</param>
public abstract void WriteByteArray(byte[] val);
/// <summary>
/// Internal routine to write byte array.
/// </summary>
/// <param name="val">Byte array.</param>
/// <param name="data">Data pointer.</param>
protected static void WriteByteArray0(byte[] val, byte* data)
{
fixed (byte* val0 = val)
{
CopyMemory(val0, data, val.Length);
}
}
/// <summary>
/// Read byte array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Byte array.
/// </returns>
public abstract byte[] ReadByteArray(int cnt);
/// <summary>
/// Internal routine to read byte array.
/// </summary>
/// <param name="len">Array length.</param>
/// <param name="data">Data pointer.</param>
/// <returns>Byte array</returns>
protected static byte[] ReadByteArray0(int len, byte* data)
{
byte[] res = new byte[len];
fixed (byte* res0 = res)
{
CopyMemory(data, res0, len);
}
return res;
}
/// <summary>
/// Write bool.
/// </summary>
/// <param name="val">Bool value.</param>
public void WriteBool(bool val)
{
WriteByte(val ? ByteOne : ByteZero);
}
/// <summary>
/// Read bool.
/// </summary>
/// <returns>
/// Bool value.
/// </returns>
public bool ReadBool()
{
return ReadByte() == ByteOne;
}
/// <summary>
/// Write bool array.
/// </summary>
/// <param name="val">Bool array.</param>
public abstract void WriteBoolArray(bool[] val);
/// <summary>
/// Internal routine to write bool array.
/// </summary>
/// <param name="val">Bool array.</param>
/// <param name="data">Data pointer.</param>
protected static void WriteBoolArray0(bool[] val, byte* data)
{
fixed (bool* val0 = val)
{
CopyMemory((byte*)val0, data, val.Length);
}
}
/// <summary>
/// Read bool array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Bool array.
/// </returns>
public abstract bool[] ReadBoolArray(int cnt);
/// <summary>
/// Internal routine to read bool array.
/// </summary>
/// <param name="len">Array length.</param>
/// <param name="data">Data pointer.</param>
/// <returns>Bool array</returns>
protected static bool[] ReadBoolArray0(int len, byte* data)
{
bool[] res = new bool[len];
fixed (bool* res0 = res)
{
CopyMemory(data, (byte*)res0, len);
}
return res;
}
/// <summary>
/// Write short.
/// </summary>
/// <param name="val">Short value.</param>
public abstract void WriteShort(short val);
/// <summary>
/// Internal routine to write short value.
/// </summary>
/// <param name="val">Short value.</param>
/// <param name="data">Data pointer.</param>
protected static void WriteShort0(short val, byte* data)
{
if (LittleEndian)
*((short*)data) = val;
else
{
byte* valPtr = (byte*)&val;
data[0] = valPtr[1];
data[1] = valPtr[0];
}
}
/// <summary>
/// Read short.
/// </summary>
/// <returns>
/// Short value.
/// </returns>
public abstract short ReadShort();
/// <summary>
/// Internal routine to read short value.
/// </summary>
/// <param name="data">Data pointer.</param>
/// <returns>Short value</returns>
protected static short ReadShort0(byte* data)
{
short val;
if (LittleEndian)
val = *((short*)data);
else
{
byte* valPtr = (byte*)&val;
valPtr[0] = data[1];
valPtr[1] = data[0];
}
return val;
}
/// <summary>
/// Write short array.
/// </summary>
/// <param name="val">Short array.</param>
public abstract void WriteShortArray(short[] val);
/// <summary>
/// Internal routine to write short array.
/// </summary>
/// <param name="val">Short array.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
protected static void WriteShortArray0(short[] val, byte* data, int cnt)
{
if (LittleEndian)
{
fixed (short* val0 = val)
{
CopyMemory((byte*)val0, data, cnt);
}
}
else
{
byte* curPos = data;
for (int i = 0; i < val.Length; i++)
{
short val0 = val[i];
byte* valPtr = (byte*)&(val0);
*curPos++ = valPtr[1];
*curPos++ = valPtr[0];
}
}
}
/// <summary>
/// Read short array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Short array.
/// </returns>
public abstract short[] ReadShortArray(int cnt);
/// <summary>
/// Internal routine to read short array.
/// </summary>
/// <param name="len">Array length.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
/// <returns>Short array</returns>
protected static short[] ReadShortArray0(int len, byte* data, int cnt)
{
short[] res = new short[len];
if (LittleEndian)
{
fixed (short* res0 = res)
{
CopyMemory(data, (byte*)res0, cnt);
}
}
else
{
for (int i = 0; i < len; i++)
{
short val;
byte* valPtr = (byte*)&val;
valPtr[1] = *data++;
valPtr[0] = *data++;
res[i] = val;
}
}
return res;
}
/// <summary>
/// Write char.
/// </summary>
/// <param name="val">Char value.</param>
public void WriteChar(char val)
{
WriteShort(*(short*)(&val));
}
/// <summary>
/// Read char.
/// </summary>
/// <returns>
/// Char value.
/// </returns>
public char ReadChar()
{
short val = ReadShort();
return *(char*)(&val);
}
/// <summary>
/// Write char array.
/// </summary>
/// <param name="val">Char array.</param>
public abstract void WriteCharArray(char[] val);
/// <summary>
/// Internal routine to write char array.
/// </summary>
/// <param name="val">Char array.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
protected static void WriteCharArray0(char[] val, byte* data, int cnt)
{
if (LittleEndian)
{
fixed (char* val0 = val)
{
CopyMemory((byte*)val0, data, cnt);
}
}
else
{
byte* curPos = data;
for (int i = 0; i < val.Length; i++)
{
char val0 = val[i];
byte* valPtr = (byte*)&(val0);
*curPos++ = valPtr[1];
*curPos++ = valPtr[0];
}
}
}
/// <summary>
/// Read char array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Char array.
/// </returns>
public abstract char[] ReadCharArray(int cnt);
/// <summary>
/// Internal routine to read char array.
/// </summary>
/// <param name="len">Count.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
/// <returns>Char array</returns>
protected static char[] ReadCharArray0(int len, byte* data, int cnt)
{
char[] res = new char[len];
if (LittleEndian)
{
fixed (char* res0 = res)
{
CopyMemory(data, (byte*)res0, cnt);
}
}
else
{
for (int i = 0; i < len; i++)
{
char val;
byte* valPtr = (byte*)&val;
valPtr[1] = *data++;
valPtr[0] = *data++;
res[i] = val;
}
}
return res;
}
/// <summary>
/// Write int.
/// </summary>
/// <param name="val">Int value.</param>
public abstract void WriteInt(int val);
/// <summary>
/// Write int to specific position.
/// </summary>
/// <param name="writePos">Position.</param>
/// <param name="val">Value.</param>
public abstract void WriteInt(int writePos, int val);
/// <summary>
/// Internal routine to write int value.
/// </summary>
/// <param name="val">Int value.</param>
/// <param name="data">Data pointer.</param>
protected static void WriteInt0(int val, byte* data)
{
if (LittleEndian)
*((int*)data) = val;
else
{
byte* valPtr = (byte*)&val;
data[0] = valPtr[3];
data[1] = valPtr[2];
data[2] = valPtr[1];
data[3] = valPtr[0];
}
}
/// <summary>
/// Read int.
/// </summary>
/// <returns>
/// Int value.
/// </returns>
public abstract int ReadInt();
/// <summary>
/// Internal routine to read int value.
/// </summary>
/// <param name="data">Data pointer.</param>
/// <returns>Int value</returns>
protected static int ReadInt0(byte* data) {
int val;
if (LittleEndian)
val = *((int*)data);
else
{
byte* valPtr = (byte*)&val;
valPtr[0] = data[3];
valPtr[1] = data[2];
valPtr[2] = data[1];
valPtr[3] = data[0];
}
return val;
}
/// <summary>
/// Write int array.
/// </summary>
/// <param name="val">Int array.</param>
public abstract void WriteIntArray(int[] val);
/// <summary>
/// Internal routine to write int array.
/// </summary>
/// <param name="val">Int array.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
protected static void WriteIntArray0(int[] val, byte* data, int cnt)
{
if (LittleEndian)
{
fixed (int* val0 = val)
{
CopyMemory((byte*)val0, data, cnt);
}
}
else
{
byte* curPos = data;
for (int i = 0; i < val.Length; i++)
{
int val0 = val[i];
byte* valPtr = (byte*)&(val0);
*curPos++ = valPtr[3];
*curPos++ = valPtr[2];
*curPos++ = valPtr[1];
*curPos++ = valPtr[0];
}
}
}
/// <summary>
/// Read int array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Int array.
/// </returns>
public abstract int[] ReadIntArray(int cnt);
/// <summary>
/// Internal routine to read int array.
/// </summary>
/// <param name="len">Count.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
/// <returns>Int array</returns>
protected static int[] ReadIntArray0(int len, byte* data, int cnt)
{
int[] res = new int[len];
if (LittleEndian)
{
fixed (int* res0 = res)
{
CopyMemory(data, (byte*)res0, cnt);
}
}
else
{
for (int i = 0; i < len; i++)
{
int val;
byte* valPtr = (byte*)&val;
valPtr[3] = *data++;
valPtr[2] = *data++;
valPtr[1] = *data++;
valPtr[0] = *data++;
res[i] = val;
}
}
return res;
}
/// <summary>
/// Write float.
/// </summary>
/// <param name="val">Float value.</param>
public void WriteFloat(float val)
{
int val0 = *(int*)(&val);
WriteInt(val0);
}
/// <summary>
/// Read float.
/// </summary>
/// <returns>
/// Float value.
/// </returns>
public float ReadFloat()
{
int val = ReadInt();
return *(float*)(&val);
}
/// <summary>
/// Write float array.
/// </summary>
/// <param name="val">Float array.</param>
public abstract void WriteFloatArray(float[] val);
/// <summary>
/// Internal routine to write float array.
/// </summary>
/// <param name="val">Int array.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
protected static void WriteFloatArray0(float[] val, byte* data, int cnt)
{
if (LittleEndian)
{
fixed (float* val0 = val)
{
CopyMemory((byte*)val0, data, cnt);
}
}
else
{
byte* curPos = data;
for (int i = 0; i < val.Length; i++)
{
float val0 = val[i];
byte* valPtr = (byte*)&(val0);
*curPos++ = valPtr[3];
*curPos++ = valPtr[2];
*curPos++ = valPtr[1];
*curPos++ = valPtr[0];
}
}
}
/// <summary>
/// Read float array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Float array.
/// </returns>
public abstract float[] ReadFloatArray(int cnt);
/// <summary>
/// Internal routine to read float array.
/// </summary>
/// <param name="len">Count.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
/// <returns>Float array</returns>
protected static float[] ReadFloatArray0(int len, byte* data, int cnt)
{
float[] res = new float[len];
if (LittleEndian)
{
fixed (float* res0 = res)
{
CopyMemory(data, (byte*)res0, cnt);
}
}
else
{
for (int i = 0; i < len; i++)
{
int val;
byte* valPtr = (byte*)&val;
valPtr[3] = *data++;
valPtr[2] = *data++;
valPtr[1] = *data++;
valPtr[0] = *data++;
res[i] = val;
}
}
return res;
}
/// <summary>
/// Write long.
/// </summary>
/// <param name="val">Long value.</param>
public abstract void WriteLong(long val);
/// <summary>
/// Internal routine to write long value.
/// </summary>
/// <param name="val">Long value.</param>
/// <param name="data">Data pointer.</param>
protected static void WriteLong0(long val, byte* data)
{
if (LittleEndian)
*((long*)data) = val;
else
{
byte* valPtr = (byte*)&val;
data[0] = valPtr[7];
data[1] = valPtr[6];
data[2] = valPtr[5];
data[3] = valPtr[4];
data[4] = valPtr[3];
data[5] = valPtr[2];
data[6] = valPtr[1];
data[7] = valPtr[0];
}
}
/// <summary>
/// Read long.
/// </summary>
/// <returns>
/// Long value.
/// </returns>
public abstract long ReadLong();
/// <summary>
/// Internal routine to read long value.
/// </summary>
/// <param name="data">Data pointer.</param>
/// <returns>Long value</returns>
protected static long ReadLong0(byte* data)
{
long val;
if (LittleEndian)
val = *((long*)data);
else
{
byte* valPtr = (byte*)&val;
valPtr[0] = data[7];
valPtr[1] = data[6];
valPtr[2] = data[5];
valPtr[3] = data[4];
valPtr[4] = data[3];
valPtr[5] = data[2];
valPtr[6] = data[1];
valPtr[7] = data[0];
}
return val;
}
/// <summary>
/// Write long array.
/// </summary>
/// <param name="val">Long array.</param>
public abstract void WriteLongArray(long[] val);
/// <summary>
/// Internal routine to write long array.
/// </summary>
/// <param name="val">Long array.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
protected static void WriteLongArray0(long[] val, byte* data, int cnt)
{
if (LittleEndian)
{
fixed (long* val0 = val)
{
CopyMemory((byte*)val0, data, cnt);
}
}
else
{
byte* curPos = data;
for (int i = 0; i < val.Length; i++)
{
long val0 = val[i];
byte* valPtr = (byte*)&(val0);
*curPos++ = valPtr[7];
*curPos++ = valPtr[6];
*curPos++ = valPtr[5];
*curPos++ = valPtr[4];
*curPos++ = valPtr[3];
*curPos++ = valPtr[2];
*curPos++ = valPtr[1];
*curPos++ = valPtr[0];
}
}
}
/// <summary>
/// Read long array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Long array.
/// </returns>
public abstract long[] ReadLongArray(int cnt);
/// <summary>
/// Internal routine to read long array.
/// </summary>
/// <param name="len">Count.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
/// <returns>Long array</returns>
protected static long[] ReadLongArray0(int len, byte* data, int cnt)
{
long[] res = new long[len];
if (LittleEndian)
{
fixed (long* res0 = res)
{
CopyMemory(data, (byte*)res0, cnt);
}
}
else
{
for (int i = 0; i < len; i++)
{
long val;
byte* valPtr = (byte*)&val;
valPtr[7] = *data++;
valPtr[6] = *data++;
valPtr[5] = *data++;
valPtr[4] = *data++;
valPtr[3] = *data++;
valPtr[2] = *data++;
valPtr[1] = *data++;
valPtr[0] = *data++;
res[i] = val;
}
}
return res;
}
/// <summary>
/// Write double.
/// </summary>
/// <param name="val">Double value.</param>
public void WriteDouble(double val)
{
long val0 = *(long*)(&val);
WriteLong(val0);
}
/// <summary>
/// Read double.
/// </summary>
/// <returns>
/// Double value.
/// </returns>
public double ReadDouble()
{
long val = ReadLong();
return *(double*)(&val);
}
/// <summary>
/// Write double array.
/// </summary>
/// <param name="val">Double array.</param>
public abstract void WriteDoubleArray(double[] val);
/// <summary>
/// Internal routine to write double array.
/// </summary>
/// <param name="val">Double array.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
protected static void WriteDoubleArray0(double[] val, byte* data, int cnt)
{
if (LittleEndian)
{
fixed (double* val0 = val)
{
CopyMemory((byte*)val0, data, cnt);
}
}
else
{
byte* curPos = data;
for (int i = 0; i < val.Length; i++)
{
double val0 = val[i];
byte* valPtr = (byte*)&(val0);
*curPos++ = valPtr[7];
*curPos++ = valPtr[6];
*curPos++ = valPtr[5];
*curPos++ = valPtr[4];
*curPos++ = valPtr[3];
*curPos++ = valPtr[2];
*curPos++ = valPtr[1];
*curPos++ = valPtr[0];
}
}
}
/// <summary>
/// Read double array.
/// </summary>
/// <param name="cnt">Count.</param>
/// <returns>
/// Double array.
/// </returns>
public abstract double[] ReadDoubleArray(int cnt);
/// <summary>
/// Internal routine to read double array.
/// </summary>
/// <param name="len">Count.</param>
/// <param name="data">Data pointer.</param>
/// <param name="cnt">Bytes count.</param>
/// <returns>Double array</returns>
protected static double[] ReadDoubleArray0(int len, byte* data, int cnt)
{
double[] res = new double[len];
if (LittleEndian)
{
fixed (double* res0 = res)
{
CopyMemory(data, (byte*)res0, cnt);
}
}
else
{
for (int i = 0; i < len; i++)
{
double val;
byte* valPtr = (byte*)&val;
valPtr[7] = *data++;
valPtr[6] = *data++;
valPtr[5] = *data++;
valPtr[4] = *data++;
valPtr[3] = *data++;
valPtr[2] = *data++;
valPtr[1] = *data++;
valPtr[0] = *data++;
res[i] = val;
}
}
return res;
}
/// <summary>
/// Write string.
/// </summary>
/// <param name="chars">Characters.</param>
/// <param name="charCnt">Char count.</param>
/// <param name="byteCnt">Byte count.</param>
/// <param name="encoding">Encoding.</param>
/// <returns>
/// Amounts of bytes written.
/// </returns>
public abstract int WriteString(char* chars, int charCnt, int byteCnt, Encoding encoding);
/// <summary>
/// Write arbitrary data.
/// </summary>
/// <param name="src">Source array.</param>
/// <param name="off">Offset</param>
/// <param name="cnt">Count.</param>
public void Write(byte[] src, int off, int cnt)
{
fixed (byte* src0 = src)
{
Write(src0 + off, cnt);
}
}
/// <summary>
/// Read arbitrary data.
/// </summary>
/// <param name="dest">Destination array.</param>
/// <param name="off">Offset.</param>
/// <param name="cnt">Count.</param>
/// <returns>
/// Amount of bytes read.
/// </returns>
public void Read(byte[] dest, int off, int cnt)
{
fixed (byte* dest0 = dest)
{
Read(dest0 + off, cnt);
}
}
/// <summary>
/// Write arbitrary data.
/// </summary>
/// <param name="src">Source.</param>
/// <param name="cnt">Count.</param>
public abstract void Write(byte* src, int cnt);
/// <summary>
/// Internal write routine.
/// </summary>
/// <param name="src">Source.</param>
/// <param name="cnt">Count.</param>
/// <param name="data">Data (dsetination).</param>
protected void WriteInternal(byte* src, int cnt, byte* data)
{
CopyMemory(src, data + Pos, cnt);
}
/// <summary>
/// Read arbitrary data.
/// </summary>
/// <param name="dest">Destination.</param>
/// <param name="cnt">Count.</param>
/// <returns></returns>
public abstract void Read(byte* dest, int cnt);
/// <summary>
/// Internal read routine.
/// </summary>
/// <param name="src">Source</param>
/// <param name="dest">Destination.</param>
/// <param name="cnt">Count.</param>
/// <returns>Amount of bytes written.</returns>
protected void ReadInternal(byte* src, byte* dest, int cnt)
{
int cnt0 = Math.Min(Remaining, cnt);
CopyMemory(src + Pos, dest, cnt0);
ShiftRead(cnt0);
}
/// <summary>
/// Position.
/// </summary>
public int Position
{
get { return Pos; }
}
/// <summary>
/// Gets remaining bytes in the stream.
/// </summary>
/// <value>
/// Remaining bytes.
/// </value>
public abstract int Remaining { get; }
/// <summary>
/// Gets underlying array, avoiding copying if possible.
/// </summary>
/// <returns>
/// Underlying array.
/// </returns>
public abstract byte[] GetArray();
/// <summary>
/// Gets underlying data in a new array.
/// </summary>
/// <returns>
/// New array with data.
/// </returns>
public abstract byte[] GetArrayCopy();
/// <summary>
/// Check whether array passed as argument is the same as the stream hosts.
/// </summary>
/// <param name="arr">Array.</param>
/// <returns>
/// <c>True</c> if they are same.
/// </returns>
public virtual bool IsSameArray(byte[] arr)
{
return false;
}
/// <summary>
/// Seek to the given positoin.
/// </summary>
/// <param name="offset">Offset.</param>
/// <param name="origin">Seek origin.</param>
/// <returns>
/// Position.
/// </returns>
/// <exception cref="System.ArgumentException">
/// Unsupported seek origin: + origin
/// or
/// Seek before origin: + newPos
/// </exception>
public int Seek(int offset, SeekOrigin origin)
{
int newPos;
switch (origin)
{
case SeekOrigin.Begin:
{
newPos = offset;
break;
}
case SeekOrigin.Current:
{
newPos = Pos + offset;
break;
}
default:
throw new ArgumentException("Unsupported seek origin: " + origin);
}
if (newPos < 0)
throw new ArgumentException("Seek before origin: " + newPos);
EnsureWriteCapacity(newPos);
Pos = newPos;
return Pos;
}
/** <inheritdoc /> */
public void Dispose()
{
if (_disposed)
return;
Dispose(true);
GC.SuppressFinalize(this);
_disposed = true;
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
protected abstract void Dispose(bool disposing);
/// <summary>
/// Ensure capacity for write.
/// </summary>
/// <param name="cnt">Bytes count.</param>
protected abstract void EnsureWriteCapacity(int cnt);
/// <summary>
/// Ensure capacity for write and shift position.
/// </summary>
/// <param name="cnt">Bytes count.</param>
/// <returns>Position before shift.</returns>
protected int EnsureWriteCapacityAndShift(int cnt)
{
int pos0 = Pos;
EnsureWriteCapacity(Pos + cnt);
ShiftWrite(cnt);
return pos0;
}
/// <summary>
/// Ensure capacity for read.
/// </summary>
/// <param name="cnt">Bytes count.</param>
protected abstract void EnsureReadCapacity(int cnt);
/// <summary>
/// Ensure capacity for read and shift position.
/// </summary>
/// <param name="cnt">Bytes count.</param>
/// <returns>Position before shift.</returns>
protected int EnsureReadCapacityAndShift(int cnt)
{
int pos0 = Pos;
EnsureReadCapacity(cnt);
ShiftRead(cnt);
return pos0;
}
/// <summary>
/// Shift position due to write
/// </summary>
/// <param name="cnt">Bytes count.</param>
protected void ShiftWrite(int cnt)
{
Pos += cnt;
}
/// <summary>
/// Shift position due to read.
/// </summary>
/// <param name="cnt">Bytes count.</param>
private void ShiftRead(int cnt)
{
Pos += cnt;
}
/// <summary>
/// Calculate new capacity.
/// </summary>
/// <param name="curCap">Current capacity.</param>
/// <param name="reqCap">Required capacity.</param>
/// <returns>New capacity.</returns>
protected static int Capacity(int curCap, int reqCap)
{
int newCap;
if (reqCap < 256)
newCap = 256;
else
{
newCap = curCap << 1;
if (newCap < reqCap)
newCap = reqCap;
}
return newCap;
}
/// <summary>
/// Unsafe memory copy routine.
/// </summary>
/// <param name="src">Source.</param>
/// <param name="dest">Destination.</param>
/// <param name="len">Length.</param>
private static void CopyMemory(byte* src, byte* dest, int len)
{
PlatformMemoryUtils.CopyMemory(src, dest, len);
}
}
}
| |
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using System.Threading;
using NUnit.Compatibility;
using NUnit.Framework.Interfaces;
using NUnit.Framework.Internal;
using NUnit.Framework.Internal.Execution;
using NUnit.Tests;
using NUnit.Tests.Assemblies;
using NUnit.TestUtilities;
using NUnit.Framework.Internal.Filters;
namespace NUnit.Framework.Api
{
// Functional tests of the TestAssemblyRunner and all subordinate classes
[NonParallelizable]
public class TestAssemblyRunnerTests : ITestListener
{
private const string MOCK_ASSEMBLY_FILE = "mock-assembly.dll";
private const string COULD_NOT_LOAD_MSG = "Could not load";
private const string BAD_FILE = "mock-assembly.pdb";
private const string SLOW_TESTS_FILE = "slow-nunit-tests.dll";
private const string MISSING_FILE = "junk.dll";
// Arbitrary delay for cancellation based on the time to run each case in SlowTests
private const int CANCEL_TEST_DELAY = SlowTests.SINGLE_TEST_DELAY * 2;
private static readonly string MOCK_ASSEMBLY_NAME = typeof(MockAssembly).GetTypeInfo().Assembly.FullName;
private const string INVALID_FILTER_ELEMENT_MESSAGE = "Invalid filter element: {0}";
private static readonly IDictionary<string, object> EMPTY_SETTINGS = new Dictionary<string, object>();
private ITestAssemblyRunner _runner;
private int _suiteStartedCount;
private int _suiteFinishedCount;
private int _testStartedCount;
private int _testFinishedCount;
private int _testOutputCount;
private int _successCount;
private int _failCount;
private int _skipCount;
private int _inconclusiveCount;
private Dictionary<string, bool> _activeTests;
[SetUp]
public void CreateRunner()
{
_runner = new NUnitTestAssemblyRunner(new DefaultTestAssemblyBuilder());
_suiteStartedCount = 0;
_suiteFinishedCount = 0;
_testStartedCount = 0;
_testFinishedCount = 0;
_testOutputCount = 0;
_successCount = 0;
_failCount = 0;
_skipCount = 0;
_inconclusiveCount = 0;
_activeTests = new Dictionary<string, bool>();
}
#region Load
[Test]
public void Load_GoodFile_ReturnsRunnableSuite()
{
var result = LoadMockAssembly();
Assert.That(result.IsSuite);
Assert.That(result, Is.TypeOf<TestAssembly>());
Assert.That(result.Name, Is.EqualTo(MOCK_ASSEMBLY_FILE));
Assert.That(result.RunState, Is.EqualTo(Interfaces.RunState.Runnable));
Assert.That(result.TestCaseCount, Is.EqualTo(MockAssembly.Tests));
}
[Test, SetUICulture("en-US")]
public void Load_FileNotFound_ReturnsNonRunnableSuite()
{
var result = _runner.Load(MISSING_FILE, EMPTY_SETTINGS);
Assert.That(result.IsSuite);
Assert.That(result, Is.TypeOf<TestAssembly>());
Assert.That(result.Name, Is.EqualTo(MISSING_FILE));
Assert.That(result.RunState, Is.EqualTo(Interfaces.RunState.NotRunnable));
Assert.That(result.TestCaseCount, Is.EqualTo(0));
Assert.That(result.Properties.Get(PropertyNames.SkipReason),
Does.StartWith(COULD_NOT_LOAD_MSG));
}
[Test, SetUICulture("en-US")]
public void Load_BadFile_ReturnsNonRunnableSuite()
{
var result = _runner.Load(BAD_FILE, EMPTY_SETTINGS);
Assert.That(result.IsSuite);
Assert.That(result, Is.TypeOf<TestAssembly>());
Assert.That(result.Name, Is.EqualTo(BAD_FILE));
Assert.That(result.RunState, Is.EqualTo(Interfaces.RunState.NotRunnable));
Assert.That(result.TestCaseCount, Is.EqualTo(0));
Assert.That(result.Properties.Get(PropertyNames.SkipReason),
Does.StartWith("Could not load").And.Contains(BAD_FILE));
}
#endregion
#region CountTestCases
[Test]
public void CountTestCases_AfterLoad_ReturnsCorrectCount()
{
LoadMockAssembly();
Assert.That(_runner.CountTestCases(TestFilter.Empty), Is.EqualTo(MockAssembly.Tests));
}
[Test]
public void CountTestCases_FullNameRegexFilterForFixture_AfterLoad_ReturnsCorrectCount()
{
LoadMockAssembly();
Assert.That(_runner.CountTestCases(new FullNameFilter(".*BadFixture.*")
{
IsRegex = true
}), Is.EqualTo(BadFixture.Tests));
}
[Test]
public void CountTestCases_FullNameRegexFilterForAssembly_AfterLoad_ReturnsCorrectCount()
{
LoadMockAssembly();
Assert.That(_runner.CountTestCases(new FullNameFilter(".*mock-assembly.dll")
{
IsRegex = true
}), Is.EqualTo(MockAssembly.Tests));
}
[Test]
public void CountTestCases_FullNameRegexFilterForTest_AfterLoad_ReturnsCorrectCount()
{
LoadMockAssembly();
Assert.That(_runner.CountTestCases(new OrFilter(
new FullNameFilter("NUnit.Tests.ExplicitFixture.Test1"),
new FullNameFilter("NUnit.Tests.ExplicitFixture.Test2"))),
Is.EqualTo(2));
}
[Test]
public void CountTestCases_CategoryFilter_AfterLoad_ReturnsCorrectCount()
{
LoadMockAssembly();
Assert.That(_runner.CountTestCases(new CategoryFilter("FixtureCategory")), Is.EqualTo(MockTestFixture.Tests));
}
[Test]
public void CountTestCases_WithoutLoad_ThrowsInvalidOperation()
{
var ex = Assert.Throws<InvalidOperationException>(
() => _runner.CountTestCases(TestFilter.Empty));
Assert.That(ex.Message, Is.EqualTo("Tests must be loaded before counting test cases."));
}
[Test]
public void CountTestCases_FileNotFound_ReturnsZero()
{
_runner.Load(MISSING_FILE, EMPTY_SETTINGS);
Assert.That(_runner.CountTestCases(TestFilter.Empty), Is.EqualTo(0));
}
[Test]
public void CountTestCases_BadFile_ReturnsZero()
{
_runner.Load(BAD_FILE, EMPTY_SETTINGS);
Assert.That(_runner.CountTestCases(TestFilter.Empty), Is.EqualTo(0));
}
#endregion
#region ExploreTests
[Test]
public void ExploreTests_WithoutLoad_ThrowsInvalidOperation()
{
var ex = Assert.Throws<InvalidOperationException>(
() => _runner.ExploreTests(TestFilter.Empty));
Assert.That(ex.Message, Is.EqualTo("Tests must be loaded before exploring them."));
}
[Test]
public void ExploreTests_FileNotFound_ReturnsZeroTests()
{
_runner.Load(MISSING_FILE, EMPTY_SETTINGS);
var explorer = _runner.ExploreTests(TestFilter.Empty);
Assert.That(explorer.TestCaseCount, Is.EqualTo(0));
}
[Test]
public void ExploreTests_BadFile_ReturnsZeroTests()
{
_runner.Load(BAD_FILE, EMPTY_SETTINGS);
var explorer = _runner.ExploreTests(TestFilter.Empty);
Assert.That(explorer.TestCaseCount, Is.EqualTo(0));
}
[Test]
public void ExploreTests_AfterLoad_ReturnsCorrectCount()
{
LoadMockAssembly();
var explorer = _runner.ExploreTests(TestFilter.Empty);
Assert.That(explorer.TestCaseCount, Is.EqualTo(MockAssembly.Tests));
}
[Test]
public void ExploreTest_AfterLoad_ReturnsSameTestCount()
{
LoadMockAssembly();
var explorer = _runner.ExploreTests(TestFilter.Empty);
Assert.That(explorer.TestCaseCount, Is.EqualTo(_runner.CountTestCases(TestFilter.Empty)));
}
[Test]
public void ExploreTest_AfterLoad_AllIdsAreUnique()
{
LoadMockAssembly();
var explorer = _runner.ExploreTests(TestFilter.Empty);
var dict = new Dictionary<string, bool>();
CheckForDuplicates(explorer, dict);
}
private void CheckForDuplicates(ITest test, Dictionary<string, bool> dict)
{
Assert.False(dict.ContainsKey(test.Id), "Duplicate key: {0}", test.Id);
dict.Add(test.Id, true);
foreach (var child in test.Tests)
CheckForDuplicates(child, dict);
}
[Test]
public void ExploreTests_AfterLoad_WithFilter_ReturnCorrectCount()
{
LoadMockAssembly();
ITestFilter filter = new CategoryFilter("FixtureCategory");
var explorer = _runner.ExploreTests(filter);
Assert.That(explorer.TestCaseCount, Is.EqualTo(MockTestFixture.Tests));
}
[Test]
public void ExploreTests_AfterLoad_WithFilter_ReturnSameTestCount()
{
LoadMockAssembly();
ITestFilter filter = new CategoryFilter("FixtureCategory");
var explorer = _runner.ExploreTests(filter);
Assert.That(explorer.TestCaseCount, Is.EqualTo(_runner.CountTestCases(filter)));
}
[Test]
public void ExploreTests_AfterLoad_WithFilter_TestSuitesRetainProperties()
{
LoadMockAssembly();
ITestFilter filter = new CategoryFilter("FixtureCategory");
var explorer = _runner.ExploreTests(filter);
var runnerFixture = _runner.LoadedTest.Tests[0].Tests[0].Tests[0].Tests[0];
var explorerFixture = explorer.Tests[0].Tests[0].Tests[0].Tests[0];
Assert.That(explorerFixture.Properties.Keys.Count, Is.EqualTo(runnerFixture.Properties.Keys.Count));
Assert.That(explorerFixture.Properties.Get(PropertyNames.Category),
Is.EqualTo(explorerFixture.Properties.Get(PropertyNames.Category)));
Assert.That(explorerFixture.Properties.Get(PropertyNames.Description),
Is.EqualTo(explorerFixture.Properties.Get(PropertyNames.Description)));
}
#endregion
#region Run
[Test]
public void Run_AfterLoad_ReturnsRunnableSuite()
{
LoadMockAssembly();
var result = _runner.Run(TestListener.NULL, TestFilter.Empty);
Assert.That(result.Test.IsSuite);
Assert.That(result.Test, Is.TypeOf<TestAssembly>());
Assert.That(result.Test.RunState, Is.EqualTo(RunState.Runnable));
Assert.That(result.Test.TestCaseCount, Is.EqualTo(MockAssembly.Tests));
Assert.That(result.ResultState, Is.EqualTo(ResultState.ChildFailure));
Assert.That(result.PassCount, Is.EqualTo(MockAssembly.Passed));
Assert.That(result.FailCount, Is.EqualTo(MockAssembly.Failed));
Assert.That(result.WarningCount, Is.EqualTo(MockAssembly.Warnings));
Assert.That(result.SkipCount, Is.EqualTo(MockAssembly.Skipped));
Assert.That(result.InconclusiveCount, Is.EqualTo(MockAssembly.Inconclusive));
}
[Test]
public void Run_AfterLoad_SendsExpectedEvents()
{
LoadMockAssembly();
_runner.Run(this, TestFilter.Empty);
Assert.That(_suiteStartedCount, Is.EqualTo(MockAssembly.Suites));
Assert.That(_suiteFinishedCount, Is.EqualTo(MockAssembly.Suites));
Assert.That(_testStartedCount, Is.EqualTo(MockAssembly.TestStartedEvents));
Assert.That(_testFinishedCount, Is.EqualTo(MockAssembly.TestFinishedEvents));
Assert.That(_testOutputCount, Is.EqualTo(MockAssembly.TestOutputEvents));
Assert.That(_successCount, Is.EqualTo(MockAssembly.Passed));
Assert.That(_failCount, Is.EqualTo(MockAssembly.Failed));
Assert.That(_skipCount, Is.EqualTo(MockAssembly.Skipped));
Assert.That(_inconclusiveCount, Is.EqualTo(MockAssembly.Inconclusive));
}
[Test]
public void Run_WithoutLoad_ReturnsError()
{
var ex = Assert.Throws<InvalidOperationException>(
() => _runner.Run(TestListener.NULL, TestFilter.Empty));
Assert.That(ex.Message, Is.EqualTo("Tests must be loaded before running them."));
}
[Test, SetUICulture("en-US")]
public void Run_FileNotFound_ReturnsNonRunnableSuite()
{
_runner.Load(MISSING_FILE, EMPTY_SETTINGS);
var result = _runner.Run(TestListener.NULL, TestFilter.Empty);
Assert.That(result.Test.IsSuite);
Assert.That(result.Test, Is.TypeOf<TestAssembly>());
Assert.That(result.Test.RunState, Is.EqualTo(RunState.NotRunnable));
Assert.That(result.Test.TestCaseCount, Is.EqualTo(0));
Assert.That(result.ResultState, Is.EqualTo(ResultState.NotRunnable.WithSite(FailureSite.SetUp)));
Assert.That(result.Message,
Does.StartWith(COULD_NOT_LOAD_MSG));
}
[Test]
public void RunTestsAction_WithInvalidFilterElement_ThrowsArgumentException()
{
LoadMockAssembly();
var ex = Assert.Throws<ArgumentException>(() =>
{
TestFilter.FromXml("<filter><invalidElement>foo</invalidElement></filter>");
});
Assert.That(ex.Message, Does.StartWith(string.Format(INVALID_FILTER_ELEMENT_MESSAGE, "invalidElement")));
}
[Test]
public void Run_WithParameters()
{
var dict = new Dictionary<string, string>();
dict.Add("X", "5");
dict.Add("Y", "7");
var settings = new Dictionary<string, object>();
settings.Add("TestParametersDictionary", dict);
LoadMockAssembly(settings);
var result = _runner.Run(TestListener.NULL, TestFilter.Empty);
CheckParameterOutput(result);
}
[Test]
public void Run_WithLegacyParameters()
{
var settings = new Dictionary<string, object>();
settings.Add("TestParameters", "X=5;Y=7");
LoadMockAssembly(settings);
var result = _runner.Run(TestListener.NULL, TestFilter.Empty);
CheckParameterOutput(result);
}
[Test, SetUICulture("en-US")]
public void Run_BadFile_ReturnsNonRunnableSuite()
{
_runner.Load(BAD_FILE, EMPTY_SETTINGS);
var result = _runner.Run(TestListener.NULL, TestFilter.Empty);
Assert.That(result.Test.IsSuite);
Assert.That(result.Test, Is.TypeOf<TestAssembly>());
Assert.That(result.Test.RunState, Is.EqualTo(RunState.NotRunnable));
Assert.That(result.Test.TestCaseCount, Is.EqualTo(0));
Assert.That(result.ResultState, Is.EqualTo(ResultState.NotRunnable.WithSite(FailureSite.SetUp)));
Assert.That(result.Message,
Does.StartWith("Could not load"));
}
#endregion
#region RunAsync
[Test]
public void RunAsync_AfterLoad_ReturnsRunnableSuite()
{
LoadMockAssembly();
_runner.RunAsync(TestListener.NULL, TestFilter.Empty);
_runner.WaitForCompletion(Timeout.Infinite);
Assert.NotNull(_runner.Result, "No result returned");
Assert.That(_runner.Result.Test.IsSuite);
Assert.That(_runner.Result.Test, Is.TypeOf<TestAssembly>());
Assert.That(_runner.Result.Test.RunState, Is.EqualTo(RunState.Runnable));
Assert.That(_runner.Result.Test.TestCaseCount, Is.EqualTo(MockAssembly.Tests));
Assert.That(_runner.Result.ResultState, Is.EqualTo(ResultState.ChildFailure));
Assert.That(_runner.Result.PassCount, Is.EqualTo(MockAssembly.Passed));
Assert.That(_runner.Result.FailCount, Is.EqualTo(MockAssembly.Failed));
Assert.That(_runner.Result.SkipCount, Is.EqualTo(MockAssembly.Skipped));
Assert.That(_runner.Result.InconclusiveCount, Is.EqualTo(MockAssembly.Inconclusive));
}
[Test]
public void RunAsync_AfterLoad_SendsExpectedEvents()
{
LoadMockAssembly();
_runner.RunAsync(this, TestFilter.Empty);
_runner.WaitForCompletion(Timeout.Infinite);
Assert.That(_testStartedCount, Is.EqualTo(MockAssembly.Tests - IgnoredFixture.Tests - BadFixture.Tests - ExplicitFixture.Tests));
Assert.That(_testFinishedCount, Is.EqualTo(MockAssembly.Tests));
Assert.That(_successCount, Is.EqualTo(MockAssembly.Passed));
Assert.That(_failCount, Is.EqualTo(MockAssembly.Failed));
Assert.That(_skipCount, Is.EqualTo(MockAssembly.Skipped));
Assert.That(_inconclusiveCount, Is.EqualTo(MockAssembly.Inconclusive));
}
[Test]
public void RunAsync_WithoutLoad_ReturnsError()
{
var ex = Assert.Throws<InvalidOperationException>(
() => _runner.RunAsync(TestListener.NULL, TestFilter.Empty));
Assert.That(ex.Message, Is.EqualTo("Tests must be loaded before running them."));
}
[Test, SetUICulture("en-US")]
public void RunAsync_FileNotFound_ReturnsNonRunnableSuite()
{
_runner.Load(MISSING_FILE, EMPTY_SETTINGS);
_runner.RunAsync(TestListener.NULL, TestFilter.Empty);
_runner.WaitForCompletion(Timeout.Infinite);
Assert.NotNull(_runner.Result, "No result returned");
Assert.That(_runner.Result.Test.IsSuite);
Assert.That(_runner.Result.Test, Is.TypeOf<TestAssembly>());
Assert.That(_runner.Result.Test.RunState, Is.EqualTo(RunState.NotRunnable));
Assert.That(_runner.Result.Test.TestCaseCount, Is.EqualTo(0));
Assert.That(_runner.Result.ResultState, Is.EqualTo(ResultState.NotRunnable.WithSite(FailureSite.SetUp)));
Assert.That(_runner.Result.Message,
Does.StartWith(COULD_NOT_LOAD_MSG));
}
[Test, SetUICulture("en-US")]
public void RunAsync_BadFile_ReturnsNonRunnableSuite()
{
_runner.Load(BAD_FILE, EMPTY_SETTINGS);
_runner.RunAsync(TestListener.NULL, TestFilter.Empty);
_runner.WaitForCompletion(Timeout.Infinite);
Assert.NotNull(_runner.Result, "No result returned");
Assert.That(_runner.Result.Test.IsSuite);
Assert.That(_runner.Result.Test, Is.TypeOf<TestAssembly>());
Assert.That(_runner.Result.Test.RunState, Is.EqualTo(RunState.NotRunnable));
Assert.That(_runner.Result.Test.TestCaseCount, Is.EqualTo(0));
Assert.That(_runner.Result.ResultState, Is.EqualTo(ResultState.NotRunnable.WithSite(FailureSite.SetUp)));
Assert.That(_runner.Result.Message,
Does.StartWith("Could not load"));
}
#endregion
#region StopRun
#if THREAD_ABORT // Can't stop run on platforms without ability to abort thread
[Test]
public void StopRun_WhenNoTestIsRunning_DoesNotThrow([Values] bool force)
{
Assert.DoesNotThrow(() => _runner.StopRun(force));
}
private static TestCaseData[] StopRunCases = new TestCaseData[]
{
new TestCaseData(0, false).SetName("{m}(Simple dispatcher, cooperative stop)"),
new TestCaseData(0, true).SetName("{m}(Simple dispatcher, forced stop)"),
new TestCaseData(2, false).SetName("{m}(Parallel dispatcher, cooperative stop)"),
new TestCaseData(2, true).SetName("{m}(Parallel dispatcher, forced stop)")
};
[TestCaseSource(nameof(StopRunCases))]
public void StopRun_WhenTestIsRunning_StopsTest(int workers, bool force)
{
var tests = LoadSlowTests(workers);
var count = tests.TestCaseCount;
var stopType = force ? "forced stop" : "cooperative stop";
_runner.RunAsync(this, TestFilter.Empty);
// Ensure that at least one test started, otherwise we aren't testing anything!
SpinWait.SpinUntil(() => _testStartedCount > 0, CANCEL_TEST_DELAY);
_runner.StopRun(force);
var completionWasSignaled = _runner.WaitForCompletion(CANCEL_TEST_DELAY);
// Use Assert.Multiple so we can see everything that went wrong at one time
Assert.Multiple(() =>
{
Assert.True(completionWasSignaled, "Runner never signaled completion");
Assert.True(_runner.IsTestComplete, "Test is not recorded as complete");
if (_activeTests.Count > 0)
{
var sb = new StringBuilder("The following tests never terminated:" + Environment.NewLine);
foreach (var name in _activeTests.Keys)
sb.AppendLine($" * {name}");
Assert.Fail(sb.ToString());
}
Assert.That(_suiteStartedCount, Is.GreaterThan(0), "No suites started");
Assert.That(_testStartedCount, Is.GreaterThan(0), "No test cases started");
Assert.That(_suiteFinishedCount, Is.EqualTo(_suiteStartedCount), $"Not all suites terminated after {stopType}");
Assert.That(_testFinishedCount, Is.EqualTo(_testStartedCount), $"Not all test cases terminated after {stopType}");
Assert.That(_runner.Result.ResultState, Is.EqualTo(ResultState.Cancelled), $"Invalid ResultState after {stopType}");
Assert.That(_runner.Result.PassCount, Is.LessThan(count), $"All tests passed in spite of {stopType}");
});
}
#endif
#endregion
#region ITestListener Implementation
void ITestListener.TestStarted(ITest test)
{
_activeTests.Add(test.Name, true);
if (test.IsSuite)
_suiteStartedCount++;
else
_testStartedCount++;
}
void ITestListener.TestFinished(ITestResult result)
{
_activeTests.Remove(result.Test.Name);
if (result.Test.IsSuite)
{
_suiteFinishedCount++;
}
else
{
_testFinishedCount++;
switch (result.ResultState.Status)
{
case TestStatus.Passed:
_successCount++;
break;
case TestStatus.Failed:
_failCount++;
break;
case TestStatus.Skipped:
_skipCount++;
break;
case TestStatus.Inconclusive:
_inconclusiveCount++;
break;
}
}
}
/// <summary>
/// Called when a test produces output for immediate display
/// </summary>
/// <param name="output">A TestOutput object containing the text to display</param>
public void TestOutput(TestOutput output)
{
_testOutputCount++;
}
/// <summary>
/// Called when a test produces message to be sent to listeners
/// </summary>
/// <param name="message">A TestMessage object containing the text to send</param>
public void SendMessage(TestMessage message)
{
}
#endregion
#region Helper Methods
private ITest LoadMockAssembly()
{
return LoadMockAssembly(EMPTY_SETTINGS);
}
private ITest LoadMockAssembly(IDictionary<string, object> settings)
{
return _runner.Load(
Path.Combine(TestContext.CurrentContext.TestDirectory, MOCK_ASSEMBLY_FILE),
settings);
}
private ITest LoadSlowTests(int workers)
{
var settings = new Dictionary<string, object>();
settings.Add(FrameworkPackageSettings.NumberOfTestWorkers, workers);
return _runner.Load(Path.Combine(TestContext.CurrentContext.TestDirectory, SLOW_TESTS_FILE), settings);
}
private void CheckParameterOutput(ITestResult result)
{
var childResult = TestFinder.Find(
"DisplayRunParameters", result, true);
Assert.That(childResult.Output, Is.EqualTo(
"Parameter X = 5" + Environment.NewLine +
"Parameter Y = 7" + Environment.NewLine));
}
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Web.UI;
using System.Web.UI.WebControls;
using Vevo;
using Vevo.Domain;
using Vevo.Domain.Products;
using Vevo.Domain.Stores;
using Vevo.Shared.Utilities;
using Vevo.WebUI;
using Vevo.WebUI.Ajax;
using Vevo.WebUI.Products;
public partial class Layouts_ProductLists_ProductListRowandGridStyle2 : BaseProductListControl
{
private string SortField
{
get
{
if ( ViewState[ "SortField" ] == null )
{
if ( !IsSearchResult )
ViewState[ "SortField" ] = uxSortField.Items[ 0 ].Value.ToString();
else
ViewState[ "SortField" ] = uxSortField.Items[ 1 ].Value.ToString();
}
return ( string ) ViewState[ "SortField" ];
}
set
{
ViewState[ "SortField" ] = value;
}
}
private string SortType
{
get
{
if ( ViewState[ "SortType" ] == null )
ViewState[ "SortType" ] = "ASC";
return ( string ) ViewState[ "SortType" ];
}
set
{
ViewState[ "SortType" ] = value;
}
}
private string ItemPerPage
{
get
{
if ( ViewState[ "ItemPerPage" ] == null )
ViewState[ "ItemPerPage" ] = uxItemsPerPageControl.DefaultValue;
return ( string ) ViewState[ "ItemPerPage" ];
}
set
{
ViewState[ "ItemPerPage" ] = value;
}
}
private string ViewType
{
get
{
if ( ViewState[ "ViewType" ] == null )
ViewState[ "ViewType" ] = "Grid";
return ( string ) ViewState[ "ViewType" ];
}
set
{
ViewState[ "ViewType" ] = value;
}
}
private int NoOfCategoryColumn
{
get
{
return DataAccessContext.Configurations.GetIntValue( uxProductListViewType.ProductColumnConfig );
}
}
private IList<Product> GetProductList( int itemsPerPage, string sortBy, out int totalItems )
{
return DataRetriever(
StoreContext.Culture,
sortBy,
( uxPagingControl.CurrentPage - 1 ) * itemsPerPage,
( uxPagingControl.CurrentPage * itemsPerPage ) - 1,
UserDefinedParameters,
out totalItems );
}
private void PopulateProductControls()
{
if ( this.Visible )
Refresh();
}
private void ProductList_StoreCultureChanged( object sender, CultureEventArgs e )
{
Refresh();
}
private void ProductList_StoreCurrencyChanged( object sender, CurrencyEventArgs e )
{
Refresh();
}
private void RegisterStoreEvents()
{
GetStorefrontEvents().StoreCultureChanged +=
new StorefrontEvents.CultureEventHandler( ProductList_StoreCultureChanged );
GetStorefrontEvents().StoreCurrencyChanged +=
new StorefrontEvents.CurrencyEventHandler( ProductList_StoreCurrencyChanged );
}
private ScriptManager GetScriptManager()
{
return AjaxUtilities.GetScriptManager( this );
}
private void AddHistoryPoint()
{
GetScriptManager().AddHistoryPoint( "page", uxPagingControl.CurrentPage.ToString() );
GetScriptManager().AddHistoryPoint( "sortField", SortField );
GetScriptManager().AddHistoryPoint( "sortType", SortType );
GetScriptManager().AddHistoryPoint( "itemPerPage", uxItemsPerPageControl.SelectedValue );
GetScriptManager().AddHistoryPoint( "viewType", uxProductListViewType.SelectedView );
}
private string CurrentCategoryName
{
get
{
if (Request.QueryString["CategoryName"] == null)
return String.Empty;
else
return Request.QueryString["CategoryName"];
}
}
private string GetCategoryDescription()
{
return DataAccessContext.CategoryRepository.GetOne(
StoreContext.Culture,
CurrentCategoryName).Description;
}
private void PopulateCategoryControls()
{
uxCategoryDescriptionText.Text = GetCategoryDescription();
}
protected void uxSortUpLink_Click( object sender, EventArgs e )
{
SortType = "DESC";
AddHistoryPoint();
Refresh();
}
protected void uxSortDownLink_Click( object sender, EventArgs e )
{
SortType = "ASC";
AddHistoryPoint();
Refresh();
}
protected void uxFieldSortDrop_SelectedIndexChanged( object sender, EventArgs e )
{
SortField = uxSortField.SelectedValue;
uxSortValueHidden.Value = uxSortField.SelectedValue;
AddHistoryPoint();
Refresh();
}
protected void uxItemsPerPageControl_BubbleEvent( object sender, EventArgs e )
{
ItemPerPage = uxItemsPerPageControl.SelectedValue;
uxPagingControl.CurrentPage = 1;
AddHistoryPoint();
Refresh();
}
protected void uxPagingControl_BubbleEvent( object sender, EventArgs e )
{
AddHistoryPoint();
Refresh();
}
protected void uxProductListViewType_BubbleEvent( object sender, EventArgs e )
{
ViewType = uxProductListViewType.SelectedView;
AddHistoryPoint();
Refresh();
}
protected void DisplaySortType()
{
if ( SortType == "ASC" )
{
uxSortUpLink.Visible = true;
uxSortDownLink.Visible = false;
}
else
{
uxSortUpLink.Visible = false;
uxSortDownLink.Visible = true;
}
}
protected void ScriptManager_Navigate( object sender, HistoryEventArgs e )
{
if ( IsSearchResult )
return;
string args;
if ( !string.IsNullOrEmpty( e.State[ "sortField" ] ) )
{
SortField = e.State[ "sortField" ].ToString();
}
else
{
SortField = uxSortField.Items[ uxSortField.SelectedIndex ].Value.ToString();
}
if ( !string.IsNullOrEmpty( e.State[ "sortType" ] ) )
{
SortType = e.State[ "sortType" ].ToString();
}
else
{
SortType = "ASC";
}
if ( !string.IsNullOrEmpty( e.State[ "itemPerPage" ] ) )
{
ItemPerPage = e.State[ "itemPerPage" ].ToString();
}
else
{
ItemPerPage = uxItemsPerPageControl.DefaultValue;
}
if ( !string.IsNullOrEmpty( e.State[ "viewType" ] ) )
{
ViewType = e.State[ "viewType" ].ToString();
}
else
{
ViewType = "Grid";
}
if ( !string.IsNullOrEmpty( e.State[ "page" ] ) )
{
args = e.State[ "page" ];
uxPagingControl.CurrentPage = int.Parse( args );
}
else
{
uxPagingControl.CurrentPage = 1;
}
Refresh();
}
protected void Page_Load( object sender, EventArgs e )
{
PopulateCategoryControls();
if ( IsSearchResult && !IsPostBack )
{
uxSortField.SelectedIndex = 1;
uxSortField.Items[ 0 ].Enabled = false;
}
RegisterStoreEvents();
uxPagingControl.BubbleEvent += new EventHandler( uxPagingControl_BubbleEvent );
uxItemsPerPageControl.BubbleEvent += new EventHandler( uxItemsPerPageControl_BubbleEvent );
GetScriptManager().Navigate += new EventHandler<HistoryEventArgs>( ScriptManager_Navigate );
uxProductListViewType.BubbleEvent += new EventHandler( uxProductListViewType_BubbleEvent );
AjaxUtilities.ScrollToTop( uxGoToTopLink );
uxPageControlTR.Visible = true;
uxList.Visible = true;
}
protected void Page_PreRender( object sender, EventArgs e )
{
if ( this.Visible )
{
if ( !IsPostBack )
{
if ( ViewState[ "UserDefinedParameters" ] != null )
{
PopulateProductControls();
}
else
{
DisplaySortType();
uxSortField.SelectedValue = SortField;
uxItemsPerPageControl.SelectValue( ItemPerPage );
DisplayViewType();
}
}
}
CatalogUtilities.ProductItemsPerPage = ItemPerPage;
CatalogUtilities.CatalogSortField = SortField;
CatalogUtilities.ProductListView = ViewType;
}
protected void uxList_ItemDataBound( object sender, DataListItemEventArgs e )
{
Components_ProductListItem listItem = ( Components_ProductListItem ) e.Item.FindControl( "uxItem" );
if ( listItem != null )
listItem.Refresh();
}
protected void DisplayViewType()
{
if ( uxProductListViewType.SelectedView == "Grid" )
{
uxDataListPanel.Visible = false;
uxGridViewPanel.Visible = true;
uxTableViewPanel.Visible = false;
}
else if ( uxProductListViewType.SelectedView == "List" )
{
uxDataListPanel.Visible = true;
uxGridViewPanel.Visible = false;
uxTableViewPanel.Visible = false;
}
else
{
uxDataListPanel.Visible = false;
uxGridViewPanel.Visible = false;
uxTableViewPanel.Visible = true;
}
}
public void PopulateText( int totalItems, int totalItemsPerPage )
{
if ( totalItems > 0 )
{
uxMessageLabel.Text = "";
int itemPerPage = ConvertUtilities.ToInt32( ItemPerPage );
if ( itemPerPage > totalItems )
itemPerPage = totalItems;
uxItemCounLabel.Text = String.Format( "{0} {1} {2}", totalItemsPerPage.ToString(), GetLanguageText( "items" ), totalItems.ToString() );
}
else
{
uxMessageDiv.Visible = true;
uxMessageLabel.Text = "<div style='text-align: center;'>" + GetLanguageText( "ProductListNoResults" ) + "</div>";
uxItemCounLabel.Text = String.Empty;
}
}
public void Refresh()
{
DisplaySortType();
uxSortField.SelectedValue = SortField;
uxItemsPerPageControl.SelectValue( ItemPerPage );
uxProductListViewType.SelectedView = ViewType;
uxProductListViewType.SetViewTypeText( ViewType );
DisplayViewType();
int totalItems;
int selectedValue;
int totalItemsPerPage;
selectedValue = Convert.ToInt32( uxItemsPerPageControl.SelectedValue );
if ( ViewType == "List" )
{
uxList.DataSource = GetProductList( selectedValue, SortField + " " + SortType, out totalItems);
uxList.DataBind();
totalItemsPerPage = uxList.Items.Count;
}
else if ( ViewType == "Grid" )
{
uxList2.DataSource = GetProductList( selectedValue, SortField + " " + SortType, out totalItems );
uxList2.DataBind();
totalItemsPerPage = uxList2.Items.Count;
}
else
{
uxTableList.DataSource = GetProductList( selectedValue, SortField + " " + SortType, out totalItems );
uxTableList.DataBind();
totalItemsPerPage = uxTableList.Items.Count;
if ( totalItemsPerPage == 0 )
uxTableViewPanel.Visible = false;
}
uxPagingControl.NumberOfPages = ( int ) System.Math.Ceiling( ( double ) totalItems / selectedValue );
PopulateText( totalItems, totalItemsPerPage );
}
}
| |
using System;
using System.Linq;
using System.Collections.Generic;
using ConTangibles.Utilities;
using ConTangibles.Contracts;
using Prism.Mvvm;
using Windows.Foundation;
namespace ConTangibles.Data
{
/// <summary>
/// Represents a two-dimensional line.
/// </summary>
public class Line2D : BindableBase, ILine
{
#region Events
/// <summary>
/// Event that is raised when the line has changed.
/// </summary>
public event EventHandler<Line2D> Changed;
#endregion
#region Constructors
/// <summary>
/// Creates a new instance of <see cref="Line2D"/>.
/// </summary>
/// <param name="p1">The first point.</param>
/// <param name="p2">The second point.</param>
public Line2D(Point2D p1, Point2D p2)
{
Point1 = p1;
Point2 = p2;
p1.Changed += OnPointChanged;
p2.Changed += OnPointChanged;
}
#endregion
#region Points
/// <summary>
/// Gets the first point.
/// </summary>
public Point2D Point1 { get; private set; }
/// <summary>
/// Gets the second point.
/// </summary>
public Point2D Point2 { get; private set; }
/// <summary>
/// Gets or sets the <see cref="TangibleObject"/> that is
/// the owner of the line.
/// </summary>
internal TangibleObject Owner { get; set; }
#endregion
#region Methods
/// <summary>
/// Invoked when a point has changed.
/// </summary>
/// <param name="source">The source of the event.</param>
/// <param name="args">The event arguments.</param>
void OnPointChanged(Point2D source, PointChangedEventArgs args)
{
OnChanged();
}
/// <summary>
/// Raises the <see cref="Changed"/> event.
/// </summary>
protected virtual void OnChanged()
{
var handler = Changed;
handler?.Invoke(this.Owner, this);
}
/// <summary>
/// Updates the position of the line.
/// </summary>
/// <param name="line">The new line.</param>
internal void Update(Line2D line)
{
Check.NotNull(line, "line");
this.Point1.X = line.Point1.X;
this.Point1.Y = line.Point1.Y;
this.Point2.X = line.Point2.X;
this.Point2.Y = line.Point2.Y;
}
#endregion
#region ILine members
/// <summary>
/// Gets or sets the X-coordinate of the first point.
/// </summary>
public double X1
{
get
{
return Point1.X;
}
set
{
if (Point1.X == value)
return;
Point1.X = value;
OnPropertyChanged();
}
}
/// <summary>
/// Gets or sets the Y-coordinate of the first point.
/// </summary>
public double Y1
{
get
{
return Point1.Y;
}
set
{
if (Point1.Y == value)
return;
Point1.Y = value;
OnPropertyChanged();
}
}
/// <summary>
/// Gets or sets the X-coordinate of the second point.
/// </summary>
public double X2
{
get
{
return Point2.X;
}
set
{
if (Point2.X == value)
return;
Point2.X = value;
OnPropertyChanged();
}
}
/// <summary>
/// Gets or sets the Y-coordinate of the second point.
/// </summary>
public double Y2
{
get
{
return Point2.Y;
}
set
{
if (Point2.Y == value)
return;
Point2.Y = value;
OnPropertyChanged();
}
}
/// <summary>
/// Gets the X-coordinate of the first point.
/// </summary>
double IReadOnlyLine.X1
{
get
{
return Point1.X;
}
}
/// <summary>
/// Gets the Y-coordinate of the first point.
/// </summary>
double IReadOnlyLine.Y1
{
get
{
return Point1.Y;
}
}
/// <summary>
/// Gets the X-coordinate of the second point.
/// </summary>
double IReadOnlyLine.X2
{
get
{
return Point2.X;
}
}
/// <summary>
/// Gets the Y-coordinate of the second point.
/// </summary>
double IReadOnlyLine.Y2
{
get
{
return Point2.Y;
}
}
/// <summary>
/// Gets the distance of the line.
/// </summary>
public double Length
{
get
{
return MathHelper.Distance(Point1, Point2);
}
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Text;
using Microsoft.Rest.Generator.ClientModel;
using Microsoft.Rest.Generator.Logging;
using Microsoft.Rest.Generator.Utilities;
using Microsoft.Rest.Modeler.Swagger.Model;
using Microsoft.Rest.Modeler.Swagger.Properties;
using ParameterLocation = Microsoft.Rest.Modeler.Swagger.Model.ParameterLocation;
namespace Microsoft.Rest.Modeler.Swagger
{
/// <summary>
/// The builder for building swagger operations into client model methods.
/// </summary>
public class OperationBuilder
{
private IList<string> _effectiveProduces;
private IList<string> _effectiveConsumes;
private SwaggerModeler _swaggerModeler;
private Operation _operation;
private const string APP_JSON_MIME = "application/json";
public OperationBuilder(Operation operation, SwaggerModeler swaggerModeler)
{
if (operation == null)
{
throw new ArgumentNullException("operation");
}
if (swaggerModeler == null)
{
throw new ArgumentNullException("swaggerModeler");
}
this._operation = operation;
this._swaggerModeler = swaggerModeler;
this._effectiveProduces = operation.Produces.Any() ? operation.Produces : swaggerModeler.ServiceDefinition.Produces;
this._effectiveConsumes = operation.Consumes.Any() ? operation.Consumes : swaggerModeler.ServiceDefinition.Consumes;
}
public Method BuildMethod(HttpMethod httpMethod, string url, string methodName, string methodGroup)
{
EnsureUniqueMethodName(methodName, methodGroup);
var method = new Method
{
HttpMethod = httpMethod,
Url = url,
Name = methodName
};
method.RequestContentType = _effectiveConsumes.FirstOrDefault() ?? APP_JSON_MIME;
string produce = _effectiveConsumes.FirstOrDefault(s => s.StartsWith(APP_JSON_MIME, StringComparison.OrdinalIgnoreCase));
if (!string.IsNullOrEmpty(produce))
{
method.RequestContentType = produce;
}
if (method.RequestContentType.StartsWith(APP_JSON_MIME, StringComparison.OrdinalIgnoreCase) &&
method.RequestContentType.IndexOf("charset=", StringComparison.OrdinalIgnoreCase) == -1)
{
// Enable UTF-8 charset
method.RequestContentType += "; charset=utf-8";
}
method.Description = _operation.Description;
method.Summary = _operation.Summary;
// Service parameters
if (_operation.Parameters != null)
{
foreach (var swaggerParameter in DeduplicateParameters(_operation.Parameters))
{
var parameter = ((ParameterBuilder) swaggerParameter.GetBuilder(_swaggerModeler)).Build();
method.Parameters.Add(parameter);
StringBuilder parameterName = new StringBuilder(parameter.Name);
parameterName = CollectionFormatBuilder.OnBuildMethodParameter(method, swaggerParameter,
parameterName);
if (swaggerParameter.In == ParameterLocation.Header)
{
method.RequestHeaders[swaggerParameter.Name] =
string.Format(CultureInfo.InvariantCulture, "{{{0}}}", parameterName);
}
}
}
// Build header object
var responseHeaders = new Dictionary<string, Header>();
foreach (var response in _operation.Responses.Values)
{
if (response.Headers != null)
{
response.Headers.ForEach( h => responseHeaders[h.Key] = h.Value);
}
}
var headerTypeName = string.Format(CultureInfo.InvariantCulture,
"{0}-{1}-Headers", methodGroup, methodName).Trim('-');
var headerType = new CompositeType
{
Name = headerTypeName,
SerializedName = headerTypeName,
Documentation = string.Format(CultureInfo.InvariantCulture, "Defines headers for {0} operation.", methodName)
};
responseHeaders.ForEach(h =>
{
var property = new Property
{
Name = h.Key,
SerializedName = h.Key,
Type = h.Value.GetBuilder(this._swaggerModeler).BuildServiceType(h.Key),
Documentation = h.Value.Description
};
headerType.Properties.Add(property);
});
if (!headerType.Properties.Any())
{
headerType = null;
}
// Response format
var typesList = new List<Stack<IType>>();
foreach (var response in _operation.Responses)
{
if (string.Equals(response.Key, "default", StringComparison.OrdinalIgnoreCase))
{
TryBuildDefaultResponse(methodName, response.Value, method, headerType);
}
else
{
if (
!(TryBuildResponse(methodName, response.Key.ToHttpStatusCode(), response.Value, method,
typesList, headerType) ||
TryBuildStreamResponse(response.Key.ToHttpStatusCode(), response.Value, method, typesList, headerType) ||
TryBuildEmptyResponse(methodName, response.Key.ToHttpStatusCode(), response.Value, method,
typesList, headerType)))
{
throw new InvalidOperationException(
string.Format(CultureInfo.InvariantCulture,
Resources.UnsupportedMimeTypeForResponseBody,
methodName,
response.Key));
}
}
}
method.ReturnType = BuildMethodReturnType(typesList, headerType);
if (method.Responses.Count == 0)
{
method.ReturnType = method.DefaultResponse;
}
if (method.ReturnType.Headers != null)
{
_swaggerModeler.ServiceClient.HeaderTypes.Add(method.ReturnType.Headers as CompositeType);
}
// Copy extensions
_operation.Extensions.ForEach(extention => method.Extensions.Add(extention.Key, extention.Value));
return method;
}
private static IEnumerable<SwaggerParameter> DeduplicateParameters(IEnumerable<SwaggerParameter> parameters)
{
return parameters
.Select(s =>
{
// if parameter with the same name exists in Body and Path/Query then we need to give it a unique name
if (s.In == ParameterLocation.Body)
{
string newName = s.Name;
while (parameters.Any(t => t.In != ParameterLocation.Body &&
string.Equals(t.Name, newName,
StringComparison.OrdinalIgnoreCase)))
{
newName += "Body";
}
s.Name = newName;
}
// if parameter with same name exists in Query and Path, make Query one required
if (s.In == ParameterLocation.Query &&
parameters.Any(t => t.In == ParameterLocation.Path &&
string.Equals(t.Name, s.Name, StringComparison.OrdinalIgnoreCase)))
{
s.IsRequired = true;
}
return s;
});
}
private static void BuildMethodReturnTypeStack(IType type, List<Stack<IType>> types)
{
var typeStack = new Stack<IType>();
typeStack.Push(type);
types.Add(typeStack);
}
private Response BuildMethodReturnType(List<Stack<IType>> types, IType headerType)
{
IType baseType = PrimaryType.Object;
// Return null if no response is specified
if (types.Count == 0)
{
return new Response(null, headerType);
}
// Return first if only one return type
if (types.Count == 1)
{
return new Response(types.First().Pop(), headerType);
}
// BuildParameter up type inheritance tree
types.ForEach(typeStack =>
{
IType type = typeStack.Peek();
while (!Equals(type, baseType))
{
if (type is CompositeType && _swaggerModeler.ExtendedTypes.ContainsKey(type.Name))
{
type = _swaggerModeler.GeneratedTypes[_swaggerModeler.ExtendedTypes[type.Name]];
}
else
{
type = baseType;
}
typeStack.Push(type);
}
});
// Eliminate commonly shared base classes
while (!types.First().IsNullOrEmpty())
{
IType currentType = types.First().Peek();
foreach (var typeStack in types)
{
IType t = typeStack.Pop();
if (!Equals(t, currentType))
{
return new Response(baseType, headerType);
}
}
baseType = currentType;
}
return new Response(baseType, headerType);
}
private bool TryBuildStreamResponse(HttpStatusCode responseStatusCode, OperationResponse response,
Method method, List<Stack<IType>> types, IType headerType)
{
bool handled = false;
if (SwaggerOperationProducesNotEmpty())
{
if (response.Schema != null)
{
IType serviceType = response.Schema.GetBuilder(_swaggerModeler)
.BuildServiceType(response.Schema.Reference.StripDefinitionPath());
Debug.Assert(serviceType != null);
BuildMethodReturnTypeStack(serviceType, types);
var compositeType = serviceType as CompositeType;
if (compositeType != null)
{
VerifyFirstPropertyIsByteArray(compositeType);
}
method.Responses[responseStatusCode] = new Response(serviceType, headerType);
handled = true;
}
}
return handled;
}
private void VerifyFirstPropertyIsByteArray(CompositeType serviceType)
{
var referenceKey = serviceType.Name;
var responseType = _swaggerModeler.GeneratedTypes[referenceKey];
var property = responseType.Properties.FirstOrDefault(p => p.Type == PrimaryType.ByteArray);
if (property == null)
{
throw new KeyNotFoundException(
"Please specify a field with type of System.Byte[] to deserialize the file contents to");
}
}
private bool TryBuildResponse(string methodName, HttpStatusCode responseStatusCode,
OperationResponse response, Method method, List<Stack<IType>> types, IType headerType)
{
bool handled = false;
IType serviceType;
if (SwaggerOperationProducesJson())
{
if (TryBuildResponseBody(methodName, response,
s => GenerateResponseObjectName(s, responseStatusCode), out serviceType))
{
method.Responses[responseStatusCode] = new Response(serviceType, headerType);
BuildMethodReturnTypeStack(serviceType, types);
handled = true;
}
}
return handled;
}
private bool TryBuildEmptyResponse(string methodName, HttpStatusCode responseStatusCode,
OperationResponse response, Method method, List<Stack<IType>> types, IType headerType)
{
bool handled = false;
if (response.Schema == null)
{
method.Responses[responseStatusCode] = new Response(null, headerType);
handled = true;
}
else
{
if (_operation.Produces.IsNullOrEmpty())
{
method.Responses[responseStatusCode] = new Response(PrimaryType.Object, headerType);
BuildMethodReturnTypeStack(PrimaryType.Object, types);
handled = true;
}
var unwrapedSchemaProperties =
_swaggerModeler.Resolver.Unwrap(response.Schema).Properties;
if (unwrapedSchemaProperties != null && unwrapedSchemaProperties.Any())
{
Logger.LogWarning(Resources.NoProduceOperationWithBody,
methodName);
}
}
return handled;
}
private void TryBuildDefaultResponse(string methodName, OperationResponse response, Method method, IType headerType)
{
IType errorModel = null;
if (SwaggerOperationProducesJson())
{
if (TryBuildResponseBody(methodName, response, s => GenerateErrorModelName(s), out errorModel))
{
method.DefaultResponse = new Response(errorModel, headerType);
}
}
}
private bool TryBuildResponseBody(string methodName, OperationResponse response,
Func<string, string> typeNamer, out IType responseType)
{
bool handled = false;
responseType = null;
if (SwaggerOperationProducesJson())
{
if (response.Schema != null)
{
string referenceKey;
if (response.Schema.Reference != null)
{
referenceKey = response.Schema.Reference.StripDefinitionPath();
response.Schema.Reference = referenceKey;
}
else
{
referenceKey = typeNamer(methodName);
}
responseType = response.Schema.GetBuilder(_swaggerModeler).BuildServiceType(referenceKey);
handled = true;
}
}
return handled;
}
private bool SwaggerOperationProducesJson()
{
return _effectiveProduces != null &&
_effectiveProduces.Any(s => s.StartsWith(APP_JSON_MIME, StringComparison.OrdinalIgnoreCase));
}
private bool SwaggerOperationProducesNotEmpty()
{
return _effectiveProduces != null
&& _effectiveProduces.Any();
}
private void EnsureUniqueMethodName(string methodName, string methodGroup)
{
string serviceOperationPrefix = "";
if (methodGroup != null)
{
serviceOperationPrefix = methodGroup + "_";
}
if (_swaggerModeler.ServiceClient.Methods.Any(m => m.Group == methodGroup && m.Name == methodName))
{
throw new ArgumentException(string.Format(CultureInfo.InvariantCulture,
Resources.DuplicateOperationIdException,
serviceOperationPrefix + methodName));
}
}
private static string GenerateResponseObjectName(string methodName, HttpStatusCode responseStatusCode)
{
return string.Format(CultureInfo.InvariantCulture,
"{0}{1}Response", methodName, responseStatusCode);
}
private static string GenerateErrorModelName(string methodName)
{
return string.Format(CultureInfo.InvariantCulture,
"{0}ErrorModel", methodName);
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Xml.Linq;
using Microsoft.WindowsAzure;
using Microsoft.WindowsAzure.Common;
using Microsoft.WindowsAzure.Common.Internals;
using Microsoft.WindowsAzure.Management.TrafficManager;
using Microsoft.WindowsAzure.Management.TrafficManager.Models;
namespace Microsoft.WindowsAzure.Management.TrafficManager
{
/// <summary>
/// The Traffic Manager API includes operations for managing definitions
/// for a specified profile.
/// </summary>
internal partial class DefinitionOperations : IServiceOperations<TrafficManagerManagementClient>, Microsoft.WindowsAzure.Management.TrafficManager.IDefinitionOperations
{
/// <summary>
/// Initializes a new instance of the DefinitionOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal DefinitionOperations(TrafficManagerManagementClient client)
{
this._client = client;
}
private TrafficManagerManagementClient _client;
/// <summary>
/// Gets a reference to the
/// Microsoft.WindowsAzure.Management.TrafficManager.TrafficManagerManagementClient.
/// </summary>
public TrafficManagerManagementClient Client
{
get { return this._client; }
}
/// <summary>
/// Creates a new definition for a specified profile. (see
/// http://msdn.microsoft.com/en-us/library/hh758257.aspx for more
/// information)
/// </summary>
/// <param name='profileName'>
/// Required. The name of the profile to create a new definition for.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the Create Definition operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public async System.Threading.Tasks.Task<OperationResponse> CreateAsync(string profileName, DefinitionCreateParameters parameters, CancellationToken cancellationToken)
{
// Validate
if (profileName == null)
{
throw new ArgumentNullException("profileName");
}
if (parameters == null)
{
throw new ArgumentNullException("parameters");
}
if (parameters.DnsOptions == null)
{
throw new ArgumentNullException("parameters.DnsOptions");
}
if (parameters.Monitors == null)
{
throw new ArgumentNullException("parameters.Monitors");
}
if (parameters.Monitors != null)
{
foreach (DefinitionMonitor monitorsParameterItem in parameters.Monitors)
{
if (monitorsParameterItem.HttpOptions == null)
{
throw new ArgumentNullException("parameters.Monitors.HttpOptions");
}
}
}
if (parameters.Policy == null)
{
throw new ArgumentNullException("parameters.Policy");
}
if (parameters.Policy.Endpoints == null)
{
throw new ArgumentNullException("parameters.Policy.Endpoints");
}
if (parameters.Policy.Endpoints != null)
{
foreach (DefinitionEndpointCreateParameters endpointsParameterItem in parameters.Policy.Endpoints)
{
if (endpointsParameterItem.DomainName == null)
{
throw new ArgumentNullException("parameters.Policy.Endpoints.DomainName");
}
}
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("profileName", profileName);
tracingParameters.Add("parameters", parameters);
Tracing.Enter(invocationId, this, "CreateAsync", tracingParameters);
}
// Construct URL
string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/WATM/profiles/" + profileName.Trim() + "/definitions";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Post;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2011-10-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Serialize Request
string requestContent = null;
XDocument requestDoc = new XDocument();
XElement definitionElement = new XElement(XName.Get("Definition", "http://schemas.microsoft.com/windowsazure"));
requestDoc.Add(definitionElement);
XElement dnsOptionsElement = new XElement(XName.Get("DnsOptions", "http://schemas.microsoft.com/windowsazure"));
definitionElement.Add(dnsOptionsElement);
XElement timeToLiveInSecondsElement = new XElement(XName.Get("TimeToLiveInSeconds", "http://schemas.microsoft.com/windowsazure"));
timeToLiveInSecondsElement.Value = parameters.DnsOptions.TimeToLiveInSeconds.ToString();
dnsOptionsElement.Add(timeToLiveInSecondsElement);
if (parameters.Monitors is ILazyCollection == false || ((ILazyCollection)parameters.Monitors).IsInitialized)
{
XElement monitorsSequenceElement = new XElement(XName.Get("Monitors", "http://schemas.microsoft.com/windowsazure"));
foreach (DefinitionMonitor monitorsItem in parameters.Monitors)
{
XElement monitorElement = new XElement(XName.Get("Monitor", "http://schemas.microsoft.com/windowsazure"));
monitorsSequenceElement.Add(monitorElement);
XElement intervalInSecondsElement = new XElement(XName.Get("IntervalInSeconds", "http://schemas.microsoft.com/windowsazure"));
intervalInSecondsElement.Value = monitorsItem.IntervalInSeconds.ToString();
monitorElement.Add(intervalInSecondsElement);
XElement timeoutInSecondsElement = new XElement(XName.Get("TimeoutInSeconds", "http://schemas.microsoft.com/windowsazure"));
timeoutInSecondsElement.Value = monitorsItem.TimeoutInSeconds.ToString();
monitorElement.Add(timeoutInSecondsElement);
XElement toleratedNumberOfFailuresElement = new XElement(XName.Get("ToleratedNumberOfFailures", "http://schemas.microsoft.com/windowsazure"));
toleratedNumberOfFailuresElement.Value = monitorsItem.ToleratedNumberOfFailures.ToString();
monitorElement.Add(toleratedNumberOfFailuresElement);
XElement protocolElement = new XElement(XName.Get("Protocol", "http://schemas.microsoft.com/windowsazure"));
protocolElement.Value = TrafficManagerManagementClient.DefinitionMonitorProtocolToString(monitorsItem.Protocol);
monitorElement.Add(protocolElement);
XElement portElement = new XElement(XName.Get("Port", "http://schemas.microsoft.com/windowsazure"));
portElement.Value = monitorsItem.Port.ToString();
monitorElement.Add(portElement);
XElement httpOptionsElement = new XElement(XName.Get("HttpOptions", "http://schemas.microsoft.com/windowsazure"));
monitorElement.Add(httpOptionsElement);
if (monitorsItem.HttpOptions.Verb != null)
{
XElement verbElement = new XElement(XName.Get("Verb", "http://schemas.microsoft.com/windowsazure"));
verbElement.Value = "GET";
httpOptionsElement.Add(verbElement);
}
if (monitorsItem.HttpOptions.RelativePath != null)
{
XElement relativePathElement = new XElement(XName.Get("RelativePath", "http://schemas.microsoft.com/windowsazure"));
relativePathElement.Value = monitorsItem.HttpOptions.RelativePath;
httpOptionsElement.Add(relativePathElement);
}
XElement expectedStatusCodeElement = new XElement(XName.Get("ExpectedStatusCode", "http://schemas.microsoft.com/windowsazure"));
expectedStatusCodeElement.Value = "200";
httpOptionsElement.Add(expectedStatusCodeElement);
}
definitionElement.Add(monitorsSequenceElement);
}
XElement policyElement = new XElement(XName.Get("Policy", "http://schemas.microsoft.com/windowsazure"));
definitionElement.Add(policyElement);
XElement loadBalancingMethodElement = new XElement(XName.Get("LoadBalancingMethod", "http://schemas.microsoft.com/windowsazure"));
loadBalancingMethodElement.Value = parameters.Policy.LoadBalancingMethod.ToString();
policyElement.Add(loadBalancingMethodElement);
if (parameters.Policy.Endpoints is ILazyCollection == false || ((ILazyCollection)parameters.Policy.Endpoints).IsInitialized)
{
XElement endpointsSequenceElement = new XElement(XName.Get("Endpoints", "http://schemas.microsoft.com/windowsazure"));
foreach (DefinitionEndpointCreateParameters endpointsItem in parameters.Policy.Endpoints)
{
XElement endpointElement = new XElement(XName.Get("Endpoint", "http://schemas.microsoft.com/windowsazure"));
endpointsSequenceElement.Add(endpointElement);
XElement domainNameElement = new XElement(XName.Get("DomainName", "http://schemas.microsoft.com/windowsazure"));
domainNameElement.Value = endpointsItem.DomainName;
endpointElement.Add(domainNameElement);
XElement statusElement = new XElement(XName.Get("Status", "http://schemas.microsoft.com/windowsazure"));
statusElement.Value = endpointsItem.Status.ToString();
endpointElement.Add(statusElement);
XElement typeElement = new XElement(XName.Get("Type", "http://schemas.microsoft.com/windowsazure"));
typeElement.Value = endpointsItem.Type.ToString();
endpointElement.Add(typeElement);
if (endpointsItem.Location != null)
{
XElement locationElement = new XElement(XName.Get("Location", "http://schemas.microsoft.com/windowsazure"));
locationElement.Value = endpointsItem.Location;
endpointElement.Add(locationElement);
}
if (endpointsItem.Weight != null)
{
XElement weightElement = new XElement(XName.Get("Weight", "http://schemas.microsoft.com/windowsazure"));
weightElement.Value = endpointsItem.Weight.ToString();
endpointElement.Add(weightElement);
}
if (endpointsItem.MinChildEndpoints != null)
{
XElement minChildEndpointsElement = new XElement(XName.Get("MinChildEndpoints", "http://schemas.microsoft.com/windowsazure"));
minChildEndpointsElement.Value = endpointsItem.MinChildEndpoints.ToString();
endpointElement.Add(minChildEndpointsElement);
}
}
policyElement.Add(endpointsSequenceElement);
}
requestContent = requestDoc.ToString();
httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml");
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
OperationResponse result = null;
result = new OperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Returns an existing profile definition. (see
/// http://msdn.microsoft.com/en-us/library/hh758248.aspx for more
/// information)
/// </summary>
/// <param name='profileName'>
/// Required. The name of the profile to get definition from.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The Get Definition operation response.
/// </returns>
public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.TrafficManager.Models.DefinitionGetResponse> GetAsync(string profileName, CancellationToken cancellationToken)
{
// Validate
if (profileName == null)
{
throw new ArgumentNullException("profileName");
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("profileName", profileName);
Tracing.Enter(invocationId, this, "GetAsync", tracingParameters);
}
// Construct URL
string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/WATM/profiles/" + profileName.Trim() + "/definitions/1";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2011-10-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
DefinitionGetResponse result = null;
// Deserialize Response
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new DefinitionGetResponse();
XDocument responseDoc = XDocument.Parse(responseContent);
XElement definitionElement = responseDoc.Element(XName.Get("Definition", "http://schemas.microsoft.com/windowsazure"));
if (definitionElement != null)
{
Definition definitionInstance = new Definition();
result.Definition = definitionInstance;
XElement dnsOptionsElement = definitionElement.Element(XName.Get("DnsOptions", "http://schemas.microsoft.com/windowsazure"));
if (dnsOptionsElement != null)
{
DefinitionDnsOptions dnsOptionsInstance = new DefinitionDnsOptions();
definitionInstance.DnsOptions = dnsOptionsInstance;
XElement timeToLiveInSecondsElement = dnsOptionsElement.Element(XName.Get("TimeToLiveInSeconds", "http://schemas.microsoft.com/windowsazure"));
if (timeToLiveInSecondsElement != null)
{
int timeToLiveInSecondsInstance = int.Parse(timeToLiveInSecondsElement.Value, CultureInfo.InvariantCulture);
dnsOptionsInstance.TimeToLiveInSeconds = timeToLiveInSecondsInstance;
}
}
XElement statusElement = definitionElement.Element(XName.Get("Status", "http://schemas.microsoft.com/windowsazure"));
if (statusElement != null)
{
ProfileDefinitionStatus statusInstance = ((ProfileDefinitionStatus)Enum.Parse(typeof(ProfileDefinitionStatus), statusElement.Value, true));
definitionInstance.Status = statusInstance;
}
XElement versionElement = definitionElement.Element(XName.Get("Version", "http://schemas.microsoft.com/windowsazure"));
if (versionElement != null)
{
int versionInstance = int.Parse(versionElement.Value, CultureInfo.InvariantCulture);
definitionInstance.Version = versionInstance;
}
XElement monitorsSequenceElement = definitionElement.Element(XName.Get("Monitors", "http://schemas.microsoft.com/windowsazure"));
if (monitorsSequenceElement != null)
{
foreach (XElement monitorsElement in monitorsSequenceElement.Elements(XName.Get("Monitor", "http://schemas.microsoft.com/windowsazure")))
{
DefinitionMonitor monitorInstance = new DefinitionMonitor();
definitionInstance.Monitors.Add(monitorInstance);
XElement intervalInSecondsElement = monitorsElement.Element(XName.Get("IntervalInSeconds", "http://schemas.microsoft.com/windowsazure"));
if (intervalInSecondsElement != null)
{
int intervalInSecondsInstance = int.Parse(intervalInSecondsElement.Value, CultureInfo.InvariantCulture);
monitorInstance.IntervalInSeconds = intervalInSecondsInstance;
}
XElement timeoutInSecondsElement = monitorsElement.Element(XName.Get("TimeoutInSeconds", "http://schemas.microsoft.com/windowsazure"));
if (timeoutInSecondsElement != null)
{
int timeoutInSecondsInstance = int.Parse(timeoutInSecondsElement.Value, CultureInfo.InvariantCulture);
monitorInstance.TimeoutInSeconds = timeoutInSecondsInstance;
}
XElement toleratedNumberOfFailuresElement = monitorsElement.Element(XName.Get("ToleratedNumberOfFailures", "http://schemas.microsoft.com/windowsazure"));
if (toleratedNumberOfFailuresElement != null)
{
int toleratedNumberOfFailuresInstance = int.Parse(toleratedNumberOfFailuresElement.Value, CultureInfo.InvariantCulture);
monitorInstance.ToleratedNumberOfFailures = toleratedNumberOfFailuresInstance;
}
XElement protocolElement = monitorsElement.Element(XName.Get("Protocol", "http://schemas.microsoft.com/windowsazure"));
if (protocolElement != null)
{
DefinitionMonitorProtocol protocolInstance = TrafficManagerManagementClient.ParseDefinitionMonitorProtocol(protocolElement.Value);
monitorInstance.Protocol = protocolInstance;
}
XElement portElement = monitorsElement.Element(XName.Get("Port", "http://schemas.microsoft.com/windowsazure"));
if (portElement != null)
{
int portInstance = int.Parse(portElement.Value, CultureInfo.InvariantCulture);
monitorInstance.Port = portInstance;
}
XElement httpOptionsElement = monitorsElement.Element(XName.Get("HttpOptions", "http://schemas.microsoft.com/windowsazure"));
if (httpOptionsElement != null)
{
DefinitionMonitorHTTPOptions httpOptionsInstance = new DefinitionMonitorHTTPOptions();
monitorInstance.HttpOptions = httpOptionsInstance;
XElement verbElement = httpOptionsElement.Element(XName.Get("Verb", "http://schemas.microsoft.com/windowsazure"));
if (verbElement != null)
{
string verbInstance = verbElement.Value;
httpOptionsInstance.Verb = verbInstance;
}
XElement relativePathElement = httpOptionsElement.Element(XName.Get("RelativePath", "http://schemas.microsoft.com/windowsazure"));
if (relativePathElement != null)
{
string relativePathInstance = relativePathElement.Value;
httpOptionsInstance.RelativePath = relativePathInstance;
}
XElement expectedStatusCodeElement = httpOptionsElement.Element(XName.Get("ExpectedStatusCode", "http://schemas.microsoft.com/windowsazure"));
if (expectedStatusCodeElement != null)
{
int expectedStatusCodeInstance = int.Parse(expectedStatusCodeElement.Value, CultureInfo.InvariantCulture);
httpOptionsInstance.ExpectedStatusCode = expectedStatusCodeInstance;
}
}
}
}
XElement policyElement = definitionElement.Element(XName.Get("Policy", "http://schemas.microsoft.com/windowsazure"));
if (policyElement != null)
{
DefinitionPolicyResponse policyInstance = new DefinitionPolicyResponse();
definitionInstance.Policy = policyInstance;
XElement loadBalancingMethodElement = policyElement.Element(XName.Get("LoadBalancingMethod", "http://schemas.microsoft.com/windowsazure"));
if (loadBalancingMethodElement != null)
{
LoadBalancingMethod loadBalancingMethodInstance = ((LoadBalancingMethod)Enum.Parse(typeof(LoadBalancingMethod), loadBalancingMethodElement.Value, true));
policyInstance.LoadBalancingMethod = loadBalancingMethodInstance;
}
XElement endpointsSequenceElement = policyElement.Element(XName.Get("Endpoints", "http://schemas.microsoft.com/windowsazure"));
if (endpointsSequenceElement != null)
{
foreach (XElement endpointsElement in endpointsSequenceElement.Elements(XName.Get("Endpoint", "http://schemas.microsoft.com/windowsazure")))
{
DefinitionEndpointResponse endpointInstance = new DefinitionEndpointResponse();
policyInstance.Endpoints.Add(endpointInstance);
XElement domainNameElement = endpointsElement.Element(XName.Get("DomainName", "http://schemas.microsoft.com/windowsazure"));
if (domainNameElement != null)
{
string domainNameInstance = domainNameElement.Value;
endpointInstance.DomainName = domainNameInstance;
}
XElement statusElement2 = endpointsElement.Element(XName.Get("Status", "http://schemas.microsoft.com/windowsazure"));
if (statusElement2 != null)
{
EndpointStatus statusInstance2 = ((EndpointStatus)Enum.Parse(typeof(EndpointStatus), statusElement2.Value, true));
endpointInstance.Status = statusInstance2;
}
XElement typeElement = endpointsElement.Element(XName.Get("Type", "http://schemas.microsoft.com/windowsazure"));
if (typeElement != null)
{
EndpointType typeInstance = ((EndpointType)Enum.Parse(typeof(EndpointType), typeElement.Value, true));
endpointInstance.Type = typeInstance;
}
XElement locationElement = endpointsElement.Element(XName.Get("Location", "http://schemas.microsoft.com/windowsazure"));
if (locationElement != null)
{
string locationInstance = locationElement.Value;
endpointInstance.Location = locationInstance;
}
XElement monitorStatusElement = endpointsElement.Element(XName.Get("MonitorStatus", "http://schemas.microsoft.com/windowsazure"));
if (monitorStatusElement != null)
{
DefinitionEndpointMonitorStatus monitorStatusInstance = ((DefinitionEndpointMonitorStatus)Enum.Parse(typeof(DefinitionEndpointMonitorStatus), monitorStatusElement.Value, true));
endpointInstance.MonitorStatus = monitorStatusInstance;
}
XElement weightElement = endpointsElement.Element(XName.Get("Weight", "http://schemas.microsoft.com/windowsazure"));
if (weightElement != null && string.IsNullOrEmpty(weightElement.Value) == false)
{
int weightInstance = int.Parse(weightElement.Value, CultureInfo.InvariantCulture);
endpointInstance.Weight = weightInstance;
}
XElement minChildEndpointsElement = endpointsElement.Element(XName.Get("MinChildEndpoints", "http://schemas.microsoft.com/windowsazure"));
if (minChildEndpointsElement != null && string.IsNullOrEmpty(minChildEndpointsElement.Value) == false)
{
int minChildEndpointsInstance = int.Parse(minChildEndpointsElement.Value, CultureInfo.InvariantCulture);
endpointInstance.MinChildEndpoints = minChildEndpointsInstance;
}
}
}
XElement monitorStatusElement2 = policyElement.Element(XName.Get("MonitorStatus", "http://schemas.microsoft.com/windowsazure"));
if (monitorStatusElement2 != null)
{
DefinitionPolicyMonitorStatus monitorStatusInstance2 = ((DefinitionPolicyMonitorStatus)Enum.Parse(typeof(DefinitionPolicyMonitorStatus), monitorStatusElement2.Value, true));
policyInstance.MonitorStatus = monitorStatusInstance2;
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Returns all definitions of a profile (see
/// http://msdn.microsoft.com/en-us/library/hh758252.aspx for more
/// information)
/// </summary>
/// <param name='profileName'>
/// Required. The name of the profile to return all definitions
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The List Definitions operation response.
/// </returns>
public async System.Threading.Tasks.Task<Microsoft.WindowsAzure.Management.TrafficManager.Models.DefinitionsListResponse> ListAsync(string profileName, CancellationToken cancellationToken)
{
// Validate
if (profileName == null)
{
throw new ArgumentNullException("profileName");
}
// Tracing
bool shouldTrace = CloudContext.Configuration.Tracing.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = Tracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("profileName", profileName);
Tracing.Enter(invocationId, this, "ListAsync", tracingParameters);
}
// Construct URL
string url = "/" + (this.Client.Credentials.SubscriptionId != null ? this.Client.Credentials.SubscriptionId.Trim() : "") + "/services/WATM/profiles/" + profileName.Trim() + "/definitions";
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2011-10-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
Tracing.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
Tracing.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
Tracing.Error(invocationId, ex);
}
throw ex;
}
// Create Result
DefinitionsListResponse result = null;
// Deserialize Response
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new DefinitionsListResponse();
XDocument responseDoc = XDocument.Parse(responseContent);
XElement definitionsSequenceElement = responseDoc.Element(XName.Get("Definitions", "http://schemas.microsoft.com/windowsazure"));
if (definitionsSequenceElement != null)
{
foreach (XElement definitionsElement in definitionsSequenceElement.Elements(XName.Get("Definition", "http://schemas.microsoft.com/windowsazure")))
{
Definition definitionInstance = new Definition();
result.Definitions.Add(definitionInstance);
XElement dnsOptionsElement = definitionsElement.Element(XName.Get("DnsOptions", "http://schemas.microsoft.com/windowsazure"));
if (dnsOptionsElement != null)
{
DefinitionDnsOptions dnsOptionsInstance = new DefinitionDnsOptions();
definitionInstance.DnsOptions = dnsOptionsInstance;
XElement timeToLiveInSecondsElement = dnsOptionsElement.Element(XName.Get("TimeToLiveInSeconds", "http://schemas.microsoft.com/windowsazure"));
if (timeToLiveInSecondsElement != null)
{
int timeToLiveInSecondsInstance = int.Parse(timeToLiveInSecondsElement.Value, CultureInfo.InvariantCulture);
dnsOptionsInstance.TimeToLiveInSeconds = timeToLiveInSecondsInstance;
}
}
XElement statusElement = definitionsElement.Element(XName.Get("Status", "http://schemas.microsoft.com/windowsazure"));
if (statusElement != null)
{
ProfileDefinitionStatus statusInstance = ((ProfileDefinitionStatus)Enum.Parse(typeof(ProfileDefinitionStatus), statusElement.Value, true));
definitionInstance.Status = statusInstance;
}
XElement versionElement = definitionsElement.Element(XName.Get("Version", "http://schemas.microsoft.com/windowsazure"));
if (versionElement != null)
{
int versionInstance = int.Parse(versionElement.Value, CultureInfo.InvariantCulture);
definitionInstance.Version = versionInstance;
}
XElement monitorsSequenceElement = definitionsElement.Element(XName.Get("Monitors", "http://schemas.microsoft.com/windowsazure"));
if (monitorsSequenceElement != null)
{
foreach (XElement monitorsElement in monitorsSequenceElement.Elements(XName.Get("Monitor", "http://schemas.microsoft.com/windowsazure")))
{
DefinitionMonitor monitorInstance = new DefinitionMonitor();
definitionInstance.Monitors.Add(monitorInstance);
XElement intervalInSecondsElement = monitorsElement.Element(XName.Get("IntervalInSeconds", "http://schemas.microsoft.com/windowsazure"));
if (intervalInSecondsElement != null)
{
int intervalInSecondsInstance = int.Parse(intervalInSecondsElement.Value, CultureInfo.InvariantCulture);
monitorInstance.IntervalInSeconds = intervalInSecondsInstance;
}
XElement timeoutInSecondsElement = monitorsElement.Element(XName.Get("TimeoutInSeconds", "http://schemas.microsoft.com/windowsazure"));
if (timeoutInSecondsElement != null)
{
int timeoutInSecondsInstance = int.Parse(timeoutInSecondsElement.Value, CultureInfo.InvariantCulture);
monitorInstance.TimeoutInSeconds = timeoutInSecondsInstance;
}
XElement toleratedNumberOfFailuresElement = monitorsElement.Element(XName.Get("ToleratedNumberOfFailures", "http://schemas.microsoft.com/windowsazure"));
if (toleratedNumberOfFailuresElement != null)
{
int toleratedNumberOfFailuresInstance = int.Parse(toleratedNumberOfFailuresElement.Value, CultureInfo.InvariantCulture);
monitorInstance.ToleratedNumberOfFailures = toleratedNumberOfFailuresInstance;
}
XElement protocolElement = monitorsElement.Element(XName.Get("Protocol", "http://schemas.microsoft.com/windowsazure"));
if (protocolElement != null)
{
DefinitionMonitorProtocol protocolInstance = TrafficManagerManagementClient.ParseDefinitionMonitorProtocol(protocolElement.Value);
monitorInstance.Protocol = protocolInstance;
}
XElement portElement = monitorsElement.Element(XName.Get("Port", "http://schemas.microsoft.com/windowsazure"));
if (portElement != null)
{
int portInstance = int.Parse(portElement.Value, CultureInfo.InvariantCulture);
monitorInstance.Port = portInstance;
}
XElement httpOptionsElement = monitorsElement.Element(XName.Get("HttpOptions", "http://schemas.microsoft.com/windowsazure"));
if (httpOptionsElement != null)
{
DefinitionMonitorHTTPOptions httpOptionsInstance = new DefinitionMonitorHTTPOptions();
monitorInstance.HttpOptions = httpOptionsInstance;
XElement verbElement = httpOptionsElement.Element(XName.Get("Verb", "http://schemas.microsoft.com/windowsazure"));
if (verbElement != null)
{
string verbInstance = verbElement.Value;
httpOptionsInstance.Verb = verbInstance;
}
XElement relativePathElement = httpOptionsElement.Element(XName.Get("RelativePath", "http://schemas.microsoft.com/windowsazure"));
if (relativePathElement != null)
{
string relativePathInstance = relativePathElement.Value;
httpOptionsInstance.RelativePath = relativePathInstance;
}
XElement expectedStatusCodeElement = httpOptionsElement.Element(XName.Get("ExpectedStatusCode", "http://schemas.microsoft.com/windowsazure"));
if (expectedStatusCodeElement != null)
{
int expectedStatusCodeInstance = int.Parse(expectedStatusCodeElement.Value, CultureInfo.InvariantCulture);
httpOptionsInstance.ExpectedStatusCode = expectedStatusCodeInstance;
}
}
}
}
XElement policyElement = definitionsElement.Element(XName.Get("Policy", "http://schemas.microsoft.com/windowsazure"));
if (policyElement != null)
{
DefinitionPolicyResponse policyInstance = new DefinitionPolicyResponse();
definitionInstance.Policy = policyInstance;
XElement loadBalancingMethodElement = policyElement.Element(XName.Get("LoadBalancingMethod", "http://schemas.microsoft.com/windowsazure"));
if (loadBalancingMethodElement != null)
{
LoadBalancingMethod loadBalancingMethodInstance = ((LoadBalancingMethod)Enum.Parse(typeof(LoadBalancingMethod), loadBalancingMethodElement.Value, true));
policyInstance.LoadBalancingMethod = loadBalancingMethodInstance;
}
XElement endpointsSequenceElement = policyElement.Element(XName.Get("Endpoints", "http://schemas.microsoft.com/windowsazure"));
if (endpointsSequenceElement != null)
{
foreach (XElement endpointsElement in endpointsSequenceElement.Elements(XName.Get("Endpoint", "http://schemas.microsoft.com/windowsazure")))
{
DefinitionEndpointResponse endpointInstance = new DefinitionEndpointResponse();
policyInstance.Endpoints.Add(endpointInstance);
XElement domainNameElement = endpointsElement.Element(XName.Get("DomainName", "http://schemas.microsoft.com/windowsazure"));
if (domainNameElement != null)
{
string domainNameInstance = domainNameElement.Value;
endpointInstance.DomainName = domainNameInstance;
}
XElement statusElement2 = endpointsElement.Element(XName.Get("Status", "http://schemas.microsoft.com/windowsazure"));
if (statusElement2 != null)
{
EndpointStatus statusInstance2 = ((EndpointStatus)Enum.Parse(typeof(EndpointStatus), statusElement2.Value, true));
endpointInstance.Status = statusInstance2;
}
XElement typeElement = endpointsElement.Element(XName.Get("Type", "http://schemas.microsoft.com/windowsazure"));
if (typeElement != null)
{
EndpointType typeInstance = ((EndpointType)Enum.Parse(typeof(EndpointType), typeElement.Value, true));
endpointInstance.Type = typeInstance;
}
XElement locationElement = endpointsElement.Element(XName.Get("Location", "http://schemas.microsoft.com/windowsazure"));
if (locationElement != null)
{
string locationInstance = locationElement.Value;
endpointInstance.Location = locationInstance;
}
XElement monitorStatusElement = endpointsElement.Element(XName.Get("MonitorStatus", "http://schemas.microsoft.com/windowsazure"));
if (monitorStatusElement != null)
{
DefinitionEndpointMonitorStatus monitorStatusInstance = ((DefinitionEndpointMonitorStatus)Enum.Parse(typeof(DefinitionEndpointMonitorStatus), monitorStatusElement.Value, true));
endpointInstance.MonitorStatus = monitorStatusInstance;
}
XElement weightElement = endpointsElement.Element(XName.Get("Weight", "http://schemas.microsoft.com/windowsazure"));
if (weightElement != null && string.IsNullOrEmpty(weightElement.Value) == false)
{
int weightInstance = int.Parse(weightElement.Value, CultureInfo.InvariantCulture);
endpointInstance.Weight = weightInstance;
}
XElement minChildEndpointsElement = endpointsElement.Element(XName.Get("MinChildEndpoints", "http://schemas.microsoft.com/windowsazure"));
if (minChildEndpointsElement != null && string.IsNullOrEmpty(minChildEndpointsElement.Value) == false)
{
int minChildEndpointsInstance = int.Parse(minChildEndpointsElement.Value, CultureInfo.InvariantCulture);
endpointInstance.MinChildEndpoints = minChildEndpointsInstance;
}
}
}
XElement monitorStatusElement2 = policyElement.Element(XName.Get("MonitorStatus", "http://schemas.microsoft.com/windowsazure"));
if (monitorStatusElement2 != null)
{
DefinitionPolicyMonitorStatus monitorStatusInstance2 = ((DefinitionPolicyMonitorStatus)Enum.Parse(typeof(DefinitionPolicyMonitorStatus), monitorStatusElement2.Value, true));
policyInstance.MonitorStatus = monitorStatusInstance2;
}
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
Tracing.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#define CONTRACTS_FULL
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics.Contracts;
using Microsoft.Research.ClousotRegression;
namespace ExamplesPurity
{
public class Simple
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=27,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=27,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=13,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=27,MethodILOffset=0)]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter untouched")]
public void ChangeFirstElementNoPure([Pure] int[] arr, object[] untouched)
{
Contract.Requires(arr != null);
Contract.Requires(arr.Length > 0);
arr[0] = 123;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=27,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=27,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=13,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=27,MethodILOffset=0)]
// Nothing to suggest
public void ChangeFirstElement([Pure] int[] arr, [Pure] object[] untouched)
{
Contract.Requires(arr != null);
Contract.Requires(arr.Length > 0);
arr[0] = 123;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=23,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=23,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=30,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=23,MethodILOffset=0)]
// Nothing to suggest
public void ChangeAllTheElements([Pure] int[] arr)
{
Contract.Requires(arr != null);
for (var i = 0; i < arr.Length; i++)
{
arr[i] = 1234;
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 'arr'",PrimaryILOffset=18,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(arr != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter arr")]
public int Unchanged(int[] arr)
{
var sum = 0;
for (var i = 0; i < arr.Length; i++)
{
sum += arr[i];
}
return sum;
}
}
}
namespace ForAllPreconditionInference
{
public class Tests
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=24,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=13,MethodILOffset=0)]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public void TestAllElements(string[] s)
{
for (var i = 0; i < s.Length; i++)
{
Contract.Assert(s[i] != null);
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=28,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=13,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public void TestAllElementsAndChangeThem(string[] s)
{
for (var i = 0; i < s.Length; i++)
{
Contract.Assert(s[i] != null);
s[i] = null;
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=10,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=10,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=1,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=10,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=17,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public void TestAllElementsBackwards(string[] s)
{
for (var i = s.Length - 1; i >= 0; i--)
{
Contract.Assert(s[i] != null);
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=24,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public void AssumeAllElements(string[] s)
{
for (var i = 0; i < s.Length; i++)
{
Contract.Assume(s[i] != null);
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=28,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=6,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public void AssumeAllElementsAndChangeThem(string[] s)
{
for (var i = 0; i < s.Length; i++)
{
Contract.Assume(s[i] != null);
s[i] = null;
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be above the upper bound",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(3 < s.Length);")]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(3, 4, i => s[i] != null));")]
public void AssertOneElement(string[] s)
{
Contract.Assert(s[3] != null);
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=3,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be above the upper bound",PrimaryILOffset=3,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=3,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(12 < s.Length);")]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
public int DoNotTestOneElement(string[] s)
{
int v;
if (s[12] == null)
{
v = 0;
}
else
{
v = 1;
}
return v;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=23,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly calling a method on a null reference",PrimaryILOffset=10,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public int Sum(string[] s)
{
var sum = 0;
for (var i = 0; i < s.Length; i++)
{
sum += s[i].Length;
}
return sum;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'",PrimaryILOffset=23,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly accessing a field on a null reference",PrimaryILOffset=10,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(s != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, s.Length, i => s[i] != null));")]
public int Sum(Dummy[] s)
{
var sum = 0;
for (var i = 0; i < s.Length; i++)
{
sum += s[i].f;
}
return sum;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be above the upper bound",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'. The static checker determined that the condition 's != null' should hold on entry. Nevertheless, the condition may be too strong for the callers. If you think it is ok, add a precondition to document it: Contract.Requires(s != null);",PrimaryILOffset=9,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly accessing a field on a null reference",PrimaryILOffset=10,MethodILOffset=0)]
// [RegressionOutcome("Contract.Requires(s != null);")] // unsound to infer it
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
public int Sum(Dummy[] s, int end)
{
var sum = 0;
for (var i = 0; i < end; i++)
{
sum += s[i].f;
}
return sum;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=31,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=31,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 'arr'",PrimaryILOffset=14,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=50,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=31,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly accessing a field on a null reference",PrimaryILOffset=32,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(arr != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter arr")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(left, arr.Length, i => arr[i] != null));")]
static public int PartitionWithLocal(Dummy[] arr, int left, int pivot)
{
Contract.Requires(left >= 0);
Contract.Requires(left < arr.Length);
var counter = left;
var z = 0;
while (counter < arr.Length )
{
if (arr[counter].f < pivot)
{
z++;
}
counter++;
}
return z;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=15,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=15,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=30,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=15,MethodILOffset=0)]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter genericArguments")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, genericArguments.Length, i => genericArguments[i] != null));")]
static void SanityCheckArguments(object[] genericArguments)
{
if (genericArguments == null)
{
throw new ArgumentNullException();
}
for (int i = 0; i < genericArguments.Length; i++)
{
if (genericArguments[i] == null)
{
throw new ArgumentNullException();
}
}
}
/*
[ClousotRegressionTest]
public int TestList(List<Dummy> list)
{
var x = 0;
foreach (var d in list)
{
x -= d.f;
}
return x;
}
[ClousotRegressionTest]
public int TestListWithContract(List<Dummy> list)
{
Contract.Requires(Contract.ForAll(list, v => v != null));
var x = 0;
foreach (var d in list)
{
x -= d.f;
}
return x;
}
*/
public class Dummy
{
public int f;
}
}
public class ExamplesWithControlFlow
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=12,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=12,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 's'. The static checker determined that the condition 's != null' should hold on entry. Nevertheless, the condition may be too strong for the callers. If you think it is ok, add a precondition to document it: Contract.Requires(s != null);",PrimaryILOffset=30,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=12,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=19,MethodILOffset=0)]
//[RegressionOutcome("Contract.Requires(s != null);")] // F: this is incorrect, should not be suggested
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter s")]
// Correct not to infer ForAll(0, s.Length, i => s[i] != null)
public void NotAllThePaths(int x, string[] s)
{
if (x > 10)
return;
for (var i = 0; i < s.Length; i++)
{
Contract.Assert(s[i] != null);
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=37,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=37,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=50,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=37,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly calling a method on a null reference",PrimaryILOffset=38,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=28,MethodILOffset=0)]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter arrStr")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, arrStr.Length, i => arrStr[i] != null));")]
public void Branches(bool b, string[] arrStr)
{
Contract.Requires(arrStr != null);
int len = 0;
for(var i = 0; i < arrStr.Length; i++)
{
if(b)
{
Contract.Assert(arrStr[i] != null);
}
else
{
len = arrStr[i].Length;
}
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=40,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=40,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=43,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=43,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 'array'",PrimaryILOffset=20,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=40,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=43,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=50,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(array != null);")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, 1, i => array[i] != null));")]
public void WriteNotInTheFirstPosition(object[] array, int index)
{
Contract.Requires(index > 0);
Contract.Requires(index < 10);
Contract.Requires(array.Length > 10);
array[index] = 34;
Contract.Assert(array[0] != null);
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be below the lower bound",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be above the upper bound",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=24,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=24,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 'array'",PrimaryILOffset=1,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=21,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=24,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"assert unproven",PrimaryILOffset=31,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(0 <= index);")]
[RegressionOutcome("Contract.Requires(index < array.Length);")]
[RegressionOutcome("Contract.Requires(array != null);")]
// nothing to infer on array content
public void Writesomewhere_NoPrecondition(object[] array, int index)
{
Contract.Requires(array.Length > 10);
array[index] = 34;
Contract.Assert(array[0] != null);
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=114,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=114,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=126,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=126,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=138,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=138,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=150,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=150,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=162,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=162,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=174,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=174,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=186,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=186,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=198,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=198,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=8,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=51,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=8,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=106,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=114,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=116,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=126,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=128,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=138,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=140,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=150,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=152,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=162,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=164,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=174,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=176,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=186,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=188,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=198,MethodILOffset=0)]
#if NETFRAMEWORK_4_0
[RegressionOutcome(Outcome=ProofOutcome.True,Message="requires is valid",PrimaryILOffset=13,MethodILOffset=213)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message="requires is valid",PrimaryILOffset=35,MethodILOffset=213)]
#else
#if CLOUSOT2
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"requires is valid",PrimaryILOffset=3,MethodILOffset=213)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"requires is valid",PrimaryILOffset=25,MethodILOffset=213)]
#else
[RegressionOutcome(Outcome = ProofOutcome.True, Message = @"requires is valid", PrimaryILOffset = 22, MethodILOffset = 213)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = @"requires is valid", PrimaryILOffset = 44, MethodILOffset = 213)]
#endif
#endif
#if CLOUSOT2
// nothing?
#else
[RegressionOutcome(Outcome = ProofOutcome.True, Message = @"valid non-null reference (as field receiver)", PrimaryILOffset = 14, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = @"valid non-null reference (as field receiver)", PrimaryILOffset = 46, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = @"valid non-null reference (as field receiver)", PrimaryILOffset = 61, MethodILOffset = 0)]
#endif
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"assert is valid",PrimaryILOffset=218,MethodILOffset=0)]
/*
Effect on the input array segment: Contract.ForAll(0, offset, Contract.Old(binaryForm[i]) == binaryForm[i])
Effect on the input array segment: Contract.ForAll(offset, offset + 1, changed?)
Effect on the input array segment: Contract.ForAll(offset + 1, offset + 2, changed?)
Effect on the input array segment: Contract.ForAll(offset + 2, offset + 3, changed?)
Effect on the input array segment: Contract.ForAll(offset + 3, offset + 4, changed?)
Effect on the input array segment: Contract.ForAll(offset + 4, offset + 5, changed?)
Effect on the input array segment: Contract.ForAll(offset + 5, offset + 6, changed?)
Effect on the input array segment: Contract.ForAll(offset + 6, offset + 7, changed?)
Effect on the input array segment: Contract.ForAll(offset + 7, offset + 8, changed?)
Effect on the input array segment: Contract.ForAll(offset + 8, binaryForm.Length, Contract.Old(binaryForm[i]) == binaryForm[i])
*/
public void MarshalHeader(byte[] binaryForm, int offset)
{
Contract.Requires(binaryForm != null);
Contract.Requires(offset >= 0);
Contract.Requires(offset + 8 < binaryForm.Length);
Contract.Requires(Contract.ForAll(binaryForm, b => b < 10));
binaryForm[offset] = 11;
binaryForm[offset + 1] = 10;
binaryForm[offset + 2] = 13;
binaryForm[offset + 3] = 14;
binaryForm[offset + 4] = 10;
binaryForm[offset + 5] = 11;
binaryForm[offset + 6] = 10;
binaryForm[offset + 7] = 10;
Contract.Assert(Contract.ForAll(0, offset, i => binaryForm[i] < 10));
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be above the upper bound",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=16,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=16,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=26,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Upper bound access ok",PrimaryILOffset=26,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 'arr'",PrimaryILOffset=2,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly calling a method on a null reference",PrimaryILOffset=3,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=39,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=16,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possibly calling a method on a null reference",PrimaryILOffset=17,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=26,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as receiver)",PrimaryILOffset=27,MethodILOffset=0)]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter arr")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(0, 1, i => arr[i] != null));")]
[RegressionOutcome("Contract.Requires(Contract.ForAll(1, arr.Length, i => arr[i] != null));")]
[RegressionOutcome("Contract.Requires(arr != null);")]
[RegressionOutcome("Contract.Requires(0 < arr.Length);")]
static public int Max(string[] arr)
{
var max = arr[0].Length;
for (var i = 1; i < arr.Length; i++)
{
if (max < arr[i].Length)
{
max = arr[i].Length;
}
}
return max;
}
}
public class FromSystemData
{
public int[] _columnSmiMetaData;
// F: Inference not working here: we infer the right invariant, but then we cannot read
// this._columnSmiMetaData.Length in the prestate
[ClousotRegressionTest]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"Lower bound access ok",PrimaryILOffset=20,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Array access might be above the upper bound",PrimaryILOffset=20,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as field receiver)",PrimaryILOffset=56,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"Possible use of a null array 'this._columnSmiMetaData'",PrimaryILOffset=61,MethodILOffset=0)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"valid non-null reference (as array)",PrimaryILOffset=20,MethodILOffset=0)]
[RegressionOutcome("Contract.Requires(this._columnSmiMetaData != null);")]
[RegressionOutcome("Consider adding the [Pure] attribute to the parameter metaData")]
public void Test(params object[] metaData)
{
if (metaData == null)
{
throw new Exception("metadata");
}
for (int i = 0; i < this._columnSmiMetaData.Length; i++)
{
if (metaData[i] == null)
{
throw new Exception("metadata[" + i + "]");
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using Gtk;
using System.Linq;
using Moscrif.IDE.Controls;
using Moscrif.IDE.Workspace;
using Moscrif.IDE.Devices;
using MessageDialogs = Moscrif.IDE.Controls.MessageDialog;
using Moscrif.IDE.Iface.Entities;
namespace Moscrif.IDE.Option
{
internal class FilteringPanel : OptionsPanel
{
FilteringWidget widget;
public override Widget CreatePanelWidget ()
{
return widget = new FilteringWidget (ParentDialog);
}
public override void ShowPanel()
{
}
public override void ApplyChanges ()
{
widget.Store ();
}
public override bool ValidateChanges ()
{
return true;
}
public override void Initialize (PreferencesDialog dialog, object dataObject)
{
base.Initialize (dialog, dataObject);
}
public override string Label {
get { return MainClass.Languages.Translate("filtering"); }
}
public override string Icon {
get { return "filter.png"; }
}
}
public partial class FilteringWidget : Gtk.Bin
{
private List<LogicalSystem> conditions;
Gtk.ListStore filterStore = new Gtk.ListStore( typeof(string),typeof(LogicalSystem));
Gtk.ListStore maskStore = new Gtk.ListStore(typeof(string));
Gtk.Window parentWindow;
public FilteringWidget(Gtk.Window parent)
{
parentWindow = parent;
this.Build();
tvFilter.AppendColumn(MainClass.Languages.Translate("name"), new Gtk.CellRendererText(), "text", 0);
tvMask.AppendColumn(MainClass.Languages.Translate("name"), new Gtk.CellRendererText(), "text", 0);
tvFilter.Model = filterStore;
tvMask.Model = maskStore;
if ( MainClass.Settings == null) return;
if(MainClass.Settings == null || MainClass.Settings.LogicalSort.Count<1)
MainClass.Settings.LogicalSort = LogicalSystem.GetDefaultLogicalSystem();
this.conditions = new List<LogicalSystem>();
conditions =MainClass.Tools.Clone(MainClass.Settings.LogicalSort);
TreeIter ti = new TreeIter();
foreach (LogicalSystem cd in conditions){
ti = filterStore.AppendValues(cd.Display,cd);
}
tvFilter.Selection.Changed += delegate(object sender, EventArgs e)
{
maskStore.Clear();
LogicalSystem cd =GetSelected();
if (cd == null ) return;
if(cd.Mask == null) cd.Mask = new List<string>();
foreach (string rl in cd.Mask){
maskStore.AppendValues(rl);
}
};
if (conditions.Count>0)
tvFilter.Selection.SelectIter(ti);
}
private LogicalSystem GetSelected()
{
TreeSelection ts = tvFilter.Selection;
TreeIter ti = new TreeIter();
ts.GetSelected(out ti);
TreePath[] tp = ts.GetSelectedRows();
if (tp.Length < 1)
return null;
return (LogicalSystem)tvFilter.Model.GetValue(ti, 1);
}
public void Store()
{
if(MainClass.Settings!= null)
MainClass.Settings.LogicalSort.Clear();
MainClass.Settings.LogicalSort = new List<LogicalSystem>(conditions.ToArray());
}
protected virtual void OnBtnAddFilterClicked (object sender, System.EventArgs e)
{
EntryDialog ed = new EntryDialog("",MainClass.Languages.Translate("new_filter"),parentWindow);
int result = ed.Run();
if (result == (int)ResponseType.Ok){
string newStr = ed.TextEntry;
if (!String.IsNullOrEmpty(newStr) ){
LogicalSystem cdFind = conditions.Find(x=>x.Display.ToUpper() ==newStr.ToUpper());
if (cdFind != null){
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.Ok, MainClass.Languages.Translate("filter_is_exist", cdFind.Display), "", Gtk.MessageType.Error,parentWindow);
md.ShowDialog();
ed.Destroy();
return;
}
LogicalSystem cd= new LogicalSystem();
cd.Display =newStr;
filterStore.AppendValues(cd.Display,cd);
conditions.Add(cd);
}
}
ed.Destroy(); }
protected virtual void OnBtnDeleteFilterClicked (object sender, System.EventArgs e)
{
TreeSelection ts = tvFilter.Selection;
TreeIter ti = new TreeIter();
ts.GetSelected(out ti);
TreePath[] tp = ts.GetSelectedRows();
if (tp.Length < 1)
return ;
LogicalSystem cd = (LogicalSystem)tvFilter.Model.GetValue(ti, 1);
if (cd == null) return;
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.YesNo, MainClass.Languages.Translate("delete_filter", cd.Display), "", Gtk.MessageType.Question,parentWindow);
int result = md.ShowDialog();
if (result != (int)Gtk.ResponseType.Yes)
return;
conditions.Remove(cd);
maskStore.Clear();
filterStore.Remove(ref ti);
}
protected virtual void OnBtnEditFilterClicked (object sender, System.EventArgs e)
{
TreeSelection ts = tvFilter.Selection;
TreeIter ti = new TreeIter();
ts.GetSelected(out ti);
TreePath[] tp = ts.GetSelectedRows();
if (tp.Length < 1)
return ;
LogicalSystem cd = (LogicalSystem)tvFilter.Model.GetValue(ti, 1);
if (cd == null) return;
EntryDialog ed = new EntryDialog(cd.Display,MainClass.Languages.Translate("new_filter"),parentWindow);
int result = ed.Run();
if (result == (int)ResponseType.Ok){
string newStr = ed.TextEntry;
if (!String.IsNullOrEmpty(newStr) ){
if (newStr == cd.Display) return;
LogicalSystem cdFind = conditions.Find(x=>x.Display.ToUpper() ==newStr.ToUpper());
if (cdFind != null){
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.Ok, MainClass.Languages.Translate("filter_is_exist", cdFind.Display), "", Gtk.MessageType.Error,parentWindow);
md.ShowDialog();
ed.Destroy();
return;
}
LogicalSystem cdEdited = conditions.Find(x => x.Display.ToUpper() == cd.Display.ToUpper());
if (cdEdited == null){
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.Ok, MainClass.Languages.Translate("unspecified_error"), "", Gtk.MessageType.Error,parentWindow);
md.ShowDialog();
ed.Destroy();
return;
}
cdEdited.Display=newStr;
filterStore.SetValues(ti,cdEdited.Display,cdEdited);
//conditions.Find(cd).Name =ed.TextEntry;
}
}
ed.Destroy(); }
protected virtual void OnBtnAddMaskClicked (object sender, System.EventArgs e)
{
TreeSelection ts = tvFilter.Selection;
TreeIter ti = new TreeIter();
ts.GetSelected(out ti);
TreePath[] tp = ts.GetSelectedRows();
if (tp.Length < 1)
return ;
LogicalSystem cd = (LogicalSystem)tvFilter.Model.GetValue(ti, 1);
if (cd == null) return;
EntryDialog ed = new EntryDialog("",MainClass.Languages.Translate("new_mask"),parentWindow);
int result = ed.Run();
if (result == (int)ResponseType.Ok){
string newStr = ed.TextEntry;
if (!String.IsNullOrEmpty(newStr) ){
//int maxCountRule = 0;
/*foreach (string rlf in cd.Mask){
if (maxCountRule < rlf.Id) maxCountRule = rlf.Id;
}*/
string rlFind = cd.Mask.Find(x=>x.ToUpper() ==newStr.ToUpper());
if (rlFind != null){
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.Ok, MainClass.Languages.Translate("mask_is_exist", rlFind), "", Gtk.MessageType.Error,parentWindow);
md.ShowDialog();
ed.Destroy();
return;
}
maskStore.AppendValues(newStr);
LogicalSystem cd2 = conditions.Find(x => x.Display.ToUpper() == cd.Display.ToUpper());
cd2.Mask.Add(newStr);
filterStore.SetValues(ti,cd2.Display,cd2);
}
}
ed.Destroy();
}
protected virtual void OnBtnDeleteMaskClicked (object sender, System.EventArgs e)
{
TreeSelection ts = tvFilter.Selection;
TreeIter ti = new TreeIter();
ts.GetSelected(out ti);
TreePath[] tp = ts.GetSelectedRows();
if (tp.Length < 1)
return ;
LogicalSystem cd = (LogicalSystem)tvFilter.Model.GetValue(ti, 1);
if (cd == null) return;
TreeSelection tsR = tvMask.Selection;
TreeIter tiR = new TreeIter();
tsR.GetSelected(out tiR);
TreePath[] tpR = tsR.GetSelectedRows();
if (tpR.Length < 1)
return ;
string rl = (string)tvMask.Model.GetValue(tiR, 0);
if (String.IsNullOrEmpty(rl)) return;
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.YesNo, MainClass.Languages.Translate("delete_mask", rl), "", Gtk.MessageType.Question,parentWindow);
int result = md.ShowDialog();
if (result != (int)Gtk.ResponseType.Yes)
return;
maskStore.Remove(ref tiR);
LogicalSystem cd2 = conditions.Find(x => x.Display.ToUpper() == cd.Display.ToUpper());
cd2.Mask.Remove(rl);
filterStore.SetValues(ti,cd2.Display,cd2);
}
protected virtual void OnBtnEditMaskClicked (object sender, System.EventArgs e)
{
TreeSelection ts = tvFilter.Selection;
TreeIter ti = new TreeIter();
ts.GetSelected(out ti);
TreePath[] tp = ts.GetSelectedRows();
if (tp.Length < 1)
return ;
LogicalSystem cd = (LogicalSystem)tvFilter.Model.GetValue(ti, 1);
if (cd == null) return;
TreeSelection tsR = tvMask.Selection;
TreeIter tiR = new TreeIter();
tsR.GetSelected(out tiR);
TreePath[] tpR = tsR.GetSelectedRows();
if (tpR.Length < 1)
return ;
string rl = (string)tvMask.Model.GetValue(tiR, 0);
EntryDialog ed = new EntryDialog(rl,MainClass.Languages.Translate("new_name"),parentWindow);
int result = ed.Run();
if (result == (int)ResponseType.Ok){
string newStr = ed.TextEntry;
if (!String.IsNullOrEmpty(newStr) ){
if(rl.ToUpper() == newStr.ToUpper()){
ed.Destroy();
return;
}
string rlFind = cd.Mask.Find(x=>x.ToUpper() ==newStr.ToUpper());
if (!String.IsNullOrEmpty(rlFind)){
MessageDialogs md = new MessageDialogs(MessageDialogs.DialogButtonType.Ok, MainClass.Languages.Translate("mask_is_exist", rlFind), "", Gtk.MessageType.Error,parentWindow);
md.ShowDialog();
ed.Destroy();
return;
}
LogicalSystem cd2 = conditions.Find(x => x.Display.ToUpper() == cd.Display.ToUpper());
cd2.Mask.Remove(rlFind);
maskStore.SetValues(tiR,0,newStr,newStr);
cd2.Mask.Add(newStr);
filterStore.SetValues(ti,cd2.Display,cd2);
}
}
ed.Destroy();
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace SAEON.Observations.Data{
/// <summary>
/// Strongly-typed collection for the VDataSchema class.
/// </summary>
[Serializable]
public partial class VDataSchemaCollection : ReadOnlyList<VDataSchema, VDataSchemaCollection>
{
public VDataSchemaCollection() {}
}
/// <summary>
/// This is Read-only wrapper class for the vDataSchema view.
/// </summary>
[Serializable]
public partial class VDataSchema : ReadOnlyRecord<VDataSchema>, IReadOnlyRecord
{
#region Default Settings
protected static void SetSQLProps()
{
GetTableSchema();
}
#endregion
#region Schema Accessor
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
{
SetSQLProps();
}
return BaseSchema;
}
}
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("vDataSchema", TableType.View, DataService.GetInstance("ObservationsDB"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarId = new TableSchema.TableColumn(schema);
colvarId.ColumnName = "ID";
colvarId.DataType = DbType.Guid;
colvarId.MaxLength = 0;
colvarId.AutoIncrement = false;
colvarId.IsNullable = false;
colvarId.IsPrimaryKey = false;
colvarId.IsForeignKey = false;
colvarId.IsReadOnly = false;
schema.Columns.Add(colvarId);
TableSchema.TableColumn colvarCode = new TableSchema.TableColumn(schema);
colvarCode.ColumnName = "Code";
colvarCode.DataType = DbType.AnsiString;
colvarCode.MaxLength = 50;
colvarCode.AutoIncrement = false;
colvarCode.IsNullable = false;
colvarCode.IsPrimaryKey = false;
colvarCode.IsForeignKey = false;
colvarCode.IsReadOnly = false;
schema.Columns.Add(colvarCode);
TableSchema.TableColumn colvarName = new TableSchema.TableColumn(schema);
colvarName.ColumnName = "Name";
colvarName.DataType = DbType.AnsiString;
colvarName.MaxLength = 100;
colvarName.AutoIncrement = false;
colvarName.IsNullable = false;
colvarName.IsPrimaryKey = false;
colvarName.IsForeignKey = false;
colvarName.IsReadOnly = false;
schema.Columns.Add(colvarName);
TableSchema.TableColumn colvarDescription = new TableSchema.TableColumn(schema);
colvarDescription.ColumnName = "Description";
colvarDescription.DataType = DbType.AnsiString;
colvarDescription.MaxLength = 5000;
colvarDescription.AutoIncrement = false;
colvarDescription.IsNullable = true;
colvarDescription.IsPrimaryKey = false;
colvarDescription.IsForeignKey = false;
colvarDescription.IsReadOnly = false;
schema.Columns.Add(colvarDescription);
TableSchema.TableColumn colvarDataSourceTypeID = new TableSchema.TableColumn(schema);
colvarDataSourceTypeID.ColumnName = "DataSourceTypeID";
colvarDataSourceTypeID.DataType = DbType.Guid;
colvarDataSourceTypeID.MaxLength = 0;
colvarDataSourceTypeID.AutoIncrement = false;
colvarDataSourceTypeID.IsNullable = false;
colvarDataSourceTypeID.IsPrimaryKey = false;
colvarDataSourceTypeID.IsForeignKey = false;
colvarDataSourceTypeID.IsReadOnly = false;
schema.Columns.Add(colvarDataSourceTypeID);
TableSchema.TableColumn colvarIgnoreFirst = new TableSchema.TableColumn(schema);
colvarIgnoreFirst.ColumnName = "IgnoreFirst";
colvarIgnoreFirst.DataType = DbType.Int32;
colvarIgnoreFirst.MaxLength = 0;
colvarIgnoreFirst.AutoIncrement = false;
colvarIgnoreFirst.IsNullable = false;
colvarIgnoreFirst.IsPrimaryKey = false;
colvarIgnoreFirst.IsForeignKey = false;
colvarIgnoreFirst.IsReadOnly = false;
schema.Columns.Add(colvarIgnoreFirst);
TableSchema.TableColumn colvarHasColumnNames = new TableSchema.TableColumn(schema);
colvarHasColumnNames.ColumnName = "HasColumnNames";
colvarHasColumnNames.DataType = DbType.Boolean;
colvarHasColumnNames.MaxLength = 0;
colvarHasColumnNames.AutoIncrement = false;
colvarHasColumnNames.IsNullable = true;
colvarHasColumnNames.IsPrimaryKey = false;
colvarHasColumnNames.IsForeignKey = false;
colvarHasColumnNames.IsReadOnly = false;
schema.Columns.Add(colvarHasColumnNames);
TableSchema.TableColumn colvarIgnoreLast = new TableSchema.TableColumn(schema);
colvarIgnoreLast.ColumnName = "IgnoreLast";
colvarIgnoreLast.DataType = DbType.Int32;
colvarIgnoreLast.MaxLength = 0;
colvarIgnoreLast.AutoIncrement = false;
colvarIgnoreLast.IsNullable = false;
colvarIgnoreLast.IsPrimaryKey = false;
colvarIgnoreLast.IsForeignKey = false;
colvarIgnoreLast.IsReadOnly = false;
schema.Columns.Add(colvarIgnoreLast);
TableSchema.TableColumn colvarCondition = new TableSchema.TableColumn(schema);
colvarCondition.ColumnName = "Condition";
colvarCondition.DataType = DbType.AnsiString;
colvarCondition.MaxLength = 500;
colvarCondition.AutoIncrement = false;
colvarCondition.IsNullable = true;
colvarCondition.IsPrimaryKey = false;
colvarCondition.IsForeignKey = false;
colvarCondition.IsReadOnly = false;
schema.Columns.Add(colvarCondition);
TableSchema.TableColumn colvarDataSchema = new TableSchema.TableColumn(schema);
colvarDataSchema.ColumnName = "DataSchema";
colvarDataSchema.DataType = DbType.AnsiString;
colvarDataSchema.MaxLength = 2147483647;
colvarDataSchema.AutoIncrement = false;
colvarDataSchema.IsNullable = true;
colvarDataSchema.IsPrimaryKey = false;
colvarDataSchema.IsForeignKey = false;
colvarDataSchema.IsReadOnly = false;
schema.Columns.Add(colvarDataSchema);
TableSchema.TableColumn colvarUserId = new TableSchema.TableColumn(schema);
colvarUserId.ColumnName = "UserId";
colvarUserId.DataType = DbType.Guid;
colvarUserId.MaxLength = 0;
colvarUserId.AutoIncrement = false;
colvarUserId.IsNullable = false;
colvarUserId.IsPrimaryKey = false;
colvarUserId.IsForeignKey = false;
colvarUserId.IsReadOnly = false;
schema.Columns.Add(colvarUserId);
TableSchema.TableColumn colvarDelimiter = new TableSchema.TableColumn(schema);
colvarDelimiter.ColumnName = "Delimiter";
colvarDelimiter.DataType = DbType.AnsiString;
colvarDelimiter.MaxLength = 3;
colvarDelimiter.AutoIncrement = false;
colvarDelimiter.IsNullable = true;
colvarDelimiter.IsPrimaryKey = false;
colvarDelimiter.IsForeignKey = false;
colvarDelimiter.IsReadOnly = false;
schema.Columns.Add(colvarDelimiter);
TableSchema.TableColumn colvarSplitSelector = new TableSchema.TableColumn(schema);
colvarSplitSelector.ColumnName = "SplitSelector";
colvarSplitSelector.DataType = DbType.AnsiString;
colvarSplitSelector.MaxLength = 50;
colvarSplitSelector.AutoIncrement = false;
colvarSplitSelector.IsNullable = true;
colvarSplitSelector.IsPrimaryKey = false;
colvarSplitSelector.IsForeignKey = false;
colvarSplitSelector.IsReadOnly = false;
schema.Columns.Add(colvarSplitSelector);
TableSchema.TableColumn colvarSplitIndex = new TableSchema.TableColumn(schema);
colvarSplitIndex.ColumnName = "SplitIndex";
colvarSplitIndex.DataType = DbType.Int32;
colvarSplitIndex.MaxLength = 0;
colvarSplitIndex.AutoIncrement = false;
colvarSplitIndex.IsNullable = true;
colvarSplitIndex.IsPrimaryKey = false;
colvarSplitIndex.IsForeignKey = false;
colvarSplitIndex.IsReadOnly = false;
schema.Columns.Add(colvarSplitIndex);
TableSchema.TableColumn colvarAddedAt = new TableSchema.TableColumn(schema);
colvarAddedAt.ColumnName = "AddedAt";
colvarAddedAt.DataType = DbType.DateTime;
colvarAddedAt.MaxLength = 0;
colvarAddedAt.AutoIncrement = false;
colvarAddedAt.IsNullable = true;
colvarAddedAt.IsPrimaryKey = false;
colvarAddedAt.IsForeignKey = false;
colvarAddedAt.IsReadOnly = false;
schema.Columns.Add(colvarAddedAt);
TableSchema.TableColumn colvarUpdatedAt = new TableSchema.TableColumn(schema);
colvarUpdatedAt.ColumnName = "UpdatedAt";
colvarUpdatedAt.DataType = DbType.DateTime;
colvarUpdatedAt.MaxLength = 0;
colvarUpdatedAt.AutoIncrement = false;
colvarUpdatedAt.IsNullable = true;
colvarUpdatedAt.IsPrimaryKey = false;
colvarUpdatedAt.IsForeignKey = false;
colvarUpdatedAt.IsReadOnly = false;
schema.Columns.Add(colvarUpdatedAt);
TableSchema.TableColumn colvarRowVersion = new TableSchema.TableColumn(schema);
colvarRowVersion.ColumnName = "RowVersion";
colvarRowVersion.DataType = DbType.Binary;
colvarRowVersion.MaxLength = 0;
colvarRowVersion.AutoIncrement = false;
colvarRowVersion.IsNullable = false;
colvarRowVersion.IsPrimaryKey = false;
colvarRowVersion.IsForeignKey = false;
colvarRowVersion.IsReadOnly = true;
schema.Columns.Add(colvarRowVersion);
TableSchema.TableColumn colvarDataSourceTypeCode = new TableSchema.TableColumn(schema);
colvarDataSourceTypeCode.ColumnName = "DataSourceTypeCode";
colvarDataSourceTypeCode.DataType = DbType.AnsiString;
colvarDataSourceTypeCode.MaxLength = 50;
colvarDataSourceTypeCode.AutoIncrement = false;
colvarDataSourceTypeCode.IsNullable = false;
colvarDataSourceTypeCode.IsPrimaryKey = false;
colvarDataSourceTypeCode.IsForeignKey = false;
colvarDataSourceTypeCode.IsReadOnly = false;
schema.Columns.Add(colvarDataSourceTypeCode);
TableSchema.TableColumn colvarDataSourceTypeDesc = new TableSchema.TableColumn(schema);
colvarDataSourceTypeDesc.ColumnName = "DataSourceTypeDesc";
colvarDataSourceTypeDesc.DataType = DbType.AnsiString;
colvarDataSourceTypeDesc.MaxLength = 500;
colvarDataSourceTypeDesc.AutoIncrement = false;
colvarDataSourceTypeDesc.IsNullable = false;
colvarDataSourceTypeDesc.IsPrimaryKey = false;
colvarDataSourceTypeDesc.IsForeignKey = false;
colvarDataSourceTypeDesc.IsReadOnly = false;
schema.Columns.Add(colvarDataSourceTypeDesc);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["ObservationsDB"].AddSchema("vDataSchema",schema);
}
}
#endregion
#region Query Accessor
public static Query CreateQuery()
{
return new Query(Schema);
}
#endregion
#region .ctors
public VDataSchema()
{
SetSQLProps();
SetDefaults();
MarkNew();
}
public VDataSchema(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
{
ForceDefaults();
}
MarkNew();
}
public VDataSchema(object keyID)
{
SetSQLProps();
LoadByKey(keyID);
}
public VDataSchema(string columnName, object columnValue)
{
SetSQLProps();
LoadByParam(columnName,columnValue);
}
#endregion
#region Props
[XmlAttribute("Id")]
[Bindable(true)]
public Guid Id
{
get
{
return GetColumnValue<Guid>("ID");
}
set
{
SetColumnValue("ID", value);
}
}
[XmlAttribute("Code")]
[Bindable(true)]
public string Code
{
get
{
return GetColumnValue<string>("Code");
}
set
{
SetColumnValue("Code", value);
}
}
[XmlAttribute("Name")]
[Bindable(true)]
public string Name
{
get
{
return GetColumnValue<string>("Name");
}
set
{
SetColumnValue("Name", value);
}
}
[XmlAttribute("Description")]
[Bindable(true)]
public string Description
{
get
{
return GetColumnValue<string>("Description");
}
set
{
SetColumnValue("Description", value);
}
}
[XmlAttribute("DataSourceTypeID")]
[Bindable(true)]
public Guid DataSourceTypeID
{
get
{
return GetColumnValue<Guid>("DataSourceTypeID");
}
set
{
SetColumnValue("DataSourceTypeID", value);
}
}
[XmlAttribute("IgnoreFirst")]
[Bindable(true)]
public int IgnoreFirst
{
get
{
return GetColumnValue<int>("IgnoreFirst");
}
set
{
SetColumnValue("IgnoreFirst", value);
}
}
[XmlAttribute("HasColumnNames")]
[Bindable(true)]
public bool? HasColumnNames
{
get
{
return GetColumnValue<bool?>("HasColumnNames");
}
set
{
SetColumnValue("HasColumnNames", value);
}
}
[XmlAttribute("IgnoreLast")]
[Bindable(true)]
public int IgnoreLast
{
get
{
return GetColumnValue<int>("IgnoreLast");
}
set
{
SetColumnValue("IgnoreLast", value);
}
}
[XmlAttribute("Condition")]
[Bindable(true)]
public string Condition
{
get
{
return GetColumnValue<string>("Condition");
}
set
{
SetColumnValue("Condition", value);
}
}
[XmlAttribute("DataSchema")]
[Bindable(true)]
public string DataSchema
{
get
{
return GetColumnValue<string>("DataSchema");
}
set
{
SetColumnValue("DataSchema", value);
}
}
[XmlAttribute("UserId")]
[Bindable(true)]
public Guid UserId
{
get
{
return GetColumnValue<Guid>("UserId");
}
set
{
SetColumnValue("UserId", value);
}
}
[XmlAttribute("Delimiter")]
[Bindable(true)]
public string Delimiter
{
get
{
return GetColumnValue<string>("Delimiter");
}
set
{
SetColumnValue("Delimiter", value);
}
}
[XmlAttribute("SplitSelector")]
[Bindable(true)]
public string SplitSelector
{
get
{
return GetColumnValue<string>("SplitSelector");
}
set
{
SetColumnValue("SplitSelector", value);
}
}
[XmlAttribute("SplitIndex")]
[Bindable(true)]
public int? SplitIndex
{
get
{
return GetColumnValue<int?>("SplitIndex");
}
set
{
SetColumnValue("SplitIndex", value);
}
}
[XmlAttribute("AddedAt")]
[Bindable(true)]
public DateTime? AddedAt
{
get
{
return GetColumnValue<DateTime?>("AddedAt");
}
set
{
SetColumnValue("AddedAt", value);
}
}
[XmlAttribute("UpdatedAt")]
[Bindable(true)]
public DateTime? UpdatedAt
{
get
{
return GetColumnValue<DateTime?>("UpdatedAt");
}
set
{
SetColumnValue("UpdatedAt", value);
}
}
[XmlAttribute("RowVersion")]
[Bindable(true)]
public byte[] RowVersion
{
get
{
return GetColumnValue<byte[]>("RowVersion");
}
set
{
SetColumnValue("RowVersion", value);
}
}
[XmlAttribute("DataSourceTypeCode")]
[Bindable(true)]
public string DataSourceTypeCode
{
get
{
return GetColumnValue<string>("DataSourceTypeCode");
}
set
{
SetColumnValue("DataSourceTypeCode", value);
}
}
[XmlAttribute("DataSourceTypeDesc")]
[Bindable(true)]
public string DataSourceTypeDesc
{
get
{
return GetColumnValue<string>("DataSourceTypeDesc");
}
set
{
SetColumnValue("DataSourceTypeDesc", value);
}
}
#endregion
#region Columns Struct
public struct Columns
{
public static string Id = @"ID";
public static string Code = @"Code";
public static string Name = @"Name";
public static string Description = @"Description";
public static string DataSourceTypeID = @"DataSourceTypeID";
public static string IgnoreFirst = @"IgnoreFirst";
public static string HasColumnNames = @"HasColumnNames";
public static string IgnoreLast = @"IgnoreLast";
public static string Condition = @"Condition";
public static string DataSchema = @"DataSchema";
public static string UserId = @"UserId";
public static string Delimiter = @"Delimiter";
public static string SplitSelector = @"SplitSelector";
public static string SplitIndex = @"SplitIndex";
public static string AddedAt = @"AddedAt";
public static string UpdatedAt = @"UpdatedAt";
public static string RowVersion = @"RowVersion";
public static string DataSourceTypeCode = @"DataSourceTypeCode";
public static string DataSourceTypeDesc = @"DataSourceTypeDesc";
}
#endregion
#region IAbstractRecord Members
public new CT GetColumnValue<CT>(string columnName) {
return base.GetColumnValue<CT>(columnName);
}
public object GetColumnValue(string columnName) {
return base.GetColumnValue<object>(columnName);
}
#endregion
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
namespace EasyNetQ
{
public class DefaultTypeNameSerializer : ITypeNameSerializer
{
private readonly ConcurrentDictionary<Type, string> serializedTypes = new ConcurrentDictionary<Type, string>();
private readonly ConcurrentDictionary<string, Type> deSerializedTypes = new ConcurrentDictionary<string, Type>();
public string Serialize(Type type)
{
Preconditions.CheckNotNull(type, "type");
return serializedTypes.GetOrAdd(type, t =>
{
var typeName = RemoveAssemblyDetails(t.AssemblyQualifiedName);
if (typeName.Length > 255)
{
throw new EasyNetQException($"The serialized name of type '{t.Name}' exceeds the AMQP maximum short string length of 255 characters");
}
return typeName;
});
}
public Type DeSerialize(string typeName)
{
Preconditions.CheckNotBlank(typeName, "typeName");
return deSerializedTypes.GetOrAdd(typeName, t =>
{
var typeNameKey = SplitFullyQualifiedTypeName(t);
return GetTypeFromTypeNameKey(typeNameKey);
});
}
private static string RemoveAssemblyDetails(string fullyQualifiedTypeName)
{
var builder = new StringBuilder(fullyQualifiedTypeName.Length);
// loop through the type name and filter out qualified assembly details from nested type names
var writingAssemblyName = false;
var skippingAssemblyDetails = false;
foreach (var character in fullyQualifiedTypeName)
{
switch (character)
{
case '[':
writingAssemblyName = false;
skippingAssemblyDetails = false;
builder.Append(character);
break;
case ']':
writingAssemblyName = false;
skippingAssemblyDetails = false;
builder.Append(character);
break;
case ',':
if (!writingAssemblyName)
{
writingAssemblyName = true;
builder.Append(character);
}
else
{
skippingAssemblyDetails = true;
}
break;
default:
if (!skippingAssemblyDetails)
{
builder.Append(character);
}
break;
}
}
return builder.ToString();
}
private static TypeNameKey SplitFullyQualifiedTypeName(string fullyQualifiedTypeName)
{
var assemblyDelimiterIndex = GetAssemblyDelimiterIndex(fullyQualifiedTypeName);
string typeName;
string assemblyName;
if (assemblyDelimiterIndex != null)
{
typeName = fullyQualifiedTypeName.Trim(0, assemblyDelimiterIndex.GetValueOrDefault());
assemblyName = fullyQualifiedTypeName.Trim(assemblyDelimiterIndex.GetValueOrDefault() + 1, fullyQualifiedTypeName.Length - assemblyDelimiterIndex.GetValueOrDefault() - 1);
}
else
{
typeName = fullyQualifiedTypeName;
assemblyName = null;
}
return new TypeNameKey(assemblyName, typeName);
}
private static Type GetTypeFromTypeNameKey(TypeNameKey typeNameKey)
{
var assemblyName = typeNameKey.AssemblyName;
var typeName = typeNameKey.TypeName;
if (assemblyName != null)
{
#if NETFX
// look, I don't like using obsolete methods as much as you do but this is the only way
// Assembly.Load won't check the GAC for a partial name
#pragma warning disable 618,612
var assembly = Assembly.LoadWithPartialName(assemblyName);
#pragma warning restore 618,612
#else
var assembly = Assembly.Load(new AssemblyName(assemblyName));
#endif
#if NETFX
if (assembly == null)
{
// will find assemblies loaded with Assembly.LoadFile outside of the main directory
var loadedAssemblies = AppDomain.CurrentDomain.GetAssemblies();
foreach (var a in loadedAssemblies)
{
// check for both full name or partial name match
if (a.FullName == assemblyName || a.GetName().Name == assemblyName)
{
assembly = a;
break;
}
}
}
#endif
if (assembly == null)
{
throw new EasyNetQException($"Could not load assembly '{assemblyName}'");
}
var type = assembly.GetType(typeName);
if (type == null)
{
// if generic type, try manually parsing the type arguments for the case of dynamically loaded assemblies
// example generic typeName format: System.Collections.Generic.Dictionary`2[[System.String, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089],[System.String, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]]
if (typeName.IndexOf('`') >= 0)
{
try
{
type = GetGenericTypeFromTypeName(typeName, assembly);
}
catch (Exception ex)
{
throw new EasyNetQException($"Could not find type '{typeName}' in assembly '{assembly.FullName}'", ex);
}
}
if (type == null)
{
throw new EasyNetQException($"Could not find type '{typeName}' in assembly '{assembly.FullName}'");
}
}
return type;
}
return Type.GetType(typeName);
}
private static Type GetGenericTypeFromTypeName(string typeName, Assembly assembly)
{
Type type = null;
var openBracketIndex = typeName.IndexOf('[');
if (openBracketIndex >= 0)
{
var genericTypeDefName = typeName.Substring(0, openBracketIndex);
var genericTypeDef = assembly.GetType(genericTypeDefName);
if (genericTypeDef != null)
{
var genericTypeArguments = new List<Type>();
var scope = 0;
var typeArgStartIndex = 0;
var endIndex = typeName.Length - 1;
for (var i = openBracketIndex + 1; i < endIndex; ++i)
{
var current = typeName[i];
switch (current)
{
case '[':
if (scope == 0)
{
typeArgStartIndex = i + 1;
}
++scope;
break;
case ']':
--scope;
if (scope == 0)
{
var typeArgAssemblyQualifiedName = typeName.Substring(typeArgStartIndex, i - typeArgStartIndex);
var typeNameKey = SplitFullyQualifiedTypeName(typeArgAssemblyQualifiedName);
genericTypeArguments.Add(GetTypeFromTypeNameKey(typeNameKey));
}
break;
}
}
type = genericTypeDef.MakeGenericType(genericTypeArguments.ToArray());
}
}
return type;
}
private static int? GetAssemblyDelimiterIndex(string fullyQualifiedTypeName)
{
// we need to get the first comma following all surrounded in brackets because of generic types
// e.g. System.Collections.Generic.Dictionary`2[[System.String, mscorlib,Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089],[System.String, mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089]], mscorlib, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089
var scope = 0;
for (var i = 0; i < fullyQualifiedTypeName.Length; i++)
{
var current = fullyQualifiedTypeName[i];
switch (current)
{
case '[':
scope++;
break;
case ']':
scope--;
break;
case ',':
if (scope == 0)
{
return i;
}
break;
}
}
return null;
}
private struct TypeNameKey
{
public string AssemblyName { get; }
public string TypeName { get; }
public TypeNameKey(string assemblyName, string typeName)
{
AssemblyName = assemblyName;
TypeName = typeName;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using ALinq;
using ALinq.Mapping;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using ALinq.SqlClient;
namespace ALinq.SqlClient
{
internal class Translator
{
// Fields
private readonly IDataServices services;
private readonly SqlFactory sql;
private readonly ITypeSystemProvider typeProvider;
private readonly SqlProvider provider;
// Methods
internal Translator(IDataServices services, SqlFactory sqlFactory, ITypeSystemProvider typeProvider,
SqlProvider provider)
{
this.services = services;
sql = sqlFactory;
this.typeProvider = typeProvider;
this.provider = provider;
}
public SqlProvider Provider
{
get { return provider; }
}
private static SqlExpression BestIdentityNode(SqlTypeCase tc)
{
foreach (SqlTypeCaseWhen when in tc.Whens)
{
if (when.TypeBinding.NodeType == SqlNodeType.New)
{
return when.TypeBinding;
}
}
return tc.Whens[0].TypeBinding;
}
internal SqlSelect BuildDefaultQuery(MetaType rowType, bool allowDeferred, SqlLink link, Expression source)
{
if (rowType.HasInheritance && (rowType.InheritanceRoot != rowType))
{
throw Error.ArgumentWrongValue("rowType");
}
SqlTable node = sql.Table(rowType.Table, rowType, source);
var alias = new SqlAlias(node);
var item = new SqlAliasRef(alias);
return new SqlSelect(BuildProjection(item, node.RowType, allowDeferred, link, source), alias, source);
}
private SqlLink BuildLink(SqlExpression item, MetaDataMember member, Expression source)
{
if (member.IsAssociation)
{
var expressionArray = new SqlExpression[member.Association.ThisKey.Count];
int index = 0;
int length = expressionArray.Length;
while (index < length)
{
MetaDataMember member2 = member.Association.ThisKey[index];
expressionArray[index] = sql.Member(item, member2.Member);
index++;
}
return new SqlLink(new object(), member.Association.OtherType, member.Type, typeProvider.From(member.Type), item, member, expressionArray, null, source);
}
MetaType declaringType = member.DeclaringType;
var keyExpressions = new List<SqlExpression>();
foreach (MetaDataMember member3 in declaringType.IdentityMembers)
{
keyExpressions.Add(sql.Member(item, member3.Member));
}
return new SqlLink(new object(), declaringType, member.Type, typeProvider.From(member.Type),
item, member, keyExpressions, sql.Member(item, member.Member), source);
}
public SqlExpression BuildProjection(SqlExpression item, MetaType rowType, bool allowDeferred, SqlLink link, Expression source)
{
if (!rowType.HasInheritance)
{
return BuildProjectionInternal(item, rowType, (rowType.Table != null) ? rowType.PersistentDataMembers : rowType.DataMembers, allowDeferred, link, source);
}
var list = new List<MetaType>(rowType.InheritanceTypes);
var list2 = new List<SqlTypeCaseWhen>();
SqlTypeCaseWhen when = null;
MetaType inheritanceRoot = rowType.InheritanceRoot;
MetaDataMember discriminator = inheritanceRoot.Discriminator;
Type type = discriminator.Type;
SqlMember member2 = sql.Member(item, discriminator.Member);
foreach (MetaType type3 in list)
{
if (type3.HasInheritanceCode)
{
SqlNew typeBinding = BuildProjectionInternal(item, type3, type3.PersistentDataMembers, allowDeferred, link, source);
if (type3.IsInheritanceDefault)
{
when = new SqlTypeCaseWhen(null, typeBinding);
}
object obj2 = InheritanceRules.InheritanceCodeForClientCompare(type3.InheritanceCode, member2.SqlType);
SqlExpression match = sql.Value(type, sql.Default(discriminator), obj2, true, source);
list2.Add(new SqlTypeCaseWhen(match, typeBinding));
}
}
if (when == null)
{
throw Error.EmptyCaseNotSupported();
}
list2.Add(when);
return sql.TypeCase(inheritanceRoot.Type, inheritanceRoot, member2, list2.ToArray(), source);
}
private SqlNew BuildProjectionInternal(SqlExpression item, MetaType rowType, IEnumerable<MetaDataMember> members, bool allowDeferred, SqlLink link, Expression source)
{
var bindings = new List<SqlMemberAssign>();
foreach (MetaDataMember member in members)
{
if (allowDeferred && (member.IsAssociation || member.IsDeferred))
{
if ((((link != null) && (member != link.Member)) &&
(member.IsAssociation && (member.MappedName == link.Member.MappedName))) &&
(!member.Association.IsMany && !IsPreloaded(link.Member.Member)))
{
SqlLink expr = BuildLink(item, member, source);
expr.Expansion = link.Expression;
bindings.Add(new SqlMemberAssign(member.Member, expr));
}
else
{
bindings.Add(new SqlMemberAssign(member.Member, this.BuildLink(item, member, source)));
}
continue;
}
if (!member.IsAssociation)
{
bindings.Add(new SqlMemberAssign(member.Member, this.sql.Member(item, member)));
}
}
ConstructorInfo cons = rowType.Type.GetConstructor(BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance, null, Type.EmptyTypes, null);
if (cons == null)
{
throw Error.MappedTypeMustHaveDefaultConstructor(rowType.Type);
}
return this.sql.New(rowType, cons, null, null, bindings, source);
}
private List<SqlExpression> GetIdentityExpressions(MetaType type, SqlExpression expr)
{
List<MetaDataMember> list = this.GetIdentityMembers(type).ToList<MetaDataMember>();
var list2 = new List<SqlExpression>(list.Count);
foreach (MetaDataMember member in list)
{
list2.Add(this.sql.Member((SqlExpression)SqlDuplicator.Copy(expr), member));
}
return list2;
}
private IEnumerable<MetaDataMember> GetIdentityMembers(MetaType type)
{
if (type.IsEntity)
{
return type.IdentityMembers;
}
return type.DataMembers.Where(delegate(MetaDataMember m)
{
return IsPublic(m.Member);
});
}
private bool IsPreloaded(MemberInfo member)
{
if (this.services.Context.LoadOptions == null)
{
return false;
}
return this.services.Context.LoadOptions.IsPreloaded(member);
}
private static bool IsPublic(MemberInfo mi)
{
FieldInfo info = mi as FieldInfo;
if (info != null)
{
return info.IsPublic;
}
PropertyInfo info2 = mi as PropertyInfo;
if ((info2 != null) && info2.CanRead)
{
MethodInfo getMethod = info2.GetGetMethod();
if (getMethod != null)
{
return getMethod.IsPublic;
}
}
return false;
}
internal static Expression TranslateAssociation(DataContext context, MetaAssociation association, Expression otherSource, Expression[] keyValues, Expression thisInstance)
{
if (association == null)
{
throw Error.ArgumentNull("association");
}
if (keyValues == null)
{
throw Error.ArgumentNull("keyValues");
}
if (context.LoadOptions != null)
{
LambdaExpression associationSubquery = context.LoadOptions.GetAssociationSubquery(association.ThisMember.Member);
if (associationSubquery != null)
{
var composer = new RelationComposer(associationSubquery.Parameters[0], association, otherSource, thisInstance);
return composer.Visit(associationSubquery.Body);
}
}
return WhereClauseFromSourceAndKeys(otherSource, association.OtherKey.ToArray<MetaDataMember>(), keyValues);
}
internal SqlExpression TranslateEquals(SqlBinary expr)
{
IList<SqlExpression> keyExpressions;
IList<SqlExpression> identityExpressions;
SqlExpression left = expr.Left;
SqlExpression right = expr.Right;
if (right.NodeType == SqlNodeType.Element)
{
var select = (SqlSubSelect)right;
var alias = new SqlAlias(select.Select);
var selection = new SqlAliasRef(alias);
var select2 = new SqlSelect(selection, alias, expr.SourceExpression)
{
Where = sql.Binary(expr.NodeType, sql.DoNotVisitExpression(left), selection)
};
return sql.SubSelect(SqlNodeType.Exists, select2);
}
if (left.NodeType == SqlNodeType.Element)
{
var select3 = (SqlSubSelect)left;
var alias2 = new SqlAlias(select3.Select);
var ref3 = new SqlAliasRef(alias2);
var select4 = new SqlSelect(ref3, alias2, expr.SourceExpression)
{
Where = sql.Binary(expr.NodeType, sql.DoNotVisitExpression(right), ref3)
};
return sql.SubSelect(SqlNodeType.Exists, select4);
}
var sourceMetaType = TypeSource.GetSourceMetaType(left, services.Model);
var type = TypeSource.GetSourceMetaType(right, services.Model);
if (left.NodeType == SqlNodeType.TypeCase)
{
left = BestIdentityNode((SqlTypeCase)left);
}
if (right.NodeType == SqlNodeType.TypeCase)
{
right = BestIdentityNode((SqlTypeCase)right);
}
if ((sourceMetaType.IsEntity && type.IsEntity) && (sourceMetaType.Table != type.Table))
{
throw Error.CannotCompareItemsAssociatedWithDifferentTable();
}
if (((!sourceMetaType.IsEntity && !type.IsEntity) && ((left.NodeType != SqlNodeType.New) || left.SqlType.CanBeColumn)) && ((right.NodeType != SqlNodeType.New) || right.SqlType.CanBeColumn))
{
if ((expr.NodeType == SqlNodeType.EQ2V) || (expr.NodeType == SqlNodeType.NE2V))
{
return TranslateEqualsOp(expr.NodeType, sql.DoNotVisitExpression(expr.Left),
sql.DoNotVisitExpression(expr.Right), false);
}
return expr;
}
if ((sourceMetaType != type) && (sourceMetaType.InheritanceRoot != type.InheritanceRoot))
{
return this.sql.Binary(SqlNodeType.EQ, sql.ValueFromObject(0, expr.SourceExpression), this.sql.ValueFromObject(1, expr.SourceExpression));
}
var link = left as SqlLink;
if (((link != null) && link.Member.IsAssociation) && link.Member.Association.IsForeignKey)
{
keyExpressions = link.KeyExpressions;
}
else
{
keyExpressions = this.GetIdentityExpressions(sourceMetaType, this.sql.DoNotVisitExpression(left));
}
var link2 = right as SqlLink;
if (((link2 != null) && link2.Member.IsAssociation) && link2.Member.Association.IsForeignKey)
{
identityExpressions = link2.KeyExpressions;
}
else
{
identityExpressions = this.GetIdentityExpressions(type, sql.DoNotVisitExpression(right));
}
SqlExpression expression3 = null;
SqlNodeType op = ((expr.NodeType == SqlNodeType.EQ2V) || (expr.NodeType == SqlNodeType.NE2V)) ? SqlNodeType.EQ2V : SqlNodeType.EQ;
int num = 0;
int count = keyExpressions.Count;
while (num < count)
{
SqlExpression expression4 = this.TranslateEqualsOp(op, keyExpressions[num], identityExpressions[num], !sourceMetaType.IsEntity);
if (expression3 == null)
{
expression3 = expression4;
}
else
{
expression3 = this.sql.Binary(SqlNodeType.And, expression3, expression4);
}
num++;
}
if ((expr.NodeType != SqlNodeType.NE) && (expr.NodeType != SqlNodeType.NE2V))
{
return expression3;
}
return this.sql.Unary(SqlNodeType.Not, expression3, expression3.SourceExpression);
}
private SqlExpression TranslateEqualsOp(SqlNodeType op, SqlExpression left, SqlExpression right, bool allowExpand)
{
switch (op)
{
case SqlNodeType.EQ:
case SqlNodeType.NE:
return this.sql.Binary(op, left, right);
case SqlNodeType.EQ2V:
{
if ((SqlExpressionNullability.CanBeNull(left) != false) && (SqlExpressionNullability.CanBeNull(right) != false))
{
SqlNodeType type = allowExpand ? SqlNodeType.EQ2V : SqlNodeType.EQ;
return this.sql.Binary(SqlNodeType.Or, this.sql.Binary(SqlNodeType.And, this.sql.Unary(SqlNodeType.IsNull, (SqlExpression)SqlDuplicator.Copy(left)), this.sql.Unary(SqlNodeType.IsNull, (SqlExpression)SqlDuplicator.Copy(right))),
this.sql.Binary(SqlNodeType.And, this.sql.Binary(SqlNodeType.And, this.sql.Unary(SqlNodeType.IsNotNull, (SqlExpression)SqlDuplicator.Copy(left)), this.sql.Unary(SqlNodeType.IsNotNull, (SqlExpression)SqlDuplicator.Copy(right))), this.sql.Binary(type, left, right)));
}
SqlNodeType nodeType = allowExpand ? SqlNodeType.EQ2V : SqlNodeType.EQ;
return this.sql.Binary(nodeType, left, right);
}
case SqlNodeType.NE2V:
{
if ((SqlExpressionNullability.CanBeNull(left) == false) || (SqlExpressionNullability.CanBeNull(right) == false))
{
SqlNodeType type4 = allowExpand ? SqlNodeType.NE2V : SqlNodeType.NE;
return this.sql.Binary(type4, left, right);
}
SqlNodeType type3 = allowExpand ? SqlNodeType.EQ2V : SqlNodeType.EQ;
return this.sql.Unary(SqlNodeType.Not, this.sql.Binary(SqlNodeType.Or, this.sql.Binary(SqlNodeType.And, this.sql.Unary(SqlNodeType.IsNull, (SqlExpression)SqlDuplicator.Copy(left)), this.sql.Unary(SqlNodeType.IsNull, (SqlExpression)SqlDuplicator.Copy(right))), this.sql.Binary(SqlNodeType.And, this.sql.Binary(SqlNodeType.And, this.sql.Unary(SqlNodeType.IsNotNull, (SqlExpression)SqlDuplicator.Copy(left)), this.sql.Unary(SqlNodeType.IsNotNull, (SqlExpression)SqlDuplicator.Copy(right))), this.sql.Binary(type3, left, right))));
}
}
throw Error.UnexpectedNode(op);
}
internal SqlNode TranslateLink(SqlLink link, bool asExpression)
{
return this.TranslateLink(link, link.KeyExpressions, asExpression);
}
internal SqlNode TranslateLink(SqlLink link, IList<SqlExpression> keyExpressions, bool asExpression)
{
MetaDataMember member = link.Member;
if (!member.IsAssociation)
{
return link.Expansion;
}
MetaType otherType = member.Association.OtherType;
Type type = otherType.InheritanceRoot.Type;
ITable table = this.services.Context.GetTable(type);
Expression otherSource = new LinkedTableExpression(link, table, typeof(IQueryable<>).MakeGenericType(new Type[] { otherType.Type }));
Expression[] keyValues = new Expression[keyExpressions.Count];
for (int i = 0; i < keyExpressions.Count; i++)
{
MetaDataMember member2 = member.Association.OtherKey[i];
Type memberType = TypeSystem.GetMemberType(member2.Member);
keyValues[i] = InternalExpression.Known(keyExpressions[i], memberType);
}
Expression thisInstance = (link.Expression != null) ? ((Expression)InternalExpression.Known(link.Expression)) : ((Expression)Expression.Constant(null, link.Member.Member.DeclaringType));
Expression expression3 = TranslateAssociation(this.services.Context, member.Association, otherSource, keyValues, thisInstance);
var converter = provider.CreateQueryConverter(this.sql);
var select = (SqlSelect)converter.ConvertInner(expression3, link.SourceExpression);
SqlNode node = select;
if (!asExpression)
{
return node;
}
if (member.Association.IsMany)
{
return new SqlSubSelect(SqlNodeType.Multiset, link.ClrType, link.SqlType, select);
}
return new SqlSubSelect(SqlNodeType.Element, link.ClrType, link.SqlType, select);
}
internal SqlExpression TranslateLinkEquals(SqlBinary bo)
{
SqlLink left = bo.Left as SqlLink;
SqlLink right = bo.Right as SqlLink;
if ((((left == null) || !left.Member.IsAssociation) || !left.Member.Association.IsForeignKey) && (((right == null) || !right.Member.IsAssociation) || !right.Member.Association.IsForeignKey))
{
return bo;
}
return this.TranslateEquals(bo);
}
internal SqlExpression TranslateLinkIsNull(SqlUnary expr)
{
SqlLink operand = expr.Operand as SqlLink;
if (((operand == null) || !operand.Member.IsAssociation) || !operand.Member.Association.IsForeignKey)
{
return expr;
}
IList<SqlExpression> keyExpressions = operand.KeyExpressions;
SqlExpression left = null;
SqlNodeType nodeType = (expr.NodeType == SqlNodeType.IsNull) ? SqlNodeType.Or : SqlNodeType.And;
int num = 0;
int count = keyExpressions.Count;
while (num < count)
{
SqlExpression right = this.sql.Unary(expr.NodeType, this.sql.DoNotVisitExpression(keyExpressions[num]), expr.SourceExpression);
if (left == null)
{
left = right;
}
else
{
left = this.sql.Binary(nodeType, left, right);
}
num++;
}
return left;
}
internal static Expression WhereClauseFromSourceAndKeys(Expression source, MetaDataMember[] keyMembers, Expression[] keyValues)
{
Type elementType = TypeSystem.GetElementType(source.Type);
ParameterExpression expression = Expression.Parameter(elementType, "p");
Expression left = null;
for (int i = 0; i < keyMembers.Length; i++)
{
MetaDataMember member = keyMembers[i];
Expression expression3 = (elementType == member.Member.DeclaringType) ? ((Expression)expression) : ((Expression)Expression.Convert(expression, member.Member.DeclaringType));
Expression expression4 = (member.Member is FieldInfo) ? Expression.Field(expression3, (FieldInfo)member.Member) : Expression.Property(expression3, (PropertyInfo)member.Member);
Expression expression5 = keyValues[i];
if (expression5.Type != expression4.Type)
{
expression5 = Expression.Convert(expression5, expression4.Type);
}
Expression right = Expression.Equal(expression4, expression5);
left = (left != null) ? Expression.And(left, right) : right;
}
return Expression.Call(typeof(Enumerable), "Where", new Type[] { expression.Type }, new Expression[] { source, Expression.Lambda(left, new ParameterExpression[] { expression }) });
}
// Nested Types
private class RelationComposer : ExpressionVisitor
{
// Fields
private MetaAssociation association;
private Expression otherSouce;
private ParameterExpression parameter;
private Expression parameterReplacement;
// Methods
internal RelationComposer(ParameterExpression parameter, MetaAssociation association, Expression otherSouce, Expression parameterReplacement)
{
if (parameter == null)
{
throw Error.ArgumentNull("parameter");
}
if (association == null)
{
throw Error.ArgumentNull("association");
}
if (otherSouce == null)
{
throw Error.ArgumentNull("otherSouce");
}
if (parameterReplacement == null)
{
throw Error.ArgumentNull("parameterReplacement");
}
this.parameter = parameter;
this.association = association;
this.otherSouce = otherSouce;
this.parameterReplacement = parameterReplacement;
}
private static Expression[] GetKeyValues(Expression expr, ReadOnlyCollection<MetaDataMember> keys)
{
List<Expression> list = new List<Expression>();
foreach (MetaDataMember member in keys)
{
list.Add(Expression.PropertyOrField(expr, member.Name));
}
return list.ToArray();
}
public override Expression VisitMemberAccess(MemberExpression m)
{
if (MetaPosition.AreSameMember(m.Member, this.association.ThisMember.Member))
{
Expression[] keyValues = GetKeyValues(this.Visit(m.Expression), this.association.ThisKey);
return Translator.WhereClauseFromSourceAndKeys(this.otherSouce, this.association.OtherKey.ToArray<MetaDataMember>(), keyValues);
}
Expression expression = this.Visit(m.Expression);
if (expression == m.Expression)
{
return m;
}
if (((expression.Type != m.Expression.Type) && (m.Member.Name == "Count")) && TypeSystem.IsSequenceType(expression.Type))
{
return Expression.Call(typeof(Enumerable), "Count", new Type[] { TypeSystem.GetElementType(expression.Type) }, new Expression[] { expression });
}
return Expression.MakeMemberAccess(expression, m.Member);
}
public override Expression VisitParameter(ParameterExpression p)
{
if (p == this.parameter)
{
return this.parameterReplacement;
}
return base.VisitParameter(p);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using Xunit;
namespace System.Linq.Expressions.Tests
{
public class UnboxTests
{
[Theory]
[PerCompilationType(nameof(UnboxableFromObject))]
[PerCompilationType(nameof(NullableUnboxableFromObject))]
[PerCompilationType(nameof(UnboxableFromIComparable))]
[PerCompilationType(nameof(NullableUnboxableFromIComparable))]
[PerCompilationType(nameof(UnboxableFromIComparableT))]
[PerCompilationType(nameof(NullableUnboxableFromIComparableT))]
public void CanUnbox(object value, Type type, Type boxedType, bool useInterpreter)
{
Expression expression = Expression.Constant(value, boxedType);
UnaryExpression unbox = Expression.Unbox(expression, type);
Assert.Equal(type, unbox.Type);
BinaryExpression isEqual = Expression.Equal(Expression.Constant(value, type), unbox);
Assert.True(Expression.Lambda<Func<bool>>(isEqual).Compile(useInterpreter)());
}
[Theory]
[PerCompilationType(nameof(UnboxableFromObject))]
[PerCompilationType(nameof(NullableUnboxableFromObject))]
[PerCompilationType(nameof(UnboxableFromIComparable))]
[PerCompilationType(nameof(NullableUnboxableFromIComparable))]
[PerCompilationType(nameof(UnboxableFromIComparableT))]
[PerCompilationType(nameof(NullableUnboxableFromIComparableT))]
public void CanUnboxFromMake(object value, Type type, Type boxedType, bool useInterpreter)
{
Expression expression = Expression.Constant(value, boxedType);
UnaryExpression unbox = Expression.MakeUnary(ExpressionType.Unbox, expression, type);
Assert.Equal(type, unbox.Type);
BinaryExpression isEqual = Expression.Equal(Expression.Constant(value, type), unbox);
Assert.True(Expression.Lambda<Func<bool>>(isEqual).Compile(useInterpreter)());
}
public static IEnumerable<object[]> UnboxableFromObject()
{
yield return new object[] { 1, typeof(int), typeof(object) };
yield return new object[] { 42, typeof(int), typeof(object) };
yield return new object[] { DateTime.MinValue, typeof(DateTime), typeof(object) };
yield return new object[] { DateTimeOffset.MinValue, typeof(DateTimeOffset), typeof(object) };
yield return new object[] { 42L, typeof(long), typeof(object) };
yield return new object[] { 13m, typeof(decimal), typeof(object) };
yield return new object[] { ExpressionType.Unbox, typeof(ExpressionType), typeof(object) };
}
public static IEnumerable<object[]> NullableUnboxableFromObject()
{
yield return new object[] { 1, typeof(int?), typeof(object) };
yield return new object[] { 42, typeof(int?), typeof(object) };
yield return new object[] { DateTime.MinValue, typeof(DateTime?), typeof(object) };
yield return new object[] { DateTimeOffset.MinValue, typeof(DateTimeOffset?), typeof(object) };
yield return new object[] { 42L, typeof(long?), typeof(object) };
yield return new object[] { 13m, typeof(decimal?), typeof(object) };
yield return new object[] { ExpressionType.Unbox, typeof(ExpressionType?), typeof(object) };
}
public static IEnumerable<object[]> UnboxableFromIComparable()
{
yield return new object[] { 1, typeof(int), typeof(IComparable) };
yield return new object[] { 42, typeof(int), typeof(IComparable) };
yield return new object[] { DateTime.MinValue, typeof(DateTime), typeof(IComparable) };
yield return new object[] { DateTimeOffset.MinValue, typeof(DateTimeOffset), typeof(IComparable) };
yield return new object[] { 42L, typeof(long), typeof(IComparable) };
yield return new object[] { 13m, typeof(decimal), typeof(IComparable) };
yield return new object[] { ExpressionType.Unbox, typeof(ExpressionType), typeof(IComparable) };
}
public static IEnumerable<object[]> NullableUnboxableFromIComparable()
{
yield return new object[] { 1, typeof(int?), typeof(IComparable) };
yield return new object[] { 42, typeof(int?), typeof(IComparable) };
yield return new object[] { DateTime.MinValue, typeof(DateTime?), typeof(IComparable) };
yield return new object[] { DateTimeOffset.MinValue, typeof(DateTimeOffset?), typeof(IComparable) };
yield return new object[] { 42L, typeof(long?), typeof(IComparable) };
yield return new object[] { 13m, typeof(decimal?), typeof(IComparable) };
yield return new object[] { ExpressionType.Unbox, typeof(ExpressionType?), typeof(IComparable) };
}
public static IEnumerable<object[]> UnboxableFromIComparableT()
{
yield return new object[] { 1, typeof(int), typeof(IComparable<int>) };
yield return new object[] { 42, typeof(int), typeof(IComparable<int>) };
yield return new object[] { DateTime.MinValue, typeof(DateTime), typeof(IComparable<DateTime>) };
yield return new object[] { DateTimeOffset.MinValue, typeof(DateTimeOffset), typeof(IComparable<DateTimeOffset>) };
yield return new object[] { 42L, typeof(long), typeof(IComparable<long>) };
yield return new object[] { 13m, typeof(decimal), typeof(IComparable<decimal>) };
}
public static IEnumerable<object[]> NullableUnboxableFromIComparableT()
{
yield return new object[] { 1, typeof(int?), typeof(IComparable<int>) };
yield return new object[] { 42, typeof(int?), typeof(IComparable<int>) };
yield return new object[] { DateTime.MinValue, typeof(DateTime?), typeof(IComparable<DateTime>) };
yield return new object[] { DateTimeOffset.MinValue, typeof(DateTimeOffset?), typeof(IComparable<DateTimeOffset>) };
yield return new object[] { 42L, typeof(long?), typeof(IComparable<long>) };
yield return new object[] { 13m, typeof(decimal?), typeof(IComparable<decimal>) };
}
public static IEnumerable<object[]> NullableTypes()
{
yield return new object[] { typeof(int?) };
yield return new object[] { typeof(DateTime?) };
yield return new object[] { typeof(DateTimeKind?) };
yield return new object[] { typeof(DateTimeOffset?) };
yield return new object[] { typeof(long?) };
yield return new object[] { typeof(decimal?) };
}
[Theory]
[PerCompilationType(nameof(NullableTypes))]
public void NullNullable(Type type, bool useInterpreter)
{
UnaryExpression unbox = Expression.Unbox(Expression.Default(typeof(object)), type);
Func<bool> isNull = Expression.Lambda<Func<bool>>(Expression.Equal(Expression.Default(type), unbox)).Compile(useInterpreter);
Assert.True(isNull());
}
[Fact]
public void CannotUnboxToNonInterfaceExceptObject()
{
Expression value = Expression.Constant(0);
AssertExtensions.Throws<ArgumentException>("expression", () => Expression.Unbox(value, typeof(int)));
}
[Fact]
public void CannotUnboxReferenceType()
{
Expression value = Expression.Constant("", typeof(IComparable<string>));
AssertExtensions.Throws<ArgumentException>("type", () => Expression.Unbox(value, typeof(string)));
}
private static class Unreadable
{
public static object WriteOnly
{
set { }
}
}
[Fact]
public void CannotUnboxUnreadable()
{
Expression value = Expression.Property(null, typeof(Unreadable), "WriteOnly");
AssertExtensions.Throws<ArgumentException>("expression", () => Expression.Unbox(value, typeof(int)));
}
[Fact]
public void ExpressionNull()
{
AssertExtensions.Throws<ArgumentNullException>("expression", () => Expression.Unbox(null, typeof(int)));
}
[Fact]
public void TypeNull()
{
Expression value = Expression.Constant(0, typeof(object));
AssertExtensions.Throws<ArgumentNullException>("type", () => Expression.Unbox(value, null));
}
[Theory]
[ClassData(typeof(CompilationTypes))]
public void MistmatchFailsOnRuntime(bool useInterpreter)
{
Expression unbox = Expression.Unbox(Expression.Constant(0, typeof(object)), typeof(long));
Func<long> del = Expression.Lambda<Func<long>>(unbox).Compile(useInterpreter);
Assert.Throws<InvalidCastException>(() => del());
}
[Fact]
public void CannotReduce()
{
Expression unbox = Expression.Unbox(Expression.Constant(0, typeof(object)), typeof(int));
Assert.False(unbox.CanReduce);
Assert.Same(unbox, unbox.Reduce());
AssertExtensions.Throws<ArgumentException>(null, () => unbox.ReduceAndCheck());
}
[Fact]
public static void PointerType()
{
Type pointerType = typeof(int).MakePointerType();
AssertExtensions.Throws<ArgumentException>("type", () => Expression.Unbox(Expression.Constant(new object()), pointerType));
}
[Fact]
public static void ByRefType()
{
Type byRefType = typeof(int).MakeByRefType();
AssertExtensions.Throws<ArgumentException>("type", () => Expression.Unbox(Expression.Constant(new object()), byRefType));
}
private struct GenericValueType<T>
{
public T Value { get; set; }
}
[Fact]
public static void GenericType()
{
Type genType = typeof(GenericValueType<>);
AssertExtensions.Throws<ArgumentException>("type", () => Expression.Unbox(Expression.Constant(new object()), genType));
}
[Fact]
public static void GenericTypeParameters()
{
Type genType = typeof(GenericValueType<>);
AssertExtensions.Throws<ArgumentException>("type", () => Expression.Unbox(Expression.Constant(new object()), genType.MakeGenericType(genType)));
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ServiceModel;
namespace System.Collections.Generic
{
[System.Runtime.InteropServices.ComVisible(false)]
public class SynchronizedReadOnlyCollection<T> : IList<T>, IList
{
private IList<T> _items;
private object _sync;
public SynchronizedReadOnlyCollection()
{
_items = new List<T>();
_sync = new Object();
}
public SynchronizedReadOnlyCollection(object syncRoot)
{
if (syncRoot == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("syncRoot"));
_items = new List<T>();
_sync = syncRoot;
}
public SynchronizedReadOnlyCollection(object syncRoot, IEnumerable<T> list)
{
if (syncRoot == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("syncRoot"));
if (list == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("list"));
_items = new List<T>(list);
_sync = syncRoot;
}
public SynchronizedReadOnlyCollection(object syncRoot, params T[] list)
{
if (syncRoot == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("syncRoot"));
if (list == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("list"));
_items = new List<T>(list.Length);
for (int i = 0; i < list.Length; i++)
_items.Add(list[i]);
_sync = syncRoot;
}
internal SynchronizedReadOnlyCollection(object syncRoot, List<T> list, bool makeCopy)
{
if (syncRoot == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("syncRoot"));
if (list == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentNullException("list"));
if (makeCopy)
_items = new List<T>(list);
else
_items = list;
_sync = syncRoot;
}
public int Count
{
get { lock (_sync) { return _items.Count; } }
}
protected IList<T> Items
{
get
{
return _items;
}
}
public T this[int index]
{
get { lock (_sync) { return _items[index]; } }
}
public bool Contains(T value)
{
lock (_sync)
{
return _items.Contains(value);
}
}
public void CopyTo(T[] array, int index)
{
lock (_sync)
{
_items.CopyTo(array, index);
}
}
public IEnumerator<T> GetEnumerator()
{
lock (_sync)
{
return _items.GetEnumerator();
}
}
public int IndexOf(T value)
{
lock (_sync)
{
return _items.IndexOf(value);
}
}
private void ThrowReadOnly()
{
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException(SRServiceModel.SFxCollectionReadOnly));
}
bool ICollection<T>.IsReadOnly
{
get { return true; }
}
T IList<T>.this[int index]
{
get
{
return this[index];
}
set
{
this.ThrowReadOnly();
}
}
void ICollection<T>.Add(T value)
{
this.ThrowReadOnly();
}
void ICollection<T>.Clear()
{
this.ThrowReadOnly();
}
bool ICollection<T>.Remove(T value)
{
this.ThrowReadOnly();
return false;
}
void IList<T>.Insert(int index, T value)
{
this.ThrowReadOnly();
}
void IList<T>.RemoveAt(int index)
{
this.ThrowReadOnly();
}
bool ICollection.IsSynchronized
{
get { return true; }
}
object ICollection.SyncRoot
{
get { return _sync; }
}
void ICollection.CopyTo(Array array, int index)
{
ICollection asCollection = _items as ICollection;
if (asCollection == null)
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new NotSupportedException(SRServiceModel.SFxCopyToRequiresICollection));
lock (_sync)
{
asCollection.CopyTo(array, index);
}
}
IEnumerator IEnumerable.GetEnumerator()
{
lock (_sync)
{
IEnumerable asEnumerable = _items as IEnumerable;
if (asEnumerable != null)
return asEnumerable.GetEnumerator();
else
return new EnumeratorAdapter(_items);
}
}
bool IList.IsFixedSize
{
get { return true; }
}
bool IList.IsReadOnly
{
get { return true; }
}
object IList.this[int index]
{
get
{
return this[index];
}
set
{
this.ThrowReadOnly();
}
}
int IList.Add(object value)
{
this.ThrowReadOnly();
return 0;
}
void IList.Clear()
{
this.ThrowReadOnly();
}
bool IList.Contains(object value)
{
VerifyValueType(value);
return this.Contains((T)value);
}
int IList.IndexOf(object value)
{
VerifyValueType(value);
return this.IndexOf((T)value);
}
void IList.Insert(int index, object value)
{
this.ThrowReadOnly();
}
void IList.Remove(object value)
{
this.ThrowReadOnly();
}
void IList.RemoveAt(int index)
{
this.ThrowReadOnly();
}
private static void VerifyValueType(object value)
{
if ((value is T) || (value == null && !typeof(T).IsValueType()))
return;
Type type = (value == null) ? typeof(Object) : value.GetType();
string message = string.Format(SRServiceModel.SFxCollectionWrongType2, type.ToString(), typeof(T).ToString());
throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentException(message));
}
internal sealed class EnumeratorAdapter : IEnumerator, IDisposable
{
private IList<T> _list;
private IEnumerator<T> _e;
public EnumeratorAdapter(IList<T> list)
{
_list = list;
_e = list.GetEnumerator();
}
public object Current
{
get { return _e.Current; }
}
public bool MoveNext()
{
return _e.MoveNext();
}
public void Dispose()
{
_e.Dispose();
}
public void Reset()
{
_e = _list.GetEnumerator();
}
}
}
}
| |
using System;
#if NETSTANDARD
using System.Runtime.InteropServices;
#else
using Microsoft.Win32;
#endif
using System.Text.RegularExpressions;
namespace Lucene.Net.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Some useful constants.
/// </summary>
public static class Constants // LUCENENET specific - made static because all members are static and constructor in Lucene was private
{
// LUCENENET NOTE: IMPORTANT - this line must be placed before RUNTIME_VERSION so it can be parsed.
private static Regex VERSION = new Regex(@"(\d+\.\d+(?:\.\d+)?(?:\.\d+)?)", RegexOptions.Compiled);
// LUCENENET specific - renamed JAVA_VERSION to RUNTIME_VERSION and moved below OS constants because loading is dependent upon OS
/// <summary>
/// NOTE: This was JAVA_VENDOR in Lucene
/// </summary>
public static readonly string RUNTIME_VENDOR = "Microsoft"; // AppSettings.Get("java.vendor", "");
//public static readonly string JVM_VENDOR = GetEnvironmentVariable("java.vm.vendor", "");
//public static readonly string JVM_VERSION = GetEnvironmentVariable("java.vm.version", "");
//public static readonly string JVM_NAME = GetEnvironmentVariable("java.vm.name", "");
#if NETSTANDARD
/// <summary>
/// The value of <see cref="RuntimeInformation.OSDescription"/>, excluding the version number.</summary>
#else
/// <summary>
/// The value of System.Environment.OSVersion.VersionString, excluding the version number.</summary>
#endif
public static readonly string OS_NAME = LoadOSName();
private static string LoadOSName() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
#if NETSTANDARD
return VERSION.Replace(RuntimeInformation.OSDescription, string.Empty).Trim();
#else
return VERSION.Replace(Environment.OSVersion.VersionString, string.Empty).Trim();
#endif
}
/// <summary>
/// True iff running on Linux. </summary>
public static readonly bool LINUX = LoadLinux();
private static bool LoadLinux()
{
#if NETSTANDARD
return RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
#else
// 128 is mono's old platform tag for Unix.
// Reference: https://stackoverflow.com/a/5117005
int id = (int)Environment.OSVersion.Platform;
return id == 4 || id == 128;
#endif
}
/// <summary>
/// True iff running on Windows. </summary>
public static readonly bool WINDOWS = LoadWindows();
private static bool LoadWindows()
{
#if NETSTANDARD
return RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
#else
PlatformID pid = Environment.OSVersion.Platform;
return pid == PlatformID.Win32NT || pid == PlatformID.Win32Windows;
#endif
}
/// <summary>
/// True iff running on SunOS. </summary>
public static readonly bool SUN_OS = LoadSunOS();
private static bool LoadSunOS()
{
#if NETSTANDARD
return RuntimeInformation.IsOSPlatform(OSPlatform.Create("SunOS"));
#else
return false; // Not possible
#endif
}
/// <summary>
/// True iff running on Mac OS X </summary>
public static readonly bool MAC_OS_X = LoadMacOSX();
private static bool LoadMacOSX() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
#if NETSTANDARD
return RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
#else
// Reference: https://stackoverflow.com/a/5117005
return Environment.OSVersion.Platform == PlatformID.MacOSX;
#endif
}
/// <summary>
/// True iff running on FreeBSD </summary>
public static readonly bool FREE_BSD = LoadFreeBSD();
private static bool LoadFreeBSD() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
#if NETSTANDARD
return RuntimeInformation.IsOSPlatform(OSPlatform.Create("FreeBSD"));
#else
return false; // Not possible
#endif
}
public static readonly string OS_ARCH = LoadOSArch();
private static string LoadOSArch() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
#if NETSTANDARD
// Possible Values: X86, X64, Arm, Arm64
return RuntimeInformation.OSArchitecture.ToString();
#else
return Environment.Is64BitOperatingSystem ? "X64" : "X86";
#endif
}
public static readonly string OS_VERSION = LoadOSVersion();
private static string LoadOSVersion() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
#if NETSTANDARD
return ExtractString(RuntimeInformation.OSDescription, VERSION);
#else
return Environment.OSVersion.Version.ToString();
#endif
}
#if NETSTANDARD
/// <summary>
/// The value of the version parsed from <see cref="RuntimeInformation.FrameworkDescription"/>.
/// <para/>
/// NOTE: This was JAVA_VERSION in Lucene
/// </summary>
#else
/// <summary>
/// The value of the currently installed .NET Framework version on Windows or <see cref="Environment.Version"/> on other operating systems.
/// <para/>
/// NOTE: This was JAVA_VERSION in Lucene
/// </summary>
#endif
public static readonly string RUNTIME_VERSION = LoadRuntimeVersion();
private static string LoadRuntimeVersion() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
#if NETSTANDARD
return ExtractString(RuntimeInformation.FrameworkDescription, VERSION);
#else
return WINDOWS ? GetFramework45PlusFromRegistry() : Environment.Version.ToString();
#endif
}
//[Obsolete("We are not running on Java for heavens sake")]
//public static readonly bool JRE_IS_MINIMUM_JAVA6 = (bool)new bool?(true); // prevent inlining in foreign class files
//[Obsolete("We are not running on Java for heavens sake")]
//public static readonly bool JRE_IS_MINIMUM_JAVA7 = (bool)new bool?(true); // prevent inlining in foreign class files
//[Obsolete("We are not running on Java for heavens sake")]
//public static readonly bool JRE_IS_MINIMUM_JAVA8;
/// <summary>
/// NOTE: This was JRE_IS_64BIT in Lucene
/// </summary>
public static readonly bool RUNTIME_IS_64BIT = LoadRuntimeIs64Bit(); // LUCENENET NOTE: We still need this constant to indicate 64 bit runtime.
private static bool LoadRuntimeIs64Bit() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
{
// LUCENENET NOTE: In Java, the check is for sun.misc.Unsafe.addressSize,
// which is the pointer size of the current environment. We don't need to
// fallback to the OS bitness in .NET because this property is reliable and
// doesn't throw exceptions.
if (IntPtr.Size == 8)
return true;// 64 bit machine
else // if (IntPtr.Size == 4)
return false;// 32 bit machine
}
// this method prevents inlining the final version constant in compiled classes,
// see: http://www.javaworld.com/community/node/3400
private static string Ident(string s)
{
return s.ToString();
}
// We should never change index format with minor versions, so it should always be x.y or x.y.0.z for alpha/beta versions!
/// <summary>
/// this is the internal Lucene version, recorded into each segment.
/// NOTE: we track per-segment version as a <see cref="string"/> with the <c>"X.Y"</c> format
/// (no minor version), e.g. <c>"4.0", "3.1", "3.0"</c>.
/// <para/>Alpha and Beta versions will have numbers like <c>"X.Y.0.Z"</c>,
/// anything else is not allowed. This is done to prevent people from
/// using indexes created with ALPHA/BETA versions with the released version.
/// </summary>
public static readonly string LUCENE_MAIN_VERSION = Ident("4.8");
// LUCENENET NOTE: This version is automatically updated by the
// build script, so there is no need to change it here (although
// it might make sense to change it when a major/minor/patch
// port to Lucene is done).
/// <summary>
/// This is the Lucene version for display purposes.
/// </summary>
public static readonly string LUCENE_VERSION = "4.8.0";
/// <summary>
/// Returns a LUCENE_MAIN_VERSION without any ALPHA/BETA qualifier
/// Used by test only!
/// </summary>
public static string MainVersionWithoutAlphaBeta()
{
string[] parts = MAIN_VERSION_WITHOUT_ALPHA_BETA.Split(LUCENE_MAIN_VERSION);
if (parts.Length == 4 && "0".Equals(parts[2], StringComparison.Ordinal))
{
return parts[0] + "." + parts[1];
}
return LUCENE_MAIN_VERSION;
}
private static Regex MAIN_VERSION_WITHOUT_ALPHA_BETA = new Regex("\\.", RegexOptions.Compiled);
#if !NETSTANDARD
// Gets the .NET Framework Version (if at least 4.5)
// Reference: https://docs.microsoft.com/en-us/dotnet/framework/migration-guide/how-to-determine-which-versions-are-installed
private static string GetFramework45PlusFromRegistry()
{
const string subkey = @"SOFTWARE\Microsoft\NET Framework Setup\NDP\v4\Full\";
// As an alternative, if you know the computers you will query are running .NET Framework 4.5
// or later, you can use:
using (RegistryKey ndpKey = RegistryKey.OpenBaseKey(RegistryHive.LocalMachine, RegistryView.Registry32).OpenSubKey(subkey))
{
if (ndpKey != null && ndpKey.GetValue("Release") != null)
{
return CheckFor45PlusVersion((int)ndpKey.GetValue("Release"));
}
else
{
// Fall back to Environment.Version (probably wrong, but this is our best guess if the registry check fails)
return Environment.Version.ToString();
//Console.WriteLine(".NET Framework Version 4.5 or later is not detected.");
}
}
}
// Checking the version using >= will enable forward compatibility.
private static string CheckFor45PlusVersion(int releaseKey)
{
if (releaseKey >= 460799)
return "4.8 or later";
if (releaseKey >= 460798)
return "4.7";
if (releaseKey >= 394802)
return "4.6.2";
if (releaseKey >= 394254)
{
return "4.6.1";
}
if (releaseKey >= 393295)
{
return "4.6";
}
if ((releaseKey >= 379893))
{
return "4.5.2";
}
if ((releaseKey >= 378675))
{
return "4.5.1";
}
if ((releaseKey >= 378389))
{
return "4.5";
}
// This code should never execute. A non-null release key should mean
// that 4.5 or later is installed.
return "No 4.5 or later version detected";
}
#endif
// LUCENENET TODO: Move to Support ?
/// <summary>
/// Extracts the first group matched with the regex as a new string.
/// </summary>
/// <param name="input">The string to examine</param>
/// <param name="pattern">A regex object to use to extract the string</param>
private static string ExtractString(string input, Regex pattern)
{
Match m = pattern.Match(input);
return (m.Groups.Count > 1) ? m.Groups[1].Value : string.Empty;
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Input;
using System.Windows.Threading;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Editor.OptionsExtensionMethods;
using Microsoft.VisualStudio.Text.Operations;
using Microsoft.VisualStudio.Text.Projection;
using Microsoft.VisualStudio.Utilities;
namespace Microsoft.VisualStudio.InteractiveWindow
{
/// <summary>
/// Provides implementation of a Repl Window built on top of the VS editor using projection buffers.
/// </summary>
internal partial class InteractiveWindow : IInteractiveWindow, IInteractiveWindowOperations
{
private UIThreadOnly _dangerous_uiOnly;
#region Initialization
public InteractiveWindow(
IInteractiveWindowEditorFactoryService host,
IContentTypeRegistryService contentTypeRegistry,
ITextBufferFactoryService bufferFactory,
IProjectionBufferFactoryService projectionBufferFactory,
IEditorOperationsFactoryService editorOperationsFactory,
ITextEditorFactoryService editorFactory,
IIntellisenseSessionStackMapService intellisenseSessionStackMap,
ISmartIndentationService smartIndenterService,
IInteractiveEvaluator evaluator)
{
if (evaluator == null)
{
throw new ArgumentNullException(nameof(evaluator));
}
_dangerous_uiOnly = new UIThreadOnly(this, host);
this.Properties = new PropertyCollection();
_history = new History();
_intellisenseSessionStackMap = intellisenseSessionStackMap;
_smartIndenterService = smartIndenterService;
var textContentType = contentTypeRegistry.GetContentType("text");
var replContentType = contentTypeRegistry.GetContentType(PredefinedInteractiveContentTypes.InteractiveContentTypeName);
var replOutputContentType = contentTypeRegistry.GetContentType(PredefinedInteractiveContentTypes.InteractiveOutputContentTypeName);
_outputBuffer = bufferFactory.CreateTextBuffer(replOutputContentType);
_standardInputBuffer = bufferFactory.CreateTextBuffer();
var projBuffer = projectionBufferFactory.CreateProjectionBuffer(
new EditResolver(this),
Array.Empty<object>(),
ProjectionBufferOptions.None,
replContentType);
// we need to set IReplPromptProvider property before TextViewHost is instantiated so that ReplPromptTaggerProvider can bind to it
projBuffer.Properties.AddProperty(typeof(InteractiveWindow), this);
_projectionBuffer = projBuffer;
_dangerous_uiOnly.AppendNewOutputProjectionBuffer(); // Constructor runs on UI thread.
projBuffer.Changed += new EventHandler<TextContentChangedEventArgs>(ProjectionBufferChanged);
var roleSet = editorFactory.CreateTextViewRoleSet(
PredefinedTextViewRoles.Analyzable,
PredefinedTextViewRoles.Editable,
PredefinedTextViewRoles.Interactive,
PredefinedTextViewRoles.Zoomable,
PredefinedInteractiveTextViewRoles.InteractiveTextViewRole);
_textView = host.CreateTextView(this, projBuffer, roleSet);
_textView.Caret.PositionChanged += CaretPositionChanged;
_textView.Options.SetOptionValue(DefaultTextViewHostOptions.HorizontalScrollBarId, false);
_textView.Options.SetOptionValue(DefaultTextViewHostOptions.LineNumberMarginId, false);
_textView.Options.SetOptionValue(DefaultTextViewHostOptions.OutliningMarginId, false);
_textView.Options.SetOptionValue(DefaultTextViewHostOptions.GlyphMarginId, false);
_textView.Options.SetOptionValue(DefaultTextViewOptions.WordWrapStyleId, WordWrapStyles.WordWrap);
_lineBreakString = _textView.Options.GetNewLineCharacter();
_dangerous_uiOnly.EditorOperations = editorOperationsFactory.GetEditorOperations(_textView); // Constructor runs on UI thread.
_buffer = new OutputBuffer(this);
_outputWriter = new InteractiveWindowWriter(this, spans: null);
SortedSpans errorSpans = new SortedSpans();
_errorOutputWriter = new InteractiveWindowWriter(this, errorSpans);
OutputClassifierProvider.AttachToBuffer(_outputBuffer, errorSpans);
RequiresUIThread();
evaluator.CurrentWindow = this;
_evaluator = evaluator;
}
async Task<ExecutionResult> IInteractiveWindow.InitializeAsync()
{
RequiresUIThread();
if (_dangerous_uiOnly.State != State.Starting)
{
throw new InvalidOperationException(InteractiveWindowResources.AlreadyInitialized);
}
_dangerous_uiOnly.State = State.Initializing;
// Anything that reads options should wait until after this call so the evaluator can set the options first
ExecutionResult result = await _evaluator.InitializeAsync().ConfigureAwait(continueOnCapturedContext: true);
Debug.Assert(OnUIThread()); // ConfigureAwait should bring us back to the UI thread.
if (result.IsSuccessful)
{
_dangerous_uiOnly.PrepareForInput();
}
return result;
}
#endregion
private class UIThreadOnly
{
private readonly InteractiveWindow _window;
private readonly IInteractiveWindowEditorFactoryService _host;
private readonly TaskScheduler _uiScheduler;
private readonly IReadOnlyRegion[] _outputProtection;
// Pending submissions to be processed whenever the REPL is ready to accept submissions.
private readonly Queue<PendingSubmission> _pendingSubmissions;
private DispatcherTimer _executionTimer;
private Cursor _oldCursor;
private Task<ExecutionResult> _currentTask;
private int _currentOutputProjectionSpan;
private int _outputTrackingCaretPosition;
// Read-only regions protecting initial span of the corresponding buffers:
public readonly IReadOnlyRegion[] StandardInputProtection;
public string UncommittedInput;
private IEditorOperations _editorOperations;
public IEditorOperations EditorOperations
{
get
{
return _editorOperations;
}
set
{
Debug.Assert(_editorOperations == null, "Assignment only happens once.");
Debug.Assert(value != null);
_editorOperations = value;
}
}
public State _state;
public State State
{
get
{
return _state;
}
set
{
_window.StateChanged?.Invoke(value);
_state = value;
}
}
public UIThreadOnly(InteractiveWindow window, IInteractiveWindowEditorFactoryService host)
{
_window = window;
_host = host;
_uiScheduler = TaskScheduler.FromCurrentSynchronizationContext();
StandardInputProtection = new IReadOnlyRegion[2];
_outputProtection = new IReadOnlyRegion[2];
_pendingSubmissions = new Queue<PendingSubmission>();
_outputTrackingCaretPosition = -1;
}
public Task<ExecutionResult> ResetAsync(bool initialize)
{
Debug.Assert(State != State.Resetting, "The button should have been disabled.");
if (_window._stdInputStart != null)
{
CancelStandardInput();
}
_window._buffer.Flush();
if (State == State.WaitingForInput)
{
Debug.Assert(_window._projectionSpans.Last().Kind == ReplSpanKind.Language);
StoreUncommittedInput();
RemoveProjectionSpans(_window._projectionSpans.Count - 2, 2);
_window._currentLanguageBuffer = null;
}
// replace the task being interrupted by a "reset" task:
State = State.Resetting;
_currentTask = _window._evaluator.ResetAsync(initialize);
_currentTask.ContinueWith(FinishExecute, _uiScheduler);
return _currentTask;
}
public void ClearView()
{
if (_window._stdInputStart != null)
{
CancelStandardInput();
}
_window._adornmentToMinimize = false;
InlineAdornmentProvider.RemoveAllAdornments(_window._textView);
// remove all the spans except our initial span from the projection buffer
UncommittedInput = null;
// Clear the projection and buffers last as this might trigger events that might access other state of the REPL window:
RemoveProtection(_window._outputBuffer, _outputProtection);
RemoveProtection(_window._standardInputBuffer, StandardInputProtection);
using (var edit = _window._outputBuffer.CreateEdit(EditOptions.None, null, s_suppressPromptInjectionTag))
{
edit.Delete(0, _window._outputBuffer.CurrentSnapshot.Length);
edit.Apply();
}
_window._buffer.Reset();
OutputClassifierProvider.ClearSpans(_window._outputBuffer);
_outputTrackingCaretPosition = 0;
using (var edit = _window._standardInputBuffer.CreateEdit(EditOptions.None, null, s_suppressPromptInjectionTag))
{
edit.Delete(0, _window._standardInputBuffer.CurrentSnapshot.Length);
edit.Apply();
}
RemoveProjectionSpans(0, _window._projectionSpans.Count);
// Insert an empty output buffer.
// We do it for two reasons:
// 1) When output is written to asynchronously we need a buffer to store it.
// This may happen when clearing screen while background thread is writing to the console.
// 2) We need at least one non-inert span due to bugs in projection buffer.
AppendNewOutputProjectionBuffer();
_window._history.ForgetOriginalBuffers();
// If we were waiting for input, we need to restore the prompt that we just cleared.
// If we are in any other state, then we'll let normal transitions trigger the next prompt.
if (State == State.WaitingForInput)
{
PrepareForInput();
}
}
private void CancelStandardInput()
{
_window.AppendLineNoPromptInjection(_window._standardInputBuffer);
_window._inputValue = null;
_window._inputEvent.Set();
}
public void InsertCode(string text)
{
if (_window._stdInputStart != null)
{
return;
}
if (State == State.ExecutingInput)
{
AppendUncommittedInput(text);
}
else
{
if (!_window._textView.Selection.IsEmpty)
{
_window.CutOrDeleteSelection(isCut: false);
}
EditorOperations.InsertText(text);
}
}
public void Submit(PendingSubmission[] pendingSubmissions)
{
if (_window._stdInputStart == null)
{
if (State == State.WaitingForInput && _window._currentLanguageBuffer != null)
{
StoreUncommittedInput();
PendSubmissions(pendingSubmissions);
ProcessPendingSubmissions();
}
else
{
PendSubmissions(pendingSubmissions);
}
}
}
private void StoreUncommittedInput()
{
if (UncommittedInput == null)
{
string activeCode = _window.GetActiveCode();
if (!string.IsNullOrEmpty(activeCode))
{
UncommittedInput = activeCode;
}
}
}
private void PendSubmissions(IEnumerable<PendingSubmission> inputs)
{
foreach (var input in inputs)
{
_pendingSubmissions.Enqueue(input);
}
}
public void AddInput(string command)
{
// If the language buffer is readonly then input can not be added. Return immediately.
// The language buffer gets marked as readonly in SubmitAsync method when input on the prompt
// gets submitted. So it would be readonly when the user types #reset on the prompt. In that
// case it is the right thing to bail out of this method.
if (_window._currentLanguageBuffer != null && _window._currentLanguageBuffer.IsReadOnly(0))
{
return;
}
if (State == State.ExecutingInput || _window._currentLanguageBuffer == null)
{
AddLanguageBuffer();
_window._currentLanguageBuffer.Insert(0, command);
}
else
{
StoreUncommittedInput();
_window.SetActiveCode(command);
}
// Add command to history before calling FinishCurrentSubmissionInput as it adds newline
// to the end of the command.
_window._history.Add(_window._currentLanguageBuffer.CurrentSnapshot.GetExtent());
FinishCurrentSubmissionInput();
}
private void AppendUncommittedInput(string text)
{
if (string.IsNullOrEmpty(text))
{
// Do nothing.
}
else if (string.IsNullOrEmpty(UncommittedInput))
{
UncommittedInput = text;
}
else
{
UncommittedInput += text;
}
}
private void RestoreUncommittedInput()
{
if (UncommittedInput != null)
{
_window.SetActiveCode(UncommittedInput);
UncommittedInput = null;
}
}
/// <summary>
/// Pastes from the clipboard into the text view
/// </summary>
public bool Paste()
{
_window.MoveCaretToClosestEditableBuffer();
string format = _window._evaluator.FormatClipboard();
if (format != null)
{
InsertCode(format);
}
else if (Clipboard.ContainsText())
{
InsertCode(Clipboard.GetText());
}
else
{
return false;
}
return true;
}
/// <summary>
/// Appends given text to the last input span (standard input or active code input).
/// </summary>
private void AppendInput(string text)
{
var inputSpan = _window._projectionSpans[_window._projectionSpans.Count - 1];
Debug.Assert(inputSpan.Kind == ReplSpanKind.Language || inputSpan.Kind == ReplSpanKind.StandardInput);
Debug.Assert(inputSpan.TrackingSpan.TrackingMode == SpanTrackingMode.Custom);
var buffer = inputSpan.TrackingSpan.TextBuffer;
var span = inputSpan.TrackingSpan.GetSpan(buffer.CurrentSnapshot);
using (var edit = buffer.CreateEdit())
{
edit.Insert(edit.Snapshot.Length, text);
edit.Apply();
}
var replSpan = new ReplSpan(
new CustomTrackingSpan(
buffer.CurrentSnapshot,
new Span(span.Start, span.Length + text.Length),
PointTrackingMode.Negative,
PointTrackingMode.Positive),
inputSpan.Kind,
inputSpan.LineNumber);
ReplaceProjectionSpan(_window._projectionSpans.Count - 1, replSpan);
_window.Caret.EnsureVisible();
}
public void PrepareForInput()
{
_window._buffer.Flush();
AddLanguageBuffer();
// we are prepared for processing any postponed submissions there might have been:
ProcessPendingSubmissions();
}
private void ProcessPendingSubmissions()
{
Debug.Assert(_window._currentLanguageBuffer != null);
if (_pendingSubmissions.Count == 0)
{
RestoreUncommittedInput();
// move to the end (it might have been in virtual space):
_window.Caret.MoveTo(GetLastLine(_window.TextBuffer.CurrentSnapshot).End);
_window.Caret.EnsureVisible();
State = State.WaitingForInput;
var ready = _window.ReadyForInput;
if (ready != null)
{
ready();
}
return;
}
var submission = _pendingSubmissions.Dequeue();
// queue new work item:
_window.Dispatcher.Invoke(new Action(() =>
{
_window.SetActiveCode(submission.Input);
var taskDone = SubmitAsync();
if (submission.Completion != null)
{
taskDone.ContinueWith(x => submission.Completion.SetResult(null), TaskScheduler.Current);
}
}));
}
public Task SubmitAsync()
{
RequiresLanguageBuffer();
// TODO: queue submission
// Ensure that the REPL doesn't try to execute if it is already
// executing. If this invariant can no longer be maintained more of
// the code in this method will need to be bullet-proofed
if (State == State.ExecutingInput)
{
return Task.FromResult<object>(null);
}
// get command to save to history before calling FinishCurrentSubmissionInput
// as it adds newline at the end
var historySpan = _window._currentLanguageBuffer.CurrentSnapshot.GetExtent();
FinishCurrentSubmissionInput();
_window._history.UncommittedInput = null;
var snapshotSpan = _window._currentLanguageBuffer.CurrentSnapshot.GetExtent();
var trimmedSpan = snapshotSpan.TrimEnd();
if (trimmedSpan.Length == 0)
{
// TODO: reuse the current language buffer
PrepareForInput();
return Task.FromResult<object>(null);
}
else
{
_window._history.Add(historySpan);
State = State.ExecutingInput;
StartCursorTimer();
Debug.Assert(_currentTask == null, "Shouldn't be either executing or resetting");
_currentTask = _window._evaluator.ExecuteCodeAsync(snapshotSpan.GetText());
return _currentTask.ContinueWith(FinishExecute, _uiScheduler);
}
}
private void RequiresLanguageBuffer()
{
if (_window._currentLanguageBuffer == null)
{
Environment.FailFast("Language buffer not available");
}
}
private void FinishCurrentSubmissionInput()
{
_window.AppendLineNoPromptInjection(_window._currentLanguageBuffer);
ApplyProtection(_window._currentLanguageBuffer, regions: null);
if (_window._adornmentToMinimize)
{
// TODO (tomat): remember the index of the adornment(s) in the current output and minimize those instead of the last one
InlineAdornmentProvider.MinimizeLastInlineAdornment(_window._textView);
_window._adornmentToMinimize = false;
}
NewOutputBuffer();
}
/// <summary>
/// Marks the entire buffer as read-only.
/// </summary>
public void ApplyProtection(ITextBuffer buffer, IReadOnlyRegion[] regions, bool allowAppend = false)
{
using (var readonlyEdit = buffer.CreateReadOnlyRegionEdit())
{
int end = buffer.CurrentSnapshot.Length;
Span span = new Span(0, end);
var region0 = allowAppend ?
readonlyEdit.CreateReadOnlyRegion(span, SpanTrackingMode.EdgeExclusive, EdgeInsertionMode.Allow) :
readonlyEdit.CreateReadOnlyRegion(span, SpanTrackingMode.EdgeExclusive, EdgeInsertionMode.Deny);
// Create a second read-only region to prevent insert at start of buffer.
var region1 = (end > 0) ? readonlyEdit.CreateReadOnlyRegion(new Span(0, 0), SpanTrackingMode.EdgeExclusive, EdgeInsertionMode.Deny) : null;
readonlyEdit.Apply();
if (regions != null)
{
regions[0] = region0;
regions[1] = region1;
}
}
}
/// <summary>
/// Removes read-only region from buffer.
/// </summary>
public void RemoveProtection(ITextBuffer buffer, IReadOnlyRegion[] regions)
{
if (regions[0] != null)
{
Debug.Assert(regions[1] != null);
foreach (var region in regions)
{
using (var readonlyEdit = buffer.CreateReadOnlyRegionEdit())
{
readonlyEdit.RemoveReadOnlyRegion(region);
readonlyEdit.Apply();
}
}
}
}
public void NewOutputBuffer()
{
// Stop growing the current output projection span.
Debug.Assert(_window._projectionSpans[_currentOutputProjectionSpan].Kind == ReplSpanKind.Output);
var nonGrowingSpan = _window._projectionSpans[_currentOutputProjectionSpan].WithEndTrackingMode(PointTrackingMode.Negative);
ReplaceProjectionSpan(_currentOutputProjectionSpan, nonGrowingSpan);
AppendNewOutputProjectionBuffer();
_outputTrackingCaretPosition = _window._textView.Caret.Position.BufferPosition;
}
// WARNING: When updating projection spans we need to update _projectionSpans list first and
// then projection buffer, since the projection buffer update might trigger events that might
// access the projection spans.
public void AppendNewOutputProjectionBuffer()
{
var currentSnapshot = _window._outputBuffer.CurrentSnapshot;
var trackingSpan = new CustomTrackingSpan(
currentSnapshot,
new Span(currentSnapshot.Length, 0),
PointTrackingMode.Negative,
PointTrackingMode.Positive);
var lastLineNumber = _window._textView == null ? 0 : _window.LastLineNumber;
_currentOutputProjectionSpan = AppendProjectionSpan(new ReplSpan(trackingSpan, ReplSpanKind.Output, lastLineNumber));
}
private int AppendProjectionSpan(ReplSpan span)
{
int index = _window._projectionSpans.Count;
InsertProjectionSpan(index, span);
return index;
}
private void InsertProjectionSpan(int index, ReplSpan span)
{
_window._projectionSpans.Insert(index, span);
_window._projectionBuffer.ReplaceSpans(index, 0, new[] { span.Span }, EditOptions.None, editTag: s_suppressPromptInjectionTag);
_window.CheckProjectionSpanLineNumbers();
}
public void ReplaceProjectionSpan(int spanToReplace, ReplSpan newSpan)
{
Debug.Assert(_window._projectionSpans[spanToReplace].LineNumber == newSpan.LineNumber);
_window._projectionSpans[spanToReplace] = newSpan;
_window._projectionBuffer.ReplaceSpans(spanToReplace, 1, new[] { newSpan.Span }, EditOptions.None, editTag: s_suppressPromptInjectionTag);
_window.CheckProjectionSpanLineNumbers();
}
private void RemoveProjectionSpans(int index, int count)
{
_window._projectionSpans.RemoveRange(index, count);
_window._projectionBuffer.ReplaceSpans(index, count, Array.Empty<object>(), EditOptions.None, s_suppressPromptInjectionTag);
_window.CheckProjectionSpanLineNumbers();
}
/// <summary>
/// Appends text to the output buffer and updates projection buffer to include it.
/// WARNING: this has to be the only method that writes to the output buffer so that
/// the output buffering counters are kept in sync.
/// </summary>
internal void AppendOutput(IEnumerable<string> output, int outputLength)
{
Debug.Assert(output.Any());
// we maintain this invariant so that projections don't split "\r\n" in half
// (the editor isn't happy about it and out line counting also gets simpler):
Debug.Assert(!_window._outputBuffer.CurrentSnapshot.EndsWith('\r'));
var projectionSpans = _window._projectionSpans;
Debug.Assert(projectionSpans[_currentOutputProjectionSpan].Kind == ReplSpanKind.Output);
int lineBreakProjectionSpanIndex = _currentOutputProjectionSpan + 1;
// insert line break projection span if there is none and the output doesn't end with a line break:
bool hasLineBreakProjection = false;
if (lineBreakProjectionSpanIndex < projectionSpans.Count)
{
var oldSpan = projectionSpans[lineBreakProjectionSpanIndex];
hasLineBreakProjection = oldSpan.Kind == ReplSpanKind.Output && object.Equals(oldSpan.Span, _window._lineBreakString);
}
Debug.Assert(output.Last().Last() != '\r');
bool endsWithLineBreak = output.Last().Last() == '\n';
bool insertLineBreak = !endsWithLineBreak && !hasLineBreakProjection;
bool removeLineBreak = endsWithLineBreak && hasLineBreakProjection;
// insert text to the subject buffer.
int oldBufferLength = _window._outputBuffer.CurrentSnapshot.Length;
InsertOutput(output, oldBufferLength);
if (removeLineBreak)
{
RemoveProjectionSpans(lineBreakProjectionSpanIndex, 1);
}
else if (insertLineBreak)
{
int lineNumber = _window.LastLineNumber;
InsertProjectionSpan(lineBreakProjectionSpanIndex, new ReplSpan(_window._lineBreakString, ReplSpanKind.Output, lineNumber));
}
// caret didn't move since last time we moved it to track output:
if (_outputTrackingCaretPosition == _window._textView.Caret.Position.BufferPosition)
{
_window._textView.Caret.EnsureVisible();
_outputTrackingCaretPosition = _window._textView.Caret.Position.BufferPosition;
}
}
private void InsertOutput(IEnumerable<string> output, int position)
{
RemoveProtection(_window._outputBuffer, _outputProtection);
// append the text to output buffer and make sure it ends with a line break:
using (var edit = _window._outputBuffer.CreateEdit(EditOptions.None, null, s_suppressPromptInjectionTag))
{
foreach (string text in output)
{
edit.Insert(position, text);
}
edit.Apply();
}
ApplyProtection(_window._outputBuffer, _outputProtection);
}
private void FinishExecute(Task<ExecutionResult> result)
{
// The finished task has been replaced by another task (e.g. reset).
// Do not perform any task finalization, it will be done by the replacement task.
if (_currentTask != result)
{
return;
}
_currentTask = null;
ResetCursor();
if (result.Exception != null || !result.Result.IsSuccessful)
{
if (_window._history.Last != null)
{
_window._history.Last.Failed = true;
}
}
PrepareForInput();
}
public void ExecuteInput()
{
ITextBuffer languageBuffer = GetLanguageBuffer(_window.Caret.Position.BufferPosition);
if (languageBuffer == null)
{
return;
}
if (languageBuffer == _window._currentLanguageBuffer)
{
// TODO (tomat): this should rather send an abstract "finish" command that various features
// can implement as needed (IntelliSense, inline rename would commit, etc.).
// For now, commit IntelliSense:
var completionSession = _window.SessionStack.TopSession as ICompletionSession;
if (completionSession != null)
{
completionSession.Commit();
}
SubmitAsync();
}
else
{
// append text of the target buffer to the current language buffer:
string text = TrimTrailingEmptyLines(languageBuffer.CurrentSnapshot);
_window._currentLanguageBuffer.Replace(new Span(_window._currentLanguageBuffer.CurrentSnapshot.Length, 0), text);
EditorOperations.MoveToEndOfDocument(false);
}
}
private static string TrimTrailingEmptyLines(ITextSnapshot snapshot)
{
var line = GetLastLine(snapshot);
while (line != null && line.Length == 0)
{
line = GetPreviousLine(line);
}
if (line == null)
{
return string.Empty;
}
return line.Snapshot.GetText(0, line.Extent.End.Position);
}
private static ITextSnapshotLine GetPreviousLine(ITextSnapshotLine line)
{
return line.LineNumber > 0 ? line.Snapshot.GetLineFromLineNumber(line.LineNumber - 1) : null;
}
/// <summary>
/// Returns the language or command text buffer that the specified point belongs to.
/// If the point lays in a prompt returns the buffer corresponding to the prompt.
/// </summary>
/// <returns>The language or command buffer or null if the point doesn't belong to any.</returns>
private ITextBuffer GetLanguageBuffer(SnapshotPoint point)
{
int promptIndex = _window.GetPromptIndexForPoint(point);
if (promptIndex < 0)
{
return null;
}
// Grab the span following the prompt (either language or standard input).
ReplSpan projectionSpan = _window._projectionSpans[promptIndex + 1];
if (projectionSpan.Kind != ReplSpanKind.Language)
{
Debug.Assert(projectionSpan.Kind == ReplSpanKind.StandardInput);
return null;
}
var inputBuffer = projectionSpan.TrackingSpan.TextBuffer;
var inputSnapshot = inputBuffer.CurrentSnapshot;
var projectedSnapshot = _window._textView.BufferGraph.MapUpToBuffer(
new SnapshotSpan(inputSnapshot, 0, inputSnapshot.Length),
SpanTrackingMode.EdgePositive,
_window._projectionBuffer);
Debug.Assert(projectedSnapshot.Count > 0);
var projectedSnapshotStart = projectedSnapshot.First().Start;
var projectedSnapshotEnd = projectedSnapshot.Last().End;
if (point < projectedSnapshotStart.GetContainingLine().Start)
{
return null;
}
// If the buffer is the current buffer, the cursor might be in a virtual space behind the buffer
// but logically it belongs to the current submission. Since the current language buffer is the last buffer in the
// projection we don't need to check for its end.
if (inputBuffer == _window._currentLanguageBuffer)
{
return inputBuffer;
}
// if the point is at the end of the buffer it might be on the next line that doesn't logically belong to the input region:
if (point > projectedSnapshotEnd || (point == projectedSnapshotEnd && projectedSnapshotEnd.GetContainingLine().LineBreakLength != 0))
{
return null;
}
return inputBuffer;
}
public void ResetCursor()
{
if (_executionTimer != null)
{
_executionTimer.Stop();
}
if (_oldCursor != null)
{
((ContentControl)_window._textView).Cursor = _oldCursor;
}
_oldCursor = null;
_executionTimer = null;
}
private void StartCursorTimer()
{
var timer = new DispatcherTimer();
timer.Tick += SetRunningCursor;
timer.Interval = TimeSpan.FromMilliseconds(250);
_executionTimer = timer;
timer.Start();
}
private void SetRunningCursor(object sender, EventArgs e)
{
var view = (ContentControl)_window._textView;
// Save the old value of the cursor so it can be restored
// after execution has finished
_oldCursor = view.Cursor;
// TODO: Design work to come up with the correct cursor to use
// Set the repl's cursor to the "executing" cursor
view.Cursor = Cursors.Wait;
// Stop the timer so it doesn't fire again
if (_executionTimer != null)
{
_executionTimer.Stop();
}
}
public int IndexOfLastStandardInputSpan()
{
for (int i = _window._projectionSpans.Count - 1; i >= 0; i--)
{
if (_window._projectionSpans[i].Kind == ReplSpanKind.StandardInput)
{
return i;
}
}
return -1;
}
public void RemoveLastInputPrompt()
{
Debug.Assert(_window._projectionSpans[_window._projectionSpans.Count - SpansPerLineOfInput].Kind.IsPrompt());
// projection buffer update must be the last operation as it might trigger event that accesses prompt line mapping:
RemoveProjectionSpans(_window._projectionSpans.Count - SpansPerLineOfInput, SpansPerLineOfInput);
}
/// <summary>
/// Creates and adds a new language buffer to the projection buffer.
/// </summary>
private void AddLanguageBuffer()
{
ITextBuffer buffer = _host.CreateAndActivateBuffer(_window);
buffer.Properties.AddProperty(typeof(IInteractiveEvaluator), _window._evaluator);
buffer.Properties.AddProperty(typeof(InteractiveWindow), _window);
_window._currentLanguageBuffer = buffer;
var bufferAdded = _window.SubmissionBufferAdded;
if (bufferAdded != null)
{
bufferAdded(_window, new SubmissionBufferAddedEventArgs(buffer));
}
// add the whole buffer to the projection buffer and set it up to expand to the right as text is appended
ReplSpan promptSpan = _window.CreatePrimaryPrompt();
ReplSpan languageSpan = new ReplSpan(_window.CreateLanguageTrackingSpan(new Span(0, 0)), ReplSpanKind.Language, promptSpan.LineNumber);
// projection buffer update must be the last operation as it might trigger event that accesses prompt line mapping:
_window.AppendProjectionSpans(promptSpan, languageSpan);
}
}
internal enum State
{
/// <summary>
/// Initial state. <see cref="IInteractiveWindow.InitializeAsync"/> hasn't been called.
/// Transition to <see cref="Initializing"/> when <see cref="IInteractiveWindow.InitializeAsync"/> is called.
/// Transition to <see cref="Resetting"/> when <see cref="IInteractiveWindowOperations.ResetAsync"/> is called.
/// </summary>
Starting,
/// <summary>
/// In the process of calling <see cref="IInteractiveWindow.InitializeAsync"/>.
/// Transition to <see cref="WaitingForInput"/> when finished (in <see cref="UIThreadOnly.ProcessPendingSubmissions"/>).
/// Transition to <see cref="Resetting"/> when <see cref="IInteractiveWindowOperations.ResetAsync"/> is called.
/// </summary>
Initializing,
/// <summary>
/// In the process of calling <see cref="IInteractiveWindowOperations.ResetAsync"/>.
/// Transition to <see cref="WaitingForInput"/> when finished (in <see cref="UIThreadOnly.ProcessPendingSubmissions"/>).
/// Note: Should not see <see cref="IInteractiveWindowOperations.ResetAsync"/> calls while in this state.
/// </summary>
Resetting,
/// <summary>
/// Prompt has been displayed - waiting for the user to make the next submission.
/// Transition to <see cref="ExecutingInput"/> when <see cref="IInteractiveWindowOperations.ExecuteInput"/> is called.
/// Transition to <see cref="Resetting"/> when <see cref="IInteractiveWindowOperations.ResetAsync"/> is called.
/// </summary>
WaitingForInput,
/// <summary>
/// Executing the user's submission.
/// Transition to <see cref="WaitingForInput"/> when finished (in <see cref="UIThreadOnly.ProcessPendingSubmissions"/>).
/// Transition to <see cref="Resetting"/> when <see cref="IInteractiveWindowOperations.ResetAsync"/> is called.
/// </summary>
ExecutingInput,
/// <summary>
/// In the process of calling <see cref="IInteractiveWindow.ReadStandardInput"/>.
/// Return to preceding state when finished.
/// Transition to <see cref="Resetting"/> when <see cref="IInteractiveWindowOperations.ResetAsync"/> is called.
/// </summary>
/// <remarks>
/// TODO: When we clean up <see cref="IInteractiveWindow.ReadStandardInput"/> (https://github.com/dotnet/roslyn/issues/3984)
/// we should try to eliminate the "preceding state", since it substantially
/// increases the complexity of the state machine.
/// </remarks>
ReadingStandardInput,
}
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Org.Apache.Http.Impl.Auth.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Org.Apache.Http.Impl.Auth
{
/// <summary>
/// <para>Signals NTLM protocol failure.</para><para><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/NTLMEngineException
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/NTLMEngineException", AccessFlags = 33)]
public partial class NTLMEngineException : global::Org.Apache.Http.Auth.AuthenticationException
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public NTLMEngineException() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Creates a new NTLMEngineException with the specified message.</para><para></para>
/// </summary>
[Dot42.DexImport("<init>", "(Ljava/lang/String;)V", AccessFlags = 1)]
public NTLMEngineException(string message) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Creates a new NTLMEngineException with the specified detail message and cause.</para><para></para>
/// </summary>
[Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/Throwable;)V", AccessFlags = 1)]
public NTLMEngineException(string message, global::System.Exception cause) /* MethodBuilder.Create */
{
}
}
/// <java-name>
/// org/apache/http/impl/auth/NTLMScheme
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/NTLMScheme", AccessFlags = 33)]
public partial class NTLMScheme : global::Org.Apache.Http.Impl.Auth.AuthSchemeBase
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "(Lorg/apache/http/impl/auth/NTLMEngine;)V", AccessFlags = 1)]
public NTLMScheme(global::Org.Apache.Http.Impl.Auth.INTLMEngine engine) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Returns textual designation of the given authentication scheme.</para><para></para>
/// </summary>
/// <returns>
/// <para>the name of the given authentication scheme </para>
/// </returns>
/// <java-name>
/// getSchemeName
/// </java-name>
[Dot42.DexImport("getSchemeName", "()Ljava/lang/String;", AccessFlags = 1)]
public override string GetSchemeName() /* MethodBuilder.Create */
{
return default(string);
}
/// <java-name>
/// getParameter
/// </java-name>
[Dot42.DexImport("getParameter", "(Ljava/lang/String;)Ljava/lang/String;", AccessFlags = 1)]
public override string GetParameter(string name) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Returns authentication realm. If the concept of an authentication realm is not applicable to the given authentication scheme, returns <code>null</code>.</para><para></para>
/// </summary>
/// <returns>
/// <para>the authentication realm </para>
/// </returns>
/// <java-name>
/// getRealm
/// </java-name>
[Dot42.DexImport("getRealm", "()Ljava/lang/String;", AccessFlags = 1)]
public override string GetRealm() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Tests if the authentication scheme is provides authorization on a per connection basis instead of usual per request basis</para><para></para>
/// </summary>
/// <returns>
/// <para><code>true</code> if the scheme is connection based, <code>false</code> if the scheme is request based. </para>
/// </returns>
/// <java-name>
/// isConnectionBased
/// </java-name>
[Dot42.DexImport("isConnectionBased", "()Z", AccessFlags = 1)]
public override bool IsConnectionBased() /* MethodBuilder.Create */
{
return default(bool);
}
/// <java-name>
/// parseChallenge
/// </java-name>
[Dot42.DexImport("parseChallenge", "(Lorg/apache/http/util/CharArrayBuffer;II)V", AccessFlags = 4)]
protected internal override void ParseChallenge(global::Org.Apache.Http.Util.CharArrayBuffer buffer, int pos, int len) /* MethodBuilder.Create */
{
}
/// <java-name>
/// authenticate
/// </java-name>
[Dot42.DexImport("authenticate", "(Lorg/apache/http/auth/Credentials;Lorg/apache/http/HttpRequest;)Lorg/apache/http" +
"/Header;", AccessFlags = 1)]
public override global::Org.Apache.Http.IHeader Authenticate(global::Org.Apache.Http.Auth.ICredentials credentials, global::Org.Apache.Http.IHttpRequest request) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHeader);
}
/// <summary>
/// <para>Authentication process may involve a series of challenge-response exchanges. This method tests if the authorization process has been completed, either successfully or unsuccessfully, that is, all the required authorization challenges have been processed in their entirety.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>true</code> if the authentication process has been completed, <code>false</code> otherwise. </para>
/// </returns>
/// <java-name>
/// isComplete
/// </java-name>
[Dot42.DexImport("isComplete", "()Z", AccessFlags = 1)]
public override bool IsComplete() /* MethodBuilder.Create */
{
return default(bool);
}
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal NTLMScheme() /* TypeBuilder.AddDefaultConstructor */
{
}
/// <summary>
/// <para>Returns textual designation of the given authentication scheme.</para><para></para>
/// </summary>
/// <returns>
/// <para>the name of the given authentication scheme </para>
/// </returns>
/// <java-name>
/// getSchemeName
/// </java-name>
public string SchemeName
{
[Dot42.DexImport("getSchemeName", "()Ljava/lang/String;", AccessFlags = 1)]
get{ return GetSchemeName(); }
}
/// <summary>
/// <para>Returns authentication realm. If the concept of an authentication realm is not applicable to the given authentication scheme, returns <code>null</code>.</para><para></para>
/// </summary>
/// <returns>
/// <para>the authentication realm </para>
/// </returns>
/// <java-name>
/// getRealm
/// </java-name>
public string Realm
{
[Dot42.DexImport("getRealm", "()Ljava/lang/String;", AccessFlags = 1)]
get{ return GetRealm(); }
}
}
/// <summary>
/// <para>Authentication credentials required to respond to a authentication challenge are invalid</para><para><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/UnsupportedDigestAlgorithmException
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/UnsupportedDigestAlgorithmException", AccessFlags = 33)]
public partial class UnsupportedDigestAlgorithmException : global::System.SystemException
/* scope: __dot42__ */
{
/// <summary>
/// <para>Creates a new UnsupportedAuthAlgoritmException with a <code>null</code> detail message. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public UnsupportedDigestAlgorithmException() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Creates a new UnsupportedAuthAlgoritmException with the specified message.</para><para></para>
/// </summary>
[Dot42.DexImport("<init>", "(Ljava/lang/String;)V", AccessFlags = 1)]
public UnsupportedDigestAlgorithmException(string message) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Creates a new UnsupportedAuthAlgoritmException with the specified detail message and cause.</para><para></para>
/// </summary>
[Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/Throwable;)V", AccessFlags = 1)]
public UnsupportedDigestAlgorithmException(string message, global::System.Exception cause) /* MethodBuilder.Create */
{
}
}
/// <summary>
/// <para>Abstract authentication scheme class that lays foundation for all RFC 2617 compliant authetication schemes and provides capabilities common to all authentication schemes defined in RFC 2617.</para><para><para> </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/RFC2617Scheme
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/RFC2617Scheme", AccessFlags = 1057)]
public abstract partial class RFC2617Scheme : global::Org.Apache.Http.Impl.Auth.AuthSchemeBase
/* scope: __dot42__ */
{
/// <summary>
/// <para>Default constructor for RFC2617 compliant authetication schemes. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public RFC2617Scheme() /* MethodBuilder.Create */
{
}
/// <java-name>
/// parseChallenge
/// </java-name>
[Dot42.DexImport("parseChallenge", "(Lorg/apache/http/util/CharArrayBuffer;II)V", AccessFlags = 4)]
protected internal override void ParseChallenge(global::Org.Apache.Http.Util.CharArrayBuffer buffer, int pos, int len) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Returns authentication parameters map. Keys in the map are lower-cased.</para><para></para>
/// </summary>
/// <returns>
/// <para>the map of authentication parameters </para>
/// </returns>
/// <java-name>
/// getParameters
/// </java-name>
[Dot42.DexImport("getParameters", "()Ljava/util/Map;", AccessFlags = 4, Signature = "()Ljava/util/Map<Ljava/lang/String;Ljava/lang/String;>;")]
protected internal virtual global::Java.Util.IMap<string, string> GetParameters() /* MethodBuilder.Create */
{
return default(global::Java.Util.IMap<string, string>);
}
/// <summary>
/// <para>Returns authentication parameter with the given name, if available.</para><para></para>
/// </summary>
/// <returns>
/// <para>the parameter with the given name </para>
/// </returns>
/// <java-name>
/// getParameter
/// </java-name>
[Dot42.DexImport("getParameter", "(Ljava/lang/String;)Ljava/lang/String;", AccessFlags = 1)]
public override string GetParameter(string name) /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Returns authentication realm. The realm may not be null.</para><para></para>
/// </summary>
/// <returns>
/// <para>the authentication realm </para>
/// </returns>
/// <java-name>
/// getRealm
/// </java-name>
[Dot42.DexImport("getRealm", "()Ljava/lang/String;", AccessFlags = 1)]
public override string GetRealm() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Returns authentication parameters map. Keys in the map are lower-cased.</para><para></para>
/// </summary>
/// <returns>
/// <para>the map of authentication parameters </para>
/// </returns>
/// <java-name>
/// getParameters
/// </java-name>
protected internal global::Java.Util.IMap<string, string> Parameters
{
[Dot42.DexImport("getParameters", "()Ljava/util/Map;", AccessFlags = 4, Signature = "()Ljava/util/Map<Ljava/lang/String;Ljava/lang/String;>;")]
get{ return GetParameters(); }
}
/// <summary>
/// <para>Returns authentication realm. The realm may not be null.</para><para></para>
/// </summary>
/// <returns>
/// <para>the authentication realm </para>
/// </returns>
/// <java-name>
/// getRealm
/// </java-name>
public string Realm
{
[Dot42.DexImport("getRealm", "()Ljava/lang/String;", AccessFlags = 1)]
get{ return GetRealm(); }
}
}
/// <summary>
/// <para>Abstract authentication scheme class that serves as a basis for all authentication schemes supported by HttpClient. This class defines the generic way of parsing an authentication challenge. It does not make any assumptions regarding the format of the challenge nor does it impose any specific way of responding to that challenge.</para><para><para> </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/AuthSchemeBase
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/AuthSchemeBase", AccessFlags = 1057)]
public abstract partial class AuthSchemeBase : global::Org.Apache.Http.Auth.IAuthScheme
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public AuthSchemeBase() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Processes the given challenge token. Some authentication schemes may involve multiple challenge-response exchanges. Such schemes must be able to maintain the state information when dealing with sequential challenges</para><para></para>
/// </summary>
/// <java-name>
/// processChallenge
/// </java-name>
[Dot42.DexImport("processChallenge", "(Lorg/apache/http/Header;)V", AccessFlags = 1)]
public virtual void ProcessChallenge(global::Org.Apache.Http.IHeader header) /* MethodBuilder.Create */
{
}
/// <java-name>
/// parseChallenge
/// </java-name>
[Dot42.DexImport("parseChallenge", "(Lorg/apache/http/util/CharArrayBuffer;II)V", AccessFlags = 1028)]
protected internal abstract void ParseChallenge(global::Org.Apache.Http.Util.CharArrayBuffer buffer, int pos, int len) /* MethodBuilder.Create */ ;
/// <summary>
/// <para>Returns <code>true</code> if authenticating against a proxy, <code>false</code> otherwise.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>true</code> if authenticating against a proxy, <code>false</code> otherwise </para>
/// </returns>
/// <java-name>
/// isProxy
/// </java-name>
[Dot42.DexImport("isProxy", "()Z", AccessFlags = 1)]
public virtual bool IsProxy() /* MethodBuilder.Create */
{
return default(bool);
}
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "getSchemeName", "()Ljava/lang/String;", AccessFlags = 1025)]
public virtual string GetSchemeName() /* TypeBuilder.AddAbstractInterfaceMethods */
{
return default(string);
}
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "getParameter", "(Ljava/lang/String;)Ljava/lang/String;", AccessFlags = 1025)]
public virtual string GetParameter(string name) /* TypeBuilder.AddAbstractInterfaceMethods */
{
return default(string);
}
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "getRealm", "()Ljava/lang/String;", AccessFlags = 1025)]
public virtual string GetRealm() /* TypeBuilder.AddAbstractInterfaceMethods */
{
return default(string);
}
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "isConnectionBased", "()Z", AccessFlags = 1025)]
public virtual bool IsConnectionBased() /* TypeBuilder.AddAbstractInterfaceMethods */
{
return default(bool);
}
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "isComplete", "()Z", AccessFlags = 1025)]
public virtual bool IsComplete() /* TypeBuilder.AddAbstractInterfaceMethods */
{
return default(bool);
}
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "authenticate", "(Lorg/apache/http/auth/Credentials;Lorg/apache/http/HttpRequest;)Lorg/apache/http" +
"/Header;", AccessFlags = 1025)]
public virtual global::Org.Apache.Http.IHeader Authenticate(global::Org.Apache.Http.Auth.ICredentials credentials, global::Org.Apache.Http.IHttpRequest request) /* TypeBuilder.AddAbstractInterfaceMethods */
{
return default(global::Org.Apache.Http.IHeader);
}
public string SchemeName
{
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "getSchemeName", "()Ljava/lang/String;", AccessFlags = 1025)]
get{ return GetSchemeName(); }
}
public string Realm
{
[Dot42.DexImport("org/apache/http/auth/AuthScheme", "getRealm", "()Ljava/lang/String;", AccessFlags = 1025)]
get{ return GetRealm(); }
}
}
/// <summary>
/// <para><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/BasicSchemeFactory
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/BasicSchemeFactory", AccessFlags = 33)]
public partial class BasicSchemeFactory : global::Org.Apache.Http.Auth.IAuthSchemeFactory
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public BasicSchemeFactory() /* MethodBuilder.Create */
{
}
/// <java-name>
/// newInstance
/// </java-name>
[Dot42.DexImport("newInstance", "(Lorg/apache/http/params/HttpParams;)Lorg/apache/http/auth/AuthScheme;", AccessFlags = 1)]
public virtual global::Org.Apache.Http.Auth.IAuthScheme NewInstance(global::Org.Apache.Http.Params.IHttpParams @params) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Auth.IAuthScheme);
}
}
/// <summary>
/// <para>Basic authentication scheme as defined in RFC 2617. </para><para><para> </para><simplesectsep></simplesectsep><para>Rodney Waldhoff </para><simplesectsep></simplesectsep><para> </para><simplesectsep></simplesectsep><para>Ortwin Glueck </para><simplesectsep></simplesectsep><para>Sean C. Sullivan </para><simplesectsep></simplesectsep><para> </para><simplesectsep></simplesectsep><para> </para><simplesectsep></simplesectsep><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/BasicScheme
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/BasicScheme", AccessFlags = 33)]
public partial class BasicScheme : global::Org.Apache.Http.Impl.Auth.RFC2617Scheme
/* scope: __dot42__ */
{
/// <summary>
/// <para>Default constructor for the basic authetication scheme. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public BasicScheme() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Returns textual designation of the basic authentication scheme.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>basic</code> </para>
/// </returns>
/// <java-name>
/// getSchemeName
/// </java-name>
[Dot42.DexImport("getSchemeName", "()Ljava/lang/String;", AccessFlags = 1)]
public override string GetSchemeName() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Processes the Basic challenge.</para><para></para>
/// </summary>
/// <java-name>
/// processChallenge
/// </java-name>
[Dot42.DexImport("processChallenge", "(Lorg/apache/http/Header;)V", AccessFlags = 1)]
public override void ProcessChallenge(global::Org.Apache.Http.IHeader header) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Tests if the Basic authentication process has been completed.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>true</code> if Basic authorization has been processed, <code>false</code> otherwise. </para>
/// </returns>
/// <java-name>
/// isComplete
/// </java-name>
[Dot42.DexImport("isComplete", "()Z", AccessFlags = 1)]
public override bool IsComplete() /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Returns <code>false</code>. Basic authentication scheme is request based.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>false</code>. </para>
/// </returns>
/// <java-name>
/// isConnectionBased
/// </java-name>
[Dot42.DexImport("isConnectionBased", "()Z", AccessFlags = 1)]
public override bool IsConnectionBased() /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Produces basic authorization header for the given set of Credentials.</para><para></para>
/// </summary>
/// <returns>
/// <para>a basic authorization string </para>
/// </returns>
/// <java-name>
/// authenticate
/// </java-name>
[Dot42.DexImport("authenticate", "(Lorg/apache/http/auth/Credentials;Lorg/apache/http/HttpRequest;)Lorg/apache/http" +
"/Header;", AccessFlags = 1)]
public override global::Org.Apache.Http.IHeader Authenticate(global::Org.Apache.Http.Auth.ICredentials credentials, global::Org.Apache.Http.IHttpRequest request) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHeader);
}
/// <summary>
/// <para>Returns a basic <code>Authorization</code> header value for the given Credentials and charset.</para><para></para>
/// </summary>
/// <returns>
/// <para>a basic authorization header </para>
/// </returns>
/// <java-name>
/// authenticate
/// </java-name>
[Dot42.DexImport("authenticate", "(Lorg/apache/http/auth/Credentials;Ljava/lang/String;Z)Lorg/apache/http/Header;", AccessFlags = 9)]
public static global::Org.Apache.Http.IHeader Authenticate(global::Org.Apache.Http.Auth.ICredentials credentials, string charset, bool proxy) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHeader);
}
/// <summary>
/// <para>Returns textual designation of the basic authentication scheme.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>basic</code> </para>
/// </returns>
/// <java-name>
/// getSchemeName
/// </java-name>
public string SchemeName
{
[Dot42.DexImport("getSchemeName", "()Ljava/lang/String;", AccessFlags = 1)]
get{ return GetSchemeName(); }
}
}
/// <summary>
/// <para>Digest authentication scheme as defined in RFC 2617. Both MD5 (default) and MD5-sess are supported. Currently only qop=auth or no qop is supported. qop=auth-int is unsupported. If auth and auth-int are provided, auth is used. </para><para>Credential charset is configured via the credential charset parameter. Since the digest username is included as clear text in the generated Authentication header, the charset of the username must be compatible with the http element charset. </para><para><para> </para><simplesectsep></simplesectsep><para>Rodney Waldhoff </para><simplesectsep></simplesectsep><para> </para><simplesectsep></simplesectsep><para>Ortwin Glueck </para><simplesectsep></simplesectsep><para>Sean C. Sullivan </para><simplesectsep></simplesectsep><para> </para><simplesectsep></simplesectsep><para> </para><simplesectsep></simplesectsep><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/DigestScheme
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/DigestScheme", AccessFlags = 33)]
public partial class DigestScheme : global::Org.Apache.Http.Impl.Auth.RFC2617Scheme
/* scope: __dot42__ */
{
/// <summary>
/// <para>Default constructor for the digest authetication scheme. </para>
/// </summary>
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public DigestScheme() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Processes the Digest challenge.</para><para></para>
/// </summary>
/// <java-name>
/// processChallenge
/// </java-name>
[Dot42.DexImport("processChallenge", "(Lorg/apache/http/Header;)V", AccessFlags = 1)]
public override void ProcessChallenge(global::Org.Apache.Http.IHeader header) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Tests if the Digest authentication process has been completed.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>true</code> if Digest authorization has been processed, <code>false</code> otherwise. </para>
/// </returns>
/// <java-name>
/// isComplete
/// </java-name>
[Dot42.DexImport("isComplete", "()Z", AccessFlags = 1)]
public override bool IsComplete() /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Returns textual designation of the digest authentication scheme.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>digest</code> </para>
/// </returns>
/// <java-name>
/// getSchemeName
/// </java-name>
[Dot42.DexImport("getSchemeName", "()Ljava/lang/String;", AccessFlags = 1)]
public override string GetSchemeName() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Returns <code>false</code>. Digest authentication scheme is request based.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>false</code>. </para>
/// </returns>
/// <java-name>
/// isConnectionBased
/// </java-name>
[Dot42.DexImport("isConnectionBased", "()Z", AccessFlags = 1)]
public override bool IsConnectionBased() /* MethodBuilder.Create */
{
return default(bool);
}
/// <java-name>
/// overrideParamter
/// </java-name>
[Dot42.DexImport("overrideParamter", "(Ljava/lang/String;Ljava/lang/String;)V", AccessFlags = 1)]
public virtual void OverrideParamter(string name, string value) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Produces a digest authorization string for the given set of Credentials, method name and URI.</para><para></para>
/// </summary>
/// <returns>
/// <para>a digest authorization string </para>
/// </returns>
/// <java-name>
/// authenticate
/// </java-name>
[Dot42.DexImport("authenticate", "(Lorg/apache/http/auth/Credentials;Lorg/apache/http/HttpRequest;)Lorg/apache/http" +
"/Header;", AccessFlags = 1)]
public override global::Org.Apache.Http.IHeader Authenticate(global::Org.Apache.Http.Auth.ICredentials credentials, global::Org.Apache.Http.IHttpRequest request) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHeader);
}
/// <summary>
/// <para>Creates a random cnonce value based on the current time.</para><para></para>
/// </summary>
/// <returns>
/// <para>The cnonce value as String. </para>
/// </returns>
/// <java-name>
/// createCnonce
/// </java-name>
[Dot42.DexImport("createCnonce", "()Ljava/lang/String;", AccessFlags = 9)]
public static string CreateCnonce() /* MethodBuilder.Create */
{
return default(string);
}
/// <summary>
/// <para>Returns textual designation of the digest authentication scheme.</para><para></para>
/// </summary>
/// <returns>
/// <para><code>digest</code> </para>
/// </returns>
/// <java-name>
/// getSchemeName
/// </java-name>
public string SchemeName
{
[Dot42.DexImport("getSchemeName", "()Ljava/lang/String;", AccessFlags = 1)]
get{ return GetSchemeName(); }
}
}
/// <summary>
/// <para>Abstract NTLM authentication engine. The engine can be used to generate Type1 messages and Type3 messages in response to a Type2 challenge. </para><para>For details see </para><para><para> </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/NTLMEngine
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/NTLMEngine", AccessFlags = 1537)]
public partial interface INTLMEngine
/* scope: __dot42__ */
{
/// <summary>
/// <para>Generates a Type1 message given the domain and workstation.</para><para></para>
/// </summary>
/// <returns>
/// <para>Type1 message </para>
/// </returns>
/// <java-name>
/// generateType1Msg
/// </java-name>
[Dot42.DexImport("generateType1Msg", "(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;", AccessFlags = 1025)]
string GenerateType1Msg(string domain, string workstation) /* MethodBuilder.Create */ ;
/// <summary>
/// <para>Generates a Type3 message given the user credentials and the authentication challenge.</para><para></para>
/// </summary>
/// <returns>
/// <para>Type3 response. </para>
/// </returns>
/// <java-name>
/// generateType3Msg
/// </java-name>
[Dot42.DexImport("generateType3Msg", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/la" +
"ng/String;)Ljava/lang/String;", AccessFlags = 1025)]
string GenerateType3Msg(string username, string password, string domain, string workstation, string challenge) /* MethodBuilder.Create */ ;
}
/// <summary>
/// <para><para></para><para>4.0 </para></para>
/// </summary>
/// <java-name>
/// org/apache/http/impl/auth/DigestSchemeFactory
/// </java-name>
[Dot42.DexImport("org/apache/http/impl/auth/DigestSchemeFactory", AccessFlags = 33)]
public partial class DigestSchemeFactory : global::Org.Apache.Http.Auth.IAuthSchemeFactory
/* scope: __dot42__ */
{
[Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
public DigestSchemeFactory() /* MethodBuilder.Create */
{
}
/// <java-name>
/// newInstance
/// </java-name>
[Dot42.DexImport("newInstance", "(Lorg/apache/http/params/HttpParams;)Lorg/apache/http/auth/AuthScheme;", AccessFlags = 1)]
public virtual global::Org.Apache.Http.Auth.IAuthScheme NewInstance(global::Org.Apache.Http.Params.IHttpParams @params) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Auth.IAuthScheme);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using Xunit;
namespace System.Linq.Tests
{
public class SequenceEqualTests : EnumerableTests
{
[Fact]
public void SameResultsRepeatCallsIntQuery()
{
var q1 = from x1 in new int?[] { 2, 3, null, 2, null, 4, 5 }
select x1;
var q2 = from x2 in new int?[] { 1, 9, null, 4 }
select x2;
Assert.Equal(q1.SequenceEqual(q2), q1.SequenceEqual(q2));
}
[Fact]
public void SameResultsRepeatCallsStringQuery()
{
var q1 = from x1 in new[] { "AAA", string.Empty, "q", "C", "#", "!@#$%^", "0987654321", "Calling Twice" }
select x1;
var q2 = from x2 in new[] { "!@#$%^", "C", "AAA", "", "Calling Twice", "SoS" }
select x2;
Assert.Equal(q1.SequenceEqual(q2), q1.SequenceEqual(q2));
}
[Fact]
public void BothEmpty()
{
int[] first = { };
int[] second = { };
Assert.True(first.SequenceEqual(second));
Assert.True(FlipIsCollection(first).SequenceEqual(second));
Assert.True(first.SequenceEqual(FlipIsCollection(second)));
Assert.True(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void MismatchInMiddle()
{
int?[] first = { 1, 2, 3, 4 };
int?[] second = { 1, 2, 6, 4 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void NullComparer()
{
string[] first = { "Bob", "Tim", "Chris" };
string[] second = { "Bbo", "mTi", "rishC" };
Assert.False(first.SequenceEqual(second, null));
Assert.False(FlipIsCollection(first).SequenceEqual(second, null));
Assert.False(first.SequenceEqual(FlipIsCollection(second), null));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second), null));
}
[Fact]
public void CustomComparer()
{
string[] first = { "Bob", "Tim", "Chris" };
string[] second = { "Bbo", "mTi", "rishC" };
Assert.True(first.SequenceEqual(second, new AnagramEqualityComparer()));
Assert.True(FlipIsCollection(first).SequenceEqual(second, new AnagramEqualityComparer()));
Assert.True(first.SequenceEqual(FlipIsCollection(second), new AnagramEqualityComparer()));
Assert.True(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second), new AnagramEqualityComparer()));
}
[Fact]
public void RunOnce()
{
string[] first = { "Bob", "Tim", "Chris" };
string[] second = { "Bbo", "mTi", "rishC" };
Assert.True(first.RunOnce().SequenceEqual(second.RunOnce(), new AnagramEqualityComparer()));
}
[Fact]
public void BothSingleNullExplicitComparer()
{
string[] first = { null };
string[] second = { null };
Assert.True(first.SequenceEqual(second, StringComparer.Ordinal));
Assert.True(FlipIsCollection(first).SequenceEqual(second, StringComparer.Ordinal));
Assert.True(first.SequenceEqual(FlipIsCollection(second), StringComparer.Ordinal));
Assert.True(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second), StringComparer.Ordinal));
}
[Fact]
public void BothMatchIncludingNullElements()
{
int?[] first = { -6, null, 0, -4, 9, 10, 20 };
int?[] second = { -6, null, 0, -4, 9, 10, 20 };
Assert.True(first.SequenceEqual(second));
Assert.True(FlipIsCollection(first).SequenceEqual(second));
Assert.True(first.SequenceEqual(FlipIsCollection(second)));
Assert.True(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void EmptyWithNonEmpty()
{
int?[] first = { };
int?[] second = { 2, 3, 4 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void NonEmptyWithEmpty()
{
int?[] first = { 2, 3, 4 };
int?[] second = { };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void MismatchingSingletons()
{
int?[] first = { 2 };
int?[] second = { 4 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void MismatchOnFirst()
{
int?[] first = { 1, 2, 3, 4, 5 };
int?[] second = { 2, 2, 3, 4, 5 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void MismatchOnLast()
{
int?[] first = { 1, 2, 3, 4, 4 };
int?[] second = { 1, 2, 3, 4, 5 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void SecondLargerThanFirst()
{
int?[] first = { 1, 2, 3, 4 };
int?[] second = { 1, 2, 3, 4, 4 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void FirstLargerThanSecond()
{
int?[] first = { 1, 2, 3, 4, 4 };
int?[] second = { 1, 2, 3, 4 };
Assert.False(first.SequenceEqual(second));
Assert.False(FlipIsCollection(first).SequenceEqual(second));
Assert.False(first.SequenceEqual(FlipIsCollection(second)));
Assert.False(FlipIsCollection(first).SequenceEqual(FlipIsCollection(second)));
}
[Fact]
public void FirstSourceNull()
{
int[] first = null;
int[] second = { };
AssertExtensions.Throws<ArgumentNullException>("first", () => first.SequenceEqual(second));
}
[Fact]
public void SecondSourceNull()
{
int[] first = { };
int[] second = null;
AssertExtensions.Throws<ArgumentNullException>("second", () => first.SequenceEqual(second));
}
}
}
| |
// ***********************************************************************
// Assembly : MetroFramework
// Author : velez
// Created : 12-09-2015
//
// Last Modified By : velez
// Last Modified On : 12-08-2015
// ***********************************************************************
// <copyright file="MetroMessageBox.cs" company="MetroFrameworkAssembly.Company">
// MetroFrameworkAssembly.Copyright
// </copyright>
// <summary></summary>
// ***********************************************************************
/**
* MetroFramework - Modern UI for WinForms
*
* The MIT License (MIT)
* Copyright (c) 2011 Sven Walter, http://github.com/viperneo
*
* Copyright (c) 2013 Dennis Magno, http://github.com/dennismagno
*
* Copyright (c) 2015 Hector Velez, http://github.com/barecool
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in the
* Software without restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the
* following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
* PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Drawing;
using System.Media;
using System.Threading;
using System.Windows.Forms;
#pragma warning disable 1587
/// <summary>
/// The MessageBox namespace.
/// </summary>
#pragma warning restore 1587
namespace MetroFramework.MessageBox
{
/// <summary>
/// Metro-styled message notification.
/// </summary>
public static class MetroMessageBox
{
/// <summary>
/// The default height
/// </summary>
private const int defaultHeight = 211;
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="height">The height.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, int height = defaultHeight)
{ return Show(owner, message, "Notification", height); }
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="title">The title.</param>
/// <param name="height">The height.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, String title, int height = defaultHeight)
{ return Show(owner, message, title, MessageBoxButtons.OK, height); }
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="title">The title.</param>
/// <param name="buttons">The buttons.</param>
/// <param name="height">The height.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, String title, MessageBoxButtons buttons, int height = defaultHeight)
{ return Show(owner, message, title, buttons, MessageBoxIcon.None, height); }
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="title">The title.</param>
/// <param name="buttons">The buttons.</param>
/// <param name="icon">The icon.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, String title, MessageBoxButtons buttons, MessageBoxIcon icon)
{ return Show(owner, message, title, buttons, icon, MessageBoxDefaultButton.Button1, 211); }
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="title">The title.</param>
/// <param name="buttons">The buttons.</param>
/// <param name="icon">The icon.</param>
/// <param name="height">The height.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, String title, MessageBoxButtons buttons, MessageBoxIcon icon, int height = defaultHeight)
{
string refstring= null;
return Show(owner, message, title, buttons, icon, MessageBoxDefaultButton.Button1, ref refstring, height);
}
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="title">The title.</param>
/// <param name="buttons">The buttons.</param>
/// <param name="icon">The icon.</param>
/// <param name="defaultbutton">The defaultbutton.</param>
/// <param name="height">The height.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, String title, MessageBoxButtons buttons, MessageBoxIcon icon, MessageBoxDefaultButton defaultbutton, int height = defaultHeight)
{
string refstring = null;
return Show(owner, message, title, buttons, icon, defaultbutton, ref refstring, height);
}
/// <summary>
/// Shows the specified owner.
/// </summary>
/// <param name="owner">The owner.</param>
/// <param name="message">The message.</param>
/// <param name="title">The title.</param>
/// <param name="buttons">The buttons.</param>
/// <param name="icon">The icon.</param>
/// <param name="defaultbutton">The defaultbutton.</param>
/// <param name="inputValue">The input value.</param>
/// <param name="height">The height.</param>
/// <returns>DialogResult.</returns>
public static DialogResult Show(IWin32Window owner, String message, String title, MessageBoxButtons buttons, MessageBoxIcon icon, MessageBoxDefaultButton defaultbutton, ref string inputValue, int height = defaultHeight)
{
DialogResult _result = DialogResult.None;
if (owner != null)
{
Form _owner = (owner as Form == null) ? ((UserControl)owner).ParentForm : (Form)owner;
//int _minWidth = 500;
//int _minHeight = 350;
//if (_owner.Size.Width < _minWidth ||
// _owner.Size.Height < _minHeight)
//{
// if (_owner.Size.Width < _minWidth && _owner.Size.Height < _minHeight) {
// _owner.Size = new Size(_minWidth, _minHeight);
// }
// else
// {
// if (_owner.Size.Width < _minWidth) _owner.Size = new Size(_minWidth, _owner.Size.Height);
// else _owner.Size = new Size(_owner.Size.Width, _minHeight);
// }
// int x = Convert.ToInt32(Math.Ceiling((decimal)(Screen.PrimaryScreen.WorkingArea.Size.Width / 2) - (_owner.Size.Width / 2)));
// int y = Convert.ToInt32(Math.Ceiling((decimal)(Screen.PrimaryScreen.WorkingArea.Size.Height / 2) - (_owner.Size.Height / 2)));
// _owner.Location = new Point(x, y);
//}
switch (icon)
{
case MessageBoxIcon.Error:
SystemSounds.Hand.Play(); break;
case MessageBoxIcon.Exclamation:
SystemSounds.Exclamation.Play(); break;
case MessageBoxIcon.Question:
SystemSounds.Beep.Play(); break;
default:
SystemSounds.Asterisk.Play(); break;
}
MetroMessageBoxControl _control = new MetroMessageBoxControl();
_control.BackColor = _owner.BackColor;
_control.Properties.Buttons = buttons;
_control.Properties.DefaultButton = defaultbutton;
_control.Properties.Icon = icon;
_control.Properties.Message = message;
_control.Properties.Title = title;
_control.Padding = new Padding(0, 0, 0, 0);
_control.ControlBox = false;
_control.ShowInTaskbar = false;
//_owner.Controls.Add(_control);
//if (_owner is IMetroForm)
//{
// //if (((MetroForm)_owner).DisplayHeader)
// //{
// // _offset += 30;
// //}
// _control.Theme = ((MetroForm)_owner).Theme;
// _control.Style = ((MetroForm)_owner).Style;
//}
_control.Properties.InputBox = (inputValue != null);
_control.Size = new Size(_owner.Size.Width, height);
_control.Location = new Point(_owner.Location.X, _owner.Location.Y + (_owner.Height - _control.Height) / 2);
_control.ArrangeApperance();
int _overlaySizes = Convert.ToInt32(Math.Floor(_control.Size.Height * 0.28));
//_control.OverlayPanelTop.Size = new Size(_control.Size.Width, _overlaySizes - 30);
//_control.OverlayPanelBottom.Size = new Size(_control.Size.Width, _overlaySizes);
_control.TopMost = true;
_control.ShowDialog();
_control.BringToFront();
//_control.SetFocus();
Action<MetroMessageBoxControl> _delegate = new Action<MetroMessageBoxControl>(ModalState);
IAsyncResult _asyncresult = _delegate.BeginInvoke(_control, null, _delegate);
bool _cancelled = false;
try
{
while (!_asyncresult.IsCompleted)
{ Thread.Sleep(1); Application.DoEvents(); }
}
catch
{
_cancelled = true;
if (!_asyncresult.IsCompleted)
{
try { _asyncresult = null; }
catch { }
}
_delegate = null;
}
if (!_cancelled)
{
_result = _control.Result;
inputValue = _control.InputBoxText;
//_owner.Controls.Remove(_control);
_control.Dispose(); _control = null;
}
}
return _result;
}
/// <summary>
/// Modals the state.
/// </summary>
/// <param name="control">The control.</param>
private static void ModalState(MetroMessageBoxControl control)
{
while (control.Visible)
{ }
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Data;
using System.Reactive.Disposables;
using FluentAssertions;
using Microsoft.Its.Recipes;
using Moq;
using NUnit.Framework;
using Assert = NUnit.Framework.Assert;
namespace Microsoft.Its.Domain.Tests
{
[TestFixture]
public class UnitOfWorkTests
{
[SetUp]
public void SetUp()
{
UnitOfWork<IDisposable>.ConfigureDefault();
UnitOfWork<Bag>.ConfigureDefault();
UnitOfWork<string>.ConfigureDefault();
}
[Test]
public void Default_configuration_returns_UnitOfWork_having_null_Subject()
{
using (new UnitOfWork<IDisposable>())
{
UnitOfWork<IDisposable>.Current.Subject.Should().BeNull();
}
}
[Test]
public void Default_configuration_Commit_does_not_throw()
{
using (var work = new UnitOfWork<IDisposable>())
{
work.VoteCommit();
}
}
[Test]
public void Default_configuration_Reject_does_not_throw()
{
using (new UnitOfWork<IDisposable>())
{
}
}
[Test]
public void Nested_unit_of_work_does_not_call_Commit_and_outer_unit_of_work_does()
{
var disposed = false;
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => disposed = true));
using (var outer = new UnitOfWork<IDisposable>())
{
using (var inner = new UnitOfWork<IDisposable>())
{
inner.VoteCommit();
}
Assert.False(disposed);
outer.VoteCommit();
}
Assert.True(disposed);
}
[Test]
public void When_nested_unit_of_work_does_not_commit_resource_then_outer_unit_of_work_closing_does_not_not_commit_it_either()
{
var committed = false;
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
UnitOfWork<IDisposable>.Commit = disposable => { committed = true; };
using (var outer = new UnitOfWork<IDisposable>())
{
using (new UnitOfWork<IDisposable>())
{
}
outer.VoteCommit();
}
Assert.False(committed);
}
[Test]
public void Nested_unit_of_work_does_not_recreate_resource_when_factory_is_specified_in_both_units_of_work()
{
var factoryCalls = 0;
UnitOfWork<IDisposable>.Create = (_, setSubject) =>
{
factoryCalls++;
setSubject(Disposable.Create(() => { }));
};
using (new UnitOfWork<IDisposable>())
{
Assert.That(factoryCalls, Is.EqualTo(1));
using (new UnitOfWork<IDisposable>())
{
Assert.That(factoryCalls, Is.EqualTo(1));
}
}
}
[Test]
public void Nested_unit_of_work_does_not_recreate_resource_when_factory_is_specified_only_in_outer_units_of_work()
{
var factoryCalls = 0;
UnitOfWork<IDisposable>.Create = (_, setSubject) =>
{
factoryCalls++;
setSubject(Disposable.Create(() => { }));
};
using (new UnitOfWork<IDisposable>())
{
Assert.That(factoryCalls, Is.EqualTo(1));
using (new UnitOfWork<IDisposable>())
{
Assert.That(factoryCalls, Is.EqualTo(1));
}
}
}
[Test]
public void When_UnitOfWork_is_disposed_more_than_once_it_does_not_recommit_work()
{
var commitCount = 0;
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
UnitOfWork<IDisposable>.Commit = disposable => { commitCount++; };
var work = new UnitOfWork<IDisposable>();
work.VoteCommit();
work.Dispose();
work.Dispose();
commitCount.Should().Be(1);
}
[Test]
public void When_UnitOfWork_is_disposed_more_than_once_it_does_not_rereject_work()
{
var rejectCount = 0;
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
UnitOfWork<IDisposable>.Reject = disposable => { rejectCount++; };
var work = new UnitOfWork<IDisposable>();
work.Dispose();
work.Dispose();
rejectCount.Should().Be(1);
}
[Test]
public void Create_is_not_called_more_than_once_during_unit_of_work_nesting()
{
int callCount = 0;
UnitOfWork<IDbConnection>.Create = (_, setSubject) =>
{
callCount++;
setSubject(new Mock<IDbConnection>().Object);
};
using (new UnitOfWork<IDbConnection>())
using (new UnitOfWork<IDbConnection>())
using (new UnitOfWork<IDbConnection>())
{
}
Assert.That(callCount, Is.EqualTo(1));
}
[Test]
public void Commit_is_not_called_more_than_once_when_nested_units_of_work_exit()
{
int callCount = 0;
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Empty);
UnitOfWork<IDisposable>.Commit = _ => { callCount++; };
using (var one = new UnitOfWork<IDisposable>())
{
using (var two = new UnitOfWork<IDisposable>())
{
using (var three = new UnitOfWork<IDisposable>())
{
one.VoteCommit();
two.VoteCommit();
three.VoteCommit();
}
}
}
callCount.Should().Be(1);
}
[Test]
public void Current_unit_of_work_returns_expected_instance_when_there_is_an_active_unit_of_work()
{
UnitOfWork<Bag>.Create = (_, setSubject) => setSubject(new Bag());
using (var work = new UnitOfWork<Bag>())
{
Assert.That(UnitOfWork<Bag>.Current.Subject, Is.SameAs(work.Subject));
}
}
[Test]
public void Current_unit_of_work_returns_null_when_there_is_no_active_unit_of_work()
{
Assert.That(UnitOfWork<Bag>.Current, Is.Null);
}
[Test]
public void When_UnitOfWork_is_committed_then_static_Committed_event_is_raised()
{
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
bool committedRaised = false;
UnitOfWork<IDisposable>.Committed += (sender, disposable) => { committedRaised = true; };
using (var work = new UnitOfWork<IDisposable>())
{
work.VoteCommit();
}
committedRaised.Should().BeTrue();
}
[Test]
public void When_UnitOfWork_is_rejected_then_static_Committed_event_is_not_raised()
{
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
bool committedRaised = false;
UnitOfWork<IDisposable>.Committed += (sender, disposable) => { committedRaised = true; };
using (new UnitOfWork<IDisposable>())
{
}
committedRaised.Should().BeFalse();
}
[Test]
public void When_UnitOfWork_is_rejected_due_to_exception_then_Exception_is_set()
{
using (var one = new UnitOfWork<IDisposable>())
{
one.RejectDueTo(new Exception("nope."));
one.Exception.Should().NotBeNull();
one.Exception.Message.Should().Be("nope.");
}
}
[Test]
public void When_nested_UnitOfWork_is_rejected_due_to_exception_then_Exception_is_set_on_outer()
{
using (var one = new UnitOfWork<IDisposable>())
{
using (var two = new UnitOfWork<IDisposable>())
{
using (var three = new UnitOfWork<IDisposable>())
{
three.RejectDueTo(new Exception("nope."));
}
two.VoteCommit();
}
one.Exception.Should().NotBeNull();
one.Exception.Message.Should().Be("nope.");
}
}
[Test]
public void When_UnitOfWork_is_committed_then_static_Rejected_event_is_not_raised()
{
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
bool rejectedRaised = false;
UnitOfWork<IDisposable>.Rejected += (sender, disposable) => { rejectedRaised = true; };
using (var work = new UnitOfWork<IDisposable>())
{
work.VoteCommit();
}
rejectedRaised.Should().BeFalse();
}
[Test]
public void When_UnitOfWork_is_rejected_then_static_Rejected_event_is_raised()
{
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => { }));
bool rejectedRaised = false;
UnitOfWork<IDisposable>.Rejected += (sender, disposable) => { rejectedRaised = true; };
using (var work = new UnitOfWork<IDisposable>())
{
}
rejectedRaised.Should().BeTrue();
}
[Test]
public void Create_can_be_used_to_specify_resources_available_to_the_unit_of_work_and_nested_units_of_work()
{
var obj = new object();
UnitOfWork<string>.Create = (work, setSubject) =>
{
work.AddResource(obj);
setSubject("hello");
};
using (var outer = new UnitOfWork<string>())
{
outer.Resource<object>().Should().BeSameAs(obj);
using (var inner = new UnitOfWork<string>())
{
inner.Resource<object>().Should().BeSameAs(obj);
}
}
}
[Test]
public void Disposable_Subject_is_disposed_by_default()
{
var disposed = false;
UnitOfWork<IDisposable>.Create = (_, setSubject) => setSubject(Disposable.Create(() => disposed = true));
using (new UnitOfWork<IDisposable>())
{
}
disposed.Should().BeTrue();
}
[Test]
public void Disposable_resources_are_disposed_by_default()
{
var disposed = false;
UnitOfWork<string>.Create = (work, setSubject) =>
{
work.AddResource(Disposable.Create(() => disposed = true));
setSubject("hello");
};
using (new UnitOfWork<string>())
{
}
disposed.Should().BeTrue();
}
[Test]
public void Disposable_resources_can_be_flagged_to_not_be_disposed()
{
var disposed = false;
UnitOfWork<string>.Create = (work, setSubject) =>
{
work.AddResource(Disposable.Create(() => disposed = true), dispose: false);
setSubject("hello");
};
using (new UnitOfWork<string>())
{
}
disposed.Should().BeFalse();
}
[Test]
public void When_Commit_throws_then_unit_of_work_is_rejected()
{
var rejectCalled = false;
UnitOfWork<string>.Create = (work, setSubject) => setSubject(Any.String());
UnitOfWork<string>.Commit = work => { throw new Exception("BOOM!"); };
UnitOfWork<string>.Reject = work => rejectCalled = true;
using (var work = new UnitOfWork<string>())
{
work.VoteCommit();
}
rejectCalled.Should().BeTrue();
}
[Test]
public void When_commit_and_reject_actions_are_specified_in_the_constructor_then_commit_is_only_called_when_the_outer_unit_of_work_is_committed()
{
var disposable = new BooleanDisposable();
var rejectCount = 0;
var commitCount = 0;
Func<UnitOfWork<BooleanDisposable>> create = () =>
new UnitOfWork<BooleanDisposable>(() => disposable,
reject: d => rejectCount++,
commit: d => commitCount++);
using (var outer = create())
{
using (var inner = create())
{
inner.VoteCommit();
}
disposable.IsDisposed.Should().BeFalse();
outer.VoteCommit();
}
commitCount.Should().Be(1);
rejectCount.Should().Be(0);
disposable.IsDisposed.Should().BeTrue();
}
[Test]
public void When_commit_and_reject_actions_are_specified_in_the_constructor_then_reject_is_only_called_when_the_outer_unit_of_work_is_committed()
{
var disposable = new BooleanDisposable();
var rejectCount = 0;
var commitCount = 0;
Func<UnitOfWork<BooleanDisposable>> create = () =>
new UnitOfWork<BooleanDisposable>(() => disposable,
reject: d => rejectCount++,
commit: d => commitCount++);
using (var outer = create())
{
using (create())
{
}
disposable.IsDisposed.Should().BeFalse();
rejectCount.Should().Be(1);
outer.VoteCommit();
}
commitCount.Should().Be(0);
rejectCount.Should().Be(1);
disposable.IsDisposed.Should().BeTrue();
}
private class Bag : HashSet<string>, IDisposable
{
public void Dispose()
{
}
}
}
}
| |
// This file is part of Wintermute Engine
// For conditions of distribution and use, see copyright notice in license.txt
// http://dead-code.org/redir.php?target=wme
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using System.Drawing;
namespace DeadCode.WME.Global
{
public class SettingsNode
{
public string Name;
private string Value = "";
//////////////////////////////////////////////////////////////////////////
protected SettingsNode()
{
this.Name = "";
}
//////////////////////////////////////////////////////////////////////////
public SettingsNode(string Name)
{
this.Name = Name;
}
//////////////////////////////////////////////////////////////////////////
public SettingsNode(string Name, string Value)
{
this.Name = Name;
this.Value = Value;
}
//////////////////////////////////////////////////////////////////////////
public override string ToString()
{
if (Children.Count > 0) return Name + "[" + Children.Count.ToString() + "]";
else return Name + " = " + GetString();
}
//////////////////////////////////////////////////////////////////////////
public void Clear()
{
Value = "";
Children.Clear();
}
//////////////////////////////////////////////////////////////////////////
public void SetValue(object Val)
{
Value = Val.ToString();
}
//////////////////////////////////////////////////////////////////////////
public void SetValue(string Path, object Val)
{
SettingsNode Node = GetNode(Path, false, true);
if (Node != null) Node.SetValue(Val);
}
//////////////////////////////////////////////////////////////////////////
public void SetValue(Color Val)
{
Value = Val.R.ToString() + "," + Val.G.ToString() + "," + Val.B.ToString() + "," + Val.A.ToString();
}
//////////////////////////////////////////////////////////////////////////
public void SetValue(string Path, Color Val)
{
SettingsNode Node = GetNode(Path, false, true);
if (Node != null) Node.SetValue(Val);
}
//////////////////////////////////////////////////////////////////////////
public string GetString()
{
return Value;
}
//////////////////////////////////////////////////////////////////////////
public string GetString(string Path, string InitValue)
{
SettingsNode Node = GetNode(Path, false, false);
if (Node != null) return Node.GetString();
else return InitValue;
}
//////////////////////////////////////////////////////////////////////////
public string GetString(string Path)
{
return GetString(Path, "");
}
//////////////////////////////////////////////////////////////////////////
public int GetInt()
{
int Ret;
if (int.TryParse(Value, out Ret)) return Ret;
else return 0;
}
//////////////////////////////////////////////////////////////////////////
public int GetInt(string Path, int InitValue)
{
SettingsNode Node = GetNode(Path, false, false);
if (Node != null) return Node.GetInt();
else return InitValue;
}
//////////////////////////////////////////////////////////////////////////
public int GetInt(string Path)
{
return GetInt(Path, 0);
}
//////////////////////////////////////////////////////////////////////////
public bool GetBool()
{
try
{
return Convert.ToBoolean(Value);
}
catch
{
return false;
}
}
//////////////////////////////////////////////////////////////////////////
public bool GetBool(string Path, bool InitValue)
{
SettingsNode Node = GetNode(Path, false, false);
if (Node != null) return Node.GetBool();
else return InitValue;
}
//////////////////////////////////////////////////////////////////////////
public bool GetBool(string Path)
{
return GetBool(Path, false);
}
//////////////////////////////////////////////////////////////////////////
public Color GetColor()
{
try
{
string[] RGBA = Value.Split(new char[] { ',' });
int R = 0, G = 0, B = 0, A = 0;
if (RGBA.Length > 0) R = int.Parse(RGBA[0]);
if (RGBA.Length > 1) G = int.Parse(RGBA[1]);
if (RGBA.Length > 2) B = int.Parse(RGBA[2]);
if (RGBA.Length > 3) A = int.Parse(RGBA[3]);
return Color.FromArgb(A, R, G, B);
}
catch
{
return Color.Black;
}
}
//////////////////////////////////////////////////////////////////////////
public Color GetColor(string Path, Color InitValue)
{
SettingsNode Node = GetNode(Path, false, false);
if (Node != null) return Node.GetColor();
else return InitValue;
}
//////////////////////////////////////////////////////////////////////////
public Color GetColor(string Path)
{
return GetColor(Path, Color.Black);
}
//////////////////////////////////////////////////////////////////////////
private List<SettingsNode> _Children;
public List<SettingsNode> Children
{
get
{
if (_Children == null) _Children = new List<SettingsNode>();
return _Children;
}
}
//////////////////////////////////////////////////////////////////////////
public bool LoadFromXmlNode(XmlNode Node)
{
try
{
this.Name = Node.Name;
XmlNode ChildNode = Node.FirstChild;
while (ChildNode != null)
{
if (ChildNode is XmlText)
{
SetValue(ChildNode.Value);
}
else if (ChildNode is XmlElement)
{
SettingsNode SetNode = new SettingsNode();
if (SetNode.LoadFromXmlNode(ChildNode)) Children.Add(SetNode);
}
ChildNode = ChildNode.NextSibling;
}
return true;
}
catch
{
return false;
}
}
//////////////////////////////////////////////////////////////////////////
protected XmlNode SaveToXmlNode(XmlDocument Doc)
{
XmlNode Node = Doc.CreateElement(Name);
if (Children.Count > 0)
{
foreach(SettingsNode SetNode in Children)
{
XmlNode ChildNode = SetNode.SaveToXmlNode(Doc);
if (ChildNode != null) Node.AppendChild(ChildNode);
}
}
else
{
Node.AppendChild(Doc.CreateTextNode(GetString()));
}
return Node;
}
//////////////////////////////////////////////////////////////////////////
protected SettingsNode GetChildByName(string Name, bool CaseSensitive)
{
foreach(SettingsNode Node in Children)
{
if (string.Compare(Node.Name, Name, !CaseSensitive)==0) return Node;
}
return null;
}
//////////////////////////////////////////////////////////////////////////
protected SettingsNode GetChildByName(string Name)
{
return GetChildByName(Name, false);
}
//////////////////////////////////////////////////////////////////////////
public SettingsNode GetNode(string Path, bool CaseSensitive, bool CreateIfDoesntExist)
{
string[] PathSplit = Path.Split(new char[] { '\\', '/' });
SettingsNode Child = this;
for(int i=0; i<PathSplit.Length; i++)
{
string Name = PathSplit[i];
SettingsNode NewChild = Child.GetChildByName(Name, CaseSensitive);
if (NewChild == null && CreateIfDoesntExist)
{
NewChild = new SettingsNode(Name);
Child.Children.Add(NewChild);
}
Child = NewChild;
if (Child == null) break;
}
return Child;
}
//////////////////////////////////////////////////////////////////////////
public SettingsNode GetNode(string Path, bool CaseSensitive)
{
return GetNode(Path, CaseSensitive, false);
}
//////////////////////////////////////////////////////////////////////////
public SettingsNode GetNode(string Path)
{
return GetNode(Path, false);
}
}
}
| |
using psm.display;
using Sce.PlayStation.Core.Graphics;
using System.Collections.Generic;
using System;
namespace psm.display
{
public class RendererUniversal : Renderer
{
private ShaderProgram program;
protected static ShaderProgram shaderUniversal;
protected static ShaderProgram shaderColored;
private VertexBuffer vertexBuffer;
private static Texture2D texture;
private static Dictionary<string, SpriteData> sprites;
private static int sheetW;
private static int sheetH;
private static SpriteData white;
private System.Collections.Generic.List<float> verts;
private System.Collections.Generic.List<float> colors;
private System.Collections.Generic.List<float> uvs;
public RendererUniversal (Sprite sprite) : base(sprite)
{
verts = new System.Collections.Generic.List<float>();
colors = new System.Collections.Generic.List<float>();
uvs = new System.Collections.Generic.List<float>();
}
public static void init() {
if (shaderUniversal == null){
shaderUniversal = new ShaderProgram("/Application/shaders/Universal.cgx");
shaderUniversal.SetAttributeBinding(0, "a_Position");
shaderUniversal.SetAttributeBinding(1, "a_VertexColor");
shaderUniversal.SetAttributeBinding(2, "a_TexCoord");
shaderUniversal.SetUniformBinding(0, "u_SceneMatrix");
shaderUniversal.SetUniformBinding(1, "u_ScreenMatrix");
shaderUniversal.SetUniformBinding(2, "u_Alpha");
texture = new Texture2D("/Application/assets/texturepack/rymdkapsel-hd.png", false);
texture.SetFilter(TextureFilterMode.Disabled);
texture.SetWrap(TextureWrapMode.ClampToEdge);
texture.SetMaxAnisotropy(0);
}
if (shaderColored == null){
shaderColored = new ShaderProgram("/Application/shaders/Colored.cgx");
shaderColored.SetAttributeBinding(0, "a_Position");
shaderColored.SetAttributeBinding(1, "a_VertexColor");
shaderColored.SetUniformBinding(0, "u_SceneMatrix");
shaderColored.SetUniformBinding(1, "u_ScreenMatrix");
shaderColored.SetUniformBinding(2, "u_Alpha");
}
}
override public void clearGraphics(){
verts.Clear();
colors.Clear();
uvs.Clear();
dirtyBuffer = true;
}
override public void drawFilledRect(int color, double alpha, double x, double y, double width, double height){
verts.Add((float) x); verts.Add((float) y); // 0
verts.Add((float) (x + width)); verts.Add((float) y); // 1
verts.Add((float) x); verts.Add((float) (y + height)); // 2
verts.Add((float) (x + width)); verts.Add((float) (y + height)); // 3
verts.Add((float) (x + width)); verts.Add((float) y); // 1
verts.Add((float) x); verts.Add((float) (y + height)); // 2
float r = ((color >> 16) & 255) / 255f;
float g = ((color >> 8) & 255) / 255f;
float b = (color & 255) / 255f;
float a = (float) alpha;
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
dirtyBuffer = true;
}
override public void drawPolygon(int color, double alpha, Array<double> vertices) {
float r = ((color >> 16) & 255) / 255f;
float g = ((color >> 8) & 255) / 255f;
float b = (color & 255) / 255f;
float a = (float) alpha;
for (int i = 4; i < vertices.length; i+=2){
verts.Add((float) vertices[i - 4]); verts.Add((float) vertices[i - 4 + 1]);
verts.Add((float) vertices[i - 2]); verts.Add((float) vertices[i - 2 + 1]);
verts.Add((float) vertices[i - 0]); verts.Add((float) vertices[i - 0 + 1]);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
colors.Add(r); colors.Add(g); colors.Add(b); colors.Add(a);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
uvs.Add((float) (white.x + white.width / 2) / sheetW); uvs.Add((float) (white.y + white.height / 2) / sheetH);
}
dirtyBuffer = true;
}
override protected void drawTexturedRectOffset(string texture, double x, double y, double width, double height, double offsetX) {
SpriteData data;
sprites.TryGetValue(texture, out data);
if (width == -1) width = data.width;
if (height == -1) height = data.height;
verts.Add((float) x); verts.Add((float) y); // 0
verts.Add((float) (x + width)); verts.Add((float) y); // 1
verts.Add((float) x); verts.Add((float) (y + height)); // 2
verts.Add((float) (x + width)); verts.Add((float) (y + height)); // 3
verts.Add((float) (x + width)); verts.Add((float) y); // 1
verts.Add((float) x); verts.Add((float) (y + height)); // 2
colors.Add(1); colors.Add(1); colors.Add(1); colors.Add(1);
colors.Add(1); colors.Add(1); colors.Add(1); colors.Add(1);
colors.Add(1); colors.Add(1); colors.Add(1); colors.Add(1);
colors.Add(1); colors.Add(1); colors.Add(1); colors.Add(1);
colors.Add(1); colors.Add(1); colors.Add(1); colors.Add(1);
colors.Add(1); colors.Add(1); colors.Add(1); colors.Add(1);
float doubled = data.doubled ? 2 : 1;
offsetX *= doubled;
width *= doubled;
height *= doubled;
uvs.Add((float) (data.x + offsetX) / sheetW); uvs.Add((float) (data.y) / sheetH);
uvs.Add((float) (data.x + width + offsetX) / sheetW); uvs.Add((float) (data.y) / sheetH);
uvs.Add((float) (data.x + offsetX) / sheetW); uvs.Add((float) (data.y + height) / sheetH);
uvs.Add((float) (data.x + width + offsetX) / sheetW); uvs.Add((float) (data.y + height) / sheetH);
uvs.Add((float) (data.x + width + offsetX) / sheetW); uvs.Add((float) (data.y) / sheetH);
uvs.Add((float) (data.x + offsetX) / sheetW); uvs.Add((float) (data.y + height) / sheetH);
dirtyBuffer = true;
}
public override void drawFrame (string texture, double x, double y, int frameX)
{
SpriteData data;
sprites.TryGetValue(texture, out data);
drawTexturedRectOffset(texture, x + data.offsetX, y + data.offsetY, data.frameW, data.frameH, data.frameW * frameX);
}
override public void validateBuffer(){
dirtyBuffer = false;
if (vertexBuffer != null){
vertexBuffer.Dispose();
vertexBuffer = null;
}
if (verts.Count == 0) return;
//if (renderable.usesTextures) {
vertexBuffer = new VertexBuffer(verts.Count / 2, VertexFormat.Float2, VertexFormat.Float4, VertexFormat.Float2);
vertexBuffer.SetVertices(0, verts.ToArray());
vertexBuffer.SetVertices(1, colors.ToArray());
vertexBuffer.SetVertices(2, uvs.ToArray());
program = shaderUniversal;
/*} else {
vertexBuffer = new VertexBuffer(verts.Count / 2, VertexFormat.Float2, VertexFormat.Float4);
vertexBuffer.SetVertices(0, verts.ToArray());
vertexBuffer.SetVertices(1, colors.ToArray());
program = shaderColored;
}*/
//Console.WriteLine("buffer validated " + renderable.numPolyVerts + " commands");
}
override public void render(GraphicsContext context){
if (dirtyBuffer) {
validateBuffer();
}
if (vertexBuffer == null) return;
program.SetUniformValue(0, ref sceneTransform);
program.SetUniformValue(1, ref screenMatrix);
program.SetUniformValue(2, (float) sceneAlpha);
context.SetShaderProgram(program);
context.SetVertexBuffer(0, vertexBuffer);
if (program == shaderUniversal) context.SetTexture(0, texture);
context.DrawArrays(DrawMode.Triangles, 0, vertexBuffer.VertexCount);
if (program == shaderUniversal) context.SetTexture(0, null);
}
public static void defineSprite(string name, int x, int y, int width, int height, int frameW, int frameH, int offsetX, int offsetY, int sheetW, int sheetH, bool doubled) {
if (sprites == null) sprites = new Dictionary<string, SpriteData>();
SpriteData data = new SpriteData(x, y, width, height, frameW, frameH, offsetX, offsetY, doubled);
sprites.Add(name, data);
RendererUniversal.sheetW = sheetW;
RendererUniversal.sheetH = sheetH;
if (name == "white") white = data;
}
}
}
struct SpriteData {
public int x;
public int y;
public int width;
public int height;
public int frameW;
public int frameH;
public int offsetX;
public int offsetY;
public bool doubled;
public SpriteData(int x, int y, int width, int height, int frameW, int frameH, int offsetX, int offsetY, bool doubled){
this.x = x;
this.y = y;
this.width = width;
this.height = height;
this.frameW = frameW;
this.frameH = frameH;
this.offsetX = offsetX;
this.offsetY = offsetY;
this.doubled = doubled;
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Xml;
using System.Xml.Serialization;
using OpenMetaverse;
namespace OpenSim.Framework
{
public class LandAccessEntry
{
public UUID AgentID;
public int Expires;
public AccessList Flags;
}
/// <summary>
/// Details of a Parcel of land
/// </summary>
public class LandData
{
// use only one serializer to give the runtime a chance to
// optimize it (it won't do that if you use a new instance
// every time)
private static XmlSerializer serializer = new XmlSerializer(typeof(LandData));
private Vector3 _AABBMax = new Vector3();
private Vector3 _AABBMin = new Vector3();
private int _area = 0;
private uint _auctionID = 0; //Unemplemented. If set to 0, not being auctioned
private UUID _authBuyerID = UUID.Zero; //Unemplemented. Authorized Buyer's UUID
private ParcelCategory _category = ParcelCategory.None; //Unemplemented. Parcel's chosen category
private int _claimDate = 0;
private int _claimPrice = 0; //Unemplemented
private UUID _globalID = UUID.Zero;
private UUID _groupID = UUID.Zero;
private bool _isGroupOwned = false;
private byte[] _bitmap = new byte[512];
private string _description = String.Empty;
private uint _flags = (uint)ParcelFlags.AllowFly | (uint)ParcelFlags.AllowLandmark |
(uint)ParcelFlags.AllowAPrimitiveEntry |
(uint)ParcelFlags.AllowDeedToGroup | (uint)ParcelFlags.AllowTerraform |
(uint)ParcelFlags.CreateObjects | (uint)ParcelFlags.AllowOtherScripts |
(uint)ParcelFlags.SoundLocal | (uint)ParcelFlags.AllowVoiceChat;
private byte _landingType = 0;
private string _name = "Your Parcel";
private ParcelStatus _status = ParcelStatus.Leased;
private int _localID = 0;
private byte _mediaAutoScale = 0;
private UUID _mediaID = UUID.Zero;
private string _mediaURL = String.Empty;
private string _musicURL = String.Empty;
private UUID _ownerID = UUID.Zero;
private List<LandAccessEntry> _parcelAccessList = new List<LandAccessEntry>();
private float _passHours = 0;
private int _passPrice = 0;
private int _salePrice = 0; //Unemeplemented. Parcels price.
private int _simwideArea = 0;
private int _simwidePrims = 0;
private UUID _snapshotID = UUID.Zero;
private Vector3 _userLocation = new Vector3();
private Vector3 _userLookAt = new Vector3();
private int _otherCleanTime = 0;
private string _mediaType = "none/none";
private string _mediaDescription = "";
private int _mediaHeight = 0;
private int _mediaWidth = 0;
private bool _mediaLoop = false;
private bool _obscureMusic = false;
private bool _obscureMedia = false;
private float _dwell = 0;
/// <summary>
/// Traffic count of parcel
/// </summary>
[XmlIgnore]
public float Dwell
{
get
{
return _dwell;
}
set
{
_dwell = value;
}
}
/// <summary>
/// Whether to obscure parcel media URL
/// </summary>
[XmlIgnore]
public bool ObscureMedia
{
get
{
return _obscureMedia;
}
set
{
_obscureMedia = value;
}
}
/// <summary>
/// Whether to obscure parcel music URL
/// </summary>
[XmlIgnore]
public bool ObscureMusic
{
get
{
return _obscureMusic;
}
set
{
_obscureMusic = value;
}
}
/// <summary>
/// Whether to loop parcel media
/// </summary>
[XmlIgnore]
public bool MediaLoop
{
get
{
return _mediaLoop;
}
set
{
_mediaLoop = value;
}
}
/// <summary>
/// Height of parcel media render
/// </summary>
[XmlIgnore]
public int MediaHeight
{
get
{
return _mediaHeight;
}
set
{
_mediaHeight = value;
}
}
/// <summary>
/// Width of parcel media render
/// </summary>
[XmlIgnore]
public int MediaWidth
{
get
{
return _mediaWidth;
}
set
{
_mediaWidth = value;
}
}
/// <summary>
/// Upper corner of the AABB for the parcel
/// </summary>
[XmlIgnore]
public Vector3 AABBMax
{
get
{
return _AABBMax;
}
set
{
_AABBMax = value;
}
}
/// <summary>
/// Lower corner of the AABB for the parcel
/// </summary>
[XmlIgnore]
public Vector3 AABBMin
{
get
{
return _AABBMin;
}
set
{
_AABBMin = value;
}
}
/// <summary>
/// Area in meters^2 the parcel contains
/// </summary>
public int Area
{
get
{
return _area;
}
set
{
_area = value;
}
}
/// <summary>
/// ID of auction (3rd Party Integration) when parcel is being auctioned
/// </summary>
public uint AuctionID
{
get
{
return _auctionID;
}
set
{
_auctionID = value;
}
}
/// <summary>
/// UUID of authorized buyer of parcel. This is UUID.Zero if anyone can buy it.
/// </summary>
public UUID AuthBuyerID
{
get
{
return _authBuyerID;
}
set
{
_authBuyerID = value;
}
}
/// <summary>
/// Category of parcel. Used for classifying the parcel in classified listings
/// </summary>
public ParcelCategory Category
{
get
{
return _category;
}
set
{
_category = value;
}
}
/// <summary>
/// Date that the current owner purchased or claimed the parcel
/// </summary>
public int ClaimDate
{
get
{
return _claimDate;
}
set
{
_claimDate = value;
}
}
/// <summary>
/// The last price that the parcel was sold at
/// </summary>
public int ClaimPrice
{
get
{
return _claimPrice;
}
set
{
_claimPrice = value;
}
}
/// <summary>
/// Global ID for the parcel. (3rd Party Integration)
/// </summary>
public UUID GlobalID
{
get
{
return _globalID;
}
set
{
_globalID = value;
}
}
/// <summary>
/// Unique ID of the Group that owns
/// </summary>
public UUID GroupID
{
get
{
return _groupID;
}
set
{
_groupID = value;
}
}
/// <summary>
/// Returns true if the Land Parcel is owned by a group
/// </summary>
public bool IsGroupOwned
{
get
{
return _isGroupOwned;
}
set
{
_isGroupOwned = value;
}
}
/// <summary>
/// jp2 data for the image representative of the parcel in the parcel dialog
/// </summary>
public byte[] Bitmap
{
get
{
return _bitmap;
}
set
{
_bitmap = value;
}
}
/// <summary>
/// Parcel Description
/// </summary>
public string Description
{
get
{
return _description;
}
set
{
_description = value;
}
}
/// <summary>
/// Parcel settings. Access flags, Fly, NoPush, Voice, Scripts allowed, etc. ParcelFlags
/// </summary>
public uint Flags
{
get
{
return _flags;
}
set
{
_flags = value;
}
}
/// <summary>
/// Determines if people are able to teleport where they please on the parcel or if they
/// get constrainted to a specific point on teleport within the parcel
/// </summary>
public byte LandingType
{
get
{
return _landingType;
}
set
{
_landingType = value;
}
}
/// <summary>
/// Parcel Name
/// </summary>
public string Name
{
get
{
return _name;
}
set
{
_name = value;
}
}
/// <summary>
/// Status of Parcel, Leased, Abandoned, For Sale
/// </summary>
public ParcelStatus Status
{
get
{
return _status;
}
set
{
_status = value;
}
}
/// <summary>
/// Internal ID of the parcel. Sometimes the client will try to use this value
/// </summary>
public int LocalID
{
get
{
return _localID;
}
set
{
_localID = value;
}
}
/// <summary>
/// Determines if we scale the media based on the surface it's on
/// </summary>
public byte MediaAutoScale
{
get
{
return _mediaAutoScale;
}
set
{
_mediaAutoScale = value;
}
}
/// <summary>
/// Texture Guid to replace with the output of the media stream
/// </summary>
public UUID MediaID
{
get
{
return _mediaID;
}
set
{
_mediaID = value;
}
}
/// <summary>
/// URL to the media file to display
/// </summary>
public string MediaURL
{
get
{
return _mediaURL;
}
set
{
_mediaURL = value;
}
}
public string MediaType
{
get
{
return _mediaType;
}
set
{
_mediaType = value;
}
}
/// <summary>
/// URL to the shoutcast music stream to play on the parcel
/// </summary>
public string MusicURL
{
get
{
return _musicURL;
}
set
{
_musicURL = value;
}
}
/// <summary>
/// Owner Avatar or Group of the parcel. Naturally, all land masses must be
/// owned by someone
/// </summary>
public UUID OwnerID
{
get
{
return _ownerID;
}
set
{
_ownerID = value;
}
}
/// <summary>
/// List of access data for the parcel. User data, some bitflags, and a time
/// </summary>
public List<LandAccessEntry> ParcelAccessList
{
get
{
return _parcelAccessList;
}
set
{
_parcelAccessList = value;
}
}
/// <summary>
/// How long in hours a Pass to the parcel is given
/// </summary>
public float PassHours
{
get
{
return _passHours;
}
set
{
_passHours = value;
}
}
/// <summary>
/// Price to purchase a Pass to a restricted parcel
/// </summary>
public int PassPrice
{
get
{
return _passPrice;
}
set
{
_passPrice = value;
}
}
/// <summary>
/// When the parcel is being sold, this is the price to purchase the parcel
/// </summary>
public int SalePrice
{
get
{
return _salePrice;
}
set
{
_salePrice = value;
}
}
/// <summary>
/// Number of meters^2 in the Simulator
/// </summary>
[XmlIgnore]
public int SimwideArea
{
get
{
return _simwideArea;
}
set
{
_simwideArea = value;
}
}
/// <summary>
/// Number of SceneObjectPart in the Simulator
/// </summary>
[XmlIgnore]
public int SimwidePrims
{
get
{
return _simwidePrims;
}
set
{
_simwidePrims = value;
}
}
/// <summary>
/// ID of the snapshot used in the client parcel dialog of the parcel
/// </summary>
public UUID SnapshotID
{
get
{
return _snapshotID;
}
set
{
_snapshotID = value;
}
}
/// <summary>
/// When teleporting is restricted to a certain point, this is the location
/// that the user will be redirected to
/// </summary>
public Vector3 UserLocation
{
get
{
return _userLocation;
}
set
{
_userLocation = value;
}
}
/// <summary>
/// When teleporting is restricted to a certain point, this is the rotation
/// that the user will be positioned
/// </summary>
public Vector3 UserLookAt
{
get
{
return _userLookAt;
}
set
{
_userLookAt = value;
}
}
/// <summary>
/// Autoreturn number of minutes to return SceneObjectGroup that are owned by someone who doesn't own
/// the parcel and isn't set to the same 'group' as the parcel.
/// </summary>
public int OtherCleanTime
{
get
{
return _otherCleanTime;
}
set
{
_otherCleanTime = value;
}
}
/// <summary>
/// parcel media description
/// </summary>
public string MediaDescription
{
get
{
return _mediaDescription;
}
set
{
_mediaDescription = value;
}
}
public LandData()
{
_globalID = UUID.Random();
}
/// <summary>
/// Make a new copy of the land data
/// </summary>
/// <returns></returns>
public LandData Copy()
{
LandData landData = new LandData();
landData._AABBMax = _AABBMax;
landData._AABBMin = _AABBMin;
landData._area = _area;
landData._auctionID = _auctionID;
landData._authBuyerID = _authBuyerID;
landData._category = _category;
landData._claimDate = _claimDate;
landData._claimPrice = _claimPrice;
landData._globalID = _globalID;
landData._groupID = _groupID;
landData._isGroupOwned = _isGroupOwned;
landData._localID = _localID;
landData._landingType = _landingType;
landData._mediaAutoScale = _mediaAutoScale;
landData._mediaID = _mediaID;
landData._mediaURL = _mediaURL;
landData._musicURL = _musicURL;
landData._ownerID = _ownerID;
landData._bitmap = (byte[])_bitmap.Clone();
landData._description = _description;
landData._flags = _flags;
landData._name = _name;
landData._status = _status;
landData._passHours = _passHours;
landData._passPrice = _passPrice;
landData._salePrice = _salePrice;
landData._snapshotID = _snapshotID;
landData._userLocation = _userLocation;
landData._userLookAt = _userLookAt;
landData._otherCleanTime = _otherCleanTime;
landData._mediaType = _mediaType;
landData._mediaDescription = _mediaDescription;
landData._mediaWidth = _mediaWidth;
landData._mediaHeight = _mediaHeight;
landData._mediaLoop = _mediaLoop;
landData._obscureMusic = _obscureMusic;
landData._obscureMedia = _obscureMedia;
landData._simwideArea = _simwideArea;
landData._simwidePrims = _simwidePrims;
landData._dwell = _dwell;
landData._parcelAccessList.Clear();
foreach (LandAccessEntry entry in _parcelAccessList)
{
LandAccessEntry newEntry = new LandAccessEntry();
newEntry.AgentID = entry.AgentID;
newEntry.Flags = entry.Flags;
newEntry.Expires = entry.Expires;
landData._parcelAccessList.Add(newEntry);
}
return landData;
}
public void ToXml(XmlWriter xmlWriter)
{
serializer.Serialize(xmlWriter, this);
}
/// <summary>
/// Restore a LandData object from the serialized xml representation.
/// </summary>
/// <param name="xmlReader"></param>
/// <returns></returns>
public static LandData FromXml(XmlReader xmlReader)
{
LandData land = (LandData)serializer.Deserialize(xmlReader);
return land;
}
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="DataPortalTests.cs" company="Marimer LLC">
// Copyright (c) Marimer LLC. All rights reserved.
// Website: https://cslanet.com
// </copyright>
// <summary>no summary</summary>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Text;
using Csla.Test.DataBinding;
using System.Data;
using System.Data.SqlClient;
#if NUNIT
using NUnit.Framework;
using TestClass = NUnit.Framework.TestFixtureAttribute;
using TestInitialize = NUnit.Framework.SetUpAttribute;
using TestCleanup = NUnit.Framework.TearDownAttribute;
using TestMethod = NUnit.Framework.TestAttribute;
using TestSetup = NUnit.Framework.SetUpAttribute;
#elif MSTEST
using Microsoft.VisualStudio.TestTools.UnitTesting;
#endif
namespace Csla.Test.DataPortal
{
[TestClass()]
public class DataPortalTests
{
private static string CONNECTION_STRING = WellKnownValues.DataPortalTestDatabase;
public void ClearDataBase()
{
SqlConnection cn = new SqlConnection(CONNECTION_STRING);
SqlCommand cm = new SqlCommand("DELETE FROM Table2", cn);
try
{
cn.Open();
cm.ExecuteNonQuery();
}
catch (Exception)
{
//do nothing
}
finally
{
cn.Close();
}
}
[TestMethod()]
[Ignore]
public void TestEnterpriseServicesTransactionalUpdate()
{
Csla.Test.DataPortal.ESTransactionalRoot tr = Csla.Test.DataPortal.ESTransactionalRoot.NewESTransactionalRoot();
tr.FirstName = "Bill";
tr.LastName = "Johnson";
//setting smallColumn to a string less than or equal to 5 characters will
//not cause the transaction to rollback
tr.SmallColumn = "abc";
tr = tr.Save();
SqlConnection cn = new SqlConnection(CONNECTION_STRING);
SqlCommand cm = new SqlCommand("SELECT * FROM Table2", cn);
try
{
cn.Open();
SqlDataReader dr = cm.ExecuteReader();
//will have rows since no sqlexception was thrown on the insert
Assert.AreEqual(true, dr.HasRows);
dr.Close();
}
catch
{
//do nothing
}
finally
{
cn.Close();
}
ClearDataBase();
Csla.Test.DataPortal.ESTransactionalRoot tr2 = Csla.Test.DataPortal.ESTransactionalRoot.NewESTransactionalRoot();
tr2.FirstName = "Jimmy";
tr2.LastName = "Smith";
//intentionally input a string longer than varchar(5) to
//cause a sql exception and rollback the transaction
tr2.SmallColumn = "this will cause a sql exception";
try
{
//will throw a sql exception since the SmallColumn property is too long
tr2 = tr2.Save();
}
catch (Exception ex)
{
Assert.IsTrue(ex.Message.StartsWith("DataPortal.Update failed"), "Invalid exception message");
}
//within the DataPortal_Insert method, two commands are run to insert data into
//the database. Here we verify that both commands have been rolled back
try
{
cn.Open();
SqlDataReader dr = cm.ExecuteReader();
//should not have rows since both commands were rolled back
Assert.AreEqual(false, dr.HasRows);
dr.Close();
}
catch
{
//do nothing
}
finally
{
cn.Close();
}
ClearDataBase();
}
#if DEBUG
[TestMethod()]
public void TestTransactionScopeUpdate()
{
Csla.Test.DataPortal.TransactionalRoot tr = Csla.Test.DataPortal.TransactionalRoot.NewTransactionalRoot();
tr.FirstName = "Bill";
tr.LastName = "Johnson";
//setting smallColumn to a string less than or equal to 5 characters will
//not cause the transaction to rollback
tr.SmallColumn = "abc";
tr = tr.Save();
SqlConnection cn = new SqlConnection(CONNECTION_STRING);
SqlCommand cm = new SqlCommand("SELECT * FROM Table2", cn);
try
{
cn.Open();
SqlDataReader dr = cm.ExecuteReader();
//will have rows since no sqlexception was thrown on the insert
Assert.AreEqual(true, dr.HasRows);
dr.Close();
}
catch (Exception ex)
{
//do nothing
}
finally
{
cn.Close();
}
ClearDataBase();
Csla.Test.DataPortal.TransactionalRoot tr2 = Csla.Test.DataPortal.TransactionalRoot.NewTransactionalRoot();
tr2.FirstName = "Jimmy";
tr2.LastName = "Smith";
//intentionally input a string longer than varchar(5) to
//cause a sql exception and rollback the transaction
tr2.SmallColumn = "this will cause a sql exception";
try
{
//will throw a sql exception since the SmallColumn property is too long
tr2 = tr2.Save();
}
catch (Exception ex)
{
Assert.IsTrue(ex.Message.StartsWith("DataPortal.Update failed"), "Invalid exception message");
}
//within the DataPortal_Insert method, two commands are run to insert data into
//the database. Here we verify that both commands have been rolled back
try
{
cn.Open();
SqlDataReader dr = cm.ExecuteReader();
//should not have rows since both commands were rolled back
Assert.AreEqual(false, dr.HasRows);
dr.Close();
}
catch (Exception ex)
{
//do nothing
}
finally
{
cn.Close();
}
ClearDataBase();
}
#endif
[TestMethod()]
public void StronglyTypedDataPortalMethods()
{
//test strongly-typed DataPortal_Fetch method
Csla.ApplicationContext.GlobalContext.Clear();
Csla.Test.DataPortal.StronglyTypedDP root = Csla.Test.DataPortal.StronglyTypedDP.GetStronglyTypedDP(456);
Assert.AreEqual("Fetched", Csla.ApplicationContext.GlobalContext["StronglyTypedDP"]);
Assert.AreEqual("fetched existing data", root.Data);
Assert.AreEqual(456, root.Id);
//test strongly-typed DataPortal_Create method
Csla.ApplicationContext.GlobalContext.Clear();
Csla.Test.DataPortal.StronglyTypedDP root2 = Csla.Test.DataPortal.StronglyTypedDP.NewStronglyTypedDP();
Assert.AreEqual("Created", Csla.ApplicationContext.GlobalContext["StronglyTypedDP"]);
Assert.AreEqual("new default data", root2.Data);
Assert.AreEqual(56, root2.Id);
//test strongly-typed DataPortal_Delete method
Csla.Test.DataPortal.StronglyTypedDP.DeleteStronglyTypedDP(567);
Assert.AreEqual(567, Csla.ApplicationContext.GlobalContext["StronglyTypedDP_Criteria"]);
}
[TestMethod]
public void EncapsulatedIsBusyFails()
{
try
{
var obj = Csla.DataPortal.Fetch<EncapsulatedBusy>();
}
catch (DataPortalException ex)
{
Assert.IsInstanceOfType(ex.InnerException, typeof(InvalidOperationException));
return;
}
Assert.Fail("Expected exception");
}
[TestMethod]
public void FactoryIsBusyFails()
{
try
{
var obj = Csla.DataPortal.Fetch<FactoryBusy>();
}
catch (DataPortalException ex)
{
Assert.IsInstanceOfType(ex.InnerException, typeof(InvalidOperationException));
return;
}
Assert.Fail("Expected exception");
}
[TestMethod()]
public void DataPortalEvents()
{
Csla.DataPortal.DataPortalInvoke += new Action<DataPortalEventArgs>(ClientPortal_DataPortalInvoke);
Csla.DataPortal.DataPortalInvokeComplete += new Action<DataPortalEventArgs>(ClientPortal_DataPortalInvokeComplete);
try
{
ApplicationContext.GlobalContext.Clear();
DpRoot root = DpRoot.NewRoot();
root.Data = "saved";
Csla.ApplicationContext.GlobalContext.Clear();
root = root.Save();
Assert.IsTrue((bool)ApplicationContext.GlobalContext["dpinvoke"], "DataPortalInvoke not called");
Assert.IsTrue((bool)ApplicationContext.GlobalContext["dpinvokecomplete"], "DataPortalInvokeComplete not called");
Assert.IsTrue((bool)ApplicationContext.GlobalContext["serverinvoke"], "Server DataPortalInvoke not called");
Assert.IsTrue((bool)ApplicationContext.GlobalContext["serverinvokecomplete"], "Server DataPortalInvokeComplete not called");
}
finally
{
Csla.DataPortal.DataPortalInvoke -= new Action<DataPortalEventArgs>(ClientPortal_DataPortalInvoke);
Csla.DataPortal.DataPortalInvokeComplete -= new Action<DataPortalEventArgs>(ClientPortal_DataPortalInvokeComplete);
}
}
[TestMethod]
public void DataPortalBrokerTests()
{
ApplicationContext.GlobalContext.Clear();
Csla.Server.DataPortalBroker.DataPortalServer = new CustomDataPortalServer();
try
{
var single = Csla.Test.DataPortalTest.Single.NewObject();
Assert.AreEqual(ApplicationContext.GlobalContext["Single"], "Created");
Assert.AreEqual(ApplicationContext.GlobalContext["CustomDataPortalServer"], "Create Called");
ApplicationContext.GlobalContext.Clear();
single.Save();
Assert.AreEqual(ApplicationContext.GlobalContext["Single"], "Inserted");
Assert.AreEqual(ApplicationContext.GlobalContext["CustomDataPortalServer"], "Update Called");
ApplicationContext.GlobalContext.Clear();
single = Csla.Test.DataPortalTest.Single.GetObject(1);
Assert.AreEqual(ApplicationContext.GlobalContext["Single"], "Fetched");
Assert.AreEqual(ApplicationContext.GlobalContext["CustomDataPortalServer"], "Fetch Called");
ApplicationContext.GlobalContext.Clear();
single.Save();
Assert.AreEqual(ApplicationContext.GlobalContext["Single"], "Updated");
Assert.AreEqual(ApplicationContext.GlobalContext["CustomDataPortalServer"], "Update Called");
ApplicationContext.GlobalContext.Clear();
Csla.Test.DataPortalTest.Single.DeleteObject(1);
Assert.AreEqual(ApplicationContext.GlobalContext["Single"], "Deleted");
Assert.AreEqual(ApplicationContext.GlobalContext["CustomDataPortalServer"], "Delete Called");
}
finally
{
ApplicationContext.GlobalContext.Clear();
Csla.Server.DataPortalBroker.DataPortalServer = null;
}
}
[TestMethod]
public void CallDataPortalOverrides()
{
Csla.ApplicationContext.GlobalContext.Clear();
ParentEntity parent = ParentEntity.NewParentEntity();
parent.Data = "something";
Assert.AreEqual(false, parent.IsDeleted);
Assert.AreEqual(true, parent.IsValid);
Assert.AreEqual(true, parent.IsNew);
Assert.AreEqual(true, parent.IsDirty);
Assert.AreEqual(true, parent.IsSavable);
parent = parent.Save();
Assert.AreEqual("Inserted", Csla.ApplicationContext.GlobalContext["ParentEntity"]);
Assert.AreEqual(false, parent.IsDeleted);
Assert.AreEqual(true, parent.IsValid);
Assert.AreEqual(false, parent.IsNew);
Assert.AreEqual(false, parent.IsDirty);
Assert.AreEqual(false, parent.IsSavable);
parent.Data = "something new";
Assert.AreEqual(false, parent.IsDeleted);
Assert.AreEqual(true, parent.IsValid);
Assert.AreEqual(false, parent.IsNew);
Assert.AreEqual(true, parent.IsDirty);
Assert.AreEqual(true, parent.IsSavable);
parent = parent.Save();
Assert.AreEqual("Updated", Csla.ApplicationContext.GlobalContext["ParentEntity"]);
parent.Delete();
Assert.AreEqual(true, parent.IsDeleted);
parent = parent.Save();
Assert.AreEqual("Deleted Self", Csla.ApplicationContext.GlobalContext["ParentEntity"]);
ParentEntity.DeleteParentEntity(33);
Assert.AreEqual("Deleted", Csla.ApplicationContext.GlobalContext["ParentEntity"]);
Assert.AreEqual(false, parent.IsDeleted);
Assert.AreEqual(true, parent.IsValid);
Assert.AreEqual(true, parent.IsNew);
Assert.AreEqual(true, parent.IsDirty);
Assert.AreEqual(true, parent.IsSavable);
ParentEntity.GetParentEntity(33);
Assert.AreEqual("Fetched", Csla.ApplicationContext.GlobalContext["ParentEntity"]);
}
private void ClientPortal_DataPortalInvoke(DataPortalEventArgs obj)
{
ApplicationContext.GlobalContext["dpinvoke"] = true;
}
private void ClientPortal_DataPortalInvokeComplete(DataPortalEventArgs obj)
{
ApplicationContext.GlobalContext["dpinvokecomplete"] = true;
}
}
[Serializable]
public class EncapsulatedBusy : BusinessBase<EncapsulatedBusy>
{
protected override void DataPortal_Create()
{
base.DataPortal_Create();
MarkBusy();
}
private void DataPortal_Fetch()
{
MarkBusy();
}
}
[Serializable]
[Csla.Server.ObjectFactory(typeof(FactoryBusyFactory))]
public class FactoryBusy : BusinessBase<FactoryBusy>
{
public void MarkObjectBusy()
{
MarkBusy();
}
}
public class FactoryBusyFactory : Csla.Server.ObjectFactory
{
public FactoryBusy Fetch()
{
var obj = new FactoryBusy();
MarkOld(obj);
obj.MarkObjectBusy();
return obj;
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using sys = System;
namespace Google.Ads.GoogleAds.V9.Resources
{
/// <summary>Resource name for the <c>GenderView</c> resource.</summary>
public sealed partial class GenderViewName : gax::IResourceName, sys::IEquatable<GenderViewName>
{
/// <summary>The possible contents of <see cref="GenderViewName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>.
/// </summary>
CustomerAdGroupCriterion = 1,
}
private static gax::PathTemplate s_customerAdGroupCriterion = new gax::PathTemplate("customers/{customer_id}/genderViews/{ad_group_id_criterion_id}");
/// <summary>Creates a <see cref="GenderViewName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="GenderViewName"/> containing the provided
/// <paramref name="unparsedResourceName"/>.
/// </returns>
public static GenderViewName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new GenderViewName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="GenderViewName"/> with the pattern
/// <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="GenderViewName"/> constructed from the provided ids.</returns>
public static GenderViewName FromCustomerAdGroupCriterion(string customerId, string adGroupId, string criterionId) =>
new GenderViewName(ResourceNameType.CustomerAdGroupCriterion, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="GenderViewName"/> with pattern
/// <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="GenderViewName"/> with pattern
/// <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>.
/// </returns>
public static string Format(string customerId, string adGroupId, string criterionId) =>
FormatCustomerAdGroupCriterion(customerId, adGroupId, criterionId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="GenderViewName"/> with pattern
/// <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>.
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="GenderViewName"/> with pattern
/// <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>.
/// </returns>
public static string FormatCustomerAdGroupCriterion(string customerId, string adGroupId, string criterionId) =>
s_customerAdGroupCriterion.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), $"{(gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)))}~{(gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)))}");
/// <summary>Parses the given resource name string into a new <see cref="GenderViewName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c></description>
/// </item>
/// </list>
/// </remarks>
/// <param name="genderViewName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="GenderViewName"/> if successful.</returns>
public static GenderViewName Parse(string genderViewName) => Parse(genderViewName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="GenderViewName"/> instance; optionally allowing
/// an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c></description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="genderViewName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="GenderViewName"/> if successful.</returns>
public static GenderViewName Parse(string genderViewName, bool allowUnparsed) =>
TryParse(genderViewName, allowUnparsed, out GenderViewName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="GenderViewName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c></description>
/// </item>
/// </list>
/// </remarks>
/// <param name="genderViewName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="GenderViewName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string genderViewName, out GenderViewName result) =>
TryParse(genderViewName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="GenderViewName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c></description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="genderViewName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="GenderViewName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string genderViewName, bool allowUnparsed, out GenderViewName result)
{
gax::GaxPreconditions.CheckNotNull(genderViewName, nameof(genderViewName));
gax::TemplatedResourceName resourceName;
if (s_customerAdGroupCriterion.TryParseName(genderViewName, out resourceName))
{
string[] split1 = ParseSplitHelper(resourceName[1], new char[] { '~', });
if (split1 == null)
{
result = null;
return false;
}
result = FromCustomerAdGroupCriterion(resourceName[0], split1[0], split1[1]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(genderViewName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private static string[] ParseSplitHelper(string s, char[] separators)
{
string[] result = new string[separators.Length + 1];
int i0 = 0;
for (int i = 0; i <= separators.Length; i++)
{
int i1 = i < separators.Length ? s.IndexOf(separators[i], i0) : s.Length;
if (i1 < 0 || i1 == i0)
{
return null;
}
result[i] = s.Substring(i0, i1 - i0);
i0 = i1 + 1;
}
return result;
}
private GenderViewName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string adGroupId = null, string criterionId = null, string customerId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
AdGroupId = adGroupId;
CriterionId = criterionId;
CustomerId = customerId;
}
/// <summary>
/// Constructs a new instance of a <see cref="GenderViewName"/> class from the component parts of pattern
/// <c>customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}</c>
/// </summary>
/// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="adGroupId">The <c>AdGroup</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="criterionId">The <c>Criterion</c> ID. Must not be <c>null</c> or empty.</param>
public GenderViewName(string customerId, string adGroupId, string criterionId) : this(ResourceNameType.CustomerAdGroupCriterion, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), adGroupId: gax::GaxPreconditions.CheckNotNullOrEmpty(adGroupId, nameof(adGroupId)), criterionId: gax::GaxPreconditions.CheckNotNullOrEmpty(criterionId, nameof(criterionId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>AdGroup</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string AdGroupId { get; }
/// <summary>
/// The <c>Criterion</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string CriterionId { get; }
/// <summary>
/// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string CustomerId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.CustomerAdGroupCriterion: return s_customerAdGroupCriterion.Expand(CustomerId, $"{AdGroupId}~{CriterionId}");
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as GenderViewName);
/// <inheritdoc/>
public bool Equals(GenderViewName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(GenderViewName a, GenderViewName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(GenderViewName a, GenderViewName b) => !(a == b);
}
public partial class GenderView
{
/// <summary>
/// <see cref="GenderViewName"/>-typed view over the <see cref="ResourceName"/> resource name property.
/// </summary>
internal GenderViewName ResourceNameAsGenderViewName
{
get => string.IsNullOrEmpty(ResourceName) ? null : GenderViewName.Parse(ResourceName, allowUnparsed: true);
set => ResourceName = value?.ToString() ?? "";
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.IO;
using System.Runtime.ExceptionServices;
using System.Runtime.InteropServices;
using System.Threading;
namespace System.Net
{
internal sealed unsafe partial class HttpRequestStream : Stream
{
private bool _closed;
private readonly HttpListenerContext _httpContext;
private uint _dataChunkOffset;
private int _dataChunkIndex;
internal const int MaxReadSize = 0x20000; //http.sys recommends we limit reads to 128k
private bool _inOpaqueMode;
internal HttpRequestStream(HttpListenerContext httpContext)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"httpContextt:{httpContext}");
_httpContext = httpContext;
}
internal bool BufferedDataChunksAvailable
{
get
{
return _dataChunkIndex > -1;
}
}
// This low level API should only be consumed if the caller can make sure that the state is not corrupted
// WebSocketHttpListenerDuplexStream (a duplex wrapper around HttpRequestStream/HttpResponseStream)
// is currenlty the only consumer of this API
internal HttpListenerContext InternalHttpContext
{
get
{
return _httpContext;
}
}
private int ReadCore(byte[] buffer, int offset, int size)
{
uint dataRead = 0;
if (_dataChunkIndex != -1)
{
dataRead = Interop.HttpApi.GetChunks(_httpContext.Request.RequestBuffer, _httpContext.Request.OriginalBlobAddress, ref _dataChunkIndex, ref _dataChunkOffset, buffer, offset, size);
}
if (_dataChunkIndex == -1 && dataRead < size)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "size:" + size + " offset:" + offset);
uint statusCode = 0;
uint extraDataRead = 0;
offset += (int)dataRead;
size -= (int)dataRead;
//the http.sys team recommends that we limit the size to 128kb
if (size > MaxReadSize)
{
size = MaxReadSize;
}
fixed (byte* pBuffer = buffer)
{
// issue unmanaged blocking call
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Calling Interop.HttpApi.HttpReceiveRequestEntityBody");
uint flags = 0;
if (!_inOpaqueMode)
{
flags = (uint)Interop.HttpApi.HTTP_FLAGS.HTTP_RECEIVE_REQUEST_FLAG_COPY_BODY;
}
statusCode =
Interop.HttpApi.HttpReceiveRequestEntityBody(
_httpContext.RequestQueueHandle,
_httpContext.RequestId,
flags,
(void*)(pBuffer + offset),
(uint)size,
out extraDataRead,
null);
dataRead += extraDataRead;
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Call to Interop.HttpApi.HttpReceiveRequestEntityBody returned:" + statusCode + " dataRead:" + dataRead);
}
if (statusCode != Interop.HttpApi.ERROR_SUCCESS && statusCode != Interop.HttpApi.ERROR_HANDLE_EOF)
{
Exception exception = new HttpListenerException((int)statusCode);
if (NetEventSource.IsEnabled) NetEventSource.Error(this, exception.ToString());
throw exception;
}
UpdateAfterRead(statusCode, dataRead);
}
if (NetEventSource.IsEnabled)
{
NetEventSource.DumpBuffer(this, buffer, offset, (int)dataRead);
NetEventSource.Info(this, "returning dataRead:" + dataRead);
NetEventSource.Exit(this, "dataRead:" + dataRead);
}
return (int)dataRead;
}
private void UpdateAfterRead(uint statusCode, uint dataRead)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "statusCode:" + statusCode + " _closed:" + _closed);
if (statusCode == Interop.HttpApi.ERROR_HANDLE_EOF || dataRead == 0)
{
Close();
}
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "statusCode:" + statusCode + " _closed:" + _closed);
}
public IAsyncResult BeginReadCore(byte[] buffer, int offset, int size, AsyncCallback callback, object state)
{
if (size == 0 || _closed)
{
if (NetEventSource.IsEnabled) NetEventSource.Exit(this);
HttpRequestStreamAsyncResult result = new HttpRequestStreamAsyncResult(this, state, callback);
result.InvokeCallback((uint)0);
return result;
}
HttpRequestStreamAsyncResult asyncResult = null;
uint dataRead = 0;
if (_dataChunkIndex != -1)
{
dataRead = Interop.HttpApi.GetChunks(_httpContext.Request.RequestBuffer, _httpContext.Request.OriginalBlobAddress, ref _dataChunkIndex, ref _dataChunkOffset, buffer, offset, size);
if (_dataChunkIndex != -1 && dataRead == size)
{
asyncResult = new HttpRequestStreamAsyncResult(_httpContext.RequestQueueBoundHandle, this, state, callback, buffer, offset, (uint)size, 0);
asyncResult.InvokeCallback(dataRead);
}
}
if (_dataChunkIndex == -1 && dataRead < size)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "size:" + size + " offset:" + offset);
uint statusCode = 0;
offset += (int)dataRead;
size -= (int)dataRead;
//the http.sys team recommends that we limit the size to 128kb
if (size > MaxReadSize)
{
size = MaxReadSize;
}
asyncResult = new HttpRequestStreamAsyncResult(_httpContext.RequestQueueBoundHandle, this, state, callback, buffer, offset, (uint)size, dataRead);
uint bytesReturned;
try
{
fixed (byte* pBuffer = buffer)
{
// issue unmanaged blocking call
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Calling Interop.HttpApi.HttpReceiveRequestEntityBody");
uint flags = 0;
if (!_inOpaqueMode)
{
flags = (uint)Interop.HttpApi.HTTP_FLAGS.HTTP_RECEIVE_REQUEST_FLAG_COPY_BODY;
}
statusCode =
Interop.HttpApi.HttpReceiveRequestEntityBody(
_httpContext.RequestQueueHandle,
_httpContext.RequestId,
flags,
asyncResult._pPinnedBuffer,
(uint)size,
out bytesReturned,
asyncResult._pOverlapped);
if (NetEventSource.IsEnabled) NetEventSource.Info(this, "Call to Interop.HttpApi.HttpReceiveRequestEntityBody returned:" + statusCode + " dataRead:" + dataRead);
}
}
catch (Exception e)
{
if (NetEventSource.IsEnabled) NetEventSource.Error(this, e.ToString());
asyncResult.InternalCleanup();
throw;
}
if (statusCode != Interop.HttpApi.ERROR_SUCCESS && statusCode != Interop.HttpApi.ERROR_IO_PENDING)
{
asyncResult.InternalCleanup();
if (statusCode == Interop.HttpApi.ERROR_HANDLE_EOF)
{
asyncResult = new HttpRequestStreamAsyncResult(this, state, callback, dataRead);
asyncResult.InvokeCallback((uint)0);
}
else
{
Exception exception = new HttpListenerException((int)statusCode);
if (NetEventSource.IsEnabled) NetEventSource.Error(this, exception.ToString());
asyncResult.InternalCleanup();
throw exception;
}
}
else if (statusCode == Interop.HttpApi.ERROR_SUCCESS &&
HttpListener.SkipIOCPCallbackOnSuccess)
{
// IO operation completed synchronously - callback won't be called to signal completion.
asyncResult.IOCompleted(statusCode, bytesReturned);
}
}
if (NetEventSource.IsEnabled) NetEventSource.Exit(this);
return asyncResult;
}
public override int EndRead(IAsyncResult asyncResult)
{
if (NetEventSource.IsEnabled)
{
NetEventSource.Enter(this);
NetEventSource.Info(this, $"asyncResult: {asyncResult}");
}
if (asyncResult == null)
{
throw new ArgumentNullException(nameof(asyncResult));
}
HttpRequestStreamAsyncResult castedAsyncResult = asyncResult as HttpRequestStreamAsyncResult;
if (castedAsyncResult == null || castedAsyncResult.AsyncObject != this)
{
throw new ArgumentException(SR.net_io_invalidasyncresult, nameof(asyncResult));
}
if (castedAsyncResult.EndCalled)
{
throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, nameof(EndRead)));
}
castedAsyncResult.EndCalled = true;
// wait & then check for errors
object returnValue = castedAsyncResult.InternalWaitForCompletion();
Exception exception = returnValue as Exception;
if (exception != null)
{
if (NetEventSource.IsEnabled)
{
NetEventSource.Info(this, "Rethrowing exception:" + exception);
NetEventSource.Error(this, exception.ToString());
}
ExceptionDispatchInfo.Throw(exception);
}
uint dataRead = (uint)returnValue;
UpdateAfterRead((uint)castedAsyncResult.ErrorCode, dataRead);
if (NetEventSource.IsEnabled)
{
NetEventSource.Info(this, $"returnValue:{returnValue}");
NetEventSource.Exit(this);
}
return (int)dataRead + (int)castedAsyncResult._dataAlreadyRead;
}
internal void SwitchToOpaqueMode()
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this);
_inOpaqueMode = true;
}
// This low level API should only be consumed if the caller can make sure that the state is not corrupted
// WebSocketHttpListenerDuplexStream (a duplex wrapper around HttpRequestStream/HttpResponseStream)
// is currenlty the only consumer of this API
internal uint GetChunks(byte[] buffer, int offset, int size)
{
return Interop.HttpApi.GetChunks(_httpContext.Request.RequestBuffer,
_httpContext.Request.OriginalBlobAddress,
ref _dataChunkIndex,
ref _dataChunkOffset,
buffer,
offset,
size);
}
private sealed unsafe class HttpRequestStreamAsyncResult : LazyAsyncResult
{
private readonly ThreadPoolBoundHandle _boundHandle;
internal NativeOverlapped* _pOverlapped;
internal void* _pPinnedBuffer;
internal uint _dataAlreadyRead = 0;
private static readonly IOCompletionCallback s_IOCallback = new IOCompletionCallback(Callback);
internal HttpRequestStreamAsyncResult(object asyncObject, object userState, AsyncCallback callback) : base(asyncObject, userState, callback)
{
}
internal HttpRequestStreamAsyncResult(object asyncObject, object userState, AsyncCallback callback, uint dataAlreadyRead) : base(asyncObject, userState, callback)
{
_dataAlreadyRead = dataAlreadyRead;
}
internal HttpRequestStreamAsyncResult(ThreadPoolBoundHandle boundHandle, object asyncObject, object userState, AsyncCallback callback, byte[] buffer, int offset, uint size, uint dataAlreadyRead) : base(asyncObject, userState, callback)
{
_dataAlreadyRead = dataAlreadyRead;
_boundHandle = boundHandle;
_pOverlapped = boundHandle.AllocateNativeOverlapped(s_IOCallback, state: this, pinData: buffer);
_pPinnedBuffer = (void*)(Marshal.UnsafeAddrOfPinnedArrayElement(buffer, offset));
}
internal void IOCompleted(uint errorCode, uint numBytes)
{
IOCompleted(this, errorCode, numBytes);
}
private static void IOCompleted(HttpRequestStreamAsyncResult asyncResult, uint errorCode, uint numBytes)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(null, $"asyncResult: {asyncResult} errorCode:0x {errorCode.ToString("x8")} numBytes: {numBytes}");
object result = null;
try
{
if (errorCode != Interop.HttpApi.ERROR_SUCCESS && errorCode != Interop.HttpApi.ERROR_HANDLE_EOF)
{
asyncResult.ErrorCode = (int)errorCode;
result = new HttpListenerException((int)errorCode);
}
else
{
result = numBytes;
if (NetEventSource.IsEnabled) NetEventSource.DumpBuffer(asyncResult, (IntPtr)asyncResult._pPinnedBuffer, (int)numBytes);
}
if (NetEventSource.IsEnabled) NetEventSource.Info(null, $"asyncResult: {asyncResult} calling Complete()");
}
catch (Exception e)
{
result = e;
}
asyncResult.InvokeCallback(result);
}
private static unsafe void Callback(uint errorCode, uint numBytes, NativeOverlapped* nativeOverlapped)
{
HttpRequestStreamAsyncResult asyncResult = (HttpRequestStreamAsyncResult)ThreadPoolBoundHandle.GetNativeOverlappedState(nativeOverlapped);
if (NetEventSource.IsEnabled) NetEventSource.Info(null, $"asyncResult: {asyncResult} errorCode:0x {errorCode.ToString("x8")} numBytes: {numBytes} nativeOverlapped:0x {((IntPtr)nativeOverlapped).ToString("x8")}");
IOCompleted(asyncResult, errorCode, numBytes);
}
// Will be called from the base class upon InvokeCallback()
protected override void Cleanup()
{
base.Cleanup();
if (_pOverlapped != null)
{
_boundHandle.FreeNativeOverlapped(_pOverlapped);
}
}
}
}
}
| |
namespace System.Workflow.ComponentModel
{
using System;
using System.Text;
using System.Reflection;
using System.Collections;
using System.CodeDom;
using System.ComponentModel;
using System.ComponentModel.Design;
using System.ComponentModel.Design.Serialization;
using System.Drawing;
using System.Runtime.Serialization;
using System.Globalization;
using System.Workflow.ComponentModel;
using System.Workflow.ComponentModel.Design;
using System.Collections.Generic;
using System.Workflow.Runtime;
using System.Workflow.ComponentModel.Compiler;
[SRDescription(SR.FaultActivityDescription)]
[ToolboxItem(typeof(ActivityToolboxItem))]
[Designer(typeof(ThrowDesigner), typeof(IDesigner))]
[ToolboxBitmap(typeof(ThrowActivity), "Resources.Throw.png")]
[SRCategory(SR.Standard)]
[Obsolete("The System.Workflow.* types are deprecated. Instead, please use the new types from System.Activities.*")]
public sealed class ThrowActivity : Activity, ITypeFilterProvider, IDynamicPropertyTypeProvider
{
[Browsable(false)]
public static readonly DependencyProperty FaultProperty = DependencyProperty.Register("Fault", typeof(Exception), typeof(ThrowActivity));
[Browsable(false)]
public static readonly DependencyProperty FaultTypeProperty = DependencyProperty.Register("FaultType", typeof(Type), typeof(ThrowActivity));
#region Constructors
public ThrowActivity()
{
}
public ThrowActivity(string name)
: base(name)
{
}
#endregion
protected internal override void Initialize(IServiceProvider provider)
{
if (this.Parent == null)
throw new InvalidOperationException(SR.GetString(SR.Error_MustHaveParent));
base.Initialize(provider);
}
protected internal override sealed ActivityExecutionStatus Execute(ActivityExecutionContext executionContext)
{
if (this.Fault == null && this.FaultType == null)
{
throw new InvalidOperationException(SR.GetString(CultureInfo.CurrentCulture, SR.Error_PropertyNotSet, FaultProperty.Name));
}
if (this.FaultType != null && !typeof(Exception).IsAssignableFrom(this.FaultType))
{
throw new InvalidOperationException(SR.GetString(CultureInfo.CurrentCulture, SR.Error_ExceptionTypeNotException, this.FaultType, FaultTypeProperty.Name));
}
if (this.Fault != null && this.FaultType != null && !this.FaultType.IsInstanceOfType(this.Fault))
{
throw new InvalidOperationException(SR.GetString(CultureInfo.CurrentCulture, SR.Error_FaultIsNotOfFaultType));
}
if (this.Fault != null)
throw this.Fault;
ConstructorInfo cInfo = this.FaultType.GetConstructor(new Type[] { });
if (cInfo != null)
throw (Exception)cInfo.Invoke(null);
throw new InvalidOperationException(SR.GetString(CultureInfo.CurrentCulture, SR.Error_FaultTypeNoDefaultConstructor, this.FaultType));
}
[TypeConverter(typeof(FaultConverter))]
[SRCategory(SR.Handlers)]
[SRDescription(SR.FaultDescription)]
[MergableProperty(false)]
[DefaultValue(null)]
public Exception Fault
{
get
{
return base.GetValue(FaultProperty) as Exception;
}
set
{
base.SetValue(FaultProperty, value);
}
}
[Editor(typeof(TypeBrowserEditor), typeof(System.Drawing.Design.UITypeEditor))]
[SRCategory(SR.Handlers)]
[SRDescription(SR.FaultTypeDescription)]
[MergableProperty(false)]
[DefaultValue(null)]
[TypeConverter(typeof(FaultTypeConverter))]
public Type FaultType
{
get
{
return base.GetValue(FaultTypeProperty) as Type;
}
set
{
base.SetValue(FaultTypeProperty, value);
}
}
#region ITypeFilterProvider Members
bool ITypeFilterProvider.CanFilterType(Type type, bool throwOnError)
{
bool isAssignable = TypeProvider.IsAssignable(typeof(Exception), type);
if (throwOnError && !isAssignable)
throw new Exception(SR.GetString(SR.Error_ExceptionTypeNotException, type, "Type"));
return isAssignable;
}
string ITypeFilterProvider.FilterDescription
{
get
{
return SR.GetString(SR.FilterDescription_FaultHandlerActivity);
}
}
#endregion
#region IDynamicPropertyTypeProvider Members
Type IDynamicPropertyTypeProvider.GetPropertyType(IServiceProvider serviceProvider, string propertyName)
{
if (!String.IsNullOrEmpty(propertyName) && propertyName.Equals("Fault", StringComparison.Ordinal))
return FaultType;
else
return null;
}
AccessTypes IDynamicPropertyTypeProvider.GetAccessType(IServiceProvider serviceProvider, string propertyName)
{
return AccessTypes.Read;
}
#endregion
#region Class FaultConverter
private sealed class FaultConverter : TypeConverter
{
public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof(string))
return false;
return base.CanConvertFrom(context, sourceType);
}
}
#endregion
#region Class FaultTypeConverter
private sealed class FaultTypeConverter : TypeConverter
{
public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType)
{
if (sourceType == typeof(string))
return true;
return base.CanConvertFrom(context, sourceType);
}
public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value)
{
object convertedValue = value;
string stringValue = value as string;
ITypeProvider typeProvider = context.GetService(typeof(ITypeProvider)) as ITypeProvider;
if (context != null && typeProvider != null && !String.IsNullOrEmpty(stringValue))
{
Type type = typeProvider.GetType(stringValue, false);
if (type != null)
{
ITypeFilterProvider typeFilterProvider = context.Instance as ITypeFilterProvider;
if (typeFilterProvider != null)
typeFilterProvider.CanFilterType(type, true);
convertedValue = type;
}
}
else if (stringValue != null && stringValue.Length == 0)
{
convertedValue = null;
}
return convertedValue;
}
public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType)
{
if (destinationType == typeof(string))
return true;
return base.CanConvertTo(context, destinationType);
}
public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType)
{
if (destinationType == typeof(string))
{
Type type = value as Type;
if (type != null)
return type.FullName;
}
return base.ConvertTo(context, culture, value, destinationType);
}
}
#endregion
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V9.Services
{
/// <summary>Settings for <see cref="ConversionValueRuleSetServiceClient"/> instances.</summary>
public sealed partial class ConversionValueRuleSetServiceSettings : gaxgrpc::ServiceSettingsBase
{
/// <summary>Get a new instance of the default <see cref="ConversionValueRuleSetServiceSettings"/>.</summary>
/// <returns>A new instance of the default <see cref="ConversionValueRuleSetServiceSettings"/>.</returns>
public static ConversionValueRuleSetServiceSettings GetDefault() => new ConversionValueRuleSetServiceSettings();
/// <summary>
/// Constructs a new <see cref="ConversionValueRuleSetServiceSettings"/> object with default settings.
/// </summary>
public ConversionValueRuleSetServiceSettings()
{
}
private ConversionValueRuleSetServiceSettings(ConversionValueRuleSetServiceSettings existing) : base(existing)
{
gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
GetConversionValueRuleSetSettings = existing.GetConversionValueRuleSetSettings;
MutateConversionValueRuleSetsSettings = existing.MutateConversionValueRuleSetsSettings;
OnCopy(existing);
}
partial void OnCopy(ConversionValueRuleSetServiceSettings existing);
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>ConversionValueRuleSetServiceClient.GetConversionValueRuleSet</c> and
/// <c>ConversionValueRuleSetServiceClient.GetConversionValueRuleSetAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
/// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 3600 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings GetConversionValueRuleSetSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>ConversionValueRuleSetServiceClient.MutateConversionValueRuleSets</c> and
/// <c>ConversionValueRuleSetServiceClient.MutateConversionValueRuleSetsAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
/// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 3600 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings MutateConversionValueRuleSetsSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
/// <summary>Creates a deep clone of this object, with all the same property values.</summary>
/// <returns>A deep clone of this <see cref="ConversionValueRuleSetServiceSettings"/> object.</returns>
public ConversionValueRuleSetServiceSettings Clone() => new ConversionValueRuleSetServiceSettings(this);
}
/// <summary>
/// Builder class for <see cref="ConversionValueRuleSetServiceClient"/> to provide simple configuration of
/// credentials, endpoint etc.
/// </summary>
internal sealed partial class ConversionValueRuleSetServiceClientBuilder : gaxgrpc::ClientBuilderBase<ConversionValueRuleSetServiceClient>
{
/// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
public ConversionValueRuleSetServiceSettings Settings { get; set; }
/// <summary>Creates a new builder with default settings.</summary>
public ConversionValueRuleSetServiceClientBuilder()
{
UseJwtAccessWithScopes = ConversionValueRuleSetServiceClient.UseJwtAccessWithScopes;
}
partial void InterceptBuild(ref ConversionValueRuleSetServiceClient client);
partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<ConversionValueRuleSetServiceClient> task);
/// <summary>Builds the resulting client.</summary>
public override ConversionValueRuleSetServiceClient Build()
{
ConversionValueRuleSetServiceClient client = null;
InterceptBuild(ref client);
return client ?? BuildImpl();
}
/// <summary>Builds the resulting client asynchronously.</summary>
public override stt::Task<ConversionValueRuleSetServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
{
stt::Task<ConversionValueRuleSetServiceClient> task = null;
InterceptBuildAsync(cancellationToken, ref task);
return task ?? BuildAsyncImpl(cancellationToken);
}
private ConversionValueRuleSetServiceClient BuildImpl()
{
Validate();
grpccore::CallInvoker callInvoker = CreateCallInvoker();
return ConversionValueRuleSetServiceClient.Create(callInvoker, Settings);
}
private async stt::Task<ConversionValueRuleSetServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
{
Validate();
grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
return ConversionValueRuleSetServiceClient.Create(callInvoker, Settings);
}
/// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
protected override string GetDefaultEndpoint() => ConversionValueRuleSetServiceClient.DefaultEndpoint;
/// <summary>
/// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
/// </summary>
protected override scg::IReadOnlyList<string> GetDefaultScopes() =>
ConversionValueRuleSetServiceClient.DefaultScopes;
/// <summary>Returns the channel pool to use when no other options are specified.</summary>
protected override gaxgrpc::ChannelPool GetChannelPool() => ConversionValueRuleSetServiceClient.ChannelPool;
/// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>ConversionValueRuleSetService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to manage conversion value rule sets.
/// </remarks>
public abstract partial class ConversionValueRuleSetServiceClient
{
/// <summary>
/// The default endpoint for the ConversionValueRuleSetService service, which is a host of
/// "googleads.googleapis.com" and a port of 443.
/// </summary>
public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
/// <summary>The default ConversionValueRuleSetService scopes.</summary>
/// <remarks>
/// The default ConversionValueRuleSetService scopes are:
/// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
/// </remarks>
public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
{
"https://www.googleapis.com/auth/adwords",
});
internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
internal static bool UseJwtAccessWithScopes
{
get
{
bool useJwtAccessWithScopes = true;
MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
return useJwtAccessWithScopes;
}
}
static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
/// <summary>
/// Asynchronously creates a <see cref="ConversionValueRuleSetServiceClient"/> using the default credentials,
/// endpoint and settings. To specify custom credentials or other settings, use
/// <see cref="ConversionValueRuleSetServiceClientBuilder"/>.
/// </summary>
/// <param name="cancellationToken">
/// The <see cref="st::CancellationToken"/> to use while creating the client.
/// </param>
/// <returns>The task representing the created <see cref="ConversionValueRuleSetServiceClient"/>.</returns>
public static stt::Task<ConversionValueRuleSetServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
new ConversionValueRuleSetServiceClientBuilder().BuildAsync(cancellationToken);
/// <summary>
/// Synchronously creates a <see cref="ConversionValueRuleSetServiceClient"/> using the default credentials,
/// endpoint and settings. To specify custom credentials or other settings, use
/// <see cref="ConversionValueRuleSetServiceClientBuilder"/>.
/// </summary>
/// <returns>The created <see cref="ConversionValueRuleSetServiceClient"/>.</returns>
public static ConversionValueRuleSetServiceClient Create() =>
new ConversionValueRuleSetServiceClientBuilder().Build();
/// <summary>
/// Creates a <see cref="ConversionValueRuleSetServiceClient"/> which uses the specified call invoker for remote
/// operations.
/// </summary>
/// <param name="callInvoker">
/// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
/// </param>
/// <param name="settings">Optional <see cref="ConversionValueRuleSetServiceSettings"/>.</param>
/// <returns>The created <see cref="ConversionValueRuleSetServiceClient"/>.</returns>
internal static ConversionValueRuleSetServiceClient Create(grpccore::CallInvoker callInvoker, ConversionValueRuleSetServiceSettings settings = null)
{
gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
grpcinter::Interceptor interceptor = settings?.Interceptor;
if (interceptor != null)
{
callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
}
ConversionValueRuleSetService.ConversionValueRuleSetServiceClient grpcClient = new ConversionValueRuleSetService.ConversionValueRuleSetServiceClient(callInvoker);
return new ConversionValueRuleSetServiceClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
/// affected.
/// </summary>
/// <remarks>
/// After calling this method, further calls to <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
/// by another call to this method.
/// </remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
/// <summary>The underlying gRPC ConversionValueRuleSetService client</summary>
public virtual ConversionValueRuleSetService.ConversionValueRuleSetServiceClient GrpcClient => throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::ConversionValueRuleSet GetConversionValueRuleSet(GetConversionValueRuleSetRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(GetConversionValueRuleSetRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(GetConversionValueRuleSetRequest request, st::CancellationToken cancellationToken) =>
GetConversionValueRuleSetAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the conversion value rule set to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::ConversionValueRuleSet GetConversionValueRuleSet(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetConversionValueRuleSet(new GetConversionValueRuleSetRequest
{
ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the conversion value rule set to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetConversionValueRuleSetAsync(new GetConversionValueRuleSetRequest
{
ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the conversion value rule set to fetch.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(string resourceName, st::CancellationToken cancellationToken) =>
GetConversionValueRuleSetAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the conversion value rule set to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::ConversionValueRuleSet GetConversionValueRuleSet(gagvr::ConversionValueRuleSetName resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetConversionValueRuleSet(new GetConversionValueRuleSetRequest
{
ResourceNameAsConversionValueRuleSetName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the conversion value rule set to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(gagvr::ConversionValueRuleSetName resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetConversionValueRuleSetAsync(new GetConversionValueRuleSetRequest
{
ResourceNameAsConversionValueRuleSetName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="resourceName">
/// Required. The resource name of the conversion value rule set to fetch.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(gagvr::ConversionValueRuleSetName resourceName, st::CancellationToken cancellationToken) =>
GetConversionValueRuleSetAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual MutateConversionValueRuleSetsResponse MutateConversionValueRuleSets(MutateConversionValueRuleSetsRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<MutateConversionValueRuleSetsResponse> MutateConversionValueRuleSetsAsync(MutateConversionValueRuleSetsRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<MutateConversionValueRuleSetsResponse> MutateConversionValueRuleSetsAsync(MutateConversionValueRuleSetsRequest request, st::CancellationToken cancellationToken) =>
MutateConversionValueRuleSetsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="customerId">
/// Required. The ID of the customer whose conversion value rule sets are being modified.
/// </param>
/// <param name="operations">
/// Required. The list of operations to perform on individual conversion value rule sets.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual MutateConversionValueRuleSetsResponse MutateConversionValueRuleSets(string customerId, scg::IEnumerable<ConversionValueRuleSetOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
MutateConversionValueRuleSets(new MutateConversionValueRuleSetsRequest
{
CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
Operations =
{
gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
},
}, callSettings);
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="customerId">
/// Required. The ID of the customer whose conversion value rule sets are being modified.
/// </param>
/// <param name="operations">
/// Required. The list of operations to perform on individual conversion value rule sets.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<MutateConversionValueRuleSetsResponse> MutateConversionValueRuleSetsAsync(string customerId, scg::IEnumerable<ConversionValueRuleSetOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
MutateConversionValueRuleSetsAsync(new MutateConversionValueRuleSetsRequest
{
CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
Operations =
{
gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
},
}, callSettings);
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="customerId">
/// Required. The ID of the customer whose conversion value rule sets are being modified.
/// </param>
/// <param name="operations">
/// Required. The list of operations to perform on individual conversion value rule sets.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<MutateConversionValueRuleSetsResponse> MutateConversionValueRuleSetsAsync(string customerId, scg::IEnumerable<ConversionValueRuleSetOperation> operations, st::CancellationToken cancellationToken) =>
MutateConversionValueRuleSetsAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}
/// <summary>ConversionValueRuleSetService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to manage conversion value rule sets.
/// </remarks>
public sealed partial class ConversionValueRuleSetServiceClientImpl : ConversionValueRuleSetServiceClient
{
private readonly gaxgrpc::ApiCall<GetConversionValueRuleSetRequest, gagvr::ConversionValueRuleSet> _callGetConversionValueRuleSet;
private readonly gaxgrpc::ApiCall<MutateConversionValueRuleSetsRequest, MutateConversionValueRuleSetsResponse> _callMutateConversionValueRuleSets;
/// <summary>
/// Constructs a client wrapper for the ConversionValueRuleSetService service, with the specified gRPC client
/// and settings.
/// </summary>
/// <param name="grpcClient">The underlying gRPC client.</param>
/// <param name="settings">
/// The base <see cref="ConversionValueRuleSetServiceSettings"/> used within this client.
/// </param>
public ConversionValueRuleSetServiceClientImpl(ConversionValueRuleSetService.ConversionValueRuleSetServiceClient grpcClient, ConversionValueRuleSetServiceSettings settings)
{
GrpcClient = grpcClient;
ConversionValueRuleSetServiceSettings effectiveSettings = settings ?? ConversionValueRuleSetServiceSettings.GetDefault();
gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
_callGetConversionValueRuleSet = clientHelper.BuildApiCall<GetConversionValueRuleSetRequest, gagvr::ConversionValueRuleSet>(grpcClient.GetConversionValueRuleSetAsync, grpcClient.GetConversionValueRuleSet, effectiveSettings.GetConversionValueRuleSetSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName);
Modify_ApiCall(ref _callGetConversionValueRuleSet);
Modify_GetConversionValueRuleSetApiCall(ref _callGetConversionValueRuleSet);
_callMutateConversionValueRuleSets = clientHelper.BuildApiCall<MutateConversionValueRuleSetsRequest, MutateConversionValueRuleSetsResponse>(grpcClient.MutateConversionValueRuleSetsAsync, grpcClient.MutateConversionValueRuleSets, effectiveSettings.MutateConversionValueRuleSetsSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
Modify_ApiCall(ref _callMutateConversionValueRuleSets);
Modify_MutateConversionValueRuleSetsApiCall(ref _callMutateConversionValueRuleSets);
OnConstruction(grpcClient, effectiveSettings, clientHelper);
}
partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
partial void Modify_GetConversionValueRuleSetApiCall(ref gaxgrpc::ApiCall<GetConversionValueRuleSetRequest, gagvr::ConversionValueRuleSet> call);
partial void Modify_MutateConversionValueRuleSetsApiCall(ref gaxgrpc::ApiCall<MutateConversionValueRuleSetsRequest, MutateConversionValueRuleSetsResponse> call);
partial void OnConstruction(ConversionValueRuleSetService.ConversionValueRuleSetServiceClient grpcClient, ConversionValueRuleSetServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
/// <summary>The underlying gRPC ConversionValueRuleSetService client</summary>
public override ConversionValueRuleSetService.ConversionValueRuleSetServiceClient GrpcClient { get; }
partial void Modify_GetConversionValueRuleSetRequest(ref GetConversionValueRuleSetRequest request, ref gaxgrpc::CallSettings settings);
partial void Modify_MutateConversionValueRuleSetsRequest(ref MutateConversionValueRuleSetsRequest request, ref gaxgrpc::CallSettings settings);
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override gagvr::ConversionValueRuleSet GetConversionValueRuleSet(GetConversionValueRuleSetRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_GetConversionValueRuleSetRequest(ref request, ref callSettings);
return _callGetConversionValueRuleSet.Sync(request, callSettings);
}
/// <summary>
/// Returns the requested conversion value rule set.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<gagvr::ConversionValueRuleSet> GetConversionValueRuleSetAsync(GetConversionValueRuleSetRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_GetConversionValueRuleSetRequest(ref request, ref callSettings);
return _callGetConversionValueRuleSet.Async(request, callSettings);
}
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override MutateConversionValueRuleSetsResponse MutateConversionValueRuleSets(MutateConversionValueRuleSetsRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_MutateConversionValueRuleSetsRequest(ref request, ref callSettings);
return _callMutateConversionValueRuleSets.Sync(request, callSettings);
}
/// <summary>
/// Creates, updates or removes conversion value rule sets. Operation statuses
/// are returned.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<MutateConversionValueRuleSetsResponse> MutateConversionValueRuleSetsAsync(MutateConversionValueRuleSetsRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_MutateConversionValueRuleSetsRequest(ref request, ref callSettings);
return _callMutateConversionValueRuleSets.Async(request, callSettings);
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenSim;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.ApplicationPlugins.RegionModulesController
{
public class RegionModulesControllerPlugin : IRegionModulesController,
IApplicationPlugin
{
// Logger
private static readonly ILog m_log =
LogManager.GetLogger(
MethodBase.GetCurrentMethod().DeclaringType);
/// <summary>
/// Controls whether we load modules from Mono.Addins.
/// </summary>
/// <remarks>For debug purposes. Defaults to true.</remarks>
public bool LoadModulesFromAddins { get; set; }
// Config access
private OpenSimBase m_openSim;
// Our name
private string m_name;
// Internal lists to collect information about modules present
private List<TypeExtensionNode> m_nonSharedModules =
new List<TypeExtensionNode>();
private List<TypeExtensionNode> m_sharedModules =
new List<TypeExtensionNode>();
// List of shared module instances, for adding to Scenes
private List<ISharedRegionModule> m_sharedInstances =
new List<ISharedRegionModule>();
public RegionModulesControllerPlugin()
{
LoadModulesFromAddins = true;
}
#region IApplicationPlugin implementation
public void Initialise (OpenSimBase openSim)
{
m_openSim = openSim;
m_openSim.ApplicationRegistry.RegisterInterface<IRegionModulesController>(this);
m_log.DebugFormat("[REGIONMODULES]: Initializing...");
if (!LoadModulesFromAddins)
return;
// Who we are
string id = AddinManager.CurrentAddin.Id;
// Make friendly name
int pos = id.LastIndexOf(".");
if (pos == -1)
m_name = id;
else
m_name = id.Substring(pos + 1);
// The [Modules] section in the ini file
IConfig modulesConfig =
m_openSim.ConfigSource.Source.Configs["Modules"];
if (modulesConfig == null)
modulesConfig = m_openSim.ConfigSource.Source.AddConfig("Modules");
Dictionary<RuntimeAddin, IList<int>> loadedModules = new Dictionary<RuntimeAddin, IList<int>>();
// Scan modules and load all that aren't disabled
foreach (TypeExtensionNode node in AddinManager.GetExtensionNodes("/OpenSim/RegionModules"))
AddNode(node, modulesConfig, loadedModules);
foreach (KeyValuePair<RuntimeAddin, IList<int>> loadedModuleData in loadedModules)
{
m_log.InfoFormat(
"[REGIONMODULES]: From plugin {0}, (version {1}), loaded {2} modules, {3} shared, {4} non-shared {5} unknown",
loadedModuleData.Key.Id,
loadedModuleData.Key.Version,
loadedModuleData.Value[0] + loadedModuleData.Value[1] + loadedModuleData.Value[2],
loadedModuleData.Value[0], loadedModuleData.Value[1], loadedModuleData.Value[2]);
}
// Load and init the module. We try a constructor with a port
// if a port was given, fall back to one without if there is
// no port or the more specific constructor fails.
// This will be removed, so that any module capable of using a port
// must provide a constructor with a port in the future.
// For now, we do this so migration is easy.
//
foreach (TypeExtensionNode node in m_sharedModules)
{
Object[] ctorArgs = new Object[] { (uint)0 };
// Read the config again
string moduleString =
modulesConfig.GetString("Setup_" + node.Id, String.Empty);
// Get the port number, if there is one
if (moduleString != String.Empty)
{
// Get the port number from the string
string[] moduleParts = moduleString.Split(new char[] { '/' },
2);
if (moduleParts.Length > 1)
ctorArgs[0] = Convert.ToUInt32(moduleParts[0]);
}
// Try loading and initilaizing the module, using the
// port if appropriate
ISharedRegionModule module = null;
try
{
module = (ISharedRegionModule)Activator.CreateInstance(
node.Type, ctorArgs);
}
catch
{
module = (ISharedRegionModule)Activator.CreateInstance(
node.Type);
}
// OK, we're up and running
m_sharedInstances.Add(module);
module.Initialise(m_openSim.ConfigSource.Source);
}
}
public void PostInitialise ()
{
m_log.DebugFormat("[REGIONMODULES]: PostInitializing...");
// Immediately run PostInitialise on shared modules
foreach (ISharedRegionModule module in m_sharedInstances)
{
module.PostInitialise();
}
}
#endregion
#region IPlugin implementation
private void AddNode(
TypeExtensionNode node, IConfig modulesConfig, Dictionary<RuntimeAddin, IList<int>> loadedModules)
{
IList<int> loadedModuleData;
if (!loadedModules.ContainsKey(node.Addin))
loadedModules.Add(node.Addin, new List<int> { 0, 0, 0 });
loadedModuleData = loadedModules[node.Addin];
if (node.Type.GetInterface(typeof(ISharedRegionModule).ToString()) != null)
{
if (CheckModuleEnabled(node, modulesConfig))
{
m_log.DebugFormat("[REGIONMODULES]: Found shared region module {0}, class {1}", node.Id, node.Type);
m_sharedModules.Add(node);
loadedModuleData[0]++;
}
}
else if (node.Type.GetInterface(typeof(INonSharedRegionModule).ToString()) != null)
{
if (CheckModuleEnabled(node, modulesConfig))
{
m_log.DebugFormat("[REGIONMODULES]: Found non-shared region module {0}, class {1}", node.Id, node.Type);
m_nonSharedModules.Add(node);
loadedModuleData[1]++;
}
}
else
{
m_log.WarnFormat("[REGIONMODULES]: Found unknown type of module {0}, class {1}", node.Id, node.Type);
loadedModuleData[2]++;
}
}
// We don't do that here
//
public void Initialise ()
{
throw new System.NotImplementedException();
}
#endregion
#region IDisposable implementation
// Cleanup
//
public void Dispose ()
{
// We expect that all regions have been removed already
while (m_sharedInstances.Count > 0)
{
m_sharedInstances[0].Close();
m_sharedInstances.RemoveAt(0);
}
m_sharedModules.Clear();
m_nonSharedModules.Clear();
}
#endregion
public string Version
{
get
{
return AddinManager.CurrentAddin.Version;
}
}
public string Name
{
get
{
return m_name;
}
}
#region Region Module interfacesController implementation
/// <summary>
/// Check that the given module is no disabled in the [Modules] section of the config files.
/// </summary>
/// <param name="node"></param>
/// <param name="modulesConfig">The config section</param>
/// <returns>true if the module is enabled, false if it is disabled</returns>
protected bool CheckModuleEnabled(TypeExtensionNode node, IConfig modulesConfig)
{
// Get the config string
string moduleString =
modulesConfig.GetString("Setup_" + node.Id, String.Empty);
// We have a selector
if (moduleString != String.Empty)
{
// Allow disabling modules even if they don't have
// support for it
if (moduleString == "disabled")
return false;
// Split off port, if present
string[] moduleParts = moduleString.Split(new char[] { '/' }, 2);
// Format is [port/][class]
string className = moduleParts[0];
if (moduleParts.Length > 1)
className = moduleParts[1];
// Match the class name if given
if (className != String.Empty &&
node.Type.ToString() != className)
return false;
}
return true;
}
// The root of all evil.
// This is where we handle adding the modules to scenes when they
// load. This means that here we deal with replaceable interfaces,
// nonshared modules, etc.
//
public void AddRegionToModules (Scene scene)
{
Dictionary<Type, ISharedRegionModule> deferredSharedModules =
new Dictionary<Type, ISharedRegionModule>();
Dictionary<Type, INonSharedRegionModule> deferredNonSharedModules =
new Dictionary<Type, INonSharedRegionModule>();
// We need this to see if a module has already been loaded and
// has defined a replaceable interface. It's a generic call,
// so this can't be used directly. It will be used later
Type s = scene.GetType();
MethodInfo mi = s.GetMethod("RequestModuleInterface");
// This will hold the shared modules we actually load
List<ISharedRegionModule> sharedlist =
new List<ISharedRegionModule>();
// Iterate over the shared modules that have been loaded
// Add them to the new Scene
foreach (ISharedRegionModule module in m_sharedInstances)
{
// Here is where we check if a replaceable interface
// is defined. If it is, the module is checked against
// the interfaces already defined. If the interface is
// defined, we simply skip the module. Else, if the module
// defines a replaceable interface, we add it to the deferred
// list.
Type replaceableInterface = module.ReplaceableInterface;
if (replaceableInterface != null)
{
MethodInfo mii = mi.MakeGenericMethod(replaceableInterface);
if (mii.Invoke(scene, new object[0]) != null)
{
m_log.DebugFormat("[REGIONMODULE]: Not loading {0} because another module has registered {1}", module.Name, replaceableInterface.ToString());
continue;
}
deferredSharedModules[replaceableInterface] = module;
m_log.DebugFormat("[REGIONMODULE]: Deferred load of {0}", module.Name);
continue;
}
m_log.DebugFormat("[REGIONMODULE]: Adding scene {0} to shared module {1}",
scene.RegionInfo.RegionName, module.Name);
module.AddRegion(scene);
scene.AddRegionModule(module.Name, module);
sharedlist.Add(module);
}
IConfig modulesConfig =
m_openSim.ConfigSource.Source.Configs["Modules"];
// Scan for, and load, nonshared modules
List<INonSharedRegionModule> list = new List<INonSharedRegionModule>();
foreach (TypeExtensionNode node in m_nonSharedModules)
{
Object[] ctorArgs = new Object[] {0};
// Read the config
string moduleString =
modulesConfig.GetString("Setup_" + node.Id, String.Empty);
// Get the port number, if there is one
if (moduleString != String.Empty)
{
// Get the port number from the string
string[] moduleParts = moduleString.Split(new char[] {'/'},
2);
if (moduleParts.Length > 1)
ctorArgs[0] = Convert.ToUInt32(moduleParts[0]);
}
// Actually load it
INonSharedRegionModule module = null;
Type[] ctorParamTypes = new Type[ctorArgs.Length];
for (int i = 0; i < ctorParamTypes.Length; i++)
ctorParamTypes[i] = ctorArgs[i].GetType();
if (node.Type.GetConstructor(ctorParamTypes) != null)
module = (INonSharedRegionModule)Activator.CreateInstance(node.Type, ctorArgs);
else
module = (INonSharedRegionModule)Activator.CreateInstance(node.Type);
// Check for replaceable interfaces
Type replaceableInterface = module.ReplaceableInterface;
if (replaceableInterface != null)
{
MethodInfo mii = mi.MakeGenericMethod(replaceableInterface);
if (mii.Invoke(scene, new object[0]) != null)
{
m_log.DebugFormat("[REGIONMODULE]: Not loading {0} because another module has registered {1}", module.Name, replaceableInterface.ToString());
continue;
}
deferredNonSharedModules[replaceableInterface] = module;
m_log.DebugFormat("[REGIONMODULE]: Deferred load of {0}", module.Name);
continue;
}
m_log.DebugFormat("[REGIONMODULE]: Adding scene {0} to non-shared module {1}",
scene.RegionInfo.RegionName, module.Name);
// Initialise the module
module.Initialise(m_openSim.ConfigSource.Source);
list.Add(module);
}
// Now add the modules that we found to the scene. If a module
// wishes to override a replaceable interface, it needs to
// register it in Initialise, so that the deferred module
// won't load.
foreach (INonSharedRegionModule module in list)
{
module.AddRegion(scene);
scene.AddRegionModule(module.Name, module);
}
// Now all modules without a replaceable base interface are loaded
// Replaceable modules have either been skipped, or omitted.
// Now scan the deferred modules here
foreach (ISharedRegionModule module in deferredSharedModules.Values)
{
// Determine if the interface has been replaced
Type replaceableInterface = module.ReplaceableInterface;
MethodInfo mii = mi.MakeGenericMethod(replaceableInterface);
if (mii.Invoke(scene, new object[0]) != null)
{
m_log.DebugFormat("[REGIONMODULE]: Not loading {0} because another module has registered {1}", module.Name, replaceableInterface.ToString());
continue;
}
m_log.DebugFormat("[REGIONMODULE]: Adding scene {0} to shared module {1} (deferred)",
scene.RegionInfo.RegionName, module.Name);
// Not replaced, load the module
module.AddRegion(scene);
scene.AddRegionModule(module.Name, module);
sharedlist.Add(module);
}
// Same thing for nonshared modules, load them unless overridden
List<INonSharedRegionModule> deferredlist =
new List<INonSharedRegionModule>();
foreach (INonSharedRegionModule module in deferredNonSharedModules.Values)
{
// Check interface override
Type replaceableInterface = module.ReplaceableInterface;
if (replaceableInterface != null)
{
MethodInfo mii = mi.MakeGenericMethod(replaceableInterface);
if (mii.Invoke(scene, new object[0]) != null)
{
m_log.DebugFormat("[REGIONMODULE]: Not loading {0} because another module has registered {1}", module.Name, replaceableInterface.ToString());
continue;
}
}
m_log.DebugFormat("[REGIONMODULE]: Adding scene {0} to non-shared module {1} (deferred)",
scene.RegionInfo.RegionName, module.Name);
module.Initialise(m_openSim.ConfigSource.Source);
list.Add(module);
deferredlist.Add(module);
}
// Finally, load valid deferred modules
foreach (INonSharedRegionModule module in deferredlist)
{
module.AddRegion(scene);
scene.AddRegionModule(module.Name, module);
}
// This is needed for all module types. Modules will register
// Interfaces with scene in AddScene, and will also need a means
// to access interfaces registered by other modules. Without
// this extra method, a module attempting to use another modules's
// interface would be successful only depending on load order,
// which can't be depended upon, or modules would need to resort
// to ugly kludges to attempt to request interfaces when needed
// and unneccessary caching logic repeated in all modules.
// The extra function stub is just that much cleaner
//
foreach (ISharedRegionModule module in sharedlist)
{
module.RegionLoaded(scene);
}
foreach (INonSharedRegionModule module in list)
{
module.RegionLoaded(scene);
}
}
public void RemoveRegionFromModules (Scene scene)
{
foreach (IRegionModuleBase module in scene.RegionModules.Values)
{
m_log.DebugFormat("[REGIONMODULE]: Removing scene {0} from module {1}",
scene.RegionInfo.RegionName, module.Name);
module.RemoveRegion(scene);
if (module is INonSharedRegionModule)
{
// as we were the only user, this instance has to die
module.Close();
}
}
scene.RegionModules.Clear();
}
#endregion
}
}
| |
// Copyright (c) 2007-2017 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu/master/LICENCE
using System;
using OpenTK;
using OpenTK.Graphics;
using osu.Framework.Allocation;
using osu.Framework.Configuration;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Game.Graphics;
using osu.Game.Graphics.Sprites;
using osu.Game.Overlays;
namespace osu.Game.Users
{
public class UserPanel : ClickableContainer
{
private readonly User user;
private const float height = 100;
private const float content_padding = 10;
private const float status_height = 30;
private readonly Container statusBar;
private readonly Box statusBg;
private readonly OsuSpriteText statusMessage;
public readonly Bindable<UserStatus> Status = new Bindable<UserStatus>();
public new Action Action;
public UserPanel(User user)
{
this.user = user;
Height = height - status_height;
Masking = true;
CornerRadius = 5;
EdgeEffect = new EdgeEffectParameters
{
Type = EdgeEffectType.Shadow,
Colour = Color4.Black.Opacity(0.25f),
Radius = 4,
};
Children = new Drawable[]
{
new AsyncLoadWrapper(new UserCoverBackground(user)
{
RelativeSizeAxes = Axes.Both,
Anchor = Anchor.Centre,
Origin = Anchor.Centre,
FillMode = FillMode.Fill,
OnLoadComplete = d => d.FadeInFromZero(200),
}) { RelativeSizeAxes = Axes.Both },
new Box
{
RelativeSizeAxes = Axes.Both,
Colour = Color4.Black.Opacity(0.7f),
},
new Container
{
RelativeSizeAxes = Axes.X,
AutoSizeAxes = Axes.Y,
Padding = new MarginPadding { Top = content_padding, Left = content_padding, Right = content_padding },
Children = new Drawable[]
{
new UpdateableAvatar
{
Size = new Vector2(height - status_height - content_padding * 2),
User = user,
Masking = true,
CornerRadius = 5,
EdgeEffect = new EdgeEffectParameters
{
Type = EdgeEffectType.Shadow,
Colour = Color4.Black.Opacity(0.25f),
Radius = 4,
},
},
new Container
{
RelativeSizeAxes = Axes.Both,
Padding = new MarginPadding { Left = height - status_height - content_padding },
Children = new Drawable[]
{
new OsuSpriteText
{
Text = user.Username,
TextSize = 18,
Font = @"Exo2.0-SemiBoldItalic",
},
new FillFlowContainer
{
Anchor = Anchor.BottomLeft,
Origin = Anchor.BottomLeft,
AutoSizeAxes = Axes.X,
Height = 20f,
Direction = FillDirection.Horizontal,
Spacing = new Vector2(5f, 0f),
Children = new Drawable[]
{
new DrawableFlag(user.Country?.FlagName)
{
Width = 30f,
RelativeSizeAxes = Axes.Y,
},
new Container
{
Width = 40f,
RelativeSizeAxes = Axes.Y,
},
new CircularContainer
{
Width = 20f,
RelativeSizeAxes = Axes.Y,
},
},
},
},
},
},
},
statusBar = new Container
{
Anchor = Anchor.BottomLeft,
Origin = Anchor.BottomLeft,
RelativeSizeAxes = Axes.X,
Alpha = 0f,
Children = new Drawable[]
{
statusBg = new Box
{
RelativeSizeAxes = Axes.Both,
Alpha = 0.5f,
},
new FillFlowContainer
{
Anchor = Anchor.Centre,
Origin = Anchor.Centre,
AutoSizeAxes = Axes.Both,
Spacing = new Vector2(5f, 0f),
Children = new Drawable[]
{
new SpriteIcon
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
Icon = FontAwesome.fa_circle_o,
Shadow = true,
Size = new Vector2(14),
},
statusMessage = new OsuSpriteText
{
Anchor = Anchor.CentreLeft,
Origin = Anchor.CentreLeft,
Font = @"Exo2.0-Semibold",
},
},
},
},
},
};
}
[BackgroundDependencyLoader(permitNulls: true)]
private void load(OsuColour colours, UserProfileOverlay profile)
{
Status.ValueChanged += displayStatus;
Status.ValueChanged += status => statusBg.FadeColour(status?.GetAppropriateColour(colours) ?? colours.Gray5, 500, Easing.OutQuint);
base.Action = () =>
{
Action?.Invoke();
profile?.ShowUser(user);
};
}
protected override void LoadComplete()
{
base.LoadComplete();
Status.TriggerChange();
}
private void displayStatus(UserStatus status)
{
const float transition_duration = 500;
if (status == null)
{
statusBar.ResizeHeightTo(0f, transition_duration, Easing.OutQuint);
statusBar.FadeOut(transition_duration, Easing.OutQuint);
this.ResizeHeightTo(height - status_height, transition_duration, Easing.OutQuint);
}
else
{
statusBar.ResizeHeightTo(status_height, transition_duration, Easing.OutQuint);
statusBar.FadeIn(transition_duration, Easing.OutQuint);
this.ResizeHeightTo(height, transition_duration, Easing.OutQuint);
statusMessage.Text = status.Message;
}
}
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using System.Collections.Generic;
using System.Linq;
using osu.Framework.Extensions.EnumExtensions;
using osuTK;
using osu.Game.Audio;
using osu.Game.Beatmaps;
using osu.Game.Beatmaps.ControlPoints;
using osu.Game.Rulesets.Mania.MathUtils;
using osu.Game.Rulesets.Mania.Objects;
using osu.Game.Rulesets.Objects;
using osu.Game.Rulesets.Objects.Types;
using osu.Framework.Extensions.IEnumerableExtensions;
namespace osu.Game.Rulesets.Mania.Beatmaps.Patterns.Legacy
{
internal class HitObjectPatternGenerator : PatternGenerator
{
public PatternType StairType { get; private set; }
private readonly PatternType convertType;
public HitObjectPatternGenerator(FastRandom random, HitObject hitObject, ManiaBeatmap beatmap, Pattern previousPattern, double previousTime, Vector2 previousPosition, double density,
PatternType lastStair, IBeatmap originalBeatmap)
: base(random, hitObject, beatmap, previousPattern, originalBeatmap)
{
StairType = lastStair;
TimingControlPoint timingPoint = beatmap.ControlPointInfo.TimingPointAt(hitObject.StartTime);
EffectControlPoint effectPoint = beatmap.ControlPointInfo.EffectPointAt(hitObject.StartTime);
var positionData = hitObject as IHasPosition;
float positionSeparation = ((positionData?.Position ?? Vector2.Zero) - previousPosition).Length;
double timeSeparation = hitObject.StartTime - previousTime;
if (timeSeparation <= 80)
{
// More than 187 BPM
convertType |= PatternType.ForceNotStack | PatternType.KeepSingle;
}
else if (timeSeparation <= 95)
{
// More than 157 BPM
convertType |= PatternType.ForceNotStack | PatternType.KeepSingle | lastStair;
}
else if (timeSeparation <= 105)
{
// More than 140 BPM
convertType |= PatternType.ForceNotStack | PatternType.LowProbability;
}
else if (timeSeparation <= 125)
{
// More than 120 BPM
convertType |= PatternType.ForceNotStack;
}
else if (timeSeparation <= 135 && positionSeparation < 20)
{
// More than 111 BPM stream
convertType |= PatternType.Cycle | PatternType.KeepSingle;
}
else if (timeSeparation <= 150 && positionSeparation < 20)
{
// More than 100 BPM stream
convertType |= PatternType.ForceStack | PatternType.LowProbability;
}
else if (positionSeparation < 20 && density >= timingPoint.BeatLength / 2.5)
{
// Low density stream
convertType |= PatternType.Reverse | PatternType.LowProbability;
}
else if (density < timingPoint.BeatLength / 2.5 || effectPoint.KiaiMode)
{
// High density
}
else
convertType |= PatternType.LowProbability;
if (!convertType.HasFlagFast(PatternType.KeepSingle))
{
if (HitObject.Samples.Any(s => s.Name == HitSampleInfo.HIT_FINISH) && TotalColumns != 8)
convertType |= PatternType.Mirror;
else if (HitObject.Samples.Any(s => s.Name == HitSampleInfo.HIT_CLAP))
convertType |= PatternType.Gathered;
}
}
public override IEnumerable<Pattern> Generate()
{
Pattern generateCore()
{
var pattern = new Pattern();
if (TotalColumns == 1)
{
addToPattern(pattern, 0);
return pattern;
}
int lastColumn = PreviousPattern.HitObjects.FirstOrDefault()?.Column ?? 0;
if (convertType.HasFlagFast(PatternType.Reverse) && PreviousPattern.HitObjects.Any())
{
// Generate a new pattern by copying the last hit objects in reverse-column order
for (int i = RandomStart; i < TotalColumns; i++)
{
if (PreviousPattern.ColumnHasObject(i))
addToPattern(pattern, RandomStart + TotalColumns - i - 1);
}
return pattern;
}
if (convertType.HasFlagFast(PatternType.Cycle) && PreviousPattern.HitObjects.Count() == 1
// If we convert to 7K + 1, let's not overload the special key
&& (TotalColumns != 8 || lastColumn != 0)
// Make sure the last column was not the centre column
&& (TotalColumns % 2 == 0 || lastColumn != TotalColumns / 2))
{
// Generate a new pattern by cycling backwards (similar to Reverse but for only one hit object)
int column = RandomStart + TotalColumns - lastColumn - 1;
addToPattern(pattern, column);
return pattern;
}
if (convertType.HasFlagFast(PatternType.ForceStack) && PreviousPattern.HitObjects.Any())
{
// Generate a new pattern by placing on the already filled columns
for (int i = RandomStart; i < TotalColumns; i++)
{
if (PreviousPattern.ColumnHasObject(i))
addToPattern(pattern, i);
}
return pattern;
}
if (PreviousPattern.HitObjects.Count() == 1)
{
if (convertType.HasFlagFast(PatternType.Stair))
{
// Generate a new pattern by placing on the next column, cycling back to the start if there is no "next"
int targetColumn = lastColumn + 1;
if (targetColumn == TotalColumns)
targetColumn = RandomStart;
addToPattern(pattern, targetColumn);
return pattern;
}
if (convertType.HasFlagFast(PatternType.ReverseStair))
{
// Generate a new pattern by placing on the previous column, cycling back to the end if there is no "previous"
int targetColumn = lastColumn - 1;
if (targetColumn == RandomStart - 1)
targetColumn = TotalColumns - 1;
addToPattern(pattern, targetColumn);
return pattern;
}
}
if (convertType.HasFlagFast(PatternType.KeepSingle))
return generateRandomNotes(1);
if (convertType.HasFlagFast(PatternType.Mirror))
{
if (ConversionDifficulty > 6.5)
return generateRandomPatternWithMirrored(0.12, 0.38, 0.12);
if (ConversionDifficulty > 4)
return generateRandomPatternWithMirrored(0.12, 0.17, 0);
return generateRandomPatternWithMirrored(0.12, 0, 0);
}
if (ConversionDifficulty > 6.5)
{
if (convertType.HasFlagFast(PatternType.LowProbability))
return generateRandomPattern(0.78, 0.42, 0, 0);
return generateRandomPattern(1, 0.62, 0, 0);
}
if (ConversionDifficulty > 4)
{
if (convertType.HasFlagFast(PatternType.LowProbability))
return generateRandomPattern(0.35, 0.08, 0, 0);
return generateRandomPattern(0.52, 0.15, 0, 0);
}
if (ConversionDifficulty > 2)
{
if (convertType.HasFlagFast(PatternType.LowProbability))
return generateRandomPattern(0.18, 0, 0, 0);
return generateRandomPattern(0.45, 0, 0, 0);
}
return generateRandomPattern(0, 0, 0, 0);
}
var p = generateCore();
foreach (var obj in p.HitObjects)
{
if (convertType.HasFlagFast(PatternType.Stair) && obj.Column == TotalColumns - 1)
StairType = PatternType.ReverseStair;
if (convertType.HasFlagFast(PatternType.ReverseStair) && obj.Column == RandomStart)
StairType = PatternType.Stair;
}
return p.Yield();
}
/// <summary>
/// Generates random notes.
/// <para>
/// This will generate as many as it can up to <paramref name="noteCount"/>, accounting for
/// any stacks if <see cref="convertType"/> is forcing no stacks.
/// </para>
/// </summary>
/// <param name="noteCount">The amount of notes to generate.</param>
/// <returns>The <see cref="Pattern"/> containing the hit objects.</returns>
private Pattern generateRandomNotes(int noteCount)
{
var pattern = new Pattern();
bool allowStacking = !convertType.HasFlagFast(PatternType.ForceNotStack);
if (!allowStacking)
noteCount = Math.Min(noteCount, TotalColumns - RandomStart - PreviousPattern.ColumnWithObjects);
int nextColumn = GetColumn((HitObject as IHasXPosition)?.X ?? 0, true);
for (int i = 0; i < noteCount; i++)
{
nextColumn = allowStacking
? FindAvailableColumn(nextColumn, nextColumn: getNextColumn, patterns: pattern)
: FindAvailableColumn(nextColumn, nextColumn: getNextColumn, patterns: new[] { pattern, PreviousPattern });
addToPattern(pattern, nextColumn);
}
return pattern;
int getNextColumn(int last)
{
if (convertType.HasFlagFast(PatternType.Gathered))
{
last++;
if (last == TotalColumns)
last = RandomStart;
}
else
last = GetRandomColumn();
return last;
}
}
/// <summary>
/// Whether this hit object can generate a note in the special column.
/// </summary>
private bool hasSpecialColumn => HitObject.Samples.Any(s => s.Name == HitSampleInfo.HIT_CLAP) && HitObject.Samples.Any(s => s.Name == HitSampleInfo.HIT_FINISH);
/// <summary>
/// Generates a random pattern.
/// </summary>
/// <param name="p2">Probability for 2 notes to be generated.</param>
/// <param name="p3">Probability for 3 notes to be generated.</param>
/// <param name="p4">Probability for 4 notes to be generated.</param>
/// <param name="p5">Probability for 5 notes to be generated.</param>
/// <returns>The <see cref="Pattern"/> containing the hit objects.</returns>
private Pattern generateRandomPattern(double p2, double p3, double p4, double p5)
{
var pattern = new Pattern();
pattern.Add(generateRandomNotes(getRandomNoteCount(p2, p3, p4, p5)));
if (RandomStart > 0 && hasSpecialColumn)
addToPattern(pattern, 0);
return pattern;
}
/// <summary>
/// Generates a random pattern which has both normal and mirrored notes.
/// </summary>
/// <param name="centreProbability">The probability for a note to be added to the centre column.</param>
/// <param name="p2">Probability for 2 notes to be generated.</param>
/// <param name="p3">Probability for 3 notes to be generated.</param>
/// <returns>The <see cref="Pattern"/> containing the hit objects.</returns>
private Pattern generateRandomPatternWithMirrored(double centreProbability, double p2, double p3)
{
if (convertType.HasFlagFast(PatternType.ForceNotStack))
return generateRandomPattern(1 / 2f + p2 / 2, p2, (p2 + p3) / 2, p3);
var pattern = new Pattern();
int noteCount = getRandomNoteCountMirrored(centreProbability, p2, p3, out bool addToCentre);
int columnLimit = (TotalColumns % 2 == 0 ? TotalColumns : TotalColumns - 1) / 2;
int nextColumn = GetRandomColumn(upperBound: columnLimit);
for (int i = 0; i < noteCount; i++)
{
nextColumn = FindAvailableColumn(nextColumn, upperBound: columnLimit, patterns: pattern);
// Add normal note
addToPattern(pattern, nextColumn);
// Add mirrored note
addToPattern(pattern, RandomStart + TotalColumns - nextColumn - 1);
}
if (addToCentre)
addToPattern(pattern, TotalColumns / 2);
if (RandomStart > 0 && hasSpecialColumn)
addToPattern(pattern, 0);
return pattern;
}
/// <summary>
/// Generates a count of notes to be generated from a list of probabilities.
/// </summary>
/// <param name="p2">Probability for 2 notes to be generated.</param>
/// <param name="p3">Probability for 3 notes to be generated.</param>
/// <param name="p4">Probability for 4 notes to be generated.</param>
/// <param name="p5">Probability for 5 notes to be generated.</param>
/// <returns>The amount of notes to be generated.</returns>
private int getRandomNoteCount(double p2, double p3, double p4, double p5)
{
switch (TotalColumns)
{
case 2:
p2 = 0;
p3 = 0;
p4 = 0;
p5 = 0;
break;
case 3:
p2 = Math.Min(p2, 0.1);
p3 = 0;
p4 = 0;
p5 = 0;
break;
case 4:
p2 = Math.Min(p2, 0.23);
p3 = Math.Min(p3, 0.04);
p4 = 0;
p5 = 0;
break;
case 5:
p3 = Math.Min(p3, 0.15);
p4 = Math.Min(p4, 0.03);
p5 = 0;
break;
}
if (HitObject.Samples.Any(s => s.Name == HitSampleInfo.HIT_CLAP))
p2 = 1;
return GetRandomNoteCount(p2, p3, p4, p5);
}
/// <summary>
/// Generates a count of notes to be generated from a list of probabilities.
/// </summary>
/// <param name="centreProbability">The probability for a note to be added to the centre column.</param>
/// <param name="p2">Probability for 2 notes to be generated.</param>
/// <param name="p3">Probability for 3 notes to be generated.</param>
/// <param name="addToCentre">Whether to add a note to the centre column.</param>
/// <returns>The amount of notes to be generated. The note to be added to the centre column will NOT be part of this count.</returns>
private int getRandomNoteCountMirrored(double centreProbability, double p2, double p3, out bool addToCentre)
{
switch (TotalColumns)
{
case 2:
centreProbability = 0;
p2 = 0;
p3 = 0;
break;
case 3:
centreProbability = Math.Min(centreProbability, 0.03);
p2 = 0;
p3 = 0;
break;
case 4:
centreProbability = 0;
// Stable requires rngValue > x, which is an inverse-probability. Lazer uses true probability (1 - x).
// But multiplying this value by 2 (stable) is not the same operation as dividing it by 2 (lazer),
// so it needs to be converted to from a probability and then back after the multiplication.
p2 = 1 - Math.Max((1 - p2) * 2, 0.8);
p3 = 0;
break;
case 5:
centreProbability = Math.Min(centreProbability, 0.03);
p3 = 0;
break;
case 6:
centreProbability = 0;
// Stable requires rngValue > x, which is an inverse-probability. Lazer uses true probability (1 - x).
// But multiplying this value by 2 (stable) is not the same operation as dividing it by 2 (lazer),
// so it needs to be converted to from a probability and then back after the multiplication.
p2 = 1 - Math.Max((1 - p2) * 2, 0.5);
p3 = 1 - Math.Max((1 - p3) * 2, 0.85);
break;
}
// The stable values were allowed to exceed 1, which indicate <0% probability.
// These values needs to be clamped otherwise GetRandomNoteCount() will throw an exception.
p2 = Math.Clamp(p2, 0, 1);
p3 = Math.Clamp(p3, 0, 1);
double centreVal = Random.NextDouble();
int noteCount = GetRandomNoteCount(p2, p3);
addToCentre = TotalColumns % 2 != 0 && noteCount != 3 && centreVal > 1 - centreProbability;
return noteCount;
}
/// <summary>
/// Constructs and adds a note to a pattern.
/// </summary>
/// <param name="pattern">The pattern to add to.</param>
/// <param name="column">The column to add the note to.</param>
private void addToPattern(Pattern pattern, int column)
{
pattern.Add(new Note
{
StartTime = HitObject.StartTime,
Samples = HitObject.Samples,
Column = column
});
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Net;
using log4net.Config;
using Nini.Config;
using NUnit.Framework;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenSim.Framework;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Tests.Common;
using OpenSim.Tests.Common.Mock;
namespace OpenSim.Region.ClientStack.LindenUDP.Tests
{
/// <summary>
/// This will contain basic tests for the LindenUDP client stack
/// </summary>
[TestFixture]
public class BasicCircuitTests : OpenSimTestCase
{
private Scene m_scene;
private TestLLUDPServer m_udpServer;
[TestFixtureSetUp]
public void FixtureInit()
{
// Don't allow tests to be bamboozled by asynchronous events. Execute everything on the same thread.
Util.FireAndForgetMethod = FireAndForgetMethod.RegressionTest;
}
[TestFixtureTearDown]
public void TearDown()
{
// We must set this back afterwards, otherwise later tests will fail since they're expecting multiple
// threads. Possibly, later tests should be rewritten so none of them require async stuff (which regression
// tests really shouldn't).
Util.FireAndForgetMethod = Util.DefaultFireAndForgetMethod;
}
[SetUp]
public override void SetUp()
{
base.SetUp();
m_scene = new SceneHelpers().SetupScene();
}
/// <summary>
/// Build an object name packet for test purposes
/// </summary>
/// <param name="objectLocalId"></param>
/// <param name="objectName"></param>
private ObjectNamePacket BuildTestObjectNamePacket(uint objectLocalId, string objectName)
{
ObjectNamePacket onp = new ObjectNamePacket();
ObjectNamePacket.ObjectDataBlock odb = new ObjectNamePacket.ObjectDataBlock();
odb.LocalID = objectLocalId;
odb.Name = Utils.StringToBytes(objectName);
onp.ObjectData = new ObjectNamePacket.ObjectDataBlock[] { odb };
onp.Header.Zerocoded = false;
return onp;
}
private void AddUdpServer()
{
AddUdpServer(new IniConfigSource());
}
private void AddUdpServer(IniConfigSource configSource)
{
uint port = 0;
AgentCircuitManager acm = m_scene.AuthenticateHandler;
m_udpServer = new TestLLUDPServer(IPAddress.Any, ref port, 0, false, configSource, acm);
m_udpServer.AddScene(m_scene);
}
/// <summary>
/// Used by tests that aren't testing this stage.
/// </summary>
private ScenePresence AddClient()
{
UUID myAgentUuid = TestHelpers.ParseTail(0x1);
UUID mySessionUuid = TestHelpers.ParseTail(0x2);
uint myCircuitCode = 123456;
IPEndPoint testEp = new IPEndPoint(IPAddress.Loopback, 999);
UseCircuitCodePacket uccp = new UseCircuitCodePacket();
UseCircuitCodePacket.CircuitCodeBlock uccpCcBlock
= new UseCircuitCodePacket.CircuitCodeBlock();
uccpCcBlock.Code = myCircuitCode;
uccpCcBlock.ID = myAgentUuid;
uccpCcBlock.SessionID = mySessionUuid;
uccp.CircuitCode = uccpCcBlock;
byte[] uccpBytes = uccp.ToBytes();
UDPPacketBuffer upb = new UDPPacketBuffer(testEp, uccpBytes.Length);
upb.DataLength = uccpBytes.Length; // God knows why this isn't set by the constructor.
Buffer.BlockCopy(uccpBytes, 0, upb.Data, 0, uccpBytes.Length);
AgentCircuitData acd = new AgentCircuitData();
acd.AgentID = myAgentUuid;
acd.SessionID = mySessionUuid;
m_scene.AuthenticateHandler.AddNewCircuit(myCircuitCode, acd);
m_udpServer.PacketReceived(upb);
return m_scene.GetScenePresence(myAgentUuid);
}
/// <summary>
/// Test adding a client to the stack
/// </summary>
[Test]
public void TestAddClient()
{
TestHelpers.InMethod();
// TestHelpers.EnableLogging();
AddUdpServer();
UUID myAgentUuid = TestHelpers.ParseTail(0x1);
UUID mySessionUuid = TestHelpers.ParseTail(0x2);
uint myCircuitCode = 123456;
IPEndPoint testEp = new IPEndPoint(IPAddress.Loopback, 999);
UseCircuitCodePacket uccp = new UseCircuitCodePacket();
UseCircuitCodePacket.CircuitCodeBlock uccpCcBlock
= new UseCircuitCodePacket.CircuitCodeBlock();
uccpCcBlock.Code = myCircuitCode;
uccpCcBlock.ID = myAgentUuid;
uccpCcBlock.SessionID = mySessionUuid;
uccp.CircuitCode = uccpCcBlock;
byte[] uccpBytes = uccp.ToBytes();
UDPPacketBuffer upb = new UDPPacketBuffer(testEp, uccpBytes.Length);
upb.DataLength = uccpBytes.Length; // God knows why this isn't set by the constructor.
Buffer.BlockCopy(uccpBytes, 0, upb.Data, 0, uccpBytes.Length);
m_udpServer.PacketReceived(upb);
// Presence shouldn't exist since the circuit manager doesn't know about this circuit for authentication yet
Assert.That(m_scene.GetScenePresence(myAgentUuid), Is.Null);
AgentCircuitData acd = new AgentCircuitData();
acd.AgentID = myAgentUuid;
acd.SessionID = mySessionUuid;
m_scene.AuthenticateHandler.AddNewCircuit(myCircuitCode, acd);
m_udpServer.PacketReceived(upb);
// Should succeed now
ScenePresence sp = m_scene.GetScenePresence(myAgentUuid);
Assert.That(sp.UUID, Is.EqualTo(myAgentUuid));
Assert.That(m_udpServer.PacketsSent.Count, Is.EqualTo(1));
Packet packet = m_udpServer.PacketsSent[0];
Assert.That(packet, Is.InstanceOf(typeof(PacketAckPacket)));
PacketAckPacket ackPacket = packet as PacketAckPacket;
Assert.That(ackPacket.Packets.Length, Is.EqualTo(1));
Assert.That(ackPacket.Packets[0].ID, Is.EqualTo(0));
}
[Test]
public void TestLogoutClientDueToAck()
{
TestHelpers.InMethod();
// TestHelpers.EnableLogging();
IniConfigSource ics = new IniConfigSource();
IConfig config = ics.AddConfig("ClientStack.LindenUDP");
config.Set("AckTimeout", -1);
AddUdpServer(ics);
ScenePresence sp = AddClient();
m_udpServer.ClientOutgoingPacketHandler(sp.ControllingClient, true, false, false);
ScenePresence spAfterAckTimeout = m_scene.GetScenePresence(sp.UUID);
Assert.That(spAfterAckTimeout, Is.Null);
// TestHelpers.DisableLogging();
}
// /// <summary>
// /// Test removing a client from the stack
// /// </summary>
// [Test]
// public void TestRemoveClient()
// {
// TestHelper.InMethod();
//
// uint myCircuitCode = 123457;
//
// TestLLUDPServer testLLUDPServer;
// TestLLPacketServer testLLPacketServer;
// AgentCircuitManager acm;
// SetupStack(new MockScene(), out testLLUDPServer, out testLLPacketServer, out acm);
// AddClient(myCircuitCode, new IPEndPoint(IPAddress.Loopback, 1000), testLLUDPServer, acm);
//
// testLLUDPServer.RemoveClientCircuit(myCircuitCode);
// Assert.IsFalse(testLLUDPServer.HasCircuit(myCircuitCode));
//
// // Check that removing a non-existant circuit doesn't have any bad effects
// testLLUDPServer.RemoveClientCircuit(101);
// Assert.IsFalse(testLLUDPServer.HasCircuit(101));
// }
//
// /// <summary>
// /// Make sure that the client stack reacts okay to malformed packets
// /// </summary>
// [Test]
// public void TestMalformedPacketSend()
// {
// TestHelper.InMethod();
//
// uint myCircuitCode = 123458;
// EndPoint testEp = new IPEndPoint(IPAddress.Loopback, 1001);
// MockScene scene = new MockScene();
//
// TestLLUDPServer testLLUDPServer;
// TestLLPacketServer testLLPacketServer;
// AgentCircuitManager acm;
// SetupStack(scene, out testLLUDPServer, out testLLPacketServer, out acm);
// AddClient(myCircuitCode, testEp, testLLUDPServer, acm);
//
// byte[] data = new byte[] { 0x01, 0x02, 0x03, 0x04 };
//
// // Send two garbled 'packets' in succession
// testLLUDPServer.LoadReceive(data, testEp);
// testLLUDPServer.LoadReceive(data, testEp);
// testLLUDPServer.ReceiveData(null);
//
// // Check that we are still here
// Assert.IsTrue(testLLUDPServer.HasCircuit(myCircuitCode));
// Assert.That(testLLPacketServer.GetTotalPacketsReceived(), Is.EqualTo(0));
//
// // Check that sending a valid packet to same circuit still succeeds
// Assert.That(scene.ObjectNameCallsReceived, Is.EqualTo(0));
//
// testLLUDPServer.LoadReceive(BuildTestObjectNamePacket(1, "helloooo"), testEp);
// testLLUDPServer.ReceiveData(null);
//
// Assert.That(testLLPacketServer.GetTotalPacketsReceived(), Is.EqualTo(1));
// Assert.That(testLLPacketServer.GetPacketsReceivedFor(PacketType.ObjectName), Is.EqualTo(1));
// }
//
// /// <summary>
// /// Test that the stack continues to work even if some client has caused a
// /// SocketException on Socket.BeginReceive()
// /// </summary>
// [Test]
// public void TestExceptionOnBeginReceive()
// {
// TestHelper.InMethod();
//
// MockScene scene = new MockScene();
//
// uint circuitCodeA = 130000;
// EndPoint epA = new IPEndPoint(IPAddress.Loopback, 1300);
// UUID agentIdA = UUID.Parse("00000000-0000-0000-0000-000000001300");
// UUID sessionIdA = UUID.Parse("00000000-0000-0000-0000-000000002300");
//
// uint circuitCodeB = 130001;
// EndPoint epB = new IPEndPoint(IPAddress.Loopback, 1301);
// UUID agentIdB = UUID.Parse("00000000-0000-0000-0000-000000001301");
// UUID sessionIdB = UUID.Parse("00000000-0000-0000-0000-000000002301");
//
// TestLLUDPServer testLLUDPServer;
// TestLLPacketServer testLLPacketServer;
// AgentCircuitManager acm;
// SetupStack(scene, out testLLUDPServer, out testLLPacketServer, out acm);
// AddClient(circuitCodeA, epA, agentIdA, sessionIdA, testLLUDPServer, acm);
// AddClient(circuitCodeB, epB, agentIdB, sessionIdB, testLLUDPServer, acm);
//
// testLLUDPServer.LoadReceive(BuildTestObjectNamePacket(1, "packet1"), epA);
// testLLUDPServer.LoadReceive(BuildTestObjectNamePacket(1, "packet2"), epB);
// testLLUDPServer.LoadReceiveWithBeginException(epA);
// testLLUDPServer.LoadReceive(BuildTestObjectNamePacket(2, "packet3"), epB);
// testLLUDPServer.ReceiveData(null);
//
// Assert.IsFalse(testLLUDPServer.HasCircuit(circuitCodeA));
//
// Assert.That(testLLPacketServer.GetTotalPacketsReceived(), Is.EqualTo(3));
// Assert.That(testLLPacketServer.GetPacketsReceivedFor(PacketType.ObjectName), Is.EqualTo(3));
// }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using Baseline;
using Marten.Events;
using Marten.Internal.Operations;
using Marten.Services;
using Marten.Storage;
#nullable enable
namespace Marten.Internal.Sessions
{
public abstract partial class DocumentSessionBase: QuerySession, IDocumentSession
{
internal readonly ISessionWorkTracker _workTracker;
private Dictionary<string, NestedTenantSession>? _byTenant;
/// <summary>
/// Used for code generation
/// </summary>
#nullable disable
protected DocumentSessionBase(StoreOptions options): base(options)
{
}
#nullable enable
protected DocumentSessionBase(DocumentStore store, SessionOptions sessionOptions, IManagedConnection database,
ITenant tenant): base(store, sessionOptions, database, tenant)
{
Concurrency = sessionOptions.ConcurrencyChecks;
_workTracker = new UnitOfWork(this);
}
internal DocumentSessionBase(DocumentStore store, SessionOptions sessionOptions, IManagedConnection database,
ITenant tenant, ISessionWorkTracker workTracker): base(store, sessionOptions, database, tenant)
{
Concurrency = sessionOptions.ConcurrencyChecks;
_workTracker = workTracker;
}
internal ITenancy Tenancy => DocumentStore.As<DocumentStore>().Tenancy;
internal ISessionWorkTracker WorkTracker => _workTracker;
public void Store<T>(IEnumerable<T> entities) where T : notnull
{
Store(entities?.ToArray()!);
}
public void Store<T>(params T[] entities) where T : notnull
{
if (entities == null)
{
throw new ArgumentNullException(nameof(entities));
}
if (typeof(T).IsGenericEnumerable())
{
throw new ArgumentOutOfRangeException(typeof(T).Name,
"Do not use IEnumerable<T> here as the document type. Either cast entities to an array instead or use the IEnumerable<T> Store() overload instead.");
}
store(entities);
}
public void Store<T>(T entity, Guid version) where T : notnull
{
assertNotDisposed();
var storage = StorageFor<T>();
storage.Store(this, entity, version);
var op = storage.Upsert(entity, this, Tenant);
_workTracker.Add(op);
}
public void Insert<T>(IEnumerable<T> entities) where T : notnull
{
Insert(entities.ToArray());
}
public void Insert<T>(params T[] entities) where T : notnull
{
assertNotDisposed();
if (entities == null)
{
throw new ArgumentNullException(nameof(entities));
}
if (typeof(T).IsGenericEnumerable())
{
throw new ArgumentOutOfRangeException(typeof(T).Name,
"Do not use IEnumerable<T> here as the document type. You may need to cast entities to an array instead.");
}
if (typeof(T) == typeof(object))
{
InsertObjects(entities.OfType<object>());
}
else
{
var storage = StorageFor<T>();
foreach (var entity in entities)
{
storage.Store(this, entity);
var op = storage.Insert(entity, this, Tenant);
_workTracker.Add(op);
}
}
}
public void Update<T>(IEnumerable<T> entities) where T : notnull
{
Update(entities.ToArray());
}
public void Update<T>(params T[] entities) where T : notnull
{
assertNotDisposed();
if (entities == null)
{
throw new ArgumentNullException(nameof(entities));
}
if (typeof(T).IsGenericEnumerable())
{
throw new ArgumentOutOfRangeException(typeof(T).Name,
"Do not use IEnumerable<T> here as the document type. You may need to cast entities to an array instead.");
}
if (typeof(T) == typeof(object))
{
InsertObjects(entities.OfType<object>());
}
else
{
var storage = StorageFor<T>();
foreach (var entity in entities)
{
storage.Store(this, entity);
var op = storage.Update(entity, this, Tenant);
_workTracker.Add(op);
}
}
}
public void InsertObjects(IEnumerable<object> documents)
{
assertNotDisposed();
documents.Where(x => x != null).GroupBy(x => x.GetType()).Each(group =>
{
var handler = typeof(InsertHandler<>).CloseAndBuildAs<IHandler>(group.Key);
handler.Store(this, group);
});
}
public IUnitOfWork PendingChanges => _workTracker;
public void StoreObjects(IEnumerable<object> documents)
{
assertNotDisposed();
var documentsGroupedByType = documents
.Where(x => x != null)
.GroupBy(x => x.GetType());
foreach (var group in documentsGroupedByType)
{
// Build the right handler for the group type
var handler = typeof(Handler<>).CloseAndBuildAs<IHandler>(group.Key);
handler.Store(this, group);
}
}
public new IEventStore Events => (IEventStore)base.Events;
protected override IQueryEventStore CreateEventStore(DocumentStore store, ITenant tenant)
=> new EventStore(this, store, tenant);
public void QueueOperation(IStorageOperation storageOperation)
{
_workTracker.Add(storageOperation);
}
public virtual void Eject<T>(T document) where T : notnull
{
StorageFor<T>().Eject(this, document);
_workTracker.Eject(document);
ChangeTrackers.RemoveAll(x => ReferenceEquals(document, x.Document));
}
public virtual void EjectAllOfType(Type type)
{
ItemMap.Remove(type);
_workTracker.EjectAllOfType(type);
ChangeTrackers.RemoveAll(x => x.Document.GetType().CanBeCastTo(type));
}
public void SetHeader(string key, object value)
{
Headers ??= new Dictionary<string, object>();
Headers[key] = value;
}
public object? GetHeader(string key)
{
return Headers?[key];
}
/// <summary>
/// Access data from another tenant and apply document or event updates to this
/// IDocumentSession for a separate tenant
/// </summary>
/// <param name="tenantId"></param>
/// <returns></returns>
public new ITenantOperations ForTenant(string tenantId)
{
_byTenant ??= new Dictionary<string, NestedTenantSession>();
if (_byTenant.TryGetValue(tenantId, out var tenantSession))
{
return tenantSession;
}
var tenant = Options.Tenancy[tenantId];
tenantSession = new NestedTenantSession(this, tenant);
_byTenant[tenantId] = tenantSession;
return tenantSession;
}
protected internal abstract void ejectById<T>(long id) where T : notnull;
protected internal abstract void ejectById<T>(int id) where T : notnull;
protected internal abstract void ejectById<T>(Guid id) where T : notnull;
protected internal abstract void ejectById<T>(string id) where T : notnull;
protected internal virtual void processChangeTrackers()
{
// Nothing
}
protected internal virtual void resetDirtyChecking()
{
// Nothing
}
private void store<T>(IEnumerable<T> entities) where T : notnull
{
assertNotDisposed();
if (typeof(T) == typeof(object))
{
StoreObjects(entities.OfType<object>());
}
else
{
var storage = StorageFor<T>();
if (Concurrency == ConcurrencyChecks.Disabled && storage.UseOptimisticConcurrency)
{
foreach (var entity in entities)
{
// Put it in the identity map -- if necessary
storage.Store(this, entity);
var overwrite = storage.Overwrite(entity, this, Tenant);
_workTracker.Add(overwrite);
}
}
else
{
foreach (var entity in entities)
{
// Put it in the identity map -- if necessary
storage.Store(this, entity);
var upsert = storage.Upsert(entity, this, Tenant);
_workTracker.Add(upsert);
}
}
}
}
public void EjectPatchedTypes(IUnitOfWork changes)
{
var patchedTypes = changes.Operations().Where(x => x.Role() == OperationRole.Patch).Select(x => x.DocumentType).Distinct().ToArray();
foreach (var type in patchedTypes) EjectAllOfType(type);
}
internal interface IHandler
{
void Store(IDocumentSession session, IEnumerable<object> objects);
}
internal class Handler<T>: IHandler where T : notnull
{
public void Store(IDocumentSession session, IEnumerable<object> objects)
{
// Delegate to the Store<T>() method
session.Store(objects.OfType<T>().ToArray());
}
}
internal class InsertHandler<T>: IHandler where T : notnull
{
public void Store(IDocumentSession session, IEnumerable<object> objects)
{
session.Insert(objects.OfType<T>().ToArray());
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Threading;
namespace System.IO.Tests
{
public class MemoryStreamTests
{
[Fact]
public static void MemoryStream_Write_BeyondCapacity()
{
using (MemoryStream memoryStream = new MemoryStream())
{
long origLength = memoryStream.Length;
byte[] bytes = new byte[10];
for (int i = 0; i < bytes.Length; i++)
bytes[i] = (byte)i;
int spanPastEnd = 5;
memoryStream.Seek(spanPastEnd, SeekOrigin.End);
Assert.Equal(memoryStream.Length + spanPastEnd, memoryStream.Position);
// Test Write
memoryStream.Write(bytes, 0, bytes.Length);
long pos = memoryStream.Position;
Assert.Equal(pos, origLength + spanPastEnd + bytes.Length);
Assert.Equal(memoryStream.Length, origLength + spanPastEnd + bytes.Length);
// Verify bytes were correct.
memoryStream.Position = origLength;
byte[] newData = new byte[bytes.Length + spanPastEnd];
int n = memoryStream.Read(newData, 0, newData.Length);
Assert.Equal(n, newData.Length);
for (int i = 0; i < spanPastEnd; i++)
Assert.Equal(0, newData[i]);
for (int i = 0; i < bytes.Length; i++)
Assert.Equal(bytes[i], newData[i + spanPastEnd]);
}
}
[Fact]
public static void MemoryStream_WriteByte_BeyondCapacity()
{
using (MemoryStream memoryStream = new MemoryStream())
{
long origLength = memoryStream.Length;
byte[] bytes = new byte[10];
for (int i = 0; i < bytes.Length; i++)
bytes[i] = (byte)i;
int spanPastEnd = 5;
memoryStream.Seek(spanPastEnd, SeekOrigin.End);
Assert.Equal(memoryStream.Length + spanPastEnd, memoryStream.Position);
// Test WriteByte
origLength = memoryStream.Length;
memoryStream.Position = memoryStream.Length + spanPastEnd;
memoryStream.WriteByte(0x42);
long expected = origLength + spanPastEnd + 1;
Assert.Equal(expected, memoryStream.Position);
Assert.Equal(expected, memoryStream.Length);
}
}
[Fact]
public static void MemoryStream_GetPositionTest_Negative()
{
int iArrLen = 100;
byte[] bArr = new byte[iArrLen];
using (MemoryStream ms = new MemoryStream(bArr))
{
long iCurrentPos = ms.Position;
for (int i = -1; i > -6; i--)
{
Assert.Throws<ArgumentOutOfRangeException>(() => ms.Position = i);
Assert.Equal(ms.Position, iCurrentPos);
}
}
}
[Fact]
public static void MemoryStream_LengthTest()
{
using (MemoryStream ms2 = new MemoryStream())
{
// [] Get the Length when position is at length
ms2.SetLength(50);
ms2.Position = 50;
StreamWriter sw2 = new StreamWriter(ms2);
for (char c = 'a'; c < 'f'; c++)
sw2.Write(c);
sw2.Flush();
Assert.Equal(55, ms2.Length);
// Somewhere in the middle (set the length to be shorter.)
ms2.SetLength(30);
Assert.Equal(30, ms2.Length);
Assert.Equal(30, ms2.Position);
// Increase the length
ms2.SetLength(100);
Assert.Equal(100, ms2.Length);
Assert.Equal(30, ms2.Position);
}
}
[Fact]
public static void MemoryStream_LengthTest_Negative()
{
using (MemoryStream ms2 = new MemoryStream())
{
Assert.Throws<ArgumentOutOfRangeException>(() => ms2.SetLength(Int64.MaxValue));
Assert.Throws<ArgumentOutOfRangeException>(() => ms2.SetLength(-2));
}
}
[Fact]
public static void MemoryStream_ReadTest_Negative()
{
MemoryStream ms2 = new MemoryStream();
Assert.Throws<ArgumentNullException>(() => ms2.Read(null, 0, 0));
Assert.Throws<ArgumentOutOfRangeException>(() => ms2.Read(new byte[] { 1 }, -1, 0));
Assert.Throws<ArgumentOutOfRangeException>(() => ms2.Read(new byte[] { 1 }, 0, -1));
Assert.Throws<ArgumentException>(() => ms2.Read(new byte[] { 1 }, 2, 0));
Assert.Throws<ArgumentException>(() => ms2.Read(new byte[] { 1 }, 0, 2));
ms2.Dispose();
Assert.Throws<ObjectDisposedException>(() => ms2.Read(new byte[] { 1 }, 0, 1));
}
[Fact]
public static void MemoryStream_WriteToTests()
{
using (MemoryStream ms2 = new MemoryStream())
{
byte[] bytArrRet;
byte[] bytArr = new byte[] { byte.MinValue, byte.MaxValue, 1, 2, 3, 4, 5, 6, 128, 250 };
// [] Write to FileStream, check the filestream
ms2.Write(bytArr, 0, bytArr.Length);
using (MemoryStream readonlyStream = new MemoryStream())
{
ms2.WriteTo(readonlyStream);
readonlyStream.Flush();
readonlyStream.Position = 0;
bytArrRet = new byte[(int)readonlyStream.Length];
readonlyStream.Read(bytArrRet, 0, (int)readonlyStream.Length);
for (int i = 0; i < bytArr.Length; i++)
{
Assert.Equal(bytArr[i], bytArrRet[i]);
}
}
}
// [] Write to memoryStream, check the memoryStream
using (MemoryStream ms2 = new MemoryStream())
using (MemoryStream ms3 = new MemoryStream())
{
byte[] bytArrRet;
byte[] bytArr = new byte[] { byte.MinValue, byte.MaxValue, 1, 2, 3, 4, 5, 6, 128, 250 };
ms2.Write(bytArr, 0, bytArr.Length);
ms2.WriteTo(ms3);
ms3.Position = 0;
bytArrRet = new byte[(int)ms3.Length];
ms3.Read(bytArrRet, 0, (int)ms3.Length);
for (int i = 0; i < bytArr.Length; i++)
{
Assert.Equal(bytArr[i], bytArrRet[i]);
}
}
}
[Fact]
public static void MemoryStream_WriteToTests_Negative()
{
using (MemoryStream ms2 = new MemoryStream())
{
Assert.Throws<ArgumentNullException>(() => ms2.WriteTo(null));
ms2.Write(new byte[] { 1 }, 0, 1);
MemoryStream readonlyStream = new MemoryStream(new byte[1028], false);
Assert.Throws<NotSupportedException>(() => ms2.WriteTo(readonlyStream));
readonlyStream.Dispose();
// [] Pass in a closed stream
Assert.Throws<ObjectDisposedException>(() => ms2.WriteTo(readonlyStream));
}
}
[Fact]
public static void MemoryStream_CopyTo_Invalid()
{
MemoryStream memoryStream;
using (memoryStream = new MemoryStream())
{
AssertExtensions.Throws<ArgumentNullException>("destination", () => memoryStream.CopyTo(destination: null));
// Validate the destination parameter first.
AssertExtensions.Throws<ArgumentNullException>("destination", () => memoryStream.CopyTo(destination: null, bufferSize: 0));
AssertExtensions.Throws<ArgumentNullException>("destination", () => memoryStream.CopyTo(destination: null, bufferSize: -1));
// Then bufferSize.
AssertExtensions.Throws<ArgumentOutOfRangeException>("bufferSize", () => memoryStream.CopyTo(Stream.Null, bufferSize: 0)); // 0-length buffer doesn't make sense.
AssertExtensions.Throws<ArgumentOutOfRangeException>("bufferSize", () => memoryStream.CopyTo(Stream.Null, bufferSize: -1));
}
// After the Stream is disposed, we should fail on all CopyTos.
AssertExtensions.Throws<ArgumentOutOfRangeException>("bufferSize", () => memoryStream.CopyTo(Stream.Null, bufferSize: 0)); // Not before bufferSize is validated.
AssertExtensions.Throws<ArgumentOutOfRangeException>("bufferSize", () => memoryStream.CopyTo(Stream.Null, bufferSize: -1));
MemoryStream disposedStream = memoryStream;
// We should throw first for the source being disposed...
Assert.Throws<ObjectDisposedException>(() => memoryStream.CopyTo(disposedStream, 1));
// Then for the destination being disposed.
memoryStream = new MemoryStream();
Assert.Throws<ObjectDisposedException>(() => memoryStream.CopyTo(disposedStream, 1));
// Then we should check whether we can't read but can write, which isn't possible for non-subclassed MemoryStreams.
// THen we should check whether the destination can read but can't write.
var readOnlyStream = new DelegateStream(
canReadFunc: () => true,
canWriteFunc: () => false
);
Assert.Throws<NotSupportedException>(() => memoryStream.CopyTo(readOnlyStream, 1));
}
[Theory]
[MemberData(nameof(CopyToData))]
public void CopyTo(Stream source, byte[] expected)
{
using (var destination = new MemoryStream())
{
source.CopyTo(destination);
Assert.InRange(source.Position, source.Length, int.MaxValue); // Copying the data should have read to the end of the stream or stayed past the end.
Assert.Equal(expected, destination.ToArray());
}
}
public static IEnumerable<object[]> CopyToData()
{
// Stream is positioned @ beginning of data
var data1 = new byte[] { 1, 2, 3 };
var stream1 = new MemoryStream(data1);
yield return new object[] { stream1, data1 };
// Stream is positioned in the middle of data
var data2 = new byte[] { 0xff, 0xf3, 0xf0 };
var stream2 = new MemoryStream(data2) { Position = 1 };
yield return new object[] { stream2, new byte[] { 0xf3, 0xf0 } };
// Stream is positioned after end of data
var data3 = data2;
var stream3 = new MemoryStream(data3) { Position = data3.Length + 1 };
yield return new object[] { stream3, Array.Empty<byte>() };
}
}
}
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.Algo.Candles.Compression.Algo
File: StorageCandleBuilderSource.cs
Created: 2015, 11, 11, 2:32 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Algo.Candles.Compression
{
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Ecng.Collections;
using Ecng.Common;
using Ecng.ComponentModel;
using MoreLinq;
using StockSharp.Algo.Storages;
using StockSharp.BusinessEntities;
using StockSharp.Localization;
/// <summary>
/// The base data source for <see cref="ICandleBuilder"/>, which receives data from the external storage.
/// </summary>
/// <typeparam name="TSourceValue">The source data type (for example, <see cref="Trade"/>).</typeparam>
public abstract class StorageCandleBuilderSource<TSourceValue> : ConvertableCandleBuilderSource<TSourceValue>, IStorageCandleSource
{
[DebuggerDisplay("{Series} {Reader}")]
private sealed class SeriesInfo
{
public SeriesInfo(CandleSeries series, IEnumerator<TSourceValue> reader)
{
if (series == null)
throw new ArgumentNullException(nameof(series));
if (reader == null)
throw new ArgumentNullException(nameof(reader));
Series = series;
Reader = reader;
}
public CandleSeries Series { get; }
public IEnumerator<TSourceValue> Reader { get; }
public bool IsStopping { get; set; }
}
private readonly CachedSynchronizedDictionary<CandleSeries, SeriesInfo> _series = new CachedSynchronizedDictionary<CandleSeries, SeriesInfo>();
/// <summary>
/// Initialize <see cref="StorageCandleBuilderSource{T}"/>.
/// </summary>
protected StorageCandleBuilderSource()
{
ThreadingHelper
.Thread(OnLoading)
.Background(true)
.Name(GetType().Name)
.Launch();
}
/// <summary>
/// The source priority by speed (0 - the best).
/// </summary>
public override int SpeedPriority => 1;
/// <summary>
/// Market data storage.
/// </summary>
public IStorageRegistry StorageRegistry { get; set; }
private IMarketDataDrive _drive;
/// <summary>
/// The storage which is used by default. By default, <see cref="IStorageRegistry.DefaultDrive"/> is used.
/// </summary>
public IMarketDataDrive Drive
{
get
{
if (_drive == null)
{
if (StorageRegistry != null)
return StorageRegistry.DefaultDrive;
}
return _drive;
}
set
{
_drive = value;
}
}
/// <summary>
/// To get the data storage <typeparamref name="TSourceValue" />.
/// </summary>
/// <param name="security">Security.</param>
/// <returns>Market data storage.</returns>
protected abstract IMarketDataStorage<TSourceValue> GetStorage(Security security);
/// <summary>
/// To get time ranges for which this source of passed candles series has data.
/// </summary>
/// <param name="series">Candles series.</param>
/// <returns>Time ranges.</returns>
public override IEnumerable<Range<DateTimeOffset>> GetSupportedRanges(CandleSeries series)
{
if (series == null)
throw new ArgumentNullException(nameof(series));
if (StorageRegistry == null)
return Enumerable.Empty<Range<DateTimeOffset>>();
return GetStorage(series.Security).GetRanges();
}
/// <summary>
/// To get data.
/// </summary>
/// <param name="series">Candles series.</param>
/// <param name="from">The initial date from which you need to get data.</param>
/// <param name="to">The final date by which you need to get data.</param>
/// <returns>Data. If data does not exist for the specified range then <see langword="null" /> will be returned.</returns>
protected virtual IEnumerable<TSourceValue> GetValues(CandleSeries series, DateTimeOffset from, DateTimeOffset to)
{
var storage = GetStorage(series.Security);
var range = storage.GetRange(from, to);
if (range == null)
return null;
return storage.Load(range.Min, range.Max);
}
/// <summary>
/// To send data request.
/// </summary>
/// <param name="series">The candles series for which data receiving should be started.</param>
/// <param name="from">The initial date from which you need to get data.</param>
/// <param name="to">The final date by which you need to get data.</param>
public override void Start(CandleSeries series, DateTimeOffset from, DateTimeOffset to)
{
if (series == null)
throw new ArgumentNullException(nameof(series));
var values = GetValues(series, from, to);
if (values == null)
return;
lock (_series.SyncRoot)
{
if (_series.ContainsKey(series))
throw new ArgumentException(LocalizedStrings.Str650Params.Put(series), nameof(series));
_series.Add(series, new SeriesInfo(series, values.GetEnumerator()));
if (_series.Count == 1)
Monitor.Pulse(_series.SyncRoot);
}
}
/// <summary>
/// To stop data receiving starting through <see cref="Start"/>.
/// </summary>
/// <param name="series">Candles series.</param>
public override void Stop(CandleSeries series)
{
lock (_series.SyncRoot)
{
var info = _series.TryGetValue(series);
if (info != null)
info.IsStopping = true;
}
}
private void OnLoading()
{
try
{
while (!IsDisposed)
{
var removingSeries = new List<CandleSeries>();
foreach (var info in _series.CachedValues)
{
if (info.IsStopping)
removingSeries.Add(info.Series);
else
{
var values = new List<TSourceValue>(100);
for (var i = 0; i < 100; i++)
{
if (!info.Reader.MoveNext())
{
removingSeries.Add(info.Series);
break;
}
values.Add(info.Reader.Current);
}
if (values.Count > 0)
NewSourceValues(info.Series, values);
}
}
if (removingSeries.Count > 0)
{
lock (_series.SyncRoot)
removingSeries.ForEach(s => _series.Remove(s));
removingSeries.ForEach(RaiseStopped);
}
lock (_series.SyncRoot)
{
if (_series.IsEmpty())
Monitor.Wait(_series.SyncRoot);
}
}
}
catch (Exception ex)
{
RaiseError(ex);
_series.CopyAndClear().ForEach(p => RaiseStopped(p.Key));
}
}
/// <summary>
/// Release resources.
/// </summary>
public override void Dispose()
{
base.Dispose();
lock (_series.SyncRoot)
{
_series.ForEach(p => p.Value.IsStopping = true);
Monitor.Pulse(_series.SyncRoot);
}
}
}
/// <summary>
/// The data source for <see cref="CandleBuilder{T}"/> getting tick trades from the external storage <see cref="IStorageRegistry"/>.
/// </summary>
public class TradeStorageCandleBuilderSource : StorageCandleBuilderSource<Trade>
{
/// <summary>
/// Initializes a new instance of the <see cref="TradeStorageCandleBuilderSource"/>.
/// </summary>
public TradeStorageCandleBuilderSource()
{
}
/// <summary>
/// To get the storage of tick trades.
/// </summary>
/// <param name="security">Security.</param>
/// <returns>The storage of tick trades.</returns>
protected override IMarketDataStorage<Trade> GetStorage(Security security)
{
return StorageRegistry.GetTradeStorage(security, Drive);
}
}
/// <summary>
/// The data source for <see cref="CandleBuilder{T}"/> getting tick trades from the external storage <see cref="IStorageRegistry"/>.
/// </summary>
public class MarketDepthStorageCandleBuilderSource : StorageCandleBuilderSource<MarketDepth>
{
/// <summary>
/// Initializes a new instance of the <see cref="MarketDepthStorageCandleBuilderSource"/>.
/// </summary>
public MarketDepthStorageCandleBuilderSource()
{
}
/// <summary>
/// To get the order books storage.
/// </summary>
/// <param name="security">Security.</param>
/// <returns>The order books storage.</returns>
protected override IMarketDataStorage<MarketDepth> GetStorage(Security security)
{
return StorageRegistry.GetMarketDepthStorage(security, Drive);
}
}
/// <summary>
/// The data source for <see cref="CandleBuilder{T}"/> getting tick trades from the external storage <see cref="IStorageRegistry"/>.
/// </summary>
public class OrderLogStorageCandleBuilderSource : StorageCandleBuilderSource<Trade>
{
/// <summary>
/// Initializes a new instance of the <see cref="OrderLogStorageCandleBuilderSource"/>.
/// </summary>
public OrderLogStorageCandleBuilderSource()
{
}
/// <summary>
/// The source priority by speed (0 - the best).
/// </summary>
public override int SpeedPriority => 2;
/// <summary>
/// To get the data storage.
/// </summary>
/// <param name="security">Security.</param>
/// <returns>Market data storage.</returns>
protected override IMarketDataStorage<Trade> GetStorage(Security security)
{
throw new NotSupportedException();
}
/// <summary>
/// To get time ranges for which this source of passed candles series has data.
/// </summary>
/// <param name="series">Candles series.</param>
/// <returns>Time ranges.</returns>
public override IEnumerable<Range<DateTimeOffset>> GetSupportedRanges(CandleSeries series)
{
if (series == null)
throw new ArgumentNullException(nameof(series));
if (StorageRegistry == null)
return Enumerable.Empty<Range<DateTimeOffset>>();
return StorageRegistry.GetOrderLogStorage(series.Security, Drive).GetRanges();
}
/// <summary>
/// To get data.
/// </summary>
/// <param name="series">Candles series.</param>
/// <param name="from">The initial date from which you need to get data.</param>
/// <param name="to">The final date by which you need to get data.</param>
/// <returns>Data. If data does not exist for the specified range then <see langword="null" /> will be returned.</returns>
protected override IEnumerable<Trade> GetValues(CandleSeries series, DateTimeOffset from, DateTimeOffset to)
{
var storage = StorageRegistry.GetOrderLogStorage(series.Security, Drive);
var range = storage.GetRange(from, to);
if (range == null)
return null;
return storage.Load(range.Min, range.Max).ToTrades();
}
}
}
| |
/*
* Region.cs - Region management for X applications.
*
* Copyright (C) 2002, 2003 Southern Storm Software, Pty Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
namespace Xsharp
{
using System;
using Xsharp.Types;
/// <summary>
/// <para>The <see cref="T:Xsharp.Region"/> class manages a
/// region structure, constructed from a list of rectangles.</para>
///
/// <para>Regions are used in X to manage non-rectangular clip regions,
/// window shapes, and expose areas.</para>
/// </summary>
public sealed class Region : IDisposable, ICloneable
{
// Pointer to the raw X11 region structure.
private IntPtr region;
/// <summary>
/// <para>Construct a new <see cref="T:Xsharp.Region"/>
/// instance that is initially set to the empty region.</para>
/// </summary>
public Region()
{
lock(typeof(Region))
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
}
/// <summary>
/// <para>Construct a new <see cref="T:Xsharp.Region"/>
/// instance that is initially set to the same area as another
/// region object.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region object to copy. If <paramref name="r"/>
/// is <see langword="null"/> or has been disposed, the new region
/// will be set to the empty region.</para>
/// </param>
public Region(Region r)
{
lock(typeof(Region))
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
Union(r);
}
}
/// <summary>
/// <para>Construct a new <see cref="T:Xsharp.Region"/>
/// instance that is initially set to the same area as
/// a rectangle.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to set the region to initially.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
public Region(Rectangle rect)
{
lock(typeof(Region))
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
Union(rect.x, rect.y, rect.width, rect.height);
}
}
/// <summary>
/// <para>Construct a new <see cref="T:Xsharp.Region"/>
/// instance that is initially set to the same area as
/// an explicitly-specified rectangle.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
public Region(int x, int y, int width, int height)
{
lock(typeof(Region))
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
Union(x, y, width, height);
}
}
/// <summary>
/// <para>Construct a new <see cref="T:Xsharp.Region"/>
/// instance that is initially set to a polygon.</para>
/// </summary>
///
/// <param name="points">
/// <para>An array of points that defines the polygon.</para>
/// </param>
///
/// <param name="fillRule">
/// <para>The area fill rule to use for the polygon.</para>
/// </param>
///
/// <exception cref="T:System.ArgumentNullException">
/// <para>Raised if <paramref name="points"/> is <see langword="null"/>.
/// </para>
/// </exception>
///
/// <exception cref="T:System.ArgumentOutOfRangeException">
/// <para>Raised if <paramref name="points"/> has less than 3
/// elements.</para>
/// </exception>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if any of the elements in <paramref name="points"/>
/// has co-ordinates that are out of range, or if
/// <paramref name="fillRule"/> is invalid.</para>
/// </exception>
public Region(Point[] points, FillRule fillRule)
{
// Validate the parameters.
if(points == null)
{
throw new ArgumentNullException("points");
}
else if(points.Length < 3)
{
throw new ArgumentOutOfRangeException
("points", S._("X_PolygonNeeds3Pts"));
}
// Convert "points" into an "XPoint[]" array.
XPoint[] pts = new XPoint [points.Length];
try
{
checked
{
for(int index = 0; index < points.Length; ++index)
{
pts[index].x = (short)(points[index].x);
pts[index].y = (short)(points[index].y);
}
}
}
catch(OverflowException)
{
throw new XException(S._("X_PointCoordRange"));
}
// Validate the fill rule.
if(fillRule != FillRule.EvenOddRule &&
fillRule != FillRule.WindingRule)
{
throw new XException
(String.Format(S._("X_FillRule"), (int)fillRule));
}
// Create the polygon region.
lock(typeof(Region))
{
region = Xlib.XPolygonRegion(pts, pts.Length,
(int)fillRule);
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
}
/// <summary>
/// <para>Destroy an instance of <see cref="T:Xsharp.Region"/>.</para>
/// </summary>
~Region()
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
Xlib.XDestroyRegion(region);
region = IntPtr.Zero;
}
}
}
/// <summary>
/// <para>Dispose an instance of <see cref="T:Xsharp.Region"/>.</para>
/// </summary>
///
/// <remarks>
/// <para>This method implements the <see cref="T:System.IDisposeable"/>
/// interface.</para>
/// </remarks>
public void Dispose()
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
Xlib.XDestroyRegion(region);
region = IntPtr.Zero;
}
}
}
/// <summary>
/// <para>Clone an instance of <see cref="T:Xsharp.Region"/>.</para>
/// </summary>
///
/// <remarks>
/// <para>This method implements the <see cref="T:System.ICloneable"/>
/// interface.</para>
/// </remarks>
public Object Clone()
{
Region r = new Region();
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
Xlib.XUnionRegion(region, r.region, r.region);
}
}
return r;
}
/// <summary>
/// <para>Determine if this region is empty.</para>
/// </summary>
///
/// <returns>
/// <para>Returns <see langword="true"/> if this region is empty
/// or disposed; <see langword="false"/> otherwise.</para>
/// </returns>
public bool IsEmpty()
{
lock(typeof(Region))
{
return (region == IntPtr.Zero ||
Xlib.XEmptyRegion(region) != 0);
}
}
/// <summary>
/// <para>Determine if two regions are equal.</para>
/// </summary>
///
/// <param name="obj">
/// <para>The region object to compare against.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the two regions are equal;
/// <see langword="false"/> otherwise. For the purposes of this
/// method, disposed regions are considered the same as empty
/// regions.</para>
/// </returns>
public override bool Equals(Object obj)
{
lock(typeof(Region))
{
Region other = (obj as Region);
if(other != null)
{
if(this == other)
{
return true;
}
else if(IsEmpty())
{
return other.IsEmpty();
}
else if(other.IsEmpty())
{
return false;
}
else
{
return (Xlib.XEqualRegion
(region, other.region) != 0);
}
}
else
{
return false;
}
}
}
/// <summary>
/// <para>Get the hash code for a region.</para>
/// </summary>
///
/// <returns>
/// <para>Returns the hash code.</para>
/// </returns>
public override int GetHashCode()
{
lock(typeof(Region))
{
// TODO
return 0;
}
}
/// <summary>
/// <para>Union a rectangle with this region.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to union with this region.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to <paramref name="rect"/>.</para>
/// </remarks>
public void Union(Rectangle rect)
{
lock(typeof(Region))
{
XRectangle xrect = new XRectangle
(rect.x, rect.y, rect.width, rect.height);
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
Xlib.XUnionRectWithRegion(ref xrect, region, region);
}
}
/// <summary>
/// <para>Union an explicitly-specified rectangle with this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to the rectangle.</para>
/// </remarks>
public void Union(int x, int y, int width, int height)
{
lock(typeof(Region))
{
XRectangle xrect = new XRectangle(x, y, width, height);
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
Xlib.XUnionRectWithRegion(ref xrect, region, region);
}
}
/// <summary>
/// <para>Union another region with this one.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region to union with this one. If <paramref name="r"/>
/// is <see langword="null"/>, the same as <see langword="this"/>, or
/// disposed, then this method will do nothing.</para>
/// </param>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to a copy of <paramref name="r"/>.</para>
/// </remarks>
public void Union(Region r)
{
lock(typeof(Region))
{
if(r != null && r != this && r.region != IntPtr.Zero)
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
Xlib.XUnionRegion(region, r.region, region);
}
}
}
/// <summary>
/// <para>Intersect a rectangle with this region.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to intersect with this region.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to empty.</para>
/// </remarks>
public void Intersect(Rectangle rect)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else
{
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
XRectangle xrect = new XRectangle
(rect.x, rect.y, rect.width, rect.height);
Xlib.XUnionRectWithRegion(ref xrect, reg, reg);
Xlib.XIntersectRegion(reg, region, region);
Xlib.XDestroyRegion(reg);
}
}
}
/// <summary>
/// <para>Intersect an explicitly-specified rectangle with
/// this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to empty.</para>
/// </remarks>
public void Intersect(int x, int y, int width, int height)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else
{
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
XRectangle xrect = new XRectangle(x, y, width, height);
Xlib.XUnionRectWithRegion(ref xrect, reg, reg);
Xlib.XIntersectRegion(reg, region, region);
Xlib.XDestroyRegion(reg);
}
}
}
/// <summary>
/// <para>Intersect another region with this one.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region to intersect with this one. If
/// <paramref name="r"/> is <see langword="null"/> or disposed,
/// the method operates as an intersection with the empty region.</para>
/// </param>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to empty.</para>
/// </remarks>
public void Intersect(Region r)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else if(r == null || r.region == IntPtr.Zero)
{
Xlib.XDestroyRegion(region);
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else if(r != this)
{
Xlib.XIntersectRegion(r.region, region, region);
}
}
}
/// <summary>
/// <para>Subtract a rectangle from this region.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to subtract from this region.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to empty.</para>
/// </remarks>
public void Subtract(Rectangle rect)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else
{
XRectangle xrect = new XRectangle
(rect.x, rect.y, rect.width, rect.height);
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
Xlib.XUnionRectWithRegion(ref xrect, reg, reg);
Xlib.XSubtractRegion(region, reg, region);
Xlib.XDestroyRegion(reg);
}
}
}
/// <summary>
/// <para>Subtract an explicitly-specified rectangle from
/// this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to empty.</para>
/// </remarks>
public void Subtract(int x, int y, int width, int height)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else
{
XRectangle xrect = new XRectangle(x, y, width, height);
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
Xlib.XUnionRectWithRegion(ref xrect, reg, reg);
Xlib.XSubtractRegion(region, reg, region);
Xlib.XDestroyRegion(reg);
}
}
}
/// <summary>
/// <para>Subtract another region from this one.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region to subtract from this one. If
/// <paramref name="r"/> is <see langword="null"/> or disposed,
/// the method does nothing.</para>
/// </param>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be re-created
/// with its initial contents set to empty.</para>
/// </remarks>
public void Subtract(Region r)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else if(r == null || r.region == IntPtr.Zero)
{
// Nothing to do here: subtracting an empty region.
}
else if(r == this)
{
// Subtract the region from itself: result is empty.
Xlib.XDestroyRegion(region);
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else
{
Xlib.XSubtractRegion(region, r.region, region);
}
}
}
/// <summary>
/// <para>Xor a rectangle with this region.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to xor with this region.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be treated
/// as empty prior to the xor operation.</para>
/// </remarks>
public void Xor(Rectangle rect)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
XRectangle xrect = new XRectangle
(rect.x, rect.y, rect.width, rect.height);
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
Xlib.XUnionRectWithRegion(ref xrect, reg, reg);
Xlib.XXorRegion(region, reg, region);
Xlib.XDestroyRegion(reg);
}
}
/// <summary>
/// <para>Xor an explicitly-specified rectangle with
/// this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <exception cref="T:Xsharp.XException">
/// <para>Raised if the rectangle co-ordinates are out of range.</para>
/// </exception>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be treated
/// as empty prior to the xor operation.</para>
/// </remarks>
public void Xor(int x, int y, int width, int height)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
XRectangle xrect = new XRectangle(x, y, width, height);
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
Xlib.XUnionRectWithRegion(ref xrect, reg, reg);
Xlib.XXorRegion(region, reg, region);
Xlib.XDestroyRegion(reg);
}
}
/// <summary>
/// <para>Xor another region with this one.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region to xor with this one. If
/// <paramref name="r"/> is <see langword="null"/> or disposed,
/// then it will be treated as the empty region.</para>
/// </param>
///
/// <remarks>
/// <para>If this region has been disposed, then it will be treated
/// as empty prior to the xor operation.</para>
/// </remarks>
public void Xor(Region r)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
if(r == null || r.region == IntPtr.Zero)
{
// Xor of an empty and a non-empty region gives
// the non-empty region as the result.
}
else if(r == this)
{
// Xor the region with itself: result is empty.
Xlib.XDestroyRegion(region);
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
else
{
Xlib.XXorRegion(region, r.region, region);
}
}
}
/// <summary>
/// <para>Set this region to empty.</para>
/// </summary>
public void SetEmpty()
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
Xlib.XDestroyRegion(region);
}
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
}
/// <summary>
/// <para>Offset this region by a specified delta.</para>
/// </summary>
///
/// <param name="dx">
/// <para>The X delta adjustment to apply to the region.</para>
/// </param>
///
/// <param name="dy">
/// <para>The Y delta adjustment to apply to the region.</para>
/// </param>
public void Offset(int dx, int dy)
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
Xlib.XOffsetRegion(region, dx, dy);
}
}
}
/// <summary>
/// <para>Shrink this region by a specified delta.</para>
/// </summary>
///
/// <param name="dx">
/// <para>The X delta adjustment to apply to the region.</para>
/// </param>
///
/// <param name="dy">
/// <para>The Y delta adjustment to apply to the region.</para>
/// </param>
public void Shrink(int dx, int dy)
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
Xlib.XShrinkRegion(region, dx, dy);
}
}
}
/// <summary>
/// <para>Determine if a point is contained within this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the point.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the point.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the point is contained
/// within this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Contains(int x, int y)
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
return (Xlib.XPointInRegion(region, x, y) != 0);
}
else
{
return false;
}
}
}
/// <summary>
/// <para>Determine if a point is contained within this region.</para>
/// </summary>
///
/// <param name="point">
/// <para>The point to test.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the point is contained
/// within this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Contains(Point point)
{
lock(typeof(Region))
{
if(region != IntPtr.Zero)
{
return (Xlib.XPointInRegion
(region, point.x, point.y) != 0);
}
else
{
return false;
}
}
}
/// <summary>
/// <para>Determine if a rectangle is completely contained
/// in this region.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to test against this region.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the rectangle is completely
/// contained within this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Contains(Rectangle rect)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return false;
}
else
{
return (Xlib.XRectInRegion(region, rect.x, rect.y,
(uint)(rect.width),
(uint)(rect.height))
== 1); // RectangleIn
}
}
}
/// <summary>
/// <para>Determine if an explicitly-specified rectangle is completely
/// contained in this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the rectangle is completely
/// contained within this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Contains(int x, int y, int width, int height)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return false;
}
else
{
return (Xlib.XRectInRegion(region, x, y,
(uint)(width),
(uint)(height))
== 1); // RectangleIn
}
}
}
/// <summary>
/// <para>Determine if another region is completely contained
/// in this region.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region to test against this region.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if <paramref name="r"/> is
/// completely contained within this region; <see langword="false"/>
/// otherwise.</para>
/// </returns>
public bool Contains(Region r)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return false;
}
else if(r == null || r.region == IntPtr.Zero)
{
return false;
}
else if(r == this)
{
return true;
}
else
{
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
Xlib.XIntersectRegion(region, r.region, reg);
bool result = (Xlib.XEqualRegion(reg, r.region) != 0);
Xlib.XDestroyRegion(reg);
return result;
}
}
}
/// <summary>
/// <para>Determine if a rectangle is overlaps with this region.</para>
/// </summary>
///
/// <param name="rect">
/// <para>The rectangle to test against this region.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the rectangle overlaps
/// with this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Overlaps(Rectangle rect)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return false;
}
else
{
return (Xlib.XRectInRegion(region, rect.x, rect.y,
(uint)(rect.width),
(uint)(rect.height))
!= 0); // RectangleOut
}
}
}
/// <summary>
/// <para>Determine if an explicitly-specified rectangle overlaps
/// with this region.</para>
/// </summary>
///
/// <param name="x">
/// <para>The X co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="y">
/// <para>The Y co-ordinate of the top-left corner of the rectangle.</para>
/// </param>
///
/// <param name="width">
/// <para>The width of the rectangle.</para>
/// </param>
///
/// <param name="height">
/// <para>The height of the rectangle.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if the rectangle overlaps
/// with this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Overlaps(int x, int y, int width, int height)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return false;
}
else
{
return (Xlib.XRectInRegion(region, x, y,
(uint)(width),
(uint)(height))
!= 0); // RectangleOut
}
}
}
/// <summary>
/// <para>Determine if another region overlaps with this region.</para>
/// </summary>
///
/// <param name="r">
/// <para>The other region to test against this region.</para>
/// </param>
///
/// <returns>
/// <para>Returns <see langword="true"/> if <paramref name="r"/> overlaps
/// with this region; <see langword="false"/> otherwise.</para>
/// </returns>
public bool Overlaps(Region r)
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return false;
}
else if(r == null || r.region == IntPtr.Zero)
{
return false;
}
else if(r == this)
{
return true;
}
else
{
IntPtr reg = Xlib.XCreateRegion();
if(reg == IntPtr.Zero)
{
Display.OutOfMemory();
}
Xlib.XIntersectRegion(region, r.region, reg);
bool result = (Xlib.XEmptyRegion(reg) == 0);
Xlib.XDestroyRegion(reg);
return result;
}
}
}
/// <summary>
/// <para>Get the smallest rectangle that completely contains
/// this region.</para>
/// </summary>
///
/// <returns>
/// <para>A <see cref="T:Xsharp.Rectangle"/> instance corresponding
/// to the smallest rectangle that contains the region.</para>
/// </returns>
public Rectangle ClipBox()
{
lock(typeof(Region))
{
Rectangle rect;
if(region == IntPtr.Zero)
{
rect = new Rectangle(0, 0, 0, 0);
}
else
{
XRectangle xrect;
Xlib.XClipBox(region, out xrect);
rect = new Rectangle(xrect.x, xrect.y,
xrect.width, xrect.height);
}
return rect;
}
}
/// <summary>
/// <para>Get the list of rectangles that defines this region.</para>
/// </summary>
///
/// <returns>
/// <para>An array of <see cref="T:Xsharp.Rectangle"/> instances
/// corresponding to the rectangles that make up the region.
/// Returns a zero-length array if the region is empty.</para>
/// </returns>
public Rectangle[] GetRectangles()
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
return new Rectangle [0];
}
else
{
Rectangle[] rects;
XRectangle xrect;
int size, index;
size = Xlib.XSharpGetRegionSize(region);
rects = new Rectangle [size];
for(index = 0; index < size; ++index)
{
Xlib.XSharpGetRegionRect(region, index, out xrect);
rects[index].x = xrect.x;
rects[index].y = xrect.y;
rects[index].width = xrect.width;
rects[index].height = xrect.height;
}
return rects;
}
}
}
// Get the Xlib region structure, and make sure it is non-NULL.
internal IntPtr GetRegion()
{
lock(typeof(Region))
{
if(region == IntPtr.Zero)
{
region = Xlib.XCreateRegion();
if(region == IntPtr.Zero)
{
Display.OutOfMemory();
}
}
return region;
}
}
} // class Region
} // namespace Xsharp
| |
using System;
using System.Collections.Generic;
using System.Drawing;
using FastQuant;
using FastQuant.Indicators;
using System.Linq;
namespace Samples.Roll
{
public class MyStrategy : InstrumentStrategy
{
private Group barsGroup;
private Group fillGroup;
private Group equityGroup;
[Parameter]
public double AllocationPerInstrument = 100000;
[Parameter]
public double Qty = 100;
public MyStrategy(Framework framework, string name)
: base(framework, name)
{
}
protected override void OnStrategyStart()
{
Portfolio.Account.Deposit(AllocationPerInstrument, CurrencyId.USD, "Initial allocation");
AddGroups();
}
protected override void OnBar(Instrument instrument, Bar bar)
{
// Add bar to bar series.
Bars.Add(bar);
// Add bar to group.
Log(bar, barsGroup);
// Calculate performance.
Portfolio.Performance.Update();
// Add equity to group.
Log(Portfolio.Value, equityGroup);
// Check strategy logic.
if (Bars.Count > 2)
{
if (!HasPosition(Instrument))
{
if (Bars[Bars.Count - 1].Close > Bars[Bars.Count - 2].Close &&
Bars[Bars.Count - 2].Close > Bars[Bars.Count - 3].Close)
{
Order enterOrder = BuyOrder(Instrument, Qty, "Enter Long");
Send(enterOrder);
}
else if (Bars[Bars.Count - 1].Close < Bars[Bars.Count - 2].Close &&
Bars[Bars.Count - 2].Close < Bars[Bars.Count - 3].Close)
{
Order enterOrder = SellOrder(Instrument, Qty, "Enter Short");
Send(enterOrder);
}
}
else
{
if (Position.Side == PositionSide.Short &&
Bars[Bars.Count - 1].Close > Bars[Bars.Count - 2].Close &&
Bars[Bars.Count - 2].Close > Bars[Bars.Count - 3].Close)
{
Order reverseOrder = BuyOrder(Instrument, Math.Abs(Position.Amount) + Qty, "Reverse to Long");
Send(reverseOrder);
}
else if (Position.Side == PositionSide.Long &&
Bars[Bars.Count - 1].Close < Bars[Bars.Count - 2].Close &&
Bars[Bars.Count - 2].Close < Bars[Bars.Count - 3].Close)
{
Order reverseOrder = SellOrder(Instrument, Math.Abs(Position.Amount) + Qty, "Reverse to Short");
Send(reverseOrder);
}
}
}
}
protected override void OnFill(Fill fill)
{
// Add fill to fill group.
Log(fill, fillGroup);
}
private void AddGroups()
{
// Create bars group.
barsGroup = new Group("Bars");
barsGroup.Add("Pad", DataObjectType.String, 0);
barsGroup.Add("SelectorKey", Instrument.Symbol);
// Create fills group.
fillGroup = new Group("Fills");
fillGroup.Add("Pad", 0);
fillGroup.Add("SelectorKey", Instrument.Symbol);
// Create equity group.
equityGroup = new Group("Equity");
equityGroup.Add("Pad", 1);
equityGroup.Add("SelectorKey", Instrument.Symbol);
// Add groups to manager.
GroupManager.Add(barsGroup);
GroupManager.Add(fillGroup);
GroupManager.Add(equityGroup);
}
}
public class Backtest : Scenario
{
// Bar size in seconds. 14400 seconds is 4 hour.
private long barSize = 14400;
public Backtest(Framework framework)
: base(framework)
{
}
public override void Run()
{
// Get synthetic trading instrument.
Instrument instrument1 = InstrumentManager.Instruments["NQ"];
// Init roll info - leg index, symbol and maturity date.
List<RollInfo> rollInfo = new List<RollInfo>()
{
new RollInfo(0, "NQZ3", new DateTime(2013, 12, 20)),
new RollInfo(1, "NQH4", new DateTime(2014, 03, 21)),
};
// Add legs.
for (var i = 0; i < rollInfo.Count; i++)
instrument1.Legs.Add(new Leg(InstrumentManager.Instruments[rollInfo[i].Symbol]));
// Main strategy.
strategy = new Strategy(framework, "Roll");
// Create BuySide strategy and add trading instrument.
MyStrategy buySide = new MyStrategy(framework, "BuySide");
buySide.Instruments.Add(instrument1);
// Create SellSide strategy.
RollSellSide sellSide = new RollSellSide(framework, "SellSide");
sellSide.Global[RollSellSide.barSizeCode] = barSize;
sellSide.Global[RollSellSide.rollInfoCode] = rollInfo;
// Set SellSide as data and execution provider for BuySide strategy.
buySide.DataProvider = sellSide;
buySide.ExecutionProvider = sellSide;
// Add strategies to main.
strategy.AddStrategy(buySide);
strategy.AddStrategy(sellSide);
// Set DataSimulator's dates.
DataSimulator.DateTime1 = new DateTime(2013, 01, 01);
DataSimulator.DateTime2 = new DateTime(2013, 12, 31);
// Add 4 hours bars (14400 seconds) for ins1.
BarFactory.Add(instrument1, BarType.Time, barSize);
// Run.
StartStrategy();
}
}
public class Realtime : Scenario
{
private long barSize;
public Realtime(Framework framework)
: base(framework)
{
// Set bar size in seconds. 14400 seconds is 4 hour.
barSize = 14400;
}
public override void Run()
{
// Synthetic instrument.
Instrument instrument1 = InstrumentManager.Instruments["NQ"];
// Init roll info - leg index, symbol and maturity date.
List<RollInfo> rollInfo = new List<RollInfo>()
{
new RollInfo(0, "NQZ3", new DateTime(2013, 12, 20)),
new RollInfo(1, "NQH4", new DateTime(2014, 03, 21)),
};
// Add legs.
for (var i = 0; i < rollInfo.Count; i++)
instrument1.Legs.Add(new Leg(InstrumentManager.Instruments[rollInfo[i].Symbol]));
// Main strategy.
strategy = new Strategy(framework, "Roll");
// Create BuySide strategy and add trading instrument.
MyStrategy buySide = new MyStrategy(framework, "BuySide");
buySide.Instruments.Add(instrument1);
// Create SellSide strategy.
RollSellSide sellSide = new RollSellSide(framework, "SellSide");
sellSide.Global[RollSellSide.barSizeCode] = barSize;
sellSide.Global[RollSellSide.rollInfoCode] = rollInfo;
// Set SellSide as data and execution provider for BuySide strategy.
buySide.DataProvider = sellSide;
buySide.ExecutionProvider = sellSide;
// Add strategies to main.
strategy.AddStrategy(buySide);
strategy.AddStrategy(sellSide);
// Get provider for realtime.
Provider quantRouter = framework.ProviderManager.Providers["QuantRouter"] as Provider;
if (quantRouter.Status == ProviderStatus.Disconnected)
quantRouter.Connect();
if (StrategyManager.Mode == StrategyMode.Paper)
{
// Set QuantRouter as data provider.
strategy.DataProvider = quantRouter as IDataProvider;
}
else if (StrategyManager.Mode == StrategyMode.Live)
{
// Set QuantRouter as data and execution provider.
strategy.DataProvider = quantRouter as IDataProvider;
strategy.ExecutionProvider = quantRouter as IExecutionProvider;
}
BarFactory.Add(instrument1, BarType.Time, barSize);
// Run.
StartStrategy();
}
}
public class RollSellSide : SellSideStrategy
{
class RollInfo
{
public int LegIndex { get; private set; }
public string Symbol { get; private set; }
public DateTime Maturity { get; private set; }
public RollInfo(int legIndex, string symbol, DateTime maturity)
{
LegIndex = legIndex;
Symbol = symbol;
Maturity = maturity;
}
}
public const string rollInfoCode = "RollInfo";
public const string barSizeCode = "BarSize";
private long barSize;
private List<RollInfo> rollInfo;
private int legIndex;
private Instrument rootInstrument;
private Instrument currentFuturesContract;
private Dictionary<Instrument, Group> barsGroups;
private Dictionary<Instrument, Group> fillGroups;
private Dictionary<Order, ExecutionCommand> orderTable;
private List<Order> rollOrders;
#region Parameters
[Parameter]
public TimeSpan TimeOfRoll = new TimeSpan(09, 00, 05);
#endregion
public RollSellSide(Framework framework, string name)
: base(framework, name)
{
barsGroups = new Dictionary<Instrument, Group>();
fillGroups = new Dictionary<Instrument, Group>();
orderTable = new Dictionary<Order, ExecutionCommand>();
rollOrders = new List<Order>();
}
protected override void OnSubscribe(InstrumentList instruments)
{
// Get size of bar.
barSize = (long)Global[barSizeCode];
// Get roll info.
rollInfo = (List<RollInfo>)Global[rollInfoCode];
// Get root instrument.
rootInstrument = instruments.GetByIndex(0);
// Get current futures contract.
currentFuturesContract = rootInstrument.Legs[legIndex].Instrument;
// Add current futures contract to bar factory.
BarFactory.Add(currentFuturesContract, BarType.Time, barSize);
// Add current futures contract to strategy.
AddInstrument(currentFuturesContract);
// Add reminder to maturity date and roll time.
AddReminder(rollInfo[legIndex].Maturity.Date + TimeOfRoll);
AddGroups();
}
public override void OnSendCommand(ExecutionCommand command)
{
// Logic for send command.
if (command.Type == ExecutionCommandType.Send)
{
Order order;
// Create and send order to current futures contract.
switch (command.Order.Type)
{
case OrderType.Market:
if (command.Side == OrderSide.Buy)
order = BuyOrder(currentFuturesContract, command.Qty, command.Text);
else
order = SellOrder(currentFuturesContract, command.Qty, command.Text);
orderTable[order] = command;
Send(order);
break;
case OrderType.Limit:
if (command.Side == OrderSide.Buy)
order = BuyLimitOrder(currentFuturesContract, command.Qty, command.Price, command.Text);
else
order = SellLimitOrder(currentFuturesContract, command.Qty, command.Price, command.Text);
orderTable[order] = command;
Send(order);
break;
default:
break;
}
}
}
protected override void OnOrderFilled(Order order)
{
// Emit fill for BuySide strategy's order.
if (!rollOrders.Contains(order))
EmitFilled(order, orderTable[order]);
}
private void EmitFilled(Order order, ExecutionCommand command)
{
// Create execution report for BuySide strategy.
Instrument instrument = command.Instrument;
ExecutionReport execution = new ExecutionReport();
execution.AvgPx = order.AvgPx;
execution.Commission = 0;
execution.CumQty = order.CumQty;
execution.DateTime = framework.Clock.DateTime;
execution.ExecType = ExecType.ExecTrade;
execution.Instrument = instrument;
execution.LastPx = order.AvgPx;
execution.LastQty = command.Qty;
execution.LeavesQty = 0;
execution.Order = command.Order;
execution.OrdQty = command.Qty;
execution.OrdStatus = OrderStatus.Filled;
execution.OrdType = command.Order.Type;
execution.Price = command.Order.Price;
execution.Side = command.Order.Side;
execution.StopPx = command.Order.StopPx;
execution.Text = command.Order.Text;
// Emit execution report to BuySide strategy.
EmitExecutionReport(execution);
}
protected override void OnAsk(Instrument instrument, Ask ask)
{
if (instrument.Id == currentFuturesContract.Id)
{
Ask rootAsk = new Ask(ask.DateTime, 0, rootInstrument.Id, ask.Price, ask.Size);
// Emit ask to to BuySide strategy.
EmitAsk(rootAsk);
}
}
protected override void OnBid(Instrument instrument, Bid bid)
{
if (instrument.Id == currentFuturesContract.Id)
{
Bid rootBid = new Bid(bid.DateTime, 0, rootInstrument.Id, bid.Price, bid.Size);
// Emit bid to to BuySide strategy.
EmitBid(rootBid);
}
}
protected override void OnTrade(Instrument instrument, Trade trade)
{
if (instrument.Id == currentFuturesContract.Id)
{
Trade rootTrade = new Trade(trade.DateTime, 0, rootInstrument.Id, trade.Price, trade.Size);
// Emit trade to to BuySide strategy.
EmitTrade(rootTrade);
}
}
protected override void OnBar(Instrument instrument, Bar bar)
{
// Add bar to bar group.
Log(bar, barsGroups[instrument]);
}
protected override void OnFill(Fill fill)
{
// Add fill to fill group.
Log(fill, fillGroups[fill.Instrument]);
}
protected override void OnReminder(DateTime dateTime, object data)
{
legIndex++;
Position position = Portfolio.GetPosition(currentFuturesContract);
double rollAmount = 0;
if (position != null)
rollAmount = position.Amount;
if (legIndex > rollInfo.Count - 1)
return;
Instrument prevFuturesContract = currentFuturesContract;
// Get new current futures contract.
currentFuturesContract = rootInstrument.Legs[legIndex].Instrument;
// Add current futures contract to bar factory.
BarFactory.Add(currentFuturesContract, BarType.Time, barSize);
// Add current futures contract to strategy.
AddInstrument(currentFuturesContract);
// Add reminder to maturity date and roll time.
AddReminder(rollInfo[legIndex].Maturity.AddDays(-1).Date + TimeOfRoll);
AddGroups();
// Roll from previous contract to current contract if needed.
if (rollAmount > 0)
{
Order order1 = SellOrder(prevFuturesContract, Math.Abs(rollAmount), "Roll");
Order order2 = BuyOrder(currentFuturesContract, Math.Abs(rollAmount), "Roll");
rollOrders.Add(order1);
rollOrders.Add(order2);
Send(order1);
Send(order2);
}
else if (rollAmount < 0)
{
Order order1 = BuyOrder(prevFuturesContract, Math.Abs(rollAmount), "Roll");
Order order2 = SellOrder(currentFuturesContract, Math.Abs(rollAmount), "Roll");
rollOrders.Add(order1);
rollOrders.Add(order2);
Send(order1);
Send(order2);
}
}
private void AddGroups()
{
// Create bars group.
Group barGroup = new Group("Bars");
barGroup.Add("Pad", DataObjectType.Int, legIndex + 2);
barGroup.Add("SelectorKey", DataObjectType.String, rootInstrument.Symbol);
// Create fills group.
Group fillGroup = new Group("Fills");
fillGroup.Add("Pad", DataObjectType.Int, legIndex + 2);
fillGroup.Add("SelectorKey", DataObjectType.String, rootInstrument.Symbol);
// Add groups to manager.
GroupManager.Add(fillGroup);
GroupManager.Add(barGroup);
// Add groups to dictionary.
fillGroups[currentFuturesContract] = fillGroup;
barsGroups[currentFuturesContract] = barGroup;
}
}
public class RollInfo
{
public int LegIndex { get; private set; }
public string Symbol { get; private set; }
public DateTime Maturity { get; private set; }
public RollInfo(int legIndex, string symbol, DateTime maturity)
{
LegIndex = legIndex;
Symbol = symbol;
Maturity = maturity;
}
}
class Program
{
static void Main(string[] args)
{
var scenario = args.Contains("--realtime") ? (Scenario)new Realtime(Framework.Current) : (Scenario)new Backtest(Framework.Current);
scenario.Run();
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvc = Google.Ads.GoogleAds.V9.Common;
using gagve = Google.Ads.GoogleAds.V9.Enums;
using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gaxgrpc = Google.Api.Gax.Grpc;
using gr = Google.Rpc;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using NUnit.Framework;
using Google.Ads.GoogleAds.V9.Services;
namespace Google.Ads.GoogleAds.Tests.V9.Services
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedBiddingStrategyServiceClientTest
{
[Category("Autogenerated")][Test]
public void GetBiddingStrategyRequestObject()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
GetBiddingStrategyRequest request = new GetBiddingStrategyRequest
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
};
gagvr::BiddingStrategy expectedResponse = new gagvr::BiddingStrategy
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
Type = gagve::BiddingStrategyTypeEnum.Types.BiddingStrategyType.MaximizeConversionValue,
EnhancedCpc = new gagvc::EnhancedCpc(),
TargetCpa = new gagvc::TargetCpa(),
TargetRoas = new gagvc::TargetRoas(),
TargetSpend = new gagvc::TargetSpend(),
Status = gagve::BiddingStrategyStatusEnum.Types.BiddingStrategyStatus.Enabled,
Id = -6774108720365892680L,
BiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
CampaignCount = 7086295369533367171L,
NonRemovedCampaignCount = 8279371121198864414L,
EffectiveCurrencyCode = "effective_currency_code0045faae",
MaximizeConversionValue = new gagvc::MaximizeConversionValue(),
MaximizeConversions = new gagvc::MaximizeConversions(),
CurrencyCode = "currency_code7f81e352",
TargetImpressionShare = new gagvc::TargetImpressionShare(),
};
mockGrpcClient.Setup(x => x.GetBiddingStrategy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
gagvr::BiddingStrategy response = client.GetBiddingStrategy(request);
Assert.AreEqual(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task GetBiddingStrategyRequestObjectAsync()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
GetBiddingStrategyRequest request = new GetBiddingStrategyRequest
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
};
gagvr::BiddingStrategy expectedResponse = new gagvr::BiddingStrategy
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
Type = gagve::BiddingStrategyTypeEnum.Types.BiddingStrategyType.MaximizeConversionValue,
EnhancedCpc = new gagvc::EnhancedCpc(),
TargetCpa = new gagvc::TargetCpa(),
TargetRoas = new gagvc::TargetRoas(),
TargetSpend = new gagvc::TargetSpend(),
Status = gagve::BiddingStrategyStatusEnum.Types.BiddingStrategyStatus.Enabled,
Id = -6774108720365892680L,
BiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
CampaignCount = 7086295369533367171L,
NonRemovedCampaignCount = 8279371121198864414L,
EffectiveCurrencyCode = "effective_currency_code0045faae",
MaximizeConversionValue = new gagvc::MaximizeConversionValue(),
MaximizeConversions = new gagvc::MaximizeConversions(),
CurrencyCode = "currency_code7f81e352",
TargetImpressionShare = new gagvc::TargetImpressionShare(),
};
mockGrpcClient.Setup(x => x.GetBiddingStrategyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::BiddingStrategy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
gagvr::BiddingStrategy responseCallSettings = await client.GetBiddingStrategyAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
Assert.AreEqual(expectedResponse, responseCallSettings);
gagvr::BiddingStrategy responseCancellationToken = await client.GetBiddingStrategyAsync(request, st::CancellationToken.None);
Assert.AreEqual(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public void GetBiddingStrategy()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
GetBiddingStrategyRequest request = new GetBiddingStrategyRequest
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
};
gagvr::BiddingStrategy expectedResponse = new gagvr::BiddingStrategy
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
Type = gagve::BiddingStrategyTypeEnum.Types.BiddingStrategyType.MaximizeConversionValue,
EnhancedCpc = new gagvc::EnhancedCpc(),
TargetCpa = new gagvc::TargetCpa(),
TargetRoas = new gagvc::TargetRoas(),
TargetSpend = new gagvc::TargetSpend(),
Status = gagve::BiddingStrategyStatusEnum.Types.BiddingStrategyStatus.Enabled,
Id = -6774108720365892680L,
BiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
CampaignCount = 7086295369533367171L,
NonRemovedCampaignCount = 8279371121198864414L,
EffectiveCurrencyCode = "effective_currency_code0045faae",
MaximizeConversionValue = new gagvc::MaximizeConversionValue(),
MaximizeConversions = new gagvc::MaximizeConversions(),
CurrencyCode = "currency_code7f81e352",
TargetImpressionShare = new gagvc::TargetImpressionShare(),
};
mockGrpcClient.Setup(x => x.GetBiddingStrategy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
gagvr::BiddingStrategy response = client.GetBiddingStrategy(request.ResourceName);
Assert.AreEqual(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task GetBiddingStrategyAsync()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
GetBiddingStrategyRequest request = new GetBiddingStrategyRequest
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
};
gagvr::BiddingStrategy expectedResponse = new gagvr::BiddingStrategy
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
Type = gagve::BiddingStrategyTypeEnum.Types.BiddingStrategyType.MaximizeConversionValue,
EnhancedCpc = new gagvc::EnhancedCpc(),
TargetCpa = new gagvc::TargetCpa(),
TargetRoas = new gagvc::TargetRoas(),
TargetSpend = new gagvc::TargetSpend(),
Status = gagve::BiddingStrategyStatusEnum.Types.BiddingStrategyStatus.Enabled,
Id = -6774108720365892680L,
BiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
CampaignCount = 7086295369533367171L,
NonRemovedCampaignCount = 8279371121198864414L,
EffectiveCurrencyCode = "effective_currency_code0045faae",
MaximizeConversionValue = new gagvc::MaximizeConversionValue(),
MaximizeConversions = new gagvc::MaximizeConversions(),
CurrencyCode = "currency_code7f81e352",
TargetImpressionShare = new gagvc::TargetImpressionShare(),
};
mockGrpcClient.Setup(x => x.GetBiddingStrategyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::BiddingStrategy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
gagvr::BiddingStrategy responseCallSettings = await client.GetBiddingStrategyAsync(request.ResourceName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
Assert.AreEqual(expectedResponse, responseCallSettings);
gagvr::BiddingStrategy responseCancellationToken = await client.GetBiddingStrategyAsync(request.ResourceName, st::CancellationToken.None);
Assert.AreEqual(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public void GetBiddingStrategyResourceNames()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
GetBiddingStrategyRequest request = new GetBiddingStrategyRequest
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
};
gagvr::BiddingStrategy expectedResponse = new gagvr::BiddingStrategy
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
Type = gagve::BiddingStrategyTypeEnum.Types.BiddingStrategyType.MaximizeConversionValue,
EnhancedCpc = new gagvc::EnhancedCpc(),
TargetCpa = new gagvc::TargetCpa(),
TargetRoas = new gagvc::TargetRoas(),
TargetSpend = new gagvc::TargetSpend(),
Status = gagve::BiddingStrategyStatusEnum.Types.BiddingStrategyStatus.Enabled,
Id = -6774108720365892680L,
BiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
CampaignCount = 7086295369533367171L,
NonRemovedCampaignCount = 8279371121198864414L,
EffectiveCurrencyCode = "effective_currency_code0045faae",
MaximizeConversionValue = new gagvc::MaximizeConversionValue(),
MaximizeConversions = new gagvc::MaximizeConversions(),
CurrencyCode = "currency_code7f81e352",
TargetImpressionShare = new gagvc::TargetImpressionShare(),
};
mockGrpcClient.Setup(x => x.GetBiddingStrategy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
gagvr::BiddingStrategy response = client.GetBiddingStrategy(request.ResourceNameAsBiddingStrategyName);
Assert.AreEqual(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task GetBiddingStrategyResourceNamesAsync()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
GetBiddingStrategyRequest request = new GetBiddingStrategyRequest
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
};
gagvr::BiddingStrategy expectedResponse = new gagvr::BiddingStrategy
{
ResourceNameAsBiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
Type = gagve::BiddingStrategyTypeEnum.Types.BiddingStrategyType.MaximizeConversionValue,
EnhancedCpc = new gagvc::EnhancedCpc(),
TargetCpa = new gagvc::TargetCpa(),
TargetRoas = new gagvc::TargetRoas(),
TargetSpend = new gagvc::TargetSpend(),
Status = gagve::BiddingStrategyStatusEnum.Types.BiddingStrategyStatus.Enabled,
Id = -6774108720365892680L,
BiddingStrategyName = gagvr::BiddingStrategyName.FromCustomerBiddingStrategy("[CUSTOMER_ID]", "[BIDDING_STRATEGY_ID]"),
CampaignCount = 7086295369533367171L,
NonRemovedCampaignCount = 8279371121198864414L,
EffectiveCurrencyCode = "effective_currency_code0045faae",
MaximizeConversionValue = new gagvc::MaximizeConversionValue(),
MaximizeConversions = new gagvc::MaximizeConversions(),
CurrencyCode = "currency_code7f81e352",
TargetImpressionShare = new gagvc::TargetImpressionShare(),
};
mockGrpcClient.Setup(x => x.GetBiddingStrategyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<gagvr::BiddingStrategy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
gagvr::BiddingStrategy responseCallSettings = await client.GetBiddingStrategyAsync(request.ResourceNameAsBiddingStrategyName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
Assert.AreEqual(expectedResponse, responseCallSettings);
gagvr::BiddingStrategy responseCancellationToken = await client.GetBiddingStrategyAsync(request.ResourceNameAsBiddingStrategyName, st::CancellationToken.None);
Assert.AreEqual(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public void MutateBiddingStrategiesRequestObject()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
MutateBiddingStrategiesRequest request = new MutateBiddingStrategiesRequest
{
CustomerId = "customer_id3b3724cb",
Operations =
{
new BiddingStrategyOperation(),
},
PartialFailure = false,
ValidateOnly = true,
ResponseContentType = gagve::ResponseContentTypeEnum.Types.ResponseContentType.ResourceNameOnly,
};
MutateBiddingStrategiesResponse expectedResponse = new MutateBiddingStrategiesResponse
{
Results =
{
new MutateBiddingStrategyResult(),
},
PartialFailureError = new gr::Status(),
};
mockGrpcClient.Setup(x => x.MutateBiddingStrategies(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
MutateBiddingStrategiesResponse response = client.MutateBiddingStrategies(request);
Assert.AreEqual(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task MutateBiddingStrategiesRequestObjectAsync()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
MutateBiddingStrategiesRequest request = new MutateBiddingStrategiesRequest
{
CustomerId = "customer_id3b3724cb",
Operations =
{
new BiddingStrategyOperation(),
},
PartialFailure = false,
ValidateOnly = true,
ResponseContentType = gagve::ResponseContentTypeEnum.Types.ResponseContentType.ResourceNameOnly,
};
MutateBiddingStrategiesResponse expectedResponse = new MutateBiddingStrategiesResponse
{
Results =
{
new MutateBiddingStrategyResult(),
},
PartialFailureError = new gr::Status(),
};
mockGrpcClient.Setup(x => x.MutateBiddingStrategiesAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateBiddingStrategiesResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
MutateBiddingStrategiesResponse responseCallSettings = await client.MutateBiddingStrategiesAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
Assert.AreEqual(expectedResponse, responseCallSettings);
MutateBiddingStrategiesResponse responseCancellationToken = await client.MutateBiddingStrategiesAsync(request, st::CancellationToken.None);
Assert.AreEqual(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public void MutateBiddingStrategies()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
MutateBiddingStrategiesRequest request = new MutateBiddingStrategiesRequest
{
CustomerId = "customer_id3b3724cb",
Operations =
{
new BiddingStrategyOperation(),
},
};
MutateBiddingStrategiesResponse expectedResponse = new MutateBiddingStrategiesResponse
{
Results =
{
new MutateBiddingStrategyResult(),
},
PartialFailureError = new gr::Status(),
};
mockGrpcClient.Setup(x => x.MutateBiddingStrategies(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
MutateBiddingStrategiesResponse response = client.MutateBiddingStrategies(request.CustomerId, request.Operations);
Assert.AreEqual(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Category("Autogenerated")][Test]
public async stt::Task MutateBiddingStrategiesAsync()
{
moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient> mockGrpcClient = new moq::Mock<BiddingStrategyService.BiddingStrategyServiceClient>(moq::MockBehavior.Strict);
MutateBiddingStrategiesRequest request = new MutateBiddingStrategiesRequest
{
CustomerId = "customer_id3b3724cb",
Operations =
{
new BiddingStrategyOperation(),
},
};
MutateBiddingStrategiesResponse expectedResponse = new MutateBiddingStrategiesResponse
{
Results =
{
new MutateBiddingStrategyResult(),
},
PartialFailureError = new gr::Status(),
};
mockGrpcClient.Setup(x => x.MutateBiddingStrategiesAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<MutateBiddingStrategiesResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BiddingStrategyServiceClient client = new BiddingStrategyServiceClientImpl(mockGrpcClient.Object, null);
MutateBiddingStrategiesResponse responseCallSettings = await client.MutateBiddingStrategiesAsync(request.CustomerId, request.Operations, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
Assert.AreEqual(expectedResponse, responseCallSettings);
MutateBiddingStrategiesResponse responseCancellationToken = await client.MutateBiddingStrategiesAsync(request.CustomerId, request.Operations, st::CancellationToken.None);
Assert.AreEqual(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.